gt
stringclasses
1 value
context
stringlengths
2.49k
119k
'''
New Integration Test for mem allocator strategy.

@author: SyZhao
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.zstack_test.zstack_test_vm as test_vm_header
import zstackwoodpecker.operations.vm_operations as vm_ops
import time
import os
import threading
import random

vm = None
pre_vms = []       # VMs pre-created by prepare_host_with_different_mem_scenario()
vms = []           # VMs created in parallel by the worker threads
ts = []            # worker threads
invs = []          # inventories of successfully created VMs
exec_info = []     # names of VMs whose creation failed
vm_num = 9
test_obj_dict = test_state.TestStateDict()


def create_vm_wrapper(i, vm_creation_option):
    """Thread worker: create one VM named "vm-<i>".

    On success the VM wrapper is appended to the global `vms` list and its
    inventory to `invs`; on failure the VM name is recorded in `exec_info`
    so check_threads_exception() can report it after all threads join.
    """
    global invs, vms, exec_info
    try:
        vm = test_vm_header.ZstackTestVm()
        vm_creation_option.set_name("vm-%s" % (i))
        vm.set_creation_option(vm_creation_option)
        inv = vm.create()
        # BUG FIX: the inventory was computed but never stored even though
        # `invs` is declared global for exactly that purpose.
        invs.append(inv)
        vms.append(vm)
    except Exception:
        # Failures are collected, not raised, so that every worker thread
        # runs to completion before the test is failed.
        exec_info.append("vm-%s" % (i))


def check_threads_exception():
    """Fail the test if any worker thread recorded a VM creation failure."""
    global exec_info
    if exec_info:
        issue_vms_string = ' '.join(exec_info)
        test_util.test_fail("%s is failed to be created." % (issue_vms_string))


def prepare_host_with_different_mem_scenario():
    """
    Prepare vms in hosts

    Pre-creates an uneven number of VMs per host (2, 3, 1, 2 on the first
    four hosts of the primary storage) so the hosts begin the test with
    different amounts of available memory.
    """
    global pre_vms
    vm_creation_option = test_util.VmOption()
    image_name = os.environ.get('imageName_s')
    image_uuid = test_lib.lib_get_image_by_name(image_name).uuid
    l3_name = os.environ.get('l3VlanNetworkName1')
    #l3_name = os.environ.get('l3PublicNetworkName')
    l3_net_uuid = test_lib.lib_get_l3_by_name(l3_name).uuid
    vm_creation_option.set_l3_uuids([l3_net_uuid])
    vm_creation_option.set_image_uuid(image_uuid)
    ps_type = res_ops.query_resource(res_ops.PRIMARY_STORAGE)[0].type
    if ps_type != 'MiniStorage':
        conditions = res_ops.gen_query_conditions('type', '=', 'UserVm')
        instance_offering_uuid = res_ops.query_resource(
            res_ops.INSTANCE_OFFERING, conditions)[0].uuid
        vm_creation_option.set_instance_offering_uuid(instance_offering_uuid)
    else:
        # 1Core512M
        vm_creation_option.set_cpu_num(1)
        vm_creation_option.set_memory_size(536870912)
    ps_uuid = res_ops.query_resource(res_ops.PRIMARY_STORAGE)[0].uuid
    hosts = test_lib.lib_find_hosts_by_ps_uuid(ps_uuid)
    host_id = 0
    for host, max_vm_num in zip(hosts, [2, 3, 1, 2]):
        host_id += 1
        for i in range(max_vm_num):
            print("host_id=%s; i=%s" % (host_id, i))
            vm_creation_option.set_name('pre-create-vm-%s-%s' % (host_id, i))
            vm = test_vm_header.ZstackTestVm()
            vm_creation_option.set_host_uuid(host.uuid)
            vm.set_creation_option(vm_creation_option)
            vm.create()
            pre_vms.append(vm)


def get_vm_num_based_mem_available_on_host(host_uuid, each_vm_mem_consume):
    """
    This function is used to compute available mem num based on host current have

    Returns how many VMs of `each_vm_mem_consume` bytes still fit into the
    host's currently available memory.
    """
    host_total_mem = test_lib.lib_get_cpu_memory_capacity(
        host_uuids=[host_uuid]).totalMemory
    host_avail_mem = test_lib.lib_get_cpu_memory_capacity(
        host_uuids=[host_uuid]).availableMemory
    print("total: %s; avail: %s" % (host_total_mem, host_avail_mem))
    # Floor division: only whole VMs can be allocated (also keeps the result
    # an integer under both Python 2 and Python 3).
    return host_avail_mem // each_vm_mem_consume


def compute_total_vm_num_based_on_ps(ps_uuid, each_vm_mem_consume):
    """Sum, over every host attached to the primary storage, how many VMs of
    `each_vm_mem_consume` bytes still fit in available memory."""
    total_vm_num = 0
    hosts = test_lib.lib_find_hosts_by_ps_uuid(ps_uuid)
    for host in hosts:
        vm_num_on_host = get_vm_num_based_mem_available_on_host(
            host.uuid, each_vm_mem_consume)
        total_vm_num += vm_num_on_host
        print("@ALLOCATE: <host uuid: %s; vm num: %s>" % (host.uuid, vm_num_on_host))
    print("@TOTAL ALLOCATE: <total vm num: %s>" % (total_vm_num))
    return total_vm_num


def clean_host_with_different_mem_scenario():
    """
    Clean all the vms that generated from prepare function
    """
    global pre_vms
    for vm in pre_vms:
        try:
            vm.destroy()
        except Exception:
            # Best-effort cleanup: one failed destroy must not stop the
            # remaining VMs from being cleaned up.
            pass


def clean_parallel_created_vm():
    """Best-effort destroy of every VM created by the worker threads."""
    global vms
    for vm in vms:
        try:
            vm.destroy()
        except Exception:
            pass


def test():
    """
    Create as many VMs in parallel as the hosts' available memory allows,
    then verify every creation succeeded (exercising the mem allocator
    strategy against hosts with uneven free memory).
    """
    global vms
    image_name = os.environ.get('imageName_s')
    image_uuid = test_lib.lib_get_image_by_name(image_name).uuid
    l3_name = os.environ.get('l3VlanNetworkName1')
    #l3_name = os.environ.get('l3PublicNetworkName')
    l3_net_uuid = test_lib.lib_get_l3_by_name(l3_name).uuid

    cpuNum = 1
    #cpuSpeed = 8
    memorySize = 536870912
    each_vm_mem_consume = memorySize
    vm_creation_option = test_util.VmOption()
    vm_creation_option.set_l3_uuids([l3_net_uuid])
    vm_creation_option.set_image_uuid(image_uuid)
    vm_creation_option.set_timeout(600000)
    ps_type = res_ops.query_resource(res_ops.PRIMARY_STORAGE)[0].type
    # BUG FIX: this previously compared against the misspelled 'MiniStoragy',
    # so the MiniStorage branch below was unreachable.
    # prepare_host_with_different_mem_scenario() already uses the correct
    # 'MiniStorage' spelling.
    if ps_type != 'MiniStorage':
        name = 'vm-offering-allocator-strategy'
        new_offering_option = test_util.InstanceOfferingOption()
        new_offering_option.set_cpuNum(cpuNum)
        #new_offering_option.set_cpuSpeed(cpuSpeed)
        new_offering_option.set_memorySize(memorySize)
        new_offering_option.set_name(name)
        new_offering = vm_ops.create_instance_offering(new_offering_option)
        test_obj_dict.add_instance_offering(new_offering)
        instance_offering_uuid = new_offering.uuid
        vm_creation_option.set_instance_offering_uuid(instance_offering_uuid)
    else:
        vm_creation_option.set_cpu_num(cpuNum)
        vm_creation_option.set_memory_size(memorySize)

    # create different mem usage of hosts scenario
    prepare_host_with_different_mem_scenario()

    ps_uuid = res_ops.query_resource(res_ops.PRIMARY_STORAGE)[0].uuid
    vm_num = compute_total_vm_num_based_on_ps(ps_uuid, each_vm_mem_consume)

    # trigger vm create
    for i in range(vm_num):
        t = threading.Thread(target=create_vm_wrapper,
                             args=(i, vm_creation_option))
        ts.append(t)
        t.start()

    for t in ts:
        t.join()

    check_threads_exception()

    # clean the prepare scenario
    clean_host_with_different_mem_scenario()
    clean_parallel_created_vm()
    test_util.test_pass('Create VM Test Success')


#Will be called only if exception happens in test().
def error_cleanup():
    """Rollback helper invoked by the framework when test() raises."""
    global vms
    global pre_vms
    global test_obj_dict
    test_lib.lib_error_cleanup(test_obj_dict)
    clean_host_with_different_mem_scenario()
    clean_parallel_created_vm()
from datetime import timedelta
from textwrap import dedent

from zipline import TradingAlgorithm
from zipline.finance.commission import PerTrade, PerShare, PerDollar
from zipline.finance.order import Order
from zipline.finance.transaction import Transaction
from zipline.testing import ZiplineTestCase, trades_by_sid_to_dfs
from zipline.testing.fixtures import (
    WithAssetFinder,
    WithSimParams,
    WithDataPortal
)
from zipline.utils import factory


class CommissionUnitTests(WithAssetFinder, ZiplineTestCase):
    """Unit tests that call the commission models' ``calculate`` directly,
    using one 500-share order filled by three transactions (230, 170 and
    100 shares at prices 100, 101 and 102)."""

    ASSET_FINDER_EQUITY_SIDS = 1, 2

    def generate_order_and_txns(self):
        """Build the shared fixture: one order and its three fills.

        Returns a ``(order, [txn1, txn2, txn3])`` tuple; each txn carries
        the order's id so the models can attribute the fills to it.
        """
        asset1 = self.asset_finder.retrieve_asset(1)

        # one order
        order = Order(dt=None, sid=asset1, amount=500)

        # three fills
        txn1 = Transaction(sid=asset1, amount=230, dt=None,
                           price=100, order_id=order.id)

        txn2 = Transaction(sid=asset1, amount=170, dt=None,
                           price=101, order_id=order.id)

        txn3 = Transaction(sid=asset1, amount=100, dt=None,
                           price=102, order_id=order.id)

        return order, [txn1, txn2, txn3]

    def test_per_trade(self):
        """PerTrade charges its flat cost on the first fill only."""
        model = PerTrade(cost=10)

        order, txns = self.generate_order_and_txns()

        # first fill of the order incurs the full flat cost
        self.assertEqual(10, model.calculate(order, txns[0]))

        # once the order has commission recorded, later fills cost nothing
        order.commission = 10

        self.assertEqual(0, model.calculate(order, txns[1]))
        self.assertEqual(0, model.calculate(order, txns[2]))

    def test_per_share_no_minimum(self):
        """PerShare with no minimum charges cost-per-share on each fill."""
        model = PerShare(cost=0.0075, min_trade_cost=None)

        order, txns = self.generate_order_and_txns()

        # make sure each commission is pro-rated
        # (230, 170, 100 shares at 0.0075 per share)
        self.assertAlmostEqual(1.725, model.calculate(order, txns[0]))
        self.assertAlmostEqual(1.275, model.calculate(order, txns[1]))
        self.assertAlmostEqual(0.75, model.calculate(order, txns[2]))

    def verify_per_share_commissions(self, model, commission_totals):
        """Apply ``model`` to each fill in turn and assert the cumulative
        commission on the order matches ``commission_totals`` step by step.
        """
        order, txns = self.generate_order_and_txns()

        for i, commission_total in enumerate(commission_totals):
            order.commission += model.calculate(order, txns[i])
            self.assertAlmostEqual(commission_total, order.commission)
            # advance the order's filled count as the backtest loop would
            order.filled += txns[i].amount

    def test_per_share_with_minimum(self):
        """PerShare's ``min_trade_cost`` floors the cumulative commission
        regardless of which fill first crosses the minimum."""
        # minimum is met by the first trade
        self.verify_per_share_commissions(
            PerShare(cost=0.0075, min_trade_cost=1),
            [1.725, 3, 3.75]
        )

        # minimum is met by the second trade
        self.verify_per_share_commissions(
            PerShare(cost=0.0075, min_trade_cost=2.5),
            [2.5, 3, 3.75]
        )

        # minimum is met by the third trade
        self.verify_per_share_commissions(
            PerShare(cost=0.0075, min_trade_cost=3.5),
            [3.5, 3.5, 3.75]
        )

        # minimum is not met by any of the trades
        self.verify_per_share_commissions(
            PerShare(cost=0.0075, min_trade_cost=5.5),
            [5.5, 5.5, 5.5]
        )

    def test_per_dollar(self):
        """PerDollar charges cost per dollar of each fill's value."""
        model = PerDollar(cost=0.0015)

        order, txns = self.generate_order_and_txns()

        # make sure each commission is pro-rated
        # (230*100, 170*101, 100*102 dollars at 0.0015 per dollar)
        self.assertAlmostEqual(34.5, model.calculate(order, txns[0]))
        self.assertAlmostEqual(25.755, model.calculate(order, txns[1]))
        self.assertAlmostEqual(15.3, model.calculate(order, txns[2]))


class CommissionAlgorithmTests(WithDataPortal, WithSimParams, ZiplineTestCase):
    """End-to-end tests running a tiny algorithm that places one 300-share
    order (filled 100 shares/day over three days at $10) and checking that
    the commissions recorded on the order copies, and the capital used,
    match each commission model."""

    # make sure order commissions are properly incremented
    sidint, = ASSET_FINDER_EQUITY_SIDS = (133,)

    # Algorithm source template; {0} is replaced with a set_commission(...)
    # call and {1} with the order size when the test formats it.
    code = dedent(
        """
        from zipline.api import (
            sid, order, set_slippage, slippage, FixedSlippage,
            set_commission, commission
        )

        def initialize(context):
            # for these tests, let us take out the entire bar with no price
            # impact
            set_slippage(slippage.VolumeShareSlippage(1.0, 0))

            {0}
            context.ordered = False

        def handle_data(context, data):
            if not context.ordered:
                order(sid(133), {1})
                context.ordered = True
        """,
    )

    @classmethod
    def make_daily_bar_data(cls):
        # flat $10 price, 100 shares of volume every day of the simulation
        num_days = len(cls.sim_params.trading_days)
        return trades_by_sid_to_dfs(
            {
                cls.sidint: factory.create_trade_history(
                    cls.sidint,
                    [10.0] * num_days,
                    [100.0] * num_days,
                    timedelta(days=1),
                    cls.sim_params,
                    cls.env,
                ),
            },
            index=cls.sim_params.trading_days,
        )

    def get_results(self, algo_code):
        """Run ``algo_code`` through TradingAlgorithm and return the daily
        performance results."""
        algo = TradingAlgorithm(
            script=algo_code,
            env=self.env,
            sim_params=self.sim_params
        )

        return algo.run(self.data_portal)

    def test_per_trade(self):
        results = self.get_results(
            self.code.format("set_commission(commission.PerTrade(1))", 300)
        )

        # should be 3 fills at 100 shares apiece
        # one order split among 3 days, each copy of the order should have a
        # commission of one dollar
        for orders in results.orders[1:4]:
            self.assertEqual(1, orders[0]["commission"])

        self.verify_capital_used(results, [-1001, -1000, -1000])

    def test_per_share_no_minimum(self):
        results = self.get_results(
            self.code.format("set_commission(commission.PerShare(0.05, None))",
                             300)
        )

        # should be 3 fills at 100 shares apiece
        # one order split among 3 days, each fill generates an additional
        # 100 * 0.05 = $5 in commission
        for i, orders in enumerate(results.orders[1:4]):
            self.assertEqual((i + 1) * 5, orders[0]["commission"])

        self.verify_capital_used(results, [-1005, -1005, -1005])

    def test_per_share_with_minimum(self):
        # minimum hit by first trade
        results = self.get_results(
            self.code.format("set_commission(commission.PerShare(0.05, 3))",
                             300)
        )

        # commissions should be 5, 10, 15
        for i, orders in enumerate(results.orders[1:4]):
            self.assertEqual((i + 1) * 5, orders[0]["commission"])

        self.verify_capital_used(results, [-1005, -1005, -1005])

        # minimum hit by second trade
        results = self.get_results(
            self.code.format("set_commission(commission.PerShare(0.05, 8))",
                             300)
        )

        # commissions should be 8, 10, 15
        self.assertEqual(8, results.orders[1][0]["commission"])
        self.assertEqual(10, results.orders[2][0]["commission"])
        self.assertEqual(15, results.orders[3][0]["commission"])

        self.verify_capital_used(results, [-1008, -1002, -1005])

        # minimum hit by third trade
        results = self.get_results(
            self.code.format("set_commission(commission.PerShare(0.05, 12))",
                             300)
        )

        # commissions should be 12, 12, 15
        self.assertEqual(12, results.orders[1][0]["commission"])
        self.assertEqual(12, results.orders[2][0]["commission"])
        self.assertEqual(15, results.orders[3][0]["commission"])

        self.verify_capital_used(results, [-1012, -1000, -1003])

        # minimum never hit
        results = self.get_results(
            self.code.format("set_commission(commission.PerShare(0.05, 18))",
                             300)
        )

        # commissions should be 18, 18, 18
        self.assertEqual(18, results.orders[1][0]["commission"])
        self.assertEqual(18, results.orders[2][0]["commission"])
        self.assertEqual(18, results.orders[3][0]["commission"])

        self.verify_capital_used(results, [-1018, -1000, -1000])

    def test_per_dollar(self):
        results = self.get_results(
            self.code.format("set_commission(commission.PerDollar(0.01))", 300)
        )

        # should be 3 fills at 100 shares apiece, each fill is worth $1k, so
        # incremental commission of $1000 * 0.01 = $10

        # commissions should be $10, $20, $30
        for i, orders in enumerate(results.orders[1:4]):
            self.assertEqual((i + 1) * 10, orders[0]["commission"])

        self.verify_capital_used(results, [-1010, -1010, -1010])

    def verify_capital_used(self, results, values):
        """Assert the capital consumed on simulation days 1-3 matches
        ``values`` (trade value plus that day's commission, negated)."""
        self.assertEqual(values[0], results.capital_used[1])
        self.assertEqual(values[1], results.capital_used[2])
        self.assertEqual(values[2], results.capital_used[3])
from datetime import datetime, timedelta from django.core.exceptions import ValidationError from django.test import TestCase from django.urls import reverse from django.utils import timezone from OpenSSL import crypto from swapper import load_model from .. import settings as app_settings from ..base.models import datetime_to_string, generalized_time, utc_time from . import TestX509Mixin Ca = load_model('django_x509', 'Ca') Cert = load_model('django_x509', 'Cert') class TestCa(TestX509Mixin, TestCase): """ tests for Ca model """ def _prepare_revoked(self): ca = self._create_ca() crl = crypto.load_crl(crypto.FILETYPE_PEM, ca.crl) self.assertIsNone(crl.get_revoked()) cert = self._create_cert(ca=ca) cert.revoke() return (ca, cert) import_certificate = """ -----BEGIN CERTIFICATE----- MIIB4zCCAY2gAwIBAwIDAeJAMA0GCSqGSIb3DQEBBQUAMHcxCzAJBgNVBAYTAlVT MQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwE QUNNRTEfMB0GCSqGSIb3DQEJARYQY29udGFjdEBhY21lLmNvbTETMBEGA1UEAwwK aW1wb3J0dGVzdDAiGA8yMDE1MDEwMTAwMDAwMFoYDzIwMjAwMTAxMDAwMDAwWjB3 MQswCQYDVQQGEwJVUzELMAkGA1UECAwCQ0ExFjAUBgNVBAcMDVNhbiBGcmFuY2lz Y28xDTALBgNVBAoMBEFDTUUxHzAdBgkqhkiG9w0BCQEWEGNvbnRhY3RAYWNtZS5j b20xEzARBgNVBAMMCmltcG9ydHRlc3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEA v42Y9u9pYUiFRb36lwqdLmG8hCjl0g0HlMo2WqvHCTLk2CJvprBEuggSnaRCAmG9 ipCIds/ggaJ/w4KqJabNQQIDAQABMA0GCSqGSIb3DQEBBQUAA0EAAfEPPqbY1TLw 6IXNVelAXKxUp2f8FYCnlb0pQ3tswvefpad3h3oHrI2RGkIsM70axo7dAEk05Tj0 Zt3jXRLGAQ== -----END CERTIFICATE----- """ import_private_key = """ -----BEGIN PRIVATE KEY----- MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEAv42Y9u9pYUiFRb36 lwqdLmG8hCjl0g0HlMo2WqvHCTLk2CJvprBEuggSnaRCAmG9ipCIds/ggaJ/w4Kq JabNQQIDAQABAkEAqpB3CEqeVxWwNi24GQ5Gb6pvpm6UVblsary0MYCLtk+jK6fg KCptUIryQ4cblZF54y3+wrLzJ9LUOStkk10DwQIhAPItbg5PqSZTCE/Ql20jUggo BHpXO7FI157oMxXnBJtVAiEAynx4ocYpgVtmJ9iSooZRtPp9ullEdUtU2pedSgY6 oj0CIHtcBs6FZ20dKIO3hhrSvgtnjvhejQp+R08rijIi7ibNAiBUOhR/zosjSN6k gnz0aAUC0BOOeWV1mQFR8DE4QoEPTQIhAIdGrho1hsZ3Cs7mInJiLLhh4zwnndQx 
WRyKPvMvJzWT -----END PRIVATE KEY----- """ def test_new(self): ca = self._create_ca() self.assertNotEqual(ca.certificate, '') self.assertNotEqual(ca.private_key, '') cert = crypto.load_certificate(crypto.FILETYPE_PEM, ca.certificate) self.assertEqual(int(cert.get_serial_number()), int(ca.serial_number)) subject = cert.get_subject() self.assertEqual(subject.countryName, ca.country_code) self.assertEqual(subject.stateOrProvinceName, ca.state) self.assertEqual(subject.localityName, ca.city) self.assertEqual(subject.organizationName, ca.organization_name) self.assertEqual(subject.emailAddress, ca.email) self.assertEqual(subject.commonName, ca.common_name) issuer = cert.get_issuer() self.assertEqual(issuer.countryName, ca.country_code) self.assertEqual(issuer.stateOrProvinceName, ca.state) self.assertEqual(issuer.localityName, ca.city) self.assertEqual(issuer.organizationName, ca.organization_name) self.assertEqual(issuer.emailAddress, ca.email) self.assertEqual(issuer.commonName, ca.common_name) # ensure version is 3 self.assertEqual(cert.get_version(), 2) # basic constraints e = cert.get_extension(0) self.assertEqual(e.get_critical(), 1) self.assertEqual(e.get_short_name().decode(), 'basicConstraints') self.assertEqual(e.get_data(), b'0\x06\x01\x01\xff\x02\x01\x00') def test_x509_property(self): ca = self._create_ca() cert = crypto.load_certificate(crypto.FILETYPE_PEM, ca.certificate) self.assertEqual(ca.x509.get_subject(), cert.get_subject()) self.assertEqual(ca.x509.get_issuer(), cert.get_issuer()) def test_x509_property_none(self): self.assertIsNone(Ca().x509) def test_pkey_property(self): ca = self._create_ca() self.assertIsInstance(ca.pkey, crypto.PKey) def test_pkey_property_none(self): self.assertIsNone(Ca().pkey) def test_default_validity_end(self): ca = Ca() self.assertEqual(ca.validity_end.year, datetime.now().year + 10) def test_default_validity_start(self): ca = Ca() expected = datetime.now() - timedelta(days=1) self.assertEqual(ca.validity_start.year, 
expected.year) self.assertEqual(ca.validity_start.month, expected.month) self.assertEqual(ca.validity_start.day, expected.day) self.assertEqual(ca.validity_start.hour, 0) self.assertEqual(ca.validity_start.minute, 0) self.assertEqual(ca.validity_start.second, 0) def test_import_ca(self): ca = Ca(name='ImportTest') ca.certificate = self.import_certificate ca.private_key = self.import_private_key ca.full_clean() ca.save() cert = ca.x509 # verify attributes self.assertEqual(cert.get_serial_number(), 123456) subject = cert.get_subject() self.assertEqual(subject.countryName, 'US') self.assertEqual(subject.stateOrProvinceName, 'CA') self.assertEqual(subject.localityName, 'San Francisco') self.assertEqual(subject.organizationName, 'ACME') self.assertEqual(subject.emailAddress, 'contact@acme.com') self.assertEqual(subject.commonName, 'importtest') issuer = cert.get_issuer() self.assertEqual(issuer.countryName, 'US') self.assertEqual(issuer.stateOrProvinceName, 'CA') self.assertEqual(issuer.localityName, 'San Francisco') self.assertEqual(issuer.organizationName, 'ACME') self.assertEqual(issuer.emailAddress, 'contact@acme.com') self.assertEqual(issuer.commonName, 'importtest') # verify field attribtues self.assertEqual(ca.key_length, '512') self.assertEqual(ca.digest, 'sha1') start = timezone.make_aware( datetime.strptime('20150101000000Z', generalized_time) ) self.assertEqual(ca.validity_start, start) end = timezone.make_aware( datetime.strptime('20200101000000Z', generalized_time) ) self.assertEqual(ca.validity_end, end) self.assertEqual(ca.country_code, 'US') self.assertEqual(ca.state, 'CA') self.assertEqual(ca.city, 'San Francisco') self.assertEqual(ca.organization_name, 'ACME') self.assertEqual(ca.email, 'contact@acme.com') self.assertEqual(ca.common_name, 'importtest') self.assertEqual(ca.name, 'ImportTest') self.assertEqual(int(ca.serial_number), 123456) # ensure version is 3 self.assertEqual(cert.get_version(), 3) ca.delete() # test auto name ca = Ca( 
certificate=self.import_certificate, private_key=self.import_private_key ) ca.full_clean() ca.save() self.assertEqual(ca.name, 'importtest') def test_import_private_key_empty(self): ca = Ca(name='ImportTest') ca.certificate = self.import_certificate try: ca.full_clean() except ValidationError as e: # verify error message self.assertIn('importing an existing certificate', str(e)) else: self.fail('ValidationError not raised') def test_basic_constraints_not_critical(self): setattr(app_settings, 'CA_BASIC_CONSTRAINTS_CRITICAL', False) ca = self._create_ca() e = ca.x509.get_extension(0) self.assertEqual(e.get_critical(), 0) setattr(app_settings, 'CA_BASIC_CONSTRAINTS_CRITICAL', True) def test_basic_constraints_pathlen(self): setattr(app_settings, 'CA_BASIC_CONSTRAINTS_PATHLEN', 2) ca = self._create_ca() e = ca.x509.get_extension(0) self.assertEqual(e.get_data(), b'0\x06\x01\x01\xff\x02\x01\x02') setattr(app_settings, 'CA_BASIC_CONSTRAINTS_PATHLEN', 0) def test_basic_constraints_pathlen_none(self): setattr(app_settings, 'CA_BASIC_CONSTRAINTS_PATHLEN', None) ca = self._create_ca() e = ca.x509.get_extension(0) self.assertEqual(e.get_data(), b'0\x03\x01\x01\xff') setattr(app_settings, 'CA_BASIC_CONSTRAINTS_PATHLEN', 0) def test_keyusage(self): ca = self._create_ca() e = ca.x509.get_extension(1) self.assertEqual(e.get_short_name().decode(), 'keyUsage') self.assertEqual(e.get_critical(), True) self.assertEqual(e.get_data(), b'\x03\x02\x01\x06') def test_keyusage_not_critical(self): setattr(app_settings, 'CA_KEYUSAGE_CRITICAL', False) ca = self._create_ca() e = ca.x509.get_extension(1) self.assertEqual(e.get_short_name().decode(), 'keyUsage') self.assertEqual(e.get_critical(), False) setattr(app_settings, 'CA_KEYUSAGE_CRITICAL', True) def test_keyusage_value(self): setattr(app_settings, 'CA_KEYUSAGE_VALUE', 'cRLSign, keyCertSign, keyAgreement') ca = self._create_ca() e = ca.x509.get_extension(1) self.assertEqual(e.get_short_name().decode(), 'keyUsage') 
self.assertEqual(e.get_data(), b'\x03\x02\x01\x0e') setattr(app_settings, 'CA_KEYUSAGE_VALUE', 'cRLSign, keyCertSign') def test_subject_key_identifier(self): ca = self._create_ca() e = ca.x509.get_extension(2) self.assertEqual(e.get_short_name().decode(), 'subjectKeyIdentifier') self.assertEqual(e.get_critical(), False) e2 = crypto.X509Extension( b'subjectKeyIdentifier', False, b'hash', subject=ca.x509 ) self.assertEqual(e.get_data(), e2.get_data()) def test_authority_key_identifier(self): ca = self._create_ca() e = ca.x509.get_extension(3) self.assertEqual(e.get_short_name().decode(), 'authorityKeyIdentifier') self.assertEqual(e.get_critical(), False) e2 = crypto.X509Extension( b'authorityKeyIdentifier', False, b'keyid:always,issuer:always', issuer=ca.x509, ) self.assertEqual(e.get_data(), e2.get_data()) def test_extensions(self): extensions = [ { 'name': 'nsComment', 'critical': False, 'value': 'CA - autogenerated Certificate', } ] ca = self._create_ca(extensions=extensions) e1 = ca.x509.get_extension(4) self.assertEqual(e1.get_short_name().decode(), 'nsComment') self.assertEqual(e1.get_critical(), False) self.assertEqual(e1.get_data(), b'\x16\x1eCA - autogenerated Certificate') def test_extensions_error1(self): extensions = {} try: self._create_ca(extensions=extensions) except ValidationError as e: # verify error message self.assertIn('Extension format invalid', str(e.message_dict['__all__'][0])) else: self.fail('ValidationError not raised') def test_extensions_error2(self): extensions = [{'wrong': 'wrong'}] try: self._create_ca(extensions=extensions) except ValidationError as e: # verify error message self.assertIn('Extension format invalid', str(e.message_dict['__all__'][0])) else: self.fail('ValidationError not raised') def test_get_revoked_certs(self): ca = self._create_ca() c1 = self._create_cert(ca=ca) c2 = self._create_cert(ca=ca) self._create_cert(ca=ca) self.assertEqual(ca.get_revoked_certs().count(), 0) c1.revoke() 
self.assertEqual(ca.get_revoked_certs().count(), 1) c2.revoke() self.assertEqual(ca.get_revoked_certs().count(), 2) now = timezone.now() # expired certificates are not counted start = now - timedelta(days=6650) end = now - timedelta(days=6600) c4 = self._create_cert(ca=ca, validity_start=start, validity_end=end) c4.revoke() self.assertEqual(ca.get_revoked_certs().count(), 2) # inactive not counted yet start = now + timedelta(days=2) end = now + timedelta(days=365) c5 = self._create_cert(ca=ca, validity_start=start, validity_end=end) c5.revoke() self.assertEqual(ca.get_revoked_certs().count(), 2) def test_crl(self): ca, cert = self._prepare_revoked() crl = crypto.load_crl(crypto.FILETYPE_PEM, ca.crl) revoked_list = crl.get_revoked() self.assertIsNotNone(revoked_list) self.assertEqual(len(revoked_list), 1) self.assertEqual(int(revoked_list[0].get_serial()), cert.serial_number) def test_crl_view(self): ca, cert = self._prepare_revoked() response = self.client.get(reverse('admin:crl', args=[ca.pk])) self.assertEqual(response.status_code, 200) crl = crypto.load_crl(crypto.FILETYPE_PEM, response.content) revoked_list = crl.get_revoked() self.assertIsNotNone(revoked_list) self.assertEqual(len(revoked_list), 1) self.assertEqual(int(revoked_list[0].get_serial()), cert.serial_number) def test_crl_view_403(self): setattr(app_settings, 'CRL_PROTECTED', True) ca, _ = self._prepare_revoked() response = self.client.get(reverse('admin:crl', args=[ca.pk])) self.assertEqual(response.status_code, 403) setattr(app_settings, 'CRL_PROTECTED', False) def test_crl_view_404(self): self._prepare_revoked() response = self.client.get(reverse('admin:crl', args=[10])) self.assertEqual(response.status_code, 404) def test_x509_text(self): ca = self._create_ca() text = crypto.dump_certificate(crypto.FILETYPE_TEXT, ca.x509) self.assertEqual(ca.x509_text, text.decode('utf-8')) def test_x509_import_exception_fixed(self): certificate = """-----BEGIN CERTIFICATE----- 
MIIEBTCCAu2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJJVDEL MAkGA1UECAwCUk0xDTALBgNVBAcMBFJvbWExDzANBgNVBAoMBkNpbmVjYTEVMBMG A1UEAwwMUHJvdmEgQ2luZWNhMB4XDTE2MDkyMTA5MDQyOFoXDTM2MDkyMTA5MDQy OFowUTELMAkGA1UEBhMCSVQxCzAJBgNVBAgMAlJNMQ0wCwYDVQQHDARSb21hMQ8w DQYDVQQKDAZDaW5lY2ExFTATBgNVBAMMDFByb3ZhIENpbmVjYTCCASIwDQYJKoZI hvcNAQEBBQADggEPADCCAQoCggEBAMV26pysBdm3OqhyyZjbWZ3ThmH6QTIDScTj +1y3nGgnIwgpHWJmZiO/XrwYburLttE+NP7qwgtRcVoxTJFnhuunSei8vE9lyooD l1wRUU0qMZSWB/Q3OF+S+FhRMtymx+H6a46yC5Wqxk0apNlvAJ1avuBtZjvipQHS Z3ub5iHpHr0LZKYbqq2yXna6SbGUjnGjVieIXTilbi/9yjukhNvoHC1fSXciV8hO 8GFuR5bUF/6kQFFMZsk3vXNTsKVx5ef7+zpN6n8lGmNAC8D28EqBxar4YAhuu8Jw +gvguEOji5BsF8pTu4NVBXia0xWjD1DKLmueVLu9rd4l2HGxsA0CAwEAAaOB5zCB 5DAMBgNVHRMEBTADAQH/MC0GCWCGSAGG+EIBDQQgFh5DQSAtIGF1dG9nZW5lcmF0 ZWQgQ2VydGlmaWNhdGUwCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQjUcBhP7i26o7R iaVbmRStMVsggTB5BgNVHSMEcjBwgBQjUcBhP7i26o7RiaVbmRStMVsggaFVpFMw UTELMAkGA1UEBhMCSVQxCzAJBgNVBAgMAlJNMQ0wCwYDVQQHDARSb21hMQ8wDQYD VQQKDAZDaW5lY2ExFTATBgNVBAMMDFByb3ZhIENpbmVjYYIBATANBgkqhkiG9w0B AQUFAAOCAQEAg0yQ8CGHGl4p2peALn63HxkAxKzxc8bD/bCItXHq3QFJAYRe5nuu eGBMdlVvlzh+N/xW1Jcl3+dg9UOlB5/eFr0BWXyk/0vtnJoMKjc4eVAcOlcbgk9s c0J4ZACrfjbBH9bU7OgYy4NwVXWQFbQqDZ4/beDnuA8JZcGV5+gK3H85pqGBndev 4DUTCrYk+kRLMyWLfurH7dSyw/9DXAmOVPB6SMkTK6sqkhwUmT6hEdADFUBTujes AjGrlOCMA8XDvvxVEl5nA6JjoPAQ8EIjYvxMykZE+nk0ZO4mqMG5DWCp/2ggodAD tnpHdm8yeMsoFPm+yZVDHDXjAirS6MX28w== -----END CERTIFICATE-----""" private_key = """-----BEGIN RSA PRIVATE KEY----- MIIEowIBAAKCAQEAxXbqnKwF2bc6qHLJmNtZndOGYfpBMgNJxOP7XLecaCcjCCkd YmZmI79evBhu6su20T40/urCC1FxWjFMkWeG66dJ6Ly8T2XKigOXXBFRTSoxlJYH 9Dc4X5L4WFEy3KbH4fprjrILlarGTRqk2W8AnVq+4G1mO+KlAdJne5vmIekevQtk phuqrbJedrpJsZSOcaNWJ4hdOKVuL/3KO6SE2+gcLV9JdyJXyE7wYW5HltQX/qRA UUxmyTe9c1OwpXHl5/v7Ok3qfyUaY0ALwPbwSoHFqvhgCG67wnD6C+C4Q6OLkGwX ylO7g1UFeJrTFaMPUMoua55Uu72t3iXYcbGwDQIDAQABAoIBAD2pWa/c4+LNncqW Na++52gqcm9MB2nHrxSFoKueRoAboIve0uc0VLba/ok8E/7L6GXEyCXGRxvjrcLd XCyXqIET9zdvIFqmza11W6GLYtj20Q62Hvu69qaZrWVezcQrbIV7fnTL0mRFNLFF 
Ha8sQ4Pfn3VTlDYlGyPLgTcPQrjZlwD5OlzRNEbko/LkdNXZ3pvf4q17pjsxP3E7 XqD+d+dny+pBZL748Hp1RmNo/XfhF2Y4iIV4+3/CyBiTlnn8sURqQCeuoA42iCIH y28SBz0WS2FD/yVNbH0c4ZU+/R3Fwz5l7sHfaBieJeTFeqr5kuRU7Rro0EfFpa41 rT3fTz0CgYEA9/XpNsMtRLoMLqb01zvylgLO1cKNkAmoVFhAnh9nH1n3v55Vt48h K9NkHUPbVwSIVdQxDzQy+YXw9IEjieVCBOPHTxRHfX90Azup5dFVXznw6qs1GiW2 mXK+fLToVoTSCi9sHIbIkCAnKS7B5hzKxu+OicKKvouo7UM/NWiSGpsCgYEAy93i gN8leZPRSGXgS5COXOJ7zf8mqYWbzytnD5wh3XjWA2SNap93xyclCB7rlMfnOAXy 9rIgjrDEBBW7BwUyrYcB8M/qLvFfuf3rXgdhVzvA2OctdUdyzGERXObhiRopa2kq jFj4QyRa5kv7VTe85t9Ap2bqpE2nVD1wxRdaFncCgYBN0M+ijvfq5JQkI+MclMSZ jUIJ1WeFt3IrHhMRTHuZXCui5/awh2t6jHmTsZLpKRP8E35d7hy9L+qhYNGdWeQx Eqaey5dv7AqlZRj5dYtcOhvAGYCttv4qA9eB3Wg4lrAv4BgGj8nraRvBEdpp88kz S0SpOPM/vyaBZyQ0B6AqVwKBgQCvDvV03Cj94SSRGooj2RmmQQU2uqakYwqMNyTk jpm16BE+EJYuvIjKBp8R/hslQxMVVGZx2DuEy91F9LMJMDl4MLpF4wOhE7uzpor5 zzSTB8htePXcA2Jche227Ls2U7TFeyUCJ1Pns8wqfYxwfNBFH+gQ15sdQ2EwQSIY 3BiLuQKBgGG+yqKnBceb9zybnshSAVdGt933XjEwRUbaoXGnHjnCxsTtSGa0JkCT 2yrYrwM4KOr7LrKtvz703ApicJf+oRO+vW27+N5t0pyLCjsYJyL55RpM0KWJhKhT KQV8C/ciDV+lIw2yBmlCNvUmy7GAsHSZM+C8y29+GFR7an6WV+xa -----END RSA PRIVATE KEY-----""" ca = Ca(name='ImportTest error') ca.certificate = certificate ca.private_key = private_key ca.full_clean() ca.save() self.assertEqual(ca.email, '') def test_fill_subject_non_strings(self): ca1 = self._create_ca() ca2 = Ca(name='ca', organization_name=ca1) x509 = crypto.X509() subject = ca2._fill_subject(x509.get_subject()) self.assertEqual(subject.organizationName, 'Test CA') # this certificate has an invalid country code problematic_certificate = """-----BEGIN CERTIFICATE----- MIIEjzCCA3egAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQ8wDQYDVQQGEwZJdGFs aWExFjAUBgNVBAgMDUxhbWV6aWEgVGVybWUxFjAUBgNVBAcMDUxhbWV6aWEgVGVy bWUxIDAeBgNVBAoMF0NvbXVuZSBkaSBMYW1lemlhIFRlcm1lMRgwFgYDVQQDDA9M YW1lemlhZnJlZXdpZmkwHhcNMTIwMjE3MTQzMzAyWhcNMjIwMjE3MTQzMzAyWjB9 MQ8wDQYDVQQGEwZJdGFsaWExFjAUBgNVBAgMDUxhbWV6aWEgVGVybWUxFjAUBgNV BAcMDUxhbWV6aWEgVGVybWUxIDAeBgNVBAoMF0NvbXVuZSBkaSBMYW1lemlhIFRl 
cm1lMRgwFgYDVQQDDA9MYW1lemlhZnJlZXdpZmkwggEiMA0GCSqGSIb3DQEBAQUA A4IBDwAwggEKAoIBAQDBsEbRkpsgl9PZO+eb6M+2XDuENaDKIWxzEqhlQWqfivM5 SJNpIBij9n8vIgRu2ie7DmomBkU93tQWwL5EcZcSuqAnBgzkNmko5bsk9w7v6Apq V4UckIhtie7KRDCrG1XJaZ/0V4uYcW7+d1fYTCfMcgchpzMQsHAdjikyzRXc5TJn noV6eZf76zQGSaZllwl90VwQvEVe3VCKSja+zpYxsOjQgnKgrDx1O0l/RGxtCWGG fY9bizlD01nH4WuMT9ObO9F1YqnBc7pWtmRm4DfArr3yW5LKxkRrilwV1UCgQ80z yMYSeEIufChexzo1JBzrL7aEKnSm5fDvt3iJV3OlAgMBAAGjggEYMIIBFDAMBgNV HRMEBTADAQH/MC0GCWCGSAGG+EIBDQQgFh5DQSAtIGF1dG9nZW5lcmF0ZWQgQ2Vy dGlmaWNhdGUwCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBSsrs2asN5B2nSL36P72EBR MOLgijCBqAYDVR0jBIGgMIGdgBSsrs2asN5B2nSL36P72EBRMOLgiqGBgaR/MH0x DzANBgNVBAYTBkl0YWxpYTEWMBQGA1UECAwNTGFtZXppYSBUZXJtZTEWMBQGA1UE BwwNTGFtZXppYSBUZXJtZTEgMB4GA1UECgwXQ29tdW5lIGRpIExhbWV6aWEgVGVy bWUxGDAWBgNVBAMMD0xhbWV6aWFmcmVld2lmaYIBATANBgkqhkiG9w0BAQUFAAOC AQEAf6qG2iFfTv31bOWeE2GBO5VyT1l2MjB/waAXT4vPE2P3RVMoZguBZLc3hmbx nF6L5JlG7VbRqEE8wJMS5WeURuJe94CVftXJhzcd8ZnsISoGAh0IiRCLuTmpa/5q 3eWjgUwr3KldEJ77Sts72qSzRAD6C6RCMxnZTvcQzEjpomLLj1ID82lTrlrYl/in MDl+i5LuDRMlgj6PQhUgV+WoRESnZ/jL2MMxA/hcFPzfDDw6A2Kzgz4wzS5FMyHM iOCe57IN5gNeO2FAL351FHBONYQMtqeEEL82eSc53oFcLKCJf3E2yo1w6p5HB08H IuRFwXXuD2zUkZtldBcYeAa2oA== -----END CERTIFICATE-----""" problematic_private_key = """-----BEGIN RSA PRIVATE KEY----- MIIEpQIBAAKCAQEAwbBG0ZKbIJfT2Tvnm+jPtlw7hDWgyiFscxKoZUFqn4rzOUiT aSAYo/Z/LyIEbtonuw5qJgZFPd7UFsC+RHGXErqgJwYM5DZpKOW7JPcO7+gKaleF HJCIbYnuykQwqxtVyWmf9FeLmHFu/ndX2EwnzHIHIaczELBwHY4pMs0V3OUyZ56F enmX++s0BkmmZZcJfdFcELxFXt1Qiko2vs6WMbDo0IJyoKw8dTtJf0RsbQlhhn2P W4s5Q9NZx+FrjE/TmzvRdWKpwXO6VrZkZuA3wK698luSysZEa4pcFdVAoEPNM8jG EnhCLnwoXsc6NSQc6y+2hCp0puXw77d4iVdzpQIDAQABAoIBAQCvQLPjftbUV+x8 ++ImRTJkm/HSP7/8BOAfAvvRmq5CK7TF2TBgh4UkHq6X1BzUvJoEfBd5zmSqhcu7 xqyiO3FppemxRZ02hTEDq1J5MP6X/oomDIjJ/tEi5BJne+nZeMNXmjX8HZaW2dSH dS7L7KR6LZbcUXA4Ip1fcLlAWSb2Fe0bcuSLPaZZSmiA1Q3B/Q6nIOqPXDWq1/yz Vs7doSfniAt8CQse+NeWybevAHhaLjHIbqtvmAqmq91ehEiy87Cyj9VA5l4ggM8n 
O6DcmjSaiXfkLgJlrMQ50Ddxoqf35pf+vzebwFdYmyt3fGlIP1OaeVsfIGbkNFZG NQkdjEwhAoGBAObDqy8HMv070U+EXSdbv2x1A1glkA2ZUI1Ki+zXIrNV8ohZ4w71 /v2UsAAXxTCtx28EMFo923dHGk9OXM3EhmyNqYBRX97rB5V7Gt5FxmJs75punYaB IfMvo83Hn8mrBUUb74pQhhJ2TVVv/N3nefuElys6lMwyVgUBsu0xPt1pAoGBANbe qKouEl+lKdhfABbLCsgCp5yXhFEgNMuGArj5Op/mw/RWOYs4TuN35WmzpmsQZ2PD +cr+/oN+eJ7zgyStDJmMkeG4vtUVJ5F4wWFWgwgY7zU1J3tu0e/EvgaaLkqWtLRE xGJ0zc0qHQdOGGxnQPUy49yvMsdrVwHT/RQiJdDdAoGAAnxlIbKQKA426QZiAoSI gWCZUp/E94CJT5xX+YsvwoLQhAuD2Ktpvc2WP8oBw857cYS4CKDV9mj7rZMIiObv E8hK5Sj7QWmCwWd8GJzj0DegNSev5r0JYpdGyna2D/QZsG7mm7TWXOiNWLhGHxXZ SI5bGoodBD4ekxs7lDaNmNECgYEAoVVd3ynosdgZq1TphDPATJ1xrKo3t5IvEgH1 WV4JHrbuuy9i1Z3Z3gHQR6WUdx9CAi7MCBeekq0LdI3zEj69Dy30+z70Spovs5Kv 4J5MlG/kbFcU5iE3kIhxBhQOXgL6e8CGlEaPoFTWpv2EaSC+LV2gqbsCralzEvRR OiTJsCECgYEAzdFUEea4M6Uavsd36mBbCLAYkYvhMMYUcrebFpDFwZUFaOrNV0ju 5YkQTn0EQuwQWKcfs+Z+HRiqMmqj5RdgxQs6pCQG9nfp0uVSflZATOiweshGjn6f wZWuZRQLPPTAdiW+drs3gz8w0u3Y9ihgvHQqFcGJ1+j6ANJ0XdE/D5Y= -----END RSA PRIVATE KEY-----""" def test_ca_invalid_country(self): ca = self._create_ca( name='ImportTest error', certificate=self.problematic_certificate, private_key=self.problematic_private_key, ) self.assertEqual(ca.country_code, '') def test_import_ca_cert_validation_error(self): certificate = self.import_certificate[20:] private_key = self.import_private_key ca = Ca(name='TestCaCertValidation') try: ca.certificate = certificate ca.private_key = private_key ca.full_clean() except ValidationError as e: # cryptography 2.4 and 2.6 have different error message formats error_msg = str(e.message_dict['certificate'][0]) self.assertTrue( "('PEM routines', 'PEM_read_bio', 'no start line')" in error_msg # cryptography 2.4+ or "('PEM routines', 'get_name', 'no start line')" in error_msg # cryptography 2.6+ ) else: self.fail('ValidationError not raised') def test_import_ca_key_validation_error(self): certificate = self.import_certificate private_key = self.import_private_key[20:] ca = Ca(name='TestCaKeyValidation') try: 
ca.certificate = certificate ca.private_key = private_key ca.full_clean() ca.save() except ValidationError as e: # cryptography 2.4 and 2.6 have different error message formats error_msg = str(e.message_dict['private_key'][0]) self.assertTrue( "('PEM routines', 'PEM_read_bio', 'no start line')" in error_msg # cryptography 2.4+ or "('PEM routines', 'get_name', 'no start line')" in error_msg # cryptography 2.6+ ) else: self.fail('ValidationError not raised') def test_create_old_serial_ca(self): ca = self._create_ca(serial_number=3) self.assertEqual(int(ca.serial_number), 3) cert = crypto.load_certificate(crypto.FILETYPE_PEM, ca.certificate) self.assertEqual(int(cert.get_serial_number()), int(ca.serial_number)) def test_bad_serial_number_ca(self): try: self._create_ca(serial_number='notIntegers') except ValidationError as e: self.assertEqual( 'Serial number must be an integer', str(e.message_dict['serial_number'][0]), ) def test_import_ca_key_with_passphrase(self): ca = Ca(name='ImportTest') ca.certificate = """-----BEGIN CERTIFICATE----- MIICrzCCAhigAwIBAgIJANCybYj5LwUWMA0GCSqGSIb3DQEBCwUAMG8xCzAJBgNV BAYTAklOMQwwCgYDVQQIDANhc2QxDDAKBgNVBAcMA2FzZDEMMAoGA1UECgwDYXNk MQwwCgYDVQQLDANhc2QxDDAKBgNVBAMMA2FzZDEaMBgGCSqGSIb3DQEJARYLYXNk QGFzZC5hc2QwHhcNMTgwODI5MjExMDQ1WhcNMTkwODI5MjExMDQ1WjBvMQswCQYD VQQGEwJJTjEMMAoGA1UECAwDYXNkMQwwCgYDVQQHDANhc2QxDDAKBgNVBAoMA2Fz ZDEMMAoGA1UECwwDYXNkMQwwCgYDVQQDDANhc2QxGjAYBgkqhkiG9w0BCQEWC2Fz ZEBhc2QuYXNkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDBuDdlU20Ydie8 tmbq2hn8Ski6aSH2IyVVMxUj3+i6QZmoJ4sZzcAMCLPIkCAxby5pP0V6/DSqjxTL ShYy/7QMCovmj3O+23eYR/JGNAfsk6uDsWJL6OLHTNdx19mL0NioeFNEUJt14Cbz uqUizT7UdONLer0UK4uP2sE09Eo4cQIDAQABo1MwUTAdBgNVHQ4EFgQURUEc1+ho on8xaoSU+HU6CRkn0/owHwYDVR0jBBgwFoAURUEc1+hoon8xaoSU+HU6CRkn0/ow DwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQB2zU8qtkXVM25yrL9s FC5oSqTky2c9KI/hwdsSronSvwaMoASgfl7UjzXlovq9FWZpNSVZ06wetkJVjq5N Xn3APftPSmKw0J1tzUfZuvq8Z8q6uXQ4B2+BsiCkG/PwXizbKDc29yzXsXTL4+cQ J7RrWKwDUi/GKVvqc+JjgsQ/nA== -----END 
CERTIFICATE----- """ ca.private_key = """-----BEGIN RSA PRIVATE KEY----- Proc-Type: 4,ENCRYPTED DEK-Info: DES-EDE3-CBC,D7DDAD38C7462384 CUEPD7buBQqv/uipFz/tXYURNcQrY5HKU904IVsKbM233KPA6qU6IaRF6RRxxUtE ejrmY2es9ZmU63gO/G/16E0CxzWhm3G2pOBsWHsBGGYcMpqZ842E3NoWimfQuRyO E7TtMKW+Jdl6mzkw8s/KkSeGkGvZFKrclSN37CtkexRn4cXQkhNgPztyeRaQjIBM SveP2qbODU+lr8g2oUjx05Ftcv1zJin85tzifJlQyaQz8ozKYtHA/RSpLEFZ19HG mXn4Rvvai8r2zhdqfT/0/G6dABDrhQLxQhPE2MrY0hAlr7DnDrYNQQ/QyGoiAdcR ee7QUDNfDnjzU6k/EjYPU1827/Kw8R4al8yDtVcUqfDuEsKabot+krEx4IZ5LOk9 PkcSW8UR0cIm7QE2BzQEzaZKQIpVwjSsSKm+RcFktiCKVun3Sps+GtXBr+AmF5Na r6xeg+j9kz8lT8F5lnpFTk6c8cD8GDCRiLsFzPo652BQ24dAEPvsSbYmKwr1gEe8 tfsARqOuvSafQNzqBYFV7abFr8DFiE1Kghk6d6x2u7qVREvOh0RYHRWqsTRf4MMn WlEnL9zfYST9Ur3gJgBOH2WHboDlQZu1k7yoLMfiGTQSQ2/xg1zS+5IWxt4tg029 B+f39N5zyDjuGFYcf3J6J4zybHmvdSAa62qxnkeDIbLz4axTU8+hNNOWxIsAh5vs OO8quCk6DE4j4u3Yzk7810dkJtliwboQiTlitEbCjiyjkOrabIICKMte8nhylZX6 BxZA3knyYRiB0FNYSxI6YuCIqTjr0AoBvNHdkdjkv2VFomYNBd8ruA== -----END RSA PRIVATE KEY----- """ ca.passphrase = 'test123' ca.full_clean() ca.save() self.assertIsInstance(ca.pkey, crypto.PKey) def test_import_ca_key_with_incorrect_passphrase(self): ca = Ca(name='ImportTest') ca.certificate = """-----BEGIN CERTIFICATE----- MIICrzCCAhigAwIBAgIJANCybYj5LwUWMA0GCSqGSIb3DQEBCwUAMG8xCzAJBgNV BAYTAklOMQwwCgYDVQQIDANhc2QxDDAKBgNVBAcMA2FzZDEMMAoGA1UECgwDYXNk MQwwCgYDVQQLDANhc2QxDDAKBgNVBAMMA2FzZDEaMBgGCSqGSIb3DQEJARYLYXNk QGFzZC5hc2QwHhcNMTgwODI5MjExMDQ1WhcNMTkwODI5MjExMDQ1WjBvMQswCQYD VQQGEwJJTjEMMAoGA1UECAwDYXNkMQwwCgYDVQQHDANhc2QxDDAKBgNVBAoMA2Fz ZDEMMAoGA1UECwwDYXNkMQwwCgYDVQQDDANhc2QxGjAYBgkqhkiG9w0BCQEWC2Fz ZEBhc2QuYXNkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDBuDdlU20Ydie8 tmbq2hn8Ski6aSH2IyVVMxUj3+i6QZmoJ4sZzcAMCLPIkCAxby5pP0V6/DSqjxTL ShYy/7QMCovmj3O+23eYR/JGNAfsk6uDsWJL6OLHTNdx19mL0NioeFNEUJt14Cbz uqUizT7UdONLer0UK4uP2sE09Eo4cQIDAQABo1MwUTAdBgNVHQ4EFgQURUEc1+ho on8xaoSU+HU6CRkn0/owHwYDVR0jBBgwFoAURUEc1+hoon8xaoSU+HU6CRkn0/ow 
DwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQB2zU8qtkXVM25yrL9s FC5oSqTky2c9KI/hwdsSronSvwaMoASgfl7UjzXlovq9FWZpNSVZ06wetkJVjq5N Xn3APftPSmKw0J1tzUfZuvq8Z8q6uXQ4B2+BsiCkG/PwXizbKDc29yzXsXTL4+cQ J7RrWKwDUi/GKVvqc+JjgsQ/nA== -----END CERTIFICATE----- """ ca.private_key = """-----BEGIN RSA PRIVATE KEY----- Proc-Type: 4,ENCRYPTED DEK-Info: DES-EDE3-CBC,D7DDAD38C7462384 CUEPD7buBQqv/uipFz/tXYURNcQrY5HKU904IVsKbM233KPA6qU6IaRF6RRxxUtE ejrmY2es9ZmU63gO/G/16E0CxzWhm3G2pOBsWHsBGGYcMpqZ842E3NoWimfQuRyO E7TtMKW+Jdl6mzkw8s/KkSeGkGvZFKrclSN37CtkexRn4cXQkhNgPztyeRaQjIBM SveP2qbODU+lr8g2oUjx05Ftcv1zJin85tzifJlQyaQz8ozKYtHA/RSpLEFZ19HG mXn4Rvvai8r2zhdqfT/0/G6dABDrhQLxQhPE2MrY0hAlr7DnDrYNQQ/QyGoiAdcR ee7QUDNfDnjzU6k/EjYPU1827/Kw8R4al8yDtVcUqfDuEsKabot+krEx4IZ5LOk9 PkcSW8UR0cIm7QE2BzQEzaZKQIpVwjSsSKm+RcFktiCKVun3Sps+GtXBr+AmF5Na r6xeg+j9kz8lT8F5lnpFTk6c8cD8GDCRiLsFzPo652BQ24dAEPvsSbYmKwr1gEe8 tfsARqOuvSafQNzqBYFV7abFr8DFiE1Kghk6d6x2u7qVREvOh0RYHRWqsTRf4MMn WlEnL9zfYST9Ur3gJgBOH2WHboDlQZu1k7yoLMfiGTQSQ2/xg1zS+5IWxt4tg029 B+f39N5zyDjuGFYcf3J6J4zybHmvdSAa62qxnkeDIbLz4axTU8+hNNOWxIsAh5vs OO8quCk6DE4j4u3Yzk7810dkJtliwboQiTlitEbCjiyjkOrabIICKMte8nhylZX6 BxZA3knyYRiB0FNYSxI6YuCIqTjr0AoBvNHdkdjkv2VFomYNBd8ruA== -----END RSA PRIVATE KEY----- """ try: ca.passphrase = 'incorrect_passphrase' ca.full_clean() ca.save() except ValidationError as e: self.assertIn('Incorrect Passphrase', str(e.message_dict['passphrase'][0])) else: self.fail('ValidationError not raised') def test_generate_ca_with_passphrase(self): ca = self._create_ca(passphrase='123') ca.full_clean() ca.save() self.assertIsInstance(ca.pkey, crypto.PKey) def test_datetime_to_string(self): generalized_datetime = datetime(2050, 1, 1, 0, 0, 0, 0) utc_datetime = datetime(2049, 12, 31, 0, 0, 0, 0) self.assertEqual( datetime_to_string(generalized_datetime), generalized_datetime.strftime(generalized_time), ) self.assertEqual( datetime_to_string(utc_datetime), utc_datetime.strftime(utc_time) ) def test_renew(self): ca = self._create_ca() 
cert1 = self._create_cert(ca=ca, name='cert1') cert2 = self._create_cert(ca=ca, name='cert2') old_ca_cert = ca.certificate old_ca_key = ca.private_key old_ca_end = ca.validity_end old_ca_serial_number = ca.serial_number old_cert1_cert = cert1.certificate old_cert1_key = cert1.private_key old_cert1_serial_number = cert1.serial_number old_cert1_end = cert1.validity_end old_cert2_cert = cert2.certificate old_cert2_key = cert2.private_key old_cert2_serial_number = cert2.serial_number old_cert2_end = cert2.validity_end ca.renew() cert1.refresh_from_db() cert2.refresh_from_db() self.assertNotEqual(old_ca_cert, ca.certificate) self.assertNotEqual(old_ca_key, ca.private_key) self.assertLess(old_ca_end, ca.validity_end) self.assertNotEqual(old_ca_serial_number, ca.serial_number) self.assertNotEqual(old_cert1_cert, cert1.certificate) self.assertNotEqual(old_cert1_key, cert1.private_key) self.assertLess(old_cert1_end, cert1.validity_end) self.assertNotEqual(old_cert1_serial_number, cert1.serial_number) self.assertNotEqual(old_cert2_cert, cert2.certificate) self.assertNotEqual(old_cert2_key, cert2.private_key) self.assertLess(old_cert2_end, cert2.validity_end) self.assertNotEqual(old_cert2_serial_number, cert2.serial_number) def test_ca_common_name_length(self): common_name = ( 'this is a very very very very very very' ' very very very very very very long name' ) with self.assertRaises(ValidationError) as context_manager: self._create_ca(common_name=common_name) msg = ( f'Ensure this value has at most 64 characters (it has {len(common_name)}).' 
) message_dict = context_manager.exception.message_dict self.assertIn('common_name', message_dict) self.assertEqual(message_dict['common_name'][0], msg) def test_ca_without_key_length_and_digest_algo(self): try: self._create_ca(key_length='', digest='') except ValidationError as e: self.assertIn('key_length', e.error_dict) self.assertIn('digest', e.error_dict) except Exception as e: self.fail(f'Got exception: {e}') else: self.fail('ValidationError not raised as expected')
"""Unicode Properties.""" from __future__ import unicode_literals from . import unidata import sys NARROW = sys.maxunicode == 0xFFFF if NARROW: UNICODE_RANGE = '\u0000-\uffff' else: UNICODE_RANGE = '\u0000-\U0010ffff' PY3 = sys.version_info >= (3, 0) and sys.version_info[0:2] < (4, 0) PY35 = sys.version_info >= (3, 5) PY37 = sys.version_info >= (3, 7) if PY3: binary_type = bytes # noqa else: binary_type = str # noqa POSIX = 0 POSIX_BINARY = 1 POSIX_UNICODE = 2 def get_posix_property(value, mode=POSIX): """Retrieve the posix category.""" if mode == POSIX_BINARY: return unidata.ascii_posix_properties[value] elif mode == POSIX_UNICODE: return unidata.unicode_binary[ ('^posix' + value[1:]) if value.startswith('^') else ('posix' + value) ] else: return unidata.unicode_posix_properties[value] def get_gc_property(value, binary=False): """Get `GC` property.""" obj = unidata.ascii_properties if binary else unidata.unicode_properties if value.startswith('^'): negate = True value = value[1:] else: negate = False value = unidata.unicode_alias['generalcategory'].get(value, value) assert 1 <= len(value) <= 2, 'Invalid property!' if not negate: p1, p2 = (value[0], value[1]) if len(value) > 1 else (value[0], None) value = ''.join( [v for k, v in obj.get(p1, {}).items() if not k.startswith('^')] ) if p2 is None else obj.get(p1, {}).get(p2, '') else: p1, p2 = (value[0], value[1]) if len(value) > 1 else (value[0], '') value = obj.get(p1, {}).get('^' + p2, '') assert value, 'Invalid property!' 
return value def get_binary_property(value, binary=False): """Get `BINARY` property.""" obj = unidata.ascii_binary if binary else unidata.unicode_binary if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['binary'].get(negated, negated) else: value = unidata.unicode_alias['binary'].get(value, value) return obj[value] def get_canonical_combining_class_property(value, binary=False): """Get `CANONICAL COMBINING CLASS` property.""" obj = unidata.ascii_canonical_combining_class if binary else unidata.unicode_canonical_combining_class if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['canonicalcombiningclass'].get(negated, negated) else: value = unidata.unicode_alias['canonicalcombiningclass'].get(value, value) return obj[value] def get_east_asian_width_property(value, binary=False): """Get `EAST ASIAN WIDTH` property.""" obj = unidata.ascii_east_asian_width if binary else unidata.unicode_east_asian_width if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['eastasianwidth'].get(negated, negated) else: value = unidata.unicode_alias['eastasianwidth'].get(value, value) return obj[value] def get_grapheme_cluster_break_property(value, binary=False): """Get `GRAPHEME CLUSTER BREAK` property.""" obj = unidata.ascii_grapheme_cluster_break if binary else unidata.unicode_grapheme_cluster_break if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['graphemeclusterbreak'].get(negated, negated) else: value = unidata.unicode_alias['graphemeclusterbreak'].get(value, value) return obj[value] def get_line_break_property(value, binary=False): """Get `LINE BREAK` property.""" obj = unidata.ascii_line_break if binary else unidata.unicode_line_break if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['linebreak'].get(negated, negated) else: value = unidata.unicode_alias['linebreak'].get(value, value) return obj[value] def 
get_sentence_break_property(value, binary=False): """Get `SENTENCE BREAK` property.""" obj = unidata.ascii_sentence_break if binary else unidata.unicode_sentence_break if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['sentencebreak'].get(negated, negated) else: value = unidata.unicode_alias['sentencebreak'].get(value, value) return obj[value] def get_word_break_property(value, binary=False): """Get `WORD BREAK` property.""" obj = unidata.ascii_word_break if binary else unidata.unicode_word_break if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['wordbreak'].get(negated, negated) else: value = unidata.unicode_alias['wordbreak'].get(value, value) return obj[value] def get_hangul_syllable_type_property(value, binary=False): """Get `HANGUL SYLLABLE TYPE` property.""" obj = unidata.ascii_hangul_syllable_type if binary else unidata.unicode_hangul_syllable_type if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['hangulsyllabletype'].get(negated, negated) else: value = unidata.unicode_alias['hangulsyllabletype'].get(value, value) return obj[value] def get_indic_positional_category_property(value, binary=False): """Get `INDIC POSITIONAL/MATRA CATEGORY` property.""" if PY35: obj = unidata.ascii_indic_positional_category if binary else unidata.unicode_indic_positional_category alias_key = 'indicpositionalcategory' else: obj = unidata.ascii_indic_matra_category if binary else unidata.unicode_indic_matra_category alias_key = 'indicmatracategory' if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias[alias_key].get(negated, negated) else: value = unidata.unicode_alias[alias_key].get(value, value) return obj[value] def get_indic_syllabic_category_property(value, binary=False): """Get `INDIC SYLLABIC CATEGORY` property.""" obj = unidata.ascii_indic_syllabic_category if binary else unidata.unicode_indic_syllabic_category if value.startswith('^'): negated = 
value[1:] value = '^' + unidata.unicode_alias['indicsyllabiccategory'].get(negated, negated) else: value = unidata.unicode_alias['indicsyllabiccategory'].get(value, value) return obj[value] def get_decomposition_type_property(value, binary=False): """Get `DECOMPOSITION TYPE` property.""" obj = unidata.ascii_decomposition_type if binary else unidata.unicode_decomposition_type if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['decompositiontype'].get(negated, negated) else: value = unidata.unicode_alias['decompositiontype'].get(value, value) return obj[value] def get_nfc_quick_check_property(value, binary=False): """Get `NFC QUICK CHECK` property.""" obj = unidata.ascii_nfc_quick_check if binary else unidata.unicode_nfc_quick_check if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['nfcquickcheck'].get(negated, negated) else: value = unidata.unicode_alias['nfcquickcheck'].get(value, value) return obj[value] def get_nfd_quick_check_property(value, binary=False): """Get `NFD QUICK CHECK` property.""" obj = unidata.ascii_nfd_quick_check if binary else unidata.unicode_nfd_quick_check if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['nfdquickcheck'].get(negated, negated) else: value = unidata.unicode_alias['nfdquickcheck'].get(value, value) return obj[value] def get_nfkc_quick_check_property(value, binary=False): """Get `NFKC QUICK CHECK` property.""" obj = unidata.ascii_nfkc_quick_check if binary else unidata.unicode_nfkc_quick_check if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['nfkcquickcheck'].get(negated, negated) else: value = unidata.unicode_alias['nfkcquickcheck'].get(value, value) return obj[value] def get_nfkd_quick_check_property(value, binary=False): """Get `NFKD QUICK CHECK` property.""" obj = unidata.ascii_nfkd_quick_check if binary else unidata.unicode_nfkd_quick_check if value.startswith('^'): negated = value[1:] value = '^' + 
unidata.unicode_alias['nfkdquickcheck'].get(negated, negated) else: value = unidata.unicode_alias['nfkdquickcheck'].get(value, value) return obj[value] def get_numeric_type_property(value, binary=False): """Get `NUMERIC TYPE` property.""" obj = unidata.ascii_numeric_type if binary else unidata.unicode_numeric_type if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['numerictype'].get(negated, negated) else: value = unidata.unicode_alias['numerictype'].get(value, value) return obj[value] def get_numeric_value_property(value, binary=False): """Get `NUMERIC VALUE` property.""" obj = unidata.ascii_numeric_values if binary else unidata.unicode_numeric_values if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['numericvalue'].get(negated, negated) else: value = unidata.unicode_alias['numericvalue'].get(value, value) return obj[value] def get_age_property(value, binary=False): """Get `AGE` property.""" obj = unidata.ascii_age if binary else unidata.unicode_age if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['age'].get(negated, negated) else: value = unidata.unicode_alias['age'].get(value, value) return obj[value] def get_joining_type_property(value, binary=False): """Get `JOINING TYPE` property.""" obj = unidata.ascii_joining_type if binary else unidata.unicode_joining_type if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['joiningtype'].get(negated, negated) else: value = unidata.unicode_alias['joiningtype'].get(value, value) return obj[value] def get_joining_group_property(value, binary=False): """Get `JOINING GROUP` property.""" obj = unidata.ascii_joining_group if binary else unidata.unicode_joining_group if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['joininggroup'].get(negated, negated) else: value = unidata.unicode_alias['joininggroup'].get(value, value) return obj[value] def get_script_property(value, 
binary=False): """Get `SC` property.""" obj = unidata.ascii_scripts if binary else unidata.unicode_scripts if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['script'].get(negated, negated) else: value = unidata.unicode_alias['script'].get(value, value) return obj[value] def get_script_extension_property(value, binary=False): """Get `SCX` property.""" obj = unidata.ascii_script_extensions if binary else unidata.unicode_script_extensions if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['script'].get(negated, negated) else: value = unidata.unicode_alias['script'].get(value, value) return obj[value] def get_block_property(value, binary=False): """Get `BLK` property.""" obj = unidata.ascii_blocks if binary else unidata.unicode_blocks if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['block'].get(negated, negated) else: value = unidata.unicode_alias['block'].get(value, value) return obj[value] def get_bidi_property(value, binary=False): """Get `BC` property.""" obj = unidata.ascii_bidi_classes if binary else unidata.unicode_bidi_classes if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['bidiclass'].get(negated, negated) else: value = unidata.unicode_alias['bidiclass'].get(value, value) return obj[value] def get_bidi_paired_bracket_type_property(value, binary=False): """Get `BPT` property.""" obj = unidata.ascii_bidi_paired_bracket_type if binary else unidata.unicode_bidi_paired_bracket_type if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['bidipairedbrackettype'].get(negated, negated) else: value = unidata.unicode_alias['bidipairedbrackettype'].get(value, value) return obj[value] def get_vertical_orientation_property(value, binary=False): """Get `VO` property.""" obj = unidata.ascii_vertical_orientation if binary else unidata.unicode_vertical_orientation if value.startswith('^'): negated = value[1:] value = '^' 
+ unidata.unicode_alias['verticalorientation'].get(negated, negated) else: value = unidata.unicode_alias['verticalorientation'].get(value, value) return obj[value] def get_is_property(value, binary=False): """Get shortcut for `SC` or `Binary` property.""" if value.startswith('^'): prefix = value[1:3] temp = value[3:] negate = '^' else: prefix = value[:2] temp = value[2:] negate = '' if prefix != 'is': raise ValueError("Does not start with 'is'!") if PY3: script_obj = unidata.ascii_script_extensions if binary else unidata.unicode_script_extensions else: script_obj = unidata.ascii_scripts if binary else unidata.unicode_scripts bin_obj = unidata.ascii_binary if binary else unidata.unicode_binary value = negate + unidata.unicode_alias['script'].get(temp, temp) if value not in script_obj: value = negate + unidata.unicode_alias['binary'].get(temp, temp) obj = bin_obj else: obj = script_obj return obj[value] def get_in_property(value, binary=False): """Get shortcut for `Block` property.""" if value.startswith('^'): prefix = value[1:3] temp = value[3:] negate = '^' else: prefix = value[:2] temp = value[2:] negate = '' if prefix != 'in': raise ValueError("Does not start with 'in'!") value = negate + unidata.unicode_alias['block'].get(temp, temp) obj = unidata.ascii_blocks if binary else unidata.unicode_blocks return obj[value] def is_enum(name): """Check if name is an enum (not a binary) property.""" return name in unidata.enum_names def get_unicode_property(value, prop=None, binary=False): """Retrieve the Unicode category from the table.""" if prop is not None: prop = unidata.unicode_alias['_'].get(prop, prop) try: if prop == 'generalcategory': return get_gc_property(value, binary) elif prop == 'script': return get_script_property(value, binary) elif PY3 and prop == 'scriptextensions': return get_script_extension_property(value, binary) elif prop == 'block': return get_block_property(value, binary) elif prop == 'binary': return get_binary_property(value, binary) elif prop 
== 'bidiclass': return get_bidi_property(value, binary) elif prop == 'bidipairedbrackettype': return get_bidi_paired_bracket_type_property(value, binary) elif prop == 'age': return get_age_property(value, binary) elif prop == 'eastasianwidth': return get_east_asian_width_property(value, binary) elif PY35 and prop == 'indicpositionalcategory': return get_indic_positional_category_property(value, binary) elif PY3 and not PY35 and prop == 'indicmatracategory': return get_indic_positional_category_property(value, binary) elif PY3 and prop == 'indicsyllabiccategory': return get_indic_syllabic_category_property(value, binary) elif prop == 'hangulsyllabletype': return get_hangul_syllable_type_property(value, binary) elif prop == 'decompositiontype': return get_decomposition_type_property(value, binary) elif prop == 'canonicalcombiningclass': return get_canonical_combining_class_property(value, binary) elif prop == 'numerictype': return get_numeric_type_property(value, binary) elif prop == 'numericvalue': return get_numeric_value_property(value, binary) elif prop == 'joiningtype': return get_joining_type_property(value, binary) elif prop == 'joininggroup': return get_joining_group_property(value, binary) elif prop == 'graphemeclusterbreak': return get_grapheme_cluster_break_property(value, binary) elif prop == 'linebreak': return get_line_break_property(value, binary) elif prop == 'sentencebreak': return get_sentence_break_property(value, binary) elif prop == 'wordbreak': return get_word_break_property(value, binary) elif prop == 'nfcquickcheck': return get_nfc_quick_check_property(value, binary) elif prop == 'nfdquickcheck': return get_nfd_quick_check_property(value, binary) elif prop == 'nfkcquickcheck': return get_nfkc_quick_check_property(value, binary) elif prop == 'nfkdquickcheck': return get_nfkd_quick_check_property(value, binary) elif PY37 and prop == 'verticalorientation': return get_vertical_orientation_property(value, binary) else: raise ValueError('Invalid 
Unicode property!') except Exception: raise ValueError('Invalid Unicode property!') try: return get_gc_property(value, binary) except Exception: pass try: if PY3: return get_script_extension_property(value, binary) else: return get_script_property(value, binary) except Exception: pass try: return get_block_property(value, binary) except Exception: pass try: return get_binary_property(value, binary) except Exception: pass try: return get_is_property(value, binary) except Exception: pass try: return get_in_property(value, binary) except Exception: pass raise ValueError('Invalid Unicode property!')
"""Extract, format and print information about Python stack traces.""" import collections import itertools import linecache import sys __all__ = ['extract_stack', 'extract_tb', 'format_exception', 'format_exception_only', 'format_list', 'format_stack', 'format_tb', 'print_exc', 'format_exc', 'print_exception', 'print_last', 'print_stack', 'print_tb', 'clear_frames', 'FrameSummary', 'StackSummary', 'TracebackException', 'walk_stack', 'walk_tb'] # # Formatting and printing lists of traceback lines. # def print_list(extracted_list, file=None): """Print the list of tuples as returned by extract_tb() or extract_stack() as a formatted stack trace to the given file.""" if file is None: file = sys.stderr for item in StackSummary.from_list(extracted_list).format(): print(item, file=file, end="") def format_list(extracted_list): """Format a list of traceback entry tuples for printing. Given a list of tuples as returned by extract_tb() or extract_stack(), return a list of strings ready for printing. Each string in the resulting list corresponds to the item with the same index in the argument list. Each string ends in a newline; the strings may contain internal newlines as well, for those items whose source text line is not None. """ return StackSummary.from_list(extracted_list).format() # # Printing and Extracting Tracebacks. # def print_tb(tb, limit=None, file=None): """Print up to 'limit' stack trace entries from the traceback 'tb'. If 'limit' is omitted or None, all entries are printed. If 'file' is omitted or None, the output goes to sys.stderr; otherwise 'file' should be an open file or file-like object with a write() method. """ print_list(extract_tb(tb, limit=limit), file=file) def format_tb(tb, limit=None): """A shorthand for 'format_list(extract_tb(tb, limit))'.""" return extract_tb(tb, limit=limit).format() def extract_tb(tb, limit=None): """Return list of up to limit pre-processed entries from traceback. This is useful for alternate formatting of stack traces. 
If 'limit' is omitted or None, all entries are extracted. A pre-processed stack trace entry is a quadruple (filename, line number, function name, text) representing the information that is usually printed for a stack trace. The text is a string with leading and trailing whitespace stripped; if the source is not available it is None. """ return StackSummary.extract(walk_tb(tb), limit=limit) # # Exception formatting and output. # _cause_message = ( "\nThe above exception was the direct cause " "of the following exception:\n\n") _context_message = ( "\nDuring handling of the above exception, " "another exception occurred:\n\n") def print_exception(etype, value, tb, limit=None, file=None, chain=True): """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. This differs from print_tb() in the following ways: (1) if traceback is not None, it prints a header "Traceback (most recent call last):"; (2) it prints the exception type and value after the stack trace; (3) if type is SyntaxError and value has the appropriate format, it prints the line where the syntax error occurred with a caret on the next line indicating the approximate position of the error. """ # format_exception has ignored etype for some time, and code such as cgitb # passes in bogus values as a result. For compatibility with such code we # ignore it here (rather than in the new TracebackException API). if file is None: file = sys.stderr for line in TracebackException( type(value), value, tb, limit=limit).format(chain=chain): print(line, file=file, end="") def format_exception(etype, value, tb, limit=None, chain=True): """Format a stack trace and the exception information. The arguments have the same meaning as the corresponding arguments to print_exception(). The return value is a list of strings, each ending in a newline and some containing internal newlines. When these lines are concatenated and printed, exactly the same text is printed as does print_exception(). 
""" # format_exception has ignored etype for some time, and code such as cgitb # passes in bogus values as a result. For compatibility with such code we # ignore it here (rather than in the new TracebackException API). return list(TracebackException( type(value), value, tb, limit=limit).format(chain=chain)) def format_exception_only(etype, value): """Format the exception part of a traceback. The arguments are the exception type and value such as given by sys.last_type and sys.last_value. The return value is a list of strings, each ending in a newline. Normally, the list contains a single string; however, for SyntaxError exceptions, it contains several lines that (when printed) display detailed information about where the syntax error occurred. The message indicating which exception occurred is always the last string in the list. """ return list(TracebackException(etype, value, None).format_exception_only()) # -- not official API but folk probably use these two functions. def _format_final_exc_line(etype, value): valuestr = _some_str(value) if value == 'None' or value is None or not valuestr: line = "%s\n" % etype else: line = "%s: %s\n" % (etype, valuestr) return line def _some_str(value): try: return str(value) except: return '<unprintable %s object>' % type(value).__name__ # -- def print_exc(limit=None, file=None, chain=True): """Shorthand for 'print_exception(*sys.exc_info(), limit, file)'.""" print_exception(*sys.exc_info(), limit=limit, file=file, chain=chain) def format_exc(limit=None, chain=True): """Like print_exc() but return a string.""" return "".join(format_exception(*sys.exc_info(), limit=limit, chain=chain)) def print_last(limit=None, file=None, chain=True): """This is a shorthand for 'print_exception(sys.last_type, sys.last_value, sys.last_traceback, limit, file)'.""" if not hasattr(sys, "last_type"): raise ValueError("no last exception") print_exception(sys.last_type, sys.last_value, sys.last_traceback, limit, file, chain) # # Printing and 
# -- Extracting Stacks --

def print_stack(f=None, limit=None, file=None):
    """Print a stack trace from its invocation point.

    The optional 'f' argument can be used to specify an alternate
    stack frame at which to start. The optional 'limit' and 'file'
    arguments have the same meaning as for print_exception().
    """
    if f is None:
        # Start at the caller's frame, not this helper's.
        f = sys._getframe().f_back
    print_list(extract_stack(f, limit=limit), file=file)


def format_stack(f=None, limit=None):
    """Shorthand for 'format_list(extract_stack(f, limit))'."""
    if f is None:
        f = sys._getframe().f_back
    return format_list(extract_stack(f, limit=limit))


def extract_stack(f=None, limit=None):
    """Extract the raw traceback from the current stack frame.

    The return value has the same format as for extract_tb().  The
    optional 'f' and 'limit' arguments have the same meaning as for
    print_stack().  Each item in the list is a quadruple (filename,
    line number, function name, text), and the entries are in order
    from oldest to newest stack frame.
    """
    if f is None:
        f = sys._getframe().f_back
    stack = StackSummary.extract(walk_stack(f), limit=limit)
    # walk_stack yields newest-first; the documented order is oldest-first.
    stack.reverse()
    return stack


def clear_frames(tb):
    "Clear all references to local variables in the frames of a traceback."
    while tb is not None:
        try:
            tb.tb_frame.clear()
        except RuntimeError:
            # Ignore the exception raised if the frame is still executing.
            pass
        tb = tb.tb_next


class FrameSummary:
    """A single frame from a traceback.

    - :attr:`filename` The filename for the frame.
    - :attr:`lineno` The line within filename for the frame that was
      active when the frame was captured.
    - :attr:`name` The name of the function or method that was executing
      when the frame was captured.
    - :attr:`line` The text from the linecache module for the line of
      code that was running when the frame was captured.
    - :attr:`locals` Either None if locals were not supplied, or a dict
      mapping the name to the repr() of the variable.
    """

    __slots__ = ('filename', 'lineno', 'name', '_line', 'locals')

    def __init__(self, filename, lineno, name, *, lookup_line=True,
            locals=None, line=None):
        """Construct a FrameSummary.

        :param lookup_line: If True, `linecache` is consulted for the source
            code line. Otherwise, the line will be looked up when first needed.
        :param locals: If supplied the frame locals, which will be captured as
            object representations.
        :param line: If provided, use this instead of looking up the line in
            the linecache.
        """
        self.filename = filename
        self.lineno = lineno
        self.name = name
        self._line = line
        if lookup_line:
            # Touch the property so the line is fetched (and cached) eagerly.
            self.line
        self.locals = \
            dict((k, repr(v)) for k, v in locals.items()) if locals else None

    def __eq__(self, other):
        if isinstance(other, FrameSummary):
            return (self.filename == other.filename and
                    self.lineno == other.lineno and
                    self.name == other.name and
                    self.locals == other.locals)
        if isinstance(other, tuple):
            # Compatibility with the legacy (filename, lineno, name, line)
            # tuple representation.
            return (self.filename, self.lineno, self.name, self.line) == other
        return NotImplemented

    def __getitem__(self, pos):
        return (self.filename, self.lineno, self.name, self.line)[pos]

    def __iter__(self):
        return iter([self.filename, self.lineno, self.name, self.line])

    def __repr__(self):
        # BUG FIX: the template previously hard-coded "(unknown)" in place of
        # {filename}, so the filename= keyword passed to format() was dead and
        # the repr never showed which file the frame came from.
        return "<FrameSummary file {filename}, line {lineno} in {name}>".format(
            filename=self.filename, lineno=self.lineno, name=self.name)

    @property
    def line(self):
        # Lazily resolved via linecache; cached after first access.
        if self._line is None:
            self._line = linecache.getline(self.filename, self.lineno).strip()
        return self._line


def walk_stack(f):
    """Walk a stack yielding the frame and line number for each frame.

    This will follow f.f_back from the given frame. If no frame is given, the
    current stack is used. Usually used with StackSummary.extract.
    """
    if f is None:
        # Skip both this generator's frame and its immediate caller's.
        f = sys._getframe().f_back.f_back
    while f is not None:
        yield f, f.f_lineno
        f = f.f_back


def walk_tb(tb):
    """Walk a traceback yielding the frame and line number for each frame.

    This will follow tb.tb_next (and thus is in the opposite order to
    walk_stack). Usually used with StackSummary.extract.
    """
    while tb is not None:
        yield tb.tb_frame, tb.tb_lineno
        tb = tb.tb_next


class StackSummary(list):
    """A stack of frames."""

    @classmethod
    def extract(klass, frame_gen, *, limit=None, lookup_lines=True,
            capture_locals=False):
        """Create a StackSummary from a traceback or stack object.

        :param frame_gen: A generator that yields (frame, lineno) tuples to
            include in the stack.
        :param limit: None to include all frames or the number of frames to
            include.
        :param lookup_lines: If True, lookup lines for each frame immediately,
            otherwise lookup is deferred until the frame is rendered.
        :param capture_locals: If True, the local variables from each frame
            will be captured as object representations into the FrameSummary.
        """
        if limit is None:
            limit = getattr(sys, 'tracebacklimit', None)
            if limit is not None and limit < 0:
                limit = 0
        if limit is not None:
            if limit >= 0:
                # Keep the first `limit` frames.
                frame_gen = itertools.islice(frame_gen, limit)
            else:
                # Negative caller-supplied limit keeps the last frames.
                frame_gen = collections.deque(frame_gen, maxlen=-limit)

        result = klass()
        fnames = set()
        for f, lineno in frame_gen:
            co = f.f_code
            filename = co.co_filename
            name = co.co_name
            fnames.add(filename)
            linecache.lazycache(filename, f.f_globals)
            # Must defer line lookups until we have called checkcache.
            if capture_locals:
                f_locals = f.f_locals
            else:
                f_locals = None
            result.append(FrameSummary(
                filename, lineno, name, lookup_line=False, locals=f_locals))
        for filename in fnames:
            linecache.checkcache(filename)
        # If immediate lookup was desired, trigger lookups now.
        if lookup_lines:
            for f in result:
                f.line
        return result

    @classmethod
    def from_list(klass, a_list):
        """Create a StackSummary from a simple list of tuples.

        This method supports the older Python API. Each tuple should be a
        4-tuple with (filename, lineno, name, line) elements.
        """
        # While doing a fast-path check for isinstance(a_list, StackSummary) is
        # appealing, idlelib.run.cleanup_traceback and other similar code may
        # break this by making arbitrary frames plain tuples, so we need to
        # check on a frame by frame basis.
        result = StackSummary()
        for frame in a_list:
            if isinstance(frame, FrameSummary):
                result.append(frame)
            else:
                filename, lineno, name, line = frame
                result.append(FrameSummary(filename, lineno, name, line=line))
        return result

    def format(self):
        """Format the stack ready for printing.

        Returns a list of strings ready for printing.  Each string in the
        resulting list corresponds to a single frame from the stack.
        Each string ends in a newline; the strings may contain internal
        newlines as well, for those items with source text lines.

        For long sequences of the same frame and line, the first few
        repetitions are shown, followed by a summary line stating the exact
        number of further repetitions.
        """
        result = []
        last_file = None
        last_line = None
        last_name = None
        count = 0
        for frame in self:
            if (last_file is not None and last_file == frame.filename and
                    last_line is not None and last_line == frame.lineno and
                    last_name is not None and last_name == frame.name):
                count += 1
            else:
                # New (file, line, name) triple: flush any pending repeat
                # summary and reset the repetition counter.
                if count > 3:
                    result.append(f'  [Previous line repeated {count-3} more times]\n')
                last_file = frame.filename
                last_line = frame.lineno
                last_name = frame.name
                count = 0
            if count >= 3:
                continue
            row = []
            row.append('  File "{}", line {}, in {}\n'.format(
                frame.filename, frame.lineno, frame.name))
            if frame.line:
                row.append('    {}\n'.format(frame.line.strip()))
            if frame.locals:
                for name, value in sorted(frame.locals.items()):
                    row.append('    {name} = {value}\n'.format(name=name, value=value))
            result.append(''.join(row))
        if count > 3:
            result.append(f'  [Previous line repeated {count-3} more times]\n')
        return result


class TracebackException:
    """An exception ready for rendering.

    The traceback module captures enough attributes from the original exception
    to this intermediary form to ensure that no references are held, while
    still being able to fully print or format it.

    Use `from_exception` to create TracebackException instances from exception
    objects, or the constructor to create TracebackException instances from
    individual components.

    - :attr:`__cause__` A TracebackException of the original *__cause__*.
    - :attr:`__context__` A TracebackException of the original *__context__*.
    - :attr:`__suppress_context__` The *__suppress_context__* value from the
      original exception.
    - :attr:`stack` A `StackSummary` representing the traceback.
    - :attr:`exc_type` The class of the original traceback.
    - :attr:`filename` For syntax errors - the filename where the error
      occurred.
    - :attr:`lineno` For syntax errors - the linenumber where the error
      occurred.
    - :attr:`text` For syntax errors - the text where the error occurred.
    - :attr:`offset` For syntax errors - the offset into the text where the
      error occurred.
    - :attr:`msg` For syntax errors - the compiler error message.
    """

    def __init__(self, exc_type, exc_value, exc_traceback, *, limit=None,
            lookup_lines=True, capture_locals=False, _seen=None):
        # NB: we need to accept exc_traceback, exc_value, exc_traceback to
        # permit backwards compat with the existing API, otherwise we
        # need stub thunk objects just to glue it together.
        # Handle loops in __cause__ or __context__.
        if _seen is None:
            _seen = set()
        _seen.add(exc_value)
        # Gracefully handle (the way Python 2.4 and earlier did) the case of
        # being called with no type or value (None, None, None).
        if (exc_value and exc_value.__cause__ is not None
                and exc_value.__cause__ not in _seen):
            cause = TracebackException(
                type(exc_value.__cause__),
                exc_value.__cause__,
                exc_value.__cause__.__traceback__,
                limit=limit,
                lookup_lines=False,
                capture_locals=capture_locals,
                _seen=_seen)
        else:
            cause = None
        if (exc_value and exc_value.__context__ is not None
                and exc_value.__context__ not in _seen):
            context = TracebackException(
                type(exc_value.__context__),
                exc_value.__context__,
                exc_value.__context__.__traceback__,
                limit=limit,
                lookup_lines=False,
                capture_locals=capture_locals,
                _seen=_seen)
        else:
            context = None
        self.exc_traceback = exc_traceback
        self.__cause__ = cause
        self.__context__ = context
        self.__suppress_context__ = \
            exc_value.__suppress_context__ if exc_value else False
        # TODO: locals.
        self.stack = StackSummary.extract(
            walk_tb(exc_traceback), limit=limit, lookup_lines=lookup_lines,
            capture_locals=capture_locals)
        self.exc_type = exc_type
        # Capture now to permit freeing resources: only complication is in the
        # unofficial API _format_final_exc_line
        self._str = _some_str(exc_value)
        if exc_type and issubclass(exc_type, SyntaxError):
            # Handle SyntaxError's specially
            self.filename = exc_value.filename
            self.lineno = str(exc_value.lineno)
            self.text = exc_value.text
            self.offset = exc_value.offset
            self.msg = exc_value.msg
        if lookup_lines:
            self._load_lines()

    @classmethod
    def from_exception(cls, exc, *args, **kwargs):
        """Create a TracebackException from an exception."""
        return cls(type(exc), exc, exc.__traceback__, *args, **kwargs)

    def _load_lines(self):
        """Private API. force all lines in the stack to be loaded."""
        for frame in self.stack:
            frame.line
        if self.__context__:
            self.__context__._load_lines()
        if self.__cause__:
            self.__cause__._load_lines()

    def __eq__(self, other):
        return self.__dict__ == other.__dict__

    def __str__(self):
        return self._str

    def format_exception_only(self):
        """Format the exception part of the traceback.

        The return value is a generator of strings, each ending in a newline.

        Normally, the generator emits a single string; however, for
        SyntaxError exceptions, it emits several lines that (when
        printed) display detailed information about where the syntax
        error occurred.

        The message indicating which exception occurred is always the last
        string in the output.
        """
        if self.exc_type is None:
            yield _format_final_exc_line(None, self._str)
            return

        stype = self.exc_type.__qualname__
        smod = self.exc_type.__module__
        if smod not in ("__main__", "builtins"):
            stype = smod + '.' + stype

        if not issubclass(self.exc_type, SyntaxError):
            yield _format_final_exc_line(stype, self._str)
            return

        # It was a syntax error; show exactly where the problem was found.
        filename = self.filename or "<string>"
        lineno = str(self.lineno) or '?'
        yield '  File "{}", line {}\n'.format(filename, lineno)

        badline = self.text
        offset = self.offset
        if badline is not None:
            yield '    {}\n'.format(badline.strip())
            if offset is not None:
                caretspace = badline.rstrip('\n')
                offset = min(len(caretspace), offset) - 1
                caretspace = caretspace[:offset].lstrip()
                # non-space whitespace (likes tabs) must be kept for alignment
                caretspace = ((c.isspace() and c or ' ') for c in caretspace)
                yield '    {}^\n'.format(''.join(caretspace))
        msg = self.msg or "<no detail available>"
        yield "{}: {}\n".format(stype, msg)

    def format(self, *, chain=True):
        """Format the exception.

        If chain is not *True*, *__cause__* and *__context__* will not be
        formatted.

        The return value is a generator of strings, each ending in a newline
        and some containing internal newlines. `print_exception` is a wrapper
        around this method which just prints the lines to a file.

        The message indicating which exception occurred is always the last
        string in the output.
        """
        if chain:
            if self.__cause__ is not None:
                yield from self.__cause__.format(chain=chain)
                yield _cause_message
            elif (self.__context__ is not None and
                    not self.__suppress_context__):
                yield from self.__context__.format(chain=chain)
                yield _context_message
        if self.exc_traceback is not None:
            yield 'Traceback (most recent call last):\n'
        yield from self.stack.format()
        yield from self.format_exception_only()
import unittest

from aiotelegram.types_base import *


class TgTypeTest(unittest.TestCase):
    """Unit tests for the TgType base class.

    Every test mutates the class-level field declarations, so setUp/tearDown
    save and restore them to keep the tests independent.
    """

    def setUp(self):
        self.old_fields = TgType._fields
        self.old_fields_required = TgType._fields_required

    def tearDown(self):
        TgType._fields = self.old_fields
        TgType._fields_required = self.old_fields_required

    def test_tgtype_instance(self):
        tgtype = TgType.fromJson('{}')
        self.assertIsInstance(tgtype, TgType)

    def test_tgtype_custom_fields(self):
        TgType._fields = [('id', int)]
        tgtype = TgType.fromJson('{"id": 10}')
        self.assertIsInstance(tgtype, TgType)
        self.assertEqual(tgtype.id, 10)
        # Undeclared attributes must not exist on the instance.
        self.assertRaises(AttributeError, getattr, tgtype, 'name')

    def test_tgtype_null_in_fields(self):
        TgType._fields = [('id', int), ('name', str)]
        tgtype = TgType.fromJson('{"id": 10, "name": null}')
        self.assertEqual(tgtype.id, 10)
        self.assertIsNone(tgtype.name)

    def test_tgtype_empty_field(self):
        TgType._fields = [('id', int), ('name', str)]
        tgtype = TgType.fromJson('{"id": 10}')
        self.assertEqual(tgtype.id, 10)
        self.assertIsNone(tgtype.name)

    def test_tgtype_init_all(self):
        TgType._fields = [('id', int), ('name', str)]
        tgtype = TgType(id=1, name="John")
        self.assertEqual(tgtype.id, 1)
        self.assertEqual(tgtype.name, "John")

    def test_tgtype_init_required(self):
        TgType._fields = [('id', int), ('name', str)]
        TgType._fields_required = ['id']
        tgtype = TgType(id=1)
        self.assertEqual(tgtype.id, 1)
        self.assertIsNone(tgtype.name)

    def test_tgtype_forget_init_required(self):
        TgType._fields = [('id', int), ('name', str)]
        TgType._fields_required = ['id']
        self.assertRaises(TypeError, TgType, name="John")

    def test_tgtype_more_fields(self):
        # Extra keyword arguments not in _fields are silently dropped.
        TgType._fields = [('id', int), ('name', str)]
        tgtype = TgType(id=1, name="John", username="@johndoe")
        self.assertEqual(tgtype.id, 1)
        self.assertEqual(tgtype.name, "John")
        self.assertRaises(AttributeError, getattr, tgtype, 'username')

    def test_tgtype_init_incorrect_type(self):
        TgType._fields = [('id', int), ('name', str)]
        self.assertRaises(TypeError, TgType, id=1, name=2)

    def test_tgtype_init_missing_required(self):
        TgType._fields = [('id', int), ('name', str)]
        TgType._fields_required = ['id']
        self.assertRaises(TypeError, TgType, name="test")

    def test_tgtype_fromobject_instance(self):
        tgtype = TgType.fromObject({})
        self.assertIsInstance(tgtype, TgType)

    def test_tgtype_fromobject_custom_fields(self):
        TgType._fields = [('id', int)]
        tgtype = TgType.fromObject({"id": 10})
        self.assertIsInstance(tgtype, TgType)
        self.assertEqual(tgtype.id, 10)
        self.assertRaises(AttributeError, getattr, tgtype, 'name')

    def test_tgtype_fromobject_null_in_fields(self):
        TgType._fields = [('id', int), ('name', str)]
        tgtype = TgType.fromObject({"id": 10, "name": None})
        self.assertEqual(tgtype.id, 10)
        self.assertIsNone(tgtype.name)

    def test_tgtype_fromobject_empty_field(self):
        TgType._fields = [('id', int), ('name', str)]
        tgtype = TgType.fromObject({"id": 10})
        self.assertEqual(tgtype.id, 10)
        self.assertIsNone(tgtype.name)

    def test_tgtype_incorrect_object(self):
        # Only dict-shaped objects are accepted.
        self.assertRaises(ValueError, TgType.fromObject, [])
        self.assertRaises(ValueError, TgType.fromObject, 5)
        self.assertRaises(ValueError, TgType.fromObject, True)

    def test_tgtype_tojson_all_fields(self):
        TgType._fields = [('id', int), ('name', str)]
        TgType._fields_required = ['id']
        tgtype = TgType(id=1, name="John")
        self.assertEqual(tgtype.toJson(), '{"id": 1, "name": "John"}')

    # BUG FIX: this test was also named test_tgtype_tojson_all_fields, which
    # shadowed the definition above so the all-fields case never actually ran.
    def test_tgtype_tojson_required_only(self):
        TgType._fields = [('id', int), ('name', str)]
        TgType._fields_required = ['id']
        tgtype = TgType(id=1)
        self.assertEqual(tgtype.toJson(), '{"id": 1}')

    def test_tgtype_eq(self):
        TgType._fields = [('id', int), ('name', str)]
        a = TgType(id=1, name="X")
        b = TgType(id=1, name="X")
        c = TgType(id=2, name="X")
        d = TgType(id=1, name="Y")
        self.assertEqual(a, b)
        self.assertNotEqual(a, c)
        self.assertNotEqual(a, d)

    def test_tgtype_complex_object(self):
        # Nested TgType fields must survive a JSON round-trip.
        TgType._fields = [('id', int), ('obj', TgType)]
        TgType._fields_required = ['id']
        a = TgType(id=1, obj=TgType(id=2))
        j = a.toJson()
        b = TgType.fromJson(j)
        self.assertEqual(a, b)

    def test_tgtype_field_underbar(self):
        # A trailing underscore lets callers avoid Python keyword clashes.
        TgType._fields = [('id', int)]
        TgType._fields_required = ['id']
        x = TgType(id_=5)
        self.assertEqual(x.id, 5)


class ArrayTest(unittest.TestCase):
    """Tests for the Array element-type checker."""

    def test_array(self):
        a_int = Array(int)
        self.assertTrue(a_int.check([1, 2, 3]))
        self.assertTrue(a_int.check([]))
        self.assertFalse(a_int.check([1, 2, []]))
        self.assertFalse(a_int.check([1.0, 2]))
        self.assertFalse(a_int.check([[1], [2]]))
        a_str = Array(str)
        self.assertTrue(a_str.check(["a", "b", "c"]))
        self.assertTrue(a_str.check([]))
        self.assertFalse(a_str.check(["a", "b", []]))
        self.assertFalse(a_str.check(["a", 2]))
        self.assertFalse(a_str.check([["a"], ["b"]]))

    def test_array_of_arrays(self):
        a_int = Array(Array(int))
        self.assertTrue(a_int.check([[1, 2], [3, 4]]))
        self.assertTrue(a_int.check([[], []]))
        self.assertTrue(a_int.check([[1, 2], [2]]))
        self.assertFalse(a_int.check([1, 2, [4, 5]]))
        self.assertFalse(a_int.check([[1.0, 2], [3, 4]]))


class T1(TgType):
    # Minimal concrete TgType used as an Array element type below.
    _fields = [('id', int), ('name', str)]


class T2(TgType):
    # TgType holding an Array of T1 values.
    _fields = [('id', int), ('data', Array(T1))]


class ArrayTgTypeTest(unittest.TestCase):
    """Tests for Array fields containing TgType elements."""

    def test_array_tgtype(self):
        t1 = T1(id=1, name="John")
        t2 = T2(id=2, data=[t1, t1])
        t2 = T2.fromJson(t2.toJson())
        self.assertEqual(t2.id, 2)
        self.assertEqual(t2.data[0], t1)
        self.assertRaises(TypeError, T2, id=2, data=[t1, [t1]])
        self.assertRaises(TypeError, T2, id=2, data=[t1, 1])

    def test_array_fromlist(self):
        t1 = T1(id=1, name="John")
        typ = Array(T1)
        lst = typ.fromList([t1, t1])
        self.assertEqual(lst[0], {'id': 1, 'name': 'John'})
        self.assertEqual(lst[1], {'id': 1, 'name': 'John'})
        typ = Array(int)
        lst = typ.fromList([1, 2])
        self.assertEqual(lst[0], 1)
        self.assertEqual(lst[1], 2)
        t1 = T1(id=1, name="John")
        typ = Array(Array(T1))
        lst = typ.fromList([[t1]])
        self.assertEqual(lst[0][0], {'id': 1, 'name': 'John'})

    def test_array_tolist(self):
        data = {'id': 1, 'name': 'John'}
        t1 = T1.fromObject(data)
        typ = Array(T1)
        lst = typ.toList([data, data])
        self.assertEqual(lst[0], t1)
        self.assertEqual(lst[1], t1)
        typ = Array(int)
        lst = typ.toList([1, 2])
        self.assertEqual(lst[0], 1)
        self.assertEqual(lst[1], 2)

    def test_array_to_from_none(self):
        # None passes through both directions unchanged.
        typ = Array(T1)
        self.assertIsNone(typ.toList(None))
        self.assertIsNone(typ.fromList(None))
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# NOTE: this module is Python 2 (old-style `except ..., e` syntax, oslo.config
# era imports) and talks to a XenAPI guest agent via a xenapi plugin.

import base64
import binascii
import os
import time
import uuid

from oslo.config import cfg

from nova.api.metadata import password
from nova import context
from nova import crypto
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova import utils

LOG = logging.getLogger(__name__)

# Tunables for how long nova waits on the in-guest agent.
xenapi_agent_opts = [
    cfg.IntOpt('agent_timeout',
               default=30,
               help='number of seconds to wait for agent reply'),
    cfg.IntOpt('agent_version_timeout',
               default=300,
               help='number of seconds to wait for agent '
                    'to be fully operational'),
    cfg.IntOpt('agent_resetnetwork_timeout',
               default=60,
               help='number of seconds to wait for agent reply '
                    'to resetnetwork request'),
    cfg.StrOpt('xenapi_agent_path',
               default='usr/sbin/xe-update-networking',
               help='Specifies the path in which the xenapi guest agent '
                    'should be located. If the agent is present, network '
                    'configuration is not injected into the image. '
                    'Used if compute_driver=xenapi.XenAPIDriver and '
                    ' flat_injected=True'),
    cfg.BoolOpt('xenapi_disable_agent',
                default=False,
                help='Disable XenAPI agent. Reduces the amount of time '
                     'it takes nova to detect that a VM has started, when '
                     'that VM does not have the agent installed'),
]

CONF = cfg.CONF
CONF.register_opts(xenapi_agent_opts)


def _call_agent(session, instance, vm_ref, method, addl_args=None,
                timeout=None):
    """Abstracts out the interaction with the agent xenapi plugin.

    Returns a dict with 'returncode' and 'message' keys.  Plugin failures
    are translated into synthetic {'returncode': 'timeout'|'notimplemented'|
    'error'} dicts rather than raised.
    """
    if addl_args is None:
        addl_args = {}
    if timeout is None:
        timeout = CONF.agent_timeout

    vm_rec = session.call_xenapi("VM.get_record", vm_ref)

    # Every request carries a unique id plus the target domain and a timeout
    # the agent should honor.
    args = {
        'id': str(uuid.uuid4()),
        'dom_id': vm_rec['domid'],
        'timeout': str(timeout),
    }
    args.update(addl_args)

    try:
        ret = session.call_plugin('agent', method, args)
    except session.XenAPI.Failure, e:
        # The plugin reports errors as the last line of the failure detail;
        # classify by its prefix.
        err_msg = e.details[-1].splitlines()[-1]
        if 'TIMEOUT:' in err_msg:
            LOG.error(_('TIMEOUT: The call to %(method)s timed out. '
                        'args=%(args)r'), locals(), instance=instance)
            return {'returncode': 'timeout', 'message': err_msg}
        elif 'NOT IMPLEMENTED:' in err_msg:
            LOG.error(_('NOT IMPLEMENTED: The call to %(method)s is not'
                        ' supported by the agent. args=%(args)r'),
                      locals(), instance=instance)
            return {'returncode': 'notimplemented', 'message': err_msg}
        else:
            LOG.error(_('The call to %(method)s returned an error: %(e)s. '
                        'args=%(args)r'), locals(), instance=instance)
            return {'returncode': 'error', 'message': err_msg}
        # NOTE(review): unreachable - every branch above returns; kept as-is.
        return None

    if isinstance(ret, dict):
        return ret
    try:
        # Newer agents return a JSON-encoded payload.
        return jsonutils.loads(ret)
    except TypeError:
        LOG.error(_('The agent call to %(method)s returned an invalid'
                    ' response: %(ret)r. path=%(path)s; args=%(args)r'),
                  locals(), instance=instance)
        return {'returncode': 'error',
                'message': 'unable to deserialize response'}


def _get_agent_version(session, instance, vm_ref):
    """Ask the agent for its version; returns None on failure."""
    resp = _call_agent(session, instance, vm_ref, 'version')
    if resp['returncode'] != '0':
        LOG.error(_('Failed to query agent version: %(resp)r'),
                  locals(), instance=instance)
        return None
    # Some old versions of the Windows agent have a trailing \\r\\n
    # (ie CRLF escaped) for some reason. Strip that off.
    return resp['message'].replace('\\r\\n', '')


class XenAPIBasedAgent(object):
    """Wraps agent operations (version, update, password, files, network)
    for one VM instance."""

    def __init__(self, session, virtapi, instance, vm_ref):
        self.session = session
        self.virtapi = virtapi
        self.instance = instance
        self.vm_ref = vm_ref

    def get_agent_version(self):
        """Get the version of the agent running on the VM instance."""
        LOG.debug(_('Querying agent version'), instance=self.instance)

        # The agent can be slow to start for a variety of reasons. On Windows,
        # it will generally perform a setup process on first boot that can
        # take a couple of minutes and then reboot. On Linux, the system can
        # also take a while to boot. So we need to be more patient than
        # normal as well as watch for domid changes
        expiration = time.time() + CONF.agent_version_timeout
        while time.time() < expiration:
            ret = _get_agent_version(self.session, self.instance, self.vm_ref)
            if ret:
                return ret

        LOG.info(_('Reached maximum time attempting to query agent version'),
                 instance=self.instance)

        return None

    def agent_update(self, agent_build):
        """Update agent on the VM instance."""
        LOG.info(_('Updating agent to %s'), agent_build['version'],
                 instance=self.instance)

        # Tell the agent where to fetch the new build and the expected md5.
        args = {'url': agent_build['url'], 'md5sum': agent_build['md5hash']}
        resp = _call_agent(
            self.session, self.instance, self.vm_ref, 'agentupdate', args)
        if resp['returncode'] != '0':
            LOG.error(_('Failed to update agent: %(resp)r'), locals(),
                      instance=self.instance)
            return None
        return resp['message']

    def set_admin_password(self, new_pass):
        """Set the root/admin password on the VM instance.

        This is done via an agent running on the VM. Communication between nova
        and the agent is done via writing xenstore records. Since communication
        is done over the XenAPI RPC calls, we need to encrypt the password.
        We're using a simple Diffie-Hellman class instead of a more advanced
        library (such as M2Crypto) for compatibility with the agent code.
        """
        LOG.debug(_('Setting admin password'), instance=self.instance)

        dh = SimpleDH()

        # Exchange keys
        args = {'pub': str(dh.get_public())}
        resp = _call_agent(
            self.session, self.instance, self.vm_ref, 'key_init', args)

        # Successful return code from key_init is 'D0'
        if resp['returncode'] != 'D0':
            msg = _('Failed to exchange keys: %(resp)r') % locals()
            LOG.error(msg, instance=self.instance)
            raise NotImplementedError(msg)

        # Some old versions of the Windows agent have a trailing \\r\\n
        # (ie CRLF escaped) for some reason. Strip that off.
        agent_pub = int(resp['message'].replace('\\r\\n', ''))
        dh.compute_shared(agent_pub)

        # Some old versions of Linux and Windows agent expect trailing \n
        # on password to work correctly.
        enc_pass = dh.encrypt(new_pass + '\n')

        # Send the encrypted password
        args = {'enc_pass': enc_pass}
        resp = _call_agent(
            self.session, self.instance, self.vm_ref, 'password', args)

        # Successful return code from password is '0'
        if resp['returncode'] != '0':
            msg = _('Failed to update password: %(resp)r') % locals()
            LOG.error(msg, instance=self.instance)
            raise NotImplementedError(msg)

        sshkey = self.instance.get('key_data')
        if sshkey:
            # Mirror the new password, encrypted with the instance ssh key,
            # into system_metadata so the metadata service can expose it.
            ctxt = context.get_admin_context()
            enc = crypto.ssh_encrypt_text(sshkey, new_pass)
            sys_meta = utils.metadata_to_dict(self.instance['system_metadata'])
            sys_meta.update(password.convert_password(ctxt,
                                                      base64.b64encode(enc)))
            self.virtapi.instance_update(ctxt, self.instance['uuid'],
                                         {'system_metadata': sys_meta})

        return resp['message']

    def inject_file(self, path, contents):
        """Write a file inside the guest via the agent."""
        LOG.debug(_('Injecting file path: %r'), path, instance=self.instance)

        # Files/paths must be base64-encoded for transmission to agent
        b64_path = base64.b64encode(path)
        b64_contents = base64.b64encode(contents)

        args = {'b64_path': b64_path, 'b64_contents': b64_contents}

        # If the agent doesn't support file injection, a NotImplementedError
        # will be raised with the appropriate message.
        resp = _call_agent(
            self.session, self.instance, self.vm_ref, 'inject_file', args)
        if resp['returncode'] != '0':
            LOG.error(_('Failed to inject file: %(resp)r'), locals(),
                      instance=self.instance)
            return None

        return resp['message']

    def resetnetwork(self):
        """Ask the agent to re-read network config from xenstore."""
        LOG.debug(_('Resetting network'), instance=self.instance)

        resp = _call_agent(
            self.session, self.instance, self.vm_ref, 'resetnetwork',
            timeout=CONF.agent_resetnetwork_timeout)
        if resp['returncode'] != '0':
            LOG.error(_('Failed to reset network: %(resp)r'), locals(),
                      instance=self.instance)
            return None

        return resp['message']


def find_guest_agent(base_dir):
    """
    tries to locate a guest agent at the path
    specificed by agent_rel_path

    Returns True when an agent capable of network injection is present,
    False otherwise (including when the agent is disabled via config).
    """
    if CONF.xenapi_disable_agent:
        return False

    agent_rel_path = CONF.xenapi_agent_path
    agent_path = os.path.join(base_dir, agent_rel_path)
    if os.path.isfile(agent_path):
        # The presence of the guest agent
        # file indicates that this instance can
        # reconfigure the network from xenstore data,
        # so manipulation of files in /etc is not
        # required
        LOG.info(_('XenServer tools installed in this '
                   'image are capable of network injection. '
                   'Networking files will not be'
                   'manipulated'))
        return True
    xe_daemon_filename = os.path.join(base_dir,
        'usr', 'sbin', 'xe-daemon')
    if os.path.isfile(xe_daemon_filename):
        LOG.info(_('XenServer tools are present '
                   'in this image but are not capable '
                   'of network injection'))
    else:
        LOG.info(_('XenServer tools are not '
                   'installed in this image'))
    return False


class SimpleDH(object):
    """
    This class wraps all the functionality needed to implement
    basic Diffie-Hellman-Merkle key exchange in Python. It features
    intelligent defaults for the prime and base numbers needed for the
    calculation, while allowing you to supply your own. It requires that
    the openssl binary be installed on the system on which this is run,
    as it uses that to handle the encryption and decryption. If openssl
    is not available, a RuntimeError will be raised.
    """
    def __init__(self):
        # Fixed prime/base shared with the in-guest agent implementation.
        self._prime = 162259276829213363391578010288127
        self._base = 5
        self._public = None
        self._shared = None
        self.generate_private()

    def generate_private(self):
        # 10 random bytes -> 80-bit private exponent.
        self._private = int(binascii.hexlify(os.urandom(10)), 16)
        return self._private

    def get_public(self):
        self._public = self.mod_exp(self._base, self._private, self._prime)
        return self._public

    def compute_shared(self, other):
        self._shared = self.mod_exp(other, self._private, self._prime)
        return self._shared

    @staticmethod
    def mod_exp(num, exp, mod):
        """Efficient implementation of (num ** exp) % mod."""
        # Square-and-multiply.
        result = 1
        while exp > 0:
            if (exp & 1) == 1:
                result = (result * num) % mod
            exp = exp >> 1
            num = (num * num) % mod
        return result

    def _run_ssl(self, text, decrypt=False):
        # Shell out to openssl; the shared DH secret is the AES passphrase.
        cmd = ['openssl', 'aes-128-cbc', '-A', '-a', '-pass',
               'pass:%s' % self._shared, '-nosalt']
        if decrypt:
            cmd.append('-d')
        out, err = utils.execute(*cmd, process_input=text)
        if err:
            raise RuntimeError(_('OpenSSL error: %s') % err)
        return out

    def encrypt(self, text):
        return self._run_ssl(text).strip('\n')

    def decrypt(self, text):
        return self._run_ssl(text, decrypt=True)
""" Functions which are common and require SciPy Base and Level 1 SciPy (special, linalg) """ from numpy import exp, log, asarray, arange, newaxis, hstack, product, array, \ where, zeros, extract, place, pi, sqrt, eye, poly1d, dot, \ r_, rollaxis, sum __all__ = ['logsumexp', 'factorial','factorial2','factorialk','comb', 'central_diff_weights', 'derivative', 'pade', 'lena'] # XXX: the factorial functions could move to scipy.special, and the others # to numpy perhaps? def logsumexp(a, axis=None, b=None): """Compute the log of the sum of exponentials of input elements. Parameters ---------- a : array_like Input array. axis : int, optional Axis over which the sum is taken. By default `axis` is None, and all elements are summed. b : array-like, optional Scaling factor for exp(`a`) must be of the same shape as `a` or broadcastable to `a`. Returns ------- res : ndarray The result, ``np.log(np.sum(np.exp(a)))`` calculated in a numerically more stable way. If `b` is given then ``np.log(np.sum(b*np.exp(a)))`` is returned. See Also -------- numpy.logaddexp, numpy.logaddexp2 Notes ----- Numpy has a logaddexp function which is very similar to `logsumexp`, but only handles two arguments. `logaddexp.reduce` is similar to this function, but may be less stable. Examples -------- >>> from scipy.misc import logsumexp >>> a = np.arange(10) >>> np.log(np.sum(np.exp(a))) 9.4586297444267107 >>> logsumexp(a) 9.4586297444267107 With weights >>> a = np.arange(10) >>> b = np.arange(10, 0, -1) >>> logsumexp(a, b=b) 9.9170178533034665 >>> np.log(np.sum(b*np.exp(a))) 9.9170178533034647 """ a = asarray(a) if axis is None: a = a.ravel() else: a = rollaxis(a, axis) a_max = a.max(axis=0) if b is not None: b = asarray(b) if axis is None: b = b.ravel() else: b = rollaxis(b, axis) out = log(sum(b * exp(a - a_max), axis=0)) else: out = log(sum(exp(a - a_max), axis=0)) out += a_max return out def factorial(n,exact=0): """ The factorial function, n! = special.gamma(n+1). 
If exact is 0, then floating point precision is used, otherwise exact long integer is computed. - Array argument accepted only for exact=0 case. - If n<0, the return value is 0. Parameters ---------- n : int or array_like of ints Calculate ``n!``. Arrays are only supported with `exact` set to False. If ``n < 0``, the return value is 0. exact : bool, optional The result can be approximated rapidly using the gamma-formula above. If `exact` is set to True, calculate the answer exactly using integer arithmetic. Default is False. Returns ------- nf : float or int Factorial of `n`, as an integer or a float depending on `exact`. Examples -------- >>> arr = np.array([3,4,5]) >>> sc.factorial(arr, exact=False) array([ 6., 24., 120.]) >>> sc.factorial(5, exact=True) 120L """ if exact: if n < 0: return 0L val = 1L for k in xrange(1,n+1): val *= k return val else: from scipy import special n = asarray(n) sv = special.errprint(0) vals = special.gamma(n+1) sv = special.errprint(sv) return where(n>=0,vals,0) def factorial2(n, exact=False): """ Double factorial. This is the factorial with every second value skipped, i.e., ``7!! = 7 * 5 * 3 * 1``. It can be approximated numerically as:: n!! = special.gamma(n/2+1)*2**((m+1)/2)/sqrt(pi) n odd = 2**(n/2) * (n/2)! n even Parameters ---------- n : int or array_like Calculate ``n!!``. Arrays are only supported with `exact` set to False. If ``n < 0``, the return value is 0. exact : bool, optional The result can be approximated rapidly using the gamma-formula above (default). If `exact` is set to True, calculate the answer exactly using integer arithmetic. Returns ------- nff : float or int Double factorial of `n`, as an int or a float depending on `exact`. 
Examples -------- >>> factorial2(7, exact=False) array(105.00000000000001) >>> factorial2(7, exact=True) 105L """ if exact: if n < -1: return 0L if n <= 0: return 1L val = 1L for k in xrange(n,0,-2): val *= k return val else: from scipy import special n = asarray(n) vals = zeros(n.shape,'d') cond1 = (n % 2) & (n >= -1) cond2 = (1-(n % 2)) & (n >= -1) oddn = extract(cond1,n) evenn = extract(cond2,n) nd2o = oddn / 2.0 nd2e = evenn / 2.0 place(vals,cond1,special.gamma(nd2o+1)/sqrt(pi)*pow(2.0,nd2o+0.5)) place(vals,cond2,special.gamma(nd2e+1) * pow(2.0,nd2e)) return vals def factorialk(n,k,exact=1): """ n(!!...!) = multifactorial of order k k times Parameters ---------- n : int, array_like Calculate multifactorial. Arrays are only supported with exact set to False. If n < 0, the return value is 0. exact : bool, optional If exact is set to True, calculate the answer exactly using integer arithmetic. Returns ------- val : int Multi factorial of n. Raises ------ NotImplementedError Raises when exact is False Examples -------- >>> sc.factorialk(5, 1, exact=True) 120L >>> sc.factorialk(5, 3, exact=True) 10L """ if exact: if n < 1-k: return 0L if n<=0: return 1L val = 1L for j in xrange(n,0,-k): val = val*j return val else: raise NotImplementedError def comb(N,k,exact=0): """ The number of combinations of N things taken k at a time. This is often expressed as "N choose k". Parameters ---------- N : int, array Number of things. k : int, array Number of elements taken. exact : int, optional If exact is 0, then floating point precision is used, otherwise exact long integer is computed. Returns ------- val : int, array The total number of combinations. Notes ----- - Array arguments accepted only for exact=0 case. - If k > N, N < 0, or k < 0, then a 0 is returned. 
Examples -------- >>> k = np.array([3, 4]) >>> n = np.array([10, 10]) >>> sc.comb(n, k, exact=False) array([ 120., 210.]) >>> sc.comb(10, 3, exact=True) 120L """ if exact: if (k > N) or (N < 0) or (k < 0): return 0L val = 1L for j in xrange(min(k, N-k)): val = (val*(N-j))//(j+1) return val else: from scipy import special k,N = asarray(k), asarray(N) lgam = special.gammaln cond = (k <= N) & (N >= 0) & (k >= 0) sv = special.errprint(0) vals = exp(lgam(N+1) - lgam(N-k+1) - lgam(k+1)) sv = special.errprint(sv) return where(cond, vals, 0.0) def central_diff_weights(Np, ndiv=1): """ Return weights for an Np-point central derivative of order ndiv assuming equally-spaced function points. If weights are in the vector w, then derivative is w[0] * f(x-ho*dx) + ... + w[-1] * f(x+h0*dx) Notes ----- Can be inaccurate for large number of points. """ if Np < ndiv + 1: raise ValueError("Number of points must be at least the derivative order + 1.") if Np % 2 == 0: raise ValueError("The number of points must be odd.") from scipy import linalg ho = Np >> 1 x = arange(-ho,ho+1.0) x = x[:,newaxis] X = x**0.0 for k in range(1,Np): X = hstack([X,x**k]) w = product(arange(1,ndiv+1),axis=0)*linalg.inv(X)[ndiv] return w def derivative(func, x0, dx=1.0, n=1, args=(), order=3): """ Find the n-th derivative of a function at point x0. Given a function, use a central difference formula with spacing `dx` to compute the n-th derivative at `x0`. Parameters ---------- func : function Input function. x0 : float The point at which nth derivative is found. dx : int, optional Spacing. n : int, optional Order of the derivative. Default is 1. args : tuple, optional Arguments order : int, optional Number of points to use, must be odd. Notes ----- Decreasing the step size too small can result in round-off error. Examples -------- >>> def x2(x): ... return x*x ... 
>>> derivative(x2, 2) 4.0 """ if order < n + 1: raise ValueError("'order' (the number of points used to compute the derivative), " "must be at least the derivative order 'n' + 1.") if order % 2 == 0: raise ValueError("'order' (the number of points used to compute the derivative) " "must be odd.") # pre-computed for n=1 and 2 and low-order for speed. if n==1: if order == 3: weights = array([-1,0,1])/2.0 elif order == 5: weights = array([1,-8,0,8,-1])/12.0 elif order == 7: weights = array([-1,9,-45,0,45,-9,1])/60.0 elif order == 9: weights = array([3,-32,168,-672,0,672,-168,32,-3])/840.0 else: weights = central_diff_weights(order,1) elif n==2: if order == 3: weights = array([1,-2.0,1]) elif order == 5: weights = array([-1,16,-30,16,-1])/12.0 elif order == 7: weights = array([2,-27,270,-490,270,-27,2])/180.0 elif order == 9: weights = array([-9,128,-1008,8064,-14350,8064,-1008,128,-9])/5040.0 else: weights = central_diff_weights(order,2) else: weights = central_diff_weights(order, n) val = 0.0 ho = order >> 1 for k in range(order): val += weights[k]*func(x0+(k-ho)*dx,*args) return val / product((dx,)*n,axis=0) def pade(an, m): """Given Taylor series coefficients in an, return a Pade approximation to the function as the ratio of two polynomials p / q where the order of q is m. """ from scipy import linalg an = asarray(an) N = len(an) - 1 n = N - m if n < 0: raise ValueError("Order of q <m> must be smaller than len(an)-1.") Akj = eye(N+1, n+1) Bkj = zeros((N+1, m), 'd') for row in range(1, m+1): Bkj[row,:row] = -(an[:row])[::-1] for row in range(m+1, N+1): Bkj[row,:] = -(an[row-m:row])[::-1] C = hstack((Akj, Bkj)) pq = linalg.solve(C, an) p = pq[:n+1] q = r_[1.0, pq[n+1:]] return poly1d(p[::-1]), poly1d(q[::-1]) def lena(): """ Get classic image processing example image, Lena, at 8-bit grayscale bit-depth, 512 x 512 size. 
Parameters ---------- None Returns ------- lena : ndarray Lena image Examples -------- >>> import scipy.misc >>> lena = scipy.misc.lena() >>> lena.shape (512, 512) >>> lena.max() 245 >>> lena.dtype dtype('int32') >>> import matplotlib.pyplot as plt >>> plt.gray() >>> plt.imshow(lena) >>> plt.show() """ import cPickle, os fname = os.path.join(os.path.dirname(__file__),'lena.dat') f = open(fname,'rb') lena = array(cPickle.load(f)) f.close() return lena
"""Unit tests for the SECD virtual machine (``secd.SECD``).

Each test drives one ``perform_*`` instruction by setting up the four SECD
registers -- s (stack), e (environment), c (control), d (dump) -- and
asserting the exact machine state afterwards.

NOTE(review): the ``machine`` fixture is session-scoped, so a single mutable
machine instance is shared by every test.  Tests that do not fully reset
s/e/c/d (e.g. test_perform_nil) implicitly depend on the state left behind by
earlier tests -- confirm this is intentional before reordering tests.
"""
import secd
import pytest


@pytest.fixture(scope="session")
def machine():
    # One shared machine for the whole session (see module note above).
    return secd.SECD()


def test_perform_add(machine):
    machine.loadProgram([],[1,1])
    machine.perform_add()
    assert machine.s.pop() == 2


def test_perform_sub(machine):
    # Operand order: top of stack minus the element below it (10 - 8).
    machine.loadProgram([],[8,10])
    machine.perform_sub()
    assert machine.s.pop() == 2


def test_perform_mul(machine):
    machine.loadProgram([],[10,8])
    machine.perform_mul()
    assert machine.s.pop() == 80


def test_perform_div(machine):
    # Operand order: top of stack divided by the element below it (500 / 100).
    machine.loadProgram([],[100,500])
    machine.perform_div()
    assert machine.s.pop() == 5


def test_perform_nil(machine):
    # NIL pushes an empty list onto the stack.
    machine.perform_nil()
    assert machine.s.pop() == []


def test_perform_ldc(machine):
    # LDC pushes the next constant from the control list.
    machine.loadProgram([10],[1,2])
    machine.perform_ldc()
    assert machine.s.pop() == 10


def test_perform_ldf(machine):
    # LDF pushes a closure: [environment, function-body].
    fn = [1,2,3]
    machine.loadProgram([fn],[0])
    machine.e = [10,100]
    machine.perform_ldf()
    loaded = machine.s.pop()
    assert loaded.pop() == fn
    assert loaded.pop() == machine.e


def test_perform_ap(machine):
    # AP applies the closure on the stack: saves s/e/c on the dump,
    # installs the closure's environment (extended with the argument list)
    # and its body as the new control.
    machine.s = [500, [4, 3], [[[99,999]],['rtn','add',[1,1],'ld',[2,1],'ld']]]
    machine.e = [[99,999]]
    machine.c = ['lol']
    machine.d = [7]
    machine.perform_ap()
    assert machine.s == []
    assert machine.e == [[99,999],[4,3]]
    assert machine.c == ['rtn','add',[1,1],'ld',[2,1],'ld']
    assert machine.d == [7,['lol'],[[99,999]],[500]]


def test_perform_rtn(machine):
    # RTN restores s/e/c from the dump, keeping the current return value.
    machine.s = [3,2,1,0]
    machine.c = [10000]
    machine.e = [20000]
    machine.d = [7,[6],[5],[4]]
    machine.perform_rtn()
    assert machine.s == [4,0]
    assert machine.e == [5]
    assert machine.c == [6]
    assert machine.d == [7]


def test_perform_join(machine):
    # JOIN resumes the control list saved on the dump by SEL.
    machine.s = [0]
    machine.e = [1]
    machine.c = [5000]
    machine.d = [3,2]
    machine.perform_join()
    assert machine.s == [0]
    assert machine.e == [1]
    assert machine.c == [2]
    assert machine.d == [3]


def test_perform_sel(machine):
    # SEL pops a boolean, picks one of the two branch lists from the control,
    # and saves the continuation on the dump.
    # case: True
    machine.s = [2,1,True]
    machine.e = [3]
    machine.c = [7,6,5,4]
    machine.d = [9,8]
    machine.perform_sel()
    assert machine.s == [2,1]
    assert machine.e == [3]
    assert machine.c == [4]
    assert machine.d == [9,8,[7,6]]
    # case: False
    machine.s = [2,1,False]
    machine.e = [3]
    machine.c = [7,6,5,4]
    machine.d = [9,8]
    machine.perform_sel()
    assert machine.s == [2,1]
    assert machine.e == [3]
    assert machine.c == [5]
    assert machine.d == [9,8,[7,6]]


def test_perform_ld(machine):
    # LD looks up a (frame, slot) coordinate pair from the control in the
    # environment and pushes the value.  Coordinates here appear 1-based.
    machine.s = []
    machine.e = [[3,2,1],[[2,2],4],[8]]
    machine.c = [[2,3],[1,3],[2,2],[1,2],[1,1]]
    machine.d = [5000]
    machine.perform_ld()
    assert machine.s == [8]
    assert machine.e == [[3,2,1],[[2,2],4],[8]]
    assert machine.c == [[2,3],[1,3],[2,2],[1,2]]
    assert machine.d == [5000]
    machine.perform_ld()
    assert machine.s == [8,4]
    machine.perform_ld()
    assert machine.s == [8,4,[2,2]]
    machine.perform_ld()
    assert machine.s == [8,4,[2,2],1]
    machine.perform_ld()
    assert machine.s == [8,4,[2,2],1,2]


def test_perform_dum(machine):
    # DUM appends a dummy (empty) frame to the environment for recursion.
    machine.s = [3000]
    machine.e = [3,2,1]
    machine.c = [4000]
    machine.d = [5000]
    machine.perform_dum()
    assert machine.s == [3000]
    assert machine.e == [3,2,1,[]]
    assert machine.c == [4000]
    assert machine.d == [5000]


def test_perform_rap(machine):
    # RAP is AP for recursive closures: patches the dummy frame with the
    # argument list before applying.
    machine.s = [4,3,2,[[1,0,[]],'f']]
    machine.e = [6,5,[]]
    machine.c = [8,7]
    machine.d = [10,9]
    machine.perform_rap()
    assert machine.s == []
    assert machine.e == [1,0,2]
    assert machine.c == 'f'
    assert machine.d == [10,9,[8,7],[6,5],[4,3]]


def test_perform_atom(machine):
    # NOTE(review): here ATOM pushes True when the top of stack is [] and
    # False when it is the number 1 -- the opposite of the classic SECD
    # "is an atom" predicate.  Verify against the secd implementation.
    # true case
    machine.s = [3,2,1,[]]
    machine.e = []
    machine.c = []
    machine.d = []
    machine.perform_atom()
    assert machine.s == [3,2,1,[],True]
    assert machine.e == []
    assert machine.c == []
    assert machine.d == []
    # false case
    machine.s = [3,2,1]
    machine.e = []
    machine.c = []
    machine.d = []
    machine.perform_atom()
    assert machine.s == [3,2,1,False]
    assert machine.e == []
    assert machine.c == []
    assert machine.d == []


def test_perform_cons(machine):
    # CONS pops a list and a value and pushes the list with the value added.
    machine.s = [3,2,1,[],0]
    machine.e = []
    machine.c = []
    machine.d = []
    machine.perform_cons()
    assert machine.s == [3,2,1,[0]]
    assert machine.e == []
    assert machine.c == []
    assert machine.d == []


def test_perform_car(machine):
    # Lists are stored with their head last, so CAR yields the final element.
    machine.s = [100,[3,2,1]]
    machine.e = []
    machine.c = []
    machine.d = []
    machine.perform_car()
    assert machine.s == [100,1]
    assert machine.e == []
    assert machine.c == []
    assert machine.d == []


def test_perform_cdr(machine):
    machine.s = [100,[3,2,1]]
    machine.e = []
    machine.c = []
    machine.d = []
    machine.perform_cdr()
    assert machine.s == [100,[3,2]]
    assert machine.e == []
    assert machine.c == []
    assert machine.d == []


def test_perform_zero(machine):
    machine.s = [100,0]
    machine.e = []
    machine.c = []
    machine.d = []
    machine.perform_zero()
    assert machine.s == [100,0,True]
    assert machine.e == []
    assert machine.c == []
    assert machine.d == []
    machine.s = [100,99]
    machine.perform_zero()
    assert machine.s == [100,99,False]


def test_perform_eq(machine):
    machine.s = [100,100]
    machine.e = []
    machine.c = []
    machine.d = []
    machine.perform_eq()
    assert machine.s == [100,100,True]
    assert machine.e == []
    assert machine.c == []
    assert machine.d == []
    machine.s = [100,99]
    machine.perform_eq()
    assert machine.s == [100,99,False]


def test_perform_leq(machine):
    # Comparison order: top-of-stack <= element below it.
    machine.s = [100,100]
    machine.e = []
    machine.c = []
    machine.d = []
    machine.perform_leq()
    assert machine.s == [100,100,True]
    assert machine.e == []
    assert machine.c == []
    assert machine.d == []
    machine.s = [100,101]
    machine.perform_leq()
    assert machine.s == [100,101,False]
    machine.s = [100,99]
    machine.perform_leq()
    assert machine.s == [100,99,True]


def test_execute(machine):
    # Full fetch-execute loop over a loaded program.
    # NOTE(review): 'ADD' is upper-case while 'ldc'/'eq' are lower-case --
    # presumably opcode dispatch is case-insensitive; confirm in secd.
    machine.loadProgram(['ADD'],[1,1])
    machine.execute()
    assert machine.s == [2]
    machine.loadProgram([0,'ldc'],[2,1])
    machine.execute()
    assert machine.s == [2,1,0]
    machine.loadProgram([0,'eq'],[2,1])
    machine.execute()
    assert machine.s == [2,1,False]
import copy from direct.actor import Actor from direct.distributed.ClockDelta import * from direct.fsm import ClassicFSM, State from direct.fsm import State from direct.interval.IntervalGlobal import * from pandac.PandaModules import * import random from otp.avatar import Avatar from toontown.chat.ChatGlobals import * from toontown.nametag.NametagGroup import * from otp.otpbase import OTPGlobals from toontown.distributed import DelayDelete from toontown.effects import Bubbles from toontown.hood import ZoneUtil from toontown.safezone.OZPlayground import OZPlayground from toontown.safezone.SafeZoneLoader import SafeZoneLoader from toontown.toon import Toon, ToonDNA class OZSafeZoneLoader(SafeZoneLoader): def __init__(self, hood, parentFSM, doneEvent): SafeZoneLoader.__init__(self, hood, parentFSM, doneEvent) self.musicFile = 'phase_6/audio/bgm/OZ_SZ.ogg' self.activityMusicFile = 'phase_6/audio/bgm/GS_KartShop.ogg' self.dnaFile = 'phase_6/dna/outdoor_zone_sz.pdna' self.safeZoneStorageDNAFile = 'phase_6/dna/storage_OZ_sz.pdna' self.__toonTracks = {} del self.fsm self.fsm = ClassicFSM.ClassicFSM('SafeZoneLoader', [State.State('start', self.enterStart, self.exitStart, ['quietZone', 'playground', 'toonInterior']), State.State('playground', self.enterPlayground, self.exitPlayground, ['quietZone', 'golfcourse']), State.State('toonInterior', self.enterToonInterior, self.exitToonInterior, ['quietZone']), State.State('quietZone', self.enterQuietZone, self.exitQuietZone, ['playground', 'toonInterior', 'golfcourse']), State.State('golfcourse', self.enterGolfCourse, self.exitGolfCourse, ['quietZone', 'playground']), State.State('final', self.enterFinal, self.exitFinal, ['start'])], 'start', 'final') def load(self): self.done = 0 self.geyserTrack = None SafeZoneLoader.load(self) self.birdSound = map(base.loadSfx, ['phase_4/audio/sfx/SZ_TC_bird1.ogg', 'phase_4/audio/sfx/SZ_TC_bird2.ogg', 'phase_4/audio/sfx/SZ_TC_bird3.ogg']) self.underwaterSound = 
base.loadSfx('phase_4/audio/sfx/AV_ambient_water.ogg') self.swimSound = base.loadSfx('phase_4/audio/sfx/AV_swim_single_stroke.ogg') self.submergeSound = base.loadSfx('phase_5.5/audio/sfx/AV_jump_in_water.ogg') geyserPlacer = self.geom.find('**/geyser*') waterfallPlacer = self.geom.find('**/waterfall*') binMgr = CullBinManager.getGlobalPtr() binMgr.addBin('water', CullBinManager.BTFixed, 29) binMgr = CullBinManager.getGlobalPtr() water = self.geom.find('**/water1*') water.setTransparency(1) water.setColorScale(1, 1, 1, 1) water.setBin('water', 51, 1) pool = self.geom.find('**/pPlane5*') pool.setTransparency(1) pool.setColorScale(1.0, 1.0, 1.0, 1.0) pool.setBin('water', 50, 1) self.geyserModel = loader.loadModel('phase_6/models/golf/golf_geyser_model') self.geyserSound = loader.loadSfx('phase_6/audio/sfx/OZ_Geyser.ogg') self.geyserSoundInterval = SoundInterval(self.geyserSound, node=geyserPlacer, listenerNode=base.camera, seamlessLoop=False, volume=1.0, cutOff=120) self.geyserSoundNoToon = loader.loadSfx('phase_6/audio/sfx/OZ_Geyser_No_Toon.ogg') self.geyserSoundNoToonInterval = SoundInterval(self.geyserSoundNoToon, node=geyserPlacer, listenerNode=base.camera, seamlessLoop=False, volume=1.0, cutOff=120) if self.geyserModel: self.geyserActor = Actor.Actor(self.geyserModel) self.geyserActor.loadAnims({'idle': 'phase_6/models/golf/golf_geyser'}) self.geyserActor.reparentTo(render) self.geyserActor.setPlayRate(8.6, 'idle') self.geyserActor.loop('idle') self.geyserActor.setDepthWrite(0) self.geyserActor.setTwoSided(True, 11) self.geyserActor.setColorScale(1.0, 1.0, 1.0, 1.0) self.geyserActor.setBin('fixed', 0) mesh = self.geyserActor.find('**/mesh_tide1') joint = self.geyserActor.find('**/uvj_WakeWhiteTide1') mesh.setTexProjector(mesh.findTextureStage('default'), joint, self.geyserActor) self.geyserActor.setPos(geyserPlacer.getPos()) self.geyserActor.setZ(geyserPlacer.getZ() - 100.0) self.geyserPos = geyserPlacer.getPos() self.geyserPlacer = geyserPlacer 
self.startGeyser() base.sfxPlayer.setCutoffDistance(160) self.geyserPoolSfx = loader.loadSfx('phase_6/audio/sfx/OZ_Geyser_BuildUp_Loop.ogg') self.geyserPoolSoundInterval = SoundInterval(self.geyserPoolSfx, node=self.geyserPlacer, listenerNode=base.camera, seamlessLoop=True, volume=1.0, cutOff=120) self.geyserPoolSoundInterval.loop() self.bubbles = Bubbles.Bubbles(self.geyserPlacer, render) self.bubbles.renderParent.setDepthWrite(0) self.bubbles.start() self.collBase = render.attachNewNode('collisionBase') self.geyserCollSphere = CollisionSphere(0, 0, 0, 7.5) self.geyserCollSphere.setTangible(1) self.geyserCollNode = CollisionNode('barrelSphere') self.geyserCollNode.setIntoCollideMask(OTPGlobals.WallBitmask) self.geyserCollNode.addSolid(self.geyserCollSphere) self.geyserNodePath = self.collBase.attachNewNode(self.geyserCollNode) self.geyserNodePath.setPos(self.geyserPos[0], self.geyserPos[1], self.geyserPos[2] - 100.0) self.waterfallModel = loader.loadModel('phase_6/models/golf/golf_waterfall_model') if self.waterfallModel: self.waterfallActor = Actor.Actor(self.waterfallModel) self.waterfallActor.loadAnims({'idle': 'phase_6/models/golf/golf_waterfall'}) self.waterfallActor.reparentTo(render) self.waterfallActor.setPlayRate(3.5, 'idle') self.waterfallActor.loop('idle') mesh = self.waterfallActor.find('**/mesh_tide1') joint = self.waterfallActor.find('**/uvj_WakeWhiteTide1') mesh.setTexProjector(mesh.findTextureStage('default'), joint, self.waterfallActor) self.waterfallActor.setPos(waterfallPlacer.getPos()) self.accept('clientLogout', self._handleLogout) self.constructionSign = loader.loadModel('phase_4/models/props/construction_sign.bam') self.constructionSign.reparentTo(render) self.constructionSign.setPosHpr(-47.941, -138.724, 0.122, 181, 0, 0) def exit(self): self.clearToonTracks() SafeZoneLoader.exit(self) self.ignore('clientLogout') def startGeyser(self, task = None): if hasattr(base.cr, 'DTimer') and base.cr.DTimer: self.geyserCycleTime = 20.0 useTime = 
base.cr.DTimer.getTime() timeToNextGeyser = 20.0 - useTime % 20.0 taskMgr.doMethodLater(timeToNextGeyser, self.doGeyser, 'geyser Task') else: taskMgr.doMethodLater(5.0, self.startGeyser, 'start geyser Task') def doGeyser(self, task = None): if not self.done: self.setGeyserAnim() useTime = base.cr.DTimer.getTime() timeToNextGeyser = 20.0 - useTime % 20.0 taskMgr.doMethodLater(timeToNextGeyser, self.doGeyser, 'geyser Task') return task.done def restoreLocal(self, task = None): place = base.cr.playGame.getPlace() if place: place.fsm.request('walk') base.localAvatar.setTeleportAvailable(1) base.localAvatar.collisionsOn() base.localAvatar.dropShadow.show() def restoreRemote(self, remoteAv, task = None): if remoteAv in Avatar.Avatar.ActiveAvatars: remoteAv.startSmooth() remoteAv.dropShadow.show() def setGeyserAnim(self, task = None): if self.done: return maxSize = 0.4 * random.random() + 0.75 time = 1.0 self.geyserTrack = Sequence() upPos = Vec3(self.geyserPos[0], self.geyserPos[1], self.geyserPos[2]) downPos = Vec3(self.geyserPos[0], self.geyserPos[1], self.geyserPos[2] - 8.0) avList = copy.copy(Avatar.Avatar.ActiveAvatars) avList.append(base.localAvatar) playSound = 0 for av in avList: distance = self.geyserPlacer.getDistance(av) if distance < 7.0: place = base.cr.playGame.getPlace() local = 0 avPos = av.getPos() upToon = Vec3(avPos[0], avPos[1], maxSize * self.geyserPos[2] + 40.0) midToon = Vec3(avPos[0], avPos[1], maxSize * self.geyserPos[2] + 30.0) downToon = Vec3(avPos[0], avPos[1], self.geyserPos[2]) returnPoints = [(7, 7), (8, 0), (-8, 3), (-7, 7), (3, -7), (0, 8), (-10, 0), (8, -3), (5, 8), (-8, 5), (-1, 7)] pick = int((float(av.doId) - 11.0) / 13.0 % len(returnPoints)) returnChoice = returnPoints[pick] toonReturn = Vec3(self.geyserPos[0] + returnChoice[0], self.geyserPos[1] + returnChoice[1], self.geyserPos[2] - 1.5) topTrack = Sequence() av.dropShadow.hide() playSound = 1 if av == base.localAvatar: base.cr.playGame.getPlace().setState('fishing') 
base.localAvatar.setTeleportAvailable(0) base.localAvatar.collisionsOff() local = 1 else: topTrack.delayDeletes = [DelayDelete.DelayDelete(av, 'OZSafeZoneLoader.setGeyserAnim')] av.stopSmooth() animTrack = Parallel() toonTrack = Sequence() toonTrack.append(Wait(0.5)) animTrack.append(ActorInterval(av, 'jump-idle', loop=1, endTime=11.5 * time)) animTrack.append(ActorInterval(av, 'neutral', loop=0, endTime=0.25 * time)) holder = render.attachNewNode('toon hold') base.holder = holder toonPos = av.getPos(render) toonHpr = av.getHpr(render) print 'av Pos %s' % av.getPos() base.toonPos = toonPos holder.setPos(toonPos) av.reparentTo(holder) av.setPos(0, 0, 0) lookAt = 180 toonH = (lookAt + toonHpr[0]) % 360 newHpr = Vec3(toonH, toonHpr[1], toonHpr[2]) if toonH < 180: lookIn = Vec3(0 + lookAt, -30, 0) else: lookIn = Vec3(360 + lookAt, -30, 0) print 'Camera Hprs toon %s; lookIn %s; final %s' % (newHpr, lookIn, lookIn - newHpr) if local == 1: camPosOriginal = camera.getPos() camHprOriginal = camera.getHpr() camParentOriginal = camera.getParent() cameraPivot = holder.attachNewNode('camera pivot') chooseHeading = random.choice([-10.0, 15.0, 40.0]) cameraPivot.setHpr(chooseHeading, -20.0, 0.0) cameraArm = cameraPivot.attachNewNode('camera arm') cameraArm.setPos(0.0, -23.0, 3.0) camPosStart = Point3(0.0, 0.0, 0.0) camHprStart = Vec3(0.0, 0.0, 0.0) self.changeCamera(cameraArm, camPosStart, camHprStart) cameraTrack = Sequence() cameraTrack.append(Wait(11.0 * time)) cameraTrack.append(Func(self.changeCamera, camParentOriginal, camPosOriginal, camHprOriginal)) cameraTrack.start() moveTrack = Sequence() moveTrack.append(Wait(0.5)) moveTrack.append(LerpPosInterval(holder, 3.0 * time, pos=upToon, startPos=downToon, blendType='easeOut')) moveTrack.append(LerpPosInterval(holder, 2.0 * time, pos=midToon, startPos=upToon, blendType='easeInOut')) moveTrack.append(LerpPosInterval(holder, 1.0 * time, pos=upToon, startPos=midToon, blendType='easeInOut')) 
moveTrack.append(LerpPosInterval(holder, 2.0 * time, pos=midToon, startPos=upToon, blendType='easeInOut')) moveTrack.append(LerpPosInterval(holder, 1.0 * time, pos=upToon, startPos=midToon, blendType='easeInOut')) moveTrack.append(LerpPosInterval(holder, 2.5 * time, pos=toonReturn, startPos=upToon, blendType='easeIn')) animTrack.append(moveTrack) animTrack.append(toonTrack) topTrack.append(animTrack) topTrack.append(Func(av.setPos, toonReturn)) topTrack.append(Func(av.reparentTo, render)) topTrack.append(Func(holder.remove)) if local == 1: topTrack.append(Func(self.restoreLocal)) else: topTrack.append(Func(self.restoreRemote, av)) topTrack.append(Func(self.clearToonTrack, av.doId)) self.storeToonTrack(av.doId, topTrack) topTrack.start() self.geyserTrack.append(Func(self.doPrint, 'geyser start')) self.geyserTrack.append(Func(self.geyserNodePath.setPos, self.geyserPos[0], self.geyserPos[1], self.geyserPos[2])) self.geyserTrack.append(Parallel(LerpScaleInterval(self.geyserActor, 2.0 * time, 0.75, 0.01), LerpPosInterval(self.geyserActor, 2.0 * time, pos=downPos, startPos=downPos))) self.geyserTrack.append(Parallel(LerpScaleInterval(self.geyserActor, time, maxSize, 0.75), LerpPosInterval(self.geyserActor, time, pos=upPos, startPos=downPos))) self.geyserTrack.append(Parallel(LerpScaleInterval(self.geyserActor, 2.0 * time, 0.75, maxSize), LerpPosInterval(self.geyserActor, 2.0 * time, pos=downPos, startPos=upPos))) self.geyserTrack.append(Parallel(LerpScaleInterval(self.geyserActor, time, maxSize, 0.75), LerpPosInterval(self.geyserActor, time, pos=upPos, startPos=downPos))) self.geyserTrack.append(Parallel(LerpScaleInterval(self.geyserActor, 2.0 * time, 0.75, maxSize), LerpPosInterval(self.geyserActor, 2.0 * time, pos=downPos, startPos=upPos))) self.geyserTrack.append(Parallel(LerpScaleInterval(self.geyserActor, time, maxSize, 0.75), LerpPosInterval(self.geyserActor, time, pos=upPos, startPos=downPos))) self.geyserTrack.append(Parallel(LerpScaleInterval(self.geyserActor, 
4.0 * time, 0.01, maxSize), LerpPosInterval(self.geyserActor, 4.0 * time, pos=downPos, startPos=upPos))) self.geyserTrack.append(Func(self.geyserNodePath.setPos, self.geyserPos[0], self.geyserPos[1], self.geyserPos[2] - 100.0)) self.geyserTrack.append(Func(self.doPrint, 'geyser end')) self.geyserTrack.start() if playSound: self.geyserSoundInterval.start() else: self.geyserSoundNoToonInterval.start() def changeCamera(self, newParent, newPos, newHpr): camera.reparentTo(newParent) camera.setPosHpr(newPos, newHpr) def doPrint(self, thing): return 0 print thing def unload(self): del self.birdSound SafeZoneLoader.unload(self) self.done = 1 self.collBase.removeNode() if self.geyserTrack: self.geyserTrack.finish() self.geyserTrack = None self.geyserActor.cleanup() self.geyserModel.removeNode() self.waterfallActor.cleanup() self.waterfallModel.removeNode() self.bubbles.destroy() del self.bubbles self.geyserPoolSoundInterval.finish() self.geyserPoolSfx.stop() self.geyserPoolSfx = None self.geyserPoolSoundInterval = None self.geyserSoundInterval.finish() self.geyserSound.stop() self.geyserSoundInterval = None self.geyserSound = None self.geyserSoundNoToonInterval.finish() self.geyserSoundNoToon.stop() self.geyserSoundNoToonInterval = None self.geyserSoundNoToon = None if self.constructionSign is not None: self.constructionSign.removeNode() self.constructionSign = None def enterPlayground(self, requestStatus): self.playgroundClass = OZPlayground SafeZoneLoader.enterPlayground(self, requestStatus) def exitPlayground(self): taskMgr.remove('titleText') self.hood.hideTitleText() SafeZoneLoader.exitPlayground(self) self.playgroundClass = None return def handlePlaygroundDone(self): status = self.place.doneStatus self.doneStatus = status messenger.send(self.doneEvent) def enteringARace(self, status): if not status['where'] == 'golfcourse': return 0 if ZoneUtil.isDynamicZone(status['zoneId']): return status['hoodId'] == self.hood.hoodId else: return 
ZoneUtil.getHoodId(status['zoneId']) == self.hood.hoodId def enteringAGolfCourse(self, status): if not status['where'] == 'golfcourse': return 0 if ZoneUtil.isDynamicZone(status['zoneId']): return status['hoodId'] == self.hood.hoodId else: return ZoneUtil.getHoodId(status['zoneId']) == self.hood.hoodId def enterGolfCourse(self, requestStatus): if 'curseId' in requestStatus: self.golfCourseId = requestStatus['courseId'] else: self.golfCourseId = 0 self.accept('raceOver', self.handleRaceOver) self.accept('leavingGolf', self.handleLeftGolf) base.transitions.irisOut(t=0.2) def exitGolfCourse(self): del self.golfCourseId def handleRaceOver(self): print 'you done!!' def handleLeftGolf(self): req = {'loader': 'safeZoneLoader', 'where': 'playground', 'how': 'teleportIn', 'zoneId': 6000, 'hoodId': 6000, 'shardId': None} self.fsm.request('quietZone', [req]) return def _handleLogout(self): self.clearToonTracks() def storeToonTrack(self, avId, track): self.clearToonTrack(avId) self.__toonTracks[avId] = track def clearToonTrack(self, avId): oldTrack = self.__toonTracks.get(avId) if oldTrack: oldTrack.pause() DelayDelete.cleanupDelayDeletes(oldTrack) del self.__toonTracks[avId] def clearToonTracks(self): keyList = [] for key in self.__toonTracks: keyList.append(key) for key in keyList: if key in self.__toonTracks: self.clearToonTrack(key)
#
# Created by: Pearu Peterson, March 2002
#
""" Test functions for scipy.linalg.matfuncs module

"""
from __future__ import division, print_function, absolute_import

import math

import numpy as np
from numpy import array, eye, exp, random
from numpy.linalg import matrix_power
from numpy.testing import (
        assert_allclose, assert_, assert_array_almost_equal, assert_equal,
        assert_array_almost_equal_nulp)
from scipy._lib._numpy_compat import suppress_warnings

from scipy.sparse import csc_matrix, SparseEfficiencyWarning
from scipy.sparse.construct import eye as speye
from scipy.sparse.linalg.matfuncs import (expm, _expm,
        ProductOperator, MatrixPowerOperator,
        _onenorm_matrix_power_nnm)
from scipy.linalg import logm
from scipy.special import factorial
import scipy.sparse
import scipy.sparse.linalg


def _burkardt_13_power(n, p):
    """
    A helper function for testing matrix functions.

    Parameters
    ----------
    n : integer greater than 1
        Order of the square matrix to be returned.
    p : non-negative integer
        Power of the matrix.

    Returns
    -------
    out : ndarray representing a square matrix
        A Forsythe matrix of order n, raised to the power p.

    """
    # Input validation.
    if n != int(n) or n < 2:
        raise ValueError('n must be an integer greater than 1')
    n = int(n)
    if p != int(p) or p < 0:
        raise ValueError('p must be a non-negative integer')
    p = int(p)

    # Construct the matrix explicitly.
    # A**p has two diagonals: entries `large` on offset b and entries
    # `small` on offset b-n, where p = a*n + b.
    a, b = divmod(p, n)
    large = np.power(10.0, -n*a)
    small = large * np.power(10.0, -n)
    return np.diag([large]*(n-b), b) + np.diag([small]*b, b-n)


def test_onenorm_matrix_power_nnm():
    # The private helper must agree with the 1-norm of an explicit power.
    np.random.seed(1234)
    for n in range(1, 5):
        for p in range(5):
            M = np.random.random((n, n))
            Mp = np.linalg.matrix_power(M, p)
            observed = _onenorm_matrix_power_nnm(M, p)
            expected = np.linalg.norm(Mp, 1)
            assert_allclose(observed, expected)


class TestExpM(object):
    def test_zero_ndarray(self):
        # expm(0) is the identity.
        a = array([[0.,0],[0,0]])
        assert_array_almost_equal(expm(a),[[1,0],[0,1]])

    def test_zero_sparse(self):
        a = csc_matrix([[0.,0],[0,0]])
        assert_array_almost_equal(expm(a).toarray(),[[1,0],[0,1]])

    def test_zero_matrix(self):
        a = np.matrix([[0.,0],[0,0]])
        assert_array_almost_equal(expm(a),[[1,0],[0,1]])

    def test_misc_types(self):
        # expm accepts nested tuples, lists, np.matrix, ndarray and sparse,
        # real or complex, and gives the same answer for each.
        A = expm(np.array([[1]]))
        assert_allclose(expm(((1,),)), A)
        assert_allclose(expm([[1]]), A)
        assert_allclose(expm(np.matrix([[1]])), A)
        assert_allclose(expm(np.array([[1]])), A)
        assert_allclose(expm(csc_matrix([[1]])).A, A)
        B = expm(np.array([[1j]]))
        assert_allclose(expm(((1j,),)), B)
        assert_allclose(expm([[1j]]), B)
        assert_allclose(expm(np.matrix([[1j]])), B)
        assert_allclose(expm(csc_matrix([[1j]])).A, B)

    def test_bidiagonal_sparse(self):
        # Expected entries derived analytically for this upper-bidiagonal A.
        A = csc_matrix([
            [1, 3, 0],
            [0, 1, 5],
            [0, 0, 2]], dtype=float)
        e1 = math.exp(1)
        e2 = math.exp(2)
        expected = np.array([
            [e1, 3*e1, 15*(e2 - 2*e1)],
            [0, e1, 5*(e2 - e1)],
            [0, 0, e2]], dtype=float)
        observed = expm(A).toarray()
        assert_array_almost_equal(observed, expected)

    def test_padecases_dtype_float(self):
        # Scaled identities: expm(c*I) == exp(c)*I at several scales,
        # exercising different Pade-order code paths.
        for dtype in [np.float32, np.float64]:
            for scale in [1e-2, 1e-1, 5e-1, 1, 10]:
                A = scale * eye(3, dtype=dtype)
                observed = expm(A)
                expected = exp(scale) * eye(3, dtype=dtype)
                assert_array_almost_equal_nulp(observed, expected, nulp=100)

    def test_padecases_dtype_complex(self):
        for dtype in [np.complex64, np.complex128]:
            for scale in [1e-2, 1e-1, 5e-1, 1, 10]:
                A = scale * eye(3, dtype=dtype)
                observed = expm(A)
                expected = exp(scale) * eye(3, dtype=dtype)
                assert_array_almost_equal_nulp(observed, expected, nulp=100)

    def test_padecases_dtype_sparse_float(self):
        # float32 and complex64 lead to errors in spsolve/UMFpack
        dtype = np.float64
        for scale in [1e-2, 1e-1, 5e-1, 1, 10]:
            a = scale * speye(3, 3, dtype=dtype, format='csc')
            e = exp(scale) * eye(3, dtype=dtype)
            with suppress_warnings() as sup:
                sup.filter(SparseEfficiencyWarning,
                           "Changing the sparsity structure of a csc_matrix is expensive.")
                exact_onenorm = _expm(a, use_exact_onenorm=True).toarray()
                inexact_onenorm = _expm(a, use_exact_onenorm=False).toarray()
            assert_array_almost_equal_nulp(exact_onenorm, e, nulp=100)
            assert_array_almost_equal_nulp(inexact_onenorm, e, nulp=100)

    def test_padecases_dtype_sparse_complex(self):
        # float32 and complex64 lead to errors in spsolve/UMFpack
        dtype = np.complex128
        for scale in [1e-2, 1e-1, 5e-1, 1, 10]:
            a = scale * speye(3, 3, dtype=dtype, format='csc')
            e = exp(scale) * eye(3, dtype=dtype)
            with suppress_warnings() as sup:
                sup.filter(SparseEfficiencyWarning,
                           "Changing the sparsity structure of a csc_matrix is expensive.")
                assert_array_almost_equal_nulp(expm(a).toarray(), e, nulp=100)

    def test_logm_consistency(self):
        # Round trip: expm(logm(A)) should recover A.
        random.seed(1234)
        for dtype in [np.float64, np.complex128]:
            for n in range(1, 10):
                for scale in [1e-4, 1e-3, 1e-2, 1e-1, 1, 1e1, 1e2]:
                    # make logm(A) be of a given scale
                    A = (eye(n) + random.rand(n, n) * scale).astype(dtype)
                    if np.iscomplexobj(A):
                        A = A + 1j * random.rand(n, n) * scale
                    assert_array_almost_equal(expm(logm(A)), A)

    def test_integer_matrix(self):
        # Integer input should behave exactly like its float counterpart.
        Q = np.array([
            [-3, 1, 1, 1],
            [1, -3, 1, 1],
            [1, 1, -3, 1],
            [1, 1, 1, -3]])
        assert_allclose(expm(Q), expm(1.0 * Q))

    def test_triangularity_perturbation(self):
        # Experiment (1) of
        # Awad H. Al-Mohy and Nicholas J. Higham (2012)
        # Improved Inverse Scaling and Squaring Algorithms
        # for the Matrix Logarithm.
        A = np.array([
            [3.2346e-1, 3e4, 3e4, 3e4],
            [0, 3.0089e-1, 3e4, 3e4],
            [0, 0, 3.221e-1, 3e4],
            [0, 0, 0, 3.0744e-1]],
            dtype=float)
        A_logm = np.array([
            [-1.12867982029050462e+00, 9.61418377142025565e+04,
             -4.52485573953179264e+09, 2.92496941103871812e+14],
            [0.00000000000000000e+00, -1.20101052953082288e+00,
             9.63469687211303099e+04, -4.68104828911105442e+09],
            [0.00000000000000000e+00, 0.00000000000000000e+00,
             -1.13289322264498393e+00, 9.53249183094775653e+04],
            [0.00000000000000000e+00, 0.00000000000000000e+00,
             0.00000000000000000e+00, -1.17947533272554850e+00]],
            dtype=float)
        assert_allclose(expm(A_logm), A, rtol=1e-4)

        # Perturb the upper triangular matrix by tiny amounts,
        # so that it becomes technically not upper triangular.
        random.seed(1234)
        tiny = 1e-17
        A_logm_perturbed = A_logm.copy()
        A_logm_perturbed[1, 0] = tiny
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning, "scipy.linalg.solve\nIll-conditioned.*")
            A_expm_logm_perturbed = expm(A_logm_perturbed)
        rtol = 1e-4
        atol = 100 * tiny
        # The perturbation destroys the triangular fast path, so the
        # round trip is expected NOT to recover A here.
        assert_(not np.allclose(A_expm_logm_perturbed, A, rtol=rtol, atol=atol))

    def test_burkardt_1(self):
        # This matrix is diagonal.
        # The calculation of the matrix exponential is simple.
        #
        # This is the first of a series of matrix exponential tests
        # collected by John Burkardt from the following sources.
        #
        # Alan Laub,
        # Review of "Linear System Theory" by Joao Hespanha,
        # SIAM Review,
        # Volume 52, Number 4, December 2010, pages 779--781.
        #
        # Cleve Moler and Charles Van Loan,
        # Nineteen Dubious Ways to Compute the Exponential of a Matrix,
        # Twenty-Five Years Later,
        # SIAM Review,
        # Volume 45, Number 1, March 2003, pages 3--49.
        #
        # Cleve Moler,
        # Cleve's Corner: A Balancing Act for the Matrix Exponential,
        # 23 July 2012.
        #
        # Robert Ward,
        # Numerical computation of the matrix exponential
        # with accuracy estimate,
        # SIAM Journal on Numerical Analysis,
        # Volume 14, Number 4, September 1977, pages 600--610.
        exp1 = np.exp(1)
        exp2 = np.exp(2)
        A = np.array([
            [1, 0],
            [0, 2],
            ], dtype=float)
        desired = np.array([
            [exp1, 0],
            [0, exp2],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_2(self):
        # This matrix is symmetric.
        # The calculation of the matrix exponential is straightforward.
        A = np.array([
            [1, 3],
            [3, 2],
            ], dtype=float)
        desired = np.array([
            [39.322809708033859, 46.166301438885753],
            [46.166301438885768, 54.711576854329110],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_3(self):
        # This example is due to Laub.
        # This matrix is ill-suited for the Taylor series approach.
        # As powers of A are computed, the entries blow up too quickly.
        exp1 = np.exp(1)
        exp39 = np.exp(39)
        A = np.array([
            [0, 1],
            [-39, -40],
            ], dtype=float)
        desired = np.array([
            [
                39/(38*exp1) - 1/(38*exp39),
                -np.expm1(-38) / (38*exp1)],
            [
                39*np.expm1(-38) / (38*exp1),
                -1/(38*exp1) + 39/(38*exp39)],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_4(self):
        # This example is due to Moler and Van Loan.
        # The example will cause problems for the series summation approach,
        # as well as for diagonal Pade approximations.
        A = np.array([
            [-49, 24],
            [-64, 31],
            ], dtype=float)
        U = np.array([[3, 1], [4, 2]], dtype=float)
        V = np.array([[1, -1/2], [-2, 3/2]], dtype=float)
        w = np.array([-17, -1], dtype=float)
        # Oracle built from the eigendecomposition A = U diag(w) V.
        desired = np.dot(U * np.exp(w), V)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_5(self):
        # This example is due to Moler and Van Loan.
        # This matrix is strictly upper triangular
        # All powers of A are zero beyond some (low) limit.
        # This example will cause problems for Pade approximations.
        A = np.array([
            [0, 6, 0, 0],
            [0, 0, 6, 0],
            [0, 0, 0, 6],
            [0, 0, 0, 0],
            ], dtype=float)
        desired = np.array([
            [1, 6, 18, 36],
            [0, 1, 6, 18],
            [0, 0, 1, 6],
            [0, 0, 0, 1],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_6(self):
        # This example is due to Moler and Van Loan.
        # This matrix does not have a complete set of eigenvectors.
        # That means the eigenvector approach will fail.
        exp1 = np.exp(1)
        A = np.array([
            [1, 1],
            [0, 1],
            ], dtype=float)
        desired = np.array([
            [exp1, exp1],
            [0, exp1],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_7(self):
        # This example is due to Moler and Van Loan.
        # This matrix is very close to example 5.
        # Mathematically, it has a complete set of eigenvectors.
        # Numerically, however, the calculation will be suspect.
        exp1 = np.exp(1)
        eps = np.spacing(1)
        A = np.array([
            [1 + eps, 1],
            [0, 1 - eps],
            ], dtype=float)
        desired = np.array([
            [exp1, exp1],
            [0, exp1],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_8(self):
        # This matrix was an example in Wikipedia.
        exp4 = np.exp(4)
        exp16 = np.exp(16)
        A = np.array([
            [21, 17, 6],
            [-5, -1, -6],
            [4, 4, 16],
            ], dtype=float)
        desired = np.array([
            [13*exp16 - exp4, 13*exp16 - 5*exp4, 2*exp16 - 2*exp4],
            [-9*exp16 + exp4, -9*exp16 + 5*exp4, -2*exp16 + 2*exp4],
            [16*exp16, 16*exp16, 4*exp16],
            ], dtype=float) * 0.25
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_9(self):
        # This matrix is due to the NAG Library.
        # It is an example for function F01ECF.
        A = np.array([
            [1, 2, 2, 2],
            [3, 1, 1, 2],
            [3, 2, 1, 2],
            [3, 3, 3, 1],
            ], dtype=float)
        desired = np.array([
            [740.7038, 610.8500, 542.2743, 549.1753],
            [731.2510, 603.5524, 535.0884, 542.2743],
            [823.7630, 679.4257, 603.5524, 610.8500],
            [998.4355, 823.7630, 731.2510, 740.7038],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_10(self):
        # This is Ward's example #1.
        # It is defective and nonderogatory.
        A = np.array([
            [4, 2, 0],
            [1, 4, 1],
            [1, 1, 4],
            ], dtype=float)
        assert_allclose(sorted(scipy.linalg.eigvals(A)), (3, 3, 6))
        desired = np.array([
            [147.8666224463699, 183.7651386463682, 71.79703239999647],
            [127.7810855231823, 183.7651386463682, 91.88256932318415],
            [127.7810855231824, 163.6796017231806, 111.9681062463718],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_11(self):
        # This is Ward's example #2.
        # It is a symmetric matrix.
        A = np.array([
            [29.87942128909879, 0.7815750847907159, -2.289519314033932],
            [0.7815750847907159, 25.72656945571064, 8.680737820540137],
            [-2.289519314033932, 8.680737820540137, 34.39400925519054],
            ], dtype=float)
        assert_allclose(scipy.linalg.eigvalsh(A), (20, 30, 40))
        desired = np.array([
            [
                5.496313853692378E+15,
                -1.823188097200898E+16,
                -3.047577080858001E+16],
            [
                -1.823188097200899E+16,
                6.060522870222108E+16,
                1.012918429302482E+17],
            [
                -3.047577080858001E+16,
                1.012918429302482E+17,
                1.692944112408493E+17],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_12(self):
        # This is Ward's example #3.
        # Ward's algorithm has difficulty estimating the accuracy
        # of its results.
        A = np.array([
            [-131, 19, 18],
            [-390, 56, 54],
            [-387, 57, 52],
            ], dtype=float)
        assert_allclose(sorted(scipy.linalg.eigvals(A)), (-20, -2, -1))
        desired = np.array([
            [-1.509644158793135, 0.3678794391096522, 0.1353352811751005],
            [-5.632570799891469, 1.471517758499875, 0.4060058435250609],
            [-4.934938326088363, 1.103638317328798, 0.5413411267617766],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)

    def test_burkardt_13(self):
        # This is Ward's example #4.
        # This is a version of the Forsythe matrix.
        # The eigenvector problem is badly conditioned.
        # Ward's algorithm has difficulty esimating the accuracy
        # of its results for this problem.
        #
        # Check the construction of one instance of this family of matrices.
        A4_actual = _burkardt_13_power(4, 1)
        A4_desired = [[0, 1, 0, 0],
                      [0, 0, 1, 0],
                      [0, 0, 0, 1],
                      [1e-4, 0, 0, 0]]
        assert_allclose(A4_actual, A4_desired)
        # Check the expm for a few instances.
        for n in (2, 3, 4, 10):
            # Approximate expm using Taylor series.
            # This works well for this matrix family
            # because each matrix in the summation,
            # even before dividing by the factorial,
            # is entrywise positive with max entry 10**(-floor(p/n)*n).
            k = max(1, int(np.ceil(16/n)))
            desired = np.zeros((n, n), dtype=float)
            for p in range(n*k):
                Ap = _burkardt_13_power(n, p)
                assert_equal(np.min(Ap), 0)
                assert_allclose(np.max(Ap), np.power(10, -np.floor(p/n)*n))
                desired += Ap / factorial(p)
            actual = expm(_burkardt_13_power(n, 1))
            assert_allclose(actual, desired)

    def test_burkardt_14(self):
        # This is Moler's example.
        # This badly scaled matrix caused problems for MATLAB's expm().
        A = np.array([
            [0, 1e-8, 0],
            [-(2e10 + 4e8/6.), -3, 2e10],
            [200./3., 0, -200./3.],
            ], dtype=float)
        desired = np.array([
            [0.446849468283175, 1.54044157383952e-09, 0.462811453558774],
            [-5743067.77947947, -0.0152830038686819, -4526542.71278401],
            [0.447722977849494, 1.54270484519591e-09, 0.463480648837651],
            ], dtype=float)
        actual = expm(A)
        assert_allclose(actual, desired)


class TestOperators(object):

    def test_product_operator(self):
        # ProductOperator(A, B, C) must act like the explicit product A@B@C,
        # including its transpose.
        random.seed(1234)
        n = 5
        k = 2
        nsamples = 10
        for i in range(nsamples):
            A = np.random.randn(n, n)
            B = np.random.randn(n, n)
            C = np.random.randn(n, n)
            D = np.random.randn(n, k)
            op = ProductOperator(A, B, C)
            assert_allclose(op.matmat(D), A.dot(B).dot(C).dot(D))
            assert_allclose(op.T.matmat(D), (A.dot(B).dot(C)).T.dot(D))

    def test_matrix_power_operator(self):
        # MatrixPowerOperator(A, p) must act like matrix_power(A, p).
        random.seed(1234)
        n = 5
        k = 2
        p = 3
        nsamples = 10
        for i in range(nsamples):
            A = np.random.randn(n, n)
            B = np.random.randn(n, k)
            op = MatrixPowerOperator(A, p)
            assert_allclose(op.matmat(B), matrix_power(A, p).dot(B))
            assert_allclose(op.T.matmat(B), matrix_power(A, p).T.dot(B))
## # Copyright (c) 2015-2017 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## from twext.python.log import Logger from twisted.internet.defer import returnValue, inlineCallbacks from twisted.python.failure import Failure from twistedcaldav.accounting import emitAccounting from txdav.caldav.datastore.sql import ManagedAttachment, CalendarBindRecord from txdav.caldav.icalendarstore import ComponentUpdateState from txdav.common.datastore.podding.migration.sync_metadata import CalendarMigrationRecord, \ CalendarObjectMigrationRecord, AttachmentMigrationRecord from txdav.common.datastore.podding.migration.work import HomeCleanupWork, MigrationCleanupWork from txdav.common.datastore.sql_external import NotificationCollectionExternal from txdav.common.datastore.sql_notification import NotificationCollection from txdav.common.datastore.sql_tables import _HOME_STATUS_MIGRATING, _HOME_STATUS_DISABLED, \ _HOME_STATUS_EXTERNAL, _HOME_STATUS_NORMAL from txdav.common.idirectoryservice import DirectoryRecordNotFoundError from functools import wraps from uuid import uuid4 import datetime log = Logger() ACCOUNTING_TYPE = "migration" ACCOUNTING_LOG = "migration.log" def inTransactionWrapper(operation): """ This wrapper converts an instance method that takes a transaction as its first parameter into one where the transaction parameter is an optional keyword argument. 
If the keyword argument is present and not None, then the instance method is called with that keyword as the first positional argument (i.e., almost a NoOp). If the keyword argument is not present, then a new transaction is created and the instance method called with it as the first positional argument, plus the call is wrapped with try/except/else to ensure proper commit and abort of the internally created transaction is done. So this wrapper allows for a method that requires a transaction to be run with either an existing transaction or one created just for the purpose of running it. @param operation: a callable that takes an L{IAsyncTransaction} as its first argument, and returns a value. """ @wraps(operation) @inlineCallbacks def _inTxn(self, *args, **kwargs): label = self.label(operation.__name__) if "txn" in kwargs: txn = kwargs["txn"] del kwargs["txn"] result = yield operation(self, txn, *args, **kwargs) returnValue(result) else: txn = self.store.newTransaction(label=label) try: result = yield operation(self, txn, *args, **kwargs) except Exception as ex: f = Failure() yield txn.abort() log.error("{label} failed: {e}", label=label, e=str(ex)) returnValue(f) else: yield txn.commit() returnValue(result) return _inTxn # Cross-pod synchronization of an entire calendar home class CrossPodHomeSync(object): BATCH_SIZE = 50 def __init__(self, store, diruid, final=False, uselog=None): """ @param store: the data store @type store: L{CommonDataStore} @param diruid: directory uid of the user whose home is to be sync'd @type diruid: L{str} @param final: indicates whether this is in the final sync stage with the remote home already disabled @type final: L{bool} @param uselog: additional logging written to this object @type: L{File} """ self.store = store self.diruid = diruid self.disabledRemote = final self.uselog = uselog self.record = None self.homeId = None def label(self, detail): return "Cross-pod Migration Sync for {}: {}".format(self.diruid, detail) def 
accounting(self, logstr): emitAccounting(ACCOUNTING_TYPE, self.record if self.record is None else self.diruid, "{} {}\n".format(datetime.datetime.now().isoformat(), logstr), filename=ACCOUNTING_LOG) if self.uselog is not None: self.uselog.write("CrossPodHomeSync: {}\n".format(logstr)) @inlineCallbacks def migrateHere(self): """ This is a full, serialized version of a data migration (minus any directory update) that can be triggered via a command line tool. It is designed to minimize down time for the migrating user. """ # Step 1 - initial full sync yield self.sync() # Step 2 - increment sync (since the initial sync may take a long time # to run we should do one incremental sync before bringing down the # account being migrated) yield self.sync() # Step 3 - disable remote home # NB Any failure from this point on will need to be caught and # handled by re-enabling the old home (and fixing any sharing state # that may have been changed) yield self.disableRemoteHome() # Step 4 - final incremental sync yield self.sync() # Step 5 - final overall sync of meta-data (including sharing re-linking) yield self.finalSync() # Step 6 - enable new home yield self.enableLocalHome() # Step 7 - remove remote home yield self.removeRemoteHome() # Step 8 - say phew! TODO: Actually alert everyone else pass @inlineCallbacks def sync(self): """ Initiate a sync of the home. This is a simple data sync that does not reconcile sharing state etc. The L{finalSync} method will do a full sharing reconcile as well as disable the migration source home. 
""" yield self.loadRecord() self.accounting("Starting: sync...") yield self.prepareCalendarHome() # Calendar list and calendar data yield self.syncCalendarList() # Sync home metadata such as alarms, default calendars, etc yield self.syncCalendarHomeMetaData() # Sync attachments yield self.syncAttachments() self.accounting("Completed: sync.\n") @inlineCallbacks def finalSync(self): """ Do the final sync up of any additional data, re-link sharing bind rows, recalculate quota etc. """ yield self.loadRecord() self.accounting("Starting: finalSync...") yield self.prepareCalendarHome() # Link attachments to resources: ATTACHMENT_CALENDAR_OBJECT table yield self.linkAttachments() # TODO: Re-write attachment URIs - not sure if we need this as reverse proxy may take care of it pass # Group attendee reconcile yield self.groupAttendeeReconcile() # Delegates reconcile yield self.delegateReconcile() # Shared collections reconcile (including group sharees) yield self.sharedByCollectionsReconcile() yield self.sharedToCollectionsReconcile() # Notifications yield self.notificationsReconcile() # iMIP tokens yield self.iMIPTokensReconcile() # Work items yield self.workItemsReconcile() self.accounting("Completed: finalSync.\n") @inTransactionWrapper @inlineCallbacks def disableRemoteHome(self, txn): """ Mark the remote home as disabled. Also, prevent any scheduling jobs for the corresponding user from being run. 
""" yield self.loadRecord() self.accounting("Starting: disableRemoteHome...") yield self.prepareCalendarHome() # Stop any work first remote_home = yield self._remoteHome(txn) yield remote_home.pauseWork() # Calendar home yield remote_home.setStatus(_HOME_STATUS_DISABLED) # Notification home notifications = yield self._remoteNotificationsHome(txn) yield notifications.setStatus(_HOME_STATUS_DISABLED) self.disabledRemote = True self.accounting("Completed: disableRemoteHome.\n") @inTransactionWrapper @inlineCallbacks def enableLocalHome(self, txn): """ Mark the local home as enabled and remove any previously existing external home. """ yield self.loadRecord() self.accounting("Starting: enableLocalHome...") yield self.prepareCalendarHome() # Disable any local external homes oldhome = yield txn.calendarHomeWithUID(self.diruid, status=_HOME_STATUS_EXTERNAL) if oldhome is not None: yield oldhome.setLocalStatus(_HOME_STATUS_DISABLED) oldnotifications = yield txn.notificationsWithUID(self.diruid, status=_HOME_STATUS_EXTERNAL) if oldnotifications: yield oldnotifications.setLocalStatus(_HOME_STATUS_DISABLED) # Enable the migrating ones newhome = yield txn.calendarHomeWithUID(self.diruid, status=_HOME_STATUS_MIGRATING) if newhome is not None: yield newhome.setStatus(_HOME_STATUS_NORMAL) newnotifications = yield txn.notificationsWithUID(self.diruid, status=_HOME_STATUS_MIGRATING) if newnotifications: yield newnotifications.setStatus(_HOME_STATUS_NORMAL) # Unpause work items yield newhome.unpauseWork() # Remove migration state yield MigrationCleanupWork.reschedule( txn, MigrationCleanupWork.notBeforeDelay, homeResourceID=newhome.id(), ) # Purge the old ones yield HomeCleanupWork.reschedule( txn, HomeCleanupWork.notBeforeDelay, ownerUID=newhome.uid(), ) self.accounting("Completed: enableLocalHome.\n") @inlineCallbacks def removeRemoteHome(self): """ Remove all the old data on the remote pod. 
""" # TODO: implement API on CommonHome to purge the old data without # any side-effects (scheduling, sharing etc). Also purge associated # data such as iMIP tokens, delegates, work items, etc yield self.loadRecord() self.accounting("Starting: removeRemoteHome...") yield self.prepareCalendarHome() yield self._migratedHome() self.accounting("Completed: removeRemoteHome.\n") @inTransactionWrapper def _migratedHome(self, txn): """ Send cross-pod message to tell the old pod to remove the migrated data. """ return txn.store().conduit.send_migrated_home(txn, self.diruid) @inlineCallbacks def loadRecord(self): """ Initiate a sync of the home. """ if self.record is None: self.record = yield self.store.directoryService().recordWithUID(self.diruid) if self.record is None: raise DirectoryRecordNotFoundError("Cross-pod Migration Sync missing directory record for {}".format(self.diruid)) if self.record.thisServer(): raise ValueError("Cross-pod Migration Sync cannot sync with user already on this server: {}".format(self.diruid)) @inTransactionWrapper @inlineCallbacks def prepareCalendarHome(self, txn): """ Make sure the inactive home to migrate into is present on this pod. 
""" if self.homeId is None: home = yield self._localHome(txn) if home is None: if self.disabledRemote: self.homeId = None else: home = yield txn.calendarHomeWithUID(self.diruid, status=_HOME_STATUS_MIGRATING, create=True) self.accounting(" Created new home collection to migrate into.") self.homeId = home.id() if home is not None else None @inTransactionWrapper @inlineCallbacks def syncCalendarHomeMetaData(self, txn): """ Make sure the home meta-data (alarms, default calendars) is properly sync'd """ self.accounting("Starting: syncCalendarHomeMetaData...") remote_home = yield self._remoteHome(txn) yield remote_home.readMetaData() calendars = yield CalendarMigrationRecord.querysimple(txn, calendarHomeResourceID=self.homeId) calendarIDMap = dict((item.remoteResourceID, item.localResourceID) for item in calendars) local_home = yield self._localHome(txn) yield local_home.copyMetadata(remote_home, calendarIDMap) self.accounting("Completed: syncCalendarHomeMetaData.") @inlineCallbacks def _remoteHome(self, txn): """ Create a synthetic external home object that maps to the actual remote home. """ from txdav.caldav.datastore.sql_external import CalendarHomeExternal resourceID = yield txn.store().conduit.send_home_resource_id(txn, self.record, migrating=True) home = CalendarHomeExternal.makeSyntheticExternalHome(txn, self.record.uid, resourceID) if resourceID is not None else None if self.disabledRemote: home._migratingHome = True returnValue(home) @inlineCallbacks def _remoteNotificationsHome(self, txn): """ Create a synthetic external home object that maps to the actual remote home. """ notifications = yield NotificationCollectionExternal.notificationsWithUID(txn, self.diruid, create=True) if self.disabledRemote: notifications._migratingHome = True returnValue(notifications) def _localHome(self, txn): """ Get the home on this pod that will have data migrated to it. 
""" return txn.calendarHomeWithUID(self.diruid, status=_HOME_STATUS_MIGRATING) @inlineCallbacks def syncCalendarList(self): """ Synchronize each owned calendar. """ self.accounting("Starting: syncCalendarList...") # Remote sync details remote_sync_state = yield self.getCalendarSyncList() self.accounting(" Found {} remote calendars to sync.".format(len(remote_sync_state))) # Get local sync details from local DB local_sync_state = yield self.getSyncState() self.accounting(" Found {} local calendars to sync.".format(len(local_sync_state))) # Remove local calendars no longer on the remote side yield self.purgeLocal(local_sync_state, remote_sync_state) # Sync each calendar that matches on both sides for remoteID in remote_sync_state.keys(): yield self.syncCalendar(remoteID, local_sync_state, remote_sync_state) self.accounting("Completed: syncCalendarList.") @inTransactionWrapper @inlineCallbacks def getCalendarSyncList(self, txn): """ Get the names and sync-tokens for each remote owned calendar. """ # List of calendars from the remote side home = yield self._remoteHome(txn) if home is None: returnValue(None) calendars = yield home.loadChildren() results = {} for calendar in calendars: if calendar.owned(): sync_token = yield calendar.syncToken() results[calendar.id()] = CalendarMigrationRecord.make( calendarHomeResourceID=home.id(), remoteResourceID=calendar.id(), localResourceID=0, lastSyncToken=sync_token, ) returnValue(results) @inTransactionWrapper @inlineCallbacks def getSyncState(self, txn): """ Get local synchronization state for the home being migrated. """ records = yield CalendarMigrationRecord.querysimple( txn, calendarHomeResourceID=self.homeId ) returnValue(dict([(record.remoteResourceID, record) for record in records])) @inTransactionWrapper @inlineCallbacks def updateSyncState(self, txn, stateRecord, newSyncToken): """ Update or insert an L{CalendarMigrationRecord} with the new specified sync token. 
""" if stateRecord.isnew(): stateRecord.lastSyncToken = newSyncToken yield stateRecord.insert(txn) else: # The existing stateRecord has a stale txn, but valid column values. We have # to duplicate it before we can give it a different txn. stateRecord = stateRecord.duplicate() stateRecord.transaction = txn yield stateRecord.update(lastSyncToken=newSyncToken) @inTransactionWrapper @inlineCallbacks def purgeLocal(self, txn, local_sync_state, remote_sync_state): """ Remove (silently - i.e., no scheduling) local calendars that are no longer on the remote side. @param txn: transaction to use @type txn: L{CommonStoreTransaction} @param local_sync_state: local sync state @type local_sync_state: L{dict} @param remote_sync_state: remote sync state @type remote_sync_state: L{dict} """ home = yield self._localHome(txn) for localID in set(local_sync_state.keys()) - set(remote_sync_state.keys()): calendar = yield home.childWithID(local_sync_state[localID].localResourceID) if calendar is not None: yield calendar.purge() del local_sync_state[localID] self.accounting(" Purged calendar local-id={} that no longer exists on the remote pod.".format(localID)) @inlineCallbacks def syncCalendar(self, remoteID, local_sync_state, remote_sync_state): """ Sync the contents of a calendar from the remote side. The local calendar may need to be created on initial sync. Make use of sync tokens to avoid unnecessary work. 
@param remoteID: id of the remote calendar to sync @type remoteID: L{int} @param local_sync_state: local sync state @type local_sync_state: L{dict} @param remote_sync_state: remote sync state @type remote_sync_state: L{dict} """ self.accounting("Starting: syncCalendar.") # See if we need to create the local one first if remoteID not in local_sync_state: localID = yield self.newCalendar() local_sync_state[remoteID] = CalendarMigrationRecord.make( calendarHomeResourceID=self.homeId, remoteResourceID=remoteID, localResourceID=localID, lastSyncToken=None, ) self.accounting(" Created new calendar local-id={}, remote-id={}.".format(localID, remoteID)) else: localID = local_sync_state.get(remoteID).localResourceID self.accounting(" Updating calendar local-id={}, remote-id={}.".format(localID, remoteID)) local_record = local_sync_state.get(remoteID) remote_token = remote_sync_state[remoteID].lastSyncToken if local_record.lastSyncToken != remote_token: # Sync meta-data such as name, alarms, supported-components, transp, etc yield self.syncCalendarMetaData(local_record) # Sync object resources changed, removed = yield self.findObjectsToSync(local_record) self.accounting(" Calendar objects changed={}, removed={}.".format(len(changed), len(removed))) yield self.purgeDeletedObjectsInBatches(local_record, removed) yield self.updateChangedObjectsInBatches(local_record, changed) yield self.updateSyncState(local_record, remote_token) self.accounting("Completed: syncCalendar.") @inTransactionWrapper @inlineCallbacks def newCalendar(self, txn): """ Create a new local calendar to sync remote data to. We don't care about the name of the calendar right now - it will be sync'd later. """ home = yield self._localHome(txn) calendar = yield home.createChildWithName(str(uuid4())) returnValue(calendar.id()) @inTransactionWrapper @inlineCallbacks def syncCalendarMetaData(self, txn, migrationRecord): """ Sync the metadata of a calendar from the remote side. 
        @param migrationRecord: current migration record
        @type migrationRecord: L{CalendarMigrationRecord}
        """
        # Remote changes
        remote_home = yield self._remoteHome(txn)
        remote_calendar = yield remote_home.childWithID(migrationRecord.remoteResourceID)
        if remote_calendar is None:
            returnValue(None)

        # Check whether the deleted set items
        local_home = yield self._localHome(txn)
        local_calendar = yield local_home.childWithID(migrationRecord.localResourceID)
        yield local_calendar.copyMetadata(remote_calendar)
        self.accounting(" Copied calendar meta-data for calendar local-id={0.localResourceID}, remote-id={0.remoteResourceID}.".format(migrationRecord))

    @inTransactionWrapper
    @inlineCallbacks
    def findObjectsToSync(self, txn, migrationRecord):
        """
        Find the set of object resources that need to be sync'd from the remote
        side and the set that need to be removed locally. Take into account the
        possibility that this is a partial sync and removals or additions might
        be false positives.

        @param migrationRecord: current migration record
        @type migrationRecord: L{CalendarMigrationRecord}
        """
        # Remote changes
        remote_home = yield self._remoteHome(txn)
        remote_calendar = yield remote_home.childWithID(migrationRecord.remoteResourceID)
        if remote_calendar is None:
            returnValue(None)
        changed, deleted, _ignore_invalid = yield remote_calendar.resourceNamesSinceToken(migrationRecord.lastSyncToken)

        # Check whether the deleted set items
        local_home = yield self._localHome(txn)
        local_calendar = yield local_home.childWithID(migrationRecord.localResourceID)

        # Check the md5's on each changed remote with the local one to filter out ones
        # we don't actually need to sync
        remote_changes = yield remote_calendar.objectResourcesWithNames(changed)
        remote_changes = dict([(calendar.name(), calendar) for calendar in remote_changes])

        local_changes = yield local_calendar.objectResourcesWithNames(changed)
        local_changes = dict([(calendar.name(), calendar) for calendar in local_changes])

        actual_changes = []
        for name, calendar in remote_changes.items():
            # New locally, or md5 differs: needs syncing.
            if name not in local_changes or remote_changes[name].md5() != local_changes[name].md5():
                actual_changes.append(name)

        returnValue((actual_changes, deleted,))

    @inlineCallbacks
    def purgeDeletedObjectsInBatches(self, migrationRecord, deleted):
        """
        Purge (silently remove) the specified object resources. This needs to
        succeed in the case where some or all resources have already been deleted.
        Do this in batches to keep transaction times small.

        @param migrationRecord: local calendar migration record
        @type migrationRecord: L{CalendarMigrationRecord}
        @param deleted: list of names to purge
        @type deleted: L{list} of L{str}
        """
        remaining = list(deleted)
        while remaining:
            yield self.purgeBatch(migrationRecord.localResourceID, remaining[:self.BATCH_SIZE])
            del remaining[:self.BATCH_SIZE]

    @inTransactionWrapper
    @inlineCallbacks
    def purgeBatch(self, txn, localID, purge_names):
        """
        Purge a bunch of object resources from the specified calendar.

        @param txn: transaction to use
        @type txn: L{CommonStoreTransaction}
        @param localID: id of the local calendar to sync
        @type localID: L{int}
        @param purge_names: object resource names to purge
        @type purge_names: L{list} of L{str}
        """
        # Check whether the deleted set items
        local_home = yield self._localHome(txn)
        local_calendar = yield local_home.childWithID(localID)
        local_objects = yield local_calendar.objectResourcesWithNames(purge_names)
        for local_object in local_objects:
            yield local_object.purge(implicitly=False)
            self.accounting(" Purged calendar object local-id={}.".format(local_object.id()))

    @inlineCallbacks
    def updateChangedObjectsInBatches(self, migrationRecord, changed):
        """
        Update the specified object resources. This needs to succeed in the
        case where some or all resources have already been deleted. Do this in
        batches to keep transaction times small.
@param migrationRecord: local calendar migration record @type migrationRecord: L{CalendarMigrationRecord} @param changed: list of names to update @type changed: L{list} of L{str} """ remaining = list(changed) while remaining: yield self.updateBatch( migrationRecord.localResourceID, migrationRecord.remoteResourceID, remaining[:self.BATCH_SIZE], ) del remaining[:self.BATCH_SIZE] @inTransactionWrapper @inlineCallbacks def updateBatch(self, txn, localID, remoteID, remaining): """ Update a bunch of object resources from the specified remote calendar. @param txn: transaction to use @type txn: L{CommonStoreTransaction} @param localID: id of the local calendar to sync @type localID: L{int} @param remoteID: id of the remote calendar to sync with @type remoteID: L{int} @param purge_names: object resource names to update @type purge_names: L{list} of L{str} """ # Get remote objects remote_home = yield self._remoteHome(txn) remote_calendar = yield remote_home.childWithID(remoteID) if remote_calendar is None: returnValue(None) remote_objects = yield remote_calendar.objectResourcesWithNames(remaining) remote_objects = dict([(obj.name(), obj) for obj in remote_objects]) # Get local objects local_home = yield self._localHome(txn) local_calendar = yield local_home.childWithID(localID) local_objects = yield local_calendar.objectResourcesWithNames(remaining) local_objects = dict([(obj.name(), obj) for obj in local_objects]) # Sync ones that still exist - use txn._migrating together with stuffing the remote md5 # value onto the component being stored to ensure that the md5 value stored locally # matches the remote one (which should help reduce the need for a client to resync # the data when moved from one pod to the other). 
txn._migrating = True for obj_name in remote_objects.keys(): remote_object = remote_objects[obj_name] remote_data = yield remote_object.component() remote_data.md5 = remote_object.md5() if obj_name in local_objects: local_object = yield local_objects[obj_name] yield local_object._setComponentInternal(remote_data, internal_state=ComponentUpdateState.RAW) del local_objects[obj_name] log_op = "Updated" else: local_object = yield local_calendar._createCalendarObjectWithNameInternal(obj_name, remote_data, internal_state=ComponentUpdateState.RAW) # Maintain the mapping from the remote to local id. Note that this mapping never changes as the ids on both # sides are immutable - though it may get deleted if the local object is removed during sync (via a cascade). yield CalendarObjectMigrationRecord.create( txn, calendarHomeResourceID=self.homeId, remoteResourceID=remote_object.id(), localResourceID=local_object.id() ) log_op = "Created" # Sync meta-data such as schedule object, schedule tags, access mode etc yield local_object.copyMetadata(remote_object) self.accounting(" {} calendar object local-id={}, remote-id={}.".format(log_op, local_object.id(), remote_object.id())) # Purge the ones that remain for local_object in local_objects.values(): yield local_object.purge(implicitly=False) self.accounting(" Purged calendar object local-id={}.".format(local_object.id())) @inlineCallbacks def syncAttachments(self): """ Sync attachments (both metadata and actual attachment data) for the home being migrated. 
        """
        self.accounting("Starting: syncAttachments...")

        # Two steps - sync the table first in one txn, then sync each attachment's data
        changed_ids, removed_ids = yield self.syncAttachmentTable()
        self.accounting(" Attachments changed={}, removed={}".format(len(changed_ids), len(removed_ids)))

        for local_id in changed_ids:
            yield self.syncAttachmentData(local_id)

        self.accounting("Completed: syncAttachments.")

        returnValue((changed_ids, removed_ids,))

    @inTransactionWrapper
    @inlineCallbacks
    def syncAttachmentTable(self, txn):
        """
        Sync the ATTACHMENT table data for the home being migrated. Return the
        list of local attachment ids that now need their attachment data sync'd
        from the server.
        """
        remote_home = yield self._remoteHome(txn)
        rattachments = yield remote_home.getAllAttachments()
        rmap = dict([(attachment.id(), attachment) for attachment in rattachments])

        local_home = yield self._localHome(txn)
        lattachments = yield local_home.getAllAttachments()
        lmap = dict([(attachment.id(), attachment) for attachment in lattachments])

        # Figure out the differences
        records = yield AttachmentMigrationRecord.querysimple(
            txn, calendarHomeResourceID=self.homeId
        )
        mapping = dict([(record.remoteResourceID, record) for record in records])

        # Removed - remove attachment and migration state
        removed = set(mapping.keys()) - set(rmap.keys())
        for remove_id in removed:
            record = mapping[remove_id]
            att = yield ManagedAttachment.load(txn, None, None, attachmentID=record.localResourceID)
            if att:
                yield att.remove(adjustQuota=False)
            else:
                # No attachment row: drop the stale migration record directly.
                yield record.delete()

        # Track which ones need attachment data sync'd over
        data_ids = set()

        # Added - add new attachment and migration state
        added = set(rmap.keys()) - set(mapping.keys())
        for added_id in added:
            attachment = yield ManagedAttachment._create(txn, None, self.homeId)
            yield AttachmentMigrationRecord.create(
                txn,
                calendarHomeResourceID=self.homeId,
                remoteResourceID=added_id,
                localResourceID=attachment.id(),
            )
            data_ids.add(attachment.id())

        # Possible updates - check for md5 change and sync
        updates = set(mapping.keys()) & set(rmap.keys())
        for updated_id in updates:
            local_id = mapping[updated_id].localResourceID
            if rmap[updated_id].md5() != lmap[local_id].md5():
                yield lmap[local_id].copyRemote(rmap[updated_id])
                data_ids.add(local_id)

        returnValue((data_ids, removed,))

    @inTransactionWrapper
    @inlineCallbacks
    def syncAttachmentData(self, txn, local_id):
        """
        Sync the attachment data for the home being migrated.
        """
        remote_home = yield self._remoteHome(txn)
        local_home = yield self._localHome(txn)
        attachment = yield local_home.getAttachmentByID(local_id)
        if attachment is None:
            returnValue(None)

        records = yield AttachmentMigrationRecord.querysimple(
            txn, calendarHomeResourceID=self.homeId, localResourceID=local_id
        )
        if records:
            # Read the data from the conduit
            yield remote_home.readAttachmentData(records[0].remoteResourceID, attachment)
            self.accounting(" Read attachment local-id={0.localResourceID}, remote-id={0.remoteResourceID}".format(records[0]))

    @inlineCallbacks
    def linkAttachments(self):
        """
        Link attachments to the calendar objects they belong to.
        """
        self.accounting("Starting: linkAttachments...")

        # Get the map of links for the remote home
        links = yield self.getAttachmentLinks()
        self.accounting(" Linking {} attachments".format(len(links)))

        # Get remote->local ID mappings
        attachmentIDMap, objectIDMap = yield self.getAttachmentMappings()

        # Batch setting links for the local home
        # NOTE(review): uses a literal 50 here rather than self.BATCH_SIZE -
        # confirm whether that is intentional.
        len_links = len(links)
        while links:
            yield self.makeAttachmentLinks(links[:50], attachmentIDMap, objectIDMap)
            links = links[50:]

        self.accounting("Completed: linkAttachments.")

        returnValue(len_links)

    @inTransactionWrapper
    @inlineCallbacks
    def getAttachmentLinks(self, txn):
        """
        Get the remote link information.
        """
        # Get the map of links for the remote home
        remote_home = yield self._remoteHome(txn)
        links = yield remote_home.getAttachmentLinks()
        returnValue(links)

    @inTransactionWrapper
    @inlineCallbacks
    def getAttachmentMappings(self, txn):
        """
        Get the remote->local id mappings for attachments and calendar objects.
        """
        # Get migration mappings
        records = yield AttachmentMigrationRecord.querysimple(
            txn, calendarHomeResourceID=self.homeId
        )
        attachmentIDMap = dict([(record.remoteResourceID, record) for record in records])

        records = yield CalendarObjectMigrationRecord.querysimple(
            txn, calendarHomeResourceID=self.homeId
        )
        objectIDMap = dict([(record.remoteResourceID, record) for record in records])

        returnValue((attachmentIDMap, objectIDMap,))

    @inTransactionWrapper
    @inlineCallbacks
    def makeAttachmentLinks(self, txn, links, attachmentIDMap, objectIDMap):
        """
        Map remote links to local links.
        """
        for link in links:
            # Remote link has an invalid txn at this point so replace that first
            link._txn = txn

            # Now re-map the attachment ID and calendar_object_id to the local ones
            link._attachmentID = attachmentIDMap[link._attachmentID].localResourceID
            link._calendarObjectID = objectIDMap[link._calendarObjectID].localResourceID

            yield link.insert()

    @inlineCallbacks
    def delegateReconcile(self):
        """
        Sync the delegate assignments from the remote home to the local home.
        We won't use a fake directory UID locally.
        """
        self.accounting("Starting: delegateReconcile...")

        yield self.individualDelegateReconcile()
        yield self.groupDelegateReconcile()
        yield self.externalDelegateReconcile()

        self.accounting("Completed: delegateReconcile.")

    @inTransactionWrapper
    @inlineCallbacks
    def individualDelegateReconcile(self, txn):
        """
        Sync the individual delegate assignments from the remote home to the
        local home. We won't use a fake directory UID locally.
        """
        remote_records = yield txn.dumpIndividualDelegatesExternal(self.record)
        for record in remote_records:
            yield record.insert(txn)

        self.accounting(" Found {} individual delegates".format(len(remote_records)))

    @inTransactionWrapper
    @inlineCallbacks
    def groupDelegateReconcile(self, txn):
        """
        Sync the group delegate assignments from the remote home to the local
        home. We won't use a fake directory UID locally.
        """
        remote_records = yield txn.dumpGroupDelegatesExternal(self.record)
        for delegator, group in remote_records:
            # We need to make sure the group exists locally first and map the groupID to the local one
            local_group = yield txn.groupByUID(group.groupUID)
            delegator.groupID = local_group.groupID
            yield delegator.insert(txn)

        self.accounting(" Found {} group delegates".format(len(remote_records)))

    @inTransactionWrapper
    @inlineCallbacks
    def externalDelegateReconcile(self, txn):
        """
        Sync the external delegate assignments from the remote home to the
        local home. We won't use a fake directory UID locally.
        """
        remote_records = yield txn.dumpExternalDelegatesExternal(self.record)
        for record in remote_records:
            yield record.insert(txn)

        self.accounting(" Found {} external delegates".format(len(remote_records)))

    @inlineCallbacks
    def groupAttendeeReconcile(self):
        """
        Sync the remote group attendee links to the local store.
""" self.accounting("Starting: groupAttendeeReconcile...") # Get remote data and local mapping information remote_group_attendees, objectIDMap = yield self.groupAttendeeData() self.accounting(" Found {} group attendees".format(len(remote_group_attendees))) # Map each result to a local resource (in batches) number_of_links = len(remote_group_attendees) while remote_group_attendees: yield self.groupAttendeeProcess(remote_group_attendees[:50], objectIDMap) remote_group_attendees = remote_group_attendees[50:] self.accounting("Completed: groupAttendeeReconcile.") returnValue(number_of_links) @inTransactionWrapper @inlineCallbacks def groupAttendeeData(self, txn): """ Sync the remote group attendee links to the local store. """ remote_home = yield self._remoteHome(txn) remote_group_attendees = yield remote_home.getAllGroupAttendees() # Get all remote->local object maps records = yield CalendarObjectMigrationRecord.querysimple( txn, calendarHomeResourceID=self.homeId ) objectIDMap = dict([(record.remoteResourceID, record.localResourceID) for record in records]) returnValue((remote_group_attendees, objectIDMap,)) @inTransactionWrapper @inlineCallbacks def groupAttendeeProcess(self, txn, results, objectIDMap): """ Sync the remote group attendee links to the local store. """ # Map each result to a local resource for groupAttendee, group in results: local_group = yield txn.groupByUID(group.groupUID) groupAttendee.groupID = local_group.groupID try: groupAttendee.resourceID = objectIDMap[groupAttendee.resourceID] except KeyError: continue yield groupAttendee.insert(txn) @inlineCallbacks def notificationsReconcile(self): """ Sync all the existing L{NotificationObject} resources from the remote store. 
""" self.accounting("Starting: notificationsReconcile...") records = yield self.notificationRecords() self.accounting(" Found {} notifications".format(len(records))) # Batch setting resources for the local home len_records = len(records) while records: yield self.makeNotifications(records[:50]) records = records[50:] self.accounting("Completed: notificationsReconcile.") returnValue(len_records) @inTransactionWrapper @inlineCallbacks def notificationRecords(self, txn): """ Get all the existing L{NotificationObjectRecord}'s from the remote store. """ notifications = yield self._remoteNotificationsHome(txn) records = yield notifications.notificationObjectRecords() for record in records: # This needs to be reset when added to the local store del record.resourceID # Map the remote id to the local one. record.notificationHomeResourceID = notifications.id() returnValue(records) @inTransactionWrapper @inlineCallbacks def makeNotifications(self, txn, records): """ Create L{NotificationObjectRecord} records in the local store. """ notifications = yield NotificationCollection.notificationsWithUID(txn, self.diruid, status=_HOME_STATUS_MIGRATING, create=True) for record in records: # Do this via the "write" API so that sync revisions are updated properly, rather than just # inserting the records directly. notification = yield notifications.writeNotificationObject(record.notificationUID, record.notificationType, record.notificationData) self.accounting(" Added notification local-id={}.".format(notification.id())) @inlineCallbacks def sharedByCollectionsReconcile(self): """ Sync all the collections shared by the migrating user from the remote store. We will do this one calendar at a time since there could be a large number of sharees per calendar. Here is the logic we need: first assume we have three pods: A, B, C, and we are migrating a user from A->B. We start with a set of shares (X -> Y - where X is the sharer and Y the sharee) on pod A. We migrate the sharer to pod B. 
We then need to have a set of bind records on pod B, and adjust the set on pod A. Note that no changes are required on pod C. Original | Changes | Changes Shares | on B | on A --------------|------------------------------|--------------------- A -> A | B -> A (new) | B -> A (modify existing) A -> B | B -> B (modify existing) | (removed) A -> C | B -> C (new) | (removed) """ self.accounting("Starting: sharedByCollectionsReconcile...") calendars = yield self.getSyncState() len_records = 0 for calendar in calendars.values(): records, bindUID = yield self.sharedByCollectionRecords(calendar.remoteResourceID, calendar.localResourceID) if not records: continue records = records.items() self.accounting(" Found shared by calendar local-id={0.localResourceID}, remote-id={0.remoteResourceID} with {1} sharees".format( calendar, len(records), )) # Batch setting resources for the local home len_records += len(records) while records: yield self.makeSharedByCollections(records[:50], calendar.localResourceID) records = records[50:] # Get groups from remote pod yield self.syncGroupSharees(calendar.remoteResourceID, calendar.localResourceID) # Update the remote pod to switch over the shares yield self.updatedRemoteSharedByCollections(calendar.remoteResourceID, bindUID) self.accounting("Completed: sharedByCollectionsReconcile.") returnValue(len_records) @inTransactionWrapper @inlineCallbacks def sharedByCollectionRecords(self, txn, remote_id, local_id): """ Get all the existing L{CalendarBindRecord}'s from the remote store. Also make sure a bindUID exists for the local calendar. 
""" remote_home = yield self._remoteHome(txn) remote_calendar = yield remote_home.childWithID(remote_id) records = yield remote_calendar.sharingBindRecords() # Check bindUID local_records = yield CalendarBindRecord.querysimple( txn, calendarHomeResourceID=self.homeId, calendarResourceID=local_id, ) if records and not local_records[0].bindUID: yield local_records[0].update(bindUID=str(uuid4())) returnValue((records, local_records[0].bindUID,)) @inTransactionWrapper @inlineCallbacks def makeSharedByCollections(self, txn, records, calendar_id): """ Create L{CalendarBindRecord} records in the local store. """ for shareeUID, record in records: shareeHome = yield txn.calendarHomeWithUID(shareeUID, create=True) # First look for an existing record that could be present if the migrating user had # previously shared with this sharee as a cross-pod share oldrecord = yield CalendarBindRecord.querysimple( txn, calendarHomeResourceID=shareeHome.id(), calendarResourceName=record.calendarResourceName, ) # FIXME: need to figure out sync-token and bind revision changes if oldrecord: # Point old record to the new local calendar being shared yield oldrecord[0].update( calendarResourceID=calendar_id, bindRevision=0, ) self.accounting(" Updating existing sharee {}".format(shareeHome.uid())) else: # Map the record resource ids and insert a new record record.calendarHomeResourceID = shareeHome.id() record.calendarResourceID = calendar_id record.bindRevision = 0 yield record.insert(txn) self.accounting(" Adding new sharee {}".format(shareeHome.uid())) @inTransactionWrapper @inlineCallbacks def syncGroupSharees(self, txn, remote_id, local_id): """ Sync the group sharees for a remote share. 
""" remote_home = yield self._remoteHome(txn) remote_calendar = yield remote_home.childWithID(remote_id) results = yield remote_calendar.groupSharees() groups = dict([(group.groupID, group.groupUID,) for group in results["groups"]]) for share in results["sharees"]: local_group = yield txn.groupByUID(groups[share.groupID]) share.groupID = local_group.groupID share.calendarID = local_id yield share.insert(txn) self.accounting(" Adding group sharee {}".format(local_group.groupUID)) @inTransactionWrapper @inlineCallbacks def updatedRemoteSharedByCollections(self, txn, remote_id, bindUID): """ Get all the existing L{CalendarBindRecord}'s from the remote store. """ remote_home = yield self._remoteHome(txn) remote_calendar = yield remote_home.childWithID(remote_id) records = yield remote_calendar.migrateBindRecords(bindUID) self.accounting(" Updating remote records") returnValue(records) @inlineCallbacks def sharedToCollectionsReconcile(self): """ Sync all the collections shared to the migrating user from the remote store. Here is the logic we need: first assume we have three pods: A, B, C, and we are migrating a user from A->B. We start with a set of shares (X -> Y - where X is the sharer and Y the sharee) with sharee on pod A. We migrate the sharee to pod B. We then need to have a set of bind records on pod B, and adjust the set on pod A. Note that no changes are required on pod C. 
Original | Changes | Changes Shares | on B | on A --------------|------------------------------|--------------------- A -> A | A -> B (new) | A -> B (modify existing) B -> A | B -> B (modify existing) | (removed) C -> A | C -> B (new) | (removed) """ self.accounting("Starting: sharedToCollectionsReconcile...") records = yield self.sharedToCollectionRecords() records = records.items() len_records = len(records) self.accounting(" Found {} shared to collections".format(len_records)) while records: yield self.makeSharedToCollections(records[:50]) records = records[50:] self.accounting("Completed: sharedToCollectionsReconcile.") returnValue(len_records) @inTransactionWrapper @inlineCallbacks def sharedToCollectionRecords(self, txn): """ Get the names and sharer UIDs for remote shared calendars. """ # List of calendars from the remote side home = yield self._remoteHome(txn) if home is None: returnValue(None) results = yield home.sharedToBindRecords() returnValue(results) @inTransactionWrapper @inlineCallbacks def makeSharedToCollections(self, txn, records): """ Create L{CalendarBindRecord} records in the local store. 
""" for sharerUID, (shareeRecord, ownerRecord, metadataRecord) in records: sharerHome = yield txn.calendarHomeWithUID(sharerUID, create=True) # We need to figure out the right thing to do based on whether the sharer is local to this pod # (the one where the migrated user will be hosted) vs located on another pod if sharerHome.normal(): # First look for an existing record that must be present if the migrating user had # previously been shared with by this sharee oldrecord = yield CalendarBindRecord.querysimple( txn, calendarResourceName=shareeRecord.calendarResourceName, ) if len(oldrecord) == 1: # Point old record to the new local calendar home yield oldrecord[0].update( calendarHomeResourceID=self.homeId, ) self.accounting(" Updated existing local sharer record {}".format(sharerHome.uid())) else: raise AssertionError("An existing share must be present") else: # We have an external user. That sharer may have already shared the calendar with some other user # on this pod, in which case there is already a CALENDAR table entry for it, and we need the # resource ID from that to use in the new CALENDAR_BIND record we create. 
If a pre-existing share # is not present, then we have to create the CALENDAR table entry and associated pieces remote_id = shareeRecord.calendarResourceID # Look for pre-existing share with the same external ID oldrecord = yield CalendarBindRecord.querysimple( txn, calendarHomeResourceID=sharerHome.id(), bindUID=ownerRecord.bindUID, ) if oldrecord: # Map the record resource ids and insert a new record calendar_id = oldrecord.calendarResourceID log_op = "Updated" else: sharerView = yield sharerHome.createCollectionForExternalShare( ownerRecord.calendarResourceName, ownerRecord.bindUID, metadataRecord.supportedComponents, ) calendar_id = sharerView.id() log_op = "Created" shareeRecord.calendarHomeResourceID = self.homeId shareeRecord.calendarResourceID = calendar_id shareeRecord.bindRevision = 0 yield shareeRecord.insert(txn) self.accounting(" {} remote sharer record {}".format(log_op, sharerHome.uid())) yield self.updatedRemoteSharedToCollection(remote_id, txn=txn) @inTransactionWrapper @inlineCallbacks def updatedRemoteSharedToCollection(self, txn, remote_id): """ Get all the existing L{CalendarBindRecord}'s from the remote store. """ remote_home = yield self._remoteHome(txn) remote_calendar = yield remote_home.childWithID(remote_id) records = yield remote_calendar.migrateBindRecords(None) self.accounting(" Updating remote records") returnValue(records) @inlineCallbacks def iMIPTokensReconcile(self): """ Sync all the existing L{iMIPTokenRecord} records from the remote store. 
""" self.accounting("Starting: iMIPTokensReconcile...") records = yield self.iMIPTokenRecords() self.accounting(" Found {} iMIPToken records".format(len(records))) # Batch setting resources for the local home len_records = len(records) while records: yield self.makeiMIPTokens(records[:50]) records = records[50:] self.accounting("Completed: iMIPTokensReconcile.") returnValue(len_records) @inTransactionWrapper @inlineCallbacks def iMIPTokenRecords(self, txn): """ Get all the existing L{iMIPTokenRecord}'s from the remote store. """ remote_home = yield self._remoteHome(txn) records = yield remote_home.iMIPTokens() returnValue(records) @inTransactionWrapper @inlineCallbacks def makeiMIPTokens(self, txn, records): """ Create L{iMIPTokenRecord} records in the local store. """ for record in records: yield record.insert(txn) @inlineCallbacks def workItemsReconcile(self): """ Sync all the existing L{SCheduleWork} records from the remote store. """ self.accounting("Starting: workItemsReconcile...") records, mapping = yield self.workItemRecords() self.accounting(" Found {} Schedule work records".format(len(records))) # Batch setting resources for the local home len_records = len(records) while records: yield self.makeWorkItems(records[:50], mapping) records = records[50:] self.accounting("Completed: workItemsReconcile.") returnValue(len_records) @inTransactionWrapper @inlineCallbacks def workItemRecords(self, txn): """ Get all the existing L{ScheduleWork}'s from the remote store. Also, if any are found, get the object resource id mapping details. 
""" remote_home = yield self._remoteHome(txn) records = yield remote_home.workItems() mapping = {} # Cache remote->local resource id mapping if records: local_home = yield self._localHome(txn) mappings = yield CalendarObjectMigrationRecord.query( txn, CalendarObjectMigrationRecord.calendarHomeResourceID == local_home.id() ) for item in mappings: mapping[item.remoteResourceID] = item.localResourceID returnValue((records, mapping,)) @inTransactionWrapper @inlineCallbacks def makeWorkItems(self, txn, records, mapping): """ Create L{ScheduleWork} records in the local store. Note that the work items need to be given references to the local home and object resource. The job is created in paused state. """ local_home = yield self._localHome(txn) @inlineCallbacks def mapIDs(remote_id): local_id = mapping.get(remote_id) if local_id is not None: obj = yield local_home.objectResourceWithID(local_id) else: obj = None returnValue((local_home, obj,)) for record in records: yield record.migrate(txn, mapIDs)
from os.path import join, exists, isdir, relpath, abspath, dirname
import datetime as dt
import posixpath
import logging
import tempfile
from os import stat, makedirs, remove
import random
import uuid
from cStringIO import StringIO
import time

from six import string_types
try:
    import gevent
except ImportError:
    # gevent is optional; callers must check for None before use
    gevent = None

from ..clients.http import Client
from .. import settings
from ..serialization import deserializer, serializer
from ..errors import KitchenSinkError
from ..utils.pathutils import urlsplit, dirsplit, urljoin
from .catalog import _write, _read

logger = logging.getLogger(__name__)

## should factor this out into a separate module


class RemoteData(object):
    """RemoteData objects can contain raw data (in memory, in self._raw,
    or on disk, in self._local_path. Or as a deserialized object, in self._obj.
    Maybe this isn't the greatest approach, mixing them all together in one object
    but that's what we have for now.
    Currently, if you ask for self.obj(), or self.local_path(), or self.raw(),
    we will retrieve it from what we perceive to be the cheapest source.
    For example, if you have the stream in memory (self._raw), and you ask for
    the local path, we will write it to a temp file and return you that path
    """
    def __init__(self, obj=None, local_path=None, data_url=None, raw=None,
                 rpc_url=None, fmt="cloudpickle"):
        if rpc_url is None:
            rpc_url = settings.data_rpc_url
        self.rpc_url = rpc_url
        self.data_url = data_url
        self.fmt = fmt
        self._obj = obj
        self._local_path = local_path
        self._raw = raw

    def __setstate__(self, obj):
        # Only the url and format survive pickling; cached representations are
        # dropped and re-fetched lazily on the receiving side.
        self.data_url = obj['data_url']
        self.fmt = obj['fmt']
        self.rpc_url = settings.data_rpc_url
        self._obj = None
        self._local_path = None
        self._raw = None

    def __getstate__(self):
        return dict(data_url=self.data_url, fmt=self.fmt)

    def client(self, rpc_url=None):
        """Return a data-queue RPC client for the given (or default) url."""
        if rpc_url is None:
            rpc_url = self.rpc_url
        return Client(rpc_url, rpc_name='data', queue_name='data')

    def _get_stream(self):
        """returns either the stream associated with a data_url
        """
        if settings.catalog:
            return settings.catalog.get(self.data_url)
        c = self.client(self.rpc_url)
        node, url = c.pick_host(self.data_url)
        resp = self.client(url)._get_data(self.data_url)
        return resp.raw

    def _put(self, f, data_type='object', fmt="cloudpickle"):
        """Upload the stream f, either via the local catalog or over RPC."""
        logger.debug("posting %s to %s", self.data_url, self.rpc_url)
        f.seek(0)
        if settings.catalog and not settings.read_only:
            # NOTE(review): this passes self.fmt rather than the fmt argument -
            # looks intentional for the catalog path, but confirm.
            settings.catalog.write(f, self.data_url, is_new=True,
                                   data_type=data_type, fmt=self.fmt)
        else:
            c = self.client(self.rpc_url)
            hosts = c.call('hosts', to_write=True, _async=False, _rpc_name='data')
            if self.rpc_url in set(hosts.values()):
                url = self.rpc_url
            else:
                # list() so this also works when values() is a dict view
                url = random.choice(list(hosts.values()))
            c = self.client(url)
            return c._put_data(self.data_url, f, data_type=data_type, fmt=fmt)

    def _existing_file_path(self):
        """Return (and cache) a catalog-managed file path, if one exists."""
        if settings.catalog:
            name = settings.catalog.get_file_path(self.data_url)
            if name is not None:
                self._local_path = name
                return name

    def local_path(self):
        """provides a path to a local file that contains the
        contents of this remote data source (downloads if necessary)
        """
        if self._local_path:
            return self._local_path
        path = self._existing_file_path()
        if path is not None:
            return path
        ### if we have the data in memory, write it to a local file and return it
        if self._raw:
            name = tempfile.NamedTemporaryFile(prefix="ks-data-").name
            _write(self._raw, name)
            self._local_path = name
            return name
        ### if we have an in memory object, serialize it, write to a file
        ### and return it
        if self._obj is not None:
            name = tempfile.NamedTemporaryFile(prefix="ks-data-").name
            data = serializer(self.fmt)(self._obj)
            _write(data, name)
            self._local_path = name
            return name
        ### grab the stream, write to temporary file, return the path
        stream = self._get_stream()
        try:
            name = tempfile.NamedTemporaryFile(prefix="ks-data-").name
            _write(stream, name)
            self._local_path = name
            return name
        finally:
            stream.close()

    def raw(self):
        """provides raw contents of the remote data
        """
        if self._raw:
            return self._raw
        path = self._existing_file_path()
        if path is not None:
            self._raw = _read(path)
            return self._raw
        if self._local_path:
            self._raw = _read(self._local_path)
            return self._raw
        if self._obj is not None:
            self._raw = serializer(self.fmt)(self._obj)
            return self._raw
        stream = None
        try:
            stream = self._get_stream()
            self._raw = stream.read()
            return self._raw
        finally:
            if stream:
                stream.close()

    def obj(self):
        """Return the deserialized object, fetching/deserializing if needed."""
        if self._obj is not None:
            return self._obj
        #should be able to pass a file in later
        try:
            raw = self.raw()
            obj = deserializer(self.fmt)(raw)
            self._obj = obj
            return obj
        except Exception as e:
            logger.error("error with %s on %s raw",
                         self.data_url,
                         settings.data_rpc_url,
                         )
            logger.exception(e)
            raise

    def delete(self):
        raise NotImplementedError

    def _save_stream(self):
        """Return (length, file-like) for the cheapest available representation."""
        if self._raw:
            return len(self._raw), StringIO(self._raw)
        elif self._local_path:
            length = stat(self._local_path).st_size
            return length, open(self._local_path, "rb")
        else:
            data = serializer(self.fmt)(self._obj)
            return len(data), StringIO(data)

    def save(self, url=None, prefix=""):
        """use this function to save a NEW data object
        """
        if self.data_url:
            # call-style raise works in both Python 2 and 3 (the original used
            # the Python-2-only "raise E, msg" statement form)
            raise KitchenSinkError("Dataset is already created, cannot be modified")
        if url is None:
            self.data_url = urljoin(prefix, str(uuid.uuid4()))
        else:
            self.data_url = url
        length, f = self._save_stream()
        try:
            data_type = "object" if self._obj is not None else None
            fmt = self.fmt if self._obj is not None else None
            self._put(f, data_type=data_type, fmt=fmt)
        finally:
            f.close()

    # removing pipelining - slowing down development, and I'm not using it yet

    def __repr__(self):
        if self.data_url:
            return "RemoteData(data_url='%s')" % self.data_url
        elif self._local_path:
            return "RemoteData(local_path='%s')" % self._local_path
        else:
            return "RemoteData(obj=obj)"

    def __getitem__(self, arg):
        if settings.catalog:
            obj = do(self.obj()[arg])
            obj.save()
            return obj
        else:
            c = Client(self.rpc_url)
            return c.async_result(c.call(self.__class__.__getitem__,
                                         self, arg))


def du(url, **kwargs):
    """Shortcut: RemoteData referencing an existing data url."""
    return RemoteData(data_url=url, **kwargs)


def dp(path):
    """Shortcut: RemoteData backed by a local file path."""
    return RemoteData(local_path=path)


def do(obj, **kwargs):
    """Shortcut: RemoteData wrapping an in-memory object."""
    return RemoteData(obj=obj, **kwargs)


def dr(raw):
    """Shortcut: RemoteData wrapping a raw byte string."""
    return RemoteData(raw=raw)
import binascii
import re

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.serialization import load_pem_public_key

import util


def byte_size(limit, target):
    """Verifies the target's length does not exceed the byte limit.

    Args:
        limit (int): Maximum allowed length.
        target: Value whose length is checked.
    """
    if limit < len(target):
        raise Exception('Length is not less than %s bytes' % limit)


def field_is_sha256(sha, field_name=None):
    """Verifies a string is a possible SHA256 hash.

    Args:
        sha (str): Hash to verify.
        field_name (str): Optional field name used in the error message.
    """
    if not re.match(r'^[A-Fa-f0-9]{64}$', sha):
        raise Exception('Field is not a hash' if field_name is None
                        else 'Field %s is not a hash' % field_name)


def rsa(public_key, signature, message):
    """Verifies an RSA signature.

    Args:
        public_key (str): Public key with BEGIN and END sections.
        signature (str): Hex value of the signature with its leading 0x
            stripped.
        message (str): Message that was signed, unhashed.
    """
    try:
        public_rsa = load_pem_public_key(bytes(public_key),
                                         backend=default_backend())
        hashed = util.sha256(message)
        public_rsa.verify(
            binascii.unhexlify(signature),
            hashed,
            padding.PSS(
                mgf=padding.MGF1(hashes.SHA256()),
                salt_length=padding.PSS.MAX_LENGTH
            ),
            hashes.SHA256()
        )
    except InvalidSignature:
        raise Exception('Invalid signature')


def sha256(sha, message, name=None):
    """Verifies the hash matches the SHA256'd message.

    Args:
        sha (str): A SHA256 hash result.
        message (str): Message to hash and compare to.
        name (str): Optional name used in the error message.
    """
    if not sha == util.sha256(message):
        raise Exception('SHA256 does not match message' if name is None
                        else 'SHA256 of %s does not match hash' % name)


def star_log(star_log_json):
    """Verifies the starlog has all the required fields, and any hashes and
    signatures match up.

    Args:
        star_log_json (dict): Target starlog json.
    """
    if not isinstance(star_log_json['hash'], basestring):
        raise Exception('hash is not a string')
    if not isinstance(star_log_json['version'], int):
        raise Exception('version is not an integer')
    if not isinstance(star_log_json['previous_hash'], basestring):
        raise Exception('previous_hash is not a string')
    if not isinstance(star_log_json['difficulty'], int):
        raise Exception('difficulty is not an integer')
    if not isinstance(star_log_json['nonce'], int):
        raise Exception('nonce is not an integer')
    if not isinstance(star_log_json['time'], int):
        raise Exception('time is not an integer')
    if util.get_time() < star_log_json['time']:
        raise Exception('time is greater than the current time')
    if not isinstance(star_log_json['events_hash'], basestring):
        raise Exception('events_hash is not a string')
    if star_log_json['events'] is None:
        raise Exception('events is missing')
    if star_log_json['meta_hash'] is None:
        raise Exception('meta_hash is missing')

    field_is_sha256(star_log_json['hash'], 'hash')
    field_is_sha256(star_log_json['previous_hash'], 'previous_hash')
    field_is_sha256(star_log_json['events_hash'], 'events_hash')
    field_is_sha256(star_log_json['meta_hash'], 'meta_hash')
    sha256(star_log_json['hash'],
           util.concat_star_log_header(star_log_json), 'log_header')
    sha256(star_log_json['meta_hash'], star_log_json['meta'], 'meta')
    if not star_log_json['events_hash'] == util.hash_events(star_log_json['events']):
        raise Exception('events_hash does not match actual hash')

    difficulty(star_log_json['difficulty'], star_log_json['hash'])
    events(star_log_json['events'])


def events(events_json):
    """Verifies the state of a star log has all the required fields, and any
    hashes and signatures match up.

    Args:
        events_json (dict): Events json.
    """
    remaining_ship_rewards = util.shipReward()
    input_keys = []
    output_keys = []
    for current_event in events_json:
        event(current_event)
        if current_event['type'] == 'reward':
            if len(current_event['inputs']) != 0:
                raise Exception('reward events cannot have inputs')
            if len(current_event['outputs']) == 0:
                raise Exception('reward events with no recipients should not be included')
            for current_output in current_event['outputs']:
                remaining_ship_rewards -= current_output['count']
                if remaining_ship_rewards < 0:
                    raise Exception('number of ships rewarded is out of range')
                if current_output['type'] != 'reward':
                    raise Exception('reward outputs must be of type "reward"')
        elif current_event['type'] == 'jump':
            if len(current_event['inputs']) == 0:
                raise Exception('jump events cannot have zero inputs')
            output_length = len(current_event['outputs'])
            if output_length == 0:
                raise Exception('jump events cannot have zero outputs')
            if 2 < output_length:
                raise Exception('jump events cannot have more than 2 outputs')
            if 2 == output_length and \
               current_event['outputs'][0]['star_system'] == current_event['outputs'][1]['star_system']:
                raise Exception('jump event cannot split in new system')
            for current_output in current_event['outputs']:
                if current_output['count'] <= 0:
                    raise Exception('jump events cannot jump '
                                    'zero or less ships')
                if current_output['type'] != 'jump':
                    raise Exception('jump outputs must be of type "jump"')
        elif current_event['type'] == 'attack':
            if len(current_event['inputs']) < 2:
                raise Exception('attack events need at least two inputs')
            if len(current_event['inputs']) < len(current_event['outputs']):
                raise Exception('attacks cannot have more outputs than inputs')
            for current_output in current_event['outputs']:
                if current_output['count'] <= 0:
                    raise Exception('attack events cannot outputs '
                                    'zero or less ships')
                if current_output['type'] != 'attack':
                    raise Exception('attack outputs must be of type "attack"')
        elif current_event['type'] == 'transfer':
            if len(current_event['inputs']) < 1:
                raise Exception('transfer events need at least one input')
            if len(current_event['outputs']) < len(current_event['inputs']):
                raise Exception('transfers cannot have more inputs than outputs')
            for current_output in current_event['outputs']:
                if current_output['count'] <= 0:
                    raise Exception('transfer events cannot output zero or less ships')
                if current_output['type'] != 'transfer':
                    raise Exception('transfer outputs must be of type "transfer"')
        else:
            raise ValueError('unrecognized event of type '
                             '%s' % current_event['type'])

        # Keys must be globally unique across all events in the list
        for current_input in current_event['inputs']:
            key = current_input['key']
            if key in input_keys:
                raise Exception('event input key %s '
                                'is listed more than once' % key)
            input_keys.append(key)
        for current_output in current_event['outputs']:
            key = current_output['key']
            if key in output_keys:
                raise Exception('event output key %s '
                                'is listed more than once' % key)
            output_keys.append(key)


def event(event_json, require_index=True, require_star_system=False,
          reward_allowed=True):
    """Verifies the fields of an event.

    Args:
        event_json (dict): Target.
        require_index (bool): Verifies an integer index is included if True.
        require_star_system (bool): Verifies that every output specifies a
            star system if True.
        reward_allowed (bool): Rejects events of type 'reward' if False.
    """
    if not isinstance(event_json['type'], basestring):
        raise Exception('type is not a string')
    if not isinstance(event_json['fleet_hash'], basestring):
        raise Exception('fleet_hash is not a string')
    if not isinstance(event_json['fleet_key'], basestring):
        raise Exception('fleet_key is not a string')
    if not isinstance(event_json['hash'], basestring):
        raise Exception('hash is not a string')
    if require_index and not isinstance(event_json['index'], int):
        raise Exception('index is not an integer')
    field_is_sha256(event_json['hash'], 'hash')
    if not reward_allowed and event_json['type'] == 'reward':
        raise Exception('event of type %s forbidden' % event_json['type'])
    if event_json['type'] not in ['reward', 'jump', 'attack', 'transfer']:
        raise Exception('unrecognized event of type %s' % event_json['type'])

    input_indices = []
    for current_input in event_json['inputs']:
        event_input(current_input)
        input_index = current_input['index']
        if input_index in input_indices:
            raise Exception('duplicate input index %s' % input_index)
        input_indices.append(input_index)

    output_indices = []
    for current_output in event_json['outputs']:
        event_output(current_output, require_star_system)
        output_index = current_output['index']
        if output_index in output_indices:
            raise Exception('duplicate output index %s' % output_index)
        output_indices.append(output_index)

    if util.hash_event(event_json) != event_json['hash']:
        raise Exception('provided hash does not match the calculated one')
    field_is_sha256(event_json['fleet_hash'], 'fleet_hash')
    sha256(event_json['fleet_hash'], event_json['fleet_key'], 'fleet_key')
    rsa(util.expand_rsa_public_key(event_json['fleet_key']),
        event_json['signature'],
        event_json['hash'])


def event_input(input_json):
    """Verifies the fields of an event input."""
    if not isinstance(input_json['index'], int):
        raise Exception('index is not an integer')
    if not isinstance(input_json['key'], basestring):
        raise Exception('key is not a string')
    if input_json['index'] < 0:
        raise Exception('index is out of range')
    field_is_sha256(input_json['key'], 'key')


def event_output(output_json, require_star_system=False):
    """Verifies the fields of an event output."""
    if not isinstance(output_json['index'], int):
        raise Exception('index is not an integer')
    if not isinstance(output_json['type'], basestring):
        raise Exception('type is not a string')
    if not isinstance(output_json['fleet_hash'], basestring):
        raise Exception('fleet_hash is not a string')
    if not isinstance(output_json['key'], basestring):
        raise Exception('key is not a string')
    if output_json['star_system'] is None and require_star_system:
        raise Exception('star_system is missing')
    if output_json['star_system'] is not None:
        if not isinstance(output_json['star_system'], basestring):
            raise Exception('star_system is not a string')
        field_is_sha256(output_json['star_system'], 'star_system')
    if not isinstance(output_json['count'], int):
        raise Exception('count is not an integer')
    if output_json['index'] < 0:
        raise Exception('index is out of range')
    if output_json['count'] <= 0:
        raise Exception('count is out of range')
    field_is_sha256(output_json['fleet_hash'], 'fleet_hash')
    field_is_sha256(output_json['key'], 'key')


def event_rsa(event_json):
    """Verifies the RSA signature of the provided event json.

    Args:
        event_json (dict): Event to validate.
    """
    try:
        rsa(util.expand_rsa_public_key(event_json['fleet_key']),
            event_json['signature'],
            util.concat_event(event_json))
    except InvalidSignature:
        raise Exception('Invalid RSA signature')


def difficulty(packed, sha, validate_params=True):
    """Takes the integer form of difficulty and verifies that the hash is
    less than it.

    Args:
        packed (int): Packed target difficulty the provided SHA256 hash must
            meet.
        sha (str): Hex target to test, stripped of its leading 0x.
        validate_params (bool): Validate the inputs if True.
    """
    if validate_params:
        if not isinstance(packed, (int, long)):
            raise Exception('difficulty is not an int')
        field_is_sha256(sha, 'difficulty target')
    mask = util.unpack_bits(packed, True)
    leading_zeros = len(mask) - len(mask.lstrip('0'))
    difficulty_unpacked(mask, leading_zeros, sha, validate_params)


def difficulty_unpacked(unpacked_stripped, leading_zeros, sha,
                        validate_params=True):
    """Takes the unpacked form of difficulty and verifies that the hash is
    less than it.

    Args:
        unpacked_stripped (str): Unpacked target difficulty the provided
            SHA256 hash must meet.
        leading_zeros (int): Number of leading zero characters required.
        sha (str): Hex target to test, stripped of its leading 0x.
        validate_params (bool): Validate the inputs if True.
    """
    if validate_params:
        field_is_sha256(sha, 'difficulty target')
    # FIX: only the hex->int casts belong inside the try. The original wrapped
    # the comparison logic in the same try with a bare "except:", which
    # swallowed the intentional 'Hash is greater than packed target' errors
    # and re-raised them with the misleading cast-failure message.
    significant = sha[:len(unpacked_stripped)]
    try:
        target_value = int(unpacked_stripped, 16)
        actual_value = int(significant, 16)
    except ValueError:
        raise Exception('Unable to cast to int from hexidecimal')
    for i in range(0, leading_zeros):
        if sha[i] != '0':
            raise Exception('Hash is greater than packed target')
    if target_value <= actual_value:
        raise Exception('Hash is greater than packed target')
"""The tests for numeric state automation.""" import unittest from blumate.bootstrap import _setup_component import blumate.components.automation as automation from tests.common import get_test_home_assistant class TestAutomationNumericState(unittest.TestCase): """Test the event automation.""" def setUp(self): # pylint: disable=invalid-name """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.hass.config.components.append('group') self.calls = [] def record_call(service): """Helper to record calls.""" self.calls.append(service) self.hass.services.register('test', 'automation', record_call) def tearDown(self): # pylint: disable=invalid-name """Stop everything that was started.""" self.hass.stop() def test_if_fires_on_entity_change_below(self): """"Test the firing with changed entity.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 9 is below 10 self.hass.states.set('test.entity', 9) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_fires_on_entity_change_over_to_below(self): """"Test the firing with changed entity.""" self.hass.states.set('test.entity', 11) self.hass.pool.block_till_done() assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 9 is below 10 self.hass.states.set('test.entity', 9) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_not_fires_on_entity_change_below_to_below(self): """"Test the firing with changed entity.""" self.hass.states.set('test.entity', 9) self.hass.pool.block_till_done() assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 
'test.entity', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 9 is below 10 so this should not fire again self.hass.states.set('test.entity', 8) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_fires_on_entity_change_above(self): """"Test the firing with changed entity.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'above': 10, }, 'action': { 'service': 'test.automation' } } }) # 11 is above 10 self.hass.states.set('test.entity', 11) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_fires_on_entity_change_below_to_above(self): """"Test the firing with changed entity.""" # set initial state self.hass.states.set('test.entity', 9) self.hass.pool.block_till_done() assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'above': 10, }, 'action': { 'service': 'test.automation' } } }) # 11 is above 10 and 9 is below self.hass.states.set('test.entity', 11) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_not_fires_on_entity_change_above_to_above(self): """"Test the firing with changed entity.""" # set initial state self.hass.states.set('test.entity', 11) self.hass.pool.block_till_done() assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'above': 10, }, 'action': { 'service': 'test.automation' } } }) # 11 is above 10 so this should fire again self.hass.states.set('test.entity', 12) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_fires_on_entity_change_below_range(self): """"Test the firing with changed entity.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 
'entity_id': 'test.entity', 'below': 10, 'above': 5, }, 'action': { 'service': 'test.automation' } } }) # 9 is below 10 self.hass.states.set('test.entity', 9) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_fires_on_entity_change_below_above_range(self): """"Test the firing with changed entity.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'below': 10, 'above': 5, }, 'action': { 'service': 'test.automation' } } }) # 4 is below 5 self.hass.states.set('test.entity', 4) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_fires_on_entity_change_over_to_below_range(self): """"Test the firing with changed entity.""" self.hass.states.set('test.entity', 11) self.hass.pool.block_till_done() assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'below': 10, 'above': 5, }, 'action': { 'service': 'test.automation' } } }) # 9 is below 10 self.hass.states.set('test.entity', 9) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_fires_on_entity_change_over_to_below_above_range(self): """"Test the firing with changed entity.""" self.hass.states.set('test.entity', 11) self.hass.pool.block_till_done() assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'below': 10, 'above': 5, }, 'action': { 'service': 'test.automation' } } }) # 4 is below 5 so it should not fire self.hass.states.set('test.entity', 4) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_not_fires_if_entity_not_match(self): """"Test if not fired with non matching entity.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 
'test.another_entity', 'below': 100, }, 'action': { 'service': 'test.automation' } } }) self.hass.states.set('test.entity', 11) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_fires_on_entity_change_below_with_attribute(self): """"Test attributes change.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 9 is below 10 self.hass.states.set('test.entity', 9, {'test_attribute': 11}) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_not_fires_on_entity_change_not_below_with_attribute(self): """"Test attributes.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 11 is not below 10 self.hass.states.set('test.entity', 11, {'test_attribute': 9}) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_fires_on_attribute_change_with_attribute_below(self): """"Test attributes change.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'value_template': '{{ state.attributes.test_attribute }}', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 9 is below 10 self.hass.states.set('test.entity', 'entity', {'test_attribute': 9}) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_not_fires_on_attribute_change_with_attribute_not_below(self): """"Test attributes change.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'value_template': '{{ state.attributes.test_attribute }}', 'below': 10, }, 'action': { 'service': 
'test.automation' } } }) # 11 is not below 10 self.hass.states.set('test.entity', 'entity', {'test_attribute': 11}) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_not_fires_on_entity_change_with_attribute_below(self): """"Test attributes change.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'value_template': '{{ state.attributes.test_attribute }}', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 11 is not below 10, entity state value should not be tested self.hass.states.set('test.entity', '9', {'test_attribute': 11}) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_not_fires_on_entity_change_with_not_attribute_below(self): """"Test attributes change.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'value_template': '{{ state.attributes.test_attribute }}', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 11 is not below 10, entity state value should not be tested self.hass.states.set('test.entity', 'entity') self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_fires_on_attr_change_with_attribute_below_and_multiple_attr(self): """"Test attributes change.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'value_template': '{{ state.attributes.test_attribute }}', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 9 is not below 10 self.hass.states.set('test.entity', 'entity', {'test_attribute': 9, 'not_test_attribute': 11}) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_template_list(self): """"Test template list.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 
'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'value_template': '{{ state.attributes.test_attribute[2] }}', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 3 is below 10 self.hass.states.set('test.entity', 'entity', {'test_attribute': [11, 15, 3]}) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_template_string(self): """"Test template string.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'value_template': '{{ state.attributes.test_attribute | multiply(10) }}', 'below': 10, }, 'action': { 'service': 'test.automation', 'data_template': { 'some': '{{ trigger.%s }}' % '}} - {{ trigger.'.join(( 'platform', 'entity_id', 'below', 'above', 'from_state.state', 'to_state.state')) }, } } }) self.hass.states.set('test.entity', 'test state 1', {'test_attribute': '1.2'}) self.hass.pool.block_till_done() self.hass.states.set('test.entity', 'test state 2', {'test_attribute': '0.9'}) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) self.assertEqual( 'numeric_state - test.entity - 10.0 - None - test state 1 - ' 'test state 2', self.calls[0].data['some']) def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr(self): """"Test if not fired changed attributes.""" assert _setup_component(self.hass, automation.DOMAIN, { automation.DOMAIN: { 'trigger': { 'platform': 'numeric_state', 'entity_id': 'test.entity', 'value_template': '{{ state.attributes.test_attribute }}', 'below': 10, }, 'action': { 'service': 'test.automation' } } }) # 11 is not below 10 self.hass.states.set('test.entity', 'entity', {'test_attribute': 11, 'not_test_attribute': 9}) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_action(self): """"Test if action.""" entity_id = 'domain.test_entity' test_state = 10 assert _setup_component(self.hass, automation.DOMAIN, { 
automation.DOMAIN: { 'trigger': { 'platform': 'event', 'event_type': 'test_event', }, 'condition': { 'platform': 'numeric_state', 'entity_id': entity_id, 'above': test_state, 'below': test_state + 2 }, 'action': { 'service': 'test.automation' } } }) self.hass.states.set(entity_id, test_state) self.hass.bus.fire('test_event') self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) self.hass.states.set(entity_id, test_state - 1) self.hass.bus.fire('test_event') self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) self.hass.states.set(entity_id, test_state + 1) self.hass.bus.fire('test_event') self.hass.pool.block_till_done() self.assertEqual(2, len(self.calls))
# Developed by Aditya Atluri
# Date: 18 Jan 2014
# Mail: pyurutu@gmail.com
# This file contains the OpenCL implementation of the Python Code
# It converts Python Code to OpenCL code
# Modified: 15 Jun 2014

import inspect,shlex
import numpy as np
import execl
import threads, blocks, declare, grammar
import device


class cl_test:
    """Translates a decorated Python function into an OpenCL kernel.

    The constructor captures the Python source of the target function;
    execute() then tokenizes it with shlex and emits equivalent OpenCL C
    into self.kernel, finally running it through execl.cl_exe (or
    returning the kernel text when return_kernel is True).

    NOTE(review): this is Python 2 code (print statements, `long`).
    """
    # Translation state shared across the passes. NOTE(review): these are
    # class attributes, so mutable ones (lists) are shared between
    # instances — presumably one cl_test per kernel launch; verify.
    arguments = []                          # kernel argument tokens seen so far
    arg_nam = [""]                          # argument names (trailing "" slot is a cursor)
    returns = []                            # names listed after 'return' in the source
    var_nam = []                            # known variable names
    var_val = []                            # values paired with var_nam
    kernel_final = []                       # completed kernel strings
    kernel_args = ""                        # accumulated kernel parameter list text
    kernel = "/*This is callable OpenCL kernel code generated by Urutu*/\n"
    device = "/*This is OpenCL device code generated by Urutu*/\n"
    threads = [1, 1, 1]                     # work-group (thread) dimensions
    blocks = [1, 1, 1]                      # grid (block) dimensions
    threads_dec = [False, False, False]     # tx/ty/tz already declared in kernel?
    blocks_dec = [False, False, False]      # bx/by/bz already declared in kernel?
    global_func = []
    code = ""                               # captured Python source text
    args = []                               # runtime arguments passed by the caller
    type_args = []                          # OpenCL-facing type of each argument
    type_vars = []                          # OpenCL type of each known variable
    words = []
    sentences = []                          # source split into lines
    __global = []                           # names declared '__global is ...'
    __shared = []                           # names declared '__shared is ...'
    __register = []                         # names declared '__register is ...'
    __constant = []                         # names declared '__constant is ...'
    tabs = 0                                # current indentation depth (brace nesting)
    count_def = 0
    num_return = 0
    count_return = 0
    is_device_code = False                  # currently buffering a nested 'def'?
    device_py = [[]]                        # tokenized nested device functions
    device_tab = 0
    device_body_buff = ""                   # device function body under construction
    device_func_name = []
    device_var_nam = [[]]
    device_type_vars = [[]]
    device_scope = False
    device_sentences = [[]]                 # raw source lines of each device function
    device_threads_dec = [False,False,False]
    device_blocks_dec = [False, False, False]
    return_kernel = False                   # True: execute() returns kernel text instead of running
    modules = []
    arg_mask = []                           # True where the matching arg is a numpy array

    def __init__(self, fn, args):
        """Captures fn's source and peels threads/blocks/flags off args.

        Leading list arguments are interpreted as [threads] or
        [threads, blocks]; a leading True requests kernel text instead of
        execution. The remainder are the kernel's runtime arguments.
        """
        stri = inspect.getsource(fn)
        sh = shlex.shlex(stri)
        self.code = stri
        self.args = args
        if type(args[0]) is list and type(args[1]) is not list:
            self.threads = args[0]
            self.args = args[1:]
        if type(args[0]) is list and type(args[1]) is list:
            self.threads = args[0]
            self.blocks = args[1]
            self.args = args[2:]
        if args[0] == True:
            self.return_kernel = True
            self.args = args[1:]
        self.typeargs()

    def decarrays(self, phrase):
        """Records names declared via '<space> is a, b, ...' statements
        into the matching address-space list (__global/__shared/...)."""
        # print phrase
        if phrase[0] == '__global' and phrase[1] == 'is':
            phrase.pop(0)
            phrase.pop(0)
            for word in phrase:
                if word != ',':
                    self.__global.append(word)
        # print self.__global
        if phrase[0] == '__shared' and phrase[1] == 'is':
            phrase.pop(0)
            phrase.pop(0)
            for word in phrase:
                if word != ',':
                    self.__shared.append(word)
        # print self.__shared
        if phrase[0] == '__register' and phrase[1] == 'is':
            phrase.pop(0)
            phrase.pop(0)
            for word in phrase:
                if word != ',':
                    self.__register.append(word)
        # print self.__register
        if phrase[0] == '__constant' and phrase[1] == 'is':
            phrase.pop(0)
            phrase.pop(0)
            for word in phrase:
                if word != ',':
                    self.__constant.append(word)
        # print self.__constant

    def typeargs(self):
        """Derives an OpenCL type string for each runtime argument and
        coerces plain Python scalars to fixed-width numpy scalars."""
        id = 0
        self.args = list(self.args)
        for arg in self.args:
            # Indexing succeeds for array-likes; scalars raise and fall
            # back to the type of the argument itself.
            try:
                j = str(type(arg[0])).split("'")
            except:
                j = str(type(arg)).split("'")
            if 'numpy' in j[1]:
                # numpy arrays become pointer types, e.g. 'int32*'.
                j = j[1].split(".")
                self.type_args.append(j[1]+"*")
                self.type_vars.append(j[1]+"*")
                self.arg_mask.append(True)
            else:
                arg_type = type(self.args[id])
                if type(1) == arg_type:
                    self.args[id] = np.int32(self.args[id])
                if type(1.0) == arg_type:
                    self.args[id] = np.float32(self.args[id])
                if type(long(1)) == arg_type:
                    self.args[id] = np.int64(self.args[id])
                # NOTE(review): j[0] here is the "<type " prefix from the
                # split, not the type name (j[1]); defargs compares
                # type_args against "int"/"float", so this looks like it
                # should be j[1] — confirm against the upstream repo.
                self.type_args.append(j[0])
                self.type_vars.append(j[0])
                self.arg_mask.append(False)
            id = id + 1

    def funcname_cl(self, control):
        """Consumes the function name token and opens the kernel header.
        Returns the token index just past the name."""
        self.func_name = self.keys[control + 1]
        self.kernel = self.kernel + "__kernel void CL_kernel("
        return control + 2

    def semi_colon(self, phrase):
        """Appends a phrase to the kernel as a terminated statement."""
        self.kernel = self.kernel + phrase + ";\n"

    def declare_workitems(self,keys,kernel):
        """Emits tx/ty/tz and bx/by/bz declarations into a device-function
        body the first time each index name appears in `keys` (a string)."""
        # print "\n\nDEC_WORKITEMS", keys, self.device_threads_dec, self.device_blocks_dec
        if keys.find('tx') > -1:
            kernel, self.device_threads_dec[0] = threads.tx(self.device_threads_dec[0], kernel)
        if keys.find('ty') > -1:
            kernel, self.device_threads_dec[1] = threads.ty(self.device_threads_dec[1], kernel)
        if keys.find('tz') > -1:
            kernel, self.device_threads_dec[2] = threads.tz(self.device_threads_dec[2], kernel)
        if keys.find('bx') > -1:
            kernel, self.device_blocks_dec[0] = blocks.bx(self.device_blocks_dec[0], kernel)
        # NOTE(review): the next check repeats 'bx' but declares 'by' —
        # 'by' is never checked, so by-declarations piggyback on bx usage.
        # Looks like a copy-paste slip; confirm before fixing.
        if keys.find('bx') > -1:
            kernel, self.device_blocks_dec[1] = blocks.by(self.device_blocks_dec[1], kernel)
        if keys.find('bz') > -1:
            kernel, self.device_blocks_dec[2] = blocks.bz(self.device_blocks_dec[2], kernel)
        return kernel

    def inspect_it(self,sentence,kernel):
        """Translates one Python source line into OpenCL and appends it
        to `kernel`, dispatching on the leading token (for/if/else/
        address-space declarations/Urmod/assignments)."""
        # print "Inside inspect_it()",sentence,kernel
        phrase = sentence.split('\t')
        if phrase.count('#') > 0:
            return
        # Leading empty fields from the tab split give the indent depth;
        # emit closing braces when the indent decreases.
        tab = phrase.count('')
##        if tab > self.tabs and tab != len(phrase):
#            for j in range(tab - self.tabs):
#                kernel = kernel + "{\n"
        if tab < self.tabs:
            for j in range(self.tabs - tab):
                kernel = kernel + "}\n"
        self.tabs = phrase.count('')
        sh = shlex.shlex(phrase[-1])
        i = sh.get_token()
        # Skip decorators, nested defs, blanks and comment markers.
        if i == '@' or i == 'def' or i == '' or i == '#' or i == '//' or i == '"""':
            return kernel
        stmt = []
        while i is not sh.eof:
            stmt.append(i)
            i = sh.get_token()
        # print stmt
        # A call to a previously-captured device function: materialize its
        # body ahead of the kernel and emit the call itself.
        for j in self.device_func_name:
            if stmt.count(j) > 0:
                kernel += self.device_create_func(self.device_func_name.index(j),j, stmt)
                kernel = self.device_body_buff + "}\n" + kernel
                self.device_body_buff = ""
                return kernel
        # Declare thread/block index variables on first use.
        if self.keys.count('tx') > 0 or self.keys.count("__shared"):
            kernel, self.threads_dec[0] = threads.tx(self.threads_dec[0], kernel)
        if self.keys.count('ty') > 0:
            kernel, self.threads_dec[1] = threads.ty(self.threads_dec[1], kernel)
        if self.keys.count('tz') > 0:
            kernel, self.threads_dec[2] = threads.tz(self.threads_dec[2], kernel)
        if self.keys.count('bx') > 0:
            kernel, self.blocks_dec[0] = blocks.bx(self.blocks_dec[0], kernel)
        # NOTE(review): 'bx' is checked twice and 'by' never is — same
        # copy-paste slip as in declare_workitems; confirm before fixing.
        if self.keys.count('bx') > 0:
            kernel, self.blocks_dec[1] = blocks.by(self.blocks_dec[1], kernel)
        if self.keys.count('bz') > 0:
            kernel, self.blocks_dec[2] = blocks.bz(self.blocks_dec[2], kernel)
        if stmt.count('Tx') == 1 or stmt.count('Ty') == 1 or stmt.count('Tz') == 1:
            threads.threads_decl(stmt, self.var_nam, self.var_val, self.threads, self.type_vars)
            return kernel
        if stmt.count('Bx') == 1 or stmt.count('By') == 1 or stmt.count('Bz') == 1:
            blocks.blocks_decl(stmt, self.var_nam, self.var_val, self.blocks)
            return kernel
        if stmt[0] == '__global' or stmt[0] == '__shared' or stmt[0] == '__register' or stmt[0] == '__constant' :
            self.decarrays(stmt)
            return kernel
        if stmt.count('for') > 0:
            kernel += self._for(stmt, kernel)
            return kernel
        if stmt.count('if') > 0:
            return kernel + grammar.keyword(stmt, kernel)
        if stmt.count('else') > 0:
            kernel = kernel + "else{\n "
            self.tabs+=1
            return kernel
        if stmt.count('Urmod') > 0:
            # Module boundary: close the current kernel and start a new one
            # with the same argument list.
            self.modules.append(stmt[2:])
            self.kernel_final.append(kernel+"}")
            kernel = "__kernel void" + self.global_func + "_" + str(len(self.modules)+1) + "(" + self.kernel_args + "){\n"
            kernel = "#pragma OPENCL EXTENSION cl_khr_fp64 : enable\n" + kernel
            self.threads_dec = [False, False, False]
            self.blocks_dec = [False, False, False]
            self.kernel = kernel
            return kernel
        else:
            # print "Entering Checkvars"
            return self.checkvars(stmt,phrase[-1],kernel)
        # print stmt, self.tabs

    def Urmod(self,stmt,cl_args):
        """Resolves a 'module.function(...)' call's argument strings and
        delegates to the module's execute hook."""
        id = stmt.index('(')
        function = stmt[id-1]
        module = stmt[id-3]
        string_args = [""]
        var = ""
        for i in stmt[id+1:-1]:
            if i is not ',':
                if self.arg_nam.count(i) > 0:
                    string_args[-1] = "args[" + str(self.arg_nam.index(i)) + "]"
                elif i == "size":
                    size = self.args[self.arg_nam.index(stmt[stmt.index(i)-2])].size
                    string_args[-1] = str(size)
                else:
                    string_args[-1] += str(i)
            else:
                string_args.append("")
        # NOTE(review): `cu_args` is undefined here — the parameter is
        # `cl_args` — so this line raises NameError if ever reached.
        module.execute(module,function,string_args,cu_args)

    def device_create_func(self,index,name,stmt):
        """Generates the body of the named device function into
        device_body_buff and returns the call statement text."""
        # print "Inside DCF",name, stmt, self.device_py
        device_keys = self.device_py[index]
        self.device_scope = True
        # print "Device Keys",device_keys
        if stmt[stmt.index("(")+1] == "[" and stmt[stmt.index("(")+3] == "]":
            print "Dynamic Parallelism"
        else:
            self.device_funcname(stmt[:],device_keys[device_keys.index('(')+1:device_keys.index(')')],True)
        # print self.device_body_buff
        # print "Inititiate threads"
        # print stmt
        index = self.device_func_name.index(name)
        # First pass declares work-item indices; second translates the body.
        for i in self.device_sentences[index]:
            self.device_body_buff = self.declare_workitems(i,self.device_body_buff)
        # print "Inside CREATING DEVICE BODY"
        for i in self.device_sentences[index]:
            self.device_body_buff = self.inspect_it(i,self.device_body_buff)
        # print self.device_body_buff
        self.device_scope = False
        self.kernel = self.device_body_buff + self.kernel
        self.device_threads_dec = [False, False, False]
        self.device_blocks_dec = [False, False, False]
        return self.stringize(stmt) + "; \n"

    def device_funcname(self,stmt,args,device):
        """Builds the device function's signature (return type, name and
        typed parameter list) into device_body_buff from the call-site
        tokens `stmt` and the def-site parameter names `args`."""
        # print "Inside device_funcname: ", stmt
        while ',' in args:
            args.remove(',')
        if device == True:
            self.device_body_buff = ""
        else:
            self.device_body_buff = "__global__ "
        index = stmt.index("(")
        tmp = " " + str(stmt[index-1]) + "("
        # Return type: void for bare calls, otherwise taken from the
        # assigned variable (pointer types drop the trailing '*').
        if self.device_func_name.count(stmt[0]) == 1:
            self.device_body_buff += "void "
        elif stmt[1] == "[":
            self.device_body_buff += self.type_vars[self.var_nam.index(stmt[0])][:-1]
        else:
            self.device_body_buff += self.type_vars[self.var_nam.index(stmt[0])]
        self.device_body_buff += tmp
        # print stmt,index
        idx = 0
        stmt[index], stmt[-1] = ',',','
        l=stmt.remove(stmt[0])
        # print args
        for j in range(len(stmt[index:])):
            i = stmt[j+index]
            if i is not ",":
                # print i
                if self.var_nam.count(i) == 1:
                    if stmt[stmt.index(i)+1] == '[':
                        # Indexed array argument: pass the element type.
                        type_var = self.type_vars[self.var_nam.index(i)][:-1]
                        self.device_body_buff += type_var + " " + args[idx] + ", "
                        self.device_var_nam[-1].append(args[idx])
                        self.device_type_vars[-1].append(type_var)
                    else:
                        # Whole-variable argument: prefix its address space.
                        type_var = self.type_vars[self.var_nam.index(i)]
                        if self.__shared.count(i) == 1:
                            self.device_body_buff += "__local "
                        elif self.__register.count(i) == 1:
                            self.device_body_buff += "__private "
                        elif self.__constant.count(i) == 1:
                            self.device_body_buff += "__constant "
                        else:
                            self.device_body_buff += "__global "
                        self.device_body_buff += type_var + " " + args[idx] + ", "
                        self.device_var_nam[-1].append(args[idx])
                        self.device_type_vars[-1].append(type_var)
                else:
                    # Literal argument: infer float (has a '.') or int.
                    if stmt[j+1+index] is '.':
                        self.device_body_buff += "float " + args[idx] + ", "
                        j+=2
                        self.device_var_nam[-1].append(args[idx])
                        self.device_type_vars[-1].append("float")
                    elif type(stmt[j]) is int or stmt[j] is '-':
                        self.device_body_buff += "int " + args[idx] + ", "
                        self.device_var_nam[-1].append(args[idx])
                        self.device_type_vars[-1].append("int")
            else:
                idx += 1
        dec_threads = True
        # Drop the trailing ", " and close the parameter list.
        self.device_body_buff = self.device_body_buff[:-2]
        self.device_body_buff +="){\n"
        # print "DBB",self.device_body_buff

    def checkchars(self, var):
        return False

    # convert the list into string
    def stringize(self, stmt):
        phrase = ''
        for i in stmt:
            phrase = phrase + str(i)
        return phrase

    # Checking the type of variable to be created
    def checktype(self,var,val):
        """Infers the C type of an assignment from its value tokens.
        Returns (type, name, suffix, value) string fragments."""
        # print var, val
        if val.count('.') == 1:
            return 'float ', self.stringize(var[:]) , '', self.stringize(val[:])
        try:
            int(self.stringize(val))
            return 'int ', self.stringize(var[:]), '', self.stringize(val[:])
        except:
            if self.stringize(val).find('"') != -1:
                return 'char ', self.stringize(var[:]), '[]', self.stringize(val[:])
            elif self.stringize(val).find("'") != -1:
                # Rewrap single-quoted literals in double quotes for C.
                val = str(val[0]).split("'")
                quote = ['"']
                quote.append(val[1])
                quote.append('"')
                return 'char ' , self.stringize(var[:]), '[]', self.stringize(quote)
            else:
                # Not a literal: emit a plain assignment, no declaration.
                return '','',self.stringize(var[:]), self.stringize(val[:])

    # a = 10 type variables are declared here!
    def decvars(self,stmt,phrase,kernel):
        """Emits declarations/assignments (and return statements) for one
        tokenized source line into the kernel text."""
        # print "Inside Dec vars",kernel,phrase,stmt
        if kernel[-2] == '}':
            kernel = kernel[:-2]
#            kernel += "\n"
        if stmt.count('return') == 1:
            kernel += phrase+";\n"
            # print "Adding return",kernel
            return kernel
        elif stmt.count('=') == 0:
            return kernel
        else:
            # Split a multi-assignment 'a, b = x, y' at the commas on
            # either side of '='.
            ideq = stmt.index('=')
            commavarid = [-1]
            commavalid = [ideq]
            tmp = stmt
            for k in tmp:
                if k == ',' and tmp.index(k) < ideq:
                    commavarid.append(tmp.index(k))
                    tmp[tmp.index(k)] = ''
                if k == ',' and tmp.index(k) > ideq:
                    commavalid.append(tmp.index(k))
                    tmp[tmp.index(k)] = ''
            commacount = len(commavarid)
            commavalid.append(len(tmp))
            commavarid.append(ideq)
            for i in range(commacount):
                if self.var_nam.count(i) == 0 and stmt.index('=') > i:
                    ret_checktype = self.checktype(stmt[commavarid[i]+1:commavarid[i+1]],stmt[commavalid[i]+1:commavalid[i+1]])
                    # print "ret_checktype",ret_checktype
                    kernel += ret_checktype[0] + ret_checktype[1] + ret_checktype[2] + " = " + ret_checktype[3] + ";\n"
        # print "Exiting decvars",kernel
        return kernel

    # CHECKVARS here!!
    def checkvars(self,stmt,phrase,kernel):
        """Routes a statement to the proper declarer based on which
        address-space list (if any) the target name belongs to."""
        # print "Inside Check Vars",phrase, stmt, kernel
        if self.__shared.count(stmt[0]) == 1 and self.var_nam.count(stmt[0]) == 0:
            kernel, self.var_nam, self.type_vars = declare.decshared(stmt, self.type_vars, self.var_nam, self.args, kernel)
            return kernel
        elif self.__global.count(stmt[0]) == 1 and self.var_nam.count(stmt[0]) == 0:
            kernel, self.var_nam, self.type_vars = declare.decglobal(stmt, self.type_vars, self.var_nam, self.args, kernel)
            return kernel
        elif self.__register.count(stmt[0]) == 1 and self.var_nam.count(stmt[0]) == 0:
            kernel, self.var_nam, self.type_vars = declare.decregister(stmt, self.type_vars, self.var_nam, self.args, kernel)
            return kernel
        elif self.__constant.count(stmt[0]) == 1 and self.var_nam.count(stmt[0]) == 0:
            kernel, self.var_nam, self.type_vars = declare.decconstant(stmt, self.type_vars, self.var_nam, self.args, kernel)
            return kernel
        else:
            kernel = self.decvars(stmt,phrase,kernel)
            return kernel

    # body (self) here!
    def body(self):
        """Walks the source lines, buffering nested device 'def's for
        later expansion and translating everything else via inspect_it."""
        for sentence in self.sentences:
            # NOTE(review): comparing a list to an int — always True in
            # Python 2, a TypeError in Python 3; likely meant
            # len(sentence.split('\t')) > 1.
            if sentence.split('\t')>1:
                phrase = sentence.split('\t')
                tabs = phrase.count('')
                # print "Inside Body",phrase, tabs
                sh = shlex.shlex(phrase[-1])
                i = sh.get_token()
                # print i
                if i == "def":
                    # A nested def starts a device function: capture its
                    # tokens and open a sentence buffer for its body.
                    # print "In DEF"
                    # print self.device_py
                    self.is_device_code = True
                    self.device_tab = tabs
                    self.device_py.append([i])
                    if self.device_py[0] == []:
                        self.device_py.pop(0)
                    i = sh.get_token()
                    self.device_func_name.append(i)
                    # print "DEC",self.device_func_name, self.device_py
                    while i is not sh.eof:
                        self.device_py[-1].append(i)
                        i = sh.get_token()
                    self.device_sentences.append([])
                elif self.device_tab < tabs and self.is_device_code == True:
                    # Deeper-indented lines belong to the device function.
                    for j in phrase[tabs:]:
                        self.device_sentences[-1].append(j)
                    if self.device_sentences[0] == []:
                        self.device_sentences.pop(0)
                    # print "Body!!", self.device_py, self.device_sentences
                else:
                    self.kernel = self.inspect_it(sentence,self.kernel)
        return

    def defargs(self,comma,control,kernel):
        """Appends one kernel parameter (with its OpenCL type and address
        space) for the argument name at token index `control`; `comma`
        says whether a separating comma is needed."""
        if self.arguments.count(self.keys[control]) < 2:
            self.arguments.append(self.keys[control])
        kernel_arg = ""
        if comma == True:
            if "int64*" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = ", __global long* " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "long*"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "int32*" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = ", __global int* " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "int*"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "float32*" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = ", __global float* " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "float*"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "float64*" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = ", __global double* " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "double*"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "int" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = ", int " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "int"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "float" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = ", float " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "float"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
        elif comma == False:
            # Same mapping as above, without the leading comma (first arg).
            if "int64*" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = " __global long* " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "long*"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "int32*" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = " __global int* " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "int*"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "float32*" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = " __global float* " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "float*"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "float64*" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = " __global double* " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "double*"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "int" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = " int " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "int"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
            elif "float" == self.type_args[len(self.arguments) - 1]:
                kernel_arg = " float " + self.keys[control]
                self.type_vars[len(self.arguments) - 1] = "float"
                self.arg_nam[-1] = self.keys[control]
                self.arg_nam.append("")
        self.kernel_args += kernel_arg
        kernel += kernel_arg
        self.var_nam.append(self.keys[control])
        return kernel

    def _for(self, words, kernel):
        """Translates a Python 'for' line into a C for-loop header.
        Handles 'for i in range(len(x))', 'for i in range(expr)' and
        'for x in arr' (via a synthetic index variable)."""
        iterator = words[1]
        if words.count('range') > 0:
            if words.count(',') == 0:
                ind = words.index('(')
                if words[ind+1] == "len":
                    # range(len(arr)): bound by sizeof(arr)/sizeof(arr[0]).
                    str_for = "for(int " + str(iterator)
                    var_for = words[ind+3]
                    str_for += " = 0; " + str(iterator) + " < sizeof(" + str(var_for) + ")/sizeof(" + str(var_for) + "[0])"
                    if words[words[ind:].index(')')+ind+1] != ")":
                        ind_closed = words[ind+1:].index(")")+ind+2
                        print words[ind_closed]
                        ind_total_close = words[ind+1:].index(")")+ind_closed-1
                        print words[ind_total_close]
                        for i in range(ind_closed, ind_total_close):
                            str_for += str(words[i])
                    str_for += "; " + str(iterator) + "++){\n"
                    type_var_for = str(self.type_vars[self.var_nam.index(var_for)][:-1])
                    self.var_nam.append(str(iterator))
                    self.type_vars.append(type_var_for)
                else:
                    # range(expr): copy the expression tokens as the bound.
                    str_for = "for(int " + str(iterator)
                    var_for = words[ind+3]
                    str_for += " = 0; " + str(iterator) + " < "
                    range_id = words.index("range")
                    end_id = words.index(":")
                    for i in range(range_id+1,end_id):
                        str_for += str(words[i])
                    str_for += "; " + str(iterator) + "++){\n"
#                    type_var_for = str(self.type_vars[self.var_nam.index(var_for)][:-1])
#                    self.var_nam.append(str(iterator))
#                    self.type_vars.append(type_var_for)
#        if words.count(',')
        else:
            # 'for x in arr': iterate a hidden index _x and bind x each pass.
            str_for = "for(int _" + str(iterator)
            var_for = words[words.index('in')+1]
            str_for += " = 0; " + "_"+str(iterator) + " < sizeof(" + str(var_for) + ")/sizeof(" + str(var_for) + "[0]); _" + str(iterator) + "++){\n"
            type_var_for = str(self.type_vars[self.var_nam.index(var_for)][:-1])
            str_for += type_var_for + " " + str(iterator) + " = " + str(var_for) + "[_" + str(iterator) + "];\n"
            self.var_nam.append(str(iterator))
            self.type_vars.append(type_var_for)
        # print self.type_vars, self.var_nam
        self.tabs += 1
        return str_for

    def execute(self):
        """Tokenizes the captured source, emits the kernel signature and
        body, and either runs the kernel via execl or returns its text."""
        tmp = execl.cl_exe()
        sh = shlex.shlex(self.code)
        i = sh.get_token()
        self.keys = [i]
        while i is not sh.eof:
            i = sh.get_token()
            self.keys.append(i)
        self.num_return = self.keys.count('return')
        # Walk 'def name(' and translate each parameter into a kernel arg.
        control = self.keys.index('def')
        control = self.funcname_cl(control)
        comma = False
        if self.keys[control] == '(':
            control = control + 1
            while self.keys[control] != ')':
                if self.keys[control] == ',':
                    control = control + 1
                self.kernel = self.defargs(comma, control, self.kernel)
                comma = True
                control = control + 1
        # Collect the names listed after the final 'return'.
        ret = len(self.keys) - self.keys[::-1].index('return')
        while self.keys[ret] != '':
            if self.keys[ret] == ',':
                ret = ret + 1
            self.returns.append(self.keys[ret])
            ret = ret + 1
        self.kernel = self.kernel + "){\n"
        control = control + 1
        if self.keys[control] == ':':
            control = control + 1
        # Drop the decorator line and the trailing return line before
        # translating the body.
        self.sentences = self.code.split("\n")
        self.sentences.remove(self.sentences[1])
        self.sentences.remove(self.sentences[-2])
        # print self.kernel, "Entering body()"
        self.body()
        self.kernel = "#pragma OPENCL EXTENSION cl_khr_fp64 : enable\n"+self.kernel + "}"
        self.arg_nam.pop(-1)
        self.kernel_final.append(self.kernel)
        # self.print_cl()
        # print self.var_nam, self.type_vars, self.__shared, self.__global
        # print self.kernel
        if self.return_kernel == False:
            tmp.start(self.args,self.arg_nam, self.arg_mask)
            tmp.exe_cl(self.kernel_final[0], self.func_name, self.threads, self.blocks)
            return tmp.get_returns(self.returns)
        elif self.return_kernel == True:
            return self.kernel_final

    def print_cl(self):
        """Debug helper: dumps the translator's state to stdout."""
        print "In print_cl:"
        print self.type_args
        print self.arguments
        print self.returns
        print self.var_nam
        print self.var_val
        print self.kernel
        print self.threads
        print self.blocks
        print self.func_name
        print self.code
        print self.words
        print self.sentences
# Copyright (c) 2018, Henrique Miranda
# All rights reserved.
#
# This file is part of the yambopy project
#
from yambopy import *
from math import sqrt
from time import time
from yambopy.tools.string import marquee
from yambopy.tools.funcs import abs2, lorentzian, gaussian

class YamboDipolesDB():
    """
    Class to read the dipoles databases from the ``ndb.dip*`` files.

    Can be used to, for example, plot the imaginary part of the dielectric
    function, which corresponds to the optical absorption.
    """
    def __init__(self, lattice, save='SAVE', filename='ndb.dip_iR_and_P',
                 dip_type='iR', field_dir=(1, 0, 0), field_dir3=(0, 0, 1)):
        """
        Open ``ndb.dip*``, read the header/band metadata, then read and
        expand the dipoles to the full Brillouin zone.

        Parameters
        ----------
        lattice : object
            Lattice database (provides k-points and symmetries; presumably a
            YamboLatticeDB — confirm against callers).
        save : str
            Directory containing the database file.
        filename : str
            Name of the dipoles database file.
        dip_type : str
            Which dipole variable to read (e.g. 'iR' or 'P').
        field_dir, field_dir3 : sequence of 3 floats
            Polarization directions forwarded to ``expandDipoles``.
            (Defaults are tuples rather than lists to avoid shared mutable
            default arguments; values are equivalent.)
        """
        self.lattice = lattice
        self.filename = "%s/%s"%(save,filename)

        #read dipoles
        try:
            database = Dataset(self.filename, 'r')
        except Exception as exc:
            # Preserve the historical IOError interface for callers, but
            # keep the original failure chained for debugging.
            raise IOError("Error opening %s in YamboDipolesDB"%self.filename) from exc

        self.nq_ibz, self.nq_bz, self.nk_ibz, self.nk_bz = database.variables['HEAD_R_LATT'][:].astype(int)
        self.spin = database.variables['SPIN_VARS'][1].astype(int)

        # indexv is the maximum partially occupied band
        # indexc is the minimum partially empty band
        self.min_band, self.max_band, self.indexv, self.indexc = database.variables['PARS'][:4].astype(int)
        database.close()

        # determine the number of bands
        self.nbands  = self.max_band-self.min_band+1
        self.nbandsv = self.indexv-self.min_band+1
        self.nbandsc = self.max_band-self.indexc+1

        #read the database
        self.dipoles = self.readDB(dip_type)

        #expand the dipoles to the full brillouin zone
        # Fix: the field directions passed to the constructor were
        # previously ignored here (expandDipoles always used its own
        # defaults); now they are forwarded.
        self.expandDipoles(self.dipoles, field_dir=field_dir, field_dir3=field_dir3)

    def normalize(self,electrons):
        """
        Use the electrons to normalize the dipole matrix elements.

        Divides each dipole by the corresponding eigenvalue difference
        (transition energy); degenerate transitions (zero difference)
        are zeroed out. Mutates ``self.dipoles`` in place.
        """
        eiv = electrons.eigenvalues
        nkpoints, nbands = eiv.shape
        for nk in range(nkpoints):

            eivk = eiv[nk]

            #create eigenvalues differences arrays
            norm = np.array([ [ec-ev for ev in eivk] for ec in eivk  ])

            #normalize
            for i,j in product(list(range(nbands)),repeat=2):
                if norm[i,j] == 0:
                    self.dipoles[nk,:,i,j] = 0
                else:
                    self.dipoles[nk,:,i,j] = self.dipoles[nk,:,i,j]/norm[i,j]
        # (A dead local assignment `dipoles = self.dipoles` was removed here.)

    def readDB(self,dip_type):
        """
        Read the dipoles from the (non-fragmented) database.

        The dipole matrix has the following indexes:
        [nkpoints, cartesian directions, nspin, nbands conduction, nbands valence]
        """
        #check if output is in the old format
        fragmentname = "%s_fragment_1"%(self.filename)
        if os.path.isfile(fragmentname):
            return self.readDB_oldformat(dip_type)

        self.dip_type = dip_type
        dipoles = np.zeros([self.nk_ibz,3,self.nbandsc,self.nbandsv],dtype=np.complex64)

        database = Dataset(self.filename)
        dip = np.squeeze(database.variables['DIP_%s'%(dip_type)])
        dip = (dip[:,:,:,:,0]+1j*dip[:,:,:,:,1]) # Read as nk,nv,nc,ir
        dipoles = np.swapaxes(dip,1,3) # Swap indices as mentioned in the docstring
        database.close()

        return dipoles

    def readDB_oldformat(self,dip_type):
        """
        Legacy function for compatibility (fragmented databases).

        The dipole matrix has the following indexes:
        [nkpoints, cartesian directions, nspin, nbands conduction, nbands valence]
        """
        self.dip_type = dip_type
        dipoles = np.zeros([self.nk_ibz,3,self.nbandsc,self.nbandsv],dtype=np.complex64)

        #check dipole db format
        filename = "%s_fragment_1"%(self.filename)
        database = Dataset(filename)
        tag1 = 'DIP_iR_k_0001_spin_0001'
        tag2 = 'DIP_iR_k_0001_xyz_0001_spin_0001'
        if tag1 in list(database.variables.keys()):
            dipoles_format = 1
        elif tag2 in list(database.variables.keys()):
            dipoles_format = 2
        else:
            # Fix: previously an unrecognized layout left `dipoles_format`
            # unbound and crashed later with a NameError.
            database.close()
            raise IOError("Unrecognized dipoles database format in %s"%filename)
        database.close()

        for nk in range(self.nk_ibz):

            #open database for each k-point
            filename = "%s_fragment_%d"%(self.filename,nk+1)
            database = Dataset(filename)

            if dipoles_format == 1:
                dip = database.variables['DIP_%s_k_%04d_spin_%04d'%(dip_type,nk+1,1)]
                dip = (dip[:,:,:,0]+1j*dip[:,:,:,1])
                for i in range(3):
                    dipoles[nk,i] = dip[:,:,i].T
            elif dipoles_format == 2:
                for i in range(3):
                    dip = database.variables['DIP_%s_k_%04d_xyz_%04d_spin_%04d'%(dip_type,nk+1,i+1,1)][:]
                    dipoles[nk,i] = dip[0].T+dip[1].T*1j

            #close database
            database.close()

        return dipoles

    def expandDipoles(self,dipoles=None,field_dir=(1,0,0),field_dir3=(0,0,1)):
        """
        Expand dipoles from the IBZ to the FBZ.

        Parameters
        ----------
        dipoles : ndarray, None
            IBZ dipoles; defaults to ``self.dipoles``.
        field_dir, field_dir3 : sequence of 3 floats
            Polarization directions; normalized before use.

        Returns
        -------
        dipoles, kpts
            The (input) IBZ dipoles and the Cartesian k-points; the
            expanded dipoles are stored in ``self.dipoles``.
        """
        if dipoles is None:
            dipoles = self.dipoles

        #check if we need to expand the dipoles to the full BZ
        lattice = self.lattice
        kpts = lattice.car_kpoints
        nks  = lattice.kpoints_indexes
        nss  = lattice.symmetry_indexes

        #normalize the fields
        field_dir  = np.array(field_dir)
        field_dir  = field_dir/np.linalg.norm(field_dir)
        field_dir3 = np.array(field_dir3)
        field_dir3 = field_dir3/np.linalg.norm(field_dir3)

        #calculate polarization directions
        field_dirx = field_dir
        field_diry = np.cross(field_dir3,field_dirx)
        field_dirz = field_dir3

        #get band indexes
        nkpoints = len(nks)
        indexv = self.min_band-1
        indexc = self.indexc-1
        nbands = self.min_band+self.nbands-1

        #Note that P is Hermitian and iR anti-hermitian.
        # [FP] Other possible dipole options (i.e., velocity gauge) to be
        # checked. Treat them as not supported.
        if self.dip_type == 'P':
            factor =  1.0
        else:
            factor = -1.0

        #save dipoles in the ibz
        self.dipoles_ibz = dipoles
        #get dipoles in the full Brilouin zone
        self.dipoles = np.zeros([nkpoints,3,nbands,nbands],dtype=np.complex64)
        for nk_fbz,nk_ibz,ns in zip(list(range(nkpoints)),nks,nss):

            #if time rev we conjugate
            if lattice.time_rev_list[ns]:
                dip = np.conjugate(dipoles[nk_ibz,:,:,:])
            else:
                dip = dipoles[nk_ibz,:,:,:]

            #get symmmetry operation
            sym = lattice.sym_car[ns].T
            #get projection operation
            pro = np.array([field_dirx,field_diry,field_dirz])
            #transformation
            tra = np.dot(pro,sym)

            for c,v in product(list(range(self.nbandsc)),list(range(self.nbandsv))):
                #rotate dipoles
                self.dipoles[nk_fbz,:,indexc+c,indexv+v] = np.dot(tra,dip[:,c,v])

            #make hermitian
            for c,v in product(list(range(self.nbandsc)),list(range(self.nbandsv))):
                self.dipoles[nk_fbz,:,indexv+v,indexc+c] = factor*np.conjugate(self.dipoles[nk_fbz,:,indexc+c,indexv+v])

        self.field_dirx = field_dirx
        self.field_diry = field_diry
        self.field_dirz = field_dirz

        return dipoles, kpts

    def plot(self,ax,kpoint=0,dir=0,func=abs2):
        """Plot ``func`` of the dipoles at one k-point and direction."""
        return ax.matshow(func(self.dipoles[kpoint,dir]))

    def ip_eps2(self,electrons,pol=1,ntot_dip=-1,GWshift=0.,broad=0.1,broadtype='l',nbnds=(-1,-1),emin=0.,emax=10.,esteps=500):
        """
        Compute independent-particle absorption (by Fulvio Paleari).

        electrons -> electrons YamboElectronsDB
        GWshift   -> rigid GW shift in eV
        broad     -> broadening of peaks
        broadtype -> 'l' is lorentzian, 'g' is gaussian
        nbnds     -> number of [valence, conduction] bands included starting
                     from Fermi level. Default means all are included
        emin,emax,esteps -> frequency range for the plot

        Returns
        -------
        freq, eps2 : ndarray
            Frequency grid and imaginary part of the dielectric function.
        """
        #get eigenvalues and weights of electrons
        eiv = electrons.eigenvalues
        print(eiv.shape)
        weights = electrons.weights
        nv = electrons.nbandsv
        nc = electrons.nbandsc

        #get dipoles
        dipoles = self.dipoles

        #get frequencies and im
        freq = np.linspace(emin,emax,esteps)
        eps2 = np.zeros([len(freq)])

        #Cut bands to the maximum number used for the dipoles
        if ntot_dip>0:
            eiv = eiv[:,:ntot_dip]
            nc  = ntot_dip-nv

        #Print band gap values and apply GW_shift
        electrons.energy_gaps(GWshift)

        #Check bands to include in the calculation
        # Fix: copy before assigning so a caller-supplied (or the default)
        # `nbnds` sequence is never mutated in place. The old code wrote
        # into the mutable default list, leaking state across calls.
        nbnds = list(nbnds)
        if nbnds[0]<0: nbnds[0]=nv
        if nbnds[1]<0: nbnds[1]=nc
        iv = nv-nbnds[0] #first valence
        lc = nv+nbnds[1] #last conduction

        #choose broadening
        if "l" in broadtype:
            broadening = lorentzian
        else:
            broadening = gaussian

        na = np.newaxis
        #calculate epsilon
        for c,v in product(list(range(nv,lc)),list(range(iv,nv))):

            #get electron-hole energy and dipoles
            ecv  = eiv[:,c]-eiv[:,v]
            dip2 = abs2(dipoles[:,pol,c-nv,v])

            #make dimensions match
            dip2a = dip2[na,:]
            ecva  = ecv[na,:]
            freqa = freq[:,na]
            wa    = weights[na,:]

            #calculate the lorentzians
            broadw = broadening(freqa,ecva,broad)

            #scale broadening with dipoles and weights
            epsk = wa*dip2a*broadw

            #integrate over kpoints
            eps2 += np.sum(epsk,axis=1)

        return freq, eps2

    def __str__(self):
        lines = []; app = lines.append
        app(marquee(self.__class__.__name__))
        app("kpoints:")
        app("nk_ibz : %d"%self.nk_ibz)
        app("nk_bz  : %d"%self.nk_bz)
        app("bands:")
        app("nbands : %d" % self.nbands)
        app("nbandsv: %d" % self.nbandsv)
        app("nbandsc: %d" % self.nbandsc)
        app("indexv : %d" % (self.min_band-1))
        app("indexc : %d" % (self.indexc-1))
        app("field_dirx: %10.6lf %10.6lf %10.6lf"%tuple(self.field_dirx))
        app("field_diry: %10.6lf %10.6lf %10.6lf"%tuple(self.field_diry))
        app("field_dirz: %10.6lf %10.6lf %10.6lf"%tuple(self.field_dirz))
        return "\n".join(lines)

if __name__ == "__main__":
    # NOTE(review): this demo block is broken as written: `DipolesDB` is not
    # defined anywhere (the class above is `YamboDipolesDB`, whose
    # constructor requires a `lattice`), and no `get_databases` method
    # exists. Left unchanged pending a correct usage example.
    ddb = DipolesDB()
    ddb.get_databases()
    print(ddb)
""" Script that computes and displays audio recording statistics. The statistics computed for a recording include an *ampligram*. An ampligram is an image whose X axis is time, whose Y axis is waveform amplitude, and in which color indicates the sample count in a particular time/amplitude cell. It is useful for assessing the gain setting of a recording, since it illustrates how much of the available amplitude dynamic range the recording uses, and how often there is clipping in the recording. An ampligram can be *one-sided* or *two-sided*. A one-sided ampligram plots counts of absolute sample values, and its Y axis ranges from zero (the minimum absolute sample value) to one (the maximum absolute sample value). A two-sided ampligram plots counts of non-absolute sample values, and its Y axis ranges from minus one (the minimum sample value) to one (the maximum sample value). """ from collections import defaultdict from pathlib import Path import math import time from matplotlib import cm from matplotlib.backends.backend_pdf import PdfPages from matplotlib.colors import LogNorm import matplotlib.pyplot as plt import numpy as np from vesper.signal.wave_audio_file import WaveAudioFileReader from vesper.util.bunch import Bunch TEST_MODE_ENABLED = False TEST_MODE_INTERVAL_COUNT_LIMIT = 50 # RECORDING_DIR_PATH = \ # Path('/Volumes/Recordings2/Nocturnal Bird Migration/Harold/2020') # # RECORDING_FILE_NAMES = ( # # ('Harold_2020-10-18_23.18.22_Z.wav',), # ('Harold_2020-10-19_23.16.50_Z.wav',), # ('Harold_2020-10-20_23.15.18_Z.wav',), # ('Harold_2020-10-21_23.13.48_Z.wav',), # ('Harold_2020-10-22_23.12.18_Z.wav',), # ('Harold_2020-10-23_23.10.49_Z.wav',), # ('Harold_2020-10-24_23.09.22_Z.wav',), # ('Harold_2020-10-25_23.07.56_Z.wav',), # ('Harold_2020-10-26_23.06.31_Z.wav',), # ('Harold_2020-10-27_23.05.08_Z.wav',), # ('Harold_2020-10-28_23.03.45_Z.wav',), # ('Harold_2020-10-29_23.02.24_Z.wav',), # ('Harold_2020-10-30_23.01.04_Z.wav',), # ('Harold_2020-10-31_22.59.45_Z.wav',), 
# # # ('Harold2_2020-11-04_22.54.47_Z.wav', # # 'Harold2_2020-11-05_03.34.24_Z.wav', # # 'Harold2_2020-11-05_08.14.00_Z.wav',), # # ) # RECORDING_DIR_PATH = \ # Path('/Volumes/MPG Ranch 2018 Part 2/09/MPG Floodplain') # # RECORDING_FILE_NAMES = ( # ('FLOOD-21C_20180901_194500.wav',), # ('FLOOD-21C_20180902_194400.wav',), # ('FLOOD-21C_20180903_194200.wav',), # ('FLOOD-21C_20180904_194000.wav',), # ('FLOOD-21C_20180905_193800.wav',), # ('FLOOD-21C_20180906_193600.wav',), # ('FLOOD-21C_20180907_193400.wav',), # ('FLOOD-21C_20180908_193200.wav',), # ('FLOOD-21C_20180909_193000.wav',), # ('FLOOD-21C_20180910_192800.wav',), # ) # RECORDING_DIR_PATH = \ # Path( # '/Volumes/Recordings2/Nocturnal Bird Migration/BirdCast 2000/' # '2000-05-06') # # RECORDING_FILE_NAMES = ( # ('usny000_2000-05-07_00.30.00_Z.wav', # 'usny000_2000-05-07_01.30.01_Z.wav', # 'usny000_2000-05-07_02.30.01_Z.wav', # 'usny000_2000-05-07_03.30.01_Z.wav', # 'usny000_2000-05-07_04.30.01_Z.wav', # 'usny000_2000-05-07_05.30.01_Z.wav', # 'usny000_2000-05-07_06.30.01_Z.wav', # 'usny000_2000-05-07_07.30.01_Z.wav',), # ) # RECORDING_DIR_PATH = \ # Path( # '/Volumes/Recordings2/Nocturnal Bird Migration/BirdCast 2000/' # '2000-05-07') # # RECORDING_FILE_NAMES = ( # ('usny000_2000-05-08_00.30.00_Z.wav', # 'usny000_2000-05-08_01.30.01_Z.wav', # 'usny000_2000-05-08_02.30.01_Z.wav', # 'usny000_2000-05-08_03.30.01_Z.wav', # 'usny000_2000-05-08_04.30.01_Z.wav', # 'usny000_2000-05-08_05.30.01_Z.wav', # 'usny000_2000-05-08_06.30.01_Z.wav', # 'usny000_2000-05-08_07.30.01_Z.wav',), # ) # RECORDING_DIR_PATH = \ # Path( # '/Volumes/Recordings2/Nocturnal Bird Migration/BirdCast 2000/' # '2000-05-08') # # RECORDING_FILE_NAMES = ( # ('usny000_2000-05-09_00.30.00_Z.wav', # 'usny000_2000-05-09_01.30.00_Z.wav', # 'usny000_2000-05-09_02.30.00_Z.wav', # 'usny000_2000-05-09_03.30.00_Z.wav', # 'usny000_2000-05-09_04.30.00_Z.wav', # 'usny000_2000-05-09_05.30.00_Z.wav', # 'usny000_2000-05-09_06.30.00_Z.wav', # 
'usny000_2000-05-09_07.30.00_Z.wav',), # ) # RECORDING_DIR_PATH = \ # Path( # '/Volumes/Recordings2/Nocturnal Bird Migration/BirdCast 2000/' # '2000-05-10') # # RECORDING_FILE_NAMES = ( # ('usny000_2000-05-11_00.30.00_Z.wav', # 'usny000_2000-05-11_01.30.01_Z.wav', # 'usny000_2000-05-11_02.30.01_Z.wav', # 'usny000_2000-05-11_03.30.01_Z.wav', # 'usny000_2000-05-11_04.30.01_Z.wav', # 'usny000_2000-05-11_05.30.01_Z.wav', # 'usny000_2000-05-11_06.30.01_Z.wav', # 'usny000_2000-05-11_07.30.01_Z.wav',), # ) # RECORDING_DIR_PATH = \ # Path( # '/Volumes/Recordings2/Nocturnal Bird Migration/BirdCast 2000/' # '2000-05-11') # # RECORDING_FILE_NAMES = ( # ('usny000_2000-05-12_00.30.00_Z.wav', # 'usny000_2000-05-12_01.30.01_Z.wav', # 'usny000_2000-05-12_02.30.01_Z.wav', # 'usny000_2000-05-12_03.30.01_Z.wav', # 'usny000_2000-05-12_04.30.01_Z.wav', # 'usny000_2000-05-12_05.30.01_Z.wav', # 'usny000_2000-05-12_06.30.01_Z.wav', # 'usny000_2000-05-12_07.30.01_Z.wav',), # ) # RECORDING_DIR_PATH = \ # Path( # '/Volumes/Recordings2/Nocturnal Bird Migration/BirdCast 2000/' # '2000-05-12') # # RECORDING_FILE_NAMES = ( # ('usny000_2000-05-13_00.30.00_Z.wav', # 'usny000_2000-05-13_01.30.01_Z.wav', # 'usny000_2000-05-13_02.30.01_Z.wav', # 'usny000_2000-05-13_03.30.01_Z.wav', # 'usny000_2000-05-13_04.30.01_Z.wav', # 'usny000_2000-05-13_05.30.01_Z.wav', # 'usny000_2000-05-13_06.30.01_Z.wav', # 'usny000_2000-05-13_07.30.01_Z.wav', # 'usny000_2000-05-13_08.30.01_Z.wav', # 'usny000_2000-05-13_09.30.01_Z.wav',), # ) # RECORDING_DIR_PATH = \ # Path( # '/Volumes/Recordings2/Nocturnal Bird Migration/BirdCast 2000/' # '2000-09-22') # # RECORDING_FILE_NAMES = ( # ('usny000_2000-09-23_01.30.00_Z.wav', # 'usny000_2000-09-23_02.30.01_Z.wav', # 'usny000_2000-09-23_03.30.01_Z.wav', # 'usny000_2000-09-23_04.30.01_Z.wav', # 'usny000_2000-09-23_05.30.01_Z.wav', # 'usny000_2000-09-23_06.30.01_Z.wav', # 'usny000_2000-09-23_07.30.01_Z.wav', # 'usny000_2000-09-23_08.30.01_Z.wav', # 
# (Earlier, superseded recording configurations that were kept here as
# commented-out code have been removed; see version control history.)

RECORDING_DIR_PATH = \
    Path(
        '/Volumes/Recordings2/Nocturnal Bird Migration/BirdCast 2000/'
        '2000-09-27')

RECORDING_FILE_NAMES = (
    ('usny000_2000-09-28_01.30.00_Z.wav',
     'usny000_2000-09-28_02.30.01_Z.wav',
     'usny000_2000-09-28_03.30.01_Z.wav',
     'usny000_2000-09-28_04.30.01_Z.wav',
     'usny000_2000-09-28_05.30.01_Z.wav',
     'usny000_2000-09-28_06.30.01_Z.wav',
     'usny000_2000-09-28_07.30.01_Z.wav',
     'usny000_2000-09-28_08.30.01_Z.wav',
     'usny000_2000-09-28_09.30.01_Z.wav',
     'usny000_2000-09-28_10.30.01_Z.wav',),
)

OUTPUT_DIR_PATH = Path('/Users/harold/Desktop/Recording Stats')
AMPLIGRAM_PLOT_FILE_NAME_FORMAT = '{} Ampligram.pdf'

# Length of one analysis interval, in seconds.
INTERVAL_DURATION = 60

# Full scale for 16-bit samples; samples are normalized to [-1, 1] by this.
MAX_ABS_SAMPLE = 32768

AMPLIGRAM_ONE_SIDED = True
AMPLIGRAM_BIN_COUNT = 100
AMPLIGRAM_COLORMAP_MIN_VALUE = 1e-7    # roughly the fraction one sample contributes
AMPLIGRAM_COLORMAP_MAX_VALUE = 1
AMPLIGRAM_COLORMAP_NAME = 'inferno'
AMPLIGRAM_COLORMAP_BAD_COLOR = 'white'

PROGRESS_MESSAGE_PERIOD = 100


# TODO: Write statistics to CSV files, one per recording.


def main():
    """Compute and plot interval statistics for each configured recording."""
    start_time = time.time()
    for file_paths in get_recording_file_paths():
        show_recording_interval_stats(file_paths, INTERVAL_DURATION)
    end_time = time.time()
    duration = end_time - start_time
    print(f'Processing took {duration} seconds.')


def get_recording_file_paths():
    """Return a list of file path lists, one list per recording."""
    return [
        get_recording_file_paths_aux(file_names)
        for file_names in RECORDING_FILE_NAMES]


def get_recording_file_paths_aux(file_names):
    """Return paths for one recording, truncated to one file in test mode."""
    if TEST_MODE_ENABLED:
        return [RECORDING_DIR_PATH / file_names[0]]
    else:
        return [RECORDING_DIR_PATH / file_name for file_name in file_names]


def show_recording_interval_stats(file_paths, interval_duration):
    """Compute stats for one recording and plot its ampligram."""
    stats = get_recording_stats(file_paths, interval_duration)
    recording_name = get_recording_name(file_paths)
    plot_ampligram(recording_name, stats)


def get_recording_stats(file_paths, interval_duration):
    """
    Compute per-interval statistics for one recording.

    Returns a list with one stat `Bunch` per channel, each holding arrays
    with one element (or ampligram row) per interval, concatenated across
    the recording's files.
    """
    file_count = len(file_paths)
    file_stats = []
    for file_num, file_path in enumerate(file_paths):
        print(f'File "{file_path.name}", {file_num + 1} of {file_count}...')
        stats = get_file_interval_stats(file_path, interval_duration)
        file_stats.append(stats)

    # file_stats[file_num][channel_num][interval_num] is a stat dict

    file_count = len(file_stats)
    channel_count = len(file_stats[0])

    channel_stats = [defaultdict(list) for _ in range(channel_count)]
    for file_num in range(file_count):
        for channel_num in range(channel_count):
            interval_stats = file_stats[file_num][channel_num]
            for stat_dict in interval_stats:
                for name in [
                        'max', 'min', 'mean', 'mean_abs', 'std', 'ampligram']:
                    channel_stats[channel_num][name].append(stat_dict[name])

    # channel_stats[channel_num] is a defaultdict of stat lists

    recording_stats = [create_channel_stat_bunch(s) for s in channel_stats]

    # recording_stats[channel_num] is a stat bunch

    return recording_stats


def get_file_interval_stats(file_path, interval_duration):
    """
    Compute per-interval, per-channel statistics for one audio file.

    Returns stats[channel_num][interval_num], a stat dict. A trailing
    partial interval, if any, is ignored.
    """
    # NOTE(review): the reader is never closed here; if
    # WaveAudioFileReader has a close method (or supports `with`), it
    # should be used — confirm against the vesper API.
    reader = WaveAudioFileReader(str(file_path))
    channel_count = reader.num_channels
    interval_length = int(interval_duration * reader.sample_rate)
    interval_count = reader.length // interval_length
    if TEST_MODE_ENABLED:
        interval_count = min(interval_count, TEST_MODE_INTERVAL_COUNT_LIMIT)
    stats = [[] for _ in range(channel_count)]
    for interval_num in range(interval_count):
        if interval_num != 0 and interval_num % PROGRESS_MESSAGE_PERIOD == 0:
            print(f' Interval {interval_num} of {interval_count}...')
        stat_computers = [
            StatComputer(AMPLIGRAM_ONE_SIDED, AMPLIGRAM_BIN_COUNT)
            for _ in range(channel_count)]
        frame_num = interval_num * interval_length
        # Normalize samples to [-1, 1].
        samples = reader.read(frame_num, interval_length) / MAX_ABS_SAMPLE
        for channel_num in range(channel_count):
            computer = stat_computers[channel_num]
            computer.process_samples(samples[channel_num])
            stats[channel_num].append(computer.get_stats())
    return stats


def create_channel_stat_bunch(stat_dicts):
    """Convert a dict of per-interval stat lists into arrays in a Bunch."""
    return Bunch(
        max=np.array(stat_dicts['max']),
        min=np.array(stat_dicts['min']),
        mean=np.array(stat_dicts['mean']),
        mean_abs=np.array(stat_dicts['mean_abs']),
        std=np.array(stat_dicts['std']),
        ampligram=np.stack(stat_dicts['ampligram']))


def get_recording_name(file_paths):
    """Name of a recording: its first file's name minus the extension."""
    return file_paths[0].name[:-4]


def plot_ampligram(recording_name, recording_stats):
    """Write a one-page PDF with one ampligram per channel."""
    pdf_file_path = get_ampligram_file_path(recording_name)
    with PdfPages(pdf_file_path) as pdf:

        channel_count = len(recording_stats)
        figure, axes_list = plt.subplots(channel_count, figsize=(9, 6))

        # When there's only one channel, `axes_list` is an `AxesSubplot`
        # rather than a sequence of them.
        if channel_count == 1:
            axes_list = [axes_list]

        for channel_num, channel_stats in enumerate(recording_stats):

            print(f'channel {channel_num}:')
            print(f'max {channel_stats.max}')
            print(f'min {channel_stats.min}')
            print(f'mean {channel_stats.mean}')
            print(f'mean_abs {channel_stats.mean_abs}')
            print(f'std {channel_stats.std}')

            gram = channel_stats.ampligram.transpose()

            start_time = 0
            # Fix: derive hours from INTERVAL_DURATION rather than the
            # hard-coded `/ 60`, which was only correct for 60 s intervals.
            end_time = gram.shape[1] * INTERVAL_DURATION / 3600
            start_amp, end_amp = StatComputer.get_ampligram_amplitude_range(
                AMPLIGRAM_ONE_SIDED)
            extent = (start_time, end_time, start_amp, end_amp)

            # NOTE(review): `set_bad` mutates a registry colormap, which
            # newer matplotlib versions deprecate; consider copying it.
            colormap = cm.get_cmap(AMPLIGRAM_COLORMAP_NAME)
            colormap.set_bad(AMPLIGRAM_COLORMAP_BAD_COLOR)
            colormap_norm = LogNorm(
                vmin=AMPLIGRAM_COLORMAP_MIN_VALUE,
                vmax=AMPLIGRAM_COLORMAP_MAX_VALUE, clip=True)

            axes = axes_list[channel_num]

            image = axes.imshow(
                gram, cmap=colormap, norm=colormap_norm, origin='lower',
                extent=extent, aspect='auto', interpolation='nearest')

            axes.set_xlabel('Time (hours)')
            axes.set_ylabel('Amplitude')
            axes.label_outer()

        figure.suptitle(f'{recording_name} Ampligram')

        # One shared colorbar for all channels (uses the last image drawn;
        # all channels share the same norm, so the scale is common).
        colorbar = figure.colorbar(image, ax=axes_list)
        colorbar.ax.set_ylabel('Fraction of Samples')

        # This messed up the layout as of 2020-11-06.
        # plt.tight_layout()

        pdf.savefig()
        plt.close()


def get_ampligram_file_path(recording_name):
    """Return the output PDF path for one recording's ampligram."""
    file_name = AMPLIGRAM_PLOT_FILE_NAME_FORMAT.format(recording_name)
    return OUTPUT_DIR_PATH / file_name


_DEFAULT_ONE_SIDED_AMPLIGRAM_BIN_COUNT = 100


class StatComputer:
    """
    Accumulates sample statistics (extrema, mean, mean absolute value,
    standard deviation, and an ampligram histogram) over one or more
    batches of normalized (in [-1, 1]) samples.
    """

    @staticmethod
    def get_ampligram_amplitude_range(ampligram_one_sided):
        """Amplitude range covered by the ampligram's Y axis."""
        if ampligram_one_sided:
            return (0, 1)
        else:
            return (-1, 1)

    def __init__(self, ampligram_one_sided=True, ampligram_bin_count=None):
        self._sample_count = 0
        # Extrema start at the opposite ends of the nominal [-1, 1]
        # normalized sample range.
        self._max = -1
        self._min = 1
        self._sum = 0
        self._abs_sum = 0
        self._squared_sum = 0
        self._ampligram_one_sided = ampligram_one_sided
        self._ampligram_amplitude_range = \
            StatComputer.get_ampligram_amplitude_range(ampligram_one_sided)
        self._ampligram_bin_count = \
            self._get_ampligram_bin_count(ampligram_bin_count)
        self._hist = np.zeros(self._ampligram_bin_count, dtype='int')

    def _get_ampligram_bin_count(self, ampligram_bin_count):
        if ampligram_bin_count is None:
            # bin count not specified
            if self._ampligram_one_sided:
                return _DEFAULT_ONE_SIDED_AMPLIGRAM_BIN_COUNT
            else:
                # Odd count so one bin is centered on zero.
                return 2 * _DEFAULT_ONE_SIDED_AMPLIGRAM_BIN_COUNT + 1
        else:
            # bin count specified
            return ampligram_bin_count

    def process_samples(self, samples):
        """Accumulate statistics for one batch of samples (a NumPy array)."""
        self._sample_count += len(samples)
        self._max = max(self._max, samples.max())
        self._min = min(self._min, samples.min())
        self._sum += samples.sum()
        self._abs_sum += np.abs(samples).sum()
        self._squared_sum += (samples * samples).sum()
        self._hist += self._compute_histogram(samples)

    def _compute_histogram(self, samples):
        if self._ampligram_one_sided:
            samples = np.abs(samples)
        hist, _ = np.histogram(
            samples, self._ampligram_bin_count,
            self._ampligram_amplitude_range)
        return hist

    def get_stats(self):
        """Return a dict of the accumulated statistics."""
        mean = self._sum / self._sample_count
        mean_abs = self._abs_sum / self._sample_count
        # Standard deviation is sqrt(E[x^2] - E[x]^2). The previous version
        # omitted the mean term, i.e. it actually computed the RMS. The
        # variance is clamped at zero to guard against floating-point
        # rounding when the samples are (nearly) constant.
        variance = self._squared_sum / self._sample_count - mean * mean
        std = math.sqrt(max(variance, 0))
        ampligram = self._hist / self._sample_count
        return {
            'sample_count': self._sample_count,
            'max': self._max,
            'min': self._min,
            'mean': mean,
            'mean_abs': mean_abs,
            'std': std,
            'ampligram': ampligram
        }


if __name__ == '__main__':
    main()
"""
Information measures based on Mike DeWeese's multivariate mutual information.

Each measure is the maximum, over (deterministic) functions applied to each
variable, of the corresponding standard multivariate mutual information.
Two implementations coexist here: an exhaustive one built from partitions
(`deweese_constructor`) and a numerical-optimization one
(`BaseDeWeeseOptimizer` subclasses), whose functionals are exported.
"""

from itertools import product

from ..algorithms import BaseAuxVarOptimizer
from ..distconst import RVFunctions, insert_rvf
from ..helpers import normalize_rvs
from ..utils import extended_partition, partitions, unitful


__all__ = (
    'deweese_coinformation',
    'deweese_total_correlation',
    'deweese_dual_total_correlation',
    'deweese_caekl_mutual_information',
)


def deweese_constructor(mmi):
    """
    Construct a DeWeese-like multivariate mutual information.

    Parameters
    ----------
    mmi : func
        A multivariate mutual information.

    Returns
    -------
    deweese_mmi : func
        A DeWeese'd form of `mmi`.
    """
    @unitful
    def deweese(dist, rvs=None, crvs=None, return_opt=False, rv_mode=None):
        """
        Compute the DeWeese form of {name}.

        Parameters
        ----------
        dist : Distribution
            The distribution to work with.
        rvs : iter of iters, None
            The variables of interest. If None, use all.
        crvs : iter, None
            The variables to condition on. If None, none.
        return_opt : bool
            Whether to return the distribution containing the variable
            functions or not. Defaults to False.
        rv_mode : str, None
            Specifies how to interpret `rvs` and `crvs`. Valid options
            are: {{'indices', 'names'}}. If equal to 'indices', then the
            elements of `crvs` and `rvs` are interpreted as random
            variable indices. If equal to 'names', then the elements are
            interpreted as random variable names. If `None`, then the
            value of `dist._rv_mode` is consulted, which defaults to
            'indices'.

        Returns
        -------
        val : float
            The value of the DeWeese {name}
        opt_d : Distribution
            The distribution with the functions achieving `val`. Only
            returned if `return_opt` is True.
        """
        rvs, crvs, rv_mode = normalize_rvs(dist, rvs, crvs, rv_mode)
        dist = dist.coalesce(rvs + [crvs])

        # After coalescing, the function outputs will be appended after the
        # original rvs and the crvs, hence the `+ len(rvs) + 1` offset; the
        # crvs end up as the last original index.
        new_rvs = [[i + len(rvs) + 1] for i, _ in enumerate(rvs)]
        new_crvs = [dist.outcome_length() - 1]

        rvf = RVFunctions(dist)

        def all_funcs():
            """
            A generator to construct all possible functions of the variables.

            Yields
            ------
            d : Distribution
                A distribution with additional indices corresponding to
                functions of those variables.
            """
            # Every function of a variable corresponds to a partition of
            # that variable's alphabet; enumerate one partition per rv.
            partss = [partitions({(o[i],) for o in dist.outcomes}) for i, _ in enumerate(rvs)]
            for parts in product(*partss):
                d = dist.copy()
                for i, part in enumerate(parts):
                    new_part = extended_partition(d.outcomes, [i], part, d._outcome_ctor)
                    d = insert_rvf(d, rvf.from_partition(new_part))
                yield d

        # Exhaustive maximization of `mmi` over all function combinations.
        possibilities = ((mmi(d, rvs=new_rvs, crvs=new_crvs), d) for d in all_funcs())
        opt_val, opt_d = max(possibilities, key=lambda t: t[0])

        if return_opt:
            return opt_val, opt_d
        else:
            return opt_val

    deweese.__doc__ = deweese.__doc__.format(name=mmi.__name__)

    return deweese


class BaseDeWeeseOptimizer(BaseAuxVarOptimizer):
    """
    An optimizer for DeWeese-style multivariate mutual informations.
    """

    construct_initial = BaseAuxVarOptimizer.construct_copy_initial

    # Objectives below are negated, so minimization maximizes the measure;
    # the functional multiplies by `_sign` to undo the negation.
    _sign = -1
    _shotgun = 5

    def __init__(self, dist, rvs=None, crvs=None, deterministic=False, rv_mode=None):
        """
        Initialize the optimizer.

        Parameters
        ----------
        dist : Distribution
            The distribution to optimize.
        rvs : iter of iters
            The random variables of interest.
        crvs : iter
            The random variables to condition on.
        deterministic : bool
            Whether the functions to optimize over should be deterministic
            or not. Defaults to False.
        rv_mode : str, None
            Specifies how to interpret `rvs` and `crvs`. Valid options
            are: {{'indices', 'names'}}. If equal to 'indices', then the
            elements of `crvs` and `rvs` are interpreted as random
            variable indices. If equal to 'names', the the elements are
            interpreted as random variable names. If `None`, then the
            value of `dist._rv_mode` is consulted, which defaults to
            'indices'.
        """
        super().__init__(dist, rvs=rvs, crvs=crvs, rv_mode=rv_mode)

        # One auxiliary variable per rv, sized to match that rv's alphabet
        # (self._shape) — the auxvars play the role of the applied functions.
        self._construct_auxvars([({rv}, size) for rv, size in zip(self._rvs, self._shape)])

        if deterministic:
            self.constraints = [{'type': 'eq',
                                 'fun': self._constraint_deterministic(),
                                 },
                                ]
            # The equality constraint makes the search harder, so allow the
            # basin-hopping search twice the default number of hops.
            self._default_hops *= 2

    @classmethod
    def functional(cls):
        """
        Construct a functional form of this optimizer.

        Returns
        -------
        function : func
            A function which constructs this optimizer and performs the
            optimization.
        """
        @unitful
        def function(dist, rvs=None, crvs=None, niter=None, deterministic=False, rv_mode=None):
            """
            Compute the DeWeese {name}.

            Parameters
            ----------
            dist : Distribution
                The distribution of interest.
            rvs : iter of iters, None
                The random variables of interest. If None, use all.
            crvs : iter, None
                The variables to condition on. If None, none.
            niter : int, None
                If specified, the number of optimization steps to perform.
            deterministic : bool
                Whether the functions to optimize over should be
                deterministic or not. Defaults to False.
            rv_mode : str, None
                Specifies how to interpret `rvs` and `crvs`. Valid options
                are: {{'indices', 'names'}}. If equal to 'indices', then the
                elements of `crvs` and `rvs` are interpreted as random
                variable indices. If equal to 'names', the the elements are
                interpreted as random variable names. If `None`, then the
                value of `dist._rv_mode` is consulted, which defaults to
                'indices'.

            Returns
            -------
            val : float
                The value of the DeWeese {name}.
            """
            opt = cls(dist, rvs=rvs, crvs=crvs, rv_mode=rv_mode, deterministic=deterministic)
            opt.optimize(niter=niter)
            # Undo the negation applied inside the objectives.
            return cls._sign * opt.objective(opt._optima)

        function.__doc__ = function.__doc__.format(name=cls.name)

        return function


class DeWeeseCoInformation(BaseDeWeeseOptimizer):
    """
    The DeWeese Co-Information:
        I_D[X_0 : ... : X_n | Y] = max_{p(x'_i | x_i)} I[X'_0 : ... : X'_n | Y]
    """

    name = 'coinformation'

    def _objective(self):
        """
        The conditional co-information.

        Returns
        -------
        obj : func
            The objective function.
        """
        coi = self._coinformation(rvs=self._arvs, crvs=self._crvs)

        def objective(self, x):
            """
            The conditional coinformation.

            Parameters
            ----------
            x : np.ndarray
                An optimization vector.

            Returns
            -------
            obj : float
                The value of the objective.
            """
            pmf = self.construct_joint(x)
            # Negated: the optimizer minimizes, we want a maximum.
            return -coi(pmf)

        return objective


deweese_coinformation = DeWeeseCoInformation.functional()


class DeWeeseTotalCorrelation(BaseDeWeeseOptimizer):
    """
    The DeWeese Total Correlation:
        T_D[X_0 : ... : X_n | Y] = max_{p(x'_i | x_i)} T[X'_0 : ... : X'_n | Y]
    """

    name = 'total correlation'

    def _objective(self):
        """
        The conditional total correlation.

        Returns
        -------
        obj : func
            The objective function.
        """
        tc = self._total_correlation(rvs=self._arvs, crvs=self._crvs)

        def objective(self, x):
            """
            The conditional total correlation.

            Parameters
            ----------
            x : np.ndarray
                An optimization vector.

            Returns
            -------
            obj : float
                The value of the objective.
            """
            pmf = self.construct_joint(x)
            # Negated: the optimizer minimizes, we want a maximum.
            return -tc(pmf)

        return objective


deweese_total_correlation = DeWeeseTotalCorrelation.functional()


class DeWeeseDualTotalCorrelation(BaseDeWeeseOptimizer):
    """
    The DeWeese Dual Total Correlation:
        B_D[X_0 : ... : X_n | Y] = max_{p(x'_i | x_i)} B[X'_0 : ... : X'_n | Y]
    """

    name = 'dual total correlation'

    def _objective(self):
        """
        The conditional dual total correlation.

        Returns
        -------
        obj : func
            The objective function.
        """
        dtc = self._dual_total_correlation(rvs=self._arvs, crvs=self._crvs)

        def objective(self, x):
            """
            The conditional dual total correlation.

            Parameters
            ----------
            x : np.ndarray
                An optimization vector.

            Returns
            -------
            obj : float
                The value of the objective.
            """
            pmf = self.construct_joint(x)
            # Negated: the optimizer minimizes, we want a maximum.
            return -dtc(pmf)

        return objective


deweese_dual_total_correlation = DeWeeseDualTotalCorrelation.functional()


class DeWeeseCAEKLMutualInformation(BaseDeWeeseOptimizer):
    """
    The DeWeese CAEKL Mutual Information:
        J_D[X_0 : ... : X_n | Y] = max_{p(x'_i | x_i)} J[X'_0 : ... : X'_n | Y]
    """

    name = 'caekl mutual information'

    def _objective(self):
        """
        The conditional caekl mutual information.

        Returns
        -------
        obj : func
            The objective function.
        """
        caekl = self._caekl_mutual_information(rvs=self._arvs, crvs=self._crvs)

        def objective(self, x):
            """
            The conditional caekl mutual information.

            Parameters
            ----------
            x : np.ndarray
                An optimization vector.

            Returns
            -------
            obj : float
                The value of the objective.
            """
            pmf = self.construct_joint(x)
            # Negated: the optimizer minimizes, we want a maximum.
            return -caekl(pmf)

        return objective


deweese_caekl_mutual_information = DeWeeseCAEKLMutualInformation.functional()
"""The tests for the Demo Media player platform."""
from unittest.mock import patch

import pytest
import voluptuous as vol

import homeassistant.components.media_player as mp
from homeassistant.const import (
    ATTR_ENTITY_ID,
    ATTR_ENTITY_PICTURE,
    ATTR_SUPPORTED_FEATURES,
    STATE_OFF,
    STATE_PAUSED,
    STATE_PLAYING,
)
from homeassistant.helpers.aiohttp_client import DATA_CLIENTSESSION
from homeassistant.setup import async_setup_component

# Demo entity used by most tests below.
TEST_ENTITY_ID = "media_player.walkman"


@pytest.fixture(name="mock_media_seek")
def media_player_media_seek_fixture():
    """Mock demo YouTube player media seek."""
    with patch(
        "homeassistant.components.demo.media_player.DemoYoutubePlayer.media_seek",
        autospec=True,
    ) as seek:
        yield seek


async def test_source_select(hass):
    """Test the input source service."""
    entity_id = "media_player.lounge_room"

    assert await async_setup_component(
        hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
    )
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get(mp.ATTR_INPUT_SOURCE) == "dvd"

    # A None source must be rejected by the service schema and leave the
    # current source unchanged.
    with pytest.raises(vol.Invalid):
        await hass.services.async_call(
            mp.DOMAIN,
            mp.SERVICE_SELECT_SOURCE,
            {ATTR_ENTITY_ID: entity_id, mp.ATTR_INPUT_SOURCE: None},
            blocking=True,
        )
    state = hass.states.get(entity_id)
    assert state.attributes.get(mp.ATTR_INPUT_SOURCE) == "dvd"

    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_SELECT_SOURCE,
        {ATTR_ENTITY_ID: entity_id, mp.ATTR_INPUT_SOURCE: "xbox"},
        blocking=True,
    )
    state = hass.states.get(entity_id)
    assert state.attributes.get(mp.ATTR_INPUT_SOURCE) == "xbox"


async def test_repeat_set(hass):
    """Test the repeat set service."""
    entity_id = "media_player.walkman"

    assert await async_setup_component(
        hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
    )
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get(mp.ATTR_MEDIA_REPEAT) == mp.const.REPEAT_MODE_OFF

    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_REPEAT_SET,
        {ATTR_ENTITY_ID: entity_id, mp.ATTR_MEDIA_REPEAT: mp.const.REPEAT_MODE_ALL},
        blocking=True,
    )
    state = hass.states.get(entity_id)
    assert state.attributes.get(mp.ATTR_MEDIA_REPEAT) == mp.const.REPEAT_MODE_ALL


async def test_clear_playlist(hass):
    """Test clear playlist."""
    assert await async_setup_component(
        hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
    )
    await hass.async_block_till_done()
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.state == STATE_PLAYING

    # The demo player turns off when its playlist is cleared.
    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_CLEAR_PLAYLIST,
        {ATTR_ENTITY_ID: TEST_ENTITY_ID},
        blocking=True,
    )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.state == STATE_OFF


async def test_volume_services(hass):
    """Test the volume service."""
    assert await async_setup_component(
        hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
    )
    await hass.async_block_till_done()
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 1.0

    # A None volume level must be rejected by the service schema.
    with pytest.raises(vol.Invalid):
        await hass.services.async_call(
            mp.DOMAIN,
            mp.SERVICE_VOLUME_SET,
            {ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_LEVEL: None},
            blocking=True,
        )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 1.0

    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_VOLUME_SET,
        {ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_LEVEL: 0.5},
        blocking=True,
    )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 0.5

    # Volume down/up step by 0.1 in the demo player.
    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_VOLUME_DOWN,
        {ATTR_ENTITY_ID: TEST_ENTITY_ID},
        blocking=True,
    )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 0.4

    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_VOLUME_UP,
        {ATTR_ENTITY_ID: TEST_ENTITY_ID},
        blocking=True,
    )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 0.5

    assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_MUTED) is False

    # A None mute flag must be rejected by the service schema.
    with pytest.raises(vol.Invalid):
        await hass.services.async_call(
            mp.DOMAIN,
            mp.SERVICE_VOLUME_MUTE,
            {ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_MUTED: None},
            blocking=True,
        )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_MUTED) is False

    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_VOLUME_MUTE,
        {ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_MUTED: True},
        blocking=True,
    )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_MUTED) is True


async def test_turning_off_and_on(hass):
    """Test turn_on and turn_off."""
    assert await async_setup_component(
        hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
    )
    await hass.async_block_till_done()
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.state == STATE_PLAYING

    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_TURN_OFF,
        {ATTR_ENTITY_ID: TEST_ENTITY_ID},
        blocking=True,
    )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.state == STATE_OFF
    assert not mp.is_on(hass, TEST_ENTITY_ID)

    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: TEST_ENTITY_ID},
        blocking=True,
    )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.state == STATE_PLAYING
    assert mp.is_on(hass, TEST_ENTITY_ID)

    # Toggle from playing should turn the player off again.
    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_TOGGLE,
        {ATTR_ENTITY_ID: TEST_ENTITY_ID},
        blocking=True,
    )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.state == STATE_OFF
    assert not mp.is_on(hass, TEST_ENTITY_ID)


async def test_playing_pausing(hass):
    """Test media_pause."""
    assert await async_setup_component(
        hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
    )
    await hass.async_block_till_done()
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.state == STATE_PLAYING

    await hass.services.async_call(
        mp.DOMAIN,
        mp.SERVICE_MEDIA_PAUSE,
        {ATTR_ENTITY_ID: TEST_ENTITY_ID},
        blocking=True,
    )
    state = hass.states.get(TEST_ENTITY_ID)
    assert state.state == STATE_PAUSED

    await
hass.services.async_call( mp.DOMAIN, mp.SERVICE_MEDIA_PLAY_PAUSE, {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_PLAYING await hass.services.async_call( mp.DOMAIN, mp.SERVICE_MEDIA_PLAY_PAUSE, {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_PAUSED await hass.services.async_call( mp.DOMAIN, mp.SERVICE_MEDIA_PLAY, {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_PLAYING async def test_prev_next_track(hass): """Test media_next_track and media_previous_track .""" assert await async_setup_component( hass, mp.DOMAIN, {"media_player": {"platform": "demo"}} ) await hass.async_block_till_done() state = hass.states.get(TEST_ENTITY_ID) assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 1 await hass.services.async_call( mp.DOMAIN, mp.SERVICE_MEDIA_NEXT_TRACK, {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) state = hass.states.get(TEST_ENTITY_ID) assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 2 await hass.services.async_call( mp.DOMAIN, mp.SERVICE_MEDIA_NEXT_TRACK, {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) state = hass.states.get(TEST_ENTITY_ID) assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 3 await hass.services.async_call( mp.DOMAIN, mp.SERVICE_MEDIA_PREVIOUS_TRACK, {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) state = hass.states.get(TEST_ENTITY_ID) assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 2 assert await async_setup_component( hass, mp.DOMAIN, {"media_player": {"platform": "demo"}} ) await hass.async_block_till_done() ent_id = "media_player.lounge_room" state = hass.states.get(ent_id) assert state.attributes.get(mp.ATTR_MEDIA_EPISODE) == 1 await hass.services.async_call( mp.DOMAIN, mp.SERVICE_MEDIA_NEXT_TRACK, {ATTR_ENTITY_ID: ent_id}, blocking=True, ) state = hass.states.get(ent_id) assert 
state.attributes.get(mp.ATTR_MEDIA_EPISODE) == 2 await hass.services.async_call( mp.DOMAIN, mp.SERVICE_MEDIA_PREVIOUS_TRACK, {ATTR_ENTITY_ID: ent_id}, blocking=True, ) state = hass.states.get(ent_id) assert state.attributes.get(mp.ATTR_MEDIA_EPISODE) == 1 async def test_play_media(hass): """Test play_media .""" assert await async_setup_component( hass, mp.DOMAIN, {"media_player": {"platform": "demo"}} ) await hass.async_block_till_done() ent_id = "media_player.living_room" state = hass.states.get(ent_id) assert mp.SUPPORT_PLAY_MEDIA & state.attributes.get(ATTR_SUPPORTED_FEATURES) > 0 assert state.attributes.get(mp.ATTR_MEDIA_CONTENT_ID) is not None with pytest.raises(vol.Invalid): await hass.services.async_call( mp.DOMAIN, mp.SERVICE_PLAY_MEDIA, {ATTR_ENTITY_ID: ent_id, mp.ATTR_MEDIA_CONTENT_ID: "some_id"}, blocking=True, ) state = hass.states.get(ent_id) assert mp.SUPPORT_PLAY_MEDIA & state.attributes.get(ATTR_SUPPORTED_FEATURES) > 0 assert state.attributes.get(mp.ATTR_MEDIA_CONTENT_ID) != "some_id" await hass.services.async_call( mp.DOMAIN, mp.SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: ent_id, mp.ATTR_MEDIA_CONTENT_TYPE: "youtube", mp.ATTR_MEDIA_CONTENT_ID: "some_id", }, blocking=True, ) state = hass.states.get(ent_id) assert mp.SUPPORT_PLAY_MEDIA & state.attributes.get(ATTR_SUPPORTED_FEATURES) > 0 assert state.attributes.get(mp.ATTR_MEDIA_CONTENT_ID) == "some_id" async def test_seek(hass, mock_media_seek): """Test seek.""" assert await async_setup_component( hass, mp.DOMAIN, {"media_player": {"platform": "demo"}} ) await hass.async_block_till_done() ent_id = "media_player.living_room" state = hass.states.get(ent_id) assert state.attributes[ATTR_SUPPORTED_FEATURES] & mp.SUPPORT_SEEK assert not mock_media_seek.called with pytest.raises(vol.Invalid): await hass.services.async_call( mp.DOMAIN, mp.SERVICE_MEDIA_SEEK, { ATTR_ENTITY_ID: ent_id, mp.ATTR_MEDIA_SEEK_POSITION: None, }, blocking=True, ) assert not mock_media_seek.called await hass.services.async_call( mp.DOMAIN, 
mp.SERVICE_MEDIA_SEEK, { ATTR_ENTITY_ID: ent_id, mp.ATTR_MEDIA_SEEK_POSITION: 100, }, blocking=True, ) assert mock_media_seek.called async def test_media_image_proxy(hass, hass_client): """Test the media server image proxy server .""" assert await async_setup_component( hass, mp.DOMAIN, {"media_player": {"platform": "demo"}} ) await hass.async_block_till_done() fake_picture_data = "test.test" class MockResponse: """Test response.""" def __init__(self): """Test response init.""" self.status = 200 self.headers = {"Content-Type": "sometype"} async def read(self): """Test response read.""" return fake_picture_data.encode("ascii") async def release(self): """Test response release.""" class MockWebsession: """Test websession.""" async def get(self, url): """Test websession get.""" return MockResponse() def detach(self): """Test websession detach.""" hass.data[DATA_CLIENTSESSION] = MockWebsession() state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_PLAYING client = await hass_client() req = await client.get(state.attributes.get(ATTR_ENTITY_PICTURE)) assert req.status == 200 assert await req.text() == fake_picture_data async def test_grouping(hass): """Test the join/unjoin services.""" walkman = "media_player.walkman" kitchen = "media_player.kitchen" assert await async_setup_component( hass, mp.DOMAIN, {"media_player": {"platform": "demo"}} ) await hass.async_block_till_done() state = hass.states.get(walkman) assert state.attributes.get(mp.ATTR_GROUP_MEMBERS) == [] await hass.services.async_call( mp.DOMAIN, mp.SERVICE_JOIN, { ATTR_ENTITY_ID: walkman, mp.ATTR_GROUP_MEMBERS: [ kitchen, ], }, blocking=True, ) state = hass.states.get(walkman) assert state.attributes.get(mp.ATTR_GROUP_MEMBERS) == [walkman, kitchen] await hass.services.async_call( mp.DOMAIN, mp.SERVICE_UNJOIN, {ATTR_ENTITY_ID: walkman}, blocking=True, ) state = hass.states.get(walkman) assert state.attributes.get(mp.ATTR_GROUP_MEMBERS) == []
#!/usr/bin/env python
# Python 2 script: threaded image downloader for reddit submissions,
# with per-site handlers (imgur, twitter, pagebin, qkme, ...).
import memegrab
import re
import requests
import md5
import os
import twitter
import datetime
import threading
import Queue
import log
import urllib

# Shared work queue of (link, filename, mtime) tuples consumed by the
# daemon Download_Thread pool spawned below.
IMAGE_Q = Queue.Queue()


def initialize_imgur_checking():
    """Return the md5 digest of imgur's "removed image" placeholder.

    The placeholder is fetched once and cached in .bad_imgur.jpg so
    downloads can be compared against it to detect deleted images.
    """
    if not os.path.isfile('.bad_imgur.jpg'):
        # Any nonsense image id returns the standard "removed" placeholder.
        obj = requests.get('http://i.imgur.com/sdlfkjdkfh.jpg', stream=True)
        with open('.bad_imgur.jpg', 'wb') as f:
            for chunk in obj.iter_content(1024):
                f.write(chunk)
    f = open('.bad_imgur.jpg', 'r')
    text = f.read()
    f.close()
    digest = md5.new(text).digest()
    return digest


class Download_Thread(threading.Thread):
    """Worker that drains IMAGE_Q, fetching each link to its filename."""

    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.bad_imgur = initialize_imgur_checking()

    def output(self, text, error=False):
        """Log *text* tagged with this worker thread's name."""
        log.log(text, thread_name=self.getName(), error=error)

    def process_link(self, link, filename, time):
        """Download *link* to *filename* and stamp it with *time* (mtime)."""
        # NOTE: urlretrieve is a blunt instrument (no removed-image check,
        # no custom User-Agent), but it streams straight to disk.
        urllib.urlretrieve(link, filename)
        os.utime(filename, (time, time))
        self.output('Setting time to %s' % time)

    def run(self):
        # Daemon loop: block on the queue forever; task_done lets callers
        # join() the queue when all downloads have finished.
        while True:
            link, filename, time = self.queue.get()
            self.process_link(link, filename, time)
            self.queue.task_done()


# spawn the download worker pool at import time
for i in range(10):
    t = Download_Thread(IMAGE_Q)
    t.setDaemon(True)
    t.start()


class Downloader:
    """
    Custom downloaders for different websites.
    All traffic is directed through "Raw" which simply downloads the raw
    image file.
    """

    def __init__(self, reddit, force):
        self.help = "Sorry, %s doesn't work yet :("
        self.reddit = reddit          # target directory (subreddit name)
        self.bad_imgur = initialize_imgur_checking()
        self.force = force            # re-download even if file exists
        self.retry = False
        self.time = False             # submission timestamp, set via setTime
        self.title = False            # sanitized title, set via setTitle

    def Raw(self, link):
        """Queue *link* for download into the subreddit directory."""
        link = link.split('?')[0]
        old_filename = link.split('/')[-1]
        extension = old_filename.split('.')[-1]
        # the hash is used to prevent overwriting multiple submissions
        # with the same filename
        link_hash = md5.new(link).hexdigest()
        filename = self.title + '.' + link_hash + '.' + extension
        if filename == '':
            return
        path = self.reddit + '/' + filename
        if os.path.isfile(path) and (not self.force):
            os.utime(path, (self.time, self.time))
            self.output('Skipping %s since it already exists' % link)
            return
        # download the image, so add it to the queue
        self.output('Adding %s to queue.' % link)
        IMAGE_Q.put((link, path, self.time))

    def Imgur(self, link):
        """Resolve an imgur page or album link to raw image URLs."""
        if '.' in link.split('/')[-1]:
            # raw link but no i. prefix
            self.Raw(link)
            return
        # determine whether it is an album or just one image
        if '/a/' in link:
            # it's an album!
            self.output('Processing Imgur album: %s' % (link))
            link = link.split('#')[0]
            i_id = link.split('/a/')[1]
            api_link = 'http://api.imgur.com/2/album/%s.json' % i_id
            api = self.page_grab(api_link, json=True)
            for image in api['album']['images']:
                self.Raw(image['links']['original'])
            self.output('Finished Imgur album: %s' % link)
        else:
            # it's a raw image
            i_id = link.split('/')[-1]
            api = self.page_grab(
                'http://api.imgur.com/2/image/%s.json' % i_id, json=True)
            self.Raw(api['image']['links']['original'])

    def Tumblr(self, link):
        """Not implemented yet; just logs an apology."""
        self.output(self.help % link, True)

    def Twitter(self, link):
        """Extract the first URL from a tweet and route it through All()."""
        api = twitter.Api()
        try:
            twitter_id = int(link.split('/status/')[-1])
        except ValueError:  # was a bare except: only int() can fail here
            self.output('Can\'t parse tweet: %s' % link, True)
            return
        stat = api.GetStatus(twitter_id)
        text = stat.text
        # Slice out a fixed-width URL candidate; find() == -1 yields a
        # 1-char slice, which is the "not found" sentinel below.
        parsed = text[text.find("http://"):text.find("http://") + 21]
        if len(parsed) == 1:  # means it didnt find it
            parsed = text[text.find("https://"):text.find("https://") + 22]
        did_it_work = len(parsed) != 1
        if not did_it_work:
            # FIX: bare `raise` with no active exception is itself an error
            # in Python 2; raise something meaningful instead.
            raise ValueError('No URL found in tweet: %s' % link)
        # expand the url so we can send it through other sets of
        # regular expressions
        ret = self.page_grab('http://expandurl.appspot.com/expand',
                             data={'url': parsed}, json=True)
        if ret['status'].lower() == 'ok':
            final_url = ret['end_url']
        else:
            raise ValueError('Could not expand URL: %s' % parsed)
        self.All(final_url)

    def yfrog(self, link):
        """Scrape the direct-image URL out of a yfrog HTML page."""
        text = self.page_grab(link)
        image_url = text[
            text.find('<div class="label">Direct:&nbsp;&nbsp;<a href="') + 47:
            text.find('" target="_blank"><img src="/images/external.png" alt="Direct"/>')]
        self.Raw(image_url)

    def Pagebin(self, link):
        """Scrape the embedded image out of a pagebin.com HTML page."""
        html = self.page_grab(link)
        x = re.findall(
            '<img alt="(.*?)" src="(.*?)" style="width: (.*?)px; '
            'height: (.*?)px; " />', html)
        try:
            iimgur = x[0][1]
            self.Raw(iimgur)
        except IndexError:  # FIX: x[0] on an empty findall raises IndexError,
            # not KeyError, so the old handler never fired.
            self.output("Can't parse pagebin.com HTML page :(", True)
            self.output("Report %s a bug please!" % (link), True)

    def bolt(self, link):
        """Scrape the first <img> src out of a bolt page."""
        html = self.page_grab(link)
        x = re.findall('<img src="(.*?)"', html)
        try:
            imglink = x[0]
        except IndexError:
            self.output(link, True)
            return
        self.Raw(imglink)

    def qkme(self, link):
        """Delegate quickmeme links to the memegrab helper."""
        self.output('Grabbing %s.' % link)
        try:
            memegrab.get_image_qm(memegrab.read_url(link), self.reddit + '/')
        except:
            self.output('Error on %s' % link, True)

    def All(self, link):
        """Download *link* only if its Content-Type says it is an image."""
        # verify it is an html page, not a raw image.
        headers = self.page_grab(link, want_headers=True)
        # FIX: is_image was unbound when no content-type header existed,
        # and the old code tested 'image' against the header NAME (which
        # can never contain it) instead of the header VALUE.
        is_image = False
        for header in headers:
            if header.lower().startswith('content-type'):
                # right header; inspect its value
                is_image = 'image' in headers[header].lower()
                break
        if is_image:
            # means it is most likely an image
            self.Raw(link)
            return
        self.output('Skipping %s since it is not an image.' % (link))
        return

    def setTime(self, time):
        """Record the submission timestamp used for file mtimes."""
        self.time = time

    def setTitle(self, title):
        """Record the submission title, sanitized for use in filenames."""
        self.title = title.replace(' ', '_').replace('/', '_')

    def setThreadInfo(self, name):
        """Record the caller's thread name for log attribution."""
        self.thread_name = name

    def output(self, text, error=False):
        """Log *text* tagged with the owning thread's name."""
        log.log(text, thread_name=self.thread_name, error=error)

    def page_grab(self, link, want_headers=False, data=None, json=False):
        """Fetch *link*; return headers, parsed JSON, or body text."""
        headers = {'User-agent':
                   'subdown2 (https://github.com/legoktm/subdown2)'}
        r = requests.get(link, headers=headers, params=data)
        if want_headers:
            return r.headers
        else:
            if json:
                return r.json()
            return r.text
#!/usr/bin/env python import itertools import math import kivy from kivy.animation import Animation from kivy.app import App from kivy.clock import Clock from kivy.core.window import Window from kivy.config import Config from kivy.lang import Builder from kivy.vector import Vector from kivy.uix.floatlayout import FloatLayout from kivy.uix.label import Label from kivy.uix.widget import Widget from widgets import (AimLine, BlackHole, GoalPoint, MainMenu, Shot, ShotCounter, Stars) from levels import levels kivy.require('1.0.9') # set a limit on the number of shots allowed on screen at once MAX_SHOTS = 20 class FlingBoard(Widget): """ Main application widget, takes all the touches and turns them into fun. """ def __init__(self, *args, **kwargs): super(FlingBoard, self).__init__() Window.clearcolor = (0.1, 0.1, 0.1, 1.) Clock.schedule_interval(self.tick, 1 / 60.) self._keyboard = Window.request_keyboard( None, self) self._keyboard.bind(on_key_down=self._on_keyboard_down) self.aim_line = None self.black_holes = [] self.buttons = [] self.current_level = None self.level_label = None self.goal_points = [] self.shot_counter = None self.shots = [] self.stars = None self.walls = [] self.new_background() # schedule rather than call directly init, so that width and # height are finished initializing Clock.schedule_once(self.display_main_menu) def _on_keyboard_down(self, keyboard, keycode, text, modifiers): try: level_index = int(text) if level_index < len(levels): print "loading level %s..." 
% (level_index) self.load_level(levels[level_index]) except ValueError: pass def add_aim_line(self, aim_line): self.aim_line = (aim_line) self.add_widget(aim_line) def add_black_hole(self, black_hole): self.black_holes.append(black_hole) self.add_widget(black_hole) def add_goal_point(self, goal_point): self.goal_points.append(goal_point) self.add_widget(goal_point) def add_shot(self, shot): self.shots.append(shot) self.add_widget(shot) self.shot_counter.increment() def add_shot_counter(self, shot_counter): self.shot_counter = shot_counter self.add_widget(shot_counter) def add_wall(self, wall): self.walls.append(wall) self.add_widget(wall) def clear_level(self): if hasattr(self, 'menu') and self.menu: self.menu.clear_widgets() self.black_holes = [] self.goal_points = [] self.shots = [] self.walls = [] self.clear_widgets() if self.stars: self.add_widget(self.stars) def display_level_text(self, level_text): self.level_label = Label( text=level_text, font_size=20, width=self.width, halign='center', y=self.height - 200, color=(.8, .8, .8, 0.)) self.add_widget(self.level_label) anim = Animation(color=(1, 1, 1, 1), duration=2.) + \ Animation(color=(1, 1, 1, 1), duration=.5) + \ Animation(color=(1, 1, 1, 0), duration=2.) anim.start(self.level_label) def display_instructions(self, button): instructions_text = """ for each level, the goal is to shoot an asteroid that touches all the blue dots touch and drag to aim an asteroid, release to shoot it you have a limited number of shots per level, you'll need to restart the level if you run out double tap to screen at any time to bring up the menu""" instructions_label = Label( text=instructions_text, font_size=20, width=self.width * .8, x=self.width * .1, y=self.height - 200, color=(.8, .8, .8, 0.)) self.add_widget(instructions_label) anim = Animation(color=(.8, 1, 1, 1), duration=2.) 
anim.start(instructions_label) def display_main_menu(self, *args): self.clear_level() layout_width = self.width * .2 layout_x = self.width * .4 layout_y = self.height * .1 self.menu = MainMenu(self, x=layout_x, y=layout_y, width=layout_width, current_level=self.current_level) self.add_widget(self.menu) def end_game(self, *args): end_game_text = """ thanks for playing you've mastered all the levels we've got for now, but check back soon for more levels and updates""" end_game_label = Label( text=end_game_text, font_size=20, width=self.width * .8, x=self.width * .1, y=self.height - 200, color=(1., 1., 1., 0.)) self.add_widget(end_game_label) anim = Animation(color=(.8, 1, 1, 1), duration=2.) anim.start(end_game_label) def load_level(self, level): self.clear_level() self.new_background() level.load(self) level_index = levels.index(level) level_text = "level %s: %s" % (level_index + 1, level.name) self.current_level = level self.display_level_text(level_text) self.add_shot_counter(ShotCounter( max_shots=level.max_shots, x=30, y=15)) def new_background(self): if self.stars: self.remove_widget(self.stars) self.stars = Stars(2000) self.add_widget(self.stars) def next_level(self, *args): next_level_index = levels.index(self.current_level) + 1 if next_level_index < len(levels): self.load_level(levels[next_level_index]) else: Clock.schedule_once(self.end_game, 2.) 
def on_touch_down(self, touch): if hasattr(self, 'menu') and self.menu.collide_point(*touch.pos): for child in self.menu.children: if child.collide_point(*touch.pos): child.dispatch('on_touch_down', touch) if touch.is_double_tap: self.display_main_menu() if self.current_level and \ len(self.shots) < self.current_level.max_shots: self.add_aim_line(AimLine(start_pt=touch.pos)) def on_touch_move(self, touch): if not self.aim_line: return try: self.aim_line.end_pt = touch.pos except (KeyError), e: pass def on_touch_up(self, touch): if not self.aim_line: return start_v = Vector(self.aim_line.start_pt) end_v = Vector(self.aim_line.end_pt) velocity_v = start_v - end_v l = velocity_v.length() if l == 0.: return velocity_v /= math.sqrt(l) if len(self.shots) > MAX_SHOTS: self.remove_shot(self.shots[0]) if self.current_level and \ len(self.shots) < self.current_level.max_shots: self.add_shot(Shot(velocity=velocity_v, pos=(touch.x, touch.y))) self.remove_aim_line(self.aim_line) def remove_aim_line(self, aim_line): self.remove_widget(aim_line) self.aim_line = None def remove_black_hole(self, black_hole): self.black_holes.remove(black_hole) self.remove_widget(black_hole) def remove_goal_point(self, goal_point): self.goal_points.remove(goal_point) self.remove_widget(goal_point) def remove_shot(self, shot): self.remove_widget(shot) self.shots.remove(shot) def restart_level(self, *args): self.load_level(self.current_level) def start_game(self, button): self.load_level(levels[0]) def tick(self, dt): for shot1, shot2 in itertools.combinations(self.shots, 2): if circles_collide(shot1, shot2): shots_collide(shot1, shot2) shot1.last_bounced = None shot2.last_bounced = None for shot in self.shots: for black_hole in self.black_holes: shot.gravitate_towards(black_hole) if circles_collide(shot, black_hole): self.remove_widget(shot) self.shots.remove(shot) for wall in self.walls: if shot.collide_wall(wall): print "collide" shot.move() for goal_point in self.goal_points: goal_point.move() for 
shot in self.shots: if circles_collide(goal_point, shot): self.remove_widget(goal_point) self.goal_points.remove(goal_point) if len(self.goal_points) == 0: Clock.schedule_once(self.next_level, 1) continue class FlingyApp(App): def build(self): return FlingBoard() def build_config(self, config): config.setdefaults('graphics', { 'width': '800', 'height': '600' }) def circles_collide(widget_1, widget_2): widget_distance = Vector(widget_1.pos).distance(Vector(widget_2.pos)) radial_distance = widget_1.r + widget_2.r return widget_distance < radial_distance def shots_collide(shot1, shot2): p1_v = Vector(shot1.pos) p2_v = Vector(shot2.pos) shot1.velocity = (p1_v - p2_v).normalize() / Vector(shot1.velocity).length() shot2.velocity = (p2_v - p1_v).normalize() / Vector(shot2.velocity).length() if __name__ == '__main__': FlingyApp().run()
# Copyright Contributors to the Pyro project. # SPDX-License-Identifier: Apache-2.0 # Adapted from pyro.infer.autoguide from abc import ABC, abstractmethod from contextlib import ExitStack from functools import partial import warnings import numpy as np import jax from jax import grad, hessian, lax, random from jax.tree_util import tree_map from numpyro.util import _versiontuple, find_stack_level if _versiontuple(jax.__version__) >= (0, 2, 25): from jax.example_libraries import stax else: from jax.experimental import stax import jax.numpy as jnp import numpyro from numpyro import handlers import numpyro.distributions as dist from numpyro.distributions import constraints from numpyro.distributions.flows import ( BlockNeuralAutoregressiveTransform, InverseAutoregressiveTransform, ) from numpyro.distributions.transforms import ( AffineTransform, ComposeTransform, IndependentTransform, LowerCholeskyAffine, PermuteTransform, UnpackTransform, biject_to, ) from numpyro.distributions.util import ( cholesky_of_inverse, periodic_repeat, sum_rightmost, ) from numpyro.infer.elbo import Trace_ELBO from numpyro.infer.initialization import init_to_median, init_to_uniform from numpyro.infer.util import helpful_support_errors, initialize_model from numpyro.nn.auto_reg_nn import AutoregressiveNN from numpyro.nn.block_neural_arn import BlockNeuralAutoregressiveNN from numpyro.util import not_jax_tracer __all__ = [ "AutoContinuous", "AutoGuide", "AutoDAIS", "AutoDiagonalNormal", "AutoLaplaceApproximation", "AutoLowRankMultivariateNormal", "AutoNormal", "AutoMultivariateNormal", "AutoBNAFNormal", "AutoIAFNormal", "AutoDelta", ] class AutoGuide(ABC): """ Base class for automatic guides. Derived classes must implement the :meth:`__call__` method. :param callable model: a pyro model :param str prefix: a prefix that will be prefixed to all param internal sites :param callable init_loc_fn: A per-site initialization function. See :ref:`init_strategy` section for available functions. 
:param callable create_plates: An optional function inputing the same ``*args,**kwargs`` as ``model()`` and returning a :class:`numpyro.plate` or iterable of plates. Plates not returned will be created automatically as usual. This is useful for data subsampling. """ def __init__( self, model, *, prefix="auto", init_loc_fn=init_to_uniform, create_plates=None ): self.model = model self.prefix = prefix self.init_loc_fn = init_loc_fn self.create_plates = create_plates self.prototype_trace = None self._prototype_frames = {} self._prototype_frame_full_sizes = {} def _create_plates(self, *args, **kwargs): if self.create_plates is None: self.plates = {} else: plates = self.create_plates(*args, **kwargs) if isinstance(plates, numpyro.plate): plates = [plates] assert all( isinstance(p, numpyro.plate) for p in plates ), "create_plates() returned a non-plate" self.plates = {p.name: p for p in plates} for name, frame in sorted(self._prototype_frames.items()): if name not in self.plates: full_size = self._prototype_frame_full_sizes[name] self.plates[name] = numpyro.plate( name, full_size, dim=frame.dim, subsample_size=frame.size ) return self.plates def __getstate__(self): state = self.__dict__.copy() state.pop("plates", None) return state @abstractmethod def __call__(self, *args, **kwargs): """ A guide with the same ``*args, **kwargs`` as the base ``model``. :return: A dict mapping sample site name to sampled value. :rtype: dict """ raise NotImplementedError @abstractmethod def sample_posterior(self, rng_key, params, sample_shape=()): """ Generate samples from the approximate posterior over the latent sites in the model. :param jax.random.PRNGKey rng_key: random key to be used draw samples. :param dict params: Current parameters of model and autoguide. The parameters can be obtained using :meth:`~numpyro.infer.svi.SVI.get_params` method from :class:`~numpyro.infer.svi.SVI`. :param tuple sample_shape: sample shape of each latent site, defaults to (). 
:return: a dict containing samples drawn the this guide. :rtype: dict """ raise NotImplementedError def _setup_prototype(self, *args, **kwargs): rng_key = numpyro.prng_key() with handlers.block(): ( init_params, self._potential_fn_gen, postprocess_fn_gen, self.prototype_trace, ) = initialize_model( rng_key, self.model, init_strategy=self.init_loc_fn, dynamic_args=True, model_args=args, model_kwargs=kwargs, ) self._potential_fn = self._potential_fn_gen(*args, **kwargs) postprocess_fn = postprocess_fn_gen(*args, **kwargs) # We apply a fixed seed just in case postprocess_fn requires # a random key to generate subsample indices. It does not matter # because we only collect deterministic sites. self._postprocess_fn = handlers.seed(postprocess_fn, rng_seed=0) self._init_locs = init_params[0] self._prototype_frames = {} self._prototype_plate_sizes = {} for name, site in self.prototype_trace.items(): if site["type"] == "sample": if not site["is_observed"] and site["fn"].support.is_discrete: # raise support errors early for discrete sites with helpful_support_errors(site): biject_to(site["fn"].support) for frame in site["cond_indep_stack"]: if frame.name in self._prototype_frames: assert ( frame == self._prototype_frames[frame.name] ), f"The plate {frame.name} has inconsistent dim or size. Please check your model again." else: self._prototype_frames[frame.name] = frame elif site["type"] == "plate": self._prototype_frame_full_sizes[name] = site["args"][0] def median(self, params): """ Returns the posterior median value of each latent variable. :param dict params: A dict containing parameter values. The parameters can be obtained using :meth:`~numpyro.infer.svi.SVI.get_params` method from :class:`~numpyro.infer.svi.SVI`. :return: A dict mapping sample site name to median value. :rtype: dict """ raise NotImplementedError def quantiles(self, params, quantiles): """ Returns posterior quantiles each latent variable. 
Example:: print(guide.quantiles(params, [0.05, 0.5, 0.95])) :param dict params: A dict containing parameter values. The parameters can be obtained using :meth:`~numpyro.infer.svi.SVI.get_params` method from :class:`~numpyro.infer.svi.SVI`. :param list quantiles: A list of requested quantiles between 0 and 1. :return: A dict mapping sample site name to an array of quantile values. :rtype: dict """ raise NotImplementedError class AutoNormal(AutoGuide): """ This implementation of :class:`AutoGuide` uses Normal distributions to construct a guide over the entire latent space. The guide does not depend on the model's ``*args, **kwargs``. This should be equivalent to :class:`AutoDiagonalNormal` , but with more convenient site names and with better support for mean field ELBO. Usage:: guide = AutoNormal(model) svi = SVI(model, guide, ...) :param callable model: A NumPyro model. :param str prefix: a prefix that will be prefixed to all param internal sites. :param callable init_loc_fn: A per-site initialization function. See :ref:`init_strategy` section for available functions. :param float init_scale: Initial scale for the standard deviation of each (unconstrained transformed) latent variable. :param callable create_plates: An optional function inputing the same ``*args,**kwargs`` as ``model()`` and returning a :class:`numpyro.plate` or iterable of plates. Plates not returned will be created automatically as usual. This is useful for data subsampling. 
""" scale_constraint = constraints.softplus_positive def __init__( self, model, *, prefix="auto", init_loc_fn=init_to_uniform, init_scale=0.1, create_plates=None, ): self._init_scale = init_scale self._event_dims = {} super().__init__( model, prefix=prefix, init_loc_fn=init_loc_fn, create_plates=create_plates ) def _setup_prototype(self, *args, **kwargs): super()._setup_prototype(*args, **kwargs) for name, site in self.prototype_trace.items(): if site["type"] != "sample" or site["is_observed"]: continue event_dim = ( site["fn"].event_dim + jnp.ndim(self._init_locs[name]) - jnp.ndim(site["value"]) ) self._event_dims[name] = event_dim # If subsampling, repeat init_value to full size. for frame in site["cond_indep_stack"]: full_size = self._prototype_frame_full_sizes[frame.name] if full_size != frame.size: dim = frame.dim - event_dim self._init_locs[name] = periodic_repeat( self._init_locs[name], full_size, dim ) def __call__(self, *args, **kwargs): if self.prototype_trace is None: # run model to inspect the model structure self._setup_prototype(*args, **kwargs) plates = self._create_plates(*args, **kwargs) result = {} for name, site in self.prototype_trace.items(): if site["type"] != "sample" or site["is_observed"]: continue event_dim = self._event_dims[name] init_loc = self._init_locs[name] with ExitStack() as stack: for frame in site["cond_indep_stack"]: stack.enter_context(plates[frame.name]) site_loc = numpyro.param( "{}_{}_loc".format(name, self.prefix), init_loc, event_dim=event_dim ) site_scale = numpyro.param( "{}_{}_scale".format(name, self.prefix), jnp.full(jnp.shape(init_loc), self._init_scale), constraint=self.scale_constraint, event_dim=event_dim, ) site_fn = dist.Normal(site_loc, site_scale).to_event(event_dim) if site["fn"].support is constraints.real or ( isinstance(site["fn"].support, constraints.independent) and site["fn"].support.base_constraint is constraints.real ): result[name] = numpyro.sample(name, site_fn) else: with 
helpful_support_errors(site): transform = biject_to(site["fn"].support) guide_dist = dist.TransformedDistribution(site_fn, transform) result[name] = numpyro.sample(name, guide_dist) return result def _constrain(self, latent_samples): name = list(latent_samples)[0] sample_shape = jnp.shape(latent_samples[name])[ : jnp.ndim(latent_samples[name]) - jnp.ndim(self._init_locs[name]) ] if sample_shape: flatten_samples = tree_map( lambda x: jnp.reshape(x, (-1,) + jnp.shape(x)[len(sample_shape) :]), latent_samples, ) contrained_samples = lax.map(self._postprocess_fn, flatten_samples) return tree_map( lambda x: jnp.reshape(x, sample_shape + jnp.shape(x)[1:]), contrained_samples, ) else: return self._postprocess_fn(latent_samples) def sample_posterior(self, rng_key, params, sample_shape=()): locs = {k: params["{}_{}_loc".format(k, self.prefix)] for k in self._init_locs} scales = {k: params["{}_{}_scale".format(k, self.prefix)] for k in locs} with handlers.seed(rng_seed=rng_key): latent_samples = {} for k in locs: latent_samples[k] = numpyro.sample( k, dist.Normal(locs[k], scales[k]).expand_by(sample_shape) ) return self._constrain(latent_samples) def median(self, params): locs = { k: params["{}_{}_loc".format(k, self.prefix)] for k, v in self._init_locs.items() } return self._constrain(locs) def quantiles(self, params, quantiles): quantiles = jnp.array(quantiles) locs = {k: params["{}_{}_loc".format(k, self.prefix)] for k in self._init_locs} scales = {k: params["{}_{}_scale".format(k, self.prefix)] for k in locs} latent = { k: dist.Normal(locs[k], scales[k]).icdf( quantiles.reshape((-1,) + (1,) * jnp.ndim(locs[k])) ) for k in locs } return self._constrain(latent) class AutoDelta(AutoGuide): """ This implementation of :class:`AutoGuide` uses Delta distributions to construct a MAP guide over the entire latent space. The guide does not depend on the model's ``*args, **kwargs``. .. note:: This class does MAP inference in constrained space. 
Usage:: guide = AutoDelta(model) svi = SVI(model, guide, ...) :param callable model: A NumPyro model. :param str prefix: a prefix that will be prefixed to all param internal sites. :param callable init_loc_fn: A per-site initialization function. See :ref:`init_strategy` section for available functions. :param callable create_plates: An optional function inputing the same ``*args,**kwargs`` as ``model()`` and returning a :class:`numpyro.plate` or iterable of plates. Plates not returned will be created automatically as usual. This is useful for data subsampling. """ def __init__( self, model, *, prefix="auto", init_loc_fn=init_to_median, create_plates=None ): self._event_dims = {} super().__init__( model, prefix=prefix, init_loc_fn=init_loc_fn, create_plates=create_plates ) def _setup_prototype(self, *args, **kwargs): super()._setup_prototype(*args, **kwargs) with numpyro.handlers.block(): self._init_locs = { k: v for k, v in self._postprocess_fn(self._init_locs).items() if k in self._init_locs } for name, site in self.prototype_trace.items(): if site["type"] != "sample" or site["is_observed"]: continue event_dim = site["fn"].event_dim self._event_dims[name] = event_dim # If subsampling, repeat init_value to full size. 
for frame in site["cond_indep_stack"]: full_size = self._prototype_frame_full_sizes[frame.name] if full_size != frame.size: dim = frame.dim - event_dim self._init_locs[name] = periodic_repeat( self._init_locs[name], full_size, dim ) def __call__(self, *args, **kwargs): if self.prototype_trace is None: # run model to inspect the model structure self._setup_prototype(*args, **kwargs) plates = self._create_plates(*args, **kwargs) result = {} for name, site in self.prototype_trace.items(): if site["type"] != "sample" or site["is_observed"]: continue event_dim = self._event_dims[name] init_loc = self._init_locs[name] with ExitStack() as stack: for frame in site["cond_indep_stack"]: stack.enter_context(plates[frame.name]) site_loc = numpyro.param( "{}_{}_loc".format(name, self.prefix), init_loc, constraint=site["fn"].support, event_dim=event_dim, ) site_fn = dist.Delta(site_loc).to_event(event_dim) result[name] = numpyro.sample(name, site_fn) return result def sample_posterior(self, rng_key, params, sample_shape=()): locs = {k: params["{}_{}_loc".format(k, self.prefix)] for k in self._init_locs} latent_samples = { k: jnp.broadcast_to(v, sample_shape + jnp.shape(v)) for k, v in locs.items() } return latent_samples def median(self, params): locs = {k: params["{}_{}_loc".format(k, self.prefix)] for k in self._init_locs} return locs def _unravel_dict(x_flat, shape_dict): """Return `x` from the flatten version `x_flat`. Shape information of each item in `x` is defined in `shape_dict`. 
""" assert jnp.ndim(x_flat) == 1 assert isinstance(shape_dict, dict) x = {} curr_pos = next_pos = 0 for name, shape in shape_dict.items(): next_pos = curr_pos + int(np.prod(shape)) x[name] = x_flat[curr_pos:next_pos].reshape(shape) curr_pos = next_pos assert next_pos == x_flat.shape[0] return x def _ravel_dict(x): """Return the flatten version of `x` and shapes of each item in `x`.""" assert isinstance(x, dict) shape_dict = {} x_flat = [] for name, value in x.items(): shape_dict[name] = jnp.shape(value) x_flat.append(jnp.reshape(value, -1)) x_flat = jnp.concatenate(x_flat) if x_flat else jnp.zeros((0,)) return x_flat, shape_dict class AutoContinuous(AutoGuide): """ Base class for implementations of continuous-valued Automatic Differentiation Variational Inference [1]. Each derived class implements its own :meth:`_get_posterior` method. Assumes model structure and latent dimension are fixed, and all latent variables are continuous. **Reference:** 1. *Automatic Differentiation Variational Inference*, Alp Kucukelbir, Dustin Tran, Rajesh Ranganath, Andrew Gelman, David M. Blei :param callable model: A NumPyro model. :param str prefix: a prefix that will be prefixed to all param internal sites. :param callable init_loc_fn: A per-site initialization function. See :ref:`init_strategy` section for available functions. 
""" def _setup_prototype(self, *args, **kwargs): super()._setup_prototype(*args, **kwargs) self._init_latent, shape_dict = _ravel_dict(self._init_locs) unpack_latent = partial(_unravel_dict, shape_dict=shape_dict) # this is to match the behavior of Pyro, where we can apply # unpack_latent for a batch of samples self._unpack_latent = UnpackTransform(unpack_latent) self.latent_dim = jnp.size(self._init_latent) if self.latent_dim == 0: raise RuntimeError( "{} found no latent variables; Use an empty guide instead".format( type(self).__name__ ) ) for site in self.prototype_trace.values(): if site["type"] == "sample" and not site["is_observed"]: for frame in site["cond_indep_stack"]: if frame.size != self._prototype_frame_full_sizes[frame.name]: raise ValueError( "AutoContinuous guide does not support" " local latent variables." ) @abstractmethod def _get_posterior(self): raise NotImplementedError def _sample_latent(self, *args, **kwargs): sample_shape = kwargs.pop("sample_shape", ()) posterior = self._get_posterior() return numpyro.sample( "_{}_latent".format(self.prefix), posterior.expand_by(sample_shape), infer={"is_auxiliary": True}, ) def __call__(self, *args, **kwargs): if self.prototype_trace is None: # run model to inspect the model structure self._setup_prototype(*args, **kwargs) latent = self._sample_latent(*args, **kwargs) # unpack continuous latent samples result = {} for name, unconstrained_value in self._unpack_latent(latent).items(): site = self.prototype_trace[name] with helpful_support_errors(site): transform = biject_to(site["fn"].support) value = transform(unconstrained_value) event_ndim = site["fn"].event_dim if numpyro.get_mask() is False: log_density = 0.0 else: log_density = -transform.log_abs_det_jacobian( unconstrained_value, value ) log_density = sum_rightmost( log_density, jnp.ndim(log_density) - jnp.ndim(value) + event_ndim ) delta_dist = dist.Delta( value, log_density=log_density, event_dim=event_ndim ) result[name] = numpyro.sample(name, 
delta_dist) return result def _unpack_and_constrain(self, latent_sample, params): def unpack_single_latent(latent): unpacked_samples = self._unpack_latent(latent) # XXX: we need to add param here to be able to replay model unpacked_samples.update( { k: v for k, v in params.items() if k in self.prototype_trace and self.prototype_trace[k]["type"] == "param" } ) samples = self._postprocess_fn(unpacked_samples) # filter out param sites return { k: v for k, v in samples.items() if k in self.prototype_trace and self.prototype_trace[k]["type"] != "param" } sample_shape = jnp.shape(latent_sample)[:-1] if sample_shape: latent_sample = jnp.reshape( latent_sample, (-1, jnp.shape(latent_sample)[-1]) ) unpacked_samples = lax.map(unpack_single_latent, latent_sample) return tree_map( lambda x: jnp.reshape(x, sample_shape + jnp.shape(x)[1:]), unpacked_samples, ) else: return unpack_single_latent(latent_sample) def get_base_dist(self): """ Returns the base distribution of the posterior when reparameterized as a :class:`~numpyro.distributions.distribution.TransformedDistribution`. This should not depend on the model's `*args, **kwargs`. """ raise NotImplementedError def get_transform(self, params): """ Returns the transformation learned by the guide to generate samples from the unconstrained (approximate) posterior. :param dict params: Current parameters of model and autoguide. The parameters can be obtained using :meth:`~numpyro.infer.svi.SVI.get_params` method from :class:`~numpyro.infer.svi.SVI`. :return: the transform of posterior distribution :rtype: :class:`~numpyro.distributions.transforms.Transform` """ posterior = handlers.substitute(self._get_posterior, params)() assert isinstance( posterior, dist.TransformedDistribution ), "posterior is not a transformed distribution" if len(posterior.transforms) > 0: return ComposeTransform(posterior.transforms) else: return posterior.transforms[0] def get_posterior(self, params): """ Returns the posterior distribution. 
:param dict params: Current parameters of model and autoguide. The parameters can be obtained using :meth:`~numpyro.infer.svi.SVI.get_params` method from :class:`~numpyro.infer.svi.SVI`. """ base_dist = self.get_base_dist() transform = self.get_transform(params) return dist.TransformedDistribution(base_dist, transform) def sample_posterior(self, rng_key, params, sample_shape=()): latent_sample = handlers.substitute( handlers.seed(self._sample_latent, rng_key), params )(sample_shape=sample_shape) return self._unpack_and_constrain(latent_sample, params) class AutoDAIS(AutoContinuous): """ This implementation of :class:`AutoDAIS` uses Differentiable Annealed Importance Sampling (DAIS) [1, 2] to construct a guide over the entire latent space. Samples from the variational distribution (i.e. guide) are generated using a combination of (uncorrected) Hamiltonian Monte Carlo and Annealed Importance Sampling. The same algorithm is called Uncorrected Hamiltonian Annealing in [1]. Note that AutoDAIS cannot be used in conjuction with data subsampling. **Reference:** 1. *MCMC Variational Inference via Uncorrected Hamiltonian Annealing*, Tomas Geffner, Justin Domke 2. *Differentiable Annealed Importance Sampling and the Perils of Gradient Noise*, Guodong Zhang, Kyle Hsu, Jianing Li, Chelsea Finn, Roger Grosse Usage:: guide = AutoDAIS(model) svi = SVI(model, guide, ...) :param callable model: A NumPyro model. :param str prefix: A prefix that will be prefixed to all param internal sites. :param int K: A positive integer that controls the number of HMC steps used. Defaults to 4. :param str base_dist: Controls whether the base Normal variational distribution is parameterized by a "diagonal" covariance matrix or a full-rank covariance matrix parameterized by a lower-triangular "cholesky" factor. Defaults to "diagonal". :param float eta_init: The initial value of the step size used in HMC. Defaults to 0.01. :param float eta_max: The maximum value of the learnable step size used in HMC. 
Defaults to 0.1. :param float gamma_init: The initial value of the learnable damping factor used during partial momentum refreshments in HMC. Defaults to 0.9. :param callable init_loc_fn: A per-site initialization function. See :ref:`init_strategy` section for available functions. :param float init_scale: Initial scale for the standard deviation of the base variational distribution for each (unconstrained transformed) latent variable. Defaults to 0.1. """ def __init__( self, model, *, K=4, base_dist="diagonal", eta_init=0.01, eta_max=0.1, gamma_init=0.9, prefix="auto", init_loc_fn=init_to_uniform, init_scale=0.1, ): if K < 1: raise ValueError("K must satisfy K >= 1 (got K = {})".format(K)) if base_dist not in ["diagonal", "cholesky"]: raise ValueError('base_dist must be one of "diagonal" or "cholesky".') if eta_init <= 0.0 or eta_init >= eta_max: raise ValueError( "eta_init must be positive and satisfy eta_init < eta_max." ) if eta_max <= 0.0: raise ValueError("eta_max must be positive.") if gamma_init <= 0.0 or gamma_init >= 1.0: raise ValueError("gamma_init must be in the open interval (0, 1).") if init_scale <= 0.0: raise ValueError("init_scale must be positive.") self.eta_init = eta_init self.eta_max = eta_max self.gamma_init = gamma_init self.K = K self.base_dist = base_dist self._init_scale = init_scale super().__init__(model, prefix=prefix, init_loc_fn=init_loc_fn) def _setup_prototype(self, *args, **kwargs): super()._setup_prototype(*args, **kwargs) for name, site in self.prototype_trace.items(): if ( site["type"] == "plate" and isinstance(site["args"][1], int) and site["args"][0] > site["args"][1] ): raise NotImplementedError( "AutoDAIS cannot be used in conjuction with data subsampling." 
) def _get_posterior(self): raise NotImplementedError def _sample_latent(self, *args, **kwargs): def log_density(x): x_unpack = self._unpack_latent(x) with numpyro.handlers.block(): return -self._potential_fn(x_unpack) eta0 = numpyro.param( "{}_eta0".format(self.prefix), self.eta_init, constraint=constraints.interval(0, self.eta_max), ) eta_coeff = numpyro.param("{}_eta_coeff".format(self.prefix), 0.00) gamma = numpyro.param( "{}_gamma".format(self.prefix), self.gamma_init, constraint=constraints.interval(0, 1), ) betas = numpyro.param( "{}_beta_increments".format(self.prefix), jnp.ones(self.K), constraint=constraints.positive, ) betas = jnp.cumsum(betas) betas = betas / betas[-1] # K-dimensional with betas[-1] = 1 mass_matrix = numpyro.param( "{}_mass_matrix".format(self.prefix), jnp.ones(self.latent_dim), constraint=constraints.positive, ) inv_mass_matrix = 0.5 / mass_matrix init_z_loc = numpyro.param( "{}_z_0_loc".format(self.prefix), self._init_latent, ) if self.base_dist == "diagonal": init_z_scale = numpyro.param( "{}_z_0_scale".format(self.prefix), jnp.full(self.latent_dim, self._init_scale), constraint=constraints.positive, ) base_z_dist = dist.Normal(init_z_loc, init_z_scale).to_event() elif self.base_dist == "cholesky": scale_tril = numpyro.param( "{}_z_0_scale_tril".format(self.prefix), jnp.identity(self.latent_dim) * self._init_scale, constraint=constraints.scaled_unit_lower_cholesky, ) base_z_dist = dist.MultivariateNormal(init_z_loc, scale_tril=scale_tril) z_0 = numpyro.sample( "{}_z_0".format(self.prefix), base_z_dist, infer={"is_auxiliary": True}, ) momentum_dist = dist.Normal(0, mass_matrix).to_event() eps = numpyro.sample( "{}_momentum".format(self.prefix), momentum_dist.expand((self.K,)).to_event().mask(False), infer={"is_auxiliary": True}, ) def scan_body(carry, eps_beta): eps, beta = eps_beta eta = eta0 + eta_coeff * beta eta = jnp.clip(eta, a_min=0.0, a_max=self.eta_max) z_prev, v_prev, log_factor = carry z_half = z_prev + v_prev * eta * 
inv_mass_matrix q_grad = (1.0 - beta) * grad(base_z_dist.log_prob)(z_half) p_grad = beta * grad(log_density)(z_half) v_hat = v_prev + eta * (q_grad + p_grad) z = z_half + v_hat * eta * inv_mass_matrix v = gamma * v_hat + jnp.sqrt(1 - gamma**2) * eps delta_ke = momentum_dist.log_prob(v_prev) - momentum_dist.log_prob(v_hat) log_factor = log_factor + delta_ke return (z, v, log_factor), None v_0 = eps[-1] # note the return value of scan doesn't depend on eps[-1] (z, _, log_factor), _ = jax.lax.scan(scan_body, (z_0, v_0, 0.0), (eps, betas)) numpyro.factor("{}_factor".format(self.prefix), log_factor) return z def sample_posterior(self, rng_key, params, sample_shape=()): def _single_sample(_rng_key): latent_sample = handlers.substitute( handlers.seed(self._sample_latent, _rng_key), params )(sample_shape=()) return self._unpack_and_constrain(latent_sample, params) if sample_shape: rng_key = random.split(rng_key, int(np.prod(sample_shape))) samples = lax.map(_single_sample, rng_key) return tree_map( lambda x: jnp.reshape(x, sample_shape + jnp.shape(x)[1:]), samples, ) else: return _single_sample(rng_key) class AutoDiagonalNormal(AutoContinuous): """ This implementation of :class:`AutoContinuous` uses a Normal distribution with a diagonal covariance matrix to construct a guide over the entire latent space. The guide does not depend on the model's ``*args, **kwargs``. Usage:: guide = AutoDiagonalNormal(model, ...) svi = SVI(model, guide, ...) """ scale_constraint = constraints.softplus_positive def __init__( self, model, *, prefix="auto", init_loc_fn=init_to_uniform, init_scale=0.1, init_strategy=None, ): if init_strategy is not None: init_loc_fn = init_strategy warnings.warn( "`init_strategy` argument has been deprecated in favor of `init_loc_fn`" " argument.", FutureWarning, stacklevel=find_stack_level(), ) if init_scale <= 0: raise ValueError("Expected init_scale > 0. 
but got {}".format(init_scale)) self._init_scale = init_scale super().__init__(model, prefix=prefix, init_loc_fn=init_loc_fn) def _get_posterior(self): loc = numpyro.param("{}_loc".format(self.prefix), self._init_latent) scale = numpyro.param( "{}_scale".format(self.prefix), jnp.full(self.latent_dim, self._init_scale), constraint=self.scale_constraint, ) return dist.Normal(loc, scale) def get_base_dist(self): return dist.Normal(jnp.zeros(self.latent_dim), 1).to_event(1) def get_transform(self, params): loc = params["{}_loc".format(self.prefix)] scale = params["{}_scale".format(self.prefix)] return IndependentTransform(AffineTransform(loc, scale), 1) def get_posterior(self, params): """ Returns a diagonal Normal posterior distribution. """ transform = self.get_transform(params).base_transform return dist.Normal(transform.loc, transform.scale) def median(self, params): loc = params["{}_loc".format(self.prefix)] return self._unpack_and_constrain(loc, params) def quantiles(self, params, quantiles): quantiles = jnp.array(quantiles)[..., None] latent = self.get_posterior(params).icdf(quantiles) return self._unpack_and_constrain(latent, params) class AutoMultivariateNormal(AutoContinuous): """ This implementation of :class:`AutoContinuous` uses a MultivariateNormal distribution to construct a guide over the entire latent space. The guide does not depend on the model's ``*args, **kwargs``. Usage:: guide = AutoMultivariateNormal(model, ...) svi = SVI(model, guide, ...) """ scale_tril_constraint = constraints.scaled_unit_lower_cholesky def __init__( self, model, *, prefix="auto", init_loc_fn=init_to_uniform, init_scale=0.1, init_strategy=None, ): if init_strategy is not None: init_loc_fn = init_strategy warnings.warn( "`init_strategy` argument has been deprecated in favor of `init_loc_fn`" " argument.", FutureWarning, stacklevel=find_stack_level(), ) if init_scale <= 0: raise ValueError("Expected init_scale > 0. 
but got {}".format(init_scale)) self._init_scale = init_scale super().__init__(model, prefix=prefix, init_loc_fn=init_loc_fn) def _get_posterior(self): loc = numpyro.param("{}_loc".format(self.prefix), self._init_latent) scale_tril = numpyro.param( "{}_scale_tril".format(self.prefix), jnp.identity(self.latent_dim) * self._init_scale, constraint=self.scale_tril_constraint, ) return dist.MultivariateNormal(loc, scale_tril=scale_tril) def get_base_dist(self): return dist.Normal(jnp.zeros(self.latent_dim), 1).to_event(1) def get_transform(self, params): loc = params["{}_loc".format(self.prefix)] scale_tril = params["{}_scale_tril".format(self.prefix)] return LowerCholeskyAffine(loc, scale_tril) def get_posterior(self, params): """ Returns a multivariate Normal posterior distribution. """ transform = self.get_transform(params) return dist.MultivariateNormal(transform.loc, transform.scale_tril) def median(self, params): loc = params["{}_loc".format(self.prefix)] return self._unpack_and_constrain(loc, params) def quantiles(self, params, quantiles): transform = self.get_transform(params) quantiles = jnp.array(quantiles)[..., None] latent = dist.Normal(transform.loc, jnp.diagonal(transform.scale_tril)).icdf( quantiles ) return self._unpack_and_constrain(latent, params) class AutoLowRankMultivariateNormal(AutoContinuous): """ This implementation of :class:`AutoContinuous` uses a LowRankMultivariateNormal distribution to construct a guide over the entire latent space. The guide does not depend on the model's ``*args, **kwargs``. Usage:: guide = AutoLowRankMultivariateNormal(model, rank=2, ...) svi = SVI(model, guide, ...) 
""" scale_constraint = constraints.softplus_positive def __init__( self, model, *, prefix="auto", init_loc_fn=init_to_uniform, init_scale=0.1, rank=None, init_strategy=None, ): if init_strategy is not None: init_loc_fn = init_strategy warnings.warn( "`init_strategy` argument has been deprecated in favor of `init_loc_fn`" " argument.", FutureWarning, stacklevel=find_stack_level(), ) if init_scale <= 0: raise ValueError("Expected init_scale > 0. but got {}".format(init_scale)) self._init_scale = init_scale self.rank = rank super(AutoLowRankMultivariateNormal, self).__init__( model, prefix=prefix, init_loc_fn=init_loc_fn ) def _get_posterior(self, *args, **kwargs): rank = int(round(self.latent_dim**0.5)) if self.rank is None else self.rank loc = numpyro.param("{}_loc".format(self.prefix), self._init_latent) cov_factor = numpyro.param( "{}_cov_factor".format(self.prefix), jnp.zeros((self.latent_dim, rank)) ) scale = numpyro.param( "{}_scale".format(self.prefix), jnp.full(self.latent_dim, self._init_scale), constraint=self.scale_constraint, ) cov_diag = scale * scale cov_factor = cov_factor * scale[..., None] return dist.LowRankMultivariateNormal(loc, cov_factor, cov_diag) def get_base_dist(self): return dist.Normal(jnp.zeros(self.latent_dim), 1).to_event(1) def get_transform(self, params): posterior = self.get_posterior(params) return LowerCholeskyAffine(posterior.loc, posterior.scale_tril) def get_posterior(self, params): """ Returns a lowrank multivariate Normal posterior distribution. 
""" loc = params["{}_loc".format(self.prefix)] cov_factor = params["{}_cov_factor".format(self.prefix)] scale = params["{}_scale".format(self.prefix)] cov_diag = scale * scale cov_factor = cov_factor * scale[..., None] return dist.LowRankMultivariateNormal(loc, cov_factor, cov_diag) def median(self, params): loc = params["{}_loc".format(self.prefix)] return self._unpack_and_constrain(loc, params) def quantiles(self, params, quantiles): loc = params[f"{self.prefix}_loc"] cov_factor = params[f"{self.prefix}_cov_factor"] scale = params[f"{self.prefix}_scale"] scale = scale * jnp.sqrt(jnp.square(cov_factor).sum(-1) + 1) quantiles = jnp.array(quantiles)[..., None] latent = dist.Normal(loc, scale).icdf(quantiles) return self._unpack_and_constrain(latent, params) class AutoLaplaceApproximation(AutoContinuous): r""" Laplace approximation (quadratic approximation) approximates the posterior :math:`\log p(z | x)` by a multivariate normal distribution in the unconstrained space. Under the hood, it uses Delta distributions to construct a MAP guide over the entire (unconstrained) latent space. Its covariance is given by the inverse of the hessian of :math:`-\log p(x, z)` at the MAP point of `z`. Usage:: guide = AutoLaplaceApproximation(model, ...) svi = SVI(model, guide, ...) :param callable hessian_fn: EXPERIMENTAL a function that takes a function `f` and a vector `x`and returns the hessian of `f` at `x`. By default, we use ``lambda f, x: jax.hessian(f)(x)``. Other alternatives can be ``lambda f, x: jax.jacobian(jax.jacobian(f))(x)`` or ``lambda f, x: jax.hessian(f)(x) + 1e-3 * jnp.eye(x.shape[0])``. The later example is helpful when the hessian of `f` at `x` is not positive definite. Note that the output hessian is the precision matrix of the laplace approximation. 
""" def __init__( self, model, *, prefix="auto", init_loc_fn=init_to_uniform, create_plates=None, hessian_fn=None, ): super().__init__( model, prefix=prefix, init_loc_fn=init_loc_fn, create_plates=create_plates ) self._hessian_fn = ( hessian_fn if hessian_fn is not None else (lambda f, x: hessian(f)(x)) ) def _setup_prototype(self, *args, **kwargs): super(AutoLaplaceApproximation, self)._setup_prototype(*args, **kwargs) def loss_fn(params): # we are doing maximum likelihood, so only require `num_particles=1` and an arbitrary rng_key. return Trace_ELBO().loss( random.PRNGKey(0), params, self.model, self, *args, **kwargs ) self._loss_fn = loss_fn def _get_posterior(self, *args, **kwargs): # sample from Delta guide loc = numpyro.param("{}_loc".format(self.prefix), self._init_latent) return dist.Delta(loc, event_dim=1) def get_base_dist(self): return dist.Normal(jnp.zeros(self.latent_dim), 1).to_event(1) def get_transform(self, params): def loss_fn(z): params1 = params.copy() params1["{}_loc".format(self.prefix)] = z return self._loss_fn(params1) loc = params["{}_loc".format(self.prefix)] precision = self._hessian_fn(loss_fn, loc) scale_tril = cholesky_of_inverse(precision) if not_jax_tracer(scale_tril): if np.any(np.isnan(scale_tril)): warnings.warn( "Hessian of log posterior at the MAP point is singular. Posterior" " samples from AutoLaplaceApproxmiation will be constant (equal to" " the MAP point). Please consider using an AutoNormal guide.", stacklevel=find_stack_level(), ) scale_tril = jnp.where(jnp.isnan(scale_tril), 0.0, scale_tril) return LowerCholeskyAffine(loc, scale_tril) def get_posterior(self, params): """ Returns a multivariate Normal posterior distribution. 
""" transform = self.get_transform(params) return dist.MultivariateNormal(transform.loc, scale_tril=transform.scale_tril) def sample_posterior(self, rng_key, params, sample_shape=()): latent_sample = self.get_posterior(params).sample(rng_key, sample_shape) return self._unpack_and_constrain(latent_sample, params) def median(self, params): loc = params["{}_loc".format(self.prefix)] return self._unpack_and_constrain(loc, params) def quantiles(self, params, quantiles): transform = self.get_transform(params) quantiles = jnp.array(quantiles)[..., None] latent = dist.Normal(transform.loc, jnp.diagonal(transform.scale_tril)).icdf( quantiles ) return self._unpack_and_constrain(latent, params) class AutoIAFNormal(AutoContinuous): """ This implementation of :class:`AutoContinuous` uses a Diagonal Normal distribution transformed via a :class:`~numpyro.distributions.flows.InverseAutoregressiveTransform` to construct a guide over the entire latent space. The guide does not depend on the model's ``*args, **kwargs``. Usage:: guide = AutoIAFNormal(model, hidden_dims=[20], skip_connections=True, ...) svi = SVI(model, guide, ...) :param callable model: a generative model. :param str prefix: a prefix that will be prefixed to all param internal sites. :param callable init_loc_fn: A per-site initialization function. :param int num_flows: the number of flows to be used, defaults to 3. :param list hidden_dims: the dimensionality of the hidden units per layer. Defaults to ``[latent_dim, latent_dim]``. :param bool skip_connections: whether to add skip connections from the input to the output of each flow. Defaults to False. :param callable nonlinearity: the nonlinearity to use in the feedforward network. Defaults to :func:`jax.example_libraries.stax.Elu`. 
""" def __init__( self, model, *, prefix="auto", init_loc_fn=init_to_uniform, num_flows=3, hidden_dims=None, skip_connections=False, nonlinearity=stax.Elu, init_strategy=None, ): if init_strategy is not None: init_loc_fn = init_strategy warnings.warn( "`init_strategy` argument has been deprecated in favor of `init_loc_fn`" " argument.", FutureWarning, stacklevel=find_stack_level(), ) self.num_flows = num_flows # 2-layer, stax.Elu, skip_connections=False by default following the experiments in # IAF paper (https://arxiv.org/abs/1606.04934) # and Neutra paper (https://arxiv.org/abs/1903.03704) self._hidden_dims = hidden_dims self._skip_connections = skip_connections self._nonlinearity = nonlinearity super(AutoIAFNormal, self).__init__( model, prefix=prefix, init_loc_fn=init_loc_fn ) def _get_posterior(self): if self.latent_dim == 1: raise ValueError( "latent dim = 1. Consider using AutoDiagonalNormal instead" ) hidden_dims = ( [self.latent_dim, self.latent_dim] if self._hidden_dims is None else self._hidden_dims ) flows = [] for i in range(self.num_flows): if i > 0: flows.append(PermuteTransform(jnp.arange(self.latent_dim)[::-1])) arn = AutoregressiveNN( self.latent_dim, hidden_dims, permutation=jnp.arange(self.latent_dim), skip_connections=self._skip_connections, nonlinearity=self._nonlinearity, ) arnn = numpyro.module( "{}_arn__{}".format(self.prefix, i), arn, (self.latent_dim,) ) flows.append(InverseAutoregressiveTransform(arnn)) return dist.TransformedDistribution(self.get_base_dist(), flows) def get_base_dist(self): return dist.Normal(jnp.zeros(self.latent_dim), 1).to_event(1) class AutoBNAFNormal(AutoContinuous): """ This implementation of :class:`AutoContinuous` uses a Diagonal Normal distribution transformed via a :class:`~numpyro.distributions.flows.BlockNeuralAutoregressiveTransform` to construct a guide over the entire latent space. The guide does not depend on the model's ``*args, **kwargs``. 
Usage:: guide = AutoBNAFNormal(model, num_flows=1, hidden_factors=[50, 50], ...) svi = SVI(model, guide, ...) **References** 1. *Block Neural Autoregressive Flow*, Nicola De Cao, Ivan Titov, Wilker Aziz :param callable model: a generative model. :param str prefix: a prefix that will be prefixed to all param internal sites. :param callable init_loc_fn: A per-site initialization function. :param int num_flows: the number of flows to be used, defaults to 1. :param list hidden_factors: Hidden layer i has ``hidden_factors[i]`` hidden units per input dimension. This corresponds to both :math:`a` and :math:`b` in reference [1]. The elements of hidden_factors must be integers. """ def __init__( self, model, *, prefix="auto", init_loc_fn=init_to_uniform, num_flows=1, hidden_factors=[8, 8], init_strategy=None, ): if init_strategy is not None: init_loc_fn = init_strategy warnings.warn( "`init_strategy` argument has been deprecated in favor of `init_loc_fn`" " argument.", FutureWarning, stacklevel=find_stack_level(), ) self.num_flows = num_flows self._hidden_factors = hidden_factors super(AutoBNAFNormal, self).__init__( model, prefix=prefix, init_loc_fn=init_loc_fn ) def _get_posterior(self): if self.latent_dim == 1: raise ValueError( "latent dim = 1. Consider using AutoDiagonalNormal instead" ) flows = [] for i in range(self.num_flows): if i > 0: flows.append(PermuteTransform(jnp.arange(self.latent_dim)[::-1])) residual = "gated" if i < (self.num_flows - 1) else None arn = BlockNeuralAutoregressiveNN( self.latent_dim, self._hidden_factors, residual ) arnn = numpyro.module( "{}_arn__{}".format(self.prefix, i), arn, (self.latent_dim,) ) flows.append(BlockNeuralAutoregressiveTransform(arnn)) return dist.TransformedDistribution(self.get_base_dist(), flows) def get_base_dist(self): return dist.Normal(jnp.zeros(self.latent_dim), 1).to_event(1)
"""Tests for plotting utilities.""" import warnings import tempfile import shutil import numpy as np import pandas as pd import matplotlib as mpl import matplotlib.pyplot as plt import nose import nose.tools as nt from nose.tools import assert_equal, raises import numpy.testing as npt import pandas.util.testing as pdt from distutils.version import LooseVersion pandas_has_categoricals = LooseVersion(pd.__version__) >= "0.15" from pandas.util.testing import network try: from bs4 import BeautifulSoup except ImportError: BeautifulSoup = None from . import PlotTestCase from .. import utils, rcmod from ..utils import get_dataset_names, load_dataset a_norm = np.random.randn(100) def test_pmf_hist_basics(): """Test the function to return barplot args for pmf hist.""" out = utils.pmf_hist(a_norm) assert_equal(len(out), 3) x, h, w = out assert_equal(len(x), len(h)) # Test simple case a = np.arange(10) x, h, w = utils.pmf_hist(a, 10) nose.tools.assert_true(np.all(h == h[0])) def test_pmf_hist_widths(): """Test histogram width is correct.""" x, h, w = utils.pmf_hist(a_norm) assert_equal(x[1] - x[0], w) def test_pmf_hist_normalization(): """Test that output data behaves like a PMF.""" x, h, w = utils.pmf_hist(a_norm) nose.tools.assert_almost_equal(sum(h), 1) nose.tools.assert_less_equal(h.max(), 1) def test_pmf_hist_bins(): """Test bin specification.""" x, h, w = utils.pmf_hist(a_norm, 20) assert_equal(len(x), 20) def test_ci_to_errsize(): """Test behavior of ci_to_errsize.""" cis = [[.5, .5], [1.25, 1.5]] heights = [1, 1.5] actual_errsize = np.array([[.5, 1], [.25, 0]]) test_errsize = utils.ci_to_errsize(cis, heights) npt.assert_array_equal(actual_errsize, test_errsize) def test_desaturate(): """Test color desaturation.""" out1 = utils.desaturate("red", .5) assert_equal(out1, (.75, .25, .25)) out2 = utils.desaturate("#00FF00", .5) assert_equal(out2, (.25, .75, .25)) out3 = utils.desaturate((0, 0, 1), .5) assert_equal(out3, (.25, .25, .75)) out4 = utils.desaturate("red", .5) 
assert_equal(out4, (.75, .25, .25)) @raises(ValueError) def test_desaturation_prop(): """Test that pct outside of [0, 1] raises exception.""" utils.desaturate("blue", 50) def test_saturate(): """Test performance of saturation function.""" out = utils.saturate((.75, .25, .25)) assert_equal(out, (1, 0, 0)) def test_iqr(): """Test the IQR function.""" a = np.arange(5) iqr = utils.iqr(a) assert_equal(iqr, 2) def test_str_to_utf8(): """Test the to_utf8 function: string to Unicode""" s = "\u01ff\u02ff" u = utils.to_utf8(s) assert_equal(type(s), type(str())) assert_equal(type(u), type(u"\u01ff\u02ff")) class TestSpineUtils(PlotTestCase): sides = ["left", "right", "bottom", "top"] outer_sides = ["top", "right"] inner_sides = ["left", "bottom"] offset = 10 original_position = ("outward", 0) offset_position = ("outward", offset) def test_despine(self): f, ax = plt.subplots() for side in self.sides: nt.assert_true(ax.spines[side].get_visible()) utils.despine() for side in self.outer_sides: nt.assert_true(~ax.spines[side].get_visible()) for side in self.inner_sides: nt.assert_true(ax.spines[side].get_visible()) utils.despine(**dict(zip(self.sides, [True] * 4))) for side in self.sides: nt.assert_true(~ax.spines[side].get_visible()) def test_despine_specific_axes(self): f, (ax1, ax2) = plt.subplots(2, 1) utils.despine(ax=ax2) for side in self.sides: nt.assert_true(ax1.spines[side].get_visible()) for side in self.outer_sides: nt.assert_true(~ax2.spines[side].get_visible()) for side in self.inner_sides: nt.assert_true(ax2.spines[side].get_visible()) def test_despine_with_offset(self): f, ax = plt.subplots() for side in self.sides: nt.assert_equal(ax.spines[side].get_position(), self.original_position) utils.despine(ax=ax, offset=self.offset) for side in self.sides: is_visible = ax.spines[side].get_visible() new_position = ax.spines[side].get_position() if is_visible: nt.assert_equal(new_position, self.offset_position) else: nt.assert_equal(new_position, self.original_position) def 
test_despine_side_specific_offset(self): f, ax = plt.subplots() utils.despine(ax=ax, offset=dict(left=self.offset)) for side in self.sides: is_visible = ax.spines[side].get_visible() new_position = ax.spines[side].get_position() if is_visible and side == "left": nt.assert_equal(new_position, self.offset_position) else: nt.assert_equal(new_position, self.original_position) def test_despine_with_offset_specific_axes(self): f, (ax1, ax2) = plt.subplots(2, 1) utils.despine(offset=self.offset, ax=ax2) for side in self.sides: nt.assert_equal(ax1.spines[side].get_position(), self.original_position) if ax2.spines[side].get_visible(): nt.assert_equal(ax2.spines[side].get_position(), self.offset_position) else: nt.assert_equal(ax2.spines[side].get_position(), self.original_position) def test_despine_trim_spines(self): f, ax = plt.subplots() ax.plot([1, 2, 3], [1, 2, 3]) ax.set_xlim(.75, 3.25) utils.despine(trim=True) for side in self.inner_sides: bounds = ax.spines[side].get_bounds() nt.assert_equal(bounds, (1, 3)) def test_despine_trim_inverted(self): f, ax = plt.subplots() ax.plot([1, 2, 3], [1, 2, 3]) ax.set_ylim(.85, 3.15) ax.invert_yaxis() utils.despine(trim=True) for side in self.inner_sides: bounds = ax.spines[side].get_bounds() nt.assert_equal(bounds, (1, 3)) def test_despine_trim_noticks(self): f, ax = plt.subplots() ax.plot([1, 2, 3], [1, 2, 3]) ax.set_yticks([]) utils.despine(trim=True) nt.assert_equal(ax.get_yticks().size, 0) def test_offset_spines_warns(self): with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always", category=UserWarning) f, ax = plt.subplots() utils.offset_spines(offset=self.offset) nt.assert_true('deprecated' in str(w[0].message)) nt.assert_true(issubclass(w[0].category, UserWarning)) def test_offset_spines(self): with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always", category=UserWarning) f, ax = plt.subplots() for side in self.sides: nt.assert_equal(ax.spines[side].get_position(), 
self.original_position) utils.offset_spines(offset=self.offset) for side in self.sides: nt.assert_equal(ax.spines[side].get_position(), self.offset_position) def test_offset_spines_specific_axes(self): with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always", category=UserWarning) f, (ax1, ax2) = plt.subplots(2, 1) utils.offset_spines(offset=self.offset, ax=ax2) for side in self.sides: nt.assert_equal(ax1.spines[side].get_position(), self.original_position) nt.assert_equal(ax2.spines[side].get_position(), self.offset_position) def test_ticklabels_overlap(): rcmod.set() f, ax = plt.subplots(figsize=(2, 2)) f.tight_layout() # This gets the Agg renderer working assert not utils.axis_ticklabels_overlap(ax.get_xticklabels()) big_strings = "abcdefgh", "ijklmnop" ax.set_xlim(-.5, 1.5) ax.set_xticks([0, 1]) ax.set_xticklabels(big_strings) assert utils.axis_ticklabels_overlap(ax.get_xticklabels()) x, y = utils.axes_ticklabels_overlap(ax) assert x assert not y def test_categorical_order(): x = ["a", "c", "c", "b", "a", "d"] y = [3, 2, 5, 1, 4] order = ["a", "b", "c", "d"] out = utils.categorical_order(x) nt.assert_equal(out, ["a", "c", "b", "d"]) out = utils.categorical_order(x, order) nt.assert_equal(out, order) out = utils.categorical_order(x, ["b", "a"]) nt.assert_equal(out, ["b", "a"]) out = utils.categorical_order(np.array(x)) nt.assert_equal(out, ["a", "c", "b", "d"]) out = utils.categorical_order(pd.Series(x)) nt.assert_equal(out, ["a", "c", "b", "d"]) out = utils.categorical_order(y) nt.assert_equal(out, [1, 2, 3, 4, 5]) out = utils.categorical_order(np.array(y)) nt.assert_equal(out, [1, 2, 3, 4, 5]) out = utils.categorical_order(pd.Series(y)) nt.assert_equal(out, [1, 2, 3, 4, 5]) if pandas_has_categoricals: x = pd.Categorical(x, order) out = utils.categorical_order(x) nt.assert_equal(out, list(x.categories)) x = pd.Series(x) out = utils.categorical_order(x) nt.assert_equal(out, list(x.cat.categories)) out = utils.categorical_order(x, ["b", 
"a"]) nt.assert_equal(out, ["b", "a"]) x = ["a", np.nan, "c", "c", "b", "a", "d"] out = utils.categorical_order(x) nt.assert_equal(out, ["a", "c", "b", "d"]) if LooseVersion(pd.__version__) >= "0.15": def check_load_dataset(name): ds = load_dataset(name, cache=False) assert(isinstance(ds, pd.DataFrame)) def check_load_cached_dataset(name): # Test the cacheing using a temporary file. # With Python 3.2+, we could use the tempfile.TemporaryDirectory() # context manager instead of this try...finally statement tmpdir = tempfile.mkdtemp() try: # download and cache ds = load_dataset(name, cache=True, data_home=tmpdir) # use cached version ds2 = load_dataset(name, cache=True, data_home=tmpdir) pdt.assert_frame_equal(ds, ds2) finally: shutil.rmtree(tmpdir) @network(url="https://github.com/mwaskom/seaborn-data") def test_get_dataset_names(): if not BeautifulSoup: raise nose.SkipTest("No BeautifulSoup available for parsing html") names = get_dataset_names() assert(len(names) > 0) assert(u"titanic" in names) @network(url="https://github.com/mwaskom/seaborn-data") def test_load_datasets(): if not BeautifulSoup: raise nose.SkipTest("No BeautifulSoup available for parsing html") # Heavy test to verify that we can load all available datasets for name in get_dataset_names(): # unfortunately @network somehow obscures this generator so it # does not get in effect, so we need to call explicitly # yield check_load_dataset, name check_load_dataset(name) @network(url="https://github.com/mwaskom/seaborn-data") def test_load_cached_datasets(): if not BeautifulSoup: raise nose.SkipTest("No BeautifulSoup available for parsing html") # Heavy test to verify that we can load all available datasets for name in get_dataset_names(): # unfortunately @network somehow obscures this generator so it # does not get in effect, so we need to call explicitly # yield check_load_dataset, name check_load_cached_dataset(name) def test_relative_luminance(): """Test relative luminance.""" out1 = 
utils.relative_luminance("white") assert_equal(out1, 1) out2 = utils.relative_luminance("#000000") assert_equal(out2, 0) out3 = utils.relative_luminance((.25, .5, .75)) nose.tools.assert_almost_equal(out3, 0.201624536) rgbs = mpl.cm.RdBu(np.linspace(0, 1, 10)) lums1 = [utils.relative_luminance(rgb) for rgb in rgbs] lums2 = utils.relative_luminance(rgbs) for lum1, lum2 in zip(lums1, lums2): nose.tools.assert_almost_equal(lum1, lum2)
from core import CodeGenerator, gensym, newResult, indent, special_char

# Ruby runtime support code emitted verbatim at the top of every generated
# parser: escape helper, control-flow exceptions, Result accumulator, and the
# memoizing input Stream.
#
# FIX: special_escape previously chained two-argument String#replace calls.
# Ruby's String#replace takes a single argument and overwrites the whole
# receiver, so the generated code raised ArgumentError at runtime; the
# substitution semantics intended here are String#gsub.
start_ruby = """
def special_escape(s)
    return s.gsub("\\\\n", "\\\\\\\\n").gsub("\\\\t", "\\\\\\\\t").gsub("\\\"", '\\\\\\\"').gsub("\\\\r", "\\\\\\\\r")
end

class PegError < Exception
end

class NotError < Exception
end

class Result
    attr_reader :values, :position

    def initialize(position)
        @position = position
        @values = []
    end

    def getPosition()
        return @position
    end

    def nextPosition(amount = 1)
        @position += amount
    end

    def setValue(value)
        @values = value
    end

    def getLastValue()
        if @values.is_a?(Array)
            if @values.size() > 0
                return @values[-1]
            else
                return nil
            end
        end
        return @values
    end

    def matches()
        return @values.size
    end

    def getValues()
        return @values
    end

    def addResult(him)
        @values << him.values
        @position = him.position
    end

    #def extendResult(self, him):
    #    self.values.extend(him.values)
    #    self.position = him.position
end

class Stream
    def initialize(filename)
        @file = File.new(filename, 'r')
        @position = 0
        @limit = 100
        @furthest = 0
        @all = @file.read()
        @memo = {}
        # print "Read " + str(len(self.all))
    end

    def close()
        @file.close()
    end

    def get(position, number = 1)
        if position + number > @limit
            # print (position + number)
            @limit += 5000
        end
        if position + number > @all.size
            return 0.chr()
        end
        # print "stream: %s" % self.all[position:position+number]
        return @all[position...position+number]
    end

    def reportError()
        line = 1
        column = 1
        for i in 0..@furthest
            if @all[i] == '\\n'
                line += 1
                column = 1
            else
                column += 1
            end
        end
        context = 10
        left = @furthest - context
        right = @furthest + context
        if left < 0
            left = 0
        end
        if right > @all.size
            right = @all.size
        end
        puts "Read up till line #{line}, column #{column}"
        puts special_escape(@all[left...right])
        puts (' ' * (@furthest - left)) + "^"
    end

    def update(rule, position, result)
        if result != nil and result.getPosition() > @furthest
            @furthest = result.getPosition()
        end
        for_rule = nil
        if @memo.has_key? rule
            for_rule = @memo[rule]
        else
            @memo[rule] = {}
            for_rule = @memo[rule]
        end
        for_position = nil
        if for_rule.has_key? position
            for_position = for_rule[position]
        else
            for_rule[position] = nil
        end
        for_rule[position] = result
    end

    def hasResult(rule, position)
        @memo.has_key?(rule) and @memo[rule].has_key?(position)
        # return @memo.has_key?(rule) and @memo[rule].has_key?(position)
    end

    def result(rule, position)
        return @memo[rule][position]
    end
end
"""


class RubyGenerator(CodeGenerator):
    """Emit Ruby source that implements each PEG pattern type.

    Each generate_* method receives the pattern node, the name of the Ruby
    Result variable to fill (`result`), the previous result (for $n
    substitution), the stream variable name, and a zero-argument `failure`
    callable producing the Ruby code to run when the pattern fails.
    """

    def fixup_ruby(self, code, how):
        """Rewrite $1, $2, ... placeholders in user code via `how`."""
        import re
        # Raw string: "\$(\d+)" relied on invalid escape sequences.
        fix = re.compile(r"\$(\d+)")
        return re.sub(fix, how, code)

    def generate_sequence(me, pattern, result, previous_result, stream, failure):
        """Each sub-pattern accumulates into `result`; value is the last one."""
        data = ""
        for apattern in pattern.patterns:
            my_result = newResult()
            data += """
%s = Result.new(%s.getPosition())
%s
%s.addResult(%s)
""" % (my_result, result, apattern.generate_v1(me, my_result, result, stream, failure), result, my_result)
        return data + """
%s.setValue(%s.getLastValue())
""" % (result, result)

    # this breaks when the sub-pattern is a PatternSequence, todo: fix it
    def generate_maybe(me, pattern, result, previous_result, stream, failure):
        """Optional pattern: on failure, restore position and yield nil."""
        save = gensym("save")
        fail = lambda: """
%s = Result.new(%s)
%s.setValue(nil)
""" % (result, save, result)
        data = """
%s = %s.getPosition()
%s
""" % (save, result, pattern.pattern.generate_v1(me, result, previous_result, stream, fail))
        return data

    def generate_repeat_many(me, pattern, result, previous_result, stream, failure):
        """Zero-or-more: loop until the sub-pattern raises PegError."""
        my_fail = lambda: "raise PegError"
        my_result = newResult()
        data = """
begin
    while true
        %s = Result.new(%s.getPosition())
        %s
        %s.addResult(%s)
    end
rescue PegError
end
""" % (my_result, result, indent(indent(pattern.next.generate_v1(me, my_result, result, stream, my_fail).strip())), result, my_result)
        return data

    def generate_rule(me, pattern, result, previous_result, stream, failure):
        """Invoke another named rule, passing rule refs and $n parameters."""
        def fix(v):
            # $n refers to the n-th value of the previous result (1-based).
            return "%s.getValues()[%s]" % (previous_result, int(v.group(1)) - 1)

        def change(arg):
            # @name passes a raw identifier; otherwise wrap the rule in a lambda.
            if arg.startswith('@'):
                return arg[1:]
            return 'lambda{|*args| rule_%s(*args)}' % arg

        rule_parameters = ""
        if pattern.rules != None:
            rule_parameters = ", %s" % ", ".join([change(f) for f in pattern.rules])
        parameters = ""
        if pattern.parameters != None:
            parameters = ", %s" % ",".join([me.fixup_ruby(p, fix) for p in pattern.parameters])
        data = """
# puts "Trying rule '%s'"
%s = rule_%s(%s, %s.getPosition()%s%s)
if %s == nil
    %s
end
""" % (pattern.rule, result, pattern.rule, stream, result, rule_parameters, parameters, result, indent(failure()))
        return data

    def generate_repeat_once(me, pattern, result, previous_result, stream, failure):
        """One-or-more: like repeat_many but fail on zero matches."""
        my_fail = lambda: "raise PegError"
        my_result = newResult()
        data = """
begin
    while (true)
        %s = Result.new(%s.getPosition())
        %s
        %s.addResult(%s)
    end
rescue PegError
    if %s.matches() == 0
        %s
    end
end
""" % (my_result, result, indent(indent(pattern.next.generate_v1(me, my_result, result, stream, my_fail).strip())), result, my_result, result, failure())
        return data

    def generate_void(me, pattern, result, previous_result, stream, failure):
        """Void pattern emits no code."""
        return ""

    def generate_verbatim(me, pattern, result, previous_result, stream, failure):
        """Match a literal string or a single ASCII code."""
        def doString():
            length = len(pattern.letters)
            if special_char(pattern.letters):
                length = 1
            data = """
if '%s' == %s.get(%s.getPosition(), %s) then
    %s.nextPosition(%s)
    %s.setValue('%s')
else
    %s
end
""" % (pattern.letters, stream, result, length, result, length, result, pattern.letters, indent(failure()))
            return data

        def doAscii():
            data = """
if %s.get(%s.getPosition()).ord() == %s then
    %s.nextPosition()
    %s.setValue(%s)
else
    %s
end
"""
            return data % (stream, result, pattern.letters, result, result, pattern.letters, indent(failure()))

        # isinstance instead of type() == type() comparisons.
        if isinstance(pattern.letters, str):
            return doString()
        elif isinstance(pattern.letters, int):
            return doAscii()
        else:
            raise Exception("unknown verbatim value %s" % pattern.letters)

    def generate_ensure(me, pattern, result, previous_result, stream, failure):
        """And-predicate: match in a scratch result without consuming input."""
        my_result = newResult()
        data = """
%s = Result.new(%s.getPosition())
%s
""" % (my_result, result, pattern.next.generate_v1(me, my_result, result, stream, failure).strip())
        return data

    def generate_not(me, pattern, result, previous_result, stream, failure):
        """Not-predicate: succeed (value nil) only if the sub-pattern fails."""
        my_result = newResult()
        my_fail = lambda: "raise NotError"
        # FIX: the sub-pattern call previously omitted the `me` generator
        # argument (generate_v1(my_result, result, stream, my_fail)), unlike
        # every other call site in this class.
        data = """
%s = Result.new(%s.getPosition())
begin
    %s
    %s
rescue NotError
    %s.setValue(nil)
end
""" % (my_result, result, indent(pattern.next.generate_v1(me, my_result, result, stream, my_fail).strip()), failure(), result)
        return data

    def generate_any(me, pattern, result, previous_result, stream, failure):
        """Match any single character except end-of-stream (NUL sentinel)."""
        temp = gensym()
        data = """
%s = %s.get(%s.getPosition())
if %s != 0.chr() then
    %s.setValue(%s)
    %s.nextPosition()
else
    %s
end
""" % (temp, stream, result, temp, result, temp, result, indent(failure()))
        return data

    def generate_range(me, pattern, result, previous_result, stream, failure):
        """Match one character drawn from the pattern's character set."""
        letter = gensym("letter")
        data = """
%s = %s.get(%s.getPosition())
if '%s'.index(%s) != nil then
    %s.nextPosition()
    %s.setValue(%s)
else
    %s
end
""" % (letter, stream, result, pattern.range, letter, result, result, letter, indent(failure()))
        return data

    def generate_eof(me, pattern, result, previous_result, stream, failure):
        """Match end of input (the Stream's NUL sentinel)."""
        data = """
if 0.chr() == %s.get(%s.getPosition()) then
    %s.nextPosition()
    %s.setValue(0.chr())
else
    %s
end
""" % (stream, result, result, result, indent(failure()))
        return data

    def generate_code(me, pattern, result, previous_result, stream, failure):
        """Run embedded user Ruby; its `value` becomes the result value."""
        data = """
value = nil
values = %s.getValues()
%s
%s.setValue(value)
""" % (previous_result, me.fixup_ruby(pattern.code.strip(), lambda v: "values[%s]" % (int(v.group(1)) - 1)), result)
        return data

    def generate_bind(me, pattern, result, previous_result, stream, failure):
        """Bind the matched values to a named Ruby variable."""
        data = """
%s
%s = %s.getValues()
""" % (pattern.pattern.generate_v1(me, result, previous_result, stream, failure).strip(), pattern.variable, result)
        return data

    def generate_call_rule(me, pattern, result, previous_result, stream, failure):
        """Call a rule passed in as a first-class value (lambda)."""
        def fix(v):
            return "%s.getValues()[%s]" % (previous_result, int(v.group(1)) - 1)

        def change(arg):
            if arg.startswith('@'):
                return arg[1:]
            return 'lambda{|*args| rule_%s(*args)}' % arg

        rule_parameters = ""
        if pattern.rules != None:
            rule_parameters = ", %s" % ", ".join([change(f) for f in pattern.rules])
        parameters = ""
        if pattern.values != None:
            parameters = ", %s" % ",".join([me.fixup_ruby(p, fix) for p in pattern.values])
        data = """
# print "Trying rule " + '%s'
%s = %s.call(%s, %s.getPosition()%s%s)
if %s == nil
    %s
end
""" % (pattern.name, result, pattern.name, stream, result, rule_parameters, parameters, result, indent(failure()))
        return data

    def generate(self):
        """Assemble the complete Ruby parser: runtime, rule ids, rules, entry."""
        use_rules = self.rules
        # enumerate instead of zip(rules, range(len(rules))).
        rule_numbers = '\n'.join(["RULE_%s = %d" % (rule.name, number)
                                  for number, rule in enumerate(use_rules)])
        data = """
%s

%s

%s

def parse(file)
    stream = Stream.new(file)
    out = rule_%s(stream, 0)
    stream.close()
    return out.getValues()
end
""" % (start_ruby, rule_numbers, '\n'.join([rule.generate_ruby() for rule in self.rules]), self.start)
        return data
"""Unit tests for the trigger-expression grammar parser.

Each test feeds a source string to TriggerParser.parse and checks the shape
of the resulting expression tree (node classes, operand values, arg/kwarg
binding).  FIX: the deprecated ``assertEquals`` alias (removed in Python
3.12) is replaced throughout by ``assertEqual``; behavior is unchanged.
"""

__author__ = 'mouton'

from unittest import TestCase
from triggerExpressions import *
from arithmeticExpressions import *
from triggerGrammar import TriggerParser
from database import Variable


class TestTriggerGrammarParser(TestCase):
    """Parser tests: literals, built-ins, operators, and call expressions."""

    @classmethod
    def setUpClass(cls):
        # The grammar tables must be compiled once before any parse call.
        import grammar.grammars
        grammar.grammars.compileGrammars()

    def test_parse_litteral_1(self):
        toParse = 'true'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, BLitteral)
        self.assertTrue(expr._value)

    def test_parse_litteral_2(self):
        toParse = 'false'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, BLitteral)
        self.assertFalse(expr._value)

    def test_parse_timer(self):
        toParse = 'timer(30)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, Timer)
        self.assertEqual(expr._nbFrames.value(Evaluation()), 30)

    def test_parse_rand(self):
        toParse = 'rand(0.5)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, Rand)
        self.assertEqual(expr._prob.value(Evaluation()), 0.5)

    def test_parse_randint(self):
        toParse = 'randInt(X, 5)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, RandInt)
        self.assertIsInstance(expr._var, Variable)
        self.assertEqual(expr._var._name, 'X')
        self.assertEqual(expr._maxInt.value(Evaluation()), 5)

    def test_parse_is(self):
        toParse = 'X is 5'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, Is)
        self.assertIsInstance(expr._a1, Variable)
        self.assertEqual(expr._a1._name, 'X')
        self.assertEqual(expr._a2.value(Evaluation()), 5)

    def test_parse_del(self):
        toParse = 'del Y'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, Del)
        self.assertIsInstance(expr._a1, Variable)
        self.assertEqual(expr._a1._name, 'Y')

    def test_parse_any(self):
        toParse = 'anyEval(true)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, AnyEval)
        self.assertIsInstance(expr._expr, BLitteral)
        self.assertEqual(expr._expr._value, True)

    def test_parse_random(self):
        toParse = 'randomEval(true)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, RandomEval)
        self.assertIsInstance(expr._expr, BLitteral)
        self.assertEqual(expr._expr._value, True)

    def test_parse_min(self):
        toParse = 'minEval[X](true)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, SelectMinEval)
        self.assertIsInstance(expr._arithmExpr, ALitteral)
        self.assertIsInstance(expr._arithmExpr._value, Variable)
        self.assertEqual(expr._arithmExpr._value._name, 'X')
        self.assertIsInstance(expr._expr, BLitteral)
        self.assertEqual(expr._expr._value, True)

    def test_parse_max(self):
        toParse = 'maxEval[X](true)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, SelectMaxEval)
        self.assertIsInstance(expr._arithmExpr, ALitteral)
        self.assertIsInstance(expr._arithmExpr._value, Variable)
        self.assertEqual(expr._arithmExpr._value._name, 'X')
        self.assertIsInstance(expr._expr, BLitteral)
        self.assertEqual(expr._expr._value, True)

    def test_parse_and(self):
        toParse = 'true and false'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, And)
        self.assertIsInstance(expr._a1, BLitteral)
        self.assertEqual(expr._a1._value, True)
        self.assertIsInstance(expr._a2, BLitteral)
        self.assertEqual(expr._a2._value, False)

    def test_parse_or(self):
        toParse = 'true or false'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, Or)
        self.assertIsInstance(expr._a1, BLitteral)
        self.assertEqual(expr._a1._value, True)
        self.assertIsInstance(expr._a2, BLitteral)
        self.assertEqual(expr._a2._value, False)

    def test_not(self):
        toParse = 'not true'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, Not)
        self.assertIsInstance(expr._a1, BLitteral)
        self.assertEqual(expr._a1._value, True)

    def test_parse_parenthesis(self):
        toParse = '((true) or (false))'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, Or)
        self.assertIsInstance(expr._a1, BLitteral)
        self.assertEqual(expr._a1._value, True)
        self.assertIsInstance(expr._a2, BLitteral)
        self.assertEqual(expr._a2._value, False)

    def test_parse_compare_leq(self):
        toParse = '0.5 <= 1'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, LeqThan)
        self.assertEqual(expr._a1.value(Evaluation()), 0.5)
        self.assertEqual(expr._a2.value(Evaluation()), 1)

    def test_parse_compare_geq(self):
        toParse = '0.5 >= 1'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, GeqThan)
        self.assertEqual(expr._a1.value(Evaluation()), 0.5)
        self.assertEqual(expr._a2.value(Evaluation()), 1)

    def test_parse_compare_low(self):
        toParse = '0.5 < 1'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, LowerThan)
        self.assertEqual(expr._a1.value(Evaluation()), 0.5)
        self.assertEqual(expr._a2.value(Evaluation()), 1)

    def test_parse_compare_eq(self):
        toParse = '0.5 == 1'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, Equals)
        self.assertEqual(expr._a1.value(Evaluation()), 0.5)
        self.assertEqual(expr._a2.value(Evaluation()), 1)

    def test_parse_compare_neq(self):
        toParse = '0.5 != 1'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, NotEquals)
        self.assertEqual(expr._a1.value(Evaluation()), 0.5)
        self.assertEqual(expr._a2.value(Evaluation()), 1)

    def test_parse_empty_property(self):
        toParse = 'pTest()'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, PropertyTriggerExpression)
        self.assertEqual(len(expr._args), 0)
        self.assertEqual(len(expr._kwargs), 0)

    def test_parse_property_args(self):
        toParse = 'pTest(1, 0.5, \'abc\', X)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, PropertyTriggerExpression)
        self.assertEqual(len(expr._args), 4)
        self.assertEqual(expr._args[0].value(Evaluation()), 1)
        self.assertEqual(expr._args[1].value(Evaluation()), 0.5)
        self.assertEqual(expr._args[2].value(Evaluation()), 'abc')
        self.assertIsInstance(expr._args[3], Variable)
        self.assertEqual(expr._args[3].name, 'X')
        self.assertEqual(len(expr._kwargs), 0)

    def test_parse_property_kwargs(self):
        toParse = 'pTest(1 = 12, \'def\'=0.5, 3=\'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, PropertyTriggerExpression)
        self.assertEqual(len(expr._args), 0)
        self.assertEqual(len(expr._kwargs), 3)
        # Evaluate keys and values to plain Python objects for comparison.
        d = {key.value(Evaluation()): expr._kwargs[key].value(Evaluation())
             for key in expr._kwargs}
        self.assertTrue(1 in d)
        self.assertTrue('def' in d)
        self.assertTrue(3 in d)
        self.assertEqual(d[1], 12)
        self.assertEqual(d['def'], 0.5)
        self.assertEqual(d[3], 'abc')

    def test_parse_property_kwargs_variable(self):
        toParse = 'pTest(1 = X, \'def\'=Y, 3=\'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, PropertyTriggerExpression)
        self.assertEqual(len(expr._args), 0)
        self.assertEqual(len(expr._kwargs), 3)
        for key in expr._kwargs:
            if key.value(Evaluation()) == 1:
                self.assertIsInstance(expr._kwargs[key], Variable)
                self.assertEqual(expr._kwargs[key].name, 'X')
            if key.value(Evaluation()) == 'def':
                self.assertIsInstance(expr._kwargs[key], Variable)
                self.assertEqual(expr._kwargs[key].name, 'Y')
            if key.value(Evaluation()) == 3:
                self.assertNotIsInstance(expr._kwargs[key], Variable)

    def test_parse_property_args_and_kwargs(self):
        toParse = 'pTest(8, \'ghi\', 1 = 12, \'def\'=0.5, 3=\'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, PropertyTriggerExpression)
        self.assertEqual(len(expr._args), 2)
        self.assertEqual(expr._args[0].value(Evaluation()), 8)
        self.assertEqual(expr._args[1].value(Evaluation()), 'ghi')
        self.assertEqual(len(expr._kwargs), 3)
        d = {key.value(Evaluation()): expr._kwargs[key].value(Evaluation())
             for key in expr._kwargs}
        self.assertTrue(1 in d)
        self.assertTrue('def' in d)
        self.assertTrue(3 in d)
        self.assertEqual(d[1], 12)
        self.assertEqual(d['def'], 0.5)
        self.assertEqual(d[3], 'abc')

    def test_parse_empty_event(self):
        toParse = 'eTest()'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, EventTriggerExpression)
        self.assertEqual(len(expr._args), 0)
        self.assertEqual(len(expr._kwargs), 0)

    def test_parse_event_args(self):
        toParse = 'eTest(1, 0.5, \'abc\', X)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, EventTriggerExpression)
        self.assertEqual(len(expr._args), 4)
        self.assertEqual(expr._args[0].value(Evaluation()), 1)
        self.assertEqual(expr._args[1].value(Evaluation()), 0.5)
        self.assertEqual(expr._args[2].value(Evaluation()), 'abc')
        self.assertIsInstance(expr._args[3], Variable)
        self.assertEqual(expr._args[3].name, 'X')
        self.assertEqual(len(expr._kwargs), 0)

    def test_parse_event_kwargs(self):
        toParse = 'eTest(1 = 12, \'def\'=0.5, 3=\'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, EventTriggerExpression)
        self.assertEqual(len(expr._args), 0)
        self.assertEqual(len(expr._kwargs), 3)
        d = {key.value(Evaluation()): expr._kwargs[key].value(Evaluation())
             for key in expr._kwargs}
        self.assertTrue(1 in d)
        self.assertTrue('def' in d)
        self.assertTrue(3 in d)
        self.assertEqual(d[1], 12)
        self.assertEqual(d['def'], 0.5)
        self.assertEqual(d[3], 'abc')

    def test_parse_event_kwargs_variable(self):
        toParse = 'eTest(1 = X, \'def\'=Y, 3=\'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, EventTriggerExpression)
        self.assertEqual(len(expr._args), 0)
        self.assertEqual(len(expr._kwargs), 3)
        for key in expr._kwargs:
            if key.value(Evaluation()) == 1:
                self.assertIsInstance(expr._kwargs[key], Variable)
                self.assertEqual(expr._kwargs[key].name, 'X')
            if key.value(Evaluation()) == 'def':
                self.assertIsInstance(expr._kwargs[key], Variable)
                self.assertEqual(expr._kwargs[key].name, 'Y')
            if key.value(Evaluation()) == 3:
                self.assertNotIsInstance(expr._kwargs[key], Variable)

    def test_parse_event_args_and_kwargs(self):
        toParse = 'eTest(8, \'ghi\', 1 = 12, \'def\'=0.5, 3=\'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, EventTriggerExpression)
        self.assertEqual(len(expr._args), 2)
        self.assertEqual(expr._args[0].value(Evaluation()), 8)
        self.assertEqual(expr._args[1].value(Evaluation()), 'ghi')
        self.assertEqual(len(expr._kwargs), 3)
        d = {key.value(Evaluation()): expr._kwargs[key].value(Evaluation())
             for key in expr._kwargs}
        self.assertTrue(1 in d)
        self.assertTrue('def' in d)
        self.assertTrue(3 in d)
        self.assertEqual(d[1], 12)
        self.assertEqual(d['def'], 0.5)
        self.assertEqual(d[3], 'abc')

    def test_parse_empty_token(self):
        toParse = 'token()'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, TokenExpression)
        self.assertEqual(len(expr._args), 0)
        self.assertEqual(len(expr._kwargs), 0)

    def test_parse_token_args(self):
        toParse = 'token(1, 0.5, \'abc\', X)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, TokenExpression)
        self.assertEqual(len(expr._args), 4)
        self.assertEqual(expr._args[0].value(Evaluation()), 1)
        self.assertEqual(expr._args[1].value(Evaluation()), 0.5)
        self.assertEqual(expr._args[2].value(Evaluation()), 'abc')
        self.assertIsInstance(expr._args[3], Variable)
        self.assertEqual(expr._args[3].name, 'X')
        self.assertEqual(len(expr._kwargs), 0)

    def test_parse_token_kwargs(self):
        toParse = 'token(1 = 12, \'def\'=0.5, 3=\'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, TokenExpression)
        self.assertEqual(len(expr._args), 0)
        self.assertEqual(len(expr._kwargs), 3)
        d = {key.value(Evaluation()): expr._kwargs[key].value(Evaluation())
             for key in expr._kwargs}
        self.assertTrue(1 in d)
        self.assertTrue('def' in d)
        self.assertTrue(3 in d)
        self.assertEqual(d[1], 12)
        self.assertEqual(d['def'], 0.5)
        self.assertEqual(d[3], 'abc')

    def test_parse_token_kwargs_variable(self):
        toParse = 'token(1 = X, \'def\'=Y, 3=\'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, TokenExpression)
        self.assertEqual(len(expr._args), 0)
        self.assertEqual(len(expr._kwargs), 3)
        for key in expr._kwargs:
            if key.value(Evaluation()) == 1:
                self.assertIsInstance(expr._kwargs[key], Variable)
                self.assertEqual(expr._kwargs[key].name, 'X')
            if key.value(Evaluation()) == 'def':
                self.assertIsInstance(expr._kwargs[key], Variable)
                self.assertEqual(expr._kwargs[key].name, 'Y')
            if key.value(Evaluation()) == 3:
                self.assertNotIsInstance(expr._kwargs[key], Variable)

    def test_parse_token_args_and_kwargs(self):
        toParse = 'token(8, \'ghi\', 1 = 12, \'def\'=0.5, 3=\'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, TokenExpression)
        self.assertEqual(len(expr._args), 2)
        self.assertEqual(expr._args[0].value(Evaluation()), 8)
        self.assertEqual(expr._args[1].value(Evaluation()), 'ghi')
        self.assertEqual(len(expr._kwargs), 3)
        d = {key.value(Evaluation()): expr._kwargs[key].value(Evaluation())
             for key in expr._kwargs}
        self.assertTrue(1 in d)
        self.assertTrue('def' in d)
        self.assertTrue(3 in d)
        self.assertEqual(d[1], 12)
        self.assertEqual(d['def'], 0.5)
        self.assertEqual(d[3], 'abc')

    def test_parse_eLock_empty(self):
        toParse = 'eLock(1)'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, eLock)
        self.assertEqual(expr._priority.value(Evaluation()), 1)
        self.assertEqual(len(expr._keys), 0)

    def test_parse_eLock_args(self):
        toParse = 'eLock(-1, 1, 0.5, \'abc\')'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, eLock)
        self.assertEqual(expr._priority.value(Evaluation()), -1)
        self.assertEqual(len(expr._keys), 3)
        self.assertEqual(expr._keys[0].value(Evaluation()), 1)
        self.assertEqual(expr._keys[1].value(Evaluation()), 0.5)
        self.assertEqual(expr._keys[2].value(Evaluation()), 'abc')

    def test_parse_complicate_and_or(self):
        # 'and' binds tighter than 'or': (true and false) or (true and false).
        toParse = 'true and false or true and false'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, Or)
        self.assertIsInstance(expr._a1, And)
        self.assertIsInstance(expr._a1._a1, BLitteral)
        self.assertEqual(expr._a1._a1._value, True)
        self.assertIsInstance(expr._a1._a2, BLitteral)
        self.assertEqual(expr._a1._a2._value, False)
        self.assertIsInstance(expr._a2, And)
        self.assertIsInstance(expr._a2._a1, BLitteral)
        self.assertEqual(expr._a2._a1._value, True)
        self.assertIsInstance(expr._a2._a2, BLitteral)
        self.assertEqual(expr._a2._a2._value, False)

    def test_parse_complicate_and_or_2(self):
        # Parentheses force: true and ((false or true) and false).
        toParse = 'true and (false or true) and false'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, And)
        self.assertIsInstance(expr._a1, BLitteral)
        self.assertEqual(expr._a1._value, True)
        self.assertIsInstance(expr._a2, And)
        self.assertIsInstance(expr._a2._a1, Or)
        self.assertIsInstance(expr._a2._a1._a1, BLitteral)
        self.assertEqual(expr._a2._a1._a1._value, False)
        self.assertIsInstance(expr._a2._a1._a2, BLitteral)
        self.assertEqual(expr._a2._a1._a2._value, True)
        self.assertIsInstance(expr._a2._a2, BLitteral)
        self.assertEqual(expr._a2._a2._value, False)

    def test_parse_complicate_expression(self):
        toParse = 'anyEval(X is 2 and not timer(30) or ' \
                  '(Y is 3 and pTest(X,2=Y) or eTest(X, 2)))'
        expr = TriggerParser.parse(toParse)
        self.assertIsInstance(expr, AnyEval)
        self.assertIsInstance(expr._expr, Or)
        self.assertIsInstance(expr._expr._a1, And)
        self.assertIsInstance(expr._expr._a1._a1, Is)
        self.assertIsInstance(expr._expr._a1._a2, Not)
        self.assertIsInstance(expr._expr._a1._a2._a1, Timer)
        self.assertIsInstance(expr._expr._a2, Or)
        self.assertIsInstance(expr._expr._a2._a1, And)
        self.assertIsInstance(expr._expr._a2._a1._a1, Is)
        self.assertIsInstance(expr._expr._a2._a1._a2, PropertyTriggerExpression)
        self.assertIsInstance(expr._expr._a2._a2, EventTriggerExpression)
# coding=utf-8
"""Auto-generated Twilio Verify v2 ``VerificationCheck`` resource helpers.

Provides the list resource used to submit verification checks, the page
wrapper, and the read-only instance record returned by the API.
"""

from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page


class VerificationCheckList(ListResource):

    def __init__(self, version, service_sid):
        """Initialize the VerificationCheckList.

        :param Version version: Version that contains the resource
        :param service_sid: The SID of the Service that the resource is
            associated with
        """
        super(VerificationCheckList, self).__init__(version)

        # Path solution: the service SID is the only URI template variable.
        self._solution = {'service_sid': service_sid, }
        self._uri = '/Services/{service_sid}/VerificationCheck'.format(**self._solution)

    def create(self, code, to=values.unset, verification_sid=values.unset,
               amount=values.unset, payee=values.unset):
        """Create the VerificationCheckInstance.

        :param unicode code: The verification string
        :param unicode to: The phone number or email to verify
        :param unicode verification_sid: A SID that uniquely identifies the
            Verification Check
        :param unicode amount: The amount of the associated PSD2 compliant
            transaction
        :param unicode payee: The payee of the associated PSD2 compliant
            transaction

        :returns: The created VerificationCheckInstance
        """
        # values.of() drops any parameter left at values.unset.
        form = values.of({
            'Code': code,
            'To': to,
            'VerificationSid': verification_sid,
            'Amount': amount,
            'Payee': payee,
        })

        created = self._version.create(method='POST', uri=self._uri, data=form, )

        return VerificationCheckInstance(
            self._version,
            created,
            service_sid=self._solution['service_sid'],
        )

    def __repr__(self):
        """Return a machine-friendly representation.

        :rtype: str
        """
        return '<Twilio.Verify.V2.VerificationCheckList>'


class VerificationCheckPage(Page):

    def __init__(self, version, response, solution):
        """Initialize the VerificationCheckPage.

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :param solution: Path solution carrying ``service_sid``
        """
        super(VerificationCheckPage, self).__init__(version, response)

        # Path solution is simply forwarded from the owning list resource.
        self._solution = solution

    def get_instance(self, payload):
        """Build an instance of VerificationCheckInstance.

        :param dict payload: Payload response from the API
        :returns: twilio.rest.verify.v2.service.verification_check.VerificationCheckInstance
        """
        return VerificationCheckInstance(
            self._version,
            payload,
            service_sid=self._solution['service_sid'],
        )

    def __repr__(self):
        """Return a machine-friendly representation.

        :rtype: str
        """
        return '<Twilio.Verify.V2.VerificationCheckPage>'


class VerificationCheckInstance(InstanceResource):

    class Channel(object):
        SMS = "sms"
        CALL = "call"
        EMAIL = "email"
        WHATSAPP = "whatsapp"

    # Payload fields copied verbatim into _properties; the two date fields
    # below are additionally parsed from ISO 8601 strings.
    _SIMPLE_FIELDS = ('sid', 'service_sid', 'account_sid', 'to', 'channel',
                      'status', 'valid', 'amount', 'payee')

    def __init__(self, version, payload, service_sid):
        """Initialize the VerificationCheckInstance from an API payload."""
        super(VerificationCheckInstance, self).__init__(version)

        # Marshaled properties.
        self._properties = {name: payload.get(name)
                            for name in self._SIMPLE_FIELDS}
        self._properties['date_created'] = deserialize.iso8601_datetime(
            payload.get('date_created'))
        self._properties['date_updated'] = deserialize.iso8601_datetime(
            payload.get('date_updated'))

        # Context is never materialized for this read-only resource.
        self._context = None
        self._solution = {'service_sid': service_sid, }

    @property
    def sid(self):
        """:returns: The unique string that identifies the resource
        :rtype: unicode"""
        return self._properties['sid']

    @property
    def service_sid(self):
        """:returns: The SID of the Service that the resource is associated with
        :rtype: unicode"""
        return self._properties['service_sid']

    @property
    def account_sid(self):
        """:returns: The SID of the Account that created the resource
        :rtype: unicode"""
        return self._properties['account_sid']

    @property
    def to(self):
        """:returns: The phone number or email being verified
        :rtype: unicode"""
        return self._properties['to']

    @property
    def channel(self):
        """:returns: The verification method used
        :rtype: VerificationCheckInstance.Channel"""
        return self._properties['channel']

    @property
    def status(self):
        """:returns: The status of the verification resource
        :rtype: unicode"""
        return self._properties['status']

    @property
    def valid(self):
        """:returns: Whether the verification was successful
        :rtype: bool"""
        return self._properties['valid']

    @property
    def amount(self):
        """:returns: The amount of the associated PSD2 compliant transaction
        :rtype: unicode"""
        return self._properties['amount']

    @property
    def payee(self):
        """:returns: The payee of the associated PSD2 compliant transaction
        :rtype: unicode"""
        return self._properties['payee']

    @property
    def date_created(self):
        """:returns: The ISO 8601 date and time in GMT when the resource was created
        :rtype: datetime"""
        return self._properties['date_created']

    @property
    def date_updated(self):
        """:returns: The ISO 8601 date and time in GMT when the resource was last updated
        :rtype: datetime"""
        return self._properties['date_updated']

    def __repr__(self):
        """Return a machine-friendly representation.

        :rtype: str
        """
        return '<Twilio.Verify.V2.VerificationCheckInstance>'
import gym
from typing import Dict, List, Optional, Sequence, Tuple

from ray.rllib.models.tf.tf_modelv2 import TFModelV2
from ray.rllib.models.tf.misc import normc_initializer
from ray.rllib.models.utils import get_activation_fn, get_filter_config
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.framework import try_import_tf
from ray.rllib.utils.typing import ModelConfigDict, TensorType

tf1, tf, tfv = try_import_tf()


# TODO: (sven) obsolete this class once we only support native keras models.
class VisionNetwork(TFModelV2):
    """Generic vision network implemented in ModelV2 API.

    An additional post-conv fully connected stack can be added and configured
    via the config keys:
    `post_fcnet_hiddens`: Dense layer sizes after the Conv2D stack.
    `post_fcnet_activation`: Activation function to use for this FC stack.
    """

    def __init__(
        self,
        obs_space: gym.spaces.Space,
        action_space: gym.spaces.Space,
        num_outputs: int,
        model_config: ModelConfigDict,
        name: str,
    ):
        if not model_config.get("conv_filters"):
            model_config["conv_filters"] = get_filter_config(obs_space.shape)

        super(VisionNetwork, self).__init__(
            obs_space, action_space, num_outputs, model_config, name
        )

        activation = get_activation_fn(
            self.model_config.get("conv_activation"), framework="tf"
        )
        filters = self.model_config["conv_filters"]
        assert len(filters) > 0, "Must provide at least 1 entry in `conv_filters`!"

        # Post FC net config.
        # FIX: normalize to a list so that the tuple default used by some
        # configs does not break the `hiddens + [num_outputs]` concatenations
        # below (tuple + list raises TypeError).
        post_fcnet_hiddens = list(model_config.get("post_fcnet_hiddens", []))
        post_fcnet_activation = get_activation_fn(
            model_config.get("post_fcnet_activation"), framework="tf"
        )

        no_final_linear = self.model_config.get("no_final_linear")
        vf_share_layers = self.model_config.get("vf_share_layers")

        input_shape = obs_space.shape
        self.data_format = "channels_last"

        inputs = tf.keras.layers.Input(shape=input_shape, name="observations")
        last_layer = inputs
        # Whether the last layer is the output of a Flattened (rather than
        # a n x (1,1) Conv2D).
        self.last_layer_is_flattened = False

        # Build the action layers (all but the last conv filter use "same"
        # padding; the final one uses "valid" so it can collapse to (1,1)).
        for i, (out_size, kernel, stride) in enumerate(filters[:-1], 1):
            last_layer = tf.keras.layers.Conv2D(
                out_size,
                kernel,
                strides=stride
                if isinstance(stride, (list, tuple))
                else (stride, stride),
                activation=activation,
                padding="same",
                data_format="channels_last",
                name="conv{}".format(i),
            )(last_layer)

        out_size, kernel, stride = filters[-1]

        # No final linear: Last layer has activation function and exits with
        # num_outputs nodes (this could be a 1x1 conv or a FC layer, depending
        # on `post_fcnet_...` settings).
        if no_final_linear and num_outputs:
            last_layer = tf.keras.layers.Conv2D(
                out_size if post_fcnet_hiddens else num_outputs,
                kernel,
                strides=stride
                if isinstance(stride, (list, tuple))
                else (stride, stride),
                activation=activation,
                padding="valid",
                data_format="channels_last",
                name="conv_out",
            )(last_layer)
            # Add (optional) post-fc-stack after last Conv2D layer.
            layer_sizes = post_fcnet_hiddens[:-1] + (
                [num_outputs] if post_fcnet_hiddens else []
            )
            # `feature_out` tracks the layer feeding the value branch: the
            # input of the final Dense layer (or the conv output if no FC
            # stack is configured).
            feature_out = last_layer

            for i, out_size in enumerate(layer_sizes):
                feature_out = last_layer
                last_layer = tf.keras.layers.Dense(
                    out_size,
                    name="post_fcnet_{}".format(i),
                    activation=post_fcnet_activation,
                    kernel_initializer=normc_initializer(1.0),
                )(last_layer)

        # Finish network normally (w/o overriding last layer size with
        # `num_outputs`), then add another linear one of size `num_outputs`.
        else:
            last_layer = tf.keras.layers.Conv2D(
                out_size,
                kernel,
                strides=stride
                if isinstance(stride, (list, tuple))
                else (stride, stride),
                activation=activation,
                padding="valid",
                data_format="channels_last",
                name="conv{}".format(len(filters)),
            )(last_layer)

            # num_outputs defined. Use that to create an exact
            # `num_output`-sized (1,1)-Conv2D.
            if num_outputs:
                if post_fcnet_hiddens:
                    last_cnn = last_layer = tf.keras.layers.Conv2D(
                        post_fcnet_hiddens[0],
                        [1, 1],
                        activation=post_fcnet_activation,
                        padding="same",
                        data_format="channels_last",
                        name="conv_out",
                    )(last_layer)
                    # Add (optional) post-fc-stack after last Conv2D layer.
                    for i, out_size in enumerate(
                        post_fcnet_hiddens[1:] + [num_outputs]
                    ):
                        feature_out = last_layer
                        last_layer = tf.keras.layers.Dense(
                            out_size,
                            name="post_fcnet_{}".format(i + 1),
                            activation=post_fcnet_activation
                            if i < len(post_fcnet_hiddens) - 1
                            else None,
                            kernel_initializer=normc_initializer(1.0),
                        )(last_layer)
                else:
                    feature_out = last_layer
                    last_cnn = last_layer = tf.keras.layers.Conv2D(
                        num_outputs,
                        [1, 1],
                        activation=None,
                        padding="same",
                        data_format="channels_last",
                        name="conv_out",
                    )(last_layer)

                # The conv stack must reduce the spatial dims to exactly
                # (1, 1) for the squeeze in `forward()` to be valid.
                if last_cnn.shape[1] != 1 or last_cnn.shape[2] != 1:
                    raise ValueError(
                        "Given `conv_filters` ({}) do not result in a [B, 1, "
                        "1, {} (`num_outputs`)] shape (but in {})! Please "
                        "adjust your Conv2D stack such that the dims 1 and 2 "
                        "are both 1.".format(
                            self.model_config["conv_filters"],
                            self.num_outputs,
                            list(last_cnn.shape),
                        )
                    )

            # num_outputs not known -> Flatten, then set self.num_outputs
            # to the resulting number of nodes.
            else:
                self.last_layer_is_flattened = True
                last_layer = tf.keras.layers.Flatten(data_format="channels_last")(
                    last_layer
                )

                # Add (optional) post-fc-stack after last Conv2D layer.
                for i, out_size in enumerate(post_fcnet_hiddens):
                    last_layer = tf.keras.layers.Dense(
                        out_size,
                        name="post_fcnet_{}".format(i),
                        activation=post_fcnet_activation,
                        kernel_initializer=normc_initializer(1.0),
                    )(last_layer)
                feature_out = last_layer
                self.num_outputs = last_layer.shape[1]
        logits_out = last_layer

        # Build the value layers.
        if vf_share_layers:
            if not self.last_layer_is_flattened:
                feature_out = tf.keras.layers.Lambda(
                    lambda x: tf.squeeze(x, axis=[1, 2])
                )(feature_out)
            value_out = tf.keras.layers.Dense(
                1,
                name="value_out",
                activation=None,
                kernel_initializer=normc_initializer(0.01),
            )(feature_out)
        else:
            # Build a parallel set of hidden layers for the value net.
            last_layer = inputs
            for i, (out_size, kernel, stride) in enumerate(filters[:-1], 1):
                last_layer = tf.keras.layers.Conv2D(
                    out_size,
                    kernel,
                    strides=stride
                    if isinstance(stride, (list, tuple))
                    else (stride, stride),
                    activation=activation,
                    padding="same",
                    data_format="channels_last",
                    name="conv_value_{}".format(i),
                )(last_layer)
            out_size, kernel, stride = filters[-1]
            last_layer = tf.keras.layers.Conv2D(
                out_size,
                kernel,
                strides=stride
                if isinstance(stride, (list, tuple))
                else (stride, stride),
                activation=activation,
                padding="valid",
                data_format="channels_last",
                name="conv_value_{}".format(len(filters)),
            )(last_layer)
            last_layer = tf.keras.layers.Conv2D(
                1,
                [1, 1],
                activation=None,
                padding="same",
                data_format="channels_last",
                name="conv_value_out",
            )(last_layer)
            value_out = tf.keras.layers.Lambda(lambda x: tf.squeeze(x, axis=[1, 2]))(
                last_layer
            )

        self.base_model = tf.keras.Model(inputs, [logits_out, value_out])

    def forward(
        self,
        input_dict: Dict[str, TensorType],
        state: List[TensorType],
        seq_lens: TensorType,
    ) -> Tuple[TensorType, List[TensorType]]:
        """Run the base model; returns (logits, state)."""
        obs = input_dict["obs"]
        if self.data_format == "channels_first":
            obs = tf.transpose(obs, [0, 2, 3, 1])
        # Explicit cast to float32 needed in eager.
        model_out, self._value_out = self.base_model(tf.cast(obs, tf.float32))
        # Our last layer is already flat.
        if self.last_layer_is_flattened:
            return model_out, state
        # Last layer is a n x [1,1] Conv2D -> Flatten.
        else:
            return tf.squeeze(model_out, axis=[1, 2]), state

    def value_function(self) -> TensorType:
        """Return the value-branch output for the most recent forward pass."""
        return tf.reshape(self._value_out, [-1])


class Keras_VisionNetwork(tf.keras.Model if tf else object):
    """Generic vision network implemented in tf keras.

    An additional post-conv fully connected stack can be added and configured
    via the config keys:
    `post_fcnet_hiddens`: Dense layer sizes after the Conv2D stack.
    `post_fcnet_activation`: Activation function to use for this FC stack.
    """

    def __init__(
        self,
        input_space: gym.spaces.Space,
        action_space: gym.spaces.Space,
        num_outputs: Optional[int] = None,
        *,
        name: str = "",
        conv_filters: Optional[Sequence[Sequence[int]]] = None,
        conv_activation: Optional[str] = None,
        post_fcnet_hiddens: Optional[Sequence[int]] = (),
        post_fcnet_activation: Optional[str] = None,
        no_final_linear: bool = False,
        vf_share_layers: bool = False,
        free_log_std: bool = False,
        **kwargs,
    ):
        super().__init__(name=name)

        if not conv_filters:
            conv_filters = get_filter_config(input_space.shape)
        assert len(conv_filters) > 0, "Must provide at least 1 entry in `conv_filters`!"

        conv_activation = get_activation_fn(conv_activation, framework="tf")
        post_fcnet_activation = get_activation_fn(post_fcnet_activation, framework="tf")
        # FIX: the default is a tuple; normalize to a list so the
        # `hiddens + [num_outputs]` concatenations below do not raise
        # TypeError (tuple + list).
        post_fcnet_hiddens = list(post_fcnet_hiddens)
        # NOTE(review): `free_log_std` is accepted for config compatibility
        # but not used by this network.

        input_shape = input_space.shape
        self.data_format = "channels_last"

        inputs = tf.keras.layers.Input(shape=input_shape, name="observations")
        last_layer = inputs
        # Whether the last layer is the output of a Flattened (rather than
        # a n x (1,1) Conv2D).
        self.last_layer_is_flattened = False

        # Build the action layers.
        for i, (out_size, kernel, stride) in enumerate(conv_filters[:-1], 1):
            last_layer = tf.keras.layers.Conv2D(
                out_size,
                kernel,
                strides=stride
                if isinstance(stride, (list, tuple))
                else (stride, stride),
                activation=conv_activation,
                padding="same",
                data_format="channels_last",
                name="conv{}".format(i),
            )(last_layer)

        out_size, kernel, stride = conv_filters[-1]

        # No final linear: Last layer has activation function and exits with
        # num_outputs nodes (this could be a 1x1 conv or a FC layer, depending
        # on `post_fcnet_...` settings).
        if no_final_linear and num_outputs:
            last_layer = tf.keras.layers.Conv2D(
                out_size if post_fcnet_hiddens else num_outputs,
                kernel,
                strides=stride
                if isinstance(stride, (list, tuple))
                else (stride, stride),
                activation=conv_activation,
                padding="valid",
                data_format="channels_last",
                name="conv_out",
            )(last_layer)
            # Add (optional) post-fc-stack after last Conv2D layer.
            layer_sizes = post_fcnet_hiddens[:-1] + (
                [num_outputs] if post_fcnet_hiddens else []
            )
            for i, out_size in enumerate(layer_sizes):
                last_layer = tf.keras.layers.Dense(
                    out_size,
                    name="post_fcnet_{}".format(i),
                    activation=post_fcnet_activation,
                    kernel_initializer=normc_initializer(1.0),
                )(last_layer)

        # Finish network normally (w/o overriding last layer size with
        # `num_outputs`), then add another linear one of size `num_outputs`.
        else:
            last_layer = tf.keras.layers.Conv2D(
                out_size,
                kernel,
                strides=stride
                if isinstance(stride, (list, tuple))
                else (stride, stride),
                activation=conv_activation,
                padding="valid",
                data_format="channels_last",
                name="conv{}".format(len(conv_filters)),
            )(last_layer)

            # num_outputs defined. Use that to create an exact
            # `num_output`-sized (1,1)-Conv2D.
            if num_outputs:
                if post_fcnet_hiddens:
                    last_cnn = last_layer = tf.keras.layers.Conv2D(
                        post_fcnet_hiddens[0],
                        [1, 1],
                        activation=post_fcnet_activation,
                        padding="same",
                        data_format="channels_last",
                        name="conv_out",
                    )(last_layer)
                    # Add (optional) post-fc-stack after last Conv2D layer.
                    for i, out_size in enumerate(
                        post_fcnet_hiddens[1:] + [num_outputs]
                    ):
                        last_layer = tf.keras.layers.Dense(
                            out_size,
                            name="post_fcnet_{}".format(i + 1),
                            activation=post_fcnet_activation
                            if i < len(post_fcnet_hiddens) - 1
                            else None,
                            kernel_initializer=normc_initializer(1.0),
                        )(last_layer)
                else:
                    last_cnn = last_layer = tf.keras.layers.Conv2D(
                        num_outputs,
                        [1, 1],
                        activation=None,
                        padding="same",
                        data_format="channels_last",
                        name="conv_out",
                    )(last_layer)

                if last_cnn.shape[1] != 1 or last_cnn.shape[2] != 1:
                    # FIX: this class has no `model_config` attribute (it is
                    # a plain keras Model); formatting with the local
                    # `conv_filters` instead of `self.model_config[...]`
                    # keeps the intended error from masking itself with an
                    # AttributeError.
                    raise ValueError(
                        "Given `conv_filters` ({}) do not result in a [B, 1, "
                        "1, {} (`num_outputs`)] shape (but in {})! Please "
                        "adjust your Conv2D stack such that the dims 1 and 2 "
                        "are both 1.".format(
                            conv_filters,
                            num_outputs,
                            list(last_cnn.shape),
                        )
                    )

            # num_outputs not known -> Flatten.
            else:
                self.last_layer_is_flattened = True
                last_layer = tf.keras.layers.Flatten(data_format="channels_last")(
                    last_layer
                )

                # Add (optional) post-fc-stack after last Conv2D layer.
                for i, out_size in enumerate(post_fcnet_hiddens):
                    last_layer = tf.keras.layers.Dense(
                        out_size,
                        name="post_fcnet_{}".format(i),
                        activation=post_fcnet_activation,
                        kernel_initializer=normc_initializer(1.0),
                    )(last_layer)
        logits_out = last_layer

        # Build the value layers.
        if vf_share_layers:
            if not self.last_layer_is_flattened:
                last_layer = tf.keras.layers.Lambda(
                    lambda x: tf.squeeze(x, axis=[1, 2])
                )(last_layer)
            value_out = tf.keras.layers.Dense(
                1,
                name="value_out",
                activation=None,
                kernel_initializer=normc_initializer(0.01),
            )(last_layer)
        else:
            # Build a parallel set of hidden layers for the value net.
            last_layer = inputs
            for i, (out_size, kernel, stride) in enumerate(conv_filters[:-1], 1):
                last_layer = tf.keras.layers.Conv2D(
                    out_size,
                    kernel,
                    strides=stride
                    if isinstance(stride, (list, tuple))
                    else (stride, stride),
                    activation=conv_activation,
                    padding="same",
                    data_format="channels_last",
                    name="conv_value_{}".format(i),
                )(last_layer)
            out_size, kernel, stride = conv_filters[-1]
            last_layer = tf.keras.layers.Conv2D(
                out_size,
                kernel,
                strides=stride
                if isinstance(stride, (list, tuple))
                else (stride, stride),
                activation=conv_activation,
                padding="valid",
                data_format="channels_last",
                name="conv_value_{}".format(len(conv_filters)),
            )(last_layer)
            last_layer = tf.keras.layers.Conv2D(
                1,
                [1, 1],
                activation=None,
                padding="same",
                data_format="channels_last",
                name="conv_value_out",
            )(last_layer)
            value_out = tf.keras.layers.Lambda(lambda x: tf.squeeze(x, axis=[1, 2]))(
                last_layer
            )

        self.base_model = tf.keras.Model(inputs, [logits_out, value_out])

    def call(
        self, input_dict: SampleBatch
    ) -> Tuple[TensorType, List[TensorType], Dict[str, TensorType]]:
        """Run the base model; returns (logits, state, extra_outs)."""
        obs = input_dict["obs"]
        if self.data_format == "channels_first":
            obs = tf.transpose(obs, [0, 2, 3, 1])
        # Explicit cast to float32 needed in eager.
        model_out, self._value_out = self.base_model(tf.cast(obs, tf.float32))
        state = [v for k, v in input_dict.items() if k.startswith("state_in_")]
        extra_outs = {SampleBatch.VF_PREDS: tf.reshape(self._value_out, [-1])}
        # Our last layer is already flat.
        if self.last_layer_is_flattened:
            return model_out, state, extra_outs
        # Last layer is a n x [1,1] Conv2D -> Flatten.
        else:
            return tf.squeeze(model_out, axis=[1, 2]), state, extra_outs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from unittest import mock

from neutron_lib.callbacks import events as cb_events
from neutron_lib.services.trunk import constants as t_const
import oslo_messaging
from oslo_utils import uuidutils
import testtools

from neutron.api.rpc.callbacks import events
from neutron.api.rpc.handlers import resources_rpc
from neutron.objects import trunk
from neutron.services.trunk.drivers.linuxbridge.agent import driver
from neutron.services.trunk.drivers.linuxbridge.agent import trunk_plumber
from neutron.tests import base

# Number of vlan subports attached to the fixture trunk in both test cases.
_NUM_SUBPORTS = 20


def _make_trunk_with_subports(n_subports=_NUM_SUBPORTS):
    """Return a Trunk OVO carrying *n_subports* vlan SubPorts."""
    fixture = trunk.Trunk(id=uuidutils.generate_uuid(),
                          port_id=uuidutils.generate_uuid(),
                          project_id=uuidutils.generate_uuid())
    fixture.sub_ports = [
        trunk.SubPort(id=uuidutils.generate_uuid(),
                      port_id=uuidutils.generate_uuid(),
                      segmentation_type='vlan',
                      trunk_id=fixture.id,
                      segmentation_id=seg_id)
        for seg_id in range(n_subports)]
    return fixture


class LinuxBridgeTrunkDriverTestCase(base.BaseTestCase):
    """Exercise LinuxBridgeTrunkDriver with a fully mocked plumber/API."""

    def setUp(self):
        super(LinuxBridgeTrunkDriverTestCase, self).setUp()
        self.plumber = mock.create_autospec(trunk_plumber.Plumber())
        self.stub = mock.create_autospec(driver.trunk_rpc.TrunkStub())
        self.tapi = mock.create_autospec(driver._TrunkAPI(self.stub))
        self.lbd = driver.LinuxBridgeTrunkDriver(self.plumber, self.tapi)
        self.trunk = _make_trunk_with_subports()
        self.subports = self.trunk.sub_ports

    def test_handle_trunks_created(self):
        self._test_handle_trunks_wire_event(events.CREATED)

    def test_handle_trunks_updated(self):
        self._test_handle_trunks_wire_event(events.UPDATED)

    def _test_handle_trunks_wire_event(self, event):
        # CREATED/UPDATED should register the trunk and (re)wire it.
        self.plumber.trunk_on_host.return_value = True
        self.lbd.handle_trunks(mock.Mock(), 'TRUNKS', [self.trunk], event)
        self.tapi.put_trunk.assert_called_once_with(
            self.trunk.port_id, self.trunk)
        self.tapi.bind_subports_to_host.assert_called_once_with(
            mock.ANY, self.trunk)
        self.assertFalse(self.plumber.delete_trunk_subports.called)

    def test_handle_trunks_deleted(self):
        # DELETED should unregister the trunk and tear down its subports.
        self.lbd.handle_trunks(mock.Mock(), 'TRUNKS', [self.trunk],
                               events.DELETED)
        self.tapi.put_trunk.assert_called_once_with(
            self.trunk.port_id, None)
        self.plumber.delete_trunk_subports.assert_called_once_with(self.trunk)

    def test_handle_subports_deleted(self):
        self.tapi.get_trunk_by_id.return_value = self.trunk
        self.lbd.handle_subports(mock.Mock(), 'TRUNKS', self.trunk.sub_ports,
                                 events.DELETED)
        self.assertEqual(_NUM_SUBPORTS,
                         len(self.tapi.delete_trunk_subport.mock_calls))
        # should have tried to wire trunk at the end with state
        self.plumber.trunk_on_host.assert_called_once_with(self.trunk)

    def test_handle_subports_created(self):
        self.tapi.get_trunk_by_id.return_value = self.trunk
        self.lbd.handle_subports(mock.Mock(), 'TRUNKS', self.trunk.sub_ports,
                                 events.CREATED)
        self.assertEqual(_NUM_SUBPORTS,
                         len(self.tapi.put_trunk_subport.mock_calls))
        # should have tried to wire trunk at the end with state
        self.plumber.trunk_on_host.assert_called_once_with(self.trunk)

    def test_agent_port_change_is_trunk(self):
        self.tapi.get_trunk.return_value = self.trunk
        payload = cb_events.DBEventPayload(
            'context', states=({'port_id': self.trunk.port_id},),
            resource_id=self.trunk.port_id)
        self.lbd.agent_port_change('resource', 'event', 'trigger',
                                   payload=payload)
        # should have tried to wire trunk
        self.plumber.trunk_on_host.assert_called_once_with(self.trunk)

    def test_agent_port_change_not_trunk(self):
        # Neither a trunk parent nor a subport: only stale cleanup happens.
        self.tapi.get_trunk.return_value = None
        self.tapi.get_trunk_for_subport.return_value = None
        unrelated_port = uuidutils.generate_uuid()
        payload = cb_events.DBEventPayload(
            'context', states=({'port_id': unrelated_port},),
            resource_id=unrelated_port)
        self.lbd.agent_port_change('resource', 'event', 'trigger',
                                   payload=payload)
        self.plumber.delete_subports_by_port_id.assert_called_once_with(
            unrelated_port)

    def test_agent_port_change_is_subport(self):
        self.tapi.get_trunk.return_value = None
        self.tapi.get_trunk_for_subport.return_value = self.trunk
        device_info = {'port_id': self.trunk.sub_ports[0].port_id,
                       'mac_address': 'mac_addr'}
        payload = cb_events.DBEventPayload(
            'context', states=(device_info,),
            resource_id=device_info['port_id'])
        self.lbd.agent_port_change('resource', 'event', 'trigger',
                                   payload=payload)
        self.plumber.delete_subports_by_port_id.assert_called_once_with(
            self.trunk.sub_ports[0].port_id)

    def test_wire_trunk_happy_path(self):
        self.lbd.wire_trunk('ctx', self.trunk)
        self.tapi.bind_subports_to_host.assert_called_once_with(
            'ctx', self.trunk)
        self.plumber.ensure_trunk_subports.assert_called_once_with(self.trunk)
        self.tapi.set_trunk_status.assert_called_once_with(
            'ctx', self.trunk, t_const.TRUNK_ACTIVE_STATUS)

    def test_wire_trunk_not_on_host(self):
        # trunk device not on host
        self.plumber.trunk_on_host.return_value = False
        self.lbd.wire_trunk('ctx', self.trunk)
        # don't bind and don't set status
        self.assertFalse(self.tapi.bind_subports_to_host.called)
        self.assertFalse(self.tapi.set_trunk_status.called)

    def test_wire_trunk_concurrent_removal(self):
        self.plumber.trunk_on_host.side_effect = [True, False]
        self.plumber.ensure_trunk_subports.side_effect = ValueError()
        self.lbd.wire_trunk('ctx', self.trunk)
        # we don't change status if port was just removed
        self.assertFalse(self.tapi.set_trunk_status.called)

    def test_wire_trunk_other_exception(self):
        self.plumber.ensure_trunk_subports.side_effect = ValueError()
        self.lbd.wire_trunk('ctx', self.trunk)
        # degraded due to dataplane failure
        self.tapi.set_trunk_status.assert_called_once_with(
            'ctx', self.trunk, t_const.TRUNK_DEGRADED_STATUS)


class TrunkAPITestCase(base.BaseTestCase):
    """Exercise _TrunkAPI caching behavior against a mocked TrunkStub."""

    def setUp(self):
        super(TrunkAPITestCase, self).setUp()
        self.stub = mock.create_autospec(driver.trunk_rpc.TrunkStub())
        self.tapi = driver._TrunkAPI(self.stub)
        self.trunk = _make_trunk_with_subports()
        self.subports = self.trunk.sub_ports
        self.stub.get_trunk_details.return_value = self.trunk

    def test_fetch_trunk(self):
        self.assertEqual(self.trunk, self.tapi._fetch_trunk('ctx', 'port'))
        self.stub.get_trunk_details.assert_called_once_with('ctx', 'port')

    def test_fetch_trunk_missing(self):
        self.stub.get_trunk_details.side_effect = (
            resources_rpc.ResourceNotFound(resource_id='1',
                                           resource_type='1'))
        self.assertIsNone(self.tapi._fetch_trunk('ctx', 'port'))

    def test_fetch_trunk_plugin_disabled(self):
        self.stub.get_trunk_details.side_effect = (
            oslo_messaging.RemoteError('CallbackNotFound'))
        self.assertIsNone(self.tapi._fetch_trunk('ctx', 'port'))

    def test_fetch_trunk_plugin_other_error(self):
        self.stub.get_trunk_details.side_effect = (
            oslo_messaging.RemoteError('vacuum full'))
        with testtools.ExpectedException(oslo_messaging.RemoteError):
            self.tapi._fetch_trunk('ctx', 'port')

    def test_set_trunk_status(self):
        self.tapi.set_trunk_status('ctx', self.trunk, 'STATUS')
        self.stub.update_trunk_status.assert_called_once_with(
            'ctx', self.trunk.id, 'STATUS')

    def test_bind_subports_to_host(self):
        self.tapi.bind_subports_to_host('ctx', self.trunk)
        self.stub.update_subport_bindings.assert_called_once_with(
            'ctx', self.trunk.sub_ports)

    def test_put_trunk_subport_non_existent_trunk(self):
        # trunks not registered are ignored
        self.tapi.put_trunk_subport(
            'non_trunk_id', self.trunk.sub_ports[0])

    def test_get_trunk_by_id(self):
        self.tapi.put_trunk(self.trunk.port_id, self.trunk)
        self.assertEqual(self.trunk,
                         self.tapi.get_trunk_by_id('ctx', self.trunk.id))
        self.assertIsNone(self.tapi.get_trunk_by_id('ctx', 'other_id'))

    def test_put_trunk_subport(self):
        self.tapi.put_trunk(self.trunk.port_id, self.trunk)
        extra_sub = trunk.SubPort(
            id=uuidutils.generate_uuid(),
            port_id=uuidutils.generate_uuid(),
            segmentation_type='vlan',
            trunk_id=self.trunk.id,
            segmentation_id=1010)
        self.tapi.put_trunk_subport(self.trunk.id, extra_sub)
        current = self.tapi.get_trunk('ctx', self.trunk.port_id).sub_ports
        self.assertEqual(_NUM_SUBPORTS + 1, len(current))
        self.assertEqual(extra_sub, current[-1])

    def test_delete_trunk_subport(self):
        self.tapi.put_trunk(self.trunk.port_id, self.trunk)
        removed = self.trunk.sub_ports[10]
        self.tapi.delete_trunk_subport(self.trunk.id, removed)
        current = self.tapi.get_trunk('ctx', self.trunk.port_id).sub_ports
        self.assertNotIn(removed, current)
        self.assertEqual(_NUM_SUBPORTS - 1, len(current))

    def test_get_trunk(self):
        # Registered trunks are served from the cache; no RPC round-trip.
        self.tapi.put_trunk(self.trunk.port_id, self.trunk)
        self.assertEqual(self.trunk,
                         self.tapi.get_trunk('ctx', self.trunk.port_id))
        self.tapi.get_trunk('ctx', self.trunk.port_id)
        self.assertFalse(self.stub.get_trunk_details.called)

    def test_get_trunk_cache_miss(self):
        # First lookup hits the stub; the second is answered from the cache.
        self.assertEqual(self.trunk,
                         self.tapi.get_trunk('ctx', self.trunk.port_id))
        self.tapi.get_trunk('ctx', self.trunk.port_id)
        self.assertEqual(1, len(self.stub.get_trunk_details.mock_calls))

    def test_get_trunk_not_found(self):
        # Negative results are cached as well.
        self.stub.get_trunk_details.side_effect = (
            resources_rpc.ResourceNotFound(resource_id='1',
                                           resource_type='1'))
        self.assertIsNone(self.tapi.get_trunk('ctx', self.trunk.port_id))
        self.tapi.get_trunk('ctx', self.trunk.port_id)
        self.assertEqual(1, len(self.stub.get_trunk_details.mock_calls))

    def test_get_trunk_for_subport(self):
        self.tapi.put_trunk(self.trunk.port_id, self.trunk)
        found = self.tapi.get_trunk_for_subport(
            'ctx', self.trunk.sub_ports[0].port_id)
        self.assertEqual(self.trunk, found)
# Authors: Thomas Moreau <thomas.moreau@inria.fr>

import time

import numpy as np
from scipy import sparse

from . import check_random_state
from .dictionary import get_D_shape
from ..loss_and_gradient import gradient_zi
from .convolution import _choose_convolve_multi


def _coordinate_descent_idx(Xi, D, constants, reg, z0=None, max_iter=1000,
                            tol=1e-3, strategy='greedy', n_seg='auto',
                            freeze_support=False, debug=False, timing=False,
                            random_state=None, name="CD", verbose=0):
    """Compute the coding signal associated to Xi with coordinate descent.

    Parameters
    ----------
    Xi : array, shape (n_channels, n_times)
        The signal to encode.
    D : array
        The atoms. Can either be full rank with shape
        (n_atoms, n_channels, n_times_atom) or rank 1 with shape
        (n_atoms, n_channels + n_times_atom)
    constants : dict
        Constants containing DtD to speedup computation
    reg : float
        L1 regularization weight applied to the activations.
    z0 : array, shape (n_atoms, n_times_valid)
        Initial estimate of the coding signal, to warm start the algorithm.
    tol : float
        Tolerance for the stopping criterion of the algorithm
    max_iter : int
        Maximal number of iterations run by the algorithm
    strategy : str in {'greedy' | 'random'}
        Strategy to select the updated coordinate in the CD algorithm.
    n_seg : int or 'auto'
        Number of segments used to divide the coding signal. The updates are
        performed successively on each of these segments.
    freeze_support : boolean
        If set to True, only update the coefficient that are non-zero in z0.
    debug : boolean
        Activate extra check in the algorithm to assert that we have
        implemented the correct algorithm.
    timing : boolean
        If True, also return the objective values and wall-clock times
        recorded along the iterations.
    random_state : int, RandomState instance or None
        Seed / generator for the 'random' coordinate selection strategy.
    name : str
        Label used in verbose output.
    verbose : int
        Verbosity level; messages are printed for verbose > 10.

    Returns
    -------
    z_hat : array, shape (n_atoms, n_times_valid)
        The estimated coding signal. When ``timing`` is True, the objective
        values ``pobj`` and cumulative ``times`` are returned as well.
    """
    if timing:
        t_start = time.time()
    n_channels, n_times = Xi.shape
    n_atoms, n_channels, n_times_atom = get_D_shape(D, n_channels)
    n_times_valid = n_times - n_times_atom + 1

    # t0 is (for now) the center lag of the atom auto-correlation; it indexes
    # the DtD diagonal used to build norm_Dk below. It is later reused as the
    # time index of the currently selected coordinate.
    t0 = n_times_atom - 1
    if z0 is None:
        z_hat = np.zeros((n_atoms, n_times_valid))
    else:
        z_hat = z0.copy()

    if n_seg == 'auto':
        if strategy == 'greedy':
            # One segment per ~2 atom-lengths keeps the greedy max-search local.
            n_seg = max(n_times_valid // (2 * n_times_atom), 1)
        elif strategy in ('random', 'cyclic'):
            n_seg = 1
    n_coordinates = n_times_valid * n_atoms

    rng = check_random_state(random_state)

    # max_iter counts full passes over the segments.
    max_iter *= n_seg
    n_times_seg = n_times_valid // n_seg + 1

    def objective(zi):
        # 0.5 * ||D * zi - Xi||_2^2 + reg * ||zi||_1 (zi is non-negative).
        Dzi = _choose_convolve_multi(zi, D=D, n_channels=n_channels)
        Dzi -= Xi
        func = 0.5 * np.dot(Dzi.ravel(), Dzi.ravel())
        func += reg * zi.sum()
        return func

    # norm_Dk[k] = ||D_k||_2^2, read from the DtD auto-correlation at lag 0.
    DtD = constants["DtD"]
    norm_Dk = np.array([DtD[k, k, t0] for k in range(n_atoms)])[:, None]

    if timing:
        times = [time.time() - t_start]
        pobj = [objective(z_hat)]
        t_start = time.time()

    # beta holds the (shifted) gradient; dz_opt the optimal step per coordinate.
    beta, dz_opt, tol = _init_beta(Xi, z_hat, D, constants, reg, norm_Dk,
                                   tol, use_sparse_dz=False)

    # If we freeze the support, we put dz_opt to zero outside the support of z0
    if freeze_support:
        mask = z0 == 0
        dz_opt[mask] = 0

    # accumulator / active_segs implement the segment-wise stopping criterion:
    # a segment is deactivated once no coordinate in it exceeds tol.
    accumulator = n_seg
    active_segs = np.array([True] * n_seg)
    i_seg = 0
    seg_bounds = [0, n_times_seg]

    # (t0, k0) now tracks the previously selected coordinate (for 'cyclic').
    t0, k0 = -1, 0
    for ii in range(int(max_iter)):
        k0, t0, dz = _select_coordinate(strategy, dz_opt, active_segs[i_seg],
                                        n_atoms, n_times_valid, n_times_seg,
                                        seg_bounds, (t0, k0), rng=rng)
        if strategy in ['random', 'cyclic']:
            # accumulate on all coordinates from the stopping criterion
            if ii % n_coordinates == 0:
                accumulator = 0
            accumulator += abs(dz)

        # Update the selected coordinate and beta, only if the update is
        # greater than the convergence tolerance.
        if abs(dz) > tol:
            # update the selected coordinate
            z_hat[k0, t0] += dz

            # update beta
            beta, dz_opt, accumulator, active_segs = _update_beta(
                beta, dz_opt, accumulator, active_segs, z_hat, DtD, norm_Dk,
                dz, k0, t0, reg, tol, seg_bounds, i_seg, n_times_atom,
                z0, freeze_support, debug)

        elif active_segs[i_seg]:
            # No useful update in this segment: deactivate it.
            accumulator -= 1
            active_segs[i_seg] = False

        if timing and (ii % max(100, n_seg // 100) == 0):
            times.append(time.time() - t_start)
            pobj.append(objective(z_hat))
            t_start = time.time()

        # check stopping criterion
        if strategy == 'greedy':
            if accumulator == 0:
                if verbose > 10:
                    print('[{}] {} iterations'.format(name, ii + 1))
                break
        else:
            # only check at the last coordinate
            if (ii + 1) % n_coordinates == 0 and accumulator <= tol:
                if verbose > 10:
                    print('[{}] {} iterations'.format(name, ii + 1))
                break

        # increment to next segment
        i_seg += 1
        seg_bounds[0] += n_times_seg
        seg_bounds[1] += n_times_seg

        if seg_bounds[0] >= n_times_valid:
            # reset to first segment
            i_seg = 0
            seg_bounds = [0, n_times_seg]
    else:
        # for-else: the loop exhausted max_iter without hitting a break.
        if verbose > 10:
            print('[{}] did not converge'.format(name))

    if timing:
        return z_hat, pobj, times
    return z_hat


def _init_beta(Xi, z_hat, D, constants, reg, norm_Dk, tol,
               use_sparse_dz=False):
    """Initialize beta (gradient buffer) and dz_opt (per-coordinate steps).

    Also rescales ``tol`` by the standard deviation of the signal.
    """
    # Init beta with -DtX
    beta = gradient_zi(Xi, z_hat, D=D, reg=None, loss='l2',
                       return_func=False, constants=constants)
    for k, t in zip(*z_hat.nonzero()):
        # Remove the diagonal contribution of each active coordinate so that
        # beta excludes the coordinate's own term.
        beta[k, t] -= z_hat[k, t] * norm_Dk[k]  # np.sum(DtD[k, k, t0])

    # Closed-form optimal (non-negative, soft-thresholded) move per coordinate.
    dz_opt = np.maximum(-beta - reg, 0) / norm_Dk - z_hat

    tol = tol * np.std(Xi)

    if use_sparse_dz:
        dz_opt[abs(dz_opt) < tol] = 0
        dz_opt = sparse.lil_matrix(dz_opt)

    return beta, dz_opt, tol


def _update_beta(beta, dz_opt, accumulator, active_segs, z_hat, DtD, norm_Dk,
                 dz, k0, t0, reg, tol, seg_bounds, i_seg, n_times_atom,
                 z0, freeze_support, debug):
    """Propagate the coordinate update (k0, t0, dz) into beta and dz_opt.

    Only coordinates whose atom overlaps [t0 - n_times_atom + 1,
    t0 + n_times_atom) are affected. Neighboring segments touched by the
    update are reactivated for the greedy strategy.
    """
    n_atoms, n_times_valid = beta.shape

    # define the bounds for the beta update
    t_start_up = max(0, t0 - n_times_atom + 1)
    t_end_up = min(t0 + n_times_atom, n_times_valid)

    # update beta
    beta_i0 = beta[k0, t0]
    ll = t_end_up - t_start_up
    offset = max(0, n_times_atom - t0 - 1)
    beta[:, t_start_up:t_end_up] += DtD[:, k0, offset:offset + ll] * dz
    # The updated coordinate keeps its pre-update beta (its own term is
    # excluded by construction).
    beta[k0, t0] = beta_i0

    # update dz_opt
    tmp = np.maximum(-beta[:, t_start_up:t_end_up] - reg, 0) / norm_Dk
    dz_opt[:, t_start_up:t_end_up] = tmp - z_hat[:, t_start_up:t_end_up]
    dz_opt[k0, t0] = 0

    # re-enable greedy updates in the segments immediately before or after
    # if beta was updated outside the current segment
    t_start_seg, t_end_seg = seg_bounds
    if t_start_up < t_start_seg and not active_segs[i_seg - 1]:
        accumulator += 1
        active_segs[i_seg - 1] = True
    if t_end_up > t_end_seg and not active_segs[i_seg + 1]:
        accumulator += 1
        active_segs[i_seg + 1] = True

    # If we freeze the support, we put dz_opt to zero outside the support of z0
    if freeze_support:
        mask = z0[:, t_start_up:t_end_up] == 0
        dz_opt[:, t_start_up:t_end_up][mask] = 0

    if debug:
        # Check that we do not change the support while updating beta
        nnz_z0 = list(zip(*z0[:, t_start_up:t_end_up].nonzero()))
        nnz_dz = list(zip(*dz_opt[:, t_start_up:t_end_up].nonzero()))
        assert all([nnz in nnz_z0 for nnz in nnz_dz])

    return beta, dz_opt, accumulator, active_segs


def _select_coordinate(strategy, dz_opt, active_seg, n_atoms, n_times_valid,
                       n_times_seg, seg_bounds, prev_idx, rng):
    """Pick the next coordinate (k0, t0) and its step dz per *strategy*."""
    # Pick a coordinate to update
    if strategy == 'random':
        k0 = rng.randint(n_atoms)
        t0 = rng.randint(n_times_valid)
        dz = dz_opt[k0, t0]

    elif strategy == 'cyclic':
        # Walk time first, then wrap to the next atom.
        t0, k0 = prev_idx
        t0 += 1
        if t0 >= n_times_valid:
            t0 = 0
            k0 += 1
            if k0 >= n_atoms:
                k0 = 0
        dz = dz_opt[k0, t0]

    elif strategy == 'greedy':
        # if dZs[i_seg] > tol:
        t_start_seg, t_end_seg = seg_bounds
        if active_seg:
            # Largest-magnitude step inside the current segment.
            i0 = abs(dz_opt[:, t_start_seg:t_end_seg]).argmax()
            n_times_current = min(n_times_seg, n_times_valid - t_start_seg)
            k0, t0 = np.unravel_index(i0, (n_atoms, n_times_current))
            t0 += t_start_seg
            dz = dz_opt[k0, t0]
        else:
            k0, t0, dz = None, None, 0
    else:
        raise ValueError("'The coordinate selection method should be in "
                         "{'greedy' | 'random' | 'cyclic'}. Got '%s'."
% (strategy, )) return k0, t0, dz
########################################################################## # # Copyright (c) 2012, John Haddon. All rights reserved. # Copyright (c) 2013-2015, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # # * Neither the name of John Haddon nor the names of # any other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#
##########################################################################

import os
import shutil
import unittest
import imath
import random
import six

import IECore
import IECoreImage

import Gaffer
import GafferTest
import GafferImage
import GafferImageTest

# Exercises GafferImage.OpenImageIOReader : window/format handling, metadata,
# caching and refresh behaviour, frame sequences, and multi-part EXR support.
class OpenImageIOReaderTest( GafferImageTest.ImageTestCase ) :

	# Reference images shipped with the test suite.
	fileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checker.exr" )
	offsetDataWindowFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/rgb.100x100.exr" )
	negativeDataWindowFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checkerWithNegativeDataWindow.200x150.exr" )
	negativeDisplayWindowFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/negativeDisplayWindow.exr" )
	circlesExrFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/circles.exr" )
	circlesJpgFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/circles.jpg" )
	alignmentTestSourceFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/colorbars_half_max.exr" )
	multipartFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/multipart.exr" )
	unsupportedMultipartFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/unsupportedMultipart.exr" )
	multipartDefaultChannelsFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/multipartDefaultChannels.exr" )
	multipartDefaultChannelsOverlapFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/multipartDefaultChannelsOverlap.exr" )

	# Checks that the reader converts from the EXR (Y-down) coordinate
	# convention to Gaffer's internal (Y-up) image space.
	def testInternalImageSpaceConversion( self ) :

		r = IECore.Reader.create( self.negativeDataWindowFileName )
		image = r.read()
		exrDisplayWindow = image.displayWindow
		exrDataWindow = image.dataWindow

		n = GafferImage.OpenImageIOReader()
		n["fileName"].setValue( self.negativeDataWindowFileName )
		gafferFormat = n["out"]["format"].getValue()

		self.assertEqual(
			gafferFormat.toEXRSpace( gafferFormat.getDisplayWindow() ),
			exrDisplayWindow,
		)

		self.assertEqual(
			gafferFormat.toEXRSpace( n["out"]["dataWindow"].getValue() ),
			exrDataWindow,
		)

	# Basic sanity test : windows, metadata and channels of a known image,
	# and equality with the image loaded via IECore directly.
	def test( self ) :

		n = GafferImage.OpenImageIOReader()
		n["fileName"].setValue( self.fileName )

		self.assertEqual( n["out"]["dataWindow"].getValue(), imath.Box2i( imath.V2i( 0 ), imath.V2i( 200, 150 ) ) )
		self.assertEqual( n["out"]["format"].getValue().getDisplayWindow(), imath.Box2i( imath.V2i( 0 ), imath.V2i( 200, 150 ) ) )

		expectedMetadata = IECore.CompoundData( {
			"oiio:ColorSpace" : IECore.StringData( 'Linear' ),
			"compression" : IECore.StringData( 'zips' ),
			"PixelAspectRatio" : IECore.FloatData( 1 ),
			"screenWindowCenter" : IECore.V2fData( imath.V2f( 0, 0 ) ),
			"screenWindowWidth" : IECore.FloatData( 1 ),
			"fileFormat" : IECore.StringData( "openexr" ),
			"dataType" : IECore.StringData( "float" ),
		} )
		self.assertEqual( n["out"]["metadata"].getValue(), expectedMetadata )

		channelNames = n["out"]["channelNames"].getValue()
		self.assertIsInstance( channelNames, IECore.StringVectorData )
		self.assertIn( "R", channelNames )
		self.assertIn( "G", channelNames )
		self.assertIn( "B", channelNames )
		self.assertIn( "A", channelNames )

		image = GafferImage.ImageAlgo.image( n["out"] )
		self.assertEqual( image.blindData(), IECore.CompoundData( dict(expectedMetadata) ) )

		image2 = IECore.Reader.create( self.fileName ).read()
		image.blindData().clear()
		image2.blindData().clear()
		self.assertEqual( image, image2 )

	def testNegativeDisplayWindowRead( self ) :

		n = GafferImage.OpenImageIOReader()
		n["fileName"].setValue( self.negativeDisplayWindowFileName )
		f = n["out"]["format"].getValue()
		d = n["out"]["dataWindow"].getValue()
		self.assertEqual( f.getDisplayWindow(), imath.Box2i( imath.V2i( -5, -5 ), imath.V2i( 21, 21 ) ) )
		self.assertEqual( d, imath.Box2i( imath.V2i( 2, -14 ), imath.V2i( 36, 20 ) ) )

		expectedImage = IECore.Reader.create( self.negativeDisplayWindowFileName ).read()
		outImage = GafferImage.ImageAlgo.image( n["out"] )
		expectedImage.blindData().clear()
		outImage.blindData().clear()
		self.assertEqual( expectedImage, outImage )

	def testNegativeDataWindow( self ) :

		n = GafferImage.OpenImageIOReader()
		n["fileName"].setValue( self.negativeDataWindowFileName )
		self.assertEqual( n["out"]["dataWindow"].getValue(), imath.Box2i( imath.V2i( -25, -30 ), imath.V2i( 175, 120 ) ) )
		self.assertEqual( n["out"]["format"].getValue().getDisplayWindow(), imath.Box2i( imath.V2i( 0 ), imath.V2i( 200, 150 ) ) )

		channelNames = n["out"]["channelNames"].getValue()
		self.assertIsInstance( channelNames, IECore.StringVectorData )
		self.assertIn( "R", channelNames )
		self.assertIn( "G", channelNames )
		self.assertIn( "B", channelNames )

		image = GafferImage.ImageAlgo.image( n["out"] )
		image2 = IECore.Reader.create( self.negativeDataWindowFileName ).read()

		op = IECoreImage.ImageDiffOp()
		res = op(
			imageA = image,
			imageB = image2
		)
		self.assertFalse( res.value )

	def testTileSize( self ) :

		n = GafferImage.OpenImageIOReader()
		n["fileName"].setValue( self.fileName )

		tile = n["out"].channelData( "R", imath.V2i( 0 ) )
		self.assertEqual( len( tile ), GafferImage.ImagePlug().tileSize() **2 )

	# Pulling on the output of a reader with no fileName set should not error.
	def testUnspecifiedFilename( self ) :

		n = GafferImage.OpenImageIOReader()
		n["out"]["channelNames"].getValue()
		n["out"].channelData( "R", imath.V2i( 0 ) )

	def testChannelDataHashes( self ) :
		# Test that two tiles within the same image have different hashes.
		n = GafferImage.OpenImageIOReader()
		n["fileName"].setValue( self.fileName )
		h1 = n["out"].channelData( "R", imath.V2i( 0 ) ).hash()
		h2 = n["out"].channelData( "R", imath.V2i( GafferImage.ImagePlug().tileSize() ) ).hash()

		self.assertNotEqual( h1, h2 )

	def testDisabledChannelDataHashes( self ) :
		# Test that two tiles within the same image have the same hash when disabled.
		n = GafferImage.OpenImageIOReader()
		n["fileName"].setValue( self.fileName )
		n["enabled"].setValue( False )
		h1 = n["out"].channelData( "R", imath.V2i( 0 ) ).hash()
		h2 = n["out"].channelData( "R", imath.V2i( GafferImage.ImagePlug().tileSize() ) ).hash()

		self.assertEqual( h1, h2 )

	def testOffsetDataWindowOrigin( self ) :

		n = GafferImage.OpenImageIOReader()
		n["fileName"].setValue( self.offsetDataWindowFileName )

		image = GafferImage.ImageAlgo.image( n["out"] )
		image2 = IECore.Reader.create( self.offsetDataWindowFileName ).read()

		image.blindData().clear()
		image2.blindData().clear()
		self.assertEqual( image, image2 )

	# A JPEG converted to linear via a ColorSpace node should match the
	# equivalent EXR to within a small tolerance.
	def testJpgRead( self ) :

		exrReader = GafferImage.OpenImageIOReader()
		exrReader["fileName"].setValue( self.circlesExrFileName )

		jpgReader = GafferImage.OpenImageIOReader()
		jpgReader["fileName"].setValue( self.circlesJpgFileName )
		jpgOCIO = GafferImage.ColorSpace()
		jpgOCIO["in"].setInput( jpgReader["out"] )
		jpgOCIO["inputSpace"].setValue( "sRGB" )
		jpgOCIO["outputSpace"].setValue( "linear" )

		self.assertImagesEqual( exrReader["out"], jpgOCIO["out"], ignoreMetadata = True, maxDifference = 0.001 )

	def testSupportedExtensions( self ) :

		e = GafferImage.OpenImageIOReader.supportedExtensions()

		self.assertTrue( "exr" in e )
		self.assertTrue( "jpg" in e )
		self.assertTrue( "tif" in e )
		self.assertTrue( "png" in e )
		self.assertTrue( "cin" in e )
		self.assertTrue( "dpx" in e )

	def testFileRefresh( self ) :

		testFile = self.temporaryDirectory() + "/refresh.exr"
		shutil.copyfile( self.fileName, testFile )

		reader = GafferImage.OpenImageIOReader()
		reader["fileName"].setValue( testFile )
		image1 = GafferImage.ImageAlgo.image( reader["out"] )

		# even though we've changed the image on disk, gaffer will
		# still have the old one in its cache.
		shutil.copyfile( self.offsetDataWindowFileName, testFile )
		self.assertEqual( GafferImage.ImageAlgo.image( reader["out"] ), image1 )

		# until we force a refresh
		reader["refreshCount"].setValue( reader["refreshCount"].getValue() + 1 )
		self.assertNotEqual( GafferImage.ImageAlgo.image( reader["out"] ), image1 )

	# Every output of the reader should raise, mentioning the missing file.
	def testNonexistentFiles( self ) :

		reader = GafferImage.OpenImageIOReader()
		reader["fileName"].setValue( "wellIDontExist.exr" )

		six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"]["format"].getValue )
		six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"]["dataWindow"].getValue )
		six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"]["metadata"].getValue )
		six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"]["channelNames"].getValue )
		six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"].channelData, "R", imath.V2i( 0 ) )
		six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", GafferImage.ImageAlgo.image, reader["out"] )

	def testAvailableFrames( self ) :

		testSequence = IECore.FileSequence( self.temporaryDirectory() + "/incompleteSequence.####.exr" )
		shutil.copyfile( self.fileName, testSequence.fileNameForFrame( 1 ) )
		shutil.copyfile( self.offsetDataWindowFileName, testSequence.fileNameForFrame( 3 ) )

		reader = GafferImage.OpenImageIOReader()
		reader["fileName"].setValue( testSequence.fileName )

		self.assertEqual( reader["availableFrames"].getValue(), IECore.IntVectorData( [ 1, 3 ] ) )

		# it doesn't update until we refresh
		shutil.copyfile( self.offsetDataWindowFileName, testSequence.fileNameForFrame( 5 ) )
		self.assertEqual( reader["availableFrames"].getValue(), IECore.IntVectorData( [ 1, 3 ] ) )
		reader["refreshCount"].setValue( reader["refreshCount"].getValue() + 1 )
		self.assertEqual( reader["availableFrames"].getValue(), IECore.IntVectorData( [ 1, 3, 5 ] ) )

		# explicit file paths aren't considered a sequence
		reader["fileName"].setValue( self.fileName )
		self.assertEqual( reader["availableFrames"].getValue(), IECore.IntVectorData( [] ) )
		reader["fileName"].setValue( testSequence.fileNameForFrame( 1 ) )
		self.assertEqual( reader["availableFrames"].getValue(), IECore.IntVectorData( [] ) )

	# Exercises Error / Hold / Black behaviour for frames missing from a
	# sequence, and confirms explicit fileNames bypass MissingFrameMode.
	def testMissingFrameMode( self ) :

		testSequence = IECore.FileSequence( self.temporaryDirectory() + "/incompleteSequence.####.exr" )
		shutil.copyfile( self.fileName, testSequence.fileNameForFrame( 1 ) )
		shutil.copyfile( self.offsetDataWindowFileName, testSequence.fileNameForFrame( 3 ) )

		reader = GafferImage.OpenImageIOReader()
		reader["fileName"].setValue( testSequence.fileName )

		context = Gaffer.Context()

		# get frame 1 data for comparison
		context.setFrame( 1 )
		with context :
			f1Image = GafferImage.ImageAlgo.image( reader["out"] )
			f1Format = reader["out"]["format"].getValue()
			f1DataWindow = reader["out"]["dataWindow"].getValue()
			f1Metadata = reader["out"]["metadata"].getValue()
			f1ChannelNames = reader["out"]["channelNames"].getValue()
			f1Tile = reader["out"].channelData( "R", imath.V2i( 0 ) )

		# make sure the tile we're comparing isn't black
		# so we can tell if MissingFrameMode::Black is working.
		blackTile = IECore.FloatVectorData( [ 0 ] * GafferImage.ImagePlug.tileSize() * GafferImage.ImagePlug.tileSize() )
		self.assertNotEqual( f1Tile, blackTile )

		# set to a missing frame
		context.setFrame( 2 )

		# everything throws
		reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Error )
		with context :
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", GafferImage.ImageAlgo.image, reader["out"] )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["format"].getValue )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["dataWindow"].getValue )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["metadata"].getValue )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["channelNames"].getValue )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"].channelData, "R", imath.V2i( 0 ) )

		# everything matches frame 1
		reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Hold )
		with context :
			self.assertEqual( GafferImage.ImageAlgo.image( reader["out"] ), f1Image )
			self.assertEqual( reader["out"]["format"].getValue(), f1Format )
			self.assertEqual( reader["out"]["dataWindow"].getValue(), f1DataWindow )
			self.assertEqual( reader["out"]["metadata"].getValue(), f1Metadata )
			self.assertEqual( reader["out"]["channelNames"].getValue(), f1ChannelNames )
			self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), f1Tile )

		# the windows match frame 1, but everything else is default
		reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Black )
		with context :
			self.assertNotEqual( GafferImage.ImageAlgo.image( reader["out"] ), f1Image )
			self.assertEqual( reader["out"]["format"].getValue(), f1Format )
			self.assertEqual( reader["out"]["dataWindow"].getValue(), reader["out"]["dataWindow"].defaultValue() )
			self.assertEqual( reader["out"]["metadata"].getValue(), reader["out"]["metadata"].defaultValue() )
			self.assertEqual( reader["out"]["channelNames"].getValue(), reader["out"]["channelNames"].defaultValue() )
			self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), blackTile )

		# get frame 3 data for comparison
		context.setFrame( 3 )
		with context :
			f3Image = GafferImage.ImageAlgo.image( reader["out"] )
			f3Format = reader["out"]["format"].getValue()
			f3DataWindow = reader["out"]["dataWindow"].getValue()
			f3Metadata = reader["out"]["metadata"].getValue()
			f3ChannelNames = reader["out"]["channelNames"].getValue()
			f3Tile = reader["out"].channelData( "R", imath.V2i( 0 ) )

		# set to a different missing frame
		context.setFrame( 4 )

		# everything matches frame 3
		reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Hold )
		with context :
			self.assertNotEqual( GafferImage.ImageAlgo.image( reader["out"] ), f1Image )
			self.assertNotEqual( reader["out"]["format"].getValue(), f1Format )
			self.assertNotEqual( reader["out"]["dataWindow"].getValue(), f1DataWindow )
			self.assertNotEqual( reader["out"]["metadata"].getValue(), f1Metadata )
			# same channel names is fine
			self.assertEqual( reader["out"]["channelNames"].getValue(), f1ChannelNames )
			self.assertNotEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), f1Tile )
			self.assertEqual( GafferImage.ImageAlgo.image( reader["out"] ), f3Image )
			self.assertEqual( reader["out"]["format"].getValue(), f3Format )
			self.assertEqual( reader["out"]["dataWindow"].getValue(), f3DataWindow )
			self.assertEqual( reader["out"]["metadata"].getValue(), f3Metadata )
			self.assertEqual( reader["out"]["channelNames"].getValue(), f3ChannelNames )
			self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), f3Tile )

		# the windows match frame 3, but everything else is default
		reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Black )
		with context :
			self.assertNotEqual( reader["out"]["format"].getValue(), f1Format )
			self.assertEqual( reader["out"]["format"].getValue(), f3Format )
			self.assertEqual( reader["out"]["dataWindow"].getValue(), reader["out"]["dataWindow"].defaultValue() )
			self.assertEqual( reader["out"]["metadata"].getValue(), reader["out"]["metadata"].defaultValue() )
			self.assertEqual( reader["out"]["channelNames"].getValue(), reader["out"]["channelNames"].defaultValue() )
			self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), blackTile )

		# set to a missing frame before the start of the sequence
		context.setFrame( 0 )

		# everything matches frame 1
		reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Hold )
		with context :
			self.assertEqual( GafferImage.ImageAlgo.image( reader["out"] ), f1Image )
			self.assertEqual( reader["out"]["format"].getValue(), f1Format )
			self.assertEqual( reader["out"]["dataWindow"].getValue(), f1DataWindow )
			self.assertEqual( reader["out"]["metadata"].getValue(), f1Metadata )
			self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), f1Tile )

		# the windows match frame 1, but everything else is default
		reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Black )
		with context :
			self.assertEqual( reader["out"]["format"].getValue(), f1Format )
			self.assertEqual( reader["out"]["dataWindow"].getValue(), reader["out"]["dataWindow"].defaultValue() )
			self.assertEqual( reader["out"]["metadata"].getValue(), reader["out"]["metadata"].defaultValue() )
			self.assertEqual( reader["out"]["channelNames"].getValue(), reader["out"]["channelNames"].defaultValue() )
			self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), blackTile )

		# explicit fileNames do not support MissingFrameMode
		reader["fileName"].setValue( testSequence.fileNameForFrame( 0 ) )
		reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Hold )
		with context :
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", GafferImage.ImageAlgo.image, reader["out"] )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["format"].getValue )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["dataWindow"].getValue )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["metadata"].getValue )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["channelNames"].getValue )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"].channelData, "R", imath.V2i( 0 ) )

		reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Black )
		with context :
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", GafferImage.ImageAlgo.image, reader["out"] )
			six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["format"].getValue )
			self.assertEqual( reader["out"]["dataWindow"].getValue(), reader["out"]["dataWindow"].defaultValue() )
			self.assertEqual( reader["out"]["metadata"].getValue(), reader["out"]["metadata"].defaultValue() )
			self.assertEqual( reader["out"]["channelNames"].getValue(), reader["out"]["channelNames"].defaultValue() )
			self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), blackTile )

	def testHashesFrame( self ) :

		# the fileName excludes FrameSubstitutions, but
		# the internal implementation can still rely on
		# frame, so we need to check that the output
		# still responds to frame changes.

		testSequence = IECore.FileSequence( self.temporaryDirectory() + "/incompleteSequence.####.exr" )
		shutil.copyfile( self.fileName, testSequence.fileNameForFrame( 0 ) )
		shutil.copyfile( self.offsetDataWindowFileName, testSequence.fileNameForFrame( 1 ) )

		reader = GafferImage.OpenImageIOReader()
		reader["fileName"].setValue( testSequence.fileName )

		context = Gaffer.Context()

		# get frame 0 data for comparison
		context.setFrame( 0 )
		with context :
			sequenceMetadataHash = reader["out"]["metadata"].hash()
			sequenceMetadataValue = reader["out"]["metadata"].getValue()

		context.setFrame( 1 )
		with context :
			self.assertNotEqual( reader["out"]["metadata"].hash(), sequenceMetadataHash )
			self.assertNotEqual( reader["out"]["metadata"].getValue(), sequenceMetadataValue )

		# but when we set an explicit fileName,
		# we no longer re-compute per frame.
		reader["fileName"].setValue( testSequence.fileNameForFrame( 0 ) )

		# get frame 0 data for comparison
		context.setFrame( 0 )
		with context :
			explicitMetadataHash = reader["out"]["metadata"].hash()
			self.assertNotEqual( explicitMetadataHash, sequenceMetadataHash )
			self.assertEqual( reader["out"]["metadata"].getValue(), sequenceMetadataValue )

		context.setFrame( 1 )
		with context :
			self.assertNotEqual( reader["out"]["metadata"].hash(), sequenceMetadataHash )
			self.assertEqual( reader["out"]["metadata"].hash(), explicitMetadataHash )
			self.assertEqual( reader["out"]["metadata"].getValue(), sequenceMetadataValue )

	def testFileFormatMetadata( self ) :

		r = GafferImage.OpenImageIOReader()
		r["fileName"].setValue( self.circlesJpgFileName )
		self.assertEqual( r["out"]["metadata"].getValue()["dataType"].value, "uint8" )
		self.assertEqual( r["out"]["metadata"].getValue()["fileFormat"].value, "jpeg" )

		r["fileName"].setValue( "${GAFFER_ROOT}/python/GafferImageTest/images/rgb.100x100.dpx" )
		self.assertEqual( r["out"]["metadata"].getValue()["dataType"].value, "uint10" )
		self.assertEqual( r["out"]["metadata"].getValue()["fileFormat"].value, "dpx" )

	def testOffsetAlignment( self ) :
		# Test a bunch of different data window alignments on disk. This exercises code for reading
		# weirdly aligned scanlines and partial tiles

		tempFile = self.temporaryDirectory() + "/tempOffsetImage.exr"
		r = GafferImage.OpenImageIOReader()
		r["fileName"].setValue( self.alignmentTestSourceFileName )
		offsetOut = GafferImage.Offset()
		offsetOut["in"].setInput( r["out"] )
		w = GafferImage.ImageWriter()
		w["in"].setInput( offsetOut["out"] )
		w["fileName"].setValue( tempFile )
		rBack = GafferImage.OpenImageIOReader()
		rBack["fileName"].setValue( tempFile )
		offsetIn = GafferImage.Offset()
		offsetIn["in"].setInput( rBack["out"] )

		# seeded so the random offsets are reproducible
		random.seed( 42 )

		offsets = [ imath.V2i(x,y) for x in [-1,0,1] for y in [-1, 0, 1] ] + [
			imath.V2i( random.randint( -32, 32 ), random.randint( -32, 32 ) ) for i in range( 10 ) ]

		for mode in [ GafferImage.ImageWriter.Mode.Scanline, GafferImage.ImageWriter.Mode.Tile ]:
			w['openexr']['mode'].setValue( mode )
			for offset in offsets:
				# write offset, read back with the inverse offset : round trip should be lossless
				offsetOut['offset'].setValue( offset )
				offsetIn['offset'].setValue( -offset )

				w.execute()

				rBack['refreshCount'].setValue( rBack['refreshCount'].getValue() + 1 )
				self.assertImagesEqual( r["out"], offsetIn["out"], ignoreMetadata = True )

	# Computing the image should only depend on the frame-related context
	# variables, even when the fileName comes from an expression.
	def testFileNameContext( self ) :

		s = Gaffer.ScriptNode()
		s["reader"] = GafferImage.OpenImageIOReader()

		s["expression"] = Gaffer.Expression()
		s["expression"].setExpression( 'parent["reader"]["fileName"] = "%s"' % self.fileName )

		with Gaffer.ContextMonitor( root = s["expression"] ) as cm :
			GafferImage.ImageAlgo.tiles( s["reader"]["out"] )

		self.assertEqual( set( cm.combinedStatistics().variableNames() ), set( ['frame', 'framesPerSecond'] ) )

	def testMultipartRead( self ) :

		rgbReader = GafferImage.OpenImageIOReader()
		rgbReader["fileName"].setValue( self.offsetDataWindowFileName )
		compareDelete = GafferImage.DeleteChannels()
		compareDelete["in"].setInput( rgbReader["out"] )

		# This test multipart file contains a "customRgb" subimage, a "customRgba" subimage,
		# and a "customDepth" subimage, with one channel named "Z" ( copied from the green
		# channel of our reference image. )
		# We don't use the subimage names "rgb", "rgba" or "depth", because we want to look
		# at channels which don't get automatically mapped to the default channel names.
		# ( see testDefaultChannelsMultipartRead for that )
		# The test file was created using this command:
		# > oiiotool rgb.100x100.exr --attrib "oiio:subimagename" customRgb -ch "R,G,B" rgb.100x100.exr --attrib "oiio:subimagename" customRgba rgb.100x100.exr --attrib "oiio:subimagename" customDepth --ch "G" --chnames "Z" --siappendall -o multipart.exr
		multipartReader = GafferImage.OpenImageIOReader()
		multipartReader["fileName"].setValue( self.multipartFileName )
		multipartShuffle = GafferImage.Shuffle()
		multipartShuffle["in"].setInput( multipartReader["out"] )
		multipartDelete = GafferImage.DeleteChannels()
		multipartDelete["in"].setInput( multipartShuffle["out"] )
		multipartDelete['channels'].setValue( "*.*" )

		self.assertEqual(
			set( multipartReader["out"]["channelNames"].getValue() ),
			set([ "customRgba.R", "customRgba.G", "customRgba.B", "customRgba.A", "customRgb.R", "customRgb.G", "customRgb.B", "customDepth.Z" ])
		)

		# shuffle each subimage's channels into the default layer and compare
		# against the single-part reference image
		multipartShuffle["channels"].clearChildren()
		multipartShuffle["channels"].addChild( GafferImage.Shuffle.ChannelPlug( "R", "customRgba.R" ) )
		multipartShuffle["channels"].addChild( GafferImage.Shuffle.ChannelPlug( "G", "customRgba.G" ) )
		multipartShuffle["channels"].addChild( GafferImage.Shuffle.ChannelPlug( "B", "customRgba.B" ) )
		multipartShuffle["channels"].addChild( GafferImage.Shuffle.ChannelPlug( "A", "customRgba.A" ) )
		self.assertImagesEqual( compareDelete["out"], multipartDelete["out"], ignoreMetadata = True )

		multipartShuffle["channels"].clearChildren()
		multipartShuffle["channels"].addChild( GafferImage.Shuffle.ChannelPlug( "R", "customRgb.R" ) )
		multipartShuffle["channels"].addChild( GafferImage.Shuffle.ChannelPlug( "G", "customRgb.G" ) )
		multipartShuffle["channels"].addChild( GafferImage.Shuffle.ChannelPlug( "B", "customRgb.B" ) )
		compareDelete['channels'].setValue( "A" )
		self.assertImagesEqual( compareDelete["out"], multipartDelete["out"], ignoreMetadata = True )

		multipartShuffle["channels"].clearChildren()
		multipartShuffle["channels"].addChild( GafferImage.Shuffle.ChannelPlug( "G", "customDepth.Z" ) )
		compareDelete['channels'].setValue( "R B A" )
		self.assertImagesEqual( compareDelete["out"], multipartDelete["out"], ignoreMetadata = True )

	def testUnsupportedMultipartRead( self ) :

		rgbReader = GafferImage.OpenImageIOReader()
		rgbReader["fileName"].setValue( self.offsetDataWindowFileName )

		# This test multipart file contains a "rgba" subimage, and a second subimage with a
		# differing data window. The second part can currently not be loaded, because Gaffer images
		# have a single data window for the whole image.
		#
		# In the future, should we union the data windows? Are subimages with differing data windows common?
		# This would probably happen with stereo images, but we should probably put work into handling stereo
		# images differently - with a context variable to control which eye we get, rather than loading everything
		# as channels.
		#
		# It was created using this command:
		# > oiiotool rgb.100x100.exr --attrib "oiio:subimagename" rgba checkerboard.100x100.exr --attrib "oiio:subimagename" fullDataWindow --siappendall -o unsupportedMultipart.exr
		multipartReader = GafferImage.OpenImageIOReader()
		multipartReader["fileName"].setValue( self.unsupportedMultipartFileName )

		# When we compare to the single part comparison file, the image will come out the same, because
		# the second part is ignored - and we should get a message about it being ignored
		with IECore.CapturingMessageHandler() as mh :
			self.assertImagesEqual( rgbReader["out"], multipartReader["out"], ignoreMetadata = True )

		self.assertEqual( len( mh.messages ), 1 )
		self.assertTrue( mh.messages[0].message.startswith( "Ignoring subimage 1 of " ) )

	def testDefaultChannelMultipartRead( self ) :

		# This test multipart file contains a "rgb" subimage with R, G, B channels, an "RGBA" subimage
		# with an A channel, and a "depth" subimage with a Z channel.
		# The standard would expect this to be loaded with channel names like "RGBA.A" and "depth.Z",
		# but in practice, applications expect these default layers to be loaded as the standard layer
		# names, so we conform to this practical expectation, and just name the channels R, G, B, A, and Z
		# The test file was created with this command
		# > oiiotool --create 4x4 3 --addc 0.1,0.2,0.3 --attrib "oiio:subimagename" rgb -create 4x4 1 --chnames A --addc 0.4 --attrib "oiio:subimagename" RGBA -create 4x4 1 --chnames Z --addc 4.2 --attrib "oiio:subimagename" depth --siappendall -o multipartDefaultChannels.exr
		multipartReader = GafferImage.OpenImageIOReader()
		multipartReader["fileName"].setValue( self.multipartDefaultChannelsFileName )
		self.assertEqual( set( multipartReader["out"]["channelNames"].getValue() ), set([ "R", "G", "B", "A", "Z" ]) )

		sampler = GafferImage.ImageSampler()
		sampler["image"].setInput( multipartReader["out"] )
		sampler["pixel"].setValue( imath.V2f( 2 ) )
		self.assertEqual( sampler["color"].getValue(), imath.Color4f( 0.1, 0.2, 0.3, 0.4 ) )

		sampler['channels'].setValue( IECore.StringVectorData( ["Z", "Z", "Z", "Z"] ) )
		self.assertEqual( sampler["color"].getValue(), imath.Color4f( 4.2 ) )

		# Similar sort of image, but this time is ambiguous because subimages "rgb" and "RGBA" both
		# define channels RGB. This should trigger a warning, and take RGB from the first subimage,
		# but A from the second subimage, because it is only found there.
		# The test file was created with this command:
		# > oiiotool --create 4x4 3 --addc 0.1,0.2,0.3 --attrib "oiio:subimagename" rgb -create 4x4 4 --addc 0.4,0.5,0.6,0.7 --attrib "oiio:subimagename" RGBA --siappendall -o multipartDefaultChannelsOverlap.exr
		multipartReader["fileName"].setValue( self.multipartDefaultChannelsOverlapFileName )

		with IECore.CapturingMessageHandler() as mh :
			self.assertEqual( set( multipartReader["out"]["channelNames"].getValue() ), set([ "R", "G", "B", "A" ]) )

		self.assertEqual( len( mh.messages ), 3 )
		self.assertTrue( mh.messages[0].message.startswith( 'Ignoring channel "R" in subimage "1"' ) )
		self.assertTrue( mh.messages[1].message.startswith( 'Ignoring channel "G" in subimage "1"' ) )
		self.assertTrue( mh.messages[2].message.startswith( 'Ignoring channel "B" in subimage "1"' ) )
		for i in range( 3 ):
			self.assertTrue( mh.messages[i].message.endswith( 'already in subimage "0".' ) )

		sampler['channels'].setToDefault()
		self.assertEqual( sampler["color"].getValue(), imath.Color4f( 0.1, 0.2, 0.3, 0.7 ) )

	# With no file, the format falls back to the context's default format,
	# so the hash must track default format changes.
	def testDefaultFormatHash( self ) :

		r = GafferImage.OpenImageIOReader()

		with Gaffer.Context() as c :
			GafferImage.FormatPlug.setDefaultFormat( c, GafferImage.Format( 100, 200 ) )
			h1 = r["out"].formatHash()
			GafferImage.FormatPlug.setDefaultFormat( c, GafferImage.Format( 200, 300 ) )
			h2 = r["out"].formatHash()
			GafferImage.FormatPlug.setDefaultFormat( c, GafferImage.Format( 100, 300, 2.0 ) )
			h3 = r["out"].formatHash()
			GafferImage.FormatPlug.setDefaultFormat( c, GafferImage.Format( 100, 200 ) )
			h4 = r["out"].formatHash()

		self.assertNotEqual( h1, h2 )
		self.assertNotEqual( h1, h3 )
		self.assertNotEqual( h2, h3 )
		self.assertEqual( h1, h4 )

	def testOpenFilesLimit( self ) :

		l = GafferImage.OpenImageIOReader.getOpenFilesLimit()
		try :
			GafferImage.OpenImageIOReader.setOpenFilesLimit( l + 1 )
			self.assertEqual( GafferImage.OpenImageIOReader.getOpenFilesLimit(), l + 1 )
		finally :
			# restore the global limit even if the assertion fails
			GafferImage.OpenImageIOReader.setOpenFilesLimit( l )

	def testSubimageMetadataNotLoaded( self ) :

		reader = GafferImage.ImageReader()
		reader["fileName"].setValue( "${GAFFER_ROOT}/python/GafferImageTest/images/multipart.exr" )

		metadata = reader["out"].metadata()
		self.assertNotIn( "name", metadata )
		self.assertNotIn( "oiio:subimagename", metadata )
		self.assertNotIn( "oiio:subimages", metadata )

if __name__ == "__main__":
	unittest.main()
#!/usr/bin/env python
'''unit testing code for pysam.

Execute in the :file:`tests` directory as it requires the Makefile
and data files located there.
'''
import unittest
import os
import sys
import re
import copy
from collections import OrderedDict as odict

import pysam
import pysam.samtools
from TestUtils import get_temp_filename, make_data_files, BAM_DATADIR

if sys.version_info.major >= 3:
    from io import StringIO
else:
    from StringIO import StringIO


def setUpModule():
    # Build the test data files (BAM/SAM/CRAM fixtures) once per module.
    make_data_files(BAM_DATADIR)


class TestHeaderConstruction(unittest.TestCase):
    """testing header construction."""

    # Reference header as an ordered dict, mirroring header_text below.
    header_dict = odict(
        [('SQ', [odict([('LN', 1575), ('SN', 'chr1'),
                        ('AH', 'chr1:5000000-5010000')]),
                 odict([('LN', 1584), ('SN', 'chr2'),
                        ('AH', '*')])]),
         ('RG', [odict([('LB', 'SC_1'), ('ID', 'L1'), ('SM', 'NA12891'),
                        ('PU', 'SC_1_10'), ("CN", "name:with:colon")]),
                 odict([('LB', 'SC_2'), ('ID', 'L2'), ('SM', 'NA12891'),
                        ('PU', 'SC_2_12'), ("CN", "name:with:colon")])]),
         ('PG', [odict([('ID', 'P1'), ('VN', '1.0')]),
                 odict([('ID', 'P2'), ('VN', '1.1')])]),
         ('HD', odict([('VN', '1.0')])),
         ('CO', ['this is a comment', 'this is another comment']),
         ])

    # The same header in SAM text form.
    header_text = ("@HD\tVN:1.0\n"
                   "@SQ\tSN:chr1\tLN:1575\tAH:chr1:5000000-5010000\n"
                   "@SQ\tSN:chr2\tLN:1584\tAH:*\n"
                   "@RG\tID:L1\tPU:SC_1_10\tLB:SC_1\tSM:NA12891\tCN:name:with:colon\n"
                   "@RG\tID:L2\tPU:SC_2_12\tLB:SC_2\tSM:NA12891\tCN:name:with:colon\n"
                   "@PG\tID:P1\tVN:1.0\n"
                   "@PG\tID:P2\tVN:1.1\n"
                   "@CO\tthis is a comment\n"
                   "@CO\tthis is another comment\n")

    # Expected result of from_references(): the SQ lines carry no AH tag.
    header_from_references = odict(
        [('SQ', [odict([('LN', 1575), ('SN', 'chr1')]),
                 odict([('LN', 1584), ('SN', 'chr2')])]),
         ('RG', [odict([('LB', 'SC_1'), ('ID', 'L1'), ('SM', 'NA12891'),
                        ('PU', 'SC_1_10'), ("CN", "name:with:colon")]),
                 odict([('LB', 'SC_2'), ('ID', 'L2'), ('SM', 'NA12891'),
                        ('PU', 'SC_2_12'), ("CN", "name:with:colon")])]),
         ('PG', [odict([('ID', 'P1'), ('VN', '1.0')]),
                 odict([('ID', 'P2'), ('VN', '1.1')])]),
         ('HD', odict([('VN', '1.0')])),
         ('CO', ['this is a comment', 'this is another comment']),
         ])

    # Expected result when constructing without any header text at all.
    header_without_text = odict(
        [('SQ', [odict([('LN', 1575), ('SN', 'chr1')]),
                 odict([('LN', 1584), ('SN', 'chr2')])]),
         ])

    def compare_headers(self, test_header, ref_header=None):
        '''compare two headers a and b.

        Checks key-by-key equality in both directions between
        test_header.as_dict() and ref_header (defaults to header_dict).
        '''
        test_header_dict = test_header.as_dict()
        if ref_header is None:
            ref_header = self.header_dict
        for ak, av in test_header_dict.items():
            # FIX: membership must be tested against ref_header, not
            # self.header_dict; otherwise a key present in header_dict but
            # absent from a caller-supplied ref_header raises KeyError on
            # the lookup below instead of failing with a clear message.
            self.assertTrue(ak in ref_header,
                            "key '%s' not in '%s' " % (ak, ref_header))
            self.assertEqual(av, ref_header[ak])
        for ak, av in ref_header.items():
            self.assertTrue(ak in test_header_dict,
                            "key '%s' not in '%s' " % (ak, test_header_dict))
            self.assertEqual(av, test_header_dict[ak])

    def check_name_mapping(self, test_header):
        # tid <-> reference-name mapping must be consistent in both directions.
        for x, y in enumerate(("chr1", "chr2")):
            tid = test_header.get_tid(y)
            ref = test_header.get_reference_name(x)
            self.assertEqual(tid, x)
            self.assertEqual(ref, y)
        # Unknown names map to -1; out-of-range tids raise.
        self.assertEqual(test_header.get_tid("chr?"), -1)
        self.assertRaises(ValueError, test_header.get_reference_name, 2)

    def test_header_constructed_from_dict(self):
        header = pysam.AlignmentHeader.from_dict(self.header_dict)
        self.compare_headers(header)
        self.check_name_mapping(header)

    def test_header_constructed_from_text(self):
        header = pysam.AlignmentHeader.from_text(self.header_text)
        self.compare_headers(header)
        self.check_name_mapping(header)

    def test_header_constructed_from_header(self):
        header = pysam.AlignmentHeader.from_text(self.header_text)
        self.compare_headers(header.copy())
        self.check_name_mapping(header)

    def test_header_constructed_from_references(self):
        # Strip the @SQ lines; from_references() must recreate them.
        text = re.sub("@SQ[^\n]+\n", "", self.header_text)
        assert "@SQ" not in text
        header = pysam.AlignmentHeader.from_references(
            reference_names=["chr1", "chr2"],
            reference_lengths=[1575, 1584],
            text=text)
        self.compare_headers(header, self.header_from_references)
        self.check_name_mapping(header)

    def test_header_constructed_from_references_without_text(self):
        header = pysam.AlignmentHeader.from_references(
            reference_names=["chr1", "chr2"],
            reference_lengths=[1575, 1584])
        self.compare_headers(header, self.header_without_text)
        self.check_name_mapping(header)


class TestHeaderSAM(unittest.TestCase):
    """testing header manipulation"""

    header = {'SQ': [{'LN': 1575, 'SN': 'chr1',
                      'AH': 'chr1:5000000-5010000'},
                     {'LN': 1584, 'SN': 'chr2',
                      'AH': '*'}],
              'RG': [{'LB': 'SC_1', 'ID': 'L1', 'SM': 'NA12891',
                      'PU': 'SC_1_10', "CN": "name:with:colon"},
                     {'LB': 'SC_2', 'ID': 'L2', 'SM': 'NA12891',
                      'PU': 'SC_2_12', "CN": "name:with:colon"}],
              'PG': [{'ID': 'P1', 'VN': '1.0'},
                     {'ID': 'P2', 'VN': '1.1'}],
              'HD': {'VN': '1.0'},
              'CO': ['this is a comment', 'this is another comment'],
              }

    def compare_headers(self, a, b):
        '''compare two headers a and b.'''
        for ak, av in a.items():
            self.assertTrue(ak in b, "key '%s' not in '%s' " % (ak, b))
            self.assertEqual(av, b[ak])

    def setUp(self):
        self.samfile = pysam.AlignmentFile(
            os.path.join(BAM_DATADIR, "ex3.sam"),
            "r")

    def test_header_content_is_as_expected(self):
        self.compare_headers(self.header, self.samfile.header.to_dict())
        self.compare_headers(self.samfile.header.to_dict(), self.header)

    def test_text_access_works(self):
        self.assertEqual(self.samfile.text, self.samfile.header.__str__())

    def test_name_mapping(self):
        for x, y in enumerate(("chr1", "chr2")):
            tid = self.samfile.gettid(y)
            ref = self.samfile.getrname(x)
            self.assertEqual(tid, x)
            self.assertEqual(ref, y)
        self.assertEqual(self.samfile.gettid("chr?"), -1)
        self.assertRaises(ValueError, self.samfile.getrname, 2)

    def test_dictionary_access_works(self):
        for key in self.header.keys():
            self.compare_headers({key: self.header[key]},
                                 {key: self.samfile.header[key]})

    def test_dictionary_setting_raises_error(self):
        # AlignmentHeader is read-only; item assignment must raise.
        self.assertRaises(TypeError,
                          self.samfile.header.__setitem__,
                          "CO", ["This is a final comment"])

    def test_dictionary_len_works(self):
        self.assertEqual(len(self.header), len(self.samfile.header))

    def test_dictionary_keys_works(self):
        # sort for py2.7
        self.assertEqual(sorted(self.header.keys()),
                         sorted(self.samfile.header.keys()))

    def test_dictionary_values_works(self):
        self.assertEqual(len(self.header.values()),
                         len(self.samfile.header.values()))

    def test_dictionary_get_works(self):
        self.assertEqual(self.header.get("HD"), {'VN': '1.0'})
        self.assertEqual(self.header.get("UK", "xyz"), "xyz")
        self.assertEqual(self.header.get("UK"), None)

    def test_dictionary_contains_works(self):
        self.assertTrue("HD" in self.header)
        self.assertFalse("UK" in self.header)

    def tearDown(self):
        self.samfile.close()


class TestHeaderBAM(TestHeaderSAM):
    # Same tests, reading the BAM variant of the fixture.

    def setUp(self):
        self.samfile = pysam.AlignmentFile(
            os.path.join(BAM_DATADIR, "ex3.bam"),
            "rb")


class TestHeaderCRAM(TestHeaderSAM):
    # Same tests against the CRAM fixture; CRAM adds M5/UR tags to @SQ
    # lines, so those are stripped before comparing.

    def setUp(self):
        self.samfile = pysam.AlignmentFile(
            os.path.join(BAM_DATADIR, "ex3.cram"),
            "rc")

    def compare_headers(self, a, b):
        '''compare two headers a and b.'''
        def _strip(dd):
            for x in dd:
                for y in ("M5", "UR"):
                    if y in x:
                        del x[y]

        for ak, av in a.items():
            _strip(av)
            self.assertTrue(ak in b, "key '%s' not in '%s' " % (ak, b))
            _strip(b[ak])
            self.assertEqual(av, b[ak])


class TestHeaderFromRefs(unittest.TestCase):
    '''see issue 144

    reference names need to be converted to string for python 3
    '''

    # def testHeader( self ):
    #     refs = ['chr1', 'chr2']
    #     tmpfile = "tmp_%i" % id(self)
    #     s = pysam.AlignmentFile(tmpfile, 'wb',
    #                             referencenames=refs,
    #                             referencelengths=[100]*len(refs))
    #     s.close()
    #     self.assertTrue( checkBinaryEqual( 'issue144.bam', tmpfile ),
    #                      'bam files differ')
    #     os.unlink( tmpfile )


class TestHeaderWriteRead(unittest.TestCase):
    """write a header to SAM/BAM/CRAM and check it round-trips."""

    header = {'SQ': [{'LN': 1575, 'SN': 'chr1'},
                     {'LN': 1584, 'SN': 'chr2'}],
              'RG': [{'LB': 'SC_1', 'ID': 'L1', 'SM': 'NA12891',
                      'PU': 'SC_1_10', "CN": "name:with:colon"},
                     {'LB': 'SC_2', 'ID': 'L2', 'SM': 'NA12891',
                      'PU': 'SC_2_12', "CN": "name:with:colon"}],
              'PG': [{'ID': 'P1', 'VN': '1.0', 'CL': 'tool'},
                     {'ID': 'P2', 'VN': '1.1',
                      'CL': 'tool with in option -R a\tb',
                      'PP': 'P1'}],
              'HD': {'VN': '1.0'},
              'CO': ['this is a comment', 'this is another comment'],
              }

    def compare_headers(self, a, header_b):
        '''compare two headers a and b.

        Ignore M5 and UR field as they are set application specific.
        '''
        b = header_b.to_dict()
        for ak, av in a.items():
            self.assertTrue(ak in b, "key '%s' not in '%s' " % (ak, b))
            self.assertEqual(
                len(av), len(b[ak]),
                "unequal number of entries for key {}: {} vs {}"
                .format(ak, av, b[ak]))
            for row_a, row_b in zip(av, b[ak]):
                if isinstance(row_b, dict):
                    for x in ["M5", "UR"]:
                        try:
                            del row_b[x]
                        except KeyError:
                            pass
                self.assertEqual(row_a, row_b)

    def check_read_write(self, flag_write, header):
        fn = get_temp_filename()
        with pysam.AlignmentFile(
                fn,
                flag_write,
                header=header,
                reference_filename=os.path.join(BAM_DATADIR, "ex1.fa")) as outf:
            a = pysam.AlignedSegment()
            a.query_name = "abc"
            outf.write(a)

        with pysam.AlignmentFile(fn) as inf:
            read_header = inf.header

        # os.unlink(fn)
        self.compare_headers(header, read_header)
        expected_lengths = dict([(x["SN"], x["LN"]) for x in header["SQ"]])
        self.assertEqual(expected_lengths,
                         dict(zip(read_header.references,
                                  read_header.lengths)))

    def test_SAM(self):
        self.check_read_write("wh", self.header)

    def test_BAM(self):
        self.check_read_write("wb", self.header)

    def test_CRAM(self):
        # FIX: use deepcopy, not copy.copy. The shallow copy shared the
        # nested 'PG' dicts with the class-level header, so the in-place
        # quoting below mutated self.header for every other test in this
        # class (test_SAM runs after test_CRAM alphabetically and saw the
        # CRAM-escaped CL string).
        header = copy.deepcopy(self.header)
        if "PG" in header:
            # for CRAM, \t needs to be quoted:
            header['PG'][1]['CL'] = re.sub(r"\t", r"\\\\t",
                                           header['PG'][1]['CL'])
        self.check_read_write("wc", header)


class TestHeaderLargeContigs(TestHeaderWriteRead):
    """see issue 741"""

    header = {'SQ': [{'LN': 2147483647, 'SN': 'chr1'},
                     {'LN': 1584, 'SN': 'chr2'}],
              'HD': {'VN': '1.0'}}
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Presubmit script for Chromium WebUI resources.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl/git cl, and see
http://www.chromium.org/developers/web-development-style-guide for the rules
we're checking against here.
"""

# TODO(dbeam): Real CSS parser? pycss? http://code.google.com/p/pycss/


class CSSChecker(object):
  # Regex-based linter for added/modified .css files. Returns presubmit
  # warnings describing style-guide violations; it deliberately does NOT use
  # a real CSS parser (see TODO above), so all checks are best-effort.

  def __init__(self, input_api, output_api, file_filter=None):
    # input_api/output_api: the presubmit API objects handed in by depot_tools.
    # file_filter: optional predicate narrowing which affected files to check.
    self.input_api = input_api
    self.output_api = output_api
    self.file_filter = file_filter

  def RunChecks(self):
    # Runs every style check over the affected .css files and returns a list
    # of PresubmitPromptWarning results (one per offending file).
    # We use this a lot, so make a nick name variable.
    re = self.input_api.re

    def _collapseable_hex(s):
      # True for a 6-digit hex color (e.g. "aabbcc") that could be 3 digits.
      return (len(s) == 6 and s[0] == s[1] and s[2] == s[3] and s[4] == s[5])

    def _is_gray(s):
      # True when all color components of a 3- or 6-digit hex value match.
      return s[0] == s[1] == s[2] if len(s) == 3 else s[0:2] == s[2:4] == s[4:6]

    def _remove_all(s):
      # Strip everything the line-based checks should not see.
      return _remove_grit(_remove_ats(_remove_comments(s)))

    def _remove_ats(s):
      # Drop @media/@keyframes-style wrappers but keep their inner rules.
      return re.sub(re.compile(r'@\w+.*?{(.*{.*?})+.*?}', re.DOTALL), '\\1', s)

    def _remove_comments(s):
      return re.sub(re.compile(r'/\*.*?\*/', re.DOTALL), '', s)

    def _remove_grit(s):
      # Remove grit <if>...</if> blocks and <include> tags.
      grit_reg = r'<if[^>]+>.*?<\s*/\s*if[^>]*>|<include[^>]+>'
      return re.sub(re.compile(grit_reg, re.DOTALL), '', s)

    def _rgb_from_hex(s):
      # Convert a 3- or 6-digit hex color string into an (r, g, b) int tuple.
      if len(s) == 3:
        r, g, b = s[0] + s[0], s[1] + s[1], s[2] + s[2]
      else:
        r, g, b = s[0:2], s[2:4], s[4:6]
      return int(r, base=16), int(g, base=16), int(b, base=16)

    def alphabetize_props(contents):
      # Multiline check: report rule bodies whose properties are unsorted.
      errors = []
      for rule in re.finditer(r'{(.*?)}', contents, re.DOTALL):
        semis = map(lambda t: t.strip(), rule.group(1).split(';'))[:-1]
        rules = filter(lambda r: ': ' in r, semis)
        props = map(lambda r: r[0:r.find(':')], rules)
        if props != sorted(props):
          errors.append(' %s;\n' % (';\n '.join(rules)))
      return errors

    def braces_have_space_before_and_nothing_after(line):
      return re.search(r'(?:^|\S){|{\s*\S+\s*$', line)

    def classes_use_dashes(line):
      # Intentionally dumbed down version of CSS 2.1 grammar for class without
      # non-ASCII, escape chars, or whitespace.
      m = re.search(r'\.(-?[_a-zA-Z0-9-]+).*[,{]\s*$', line)
      return (m and (m.group(1).lower() != m.group(1) or
                     m.group(1).find('_') >= 0))

    # Ignore single frames in a @keyframe, i.e. 0% { margin: 50px; }
    frame_reg = r'\s*\d+%\s*{\s*[_a-zA-Z0-9-]+:(\s*[_a-zA-Z0-9-]+)+\s*;\s*}\s*'

    def close_brace_on_new_line(line):
      return (line.find('}') >= 0 and re.search(r'[^ }]', line) and
              not re.match(frame_reg, line))

    def colons_have_space_after(line):
      # (?<!data) / (?!//) avoid false positives on data: and protocol URLs.
      return re.search(r'(?<!data):(?!//)\S[^;]+;\s*', line)

    def favor_single_quotes(line):
      return line.find('"') >= 0

    # Shared between hex_could_be_shorter and rgb_if_not_gray.
    hex_reg = (r'#([a-fA-F0-9]{3}|[a-fA-F0-9]{6})(?=[^_a-zA-Z0-9-]|$)'
               r'(?!.*(?:{.*|,\s*)$)')

    def hex_could_be_shorter(line):
      m = re.search(hex_reg, line)
      return (m and _is_gray(m.group(1)) and _collapseable_hex(m.group(1)))

    # Captures sub-second durations written in seconds, e.g. ".3s" / "0.25s".
    small_seconds = r'(?:^|[^_a-zA-Z0-9-])(0?\.[0-9]+)s(?!-?[_a-zA-Z0-9-])'

    def milliseconds_for_small_times(line):
      return re.search(small_seconds, line)

    def no_data_uris_in_source_files(line):
      return re.search(r'\(\s*\'?\s*data:', line)

    def one_rule_per_line(line):
      return re.search(r'[_a-zA-Z0-9-](?<!data):(?!//)[^;]+;\s*[^ }]\s*',
                       line)

    any_reg = re.compile(r':(?:-webkit-)?any\(.*?\)', re.DOTALL)
    multi_sels = re.compile(r'(?:}[\n\s]*)?([^,]+,(?=[^{}]+?{).*[,{])\s*$',
                            re.MULTILINE)

    def one_selector_per_line(contents):
      # Multiline check; :any(...) lists are legitimately comma-separated,
      # so they are removed before matching.
      errors = []
      for b in re.finditer(multi_sels, re.sub(any_reg, '', contents)):
        errors.append(' ' + b.group(1).strip().splitlines()[-1:][0])
      return errors

    def rgb_if_not_gray(line):
      m = re.search(hex_reg, line)
      return (m and not _is_gray(m.group(1)))

    def suggest_ms_from_s(line):
      ms = int(float(re.search(small_seconds, line).group(1)) * 1000)
      return ' (replace with %dms)' % ms

    def suggest_rgb_from_hex(line):
      suggestions = ['rgb(%d, %d, %d)' % _rgb_from_hex(h.group(1))
                     for h in re.finditer(hex_reg, line)]
      return ' (replace with %s)' % ', '.join(suggestions)

    def suggest_short_hex(line):
      h = re.search(hex_reg, line).group(1)
      return ' (replace with #%s)' % (h[0] + h[2] + h[4])

    # Zero percentages inside hsl() are meaningful; excluded from the
    # zero-length check below.
    hsl = r'hsl\([^\)]*(?:[, ]|(?<=\())(?:0?\.?)?0%'
    zeros = (r'^.*(?:^|\D)'
             r'(?:\.0|0(?:\.0?|px|em|%|in|cm|mm|pc|pt|ex|deg|g?rad|m?s|k?hz))'
             r'(?:\D|$)(?=[^{}]+?}).*$')

    def zero_length_values(contents):
      # Multiline check: flag "0px"-style lengths that should be plain "0".
      errors = []
      for z in re.finditer(re.compile(zeros, re.MULTILINE), contents):
        first_line = z.group(0).strip().splitlines()[0]
        if not re.search(hsl, first_line):
          errors.append(' ' + first_line)
      return errors

    # Each check: 'desc' for the warning text, 'test' callable, optional
    # 'multiline' (whole-file check) and 'after' (suggestion appender).
    added_or_modified_files_checks = [
        { 'desc': 'Alphabetize properties and list vendor specific (i.e. '
                  '-webkit) above standard.',
          'test': alphabetize_props,
          'multiline': True,
        },
        { 'desc': 'Start braces ({) end a selector, have a space before them '
                  'and no rules after.',
          'test': braces_have_space_before_and_nothing_after,
        },
        { 'desc': 'Classes use .dash-form.',
          'test': classes_use_dashes,
        },
        { 'desc': 'Always put a rule closing brace (}) on a new line.',
          'test': close_brace_on_new_line,
        },
        { 'desc': 'Colons (:) should have a space after them.',
          'test': colons_have_space_after,
        },
        { 'desc': 'Use single quotes (\') instead of double quotes (") in '
                  'strings.',
          'test': favor_single_quotes,
        },
        { 'desc': 'Use abbreviated hex (#rgb) when in form #rrggbb.',
          'test': hex_could_be_shorter,
          'after': suggest_short_hex,
        },
        { 'desc': 'Use milliseconds for time measurements under 1 second.',
          'test': milliseconds_for_small_times,
          'after': suggest_ms_from_s,
        },
        { 'desc': 'Don\'t use data URIs in source files. Use grit instead.',
          'test': no_data_uris_in_source_files,
        },
        { 'desc': 'One rule per line (what not to do: color: red; margin: 0;).',
          'test': one_rule_per_line,
        },
        { 'desc': 'One selector per line (what not to do: a, b {}).',
          'test': one_selector_per_line,
          'multiline': True,
        },
        { 'desc': 'Use rgb() over #hex when not a shade of gray (like #333).',
          'test': rgb_if_not_gray,
          'after': suggest_rgb_from_hex,
        },
        { 'desc': 'Make all zero length terms (i.e. 0px) 0 unless inside of '
                  'hsl() or part of @keyframe.',
          'test': zero_length_values,
          'multiline': True,
        },
    ]

    results = []
    affected_files = self.input_api.AffectedFiles(include_deletes=False,
                                                  file_filter=self.file_filter)
    files = []
    for f in affected_files:
      # Remove all /*comments*/, @at-keywords, and grit <if|include> tags; we're
      # not using a real parser. TODO(dbeam): Check alpha in <if> blocks.
      file_contents = _remove_all('\n'.join(f.NewContents()))
      files.append((f.LocalPath(), file_contents))

    # Only look at CSS files for now.
    for f in filter(lambda f: f[0].endswith('.css'), files):
      file_errors = []
      for check in added_or_modified_files_checks:
        # If the check is multiline, it receives the whole file and gives us
        # back a list of things wrong. If the check isn't multiline, we pass it
        # each line and the check returns something truthy if there's an issue.
        if ('multiline' in check and check['multiline']):
          check_errors = check['test'](f[1])
          if len(check_errors) > 0:
            # There are currently no multiline checks with ['after'].
            file_errors.append('- %s\n%s' %
                (check['desc'], '\n'.join(check_errors).rstrip()))
        else:
          check_errors = []
          lines = f[1].splitlines()
          for lnum in range(0, len(lines)):
            line = lines[lnum]
            if check['test'](line):
              error = ' ' + line.strip()
              if 'after' in check:
                error += check['after'](line)
              check_errors.append(error)
          if len(check_errors) > 0:
            file_errors.append('- %s\n%s' %
                (check['desc'], '\n'.join(check_errors)))
      if file_errors:
        results.append(self.output_api.PresubmitPromptWarning(
            '%s:\n%s' % (f[0], '\n\n'.join(file_errors))))

    if results:
      # Add your name if you're here often mucking around in the code.
      authors = ['dbeam@chromium.org']
      results.append(self.output_api.PresubmitNotifyResult(
          'Was the CSS checker useful? Send feedback or hate mail to %s.' %
          ', '.join(authors)))

    return results
# Evaluate how the FID and Inception scores react to images corrupted with
# increasing levels of different noise types (salt&pepper, rectangles, swirl,
# blur, gaussian noise, ImageNet mixing).
import matplotlib
matplotlib.use('Agg')
import os
import tensorflow as tf
import fid
import numpy as np
import math
import utils
import fidutils
from glob import glob
import argparse

#
# parse params
#
parser = argparse.ArgumentParser()
parser.add_argument('--path_IncNet', type=str, help='Path to inception net.')
parser.add_argument('--dataset', type=str, default='CelebA',
                    help='Possible options: CelebA, Cifar10, Other. (default: CelebA)')
parser.add_argument('--path_data', type=str, help='Path to images')
parser.add_argument('--path_out', type=str, help="Path to output directory")
parser.add_argument('--path_stats', type=str, help='Path to precalculated statistics')
hp_str = '''Possible nois types: sp (salt and pepper), rect (black rectangles), swirl, blur, gn (gaussian noise) mixed (mixture with ImageNet images) To make multiple experiments, pass noise types seperated by colons (e.g. sp:rect:swirl). (default: sp) '''
parser.add_argument('--noise_type', type=str, default='sp', help=hp_str)
parser.add_argument('--img_file_ext', type=str, default='*.png',
                    help='Extension of image files. If no specific extenison i ')
parser.add_argument('--n_imgs', type=int, default=50000,
                    help='Number of images used to calc the distances. (default: 50000)')
parser.add_argument('--gpu', type=str, default='',
                    help='GPU to use (leave blank for CPU only)')
parser.add_argument('--verbose', type=str, default='',
                    help='Report status of program in console. \"Y\" for yes. (default: status is not reported)')
parser.add_argument('--sub_paths', type=str, default='',
                    help='Create sub directories per distortion type. \"Y\" for yes. (default: sub directories are not created)')
parser.add_argument('--img_dims', type=int, default=None, nargs=3,
                    metavar=('HIGHT', 'WIDTH', 'CHANNELS'),
                    help='dimensions of images in the order "H W C" for hight, width and channels. Only needed for dataset "Other" (no default value)')
args = parser.parse_args()

#-------------------------------------------------------------------------------
#
# check parameters
#
PATH_INC = args.path_IncNet
if not PATH_INC.endswith("classify_image_graph_def.pb"):
    # FIX: was os.join(...), which raised AttributeError at runtime.
    PATH_INC = os.path.join(PATH_INC, "classify_image_graph_def.pb")
if not os.path.exists(PATH_INC):
    raise RuntimeError("Invalid path: %s" % PATH_INC)

PATH_DATA = args.path_data
if not os.path.exists(PATH_DATA):
    raise RuntimeError("Invalid path: %s" % PATH_DATA)
PATH_DATA = os.path.join(PATH_DATA, '*')
data = glob(PATH_DATA)

PATH_OUT = args.path_out
if not os.path.exists(PATH_OUT):
    raise RuntimeError("Invalid path: %s" % PATH_OUT)

# Image geometry is fixed per dataset; "Other" takes it from --img_dims.
_H_, _W_, _C_ = None, None, None
PATH_STATS = args.path_stats
if args.dataset == "CelebA":
    _H_ = 64; _W_ = 64; _C_ = 3
    if not PATH_STATS.endswith("fid_stats_celeba.npz"):
        PATH_STATS = os.path.join(PATH_STATS, "fid_stats_celeba.npz")
elif args.dataset == "Cifar10":
    _H_ = 32; _W_ = 32; _C_ = 3
    if not PATH_STATS.endswith("fid_stats_cifar10_train.npz"):
        PATH_STATS = os.path.join(PATH_STATS, "fid_stats_cifar10_train.npz")
elif args.dataset == "Other":
    _H_ = args.img_dims[0]; _W_ = args.img_dims[1]; _C_ = args.img_dims[2]
    if not PATH_STATS.endswith(".npz"):
        raise RuntimeError("Invalid path: pleas state the full path, including the file name <file_name>.npz")
if not os.path.exists(PATH_STATS):
    raise RuntimeError("Invalid path: %s" % PATH_STATS)

args.noise_type = args.noise_type.split(':')
for t in args.noise_type:
    if not t in ["sp", "rect", "swirl", "blur", "gn", "mixed"]:
        # FIX: the message referenced the nonexistent attribute
        # args.nois_type, which raised AttributeError and hid the real error.
        raise RuntimeError("Invalid noise type: %s" % t)

# The documented interface is --verbose Y; use one boolean consistently
# (the original mixed `args.verbose == 'Y'` with bare `args.verbose`).
verbose = (args.verbose == 'Y')

# FIX: GPU selection must not depend on verbosity; previously the
# environment variable was only set when --verbose was truthy.
if args.gpu != "":
    if verbose:
        print("# Setting CUDA_VISIBLE_DEVICES to: " + str(args.gpu))
    os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu

n_repeats = 1

#-------------------------------------------------------------------------------
#
# read data
#
if verbose:
    print("# Reading %d images..." % args.n_imgs, end="", flush=True)

# read stats (precomputed mu/sigma of the real data distribution)
f = np.load(PATH_STATS)
mu_real, sigma_real = f['mu'][:], f['sigma'][:]
f.close()

# read imgs
N_FEATURES = _H_ * _W_ * _C_
N_LOAD_IMGS = args.n_imgs  # 50000
# Fail fast with a clear message instead of an IndexError mid-loop.
if len(data) < N_LOAD_IMGS:
    raise RuntimeError("Requested %d images but only found %d under %s"
                       % (N_LOAD_IMGS, len(data), PATH_DATA))
X = fidutils.DataContainer(np.zeros((N_LOAD_IMGS, N_FEATURES)),
                           epoch_shuffle=False)
for i in range(N_LOAD_IMGS):
    img = utils.get_image(data[i],
                          input_height=_H_, input_width=_W_,
                          resize_height=_H_, resize_width=_W_,
                          is_crop=False, is_grayscale=False)
    X._data[i, :] = img.flatten()
# Loader is expected to deliver values in [-1, 1].
assert X._data.max() <= 1.
assert X._data.min() >= -1.
if verbose:
    print("done")
    print("# image values in range [%.2f, %.2f]"
          % (X._data.min(), X._data.max()))

#-------------------------------------------------------------------------------
#
# load inference model
#
fid.create_inception_graph(PATH_INC)
batch_size = 100
softmax = None

#-------------------------------------------------------------------------------
#
# run
#
init = tf.global_variables_initializer()
sess = tf.Session()
with sess.as_default():
    sess.run(init)
    query_tensor = fid._get_inception_layer(sess)
    if softmax is None:
        softmax = fidutils.get_softmax(sess, query_tensor)

    for noise_type in args.noise_type:
        if verbose:
            print("# Noise type: " + noise_type)
        # Distortion strengths; scale depends on the noise family.
        alphas = None
        if noise_type in ["gn", "rect", "mixed"]:
            alphas = [0.0, 0.25, 0.5, 0.75]
        elif noise_type in ["blur", "swirl"]:
            alphas = [0.0, 1.0, 2.0, 4.0]
        elif noise_type == "sp":
            alphas = [0.0, 0.1, 0.2, 0.3]

        # prepare result writer
        tmp_PATH_OUT = PATH_OUT
        if args.sub_paths:
            tmp_PATH_OUT = PATH_OUT + "/" + noise_type
            # Tolerate reruns: only create the directory if it is missing.
            if not os.path.isdir(tmp_PATH_OUT):
                os.mkdir(tmp_PATH_OUT)
        res_writer = fidutils.ResultWriter(tmp_PATH_OUT,
                                           out_dir_name=noise_type,
                                           out_name=noise_type,
                                           zfill=3)
        res_writer.new_enumerated_path(force=True)
        n_repeats = 1
        save_interval = len(alphas)
        res_writer.add_iter_tracker('Fid', save_interval, n_repeats)
        res_writer.add_iter_tracker('Inc', save_interval, n_repeats)
        res_desc = []
        n_rect = 5

        for i, a in enumerate(alphas):
            if verbose:
                print("# Alpha = %s" % a)
            res_desc.append({'alpha': a})

            # Apply the distortion at strength a (fills X._transf_data).
            if noise_type == "gn":
                X.apply_gauss_noise(alpha=a, mi=-1, ma=1)
            elif noise_type == "rect":
                X.apply_mult_rect(n_rect, _H_, _W_, _C_,
                                  share=a, val=X._data.min())
            elif noise_type == "blur":
                X.apply_gaussian_blur(a, _H_, _W_)
            elif noise_type == "swirl":
                if args.dataset == "CelebA":
                    # bigger radius to make the effect more visible
                    X.apply_local_swirl(_H_, _W_, _C_, n_swirls=1, radius=70,
                                        strength=a, positioning="center",
                                        directions="random")
                else:
                    X.apply_local_swirl(_H_, _W_, _C_, n_swirls=1, radius=25,
                                        strength=a, positioning="center",
                                        directions="random")
            elif noise_type == "sp":
                X.salt_and_pepper(_H_, _W_, _C_, p=a, mi=-1, ma=1)

            if verbose:
                print("# -- Range of transformed images: [%.2f, %.2f]"
                      % (X._transf_data.min(), X._transf_data.max()))
            # Rescale from [-1, 1] to [0, 255] for the inception network.
            X._transf_data = (X._transf_data + 1.) * 127.5
            if verbose:
                print("# -- Range of upscaled images: [ %.2f, %.2f]"
                      % (X._transf_data.min(), X._transf_data.max()))
            res_writer.plot_enumerate_RGB(X._transf_data[0], _H_, _W_, i)

            # calc FID
            if verbose:
                print("# -- Calculating frechet distance...", flush=True)
            mu_gen, sigma_gen = fid.calculate_activation_statistics(
                X._transf_data.reshape(-1, _H_, _W_, _C_),
                sess, batch_size=batch_size)
            # NOTE: a second, unused fid.get_activations() pass was removed
            # here; it recomputed all activations and discarded the result.
            fid_value = fid.calculate_frechet_distance(mu_gen, sigma_gen,
                                                       mu_real, sigma_real)
            res_writer.save_to_iter_tracker('Fid', fid_value)
            if verbose:
                print("# -- FID = %.5f" % fid_value)

            # calc Inception score
            if verbose:
                print("# -- Calculating inception score...", flush=True)
            inc, _ = fidutils.get_inception_score(
                X._transf_data.reshape(-1, _H_, _W_, _C_),
                softmax, sess, splits=10, verbose=False)
            if verbose:
                print("# -- INC = %.5f" % inc)
            res_writer.save_to_iter_tracker('Inc', inc)
            res_writer.inc_idx()

        res_writer.write_result_enumerate_internal(res_desc)
import sys
import csv
from collections import defaultdict

# Bookmaker display name -> CSV column prefix used by football-data.co.uk files.
# For prefix "XY" the columns are XYH (home win), XYD (draw), XYA (away win).
_BOOKMAKERS = [
    ('Bet365', 'B365'),
    ('Blue Square', 'BS'),
    ('Bet&Win', 'BW'),
    ('Gamebookers', 'GB'),
    ('Interwetten', 'IW'),
    ('Ladbrokes', 'LB'),
    ('Pinnacle Sports', 'PS'),
    ('Sporting Odds', 'SO'),
    ('Sportingbet', 'SB'),
    ('Stan James', 'SJ'),
    ('Stanleybet', 'SY'),
    ('VC Bet', 'VC'),
    ('William Hill', 'WH'),
]


def loadData(csvfile):
    """Read a comma-separated results file and split it into header and rows.

    :param csvfile: path to the CSV file, or '-' to read from stdin
    :return: tuple (header, listRawData) where header is the first row and
             listRawData is a list of all remaining rows
    """
    listRawData = list()
    header = []  # stays empty if the file has no rows at all
    if csvfile == '-':
        fh = sys.stdin
    else:
        fh = open(csvfile, 'r')
    try:
        csvreader = csv.reader(fh, delimiter=',')
        for nRow, row in enumerate(csvreader):
            if nRow == 0:
                header = row
            else:
                listRawData.append(row)
    except Exception as sErr:
        print(sErr, file=sys.stderr)
        sys.exit(1)
    finally:
        # Only close handles we opened ourselves; closing sys.stdin would
        # break any later reads from it (bug in the original version).
        if fh is not sys.stdin:
            fh.close()
    return header, listRawData


def getColumns(header):
    """Map the CSV header to the column indices this script needs.

    :param header: list of column names (first CSV row)
    :return: tuple (dictCol, dictOddCol).
             dictCol holds the fixed columns (teams, result 'FTR', date).
             dictOddCol maps each bookmaker present in the file to its
             home/away/draw odds column indices; bookmakers whose columns
             are missing are silently omitted.
    """
    dictCol = dict()
    dictCol['nColHomeTeam'] = header.index("HomeTeam")
    dictCol['nColAwayTeam'] = header.index("AwayTeam")
    # FTR = Full Time Result (H=Home Win, D=Draw, A=Away Win)
    dictCol['nColResult'] = header.index("FTR")
    dictCol['date'] = header.index("Date")

    dictOddCol = dict()
    for sAgent, sPrefix in _BOOKMAKERS:
        # Same lookup order (H, A, D) as the original hand-written blocks:
        # if any of the three columns is absent, the agent is skipped.
        try:
            dictOddCol[sAgent] = {
                'nColOddsHome': header.index(sPrefix + "H"),
                'nColOddsAway': header.index(sPrefix + "A"),
                'nColOddsDraw': header.index(sPrefix + "D"),
            }
        except ValueError:
            pass
    return dictCol, dictOddCol


def main():
    """Score each bookmaker: how often its favourite (lowest odds) won.

    Prints two tab-separated lines: the sorted bookmaker names, then the
    percentage of games each one's shortest-priced outcome was correct.
    """
    if len(sys.argv) > 1:
        csvfile = sys.argv[1]
    else:
        csvfile = 'D1_2013-14.csv'

    header, listRawData = loadData(csvfile)
    dictCol, dictOddCol = getColumns(header)

    dictGames = defaultdict(int)
    dictCorrect = defaultdict(int)
    for data in listRawData:
        for sBettingAgent in dictOddCol.keys():
            try:
                nHomeOdds = float(data[dictOddCol[sBettingAgent]['nColOddsHome']])
                nAwayOdds = float(data[dictOddCol[sBettingAgent]['nColOddsAway']])
                nDrawOdds = float(data[dictOddCol[sBettingAgent]['nColOddsDraw']])
            except (ValueError, IndexError):
                # Missing or non-numeric odds for this game/agent: skip it
                # (narrowed from the original bare except).
                continue
            tupOdds = (
                ('H', nHomeOdds),
                ('A', nAwayOdds),
                ('D', nDrawOdds),
            )
            sResult = data[dictCol['nColResult']]
            # The bookmaker's predicted outcome is the one with lowest odds.
            if sResult == min(tupOdds, key=lambda x: x[1])[0]:
                dictCorrect[sBettingAgent] += 1
            dictGames[sBettingAgent] += 1

    print('\t'.join(sorted(dictOddCol.keys())))
    # Guard against agents whose columns exist but had no parseable games
    # (the original would raise ZeroDivisionError here).
    print('\t'.join([
        "{0:.2f}".format(dictCorrect[sBetAgent] / float(dictGames[sBetAgent]) * 100)
        if dictGames[sBetAgent] else "0.00"
        for sBetAgent in sorted(dictOddCol.keys())]))


if __name__ == "__main__":
    main()
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

"""System tests for multi-tenant routing with vhost policy enabled.

A two-router network is started where each router has a plain listener
(addresses[0]), a multi-tenant listener (addresses[1]) and a
route-container listener (addresses[2]).  The tests verify that
addresses seen through the multi-tenant listener are prefixed with the
vhost name ('hosted-group-1/') for targeted senders, anonymous senders,
link routes, and waypoint/autolink traffic.
"""

from proton import Message
from system_test import TestCase, Qdrouterd, main_module, TIMEOUT, unittest, TestTimeout, PollTimeout, Logger
from proton.handlers import MessagingHandler
from proton.reactor import Container, DynamicNodeProperties
from qpid_dispatch_internal.compat import UNICODE


class RouterMultitenantPolicyTest(TestCase):
    """Two-router tests exercising tenant address prefixing under policy."""

    inter_router_port = None

    @classmethod
    def setUpClass(cls):
        """Start a router"""
        super(RouterMultitenantPolicyTest, cls).setUpClass()

        def router(name, connection):
            # Listener order matters: tests index the resulting addresses as
            # [0]=normal, [1]=multi-tenant, [2]=route-container.
            config = [
                ('router', {'mode': 'interior', 'id': name}),
                ('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no'}),
                ('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'multiTenant': 'yes'}),
                ('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'role': 'route-container'}),
                ('linkRoute', {'prefix': 'hosted-group-1/link', 'direction': 'in', 'containerId': 'LRC'}),
                ('linkRoute', {'prefix': 'hosted-group-1/link', 'direction': 'out', 'containerId': 'LRC'}),
                ('autoLink', {'address': 'hosted-group-1/queue.waypoint', 'containerId': 'ALC', 'direction': 'in'}),
                ('autoLink', {'address': 'hosted-group-1/queue.waypoint', 'containerId': 'ALC', 'direction': 'out'}),
                ('autoLink', {'address': 'hosted-group-1/queue.ext', 'containerId': 'ALCE', 'direction': 'in', 'externalAddress': 'EXT'}),
                ('autoLink', {'address': 'hosted-group-1/queue.ext', 'containerId': 'ALCE', 'direction': 'out', 'externalAddress': 'EXT'}),
                ('address', {'prefix': 'closest', 'distribution': 'closest'}),
                ('address', {'prefix': 'spread', 'distribution': 'balanced'}),
                ('address', {'prefix': 'multicast', 'distribution': 'multicast'}),
                ('address', {'prefix': 'hosted-group-1/queue', 'waypoint': 'yes'}),
                ('policy', {'enableVhostPolicy': 'true'}),
                # Permissive vhost so the tests exercise address prefixing,
                # not authorization failures.
                ('vhost', {'hostname': 'hosted-group-1', 'allowUnknownUser': 'true',
                           'aliases': '0.0.0.0',
                           'groups': {
                               '$default': {
                                   'users': '*',
                                   'maxConnections': 100,
                                   'remoteHosts': '*',
                                   'sources': '*',
                                   'targets': '*',
                                   'allowAnonymousSender': 'true',
                                   'allowWaypointLinks': 'true',
                                   'allowDynamicSource': 'true'
                               }
                           }
                           }),
                connection
            ]

            config = Qdrouterd.Config(config)
            cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))

        cls.routers = []

        inter_router_port = cls.tester.get_port()

        router('A', ('listener', {'role': 'inter-router', 'port': inter_router_port}))
        router('B', ('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port}))

        cls.routers[0].wait_router_connected('B')
        cls.routers[1].wait_router_connected('A')

    # Each test passes: sender host, receiver host, sender-side address,
    # receiver-side address, the management lookup host, and the fully
    # qualified address expected in the router's address table.

    def test_01_one_router_targeted_sender_no_tenant(self):
        test = MessageTransferTest(self.routers[0].addresses[0],
                                   self.routers[0].addresses[0],
                                   "anything/addr_01",
                                   "anything/addr_01",
                                   self.routers[0].addresses[0],
                                   "M0anything/addr_01")
        test.run()
        self.assertIsNone(test.error)

    def test_02_one_router_targeted_sender_tenant_on_sender(self):
        test = MessageTransferTest(self.routers[0].addresses[1],
                                   self.routers[0].addresses[0],
                                   "addr_02",
                                   "hosted-group-1/addr_02",
                                   self.routers[0].addresses[0],
                                   "M0hosted-group-1/addr_02")
        test.run()
        self.assertIsNone(test.error)

    def test_03_one_router_targeted_sender_tenant_on_receiver(self):
        test = MessageTransferTest(self.routers[0].addresses[0],
                                   self.routers[0].addresses[1],
                                   "hosted-group-1/addr_03",
                                   "addr_03",
                                   self.routers[0].addresses[0],
                                   "M0hosted-group-1/addr_03")
        test.run()
        self.assertIsNone(test.error)

    def test_04_one_router_targeted_sender_tenant_on_both(self):
        test = MessageTransferTest(self.routers[0].addresses[1],
                                   self.routers[0].addresses[1],
                                   "addr_04",
                                   "addr_04",
                                   self.routers[0].addresses[0],
                                   "M0hosted-group-1/addr_04")
        test.run()
        self.assertIsNone(test.error)

    def test_05_two_router_targeted_sender_no_tenant(self):
        test = MessageTransferTest(self.routers[0].addresses[0],
                                   self.routers[1].addresses[0],
                                   "hosted-group-1/addr_05",
                                   "hosted-group-1/addr_05",
                                   self.routers[0].addresses[0],
                                   "M0hosted-group-1/addr_05")
        test.run()
        self.assertIsNone(test.error)

    def test_06_two_router_targeted_sender_tenant_on_sender(self):
        test = MessageTransferTest(self.routers[0].addresses[1],
                                   self.routers[1].addresses[0],
                                   "addr_06",
                                   "hosted-group-1/addr_06",
                                   self.routers[0].addresses[0],
                                   "M0hosted-group-1/addr_06")
        test.run()
        self.assertIsNone(test.error)

    def test_07_two_router_targeted_sender_tenant_on_receiver(self):
        test = MessageTransferTest(self.routers[0].addresses[0],
                                   self.routers[1].addresses[1],
                                   "hosted-group-1/addr_07",
                                   "addr_07",
                                   self.routers[0].addresses[0],
                                   "M0hosted-group-1/addr_07")
        test.run()
        self.assertIsNone(test.error)

    def test_08_two_router_targeted_sender_tenant_on_both(self):
        test = MessageTransferTest(self.routers[0].addresses[1],
                                   self.routers[1].addresses[1],
                                   "addr_08",
                                   "addr_08",
                                   self.routers[0].addresses[0],
                                   "M0hosted-group-1/addr_08")
        test.run()
        self.assertIsNone(test.error)

    def test_09_one_router_anonymous_sender_no_tenant(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[0],
                                       self.routers[0].addresses[0],
                                       "anything/addr_09",
                                       "anything/addr_09",
                                       self.routers[0].addresses[0],
                                       "M0anything/addr_09")
        test.run()
        self.assertIsNone(test.error)

    def test_10_one_router_anonymous_sender_tenant_on_sender(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[1],
                                       self.routers[0].addresses[0],
                                       "addr_10",
                                       "hosted-group-1/addr_10",
                                       self.routers[0].addresses[0],
                                       "M0hosted-group-1/addr_10")
        test.run()
        self.assertIsNone(test.error)

    def test_11_one_router_anonymous_sender_tenant_on_receiver(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[0],
                                       self.routers[0].addresses[1],
                                       "hosted-group-1/addr_11",
                                       "addr_11",
                                       self.routers[0].addresses[0],
                                       "M0hosted-group-1/addr_11")
        test.run()
        self.assertIsNone(test.error)

    def test_12_one_router_anonymous_sender_tenant_on_both(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[1],
                                       self.routers[0].addresses[1],
                                       "addr_12",
                                       "addr_12",
                                       self.routers[0].addresses[0],
                                       "M0hosted-group-1/addr_12")
        test.run()
        self.assertIsNone(test.error)

    def test_13_two_router_anonymous_sender_no_tenant(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[0],
                                       self.routers[1].addresses[0],
                                       "anything/addr_13",
                                       "anything/addr_13",
                                       self.routers[0].addresses[0],
                                       "M0anything/addr_13")
        test.run()
        self.assertIsNone(test.error)

    def test_14_two_router_anonymous_sender_tenant_on_sender(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[1],
                                       self.routers[1].addresses[0],
                                       "addr_14",
                                       "hosted-group-1/addr_14",
                                       self.routers[0].addresses[0],
                                       "M0hosted-group-1/addr_14")
        test.run()
        self.assertIsNone(test.error)

    def test_15_two_router_anonymous_sender_tenant_on_receiver(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[0],
                                       self.routers[1].addresses[1],
                                       "hosted-group-1/addr_15",
                                       "addr_15",
                                       self.routers[0].addresses[0],
                                       "M0hosted-group-1/addr_15")
        test.run()
        self.assertIsNone(test.error)

    def test_16_two_router_anonymous_sender_tenant_on_both(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[1],
                                       self.routers[1].addresses[1],
                                       "addr_16",
                                       "addr_16",
                                       self.routers[0].addresses[0],
                                       "M0hosted-group-1/addr_16")
        test.run()
        self.assertIsNone(test.error)

    def test_17_one_router_link_route_targeted(self):
        test = LinkRouteTest(self.routers[0].addresses[1],
                             self.routers[0].addresses[2],
                             "link.addr_17",
                             "hosted-group-1/link.addr_17",
                             False,
                             self.routers[0].addresses[0])
        test.run()
        self.assertIsNone(test.error)

    def test_18_one_router_link_route_targeted_no_tenant(self):
        test = LinkRouteTest(self.routers[0].addresses[0],
                             self.routers[0].addresses[2],
                             "hosted-group-1/link.addr_18",
                             "hosted-group-1/link.addr_18",
                             False,
                             self.routers[0].addresses[0])
        test.run()
        self.assertIsNone(test.error)

    def test_19_one_router_link_route_dynamic(self):
        test = LinkRouteTest(self.routers[0].addresses[1],
                             self.routers[0].addresses[2],
                             "link.addr_19",
                             "hosted-group-1/link.addr_19",
                             True,
                             self.routers[0].addresses[0])
        test.run()
        self.assertIsNone(test.error)

    def test_20_one_router_link_route_dynamic_no_tenant(self):
        test = LinkRouteTest(self.routers[0].addresses[0],
                             self.routers[0].addresses[2],
                             "hosted-group-1/link.addr_20",
                             "hosted-group-1/link.addr_20",
                             True,
                             self.routers[0].addresses[0])
        test.run()
        self.assertIsNone(test.error)

    def test_21_two_router_link_route_targeted(self):
        test = LinkRouteTest(self.routers[0].addresses[1],
                             self.routers[1].addresses[2],
                             "link.addr_21",
                             "hosted-group-1/link.addr_21",
                             False,
                             self.routers[0].addresses[0])
        test.run()
        self.assertIsNone(test.error)

    def test_22_two_router_link_route_targeted_no_tenant(self):
        test = LinkRouteTest(self.routers[0].addresses[0],
                             self.routers[1].addresses[2],
                             "hosted-group-1/link.addr_22",
                             "hosted-group-1/link.addr_22",
                             False,
                             self.routers[0].addresses[0])
        test.run()
        self.assertIsNone(test.error)

    def test_23_two_router_link_route_dynamic(self):
        test = LinkRouteTest(self.routers[0].addresses[1],
                             self.routers[1].addresses[2],
                             "link.addr_23",
                             "hosted-group-1/link.addr_23",
                             True,
                             self.routers[0].addresses[0])
        test.run()
        self.assertIsNone(test.error)

    def test_24_two_router_link_route_dynamic_no_tenant(self):
        test = LinkRouteTest(self.routers[0].addresses[0],
                             self.routers[1].addresses[2],
                             "hosted-group-1/link.addr_24",
                             "hosted-group-1/link.addr_24",
                             True,
                             self.routers[0].addresses[0])
        test.run()
        self.assertIsNone(test.error)

    def test_25_one_router_anonymous_sender_non_mobile(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[1],
                                       self.routers[0].addresses[0],
                                       "_local/addr_25",
                                       "_local/addr_25",
                                       self.routers[0].addresses[0],
                                       "Laddr_25")
        test.run()
        self.assertIsNone(test.error)

    def test_26_one_router_targeted_sender_non_mobile(self):
        test = MessageTransferTest(self.routers[0].addresses[1],
                                   self.routers[0].addresses[0],
                                   "_local/addr_26",
                                   "_local/addr_26",
                                   self.routers[0].addresses[0],
                                   "Laddr_26")
        test.run()
        self.assertIsNone(test.error)

    def test_27_two_router_anonymous_sender_non_mobile(self):
        test = MessageTransferAnonTest(self.routers[0].addresses[1],
                                       self.routers[1].addresses[0],
                                       "_topo/0/B/addr_27",
                                       "_local/addr_27",
                                       self.routers[1].addresses[0],
                                       "Laddr_27")
        test.run()
        self.assertIsNone(test.error)

    def test_28_two_router_targeted_sender_non_mobile(self):
        test = MessageTransferTest(self.routers[0].addresses[1],
                                   self.routers[1].addresses[0],
                                   "_topo/0/B/addr_28",
                                   "_local/addr_28",
                                   self.routers[1].addresses[0],
                                   "Laddr_28")
        test.run()
        self.assertIsNone(test.error)

    def test_29_one_router_waypoint_no_tenant(self):
        test = WaypointTest(self.routers[0].addresses[0],
                            self.routers[0].addresses[2],
                            "hosted-group-1/queue.waypoint",
                            "hosted-group-1/queue.waypoint")
        test.run()
        # Dump the logger output only if there is a test error, otherwise dont bother
        if test.error:
            test.logger.dump()
        self.assertIsNone(test.error)

    def test_30_one_router_waypoint(self):
        test = WaypointTest(self.routers[0].addresses[1],
                            self.routers[0].addresses[2],
                            "queue.waypoint",
                            "hosted-group-1/queue.waypoint")
        test.run()
        # Dump the logger output only if there is a test error, otherwise dont bother
        if test.error:
            test.logger.dump()
        self.assertIsNone(test.error)

    def test_31_two_router_waypoint_no_tenant(self):
        test = WaypointTest(self.routers[0].addresses[0],
                            self.routers[1].addresses[2],
                            "hosted-group-1/queue.waypoint",
                            "hosted-group-1/queue.waypoint")
        test.run()
        # Dump the logger output only if there is a test error, otherwise dont bother
        if test.error:
            test.logger.dump()
        self.assertIsNone(test.error)

    def test_32_two_router_waypoint(self):
        test = WaypointTest(self.routers[0].addresses[1],
                            self.routers[1].addresses[2],
                            "queue.waypoint",
                            "hosted-group-1/queue.waypoint")
        test.run()
        # Dump the logger output only if there is a test error, otherwise dont bother
        if test.error:
            test.logger.dump()
        self.assertIsNone(test.error)

    def test_33_one_router_waypoint_no_tenant_external_addr(self):
        test = WaypointTest(self.routers[0].addresses[0],
                            self.routers[0].addresses[2],
                            "hosted-group-1/queue.ext",
                            "EXT",
                            "ALCE")
        test.run()
        # Dump the logger output only if there is a test error, otherwise dont bother
        if test.error:
            test.logger.dump()
        self.assertIsNone(test.error)

    def test_34_one_router_waypoint_external_addr(self):
        test = WaypointTest(self.routers[0].addresses[1],
                            self.routers[0].addresses[2],
                            "queue.ext",
                            "EXT",
                            "ALCE")
        test.run()
        # Dump the logger output only if there is a test error, otherwise dont bother
        if test.error:
            test.logger.dump()
        self.assertIsNone(test.error)

    def test_35_two_router_waypoint_no_tenant_external_addr(self):
        test = WaypointTest(self.routers[0].addresses[0],
                            self.routers[1].addresses[2],
                            "hosted-group-1/queue.ext",
                            "EXT",
                            "ALCE")
        test.run()
        # Dump the logger output only if there is a test error, otherwise dont bother
        if test.error:
            test.logger.dump()
        self.assertIsNone(test.error)

    def test_36_two_router_waypoint_external_addr(self):
        test = WaypointTest(self.routers[0].addresses[1],
                            self.routers[1].addresses[2],
                            "queue.ext",
                            "EXT",
                            "ALCE")
        test.run()
        # Dump the logger output only if there is a test error, otherwise dont bother
        if test.error:
            test.logger.dump()
        self.assertIsNone(test.error)


class Entity(object):
    """Lightweight wrapper over a management response's attribute map."""

    def __init__(self, status_code, status_description, attrs):
        self.status_code = status_code
        self.status_description = status_description
        self.attrs = attrs

    def __getattr__(self, key):
        # Any unknown attribute is looked up in the response body; raises
        # KeyError (not AttributeError) when absent.
        return self.attrs[key]


class RouterProxy(object):
    """Builds $management request messages and unwraps their responses."""

    def __init__(self, reply_addr):
        self.reply_addr = reply_addr

    def response(self, msg):
        ap = msg.properties
        return Entity(ap['statusCode'], ap['statusDescription'], msg.body)

    def read_address(self, name):
        # READ of a single router.address entity by name.
        ap = {'operation': 'READ', 'type': 'org.apache.qpid.dispatch.router.address', 'name': name}
        return Message(properties=ap, reply_to=self.reply_addr)

    def query_addresses(self):
        # QUERY of all router.address entities.
        ap = {'operation': 'QUERY', 'type': 'org.apache.qpid.dispatch.router.address'}
        return Message(properties=ap, reply_to=self.reply_addr)


class MessageTransferTest(MessagingHandler):
    """Send `count` messages on a targeted link, then verify via a
    management READ that `lookup_address` exists in the address table."""

    def __init__(self, sender_host, receiver_host, sender_address, receiver_address, lookup_host, lookup_address):
        super(MessageTransferTest, self).__init__()
        self.sender_host = sender_host
        self.receiver_host = receiver_host
        self.sender_address = sender_address
        self.receiver_address = receiver_address
        self.lookup_host = lookup_host
        self.lookup_address = lookup_address

        self.sender_conn = None
        self.receiver_conn = None
        self.lookup_conn = None
        self.error = None
        self.sender = None
        self.receiver = None
        self.proxy = None

        self.count = 10
        self.n_sent = 0
        self.n_rcvd = 0
        self.n_accepted = 0
        self.n_receiver_opened = 0
        self.n_sender_opened = 0

    def timeout(self):
        # Record the counters at timeout so the failure message shows how
        # far the transfer got.
        self.error = "Timeout Expired: n_sent=%d n_rcvd=%d n_accepted=%d n_receiver_opened=%d n_sender_opened=%d" %\
            (self.n_sent, self.n_rcvd, self.n_accepted, self.n_receiver_opened, self.n_sender_opened)
        self.sender_conn.close()
        self.receiver_conn.close()
        self.lookup_conn.close()

    def on_start(self, event):
        self.timer = event.reactor.schedule(TIMEOUT, TestTimeout(self))
        self.sender_conn = event.container.connect(self.sender_host)
        self.receiver_conn = event.container.connect(self.receiver_host)
        self.lookup_conn = event.container.connect(self.lookup_host)
        self.reply_receiver = event.container.create_receiver(self.lookup_conn, dynamic=True)
        self.agent_sender = event.container.create_sender(self.lookup_conn, "$management")

    def send(self):
        # Credit-driven send loop; re-entered from on_sendable.
        while self.sender.credit > 0 and self.n_sent < self.count:
            self.n_sent += 1
            m = Message(body="Message %d of %d" % (self.n_sent, self.count))
            self.sender.send(m)

    def on_link_opened(self, event):
        if event.receiver:
            self.n_receiver_opened += 1
        else:
            self.n_sender_opened += 1
        if event.receiver == self.reply_receiver:
            # The dynamic reply address is known now; safe to open the
            # data links.
            self.proxy = RouterProxy(self.reply_receiver.remote_source.address)
            self.sender = event.container.create_sender(self.sender_conn, self.sender_address)
            self.receiver = event.container.create_receiver(self.receiver_conn, self.receiver_address)

    def on_sendable(self, event):
        if event.sender == self.sender:
            self.send()

    def on_message(self, event):
        if event.receiver == self.receiver:
            self.n_rcvd += 1
        if event.receiver == self.reply_receiver:
            # Management response: the final check of the test.
            response = self.proxy.response(event.message)
            if response.status_code != 200:
                self.error = "Unexpected error code from agent: %d - %s" % (response.status_code, response.status_description)
            if self.n_sent != self.count or self.n_rcvd != self.count:
                self.error = "Unexpected counts: n_sent=%d n_rcvd=%d n_accepted=%d" % (self.n_sent, self.n_rcvd, self.n_accepted)
            self.sender_conn.close()
            self.receiver_conn.close()
            self.lookup_conn.close()
            self.timer.cancel()

    def on_accepted(self, event):
        if event.sender == self.sender:
            self.n_accepted += 1
            if self.n_accepted == self.count:
                # All messages settled; issue the address lookup.
                request = self.proxy.read_address(self.lookup_address)
                self.agent_sender.send(request)

    def run(self):
        Container(self).run()


class MessageTransferAnonTest(MessagingHandler):
    """Like MessageTransferTest but with an anonymous sender: polls the
    management agent until the destination address has a subscriber, then
    sends messages with the to-address set per message."""

    def __init__(self, sender_host, receiver_host, sender_address, receiver_address, lookup_host, lookup_address):
        super(MessageTransferAnonTest, self).__init__()
        self.sender_host = sender_host
        self.receiver_host = receiver_host
        self.sender_address = sender_address
        self.receiver_address = receiver_address
        self.lookup_host = lookup_host
        self.lookup_address = lookup_address

        self.sender_conn = None
        self.receiver_conn = None
        self.lookup_conn = None
        self.error = None
        self.sender = None
        self.receiver = None
        self.proxy = None

        self.count = 10
        self.n_sent = 0
        self.n_rcvd = 0
        self.n_accepted = 0
        self.n_agent_reads = 0
        self.n_receiver_opened = 0
        self.n_sender_opened = 0

    def timeout(self):
        self.error = "Timeout Expired: n_sent=%d n_rcvd=%d n_accepted=%d n_agent_reads=%d n_receiver_opened=%d n_sender_opened=%d" %\
            (self.n_sent, self.n_rcvd, self.n_accepted, self.n_agent_reads, self.n_receiver_opened, self.n_sender_opened)
        self.sender_conn.close()
        self.receiver_conn.close()
        self.lookup_conn.close()
        if self.poll_timer:
            self.poll_timer.cancel()

    def poll_timeout(self):
        self.poll()

    def on_start(self, event):
        self.timer = event.reactor.schedule(TIMEOUT, TestTimeout(self))
        self.poll_timer = None
        self.sender_conn = event.container.connect(self.sender_host)
        self.receiver_conn = event.container.connect(self.receiver_host)
        self.lookup_conn = event.container.connect(self.lookup_host)
        self.reply_receiver = event.container.create_receiver(self.lookup_conn, dynamic=True)
        self.agent_sender = event.container.create_sender(self.lookup_conn, "$management")
        self.receiver = event.container.create_receiver(self.receiver_conn, self.receiver_address)

    def send(self):
        while self.sender.credit > 0 and self.n_sent < self.count:
            self.n_sent += 1
            m = Message(body="Message %d of %d" % (self.n_sent, self.count))
            # Anonymous sender: the routing address travels on the message.
            m.address = self.sender_address
            self.sender.send(m)

    def poll(self):
        request = self.proxy.read_address(self.lookup_address)
        self.agent_sender.send(request)
        self.n_agent_reads += 1

    def on_link_opened(self, event):
        if event.receiver:
            self.n_receiver_opened += 1
        else:
            self.n_sender_opened += 1
        if event.receiver == self.reply_receiver:
            self.proxy = RouterProxy(self.reply_receiver.remote_source.address)
            self.poll()

    def on_sendable(self, event):
        if event.sender == self.sender:
            self.send()

    def on_message(self, event):
        if event.receiver == self.receiver:
            self.n_rcvd += 1
        if event.receiver == self.reply_receiver:
            response = self.proxy.response(event.message)
            if response.status_code == 200 and (response.remoteCount + response.subscriberCount) > 0:
                # Address is routable now; start the anonymous sender.
                self.sender = event.container.create_sender(self.sender_conn, None)
                if self.poll_timer:
                    self.poll_timer.cancel()
                    self.poll_timer = None
            else:
                # Not routable yet; poll again shortly.
                self.poll_timer = event.reactor.schedule(0.25, PollTimeout(self))

    def on_accepted(self, event):
        if event.sender == self.sender:
            self.n_accepted += 1
            if self.n_accepted == self.count:
                self.sender_conn.close()
                self.receiver_conn.close()
                self.lookup_conn.close()
                self.timer.cancel()

    def run(self):
        Container(self).run()


class LinkRouteTest(MessagingHandler):
    """Exercise a link route: waits (via management polling) for the link
    route destination to come up, then attaches sender/receiver through it
    and transfers `count` messages.  `dynamic` selects a dynamic-source
    receiver with the x-opt-qd.address option."""

    def __init__(self, first_host, second_host, first_address, second_address, dynamic, lookup_host):
        super(LinkRouteTest, self).__init__(prefetch=0)
        self.first_host = first_host
        self.second_host = second_host
        self.first_address = first_address
        self.second_address = second_address
        self.dynamic = dynamic
        self.lookup_host = lookup_host

        self.first_conn = None
        self.second_conn = None
        self.error = None
        self.first_sender = None
        self.first_receiver = None
        self.second_sender = None
        self.second_receiver = None
        self.poll_timer = None

        self.count = 10
        self.n_sent = 0
        self.n_rcvd = 0
        self.n_settled = 0

    def timeout(self):
        self.error = "Timeout Expired: n_sent=%d n_rcvd=%d n_settled=%d" % (self.n_sent, self.n_rcvd, self.n_settled)
        self.first_conn.close()
        self.second_conn.close()
        self.lookup_conn.close()
        if self.poll_timer:
            self.poll_timer.cancel()

    def poll_timeout(self):
        self.poll()

    def fail(self, text):
        # fail(None) is also the normal-completion path.
        self.error = text
        self.second_conn.close()
        self.first_conn.close()
        self.timer.cancel()
        self.lookup_conn.close()
        if self.poll_timer:
            self.poll_timer.cancel()

    def send(self):
        while self.first_sender.credit > 0 and self.n_sent < self.count:
            self.n_sent += 1
            m = Message(body="Message %d of %d" % (self.n_sent, self.count))
            self.first_sender.send(m)

    def poll(self):
        # 'D' prefix: READ the link-route destination address entry.
        request = self.proxy.read_address("Dhosted-group-1/link")
        self.agent_sender.send(request)

    def setup_first_links(self, event):
        self.first_sender = event.container.create_sender(self.first_conn, self.first_address)
        if self.dynamic:
            self.first_receiver = event.container.create_receiver(self.first_conn,
                                                                  dynamic=True,
                                                                  options=DynamicNodeProperties({"x-opt-qd.address":
                                                                                                 UNICODE(self.first_address)}))
        else:
            self.first_receiver = event.container.create_receiver(self.first_conn, self.first_address)

    def on_start(self, event):
        self.timer = event.reactor.schedule(TIMEOUT, TestTimeout(self))
        self.first_conn = event.container.connect(self.first_host)
        self.second_conn = event.container.connect(self.second_host)
        self.lookup_conn = event.container.connect(self.lookup_host)
        self.reply_receiver = event.container.create_receiver(self.lookup_conn, dynamic=True)
        self.agent_sender = event.container.create_sender(self.lookup_conn, "$management")

    def on_link_opening(self, event):
        # This handler plays the route-container: it must validate and
        # accept the links the router routes to it.
        if event.sender:
            self.second_sender = event.sender
            if self.dynamic:
                if event.sender.remote_source.dynamic:
                    event.sender.source.address = self.second_address
                    event.sender.open()
                else:
                    self.fail("Expected dynamic source on sender")
            else:
                if event.sender.remote_source.address == self.second_address:
                    event.sender.source.address = self.second_address
                    event.sender.open()
                else:
                    self.fail("Incorrect address on incoming sender: got %s, expected %s" %
                              (event.sender.remote_source.address, self.second_address))
        elif event.receiver:
            self.second_receiver = event.receiver
            if event.receiver.remote_target.address == self.second_address:
                event.receiver.target.address = self.second_address
                event.receiver.open()
            else:
                self.fail("Incorrect address on incoming receiver: got %s, expected %s" %
                          (event.receiver.remote_target.address, self.second_address))

    def on_link_opened(self, event):
        if event.receiver:
            # prefetch=0 in __init__, so credit is issued manually here.
            event.receiver.flow(self.count)
        if event.receiver == self.reply_receiver:
            self.proxy = RouterProxy(self.reply_receiver.remote_source.address)
            self.poll()

    def on_sendable(self, event):
        if event.sender == self.first_sender:
            self.send()

    def on_message(self, event):
        if event.receiver == self.first_receiver:
            self.n_rcvd += 1
        if event.receiver == self.reply_receiver:
            response = self.proxy.response(event.message)
            if response.status_code == 200 and (response.remoteCount + response.containerCount) > 0:
                # Link route destination is up; attach the client links.
                if self.poll_timer:
                    self.poll_timer.cancel()
                    self.poll_timer = None
                self.setup_first_links(event)
            else:
                self.poll_timer = event.reactor.schedule(0.25, PollTimeout(self))

    def on_settled(self, event):
        if event.sender == self.first_sender:
            self.n_settled += 1
            if self.n_settled == self.count:
                self.fail(None)

    def run(self):
        # Container id must match the linkRoute containerId in the router
        # config so the route attaches to this handler.
        container = Container(self)
        container.container_id = 'LRC'
        container.run()


class WaypointTest(MessagingHandler):
    """Exercise a waypoint/autoLink address: this handler acts as the
    waypoint (queue) endpoint, relaying phase-1 messages back out so the
    client receiver gets them on phase 2."""

    def __init__(self, first_host, second_host, first_address, second_address, container_id="ALC"):
        super(WaypointTest, self).__init__()
        self.first_host = first_host
        self.second_host = second_host
        self.first_address = first_address
        self.second_address = second_address
        self.container_id = container_id
        self.logger = Logger(title="WaypointTest")

        self.first_conn = None
        self.second_conn = None
        self.error = None
        self.first_sender = None
        self.first_sender_created = False
        self.first_sender_link_opened = False
        self.first_receiver = None
        self.first_receiver_created = False
        self.waypoint_sender = None
        self.waypoint_receiver = None
        self.waypoint_queue = []
        self.waypoint_sender_opened = False
        self.waypoint_receiver_opened = False
        self.firsts_created = False

        self.count = 10
        self.n_sent = 0
        self.n_rcvd = 0
        self.n_waypoint_rcvd = 0
        self.n_thru = 0
        self.outs = None

    def timeout(self):
        self.error = "Timeout Expired: n_sent=%d n_rcvd=%d n_thru=%d n_waypoint_rcvd=%d" % (self.n_sent, self.n_rcvd, self.n_thru, self.n_waypoint_rcvd)
        self.first_conn.close()
        self.second_conn.close()
        self.logger.dump()

    def fail(self, text):
        # fail(None) is also the normal-completion path.
        self.error = text
        self.second_conn.close()
        self.first_conn.close()
        self.timer.cancel()
        self.outs = "n_sent=%d n_rcvd=%d n_thru=%d n_waypoint_rcvd=%d" % (self.n_sent, self.n_rcvd, self.n_thru, self.n_waypoint_rcvd)
        print(self.outs)

    def send_client(self):
        while self.first_sender.credit > 0 and self.n_sent < self.count:
            self.n_sent += 1
            m = Message(body="Message %d of %d" % (self.n_sent, self.count))
            self.first_sender.send(m)

    def send_waypoint(self):
        self.logger.log("send_waypoint called")
        # NOTE(review): while/else — the else branch runs whenever the loop
        # exits without break, i.e. also after successful sends; the log
        # message is only accurate when nothing was sent.
        while self.waypoint_sender.credit > 0 and len(self.waypoint_queue) > 0:
            self.n_thru += 1
            m = self.waypoint_queue.pop()
            self.waypoint_sender.send(m)
            self.logger.log("waypoint_sender message sent")
        else:
            self.logger.log("waypoint_sender did not sent - credit = %s, len(self.waypoint_queue) = %s" % (str(self.waypoint_sender.credit), str(len(self.waypoint_queue))))

    def on_start(self, event):
        self.timer = event.reactor.schedule(TIMEOUT, TestTimeout(self))
        self.first_conn = event.container.connect(self.first_host)
        self.second_conn = event.container.connect(self.second_host)

    def on_link_flow(self, event):
        # Delay creating the client sender until the waypoint sender has
        # credit and its link has opened.
        if event.sender == self.waypoint_sender and self.first_sender_link_opened and not self.first_sender_created:
            self.first_sender_created = True
            self.first_sender = event.container.create_sender(self.first_conn, self.first_address)

    def on_link_opened(self, event):
        if event.receiver == self.waypoint_receiver and not self.first_sender_link_opened:
            self.first_sender_link_opened = True

    def on_link_opening(self, event):
        # Acting as the autoLink container: validate/accept the links the
        # router initiates toward the waypoint.
        if event.sender and not self.waypoint_sender:
            self.waypoint_sender = event.sender
            if event.sender.remote_source.address == self.second_address:
                event.sender.source.address = self.second_address
                event.sender.open()
                self.waypoint_sender_opened = True
            else:
                self.fail("Incorrect address on incoming sender: got %s, expected %s" %
                          (event.sender.remote_source.address, self.second_address))
        elif event.receiver and not self.waypoint_receiver:
            self.waypoint_receiver = event.receiver
            if event.receiver.remote_target.address == self.second_address:
                event.receiver.target.address = self.second_address
                event.receiver.open()
                self.waypoint_receiver_opened = True
            else:
                self.fail("Incorrect address on incoming receiver: got %s, expected %s" %
                          (event.receiver.remote_target.address, self.second_address))
        if self.waypoint_sender_opened and self.waypoint_receiver_opened and not self.first_receiver_created:
            self.first_receiver_created = True
            self.first_receiver = event.container.create_receiver(self.first_conn, self.first_address)

    def on_sendable(self, event):
        if event.sender == self.first_sender:
            self.send_client()

    def on_message(self, event):
        if event.receiver == self.first_receiver:
            self.n_rcvd += 1
            if self.n_rcvd == self.count and self.n_thru == self.count:
                self.fail(None)
        elif event.receiver == self.waypoint_receiver:
            # Relay each message back out through the waypoint sender.
            self.n_waypoint_rcvd += 1
            m = Message(body=event.message.body)
            self.waypoint_queue.append(m)
            self.send_waypoint()

    def run(self):
        # Container id must match the autoLink containerId ('ALC' or 'ALCE').
        container = Container(self)
        container.container_id = self.container_id
        container.run()


if __name__ == '__main__':
    unittest.main(main_module())
"""Test config flow.""" from unittest.mock import patch from aiomusiccast import MusicCastConnectionException import pytest from homeassistant import config_entries, data_entry_flow from homeassistant.components import ssdp from homeassistant.components.yamaha_musiccast.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST from tests.common import MockConfigEntry @pytest.fixture(autouse=True) async def silent_ssdp_scanner(hass): """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with patch( "homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners" ), patch("homeassistant.components.ssdp.Scanner._async_stop_ssdp_listeners"), patch( "homeassistant.components.ssdp.Scanner.async_scan" ): yield @pytest.fixture(autouse=True) def mock_setup_entry(): """Mock setting up a config entry.""" with patch( "homeassistant.components.yamaha_musiccast.async_setup_entry", return_value=True ): yield @pytest.fixture def mock_get_device_info_valid(): """Mock getting valid device info from musiccast API.""" with patch( "aiomusiccast.MusicCastDevice.get_device_info", return_value={"system_id": "1234567890", "model_name": "MC20"}, ): yield @pytest.fixture def mock_get_device_info_invalid(): """Mock getting invalid device info from musiccast API.""" with patch( "aiomusiccast.MusicCastDevice.get_device_info", return_value={"type": "no_yamaha"}, ): yield @pytest.fixture def mock_get_device_info_exception(): """Mock raising an unexpected Exception.""" with patch( "aiomusiccast.MusicCastDevice.get_device_info", side_effect=Exception("mocked error"), ): yield @pytest.fixture def mock_get_device_info_mc_exception(): """Mock raising an unexpected Exception.""" with patch( "aiomusiccast.MusicCastDevice.get_device_info", side_effect=MusicCastConnectionException("mocked error"), ): yield @pytest.fixture def mock_ssdp_yamaha(): """Mock that the SSDP detected device is a musiccast device.""" with 
patch("aiomusiccast.MusicCastDevice.check_yamaha_ssdp", return_value=True): yield @pytest.fixture def mock_ssdp_no_yamaha(): """Mock that the SSDP detected device is not a musiccast device.""" with patch("aiomusiccast.MusicCastDevice.check_yamaha_ssdp", return_value=False): yield @pytest.fixture def mock_valid_discovery_information(): """Mock that the ssdp scanner returns a useful upnp description.""" with patch( "homeassistant.components.ssdp.async_get_discovery_info_by_st", return_value=[ { "ssdp_location": "http://127.0.0.1:9000/MediaRenderer/desc.xml", "_host": "127.0.0.1", } ], ): yield @pytest.fixture def mock_empty_discovery_information(): """Mock that the ssdp scanner returns no upnp description.""" with patch( "homeassistant.components.ssdp.async_get_discovery_info_by_st", return_value=[] ): yield # User Flows async def test_user_input_device_not_found( hass, mock_get_device_info_mc_exception, mock_get_source_ip ): """Test when user specifies a non-existing device.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "none"}, ) assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM assert result2["errors"] == {"base": "cannot_connect"} async def test_user_input_non_yamaha_device_found( hass, mock_get_device_info_invalid, mock_get_source_ip ): """Test when user specifies an existing device, which does not provide the musiccast API.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "127.0.0.1"}, ) assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM assert result2["errors"] == {"base": "no_musiccast_device"} async def 
test_user_input_device_already_existing( hass, mock_get_device_info_valid, mock_get_source_ip ): """Test when user specifies an existing device.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id="1234567890", data={CONF_HOST: "192.168.188.18", "model": "MC20", "serial": "1234567890"}, ) mock_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "192.168.188.18"}, ) assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result2["reason"] == "already_configured" async def test_user_input_unknown_error( hass, mock_get_device_info_exception, mock_get_source_ip ): """Test when user specifies an existing device, which does not provide the musiccast API.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "127.0.0.1"}, ) assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM assert result2["errors"] == {"base": "unknown"} async def test_user_input_device_found( hass, mock_get_device_info_valid, mock_valid_discovery_information, mock_get_source_ip, ): """Test when user specifies an existing device.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "127.0.0.1"}, ) assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert isinstance(result2["result"], ConfigEntry) assert result2["data"] == { "host": "127.0.0.1", "serial": "1234567890", "upnp_description": "http://127.0.0.1:9000/MediaRenderer/desc.xml", } async def test_user_input_device_found_no_ssdp( hass, 
mock_get_device_info_valid, mock_empty_discovery_information, mock_get_source_ip, ): """Test when user specifies an existing device, which no discovery data are present for.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "127.0.0.1"}, ) assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert isinstance(result2["result"], ConfigEntry) assert result2["data"] == { "host": "127.0.0.1", "serial": "1234567890", "upnp_description": "http://127.0.0.1:49154/MediaRenderer/desc.xml", } async def test_import_device_already_existing( hass, mock_get_device_info_valid, mock_get_source_ip ): """Test when the configurations.yaml contains an existing device.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id="1234567890", data={CONF_HOST: "192.168.188.18", "model": "MC20", "serial": "1234567890"}, ) mock_entry.add_to_hass(hass) config = {"platform": "yamaha_musiccast", "host": "192.168.188.18", "port": 5006} result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" async def test_import_error(hass, mock_get_device_info_exception, mock_get_source_ip): """Test when in the configuration.yaml a device is configured, which cannot be added..""" config = {"platform": "yamaha_musiccast", "host": "192.168.188.18", "port": 5006} result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {"base": "unknown"} async def test_import_device_successful( hass, mock_get_device_info_valid, mock_valid_discovery_information, mock_get_source_ip, ): """Test 
when the device was imported successfully.""" config = {"platform": "yamaha_musiccast", "host": "127.0.0.1", "port": 5006} result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert isinstance(result["result"], ConfigEntry) assert result["data"] == { "host": "127.0.0.1", "serial": "1234567890", "upnp_description": "http://127.0.0.1:9000/MediaRenderer/desc.xml", } # SSDP Flows async def test_ssdp_discovery_failed(hass, mock_ssdp_no_yamaha, mock_get_source_ip): """Test when an SSDP discovered device is not a musiccast device.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_SSDP}, data={ ssdp.ATTR_SSDP_LOCATION: "http://127.0.0.1/desc.xml", ssdp.ATTR_UPNP_MODEL_NAME: "MC20", ssdp.ATTR_UPNP_SERIAL: "123456789", }, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "yxc_control_url_missing" async def test_ssdp_discovery_successful_add_device( hass, mock_ssdp_yamaha, mock_get_source_ip ): """Test when the SSDP discovered device is a musiccast device and the user confirms it.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_SSDP}, data={ ssdp.ATTR_SSDP_LOCATION: "http://127.0.0.1/desc.xml", ssdp.ATTR_UPNP_MODEL_NAME: "MC20", ssdp.ATTR_UPNP_SERIAL: "1234567890", }, ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] is None assert result["step_id"] == "confirm" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {}, ) assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert isinstance(result2["result"], ConfigEntry) assert result2["data"] == { "host": "127.0.0.1", "serial": "1234567890", "upnp_description": "http://127.0.0.1/desc.xml", } async def test_ssdp_discovery_existing_device_update( hass, mock_ssdp_yamaha, 
mock_get_source_ip ): """Test when the SSDP discovered device is a musiccast device, but it already exists with another IP.""" mock_entry = MockConfigEntry( domain=DOMAIN, unique_id="1234567890", data={CONF_HOST: "192.168.188.18", "model": "MC20", "serial": "1234567890"}, ) mock_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_SSDP}, data={ ssdp.ATTR_SSDP_LOCATION: "http://127.0.0.1/desc.xml", ssdp.ATTR_UPNP_MODEL_NAME: "MC20", ssdp.ATTR_UPNP_SERIAL: "1234567890", }, ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" assert mock_entry.data[CONF_HOST] == "127.0.0.1" assert mock_entry.data["upnp_description"] == "http://127.0.0.1/desc.xml"
#!/usr/bin/env python
#
# Copyright 2008 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Logic for computing dependency information for closurized JavaScript files.

Closurized JavaScript files express dependencies using goog.require and
goog.provide statements. In order for the linter to detect when a statement is
missing or unnecessary, all identifiers in the JavaScript file must first be
processed to determine if they constitute the creation or usage of a
dependency.
"""

from closure_linter import javascripttokens
from closure_linter import tokenutil

# pylint: disable-msg=C6409
TokenType = javascripttokens.JavaScriptTokenType

# Namespaces that are never reported as extra requires, regardless of usage.
DEFAULT_EXTRA_NAMESPACES = [
    'goog.testing.asserts',
    'goog.testing.jsunit',
]


class ClosurizedNamespacesInfo(object):
  """Dependency information for closurized JavaScript files.

  Processes token streams for dependency creation or usage and provides logic
  for determining if a given require or provide statement is unnecessary or if
  there are missing require or provide statements.
  """

  def __init__(self, closurized_namespaces, ignored_extra_namespaces):
    """Initializes an instance of the ClosurizedNamespacesInfo class.

    Args:
      closurized_namespaces: A list of namespace prefixes that should be
          processed for dependency information. Non-matching namespaces are
          ignored.
      ignored_extra_namespaces: A list of namespaces that should not be
          reported as extra regardless of whether they are actually used.
    """
    self._closurized_namespaces = closurized_namespaces
    self._ignored_extra_namespaces = (ignored_extra_namespaces +
                                      DEFAULT_EXTRA_NAMESPACES)
    self.Reset()

  def Reset(self):
    """Resets the internal state to prepare for processing a new file."""

    # A list of goog.provide tokens in the order they appeared in the file.
    self._provide_tokens = []

    # A list of goog.require tokens in the order they appeared in the file.
    self._require_tokens = []

    # Namespaces that are already goog.provided.
    self._provided_namespaces = []

    # Namespaces that are already goog.required.
    self._required_namespaces = []

    # Note that created_namespaces and used_namespaces contain both namespaces
    # and identifiers because there are many existing cases where a method or
    # constant is provided directly instead of its namespace. Ideally, these
    # two lists would only have to contain namespaces.

    # A list of tuples where the first element is the namespace of an
    # identifier created in the file and the second is the identifier itself.
    self._created_namespaces = []

    # A list of tuples where the first element is the namespace of an
    # identifier used in the file and the second is the identifier itself.
    self._used_namespaces = []

    # A list of seemingly-unnecessary namespaces that are goog.required() and
    # annotated with @suppress {extraRequire}.
    self._suppressed_requires = []

    # A list of goog.provide tokens which are duplicates.
    self._duplicate_provide_tokens = []

    # A list of goog.require tokens which are duplicates.
    self._duplicate_require_tokens = []

    # Whether this file is in a goog.scope. Someday, we may add support
    # for checking scopified namespaces, but for now let's just fail
    # in a more reasonable way.
    self._scopified_file = False

    # TODO(user): Handle the case where there are 2 different requires
    # that can satisfy the same dependency, but only one is necessary.

  def GetProvidedNamespaces(self):
    """Returns the namespaces which are already provided by this file.

    Returns:
      A set of strings where each string is a 'namespace' corresponding to an
      existing goog.provide statement in the file being checked.
    """
    return set(self._provided_namespaces)

  def GetRequiredNamespaces(self):
    """Returns the namespaces which are already required by this file.

    Returns:
      A set of strings where each string is a 'namespace' corresponding to an
      existing goog.require statement in the file being checked.
    """
    return set(self._required_namespaces)

  def IsExtraProvide(self, token):
    """Returns whether the given goog.provide token is unnecessary.

    Args:
      token: A goog.provide token.

    Returns:
      True if the given token corresponds to an unnecessary goog.provide
      statement, otherwise False.
    """
    if self._scopified_file:
      return False

    namespace = tokenutil.Search(token, TokenType.STRING_TEXT).string
    base_namespace = namespace.split('.', 1)[0]
    if base_namespace not in self._closurized_namespaces:
      return False

    if token in self._duplicate_provide_tokens:
      return True

    # TODO(user): There's probably a faster way to compute this.
    for created_namespace, created_identifier in self._created_namespaces:
      if namespace == created_namespace or namespace == created_identifier:
        return False

    return True

  def IsExtraRequire(self, token):
    """Returns whether the given goog.require token is unnecessary.

    Args:
      token: A goog.require token.

    Returns:
      True if the given token corresponds to an unnecessary goog.require
      statement, otherwise False.
    """
    if self._scopified_file:
      return False

    namespace = tokenutil.Search(token, TokenType.STRING_TEXT).string
    base_namespace = namespace.split('.', 1)[0]
    if base_namespace not in self._closurized_namespaces:
      return False

    if namespace in self._ignored_extra_namespaces:
      return False

    if token in self._duplicate_require_tokens:
      return True

    if namespace in self._suppressed_requires:
      return False

    # If the namespace contains a component that is initial caps, then that
    # must be the last component of the namespace.
    parts = namespace.split('.')
    if len(parts) > 1 and parts[-2][0].isupper():
      return True

    # TODO(user): There's probably a faster way to compute this.
    for used_namespace, used_identifier in self._used_namespaces:
      if namespace == used_namespace or namespace == used_identifier:
        return False

    return True

  def GetMissingProvides(self):
    """Returns the set of missing provided namespaces for the current file.

    Returns:
      Returns a set of strings where each string is a namespace that should be
      provided by this file, but is not.
    """
    if self._scopified_file:
      return set()

    missing_provides = set()
    for namespace, identifier in self._created_namespaces:
      if (not self._IsPrivateIdentifier(identifier) and
          namespace not in self._provided_namespaces and
          identifier not in self._provided_namespaces and
          namespace not in self._required_namespaces):
        missing_provides.add(namespace)

    return missing_provides

  def GetMissingRequires(self):
    """Returns the set of missing required namespaces for the current file.

    For each non-private identifier used in the file, find either a
    goog.require, goog.provide or a created identifier that satisfies it.
    goog.require statements can satisfy the identifier by requiring either the
    namespace of the identifier or the identifier itself. goog.provide
    statements can satisfy the identifier by providing the namespace of the
    identifier. A created identifier can only satisfy the used identifier if
    it matches it exactly (necessary since things can be defined on a
    namespace in more than one file). Note that provided namespaces should be
    a subset of created namespaces, but we check both because in some cases we
    can't always detect the creation of the namespace.

    Returns:
      Returns a set of strings where each string is a namespace that should be
      required by this file, but is not.
    """
    if self._scopified_file:
      return set()

    external_dependencies = set(self._required_namespaces)

    # Assume goog namespace is always available.
    external_dependencies.add('goog')

    created_identifiers = set()
    for namespace, identifier in self._created_namespaces:
      created_identifiers.add(identifier)

    missing_requires = set()
    for namespace, identifier in self._used_namespaces:
      if (not self._IsPrivateIdentifier(identifier) and
          namespace not in external_dependencies and
          namespace not in self._provided_namespaces and
          identifier not in external_dependencies and
          identifier not in created_identifiers):
        missing_requires.add(namespace)

    return missing_requires

  def _IsPrivateIdentifier(self, identifier):
    """Returns whether the given identifier is private.

    An identifier is private if any dotted component ends with an underscore.
    """
    pieces = identifier.split('.')
    for piece in pieces:
      if piece.endswith('_'):
        return True
    return False

  def IsFirstProvide(self, token):
    """Returns whether token is the first provide token."""
    return self._provide_tokens and token == self._provide_tokens[0]

  def IsFirstRequire(self, token):
    """Returns whether token is the first require token."""
    return self._require_tokens and token == self._require_tokens[0]

  def IsLastProvide(self, token):
    """Returns whether token is the last provide token."""
    return self._provide_tokens and token == self._provide_tokens[-1]

  def IsLastRequire(self, token):
    """Returns whether token is the last require token."""
    return self._require_tokens and token == self._require_tokens[-1]

  def ProcessToken(self, token, state_tracker):
    """Processes the given token for dependency information.

    Args:
      token: The token to process.
      state_tracker: The JavaScript state tracker.
    """

    # Note that this method is in the critical path for the linter and has been
    # optimized for performance in the following ways:
    # - Tokens are checked by type first to minimize the number of function
    #   calls necessary to determine if action needs to be taken for the token.
    # - The most common tokens types are checked for first.
    # - The number of function calls has been minimized (thus the length of
    #   this function).

    if token.type == TokenType.IDENTIFIER:
      # TODO(user): Consider saving the whole identifier in metadata.
      whole_identifier_string = tokenutil.GetIdentifierForToken(token)
      if whole_identifier_string is None:
        # We only want to process the identifier one time. If the whole string
        # identifier is None, that means this token was part of a multi-token
        # identifier, but it was not the first token of the identifier.
        return

      # In the odd case that a goog.require is encountered inside a function,
      # just ignore it (e.g. dynamic loading in test runners).
      if token.string == 'goog.require' and not state_tracker.InFunction():
        self._require_tokens.append(token)
        namespace = tokenutil.Search(token, TokenType.STRING_TEXT).string
        if namespace in self._required_namespaces:
          self._duplicate_require_tokens.append(token)
        else:
          self._required_namespaces.append(namespace)

        # If there is a suppression for the require, add a usage for it so it
        # gets treated as a regular goog.require (i.e. still gets sorted).
        jsdoc = state_tracker.GetDocComment()
        if jsdoc and ('extraRequire' in jsdoc.suppressions):
          self._suppressed_requires.append(namespace)
          self._AddUsedNamespace(state_tracker, namespace)

      elif token.string == 'goog.provide':
        self._provide_tokens.append(token)
        namespace = tokenutil.Search(token, TokenType.STRING_TEXT).string
        if namespace in self._provided_namespaces:
          self._duplicate_provide_tokens.append(token)
        else:
          self._provided_namespaces.append(namespace)

        # If there is a suppression for the provide, add a creation for it so
        # it gets treated as a regular goog.provide (i.e. still gets sorted).
        jsdoc = state_tracker.GetDocComment()
        if jsdoc and ('extraProvide' in jsdoc.suppressions):
          self._AddCreatedNamespace(state_tracker, namespace)

      elif token.string == 'goog.scope':
        self._scopified_file = True

      else:
        jsdoc = state_tracker.GetDocComment()
        if jsdoc and jsdoc.HasFlag('typedef'):
          # A @typedef creates the identifier rather than merely using it.
          self._AddCreatedNamespace(state_tracker, whole_identifier_string,
                                    self.GetClosurizedNamespace(
                                        whole_identifier_string))
        else:
          self._AddUsedNamespace(state_tracker, whole_identifier_string)

    elif token.type == TokenType.SIMPLE_LVALUE:
      identifier = token.values['identifier']
      namespace = self.GetClosurizedNamespace(identifier)
      if state_tracker.InFunction():
        # Assignments inside a function body count as usage, not creation.
        self._AddUsedNamespace(state_tracker, identifier)
      elif namespace and namespace != 'goog':
        self._AddCreatedNamespace(state_tracker, identifier, namespace)

    elif token.type == TokenType.DOC_FLAG:
      flag_type = token.attached_object.flag_type
      is_interface = state_tracker.GetDocComment().HasFlag('interface')
      if flag_type == 'implements' or (flag_type == 'extends' and is_interface):
        # Interfaces should be goog.require'd.
        doc_start = tokenutil.Search(token, TokenType.DOC_START_BRACE)
        interface = tokenutil.Search(doc_start, TokenType.COMMENT)
        self._AddUsedNamespace(state_tracker, interface.string)

  def _AddCreatedNamespace(self, state_tracker, identifier, namespace=None):
    """Adds the namespace of an identifier to the list of created namespaces.

    If the identifier is annotated with a 'missingProvide' suppression, it is
    not added.

    Args:
      state_tracker: The JavaScriptStateTracker instance.
      identifier: The identifier to add.
      namespace: The namespace of the identifier or None if the identifier is
          also the namespace.
    """
    if not namespace:
      namespace = identifier

    jsdoc = state_tracker.GetDocComment()
    if jsdoc and 'missingProvide' in jsdoc.suppressions:
      return

    self._created_namespaces.append([namespace, identifier])

  def _AddUsedNamespace(self, state_tracker, identifier):
    """Adds the namespace of an identifier to the list of used namespaces.

    If the identifier is annotated with a 'missingRequire' suppression, it is
    not added.

    Args:
      state_tracker: The JavaScriptStateTracker instance.
      identifier: An identifier which has been used.
    """
    jsdoc = state_tracker.GetDocComment()
    if jsdoc and 'missingRequire' in jsdoc.suppressions:
      return

    namespace = self.GetClosurizedNamespace(identifier)
    if namespace:
      self._used_namespaces.append([namespace, identifier])

  def GetClosurizedNamespace(self, identifier):
    """Given an identifier, returns the namespace that identifier is from.

    Args:
      identifier: The identifier to extract a namespace from.

    Returns:
      The namespace the given identifier resides in, or None if one could not
      be found.
    """
    if identifier.startswith('goog.global'):
      # Ignore goog.global, since it is, by definition, global.
      return None

    parts = identifier.split('.')
    for namespace in self._closurized_namespaces:
      if not identifier.startswith(namespace + '.'):
        continue

      last_part = parts[-1]
      if not last_part:
        # TODO(robbyw): Handle this: it's a multi-line identifier.
        return None

      # The namespace for a class is the shortest prefix ending in a class
      # name, which starts with a capital letter but is not a capitalized word.
      #
      # We ultimately do not want to allow requiring or providing of inner
      # classes/enums. Instead, a file should provide only the top-level class
      # and users should require only that.
      # NOTE: the loop variable `namespace` is deliberately rebound here from
      # the matched prefix string to an accumulator list of parts.
      namespace = []
      for part in parts:
        if part == 'prototype' or part.isupper():
          return '.'.join(namespace)
        namespace.append(part)
        if part[0].isupper():
          return '.'.join(namespace)

      # At this point, we know there's no class or enum, so the namespace is
      # just the identifier with the last part removed. With the exception of
      # apply, inherits, and call, which should also be stripped.
      if parts[-1] in ('apply', 'inherits', 'call'):
        parts.pop()
      parts.pop()

      # If the last part ends with an underscore, it is a private variable,
      # method, or enum. The namespace is whatever is before it.
      if parts and parts[-1].endswith('_'):
        parts.pop()

      return '.'.join(parts)

    return None
import os
import numpy as np
import numpy.testing as npt
import pytest
from skimage.io import imsave

from pulse2percept.stimuli import (ImageStimulus, LogoBVL, LogoUCSB,
                                   SnellenChart)


def create_dummy_img(fname, shape, mode, gray=1.0, return_data=False):
    """Write a dummy uint8 PNG to `fname` and optionally return its data.

    Args:
        fname: Output file name.
        shape: Image shape; for mode 'custom' this is the pixel array itself.
        mode: 'ones', 'zeros', 'rand', or 'custom'.
        gray: Scale factor (in [0, 1]) applied in 'ones'/'rand' modes.
        return_data: If True, return the float array that was saved.
    """
    if mode == 'ones':
        ndarray = np.ones(shape) * gray
    elif mode == 'zeros':
        ndarray = np.zeros(shape)
    elif mode == 'rand':
        ndarray = np.random.rand(*shape) * gray
    elif mode == 'custom':
        # In 'custom' mode, `shape` is the pixel data itself:
        ndarray = shape
    imsave(fname, (255 * ndarray).astype(np.uint8))
    if return_data:
        return ndarray


def test_ImageStimulus():
    """Loading a PNG must reproduce the pixel data and metadata."""
    # Create a dummy image:
    fname = 'test.png'
    shape = (25, 37, 4)
    ndarray = create_dummy_img(fname, shape, 'rand', return_data=True)
    # Make sure ImageStimulus loaded is identical to dummy image:
    stim = ImageStimulus(fname)
    npt.assert_equal(stim.shape, (np.prod(shape), 1))
    # decimal=2 because the saved image was quantized to uint8:
    npt.assert_almost_equal(stim.data, ndarray.reshape((-1, 1)), decimal=2)
    npt.assert_equal(stim.metadata['source'], fname)
    npt.assert_equal(stim.metadata['source_shape'], shape)
    npt.assert_equal(stim.time, None)
    npt.assert_equal(stim.electrodes, np.arange(np.prod(shape)))
    os.remove(fname)


def test_ImageStimulus_invert():
    """invert() must flip gray levels without mutating the original."""
    # Create a dummy image:
    fname = 'test.png'
    shape = (25, 37)
    gray = 1 / 255.0
    create_dummy_img(fname, shape, 'ones', gray=gray)
    # Gray levels are between 0 and 1, and can be inverted:
    stim = ImageStimulus(fname)
    npt.assert_almost_equal(stim.data, gray)
    npt.assert_almost_equal(stim.invert().data, 1 - gray)
    # Inverting does not change the original object:
    npt.assert_almost_equal(stim.data, gray)
    os.remove(fname)


def test_ImageStimulus_rgb2gray():
    """rgb2gray() must drop the channel axis, leaving the original intact."""
    # Create a dummy image:
    fname = 'test.png'
    shape = (25, 37, 3)
    gray = 1 / 255.0
    create_dummy_img(fname, shape, 'ones', gray=gray)
    # Gray levels are between 0 and 1, and can be inverted:
    stim_rgb = ImageStimulus(fname)
    stim_gray = stim_rgb.rgb2gray()
    npt.assert_almost_equal(stim_gray.data, gray)
    npt.assert_equal(stim_gray.img_shape, shape[:2])
    # Original stim unchanged:
    npt.assert_equal(stim_rgb.img_shape, shape)
    os.remove(fname)


def test_ImageStimulus_resize():
    """resize() must honor explicit sizes and infer a -1 dimension."""
    fname = 'test.png'
    shape = (25, 37, 3)
    gray = 129 / 255.0
    create_dummy_img(fname, shape, 'ones', gray=gray)
    # Gray levels are between 0 and 1, and can be inverted:
    stim = ImageStimulus(fname)
    npt.assert_almost_equal(stim.data, gray)
    npt.assert_equal(stim.resize((13, -1)).img_shape, (13, 19, 3))
    # Resize with one dimension -1:
    npt.assert_equal(stim.resize((-1, 24)).img_shape, (16, 24, 3))
    # Both dimensions -1 is ambiguous and must raise:
    with pytest.raises(ValueError):
        stim.resize((-1, -1))
    os.remove(fname)


def test_ImageStimulus_crop():
    """crop() must slice pixels/electrodes and validate its arguments."""
    fname = 'test.png'
    shape = (30, 50, 3)
    # Note: no return value needed here, the stimulus is loaded from file:
    create_dummy_img(fname, shape, 'rand')
    stim = ImageStimulus(fname)
    stim_cropped = stim.crop(idx_rect=[5, 10, 25, 40])
    npt.assert_equal(stim_cropped.img_shape, (20, 30, 3))
    npt.assert_equal(stim_cropped.data.reshape(stim_cropped.img_shape)[3, 7],
                     stim.data.reshape(stim.img_shape)[8, 17])
    npt.assert_equal(stim_cropped.data.reshape(stim_cropped.img_shape)[10, 28],
                     stim.data.reshape(stim.img_shape)[15, 38])
    # Electrode labels must follow the cropped pixels:
    npt.assert_equal(stim.electrodes.reshape(30, 50, 3)[8, 17, 0],
                     stim_cropped.electrodes.reshape(20, 30, 3)[3, 7, 0])
    npt.assert_equal(stim.electrodes.reshape(30, 50, 3)[15, 38, 2],
                     stim_cropped.electrodes.reshape(20, 30, 3)[10, 28, 2])
    stim_cropped2 = stim.crop(left=10, right=8, top=6, bottom=7)
    npt.assert_equal(stim_cropped2.img_shape, (17, 32, 3))
    npt.assert_equal(stim_cropped2.data.reshape(stim_cropped2.img_shape)[3, 7],
                     stim.data.reshape(stim.img_shape)[9, 17])
    npt.assert_equal(
        stim_cropped2.data.reshape(stim_cropped2.img_shape)[10, 28],
        stim.data.reshape(stim.img_shape)[16, 38])
    # "crop-indices and crop-width (left, right, up, down) cannot exist at the
    # same time"
    with pytest.raises(ValueError):
        stim.crop(idx_rect=[5, 10, 25, 40], left=10)
    with pytest.raises(ValueError):
        stim.crop([5, 10, 25, 40], right=8)
    with pytest.raises(ValueError):
        stim.crop([5, 10, 25, 40], top=6)
    with pytest.raises(ValueError):
        stim.crop([5, 10, 25, 40], bottom=7)
    # "crop-width(left, right, up, down) cannot be negative"
    with pytest.raises(ValueError):
        stim.crop(left=-1)
    with pytest.raises(ValueError):
        stim.crop(right=-1)
    with pytest.raises(ValueError):
        stim.crop(top=-1)
    with pytest.raises(ValueError):
        stim.crop(bottom=-1)
    # "crop-width should be smaller than the shape of the image"
    with pytest.raises(ValueError):
        stim.crop(left=32, right=20)
    with pytest.raises(ValueError):
        stim.crop(top=12, bottom=18)
    # "crop-indices must be on the image"
    with pytest.raises(ValueError):
        stim.crop([-1, 10, 25, 40])
    with pytest.raises(ValueError):
        stim.crop([5, -1, 25, 40])
    with pytest.raises(ValueError):
        stim.crop([5, 10, 31, 40])
    with pytest.raises(ValueError):
        stim.crop([5, 10, 25, 51])
    # "crop-indices is invalid. It should be [y1,x1,y2,x2], where (y1,x1) is
    # upperleft and (y2,x2) is bottom-right"
    with pytest.raises(ValueError):
        stim.crop([5, 10, 4, 40])
    with pytest.raises(ValueError):
        stim.crop([5, 10, 25, 9])
    os.remove(fname)


def test_ImageStimulus_trim():
    """trim() must peel off border frames up to the given tolerance."""
    shape = (13, 29)
    ndarray = np.zeros(shape)
    ndarray[1:-1, 1:-1] = 0.1
    ndarray[2:-2, 2:-2] = 0.2
    stim = ImageStimulus(ndarray)
    npt.assert_equal(stim.trim().img_shape, (shape[0] - 2, shape[1] - 2))
    npt.assert_equal(stim.trim(tol=0.05).img_shape,
                     (shape[0] - 2, shape[1] - 2))
    npt.assert_equal(stim.trim(tol=0.1).img_shape,
                     (shape[0] - 4, shape[1] - 4))
    npt.assert_equal(stim.trim(tol=0.2).img_shape, (1, 0))
    # Trimming must be idempotent at a given tolerance:
    npt.assert_equal(stim.trim(tol=0.1).img_shape,
                     stim.trim().trim(tol=0.1).img_shape)


def test_ImageStimulus_threshold():
    """threshold() must binarize, leaving the original intact."""
    # Create a dummy image:
    fname = 'test.png'
    shape = (25, 37, 3)
    gray = 129 / 255.0
    create_dummy_img(fname, shape, 'ones', gray=gray)
    stim = ImageStimulus(fname, as_gray=True)
    stim_th = stim.threshold(0.5)
    # BUGFIX: the assertions previously re-checked `stim`, never `stim_th`.
    # gray (129/255 ~ 0.506) lies above 0.5, so every pixel thresholds to 1:
    npt.assert_almost_equal(stim_th.data, 1)
    npt.assert_equal(stim_th.img_shape, shape[:2])
    # Thresholding does not change the original object:
    npt.assert_almost_equal(stim.data, gray)
    npt.assert_equal(stim.img_shape, shape[:2])
    os.remove(fname)


def test_ImageStimulus_rotate():
    """rotate() must map a horizontal bar onto the expected lines."""
    # Create a horizontal bar:
    fname = 'test.png'
    shape = (5, 5)
    ndarray = np.zeros(shape, dtype=np.uint8)
    ndarray[2, :] = 255
    imsave(fname, ndarray)
    stim = ImageStimulus(fname)
    # Vertical line:
    vert = stim.rotate(90, mode='reflect')
    npt.assert_almost_equal(vert.data.reshape(stim.img_shape)[:, 0], 0)
    npt.assert_almost_equal(vert.data.reshape(stim.img_shape)[:, 1], 0)
    npt.assert_almost_equal(vert.data.reshape(stim.img_shape)[:, 2], 1)
    npt.assert_almost_equal(vert.data.reshape(stim.img_shape)[:, 3], 0)
    npt.assert_almost_equal(vert.data.reshape(stim.img_shape)[:, 4], 0)
    # Diagonal, bottom-left to top-right:
    diag = stim.rotate(45, mode='reflect')
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[0, 4], 1)
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[2, 2], 1)
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[4, 0], 1)
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[0, 0], 0)
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[4, 4], 0)
    # Diagonal, top-left to bottom-right:
    diag = stim.rotate(-45, mode='reflect')
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[0, 0], 1)
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[2, 2], 1)
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[4, 4], 1)
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[0, 4], 0)
    npt.assert_almost_equal(diag.data.reshape(stim.img_shape)[4, 0], 0)
    os.remove(fname)


def test_ImageStimulus_shift():
    """shift() must translate the bar by whole pixels."""
    # Create a horizontal bar:
    fname = 'test.png'
    shape = (5, 5)
    ndarray = np.zeros(shape, dtype=np.uint8)
    ndarray[2, :] = 255
    imsave(fname, ndarray)
    stim = ImageStimulus(fname)
    # Top row:
    top = stim.shift(0, -2)
    npt.assert_almost_equal(top.data.reshape(stim.img_shape)[0, :], 1)
    npt.assert_almost_equal(top.data.reshape(stim.img_shape)[1:, :], 0)
    # Bottom row:
    bottom = stim.shift(0, 2)
    npt.assert_almost_equal(bottom.data.reshape(stim.img_shape)[:4, :], 0)
    npt.assert_almost_equal(bottom.data.reshape(stim.img_shape)[4, :], 1)
    # Bottom right pixel:
    bottom = stim.shift(4, 2)
    npt.assert_almost_equal(bottom.data.reshape(stim.img_shape)[4, 4], 1)
    npt.assert_almost_equal(bottom.data.reshape(stim.img_shape)[:4, :], 0)
    npt.assert_almost_equal(bottom.data.reshape(stim.img_shape)[:, :4], 0)
    os.remove(fname)


def test_ImageStimulus_center():
    """center() must recenter a shifted image back onto the original."""
    # Create a horizontal bar:
    fname = 'test.png'
    shape = (5, 5)
    ndarray = np.zeros(shape, dtype=np.uint8)
    ndarray[2, :] = 255
    imsave(fname, ndarray)
    # Center phosphene:
    stim = ImageStimulus(fname)
    npt.assert_almost_equal(stim.data, stim.center().data)
    npt.assert_almost_equal(stim.data, stim.shift(0, 2).center().data)
    os.remove(fname)


def test_ImageStimulus_scale():
    """scale() must shrink the bar down to its center pixel."""
    # Create a horizontal bar:
    fname = 'test.png'
    shape = (5, 5)
    ndarray = np.zeros(shape, dtype=np.uint8)
    ndarray[2, :] = 255
    imsave(fname, ndarray)
    # Scale phosphene:
    stim = ImageStimulus(fname)
    npt.assert_almost_equal(stim.data, stim.scale(1).data)
    # Scaling way down leaves only the center pixel (flat index 12) lit:
    npt.assert_almost_equal(stim.scale(0.1)[12], 1)
    npt.assert_almost_equal(stim.scale(0.1)[:12], 0)
    npt.assert_almost_equal(stim.scale(0.1)[13:], 0)
    with pytest.raises(ValueError):
        stim.scale(0)
    os.remove(fname)


def test_ImageStimulus_filter():
    """filter() must preserve shape/electrodes and validate filter names."""
    # Create a dummy image:
    fname = 'test.png'
    shape = (25, 37)
    create_dummy_img(fname, shape, 'rand')
    stim = ImageStimulus(fname)
    for filt in ['sobel', 'scharr', 'canny', 'median']:
        filt_stim = stim.filter(filt)
        npt.assert_equal(filt_stim.shape, stim.shape)
        npt.assert_equal(filt_stim.img_shape, stim.img_shape)
        npt.assert_equal(filt_stim.electrodes, stim.electrodes)
        npt.assert_equal(filt_stim.time, None)
    # Invalid filter name:
    with pytest.raises(TypeError):
        stim.filter({'invalid'})
    with pytest.raises(ValueError):
        stim.filter('invalid')
    os.remove(fname)


def test_ImageStimulus_encode():
    """encode() must map gray levels into the requested amplitude range."""
    stim = ImageStimulus(np.random.rand(4, 5))
    # Amplitude encoding in default range:
    enc = stim.encode()
    npt.assert_almost_equal(enc.time[-1], 500)
    npt.assert_almost_equal(enc.data.max(axis=1).min(), 0)
    npt.assert_almost_equal(enc.data.max(axis=1).max(), 50)
    # Amplitude encoding in custom range:
    enc = stim.encode(amp_range=(2, 43))
    npt.assert_almost_equal(enc.time[-1], 500)
    npt.assert_almost_equal(enc.data.max(axis=1).min(), 2)
    npt.assert_almost_equal(enc.data.max(axis=1).max(), 43)
    with pytest.raises(TypeError):
        stim.encode(pulse={'invalid': 1})
    with pytest.raises(ValueError):
        stim.encode(pulse=LogoUCSB())


def test_ImageStimulus_plot():
    """plot() must draw the image with matching axis limits."""
    # Create a horizontal bar:
    fname = 'test.png'
    shape = (5, 5)
    ndarray = np.zeros(shape, dtype=np.uint8)
    ndarray[2, :] = 255
    imsave(fname, ndarray)
    stim = ImageStimulus(fname)
    ax = stim.plot()
    npt.assert_equal(ax.axis(), (-0.5, 4.5, 4.5, -0.5))
    os.remove(fname)


def test_ImageStimulus_save():
    """save() must round-trip the pixel data through a file."""
    # Create a horizontal bar:
    fname = 'test.png'
    shape = (5, 5)
    ndarray = np.zeros(shape, dtype=np.uint8)
    ndarray[2, :] = 255
    imsave(fname, ndarray)
    stim = ImageStimulus(fname)
    fname2 = 'test2.png'
    stim.save(fname2)
    npt.assert_almost_equal(stim.data, ImageStimulus(fname2).data)
    os.remove(fname)
    os.remove(fname2)


@pytest.mark.parametrize('show_annotations', (True, False))
def test_SnellenChart(show_annotations):
    """SnellenChart must expose the expected shape, with/without labels."""
    width = 840 if show_annotations else 444
    snellen = SnellenChart(show_annotations=show_annotations)
    npt.assert_equal(snellen.img_shape, (1348, width))
    npt.assert_equal(snellen.time, None)
    npt.assert_almost_equal(snellen.data.max(), 1)
    npt.assert_almost_equal(snellen.data.min(), 0)
    # A single row can be selected:
    snellen = SnellenChart(row=1, show_annotations=show_annotations)
    npt.assert_equal(snellen.img_shape, (255, width))
    # Rows are 1-11; anything else must raise:
    with pytest.raises(ValueError):
        SnellenChart(row=0)
    with pytest.raises(ValueError):
        SnellenChart(row=12)
    with pytest.raises(ValueError):
        SnellenChart(row=[1, 3])


def test_LogoBVL():
    """Bionic Vision Lab logo: RGBA, normalized to [0, 1]."""
    logo = LogoBVL()
    npt.assert_equal(logo.img_shape, (576, 720, 4))
    npt.assert_equal(logo.time, None)
    npt.assert_almost_equal(logo.data.min(), 0)
    npt.assert_almost_equal(logo.data.max(), 1)


def test_LogoUCSB():
    """UCSB logo: grayscale, normalized to [0, 1]."""
    logo = LogoUCSB()
    npt.assert_equal(logo.img_shape, (324, 727))
    npt.assert_equal(logo.time, None)
    npt.assert_almost_equal(logo.data.min(), 0)
    npt.assert_almost_equal(logo.data.max(), 1)
import numpy as np
import matplotlib.pyplot as plt
import os.path
import glob
from joblib import Parallel, delayed
import h5py
from itertools import islice
import time


def window(seq, n=2):
    "Returns a sliding window (of width n) over data from the iterable"
    " s -> (s0,s1,...s[n-1]), (s1,s2,...,sn), ... "
    # Standard itertools sliding-window recipe; currently unused in this file.
    it = iter(seq)
    result = tuple(islice(it, n))
    if len(result) == n:
        yield result
    for elem in it:
        result = result[1:] + (elem,)
        yield result


def label_datasets():
    # Parse answers.csv into {run_id: {'source': name, 'time': seconds}}.
    # Lines starting with 'R' (the CSV header, e.g. "RunID,...") are skipped.
    targetfile = '/home/holiestcow/Documents/zephyr/datasets/muse/trainingData/answers.csv'
    head, tail = os.path.split(targetfile)
    # filename = []
    source_labels = {}
    # Numeric source IDs used in answers.csv mapped to human-readable names.
    id2string = {0: 'Background',
                 1: 'HEU',
                 2: 'WGPu',
                 3: 'I131',
                 4: 'Co60',
                 5: 'Tc99',
                 6: 'HEUandTc99'}
    f = open(targetfile, 'r')
    a = f.readlines()
    for i in range(len(a)):
        line = a[i].strip()
        if line[0] == 'R':
            continue
        parsed = line.split(',')
        filename = parsed[0]
        source = parsed[1]
        source_time = parsed[2]
        source_labels[filename] = {'source': id2string[int(source)],
                                   'time': float(source_time)}
    f.close()
    return source_labels


def group_consecutives(vals, step=1):
    """Return list of consecutive lists of numbers from vals (number list)."""
    run = []
    result = [run]
    expect = None
    for v in vals:
        # A value continues the current run if it equals the expected next
        # value; the first value always starts the (initially empty) run.
        if (v == expect) or (expect is None):
            run.append(v)
        else:
            run = [v]
            result.append(run)
        expect = v + step
    return result


def reject_outliers(data, m=2.):
    # Median-absolute-deviation outlier filter: keep points within m scaled
    # MADs of the median.
    # NOTE(review): when mdev == 0, `s` becomes the scalar 0. and
    # `data[s < m]` indexes with a scalar boolean — verify this edge case is
    # intended (currently only called from commented-out code).
    d = np.abs(data - np.median(data))
    mdev = np.median(d)
    s = d / mdev if mdev else 0.
    return data[s < m]


def findMiddle(input_list):
    # Return the middle element of a list (upper middle for even lengths).
    middle = float(len(input_list)) / 2
    if middle % 2 != 0:
        return input_list[int(middle - .5)]
    else:
        # return (input_list[int(middle)], input_list[int(middle-1)])
        return input_list[int(middle)]


def store_sequence(targetfile, labels):
    """Extract the source-bearing spectra from one run.

    Loads ./train_integrations/<targetfile>.npy, finds the above-median-count
    time bins grouped into consecutive runs, keeps the run containing (or
    nearest to) the labeled source time, and returns only those spectra with
    their class labels.

    Returns:
        (spectra, labels) arrays; both empty (0 rows) if fewer than two bins
        were flagged.
    """
    string2id = {'Background': 0,
                 'HEU': 1,
                 'WGPu': 2,
                 'I131': 3,
                 'Co60': 4,
                 'Tc99': 5,
                 'HEUandTc99': 6}
    random_file = targetfile
    x = np.load('./train_integrations/' + random_file + '.npy')
    # Drop the first column (presumably a time/index column — TODO confirm).
    x = x[:, 1:]
    source_index = int(labels[random_file]['time'])
    source_type = labels[random_file]['source']
    if source_index <= 3:
        # This means this is a background file
        source_index = np.random.randint(x.shape[0])
        source_type = 'Background'
    # left = source_index - 29
    # right = source_index + 30
    # left = source_index - 119
    # right = source_index + 120
    left = 0
    right = x.shape[0]
    if left < 0:
        left = 0
    if right >= x.shape[0]:
        right = x.shape[0]
    # NOTE(review): left/right are computed but the full run is used below.
    current_slice = x
    current_slice_counts = np.sum(current_slice, axis=1)
    # modified_slice_counts = reject_outliers(current_slice_counts, m=5)
    # current_slice_count_mean = np.mean(modified_slice_counts)
    current_slice_count_median = np.median(current_slice_counts)
    # current_slice_count_min = np.min(current_slice_counts)
    # current_slice_count_std = np.std(modified_slice_counts)
    hits = np.zeros((current_slice.shape[0], ), dtype=bool)
    # threshold = current_slice_count_min + (0.25 * current_slice_count_min)
    # threshold = current_slice_count_mean + (0.5 * current_slice_count_std)
    threshold = current_slice_count_median
    # print(threshold)
    # threshold = current_slice_count_mean + (1 * current_slice_count_std)
    # Flag every time bin whose total counts exceed the median.
    hits[current_slice_counts > threshold] = True
    machine = np.argwhere(hits == True)
    hits = np.zeros((current_slice.shape[0], ), dtype=bool)
    machine = machine.reshape((machine.shape[0], ))
    # Group the flagged bin indices into consecutive runs.
    grouping = group_consecutives(machine)
    # Keep only the run that contains the labeled source time...
    for group in grouping:
        if source_index in group:
            hits[group] = True
    if np.sum(hits) == 0:
        # ...or, failing that, the run whose middle is closest to it.
        listofmiddles = []
        # print(grouping)
        for group in grouping:
            listofmiddles += [findMiddle(group)]
        # print(listofmiddles)
        listofmiddles = np.array(listofmiddles)
        delta = np.subtract(listofmiddles,
                            np.tile(source_index, len(listofmiddles)))
        delta_squared = np.power(delta, 2)
        desired_index = np.argmin(delta_squared)
        hits[grouping[desired_index]] = True
    if np.sum(hits) == 1 or np.sum(hits) == 0:
        # Fewer than two flagged bins: nothing worth storing.
        return np.zeros((0, 1024)), np.zeros((0,))
    tostore_spectra = current_slice
    tostore_labels = np.zeros((tostore_spectra.shape[0], ), dtype=int)
    # NOTE(review): the -1 shifts labels so HEU=0, WGPu=1, ...; for
    # source_type == 'Background' this yields -1 — confirm intended, since
    # the filter below only drops label 0 (unflagged bins).
    tostore_labels[hits] = int(string2id[source_type]) - 1
    # THIS IS THE JUST SOURCE PORTION
    desired_index = tostore_labels != 0
    tostore_spectra = tostore_spectra[desired_index, :]
    tostore_labels = tostore_labels[desired_index]
    # grphandle.create_dataset('measured_spectra', data=tostore_spectra, compression='gzip')
    # grphandle.create_dataset('labels', data=tostore_labels, compression='gzip')
    # Plot the hits:
    # for i in range(len(tostore_labels)):
    #     if tostore_labels[i] != 0:
    #         fig = plt.figure()
    #         plt.plot(tostore_spectra[i, :])
    #         plt.title('source{}_time{}'.format(source_type, source_index))
    #         plt.axis([0, 1024, 0, 50])
    #         fig.savefig('./plots/{}_{}'.format(targetfile, i))
    #         plt.close()
    return tostore_spectra, tostore_labels
    # print(source_type)
    # print(np.sum(x[source_index-2, :]),np.sum(x[source_index-1, :]),np.sum(x[source_index, :]),np.sum(x[source_index+1, :]),np.sum(x[source_index+2, :]))
    # print(np.mean(np.sum(current_slice, axis=1)), np.std(np.sum(current_slice, axis=1)))
    # fig = plt.figure()
    # for j in range(current_slice.shape[0]):
    #     if hits[j]:
    #         # plt.plot(x[j, :], color='r')
    #         plt.plot(j, current_slice_counts[j], 'r.')
    #     else:
    #         # plt.plot(x[j, :], color='b')
    #         plt.plot(j, current_slice_counts[j], 'b.')
    # # plt.axis([0, 1024, 0, 50])
    # plt.title('type{}_current{}_target{}'.format(source_type, j, center))
    # fig.savefig('./{}/{:06d}_{:04d}.png'.format(outdir, int(targetfile), j))
    # plt.close()


def main():
    """Build the train/test HDF5 dataset from the labeled source runs."""
    # only need to do this once.
    ncores = 4
    # test_filelist = glob.glob('/home/holiestcow/Documents/zephyr/datasets/muse/testData/2*.csv')
    id2string = {0: 'Background',
                 1: 'HEU',
                 2: 'WGPu',
                 3: 'I131',
                 4: 'Co60',
                 5: 'Tc99',
                 6: 'HEUandTc99'}
    string2id = {'Background': 0,
                 'HEU': 1,
                 'WGPu': 2,
                 'I131': 3,
                 'Co60': 4,
                 'Tc99': 5,
                 'HEUandTc99': 6}
    # filelist = glob.glob('/home/holiestcow/Documents/zephyr/datasets/muse/trainingData/1*.csv')
    # Parallel(n_jobs=ncores)(delayed(parse_datafiles)(item, binnumber) for item in filelist)
    labels = label_datasets()
    # Split runs into background-only vs source-bearing lists.
    allfilelist = []
    backgroundfilelist = []
    sourcefilelist = []
    for anyfile in labels.keys():
        if labels[anyfile]['source'] == 'Background':
            backgroundfilelist += [anyfile]
        else:
            sourcefilelist += [anyfile]
        allfilelist += [anyfile]
    sourcefilelist.sort()
    backgroundfilelist.sort()
    # 50/50 train/test split on the sorted lists.
    bg_training_threshold = int(len(backgroundfilelist) / 2)
    s_training_threshold = int(len(sourcefilelist) / 2)
    backgroundfilelist_train = backgroundfilelist[:bg_training_threshold]
    backgroundfilelist_test = backgroundfilelist[bg_training_threshold:]
    sourcefilelist_train = sourcefilelist[:s_training_threshold]
    sourcefilelist_test = sourcefilelist[s_training_threshold:]
    validatelist = glob.glob('./test_integrations/2*.npy')
    validatelist.sort()
    # print(labels)
    # NOTE: PARALLEL STUFF
    # Parallel(n_jobs=ncores)(delayed(make_spectral_plots)(item, 'train_plots', labels) for item in sourcefilelist_train[:100])
    # Parallel(n_jobs=ncores)(delayed(make_spectral_plots)(item, 'test_plots', labels) for item in sourcefilelist_test)
    # QUESTION: Keep or take out the background lists???
    # NOTE(review): `f` is never closed; consider h5py.File(...) as a context
    # manager so the file is flushed even on error.
    f = h5py.File('sequential_dataset_relabel_justsources.h5', 'w')
    train = f.create_group('train')
    test = f.create_group('test')
    validate = f.create_group('validate')
    a = time.time()
    tostore_spectra = []
    tostore_labels = []
    for i in range(len(sourcefilelist_train)):
        if i % 100 == 0:
            print('{} training samples done in {} s '.format(i, time.time() - a))
        current_file = sourcefilelist_train[i]
        spectra, classifications = store_sequence(current_file, labels)
        tostore_spectra += [spectra]
        tostore_labels += [classifications]
    tostore_spectra = np.concatenate(tostore_spectra, axis=0)
    tostore_labels = np.concatenate(tostore_labels, axis=0)
    # NOTE(review): train datasets are stored uncompressed while the test
    # datasets below use gzip — confirm whether this asymmetry is intended.
    train.create_dataset('measured_spectra', data=tostore_spectra)
    train.create_dataset('labels', data=tostore_labels)
    # for i in range(len(backgroundfilelist_train)):
    #     if i % 100 == 0:
    #         print('{} training samples done in {} s '.format(i, time.time() - a))
    #     current_file = backgroundfilelist_train[i]
    #     store_sequence(current_file, train, labels)
    tostore_spectra = []
    tostore_labels = []
    for i in range(len(sourcefilelist_test)):
        if i % 100 == 0:
            print('{} testing samples done in {} s '.format(i, time.time() - a))
        current_file = sourcefilelist_test[i]
        spectra, classifications = store_sequence(current_file, labels)
        tostore_spectra += [spectra]
        tostore_labels += [classifications]
    tostore_spectra = np.concatenate(tostore_spectra, axis=0)
    tostore_labels = np.concatenate(tostore_labels, axis=0)
    test.create_dataset('measured_spectra', data=tostore_spectra,
                        compression='gzip')
    test.create_dataset('labels', data=tostore_labels, compression='gzip')
    # for i in range(len(backgroundfilelist_test)):
    #     if i % 100 == 0:
    #         print('{} training samples done in {} s '.format(i, time.time() - a))
    #     current_file = backgroundfilelist_test[i]
    #     store_sequence(current_file, train, labels)
    # Parallel(n_jobs=ncores)(delayed(store_sequence)(item, train, labels) for item in sourcefilelist_train)
    # Parallel(n_jobs=ncores)(delayed(store_sequence)(item, test, labels) for item in sourcefilelist_test)
    # for i in range(len(validatelist)):
    #     random_file = validatelist[i]
    #     if i % 100 == 0:
    #         print('{} validation samples done in {} s '.format(i, time.time() - a))
    #     x = np.array(np.load(random_file))
    #     x = x[:, 1:]
    #     head, tail = os.path.split(random_file)
    #     runname = tail[:-4]
    #     tostore_spectra = x
    #     validate.create_dataset(runname, data=tostore_spectra, compression='gzip')
    return


# Runs the full pipeline on import/execution of this module.
main()
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Descriptors essentially contain exactly the information found in a .proto
file, in types that make this information accessible in Python.
"""

from google.net.proto2.python.internal import api_implementation


# When the C++ implementation is active, descriptors may be backed by C++
# objects; this flag selects that mode below.
_USE_C_DESCRIPTORS = False
if api_implementation.Type() == 'cpp':

  import os
  import uuid

  from google.net.proto2.python.internal.cpp import _message
  _USE_C_DESCRIPTORS = getattr(_message, '_USE_C_DESCRIPTORS', False)


class Error(Exception):
  """Base error for this module."""


class TypeTransformationError(Error):
  """Error transforming between python proto type and corresponding C++ type."""


if _USE_C_DESCRIPTORS:

  # With C descriptors, isinstance() checks against the Python descriptor
  # classes must also accept the corresponding C++-backed descriptor objects.
  class DescriptorMetaclass(type):
    def __instancecheck__(cls, obj):
      if super(DescriptorMetaclass, cls).__instancecheck__(obj):
        return True
      if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
        return True
      return False
else:

  DescriptorMetaclass = type


class DescriptorBase(object):

  """Descriptors base class.

  This class is the base of all descriptor classes. It provides common options
  related functionality.

  Attributes:
    has_options: True if the descriptor has non-default options.  Usually it
        is not necessary to read this -- just call GetOptions() which will
        happily return the default instance.  However, it's sometimes useful
        for efficiency, and also useful inside the protobuf implementation to
        avoid some bootstrapping issues.
  """

  # Python 2 metaclass hook; see DescriptorMetaclass above.
  __metaclass__ = DescriptorMetaclass
  if _USE_C_DESCRIPTORS:

    # Overridden in each concrete subclass with the matching C descriptor
    # class; the default () makes the isinstance() check above a no-op.
    _C_DESCRIPTOR_CLASS = ()

  def __init__(self, options, options_class_name):
    """Initialize the descriptor given its options message and the name of the
    class of the options message. The name of the class is required in case
    the options message is None and has to be created.
    """
    self._options = options
    self._options_class_name = options_class_name

    self.has_options = options is not None

  def _SetOptions(self, options, options_class_name):
    """Sets the descriptor's options

    This function is used in generated proto2 files to update descriptor
    options. It must not be used outside proto2.
    """
    self._options = options
    self._options_class_name = options_class_name

    self.has_options = options is not None

  def GetOptions(self):
    """Retrieves descriptor options.

    This method returns the options set or creates the default options for the
    descriptor.
    """
    if self._options:
      return self._options
    # Imported here (not at module level) to avoid a bootstrapping cycle:
    # descriptor_pb2 itself is built from descriptors.
    from google.net.proto2.proto import descriptor_pb2
    try:
      options_class = getattr(descriptor_pb2, self._options_class_name)
    except AttributeError:
      raise RuntimeError('Unknown options class name %s!' %
                         (self._options_class_name))
    self._options = options_class()
    return self._options


class _NestedDescriptorBase(DescriptorBase):
  """Common class for descriptors that can be nested."""

  def __init__(self, options, options_class_name, name, full_name,
               file, containing_type, serialized_start=None,
               serialized_end=None):
    """Constructor.

    Args:
      options: Protocol message options or None
        to use default message options.
      options_class_name: (str) The class name of the above options.

      name: (str) Name of this protocol message type.
      full_name: (str) Fully-qualified name of this protocol message type,
        which will include protocol "package" name and the name of any
        enclosing types.

      file: (FileDescriptor) Reference to file info.
      containing_type: if provided, this is a nested descriptor, with this
        descriptor as parent, otherwise None.
      serialized_start: The start index (inclusive) in block in the
        file.serialized_pb that describes this descriptor.
      serialized_end: The end index (exclusive) in block in the
        file.serialized_pb that describes this descriptor.
    """
    super(_NestedDescriptorBase, self).__init__(
        options, options_class_name)

    self.name = name

    self.full_name = full_name
    self.file = file
    self.containing_type = containing_type

    self._serialized_start = serialized_start
    self._serialized_end = serialized_end

  def GetTopLevelContainingType(self):
    """Returns the root if this is a nested type, or itself if its the root."""
    desc = self
    while desc.containing_type is not None:
      desc = desc.containing_type
    return desc

  def CopyToProto(self, proto):
    """Copies this to the matching proto in descriptor_pb2.

    Args:
      proto: An empty proto instance from descriptor_pb2.

    Raises:
      Error: If self couldnt be serialized, due to to few constructor
        arguments.
    """
    if (self.file is not None and
        self._serialized_start is not None and
        self._serialized_end is not None):
      # Re-parse this descriptor's slice of the file's serialized form rather
      # than re-serializing the Python object graph.
      proto.ParseFromString(self.file.serialized_pb[
          self._serialized_start:self._serialized_end])
    else:
      raise Error('Descriptor does not contain serialization.')


class Descriptor(_NestedDescriptorBase):

  """Descriptor for a protocol message type.

  A Descriptor instance has the following attributes:

    name: (str) Name of this protocol message type.
    full_name: (str) Fully-qualified name of this protocol message type,
      which will include protocol "package" name and the name of any
      enclosing types.

    containing_type: (Descriptor) Reference to the descriptor of the
      type containing us, or None if this is top-level.

    fields: (list of FieldDescriptors) Field descriptors for all
      fields in this type.
    fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
      objects as in |fields|, but indexed by "number" attribute in each
      FieldDescriptor.
    fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
      objects as in |fields|, but indexed by "name" attribute in each
      FieldDescriptor.

    nested_types: (list of Descriptors) Descriptor references
      for all protocol message types nested within this one.
    nested_types_by_name: (dict str -> Descriptor) Same Descriptor
      objects as in |nested_types|, but indexed by "name" attribute
      in each Descriptor.

    enum_types: (list of EnumDescriptors) EnumDescriptor references
      for all enums contained within this type.
    enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor
      objects as in |enum_types|, but indexed by "name" attribute
      in each EnumDescriptor.
    enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
      from enum value name to EnumValueDescriptor for that value.

    extensions: (list of FieldDescriptor) All extensions defined directly
      within this message type (NOT within a nested type).
    extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor
      objects as |extensions|, but indexed by "name" attribute of each
      FieldDescriptor.

    is_extendable:  Does this type define any extension ranges?

    options: (descriptor_pb2.MessageOptions) Protocol message options or None
      to use default message options.

    oneofs: (list of OneofDescriptor) The list of descriptors for oneof fields
      in this message.
    oneofs_by_name: (dict str -> OneofDescriptor) Same objects as in |oneofs|,
      but indexed by "name" attribute.

    file: (FileDescriptor) Reference to file descriptor.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.Descriptor

    def __new__(cls, name, full_name, filename, containing_type, fields,
                nested_types, enum_types, extensions, options=None,
                is_extendable=True, extension_ranges=None, oneofs=None,
                file=None, serialized_start=None, serialized_end=None,
                syntax=None):
      # Under C descriptors, generated code does not construct new Python
      # descriptors; the already-registered C++-backed one is returned.
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.Message._GetMessageDescriptor(full_name)

  def __init__(self, name, full_name, filename, containing_type, fields,
               nested_types, enum_types, extensions, options=None,
               is_extendable=True, extension_ranges=None, oneofs=None,
               file=None, serialized_start=None, serialized_end=None,
               syntax=None):
    """Arguments to __init__() are as described in the description
    of Descriptor fields above.

    Note that filename is an obsolete argument, that is not used anymore.
    Please use file.name to access this as an attribute.
    """
    super(Descriptor, self).__init__(
        options, 'MessageOptions', name, full_name, file,
        containing_type, serialized_start=serialized_start,
        serialized_end=serialized_end)

    # We have fields in addition to fields_by_name and fields_by_number,
    # so that we have a mostly-unique set of field names per message.
    self.fields = fields
    for field in self.fields:
      field.containing_type = self
    self.fields_by_number = dict((f.number, f) for f in fields)
    self.fields_by_name = dict((f.name, f) for f in fields)

    self.nested_types = nested_types
    for nested_type in nested_types:
      nested_type.containing_type = self
    self.nested_types_by_name = dict((t.name, t) for t in nested_types)

    self.enum_types = enum_types
    for enum_type in self.enum_types:
      enum_type.containing_type = self
    self.enum_types_by_name = dict((t.name, t) for t in enum_types)
    self.enum_values_by_name = dict(
        (v.name, v) for t in enum_types for v in t.values)

    self.extensions = extensions
    for extension in self.extensions:
      extension.extension_scope = self
    self.extensions_by_name = dict((f.name, f) for f in extensions)
    self.is_extendable = is_extendable
    self.extension_ranges = extension_ranges
    self.oneofs = oneofs if oneofs is not None else []
    self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
    for oneof in self.oneofs:
      oneof.containing_type = self
    self.syntax = syntax or "proto2"

  def EnumValueName(self, enum, value):
    """Returns the string name of an enum value.

    This is just a small helper method to simplify a common operation.

    Args:
      enum: string name of the Enum.
      value: int, value of the enum.

    Returns:
      string name of the enum value.

    Raises:
      KeyError if either the Enum doesn't exist or the value is not a valid
        value for the enum.
    """
    return self.enum_types_by_name[enum].values_by_number[value].name

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.DescriptorProto.

    Args:
      proto: An empty descriptor_pb2.DescriptorProto.
    """
    super(Descriptor, self).CopyToProto(proto)


class FieldDescriptor(DescriptorBase):

  """Descriptor for a single field in a .proto file.

  A FieldDescriptor instance has the following attributes:

    name: (str) Name of this field, exactly as it appears in .proto.
    full_name: (str) Name of this field, including containing scope.  This is
      particularly relevant for extensions.
    index: (int) Dense, 0-indexed index giving the order that this
      field textually appears within its message in the .proto file.
    number: (int) Tag number declared for this field in the .proto file.

    type: (One of the TYPE_* constants below) Declared type.
    cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
      represent this field.

    label: (One of the LABEL_* constants below) Tells whether this
      field is optional, required, or repeated.
    has_default_value: (bool) True if this field has a default value defined,
      otherwise false.
    default_value: (Varies) Default value of this field.  Only
      meaningful for non-repeated scalar fields.  Repeated fields
      should always set this to [], and non-repeated composite
      fields should always set this to None.

    containing_type: (Descriptor) Descriptor of the protocol message
      type that contains this field.  Set by the Descriptor constructor
      if we're passed into one.
      Somewhat confusingly, for extension fields, this is the
      descriptor of the EXTENDED message, not the descriptor
      of the message containing this field.  (See is_extension and
      extension_scope below).
    message_type: (Descriptor) If a composite field, a descriptor
      of the message type contained in this field.  Otherwise, this is None.
    enum_type: (EnumDescriptor) If this field contains an enum, a
      descriptor of that enum.  Otherwise, this is None.

    is_extension: True iff this describes an extension field.
    extension_scope: (Descriptor) Only meaningful if is_extension is True.
      Gives the message that immediately contains this extension field.
      Will be None iff we're a top-level (file-level) extension field.

    options: (descriptor_pb2.FieldOptions) Protocol message field options or
      None to use default field options.

    containing_oneof: (OneofDescriptor) If the field is a member of a oneof
      union, contains its descriptor. Otherwise, None.
  """

  # Wire/declared types, mirroring descriptor.proto's FieldDescriptorProto.Type.
  TYPE_DOUBLE = 1
  TYPE_FLOAT = 2
  TYPE_INT64 = 3
  TYPE_UINT64 = 4
  TYPE_INT32 = 5
  TYPE_FIXED64 = 6
  TYPE_FIXED32 = 7
  TYPE_BOOL = 8
  TYPE_STRING = 9
  TYPE_GROUP = 10
  TYPE_MESSAGE = 11
  TYPE_BYTES = 12
  TYPE_UINT32 = 13
  TYPE_ENUM = 14
  TYPE_SFIXED32 = 15
  TYPE_SFIXED64 = 16
  TYPE_SINT32 = 17
  TYPE_SINT64 = 18
  MAX_TYPE = 18

  # In-memory representation categories used by the C++ implementation.
  CPPTYPE_INT32 = 1
  CPPTYPE_INT64 = 2
  CPPTYPE_UINT32 = 3
  CPPTYPE_UINT64 = 4
  CPPTYPE_DOUBLE = 5
  CPPTYPE_FLOAT = 6
  CPPTYPE_BOOL = 7
  CPPTYPE_ENUM = 8
  CPPTYPE_STRING = 9
  CPPTYPE_MESSAGE = 10
  MAX_CPPTYPE = 10

  _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
      TYPE_DOUBLE: CPPTYPE_DOUBLE,
      TYPE_FLOAT: CPPTYPE_FLOAT,
      TYPE_ENUM: CPPTYPE_ENUM,
      TYPE_INT64: CPPTYPE_INT64,
      TYPE_SINT64: CPPTYPE_INT64,
      TYPE_SFIXED64: CPPTYPE_INT64,
      TYPE_UINT64: CPPTYPE_UINT64,
      TYPE_FIXED64: CPPTYPE_UINT64,
      TYPE_INT32: CPPTYPE_INT32,
      TYPE_SFIXED32: CPPTYPE_INT32,
      TYPE_SINT32: CPPTYPE_INT32,
      TYPE_UINT32: CPPTYPE_UINT32,
      TYPE_FIXED32: CPPTYPE_UINT32,
      TYPE_BYTES: CPPTYPE_STRING,
      TYPE_STRING: CPPTYPE_STRING,
      TYPE_BOOL: CPPTYPE_BOOL,
      TYPE_MESSAGE: CPPTYPE_MESSAGE,
      TYPE_GROUP: CPPTYPE_MESSAGE
      }

  # Cardinality labels, mirroring FieldDescriptorProto.Label.
  LABEL_OPTIONAL = 1
  LABEL_REQUIRED = 2
  LABEL_REPEATED = 3
  MAX_LABEL = 3

  # Field number limits from the protobuf wire format.
  MAX_FIELD_NUMBER = (1 << 29) - 1
  FIRST_RESERVED_FIELD_NUMBER = 19000
  LAST_RESERVED_FIELD_NUMBER = 19999

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.FieldDescriptor

    def __new__(cls, name, full_name, index, number, type, cpp_type, label,
                default_value, message_type, enum_type, containing_type,
                is_extension, extension_scope, options=None,
                has_default_value=True, containing_oneof=None):
      # Under C descriptors, return the already-registered C++-backed
      # descriptor instead of building a new Python object.
      _message.Message._CheckCalledFromGeneratedFile()
      if is_extension:
        return _message.Message._GetExtensionDescriptor(full_name)
      else:
        return _message.Message._GetFieldDescriptor(full_name)

  def __init__(self, name, full_name, index, number, type, cpp_type, label,
               default_value, message_type, enum_type, containing_type,
               is_extension, extension_scope, options=None,
               has_default_value=True, containing_oneof=None):
    """The arguments are as described in the description of FieldDescriptor
    attributes above.

    Note that containing_type may be None, and may be set later if necessary
    (to deal with circular references between message types, for example).
    Likewise for extension_scope.
    """
    super(FieldDescriptor, self).__init__(options, 'FieldOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.number = number
    self.type = type
    self.cpp_type = cpp_type
    self.label = label
    self.has_default_value = has_default_value
    self.default_value = default_value
    self.containing_type = containing_type
    self.message_type = message_type
    self.enum_type = enum_type
    self.is_extension = is_extension
    self.extension_scope = extension_scope
    self.containing_oneof = containing_oneof
    if api_implementation.Type() == 'cpp':
      # Keep a handle to the mirror C++ descriptor for the cpp implementation.
      if is_extension:
        self._cdescriptor = (
            _message.Message._GetExtensionDescriptor(full_name))
      else:
        self._cdescriptor = _message.Message._GetFieldDescriptor(full_name)
    else:
      self._cdescriptor = None

  @staticmethod
  def ProtoTypeToCppProtoType(proto_type):
    """Converts from a Python proto type to a C++ Proto Type.
The Python ProtocolBuffer classes specify both the 'Python' datatype and the 'C++' datatype - and they're not the same. This helper method should translate from one to another. Args: proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) Returns: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. Raises: TypeTransformationError: when the Python proto type isn't known. """ try: return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] except KeyError: raise TypeTransformationError('Unknown proto_type: %s' % proto_type) class EnumDescriptor(_NestedDescriptorBase): """Descriptor for an enum defined in a .proto file. An EnumDescriptor instance has the following attributes: name: (str) Name of the enum type. full_name: (str) Full name of the type, including package name and any enclosing type(s). values: (list of EnumValueDescriptors) List of the values in this enum. values_by_name: (dict str -> EnumValueDescriptor) Same as |values|, but indexed by the "name" field of each EnumValueDescriptor. values_by_number: (dict int -> EnumValueDescriptor) Same as |values|, but indexed by the "number" field of each EnumValueDescriptor. containing_type: (Descriptor) Descriptor of the immediate containing type of this enum, or None if this is an enum defined at the top level in a .proto file. Set by Descriptor's constructor if we're passed into one. file: (FileDescriptor) Reference to file descriptor. options: (descriptor_pb2.EnumOptions) Enum options message or None to use default enum options. 
""" if _USE_C_DESCRIPTORS: _C_DESCRIPTOR_CLASS = _message.EnumDescriptor def __new__(cls, name, full_name, filename, values, containing_type=None, options=None, file=None, serialized_start=None, serialized_end=None): _message.Message._CheckCalledFromGeneratedFile() return _message.Message._GetEnumDescriptor(full_name) def __init__(self, name, full_name, filename, values, containing_type=None, options=None, file=None, serialized_start=None, serialized_end=None): """Arguments are as described in the attribute description above. Note that filename is an obsolete argument, that is not used anymore. Please use file.name to access this as an attribute. """ super(EnumDescriptor, self).__init__( options, 'EnumOptions', name, full_name, file, containing_type, serialized_start=serialized_start, serialized_end=serialized_end) self.values = values for value in self.values: value.type = self self.values_by_name = dict((v.name, v) for v in values) self.values_by_number = dict((v.number, v) for v in values) def CopyToProto(self, proto): """Copies this to a descriptor_pb2.EnumDescriptorProto. Args: proto: An empty descriptor_pb2.EnumDescriptorProto. """ super(EnumDescriptor, self).CopyToProto(proto) class EnumValueDescriptor(DescriptorBase): """Descriptor for a single value within an enum. name: (str) Name of this value. index: (int) Dense, 0-indexed index giving the order that this value appears textually within its enum in the .proto file. number: (int) Actual number assigned to this enum value. type: (EnumDescriptor) EnumDescriptor to which this value belongs. Set by EnumDescriptor's constructor if we're passed into one. options: (descriptor_pb2.EnumValueOptions) Enum value options message or None to use default enum value options options. 
""" if _USE_C_DESCRIPTORS: _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor def __new__(cls, name, index, number, type=None, options=None): _message.Message._CheckCalledFromGeneratedFile() return None def __init__(self, name, index, number, type=None, options=None): """Arguments are as described in the attribute description above.""" super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions') self.name = name self.index = index self.number = number self.type = type class OneofDescriptor(object): """Descriptor for a oneof field. name: (str) Name of the oneof field. full_name: (str) Full name of the oneof field, including package name. index: (int) 0-based index giving the order of the oneof field inside its containing type. containing_type: (Descriptor) Descriptor of the protocol message type that contains this field. Set by the Descriptor constructor if we're passed into one. fields: (list of FieldDescriptor) The list of field descriptors this oneof can contain. """ if _USE_C_DESCRIPTORS: _C_DESCRIPTOR_CLASS = _message.OneofDescriptor def __new__(cls, name, full_name, index, containing_type, fields): _message.Message._CheckCalledFromGeneratedFile() return _message.Message._GetOneofDescriptor(full_name) def __init__(self, name, full_name, index, containing_type, fields): """Arguments are as described in the attribute description above.""" self.name = name self.full_name = full_name self.index = index self.containing_type = containing_type self.fields = fields class ServiceDescriptor(_NestedDescriptorBase): """Descriptor for a service. name: (str) Name of the service. full_name: (str) Full name of the service, including package name. index: (int) 0-indexed index giving the order that this services definition appears withing the .proto file. methods: (list of MethodDescriptor) List of methods provided by this service. options: (descriptor_pb2.ServiceOptions) Service options message or None to use default service options. 
file: (FileDescriptor) Reference to file info. """ def __init__(self, name, full_name, index, methods, options=None, file=None, serialized_start=None, serialized_end=None): super(ServiceDescriptor, self).__init__( options, 'ServiceOptions', name, full_name, file, None, serialized_start=serialized_start, serialized_end=serialized_end) self.index = index self.methods = methods for method in self.methods: method.containing_service = self def FindMethodByName(self, name): """Searches for the specified method, and returns its descriptor.""" for method in self.methods: if name == method.name: return method return None def CopyToProto(self, proto): """Copies this to a descriptor_pb2.ServiceDescriptorProto. Args: proto: An empty descriptor_pb2.ServiceDescriptorProto. """ super(ServiceDescriptor, self).CopyToProto(proto) class MethodDescriptor(DescriptorBase): """Descriptor for a method in a service. name: (str) Name of the method within the service. full_name: (str) Full name of method. index: (int) 0-indexed index of the method inside the service. containing_service: (ServiceDescriptor) The service that contains this method. input_type: The descriptor of the message that this method accepts. output_type: The descriptor of the message that this method returns. options: (descriptor_pb2.MethodOptions) Method options message or None to use default method options. """ def __init__(self, name, full_name, index, containing_service, input_type, output_type, options=None): """The arguments are as described in the description of MethodDescriptor attributes above. Note that containing_service may be None, and may be set later if necessary. """ super(MethodDescriptor, self).__init__(options, 'MethodOptions') self.name = name self.full_name = full_name self.index = index self.containing_service = containing_service self.input_type = input_type self.output_type = output_type class FileDescriptor(DescriptorBase): """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. 
Note that enum_types_by_name, extensions_by_name, and dependencies fields are only set by the message_factory module, and not by the generated proto code. name: name of file, relative to root of source tree. package: name of the package syntax: string indicating syntax of the file (can be "proto2" or "proto3") serialized_pb: (str) Byte string of serialized descriptor_pb2.FileDescriptorProto. dependencies: List of other FileDescriptors this FileDescriptor depends on. message_types_by_name: Dict of message names of their descriptors. enum_types_by_name: Dict of enum names and their descriptors. extensions_by_name: Dict of extension names and their descriptors. """ if _USE_C_DESCRIPTORS: _C_DESCRIPTOR_CLASS = _message.FileDescriptor def __new__(cls, name, package, options=None, serialized_pb=None, dependencies=None, syntax=None): if serialized_pb: return _message.Message._BuildFile(serialized_pb) else: return super(FileDescriptor, cls).__new__(cls) def __init__(self, name, package, options=None, serialized_pb=None, dependencies=None, syntax=None): """Constructor.""" super(FileDescriptor, self).__init__(options, 'FileOptions') self.message_types_by_name = {} self.name = name self.package = package self.syntax = syntax or "proto2" self.serialized_pb = serialized_pb self.enum_types_by_name = {} self.extensions_by_name = {} self.dependencies = (dependencies or []) if (api_implementation.Type() == 'cpp' and self.serialized_pb is not None): _message.Message._BuildFile(self.serialized_pb) def CopyToProto(self, proto): """Copies this to a descriptor_pb2.FileDescriptorProto. Args: proto: An empty descriptor_pb2.FileDescriptorProto. """ proto.ParseFromString(self.serialized_pb) def _ParseOptions(message, string): """Parses serialized options. This helper function is used to parse serialized options in generated proto2 files. It must not be used outside proto2. 
""" message.ParseFromString(string) return message def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, syntax=None): """Make a protobuf Descriptor given a DescriptorProto protobuf. Handles nested descriptors. Note that this is limited to the scope of defining a message inside of another message. Composite fields can currently only be resolved if the message is defined in the same scope as the field. Args: desc_proto: The descriptor_pb2.DescriptorProto protobuf message. package: Optional package name for the new message Descriptor (string). build_file_if_cpp: Update the C++ descriptor pool if api matches. Set to False on recursion, so no duplicates are created. syntax: The syntax/semantics that should be used. Set to "proto3" to get proto3 field presence semantics. Returns: A Descriptor for protobuf messages. """ if api_implementation.Type() == 'cpp' and build_file_if_cpp: from google.net.proto2.proto import descriptor_pb2 file_descriptor_proto = descriptor_pb2.FileDescriptorProto() file_descriptor_proto.message_type.add().MergeFrom(desc_proto) proto_name = str(uuid.uuid4()) if package: file_descriptor_proto.name = os.path.join(package.replace('.', '/'), proto_name + '.proto') file_descriptor_proto.package = package else: file_descriptor_proto.name = proto_name + '.proto' result = _message.Message._BuildFile( file_descriptor_proto.SerializeToString()) if _USE_C_DESCRIPTORS: return result.message_types_by_name[desc_proto.name] full_message_name = [desc_proto.name] if package: full_message_name.insert(0, package) enum_types = {} for enum_proto in desc_proto.enum_type: full_name = '.'.join(full_message_name + [enum_proto.name]) enum_desc = EnumDescriptor( enum_proto.name, full_name, None, [ EnumValueDescriptor(enum_val.name, ii, enum_val.number) for ii, enum_val in enumerate(enum_proto.value)]) enum_types[full_name] = enum_desc nested_types = {} for nested_proto in desc_proto.nested_type: full_name = '.'.join(full_message_name + [nested_proto.name]) 
nested_desc = MakeDescriptor(nested_proto, package='.'.join(full_message_name), build_file_if_cpp=False, syntax=syntax) nested_types[full_name] = nested_desc fields = [] for field_proto in desc_proto.field: full_name = '.'.join(full_message_name + [field_proto.name]) enum_desc = None nested_desc = None if field_proto.HasField('type_name'): type_name = field_proto.type_name full_type_name = '.'.join(full_message_name + [type_name[type_name.rfind('.')+1:]]) if full_type_name in nested_types: nested_desc = nested_types[full_type_name] elif full_type_name in enum_types: enum_desc = enum_types[full_type_name] field = FieldDescriptor( field_proto.name, full_name, field_proto.number - 1, field_proto.number, field_proto.type, FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type), field_proto.label, None, nested_desc, enum_desc, None, False, None, options=field_proto.options, has_default_value=False) fields.append(field) desc_name = '.'.join(full_message_name) return Descriptor(desc_proto.name, desc_name, None, None, fields, nested_types.values(), enum_types.values(), [], options=desc_proto.options)
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import time
import unittest

from nova.openstack.common.log import logging
from nova.tests import fake_network
from nova.tests.integrated.api import client
from nova.tests.integrated import integrated_helpers
import nova.virt.fake


LOG = logging.getLogger(__name__)


class ServersTest(integrated_helpers._IntegratedTestBase):

    def _wait_for_state_change(self, server, from_status):
        # Poll the API (up to ~5s) until the server leaves from_status.
        for i in xrange(0, 50):
            server = self.api.get_server(server['id'])
            if server['status'] != from_status:
                break
            time.sleep(.1)

        return server

    def _restart_compute_service(self, *args, **kwargs):
        """restart compute service. NOTE: fake driver forgets all
        instances."""
        self.compute.kill()
        self.compute = self.start_service('compute', *args, **kwargs)

    def test_get_servers(self):
        """Simple check that listing servers works."""
        servers = self.api.get_servers()
        for server in servers:
            LOG.debug("server: %s" % server)

    def test_create_server_with_error(self):
        """Create a server which will enter error state."""
        fake_network.set_stub_network_methods(self.stubs)

        def throw_error(*_):
            raise Exception()

        self.stubs.Set(nova.virt.fake.FakeDriver, 'spawn', throw_error)

        server = self._build_minimal_create_server_request()
        created_server = self.api.post_server({"server": server})
        created_server_id = created_server['id']

        found_server = self.api.get_server(created_server_id)
        self.assertEqual(created_server_id, found_server['id'])

        found_server = self._wait_for_state_change(found_server, 'BUILD')

        self.assertEqual('ERROR', found_server['status'])
        self._delete_server(created_server_id)

    def test_create_and_delete_server(self):
        """Creates and deletes a server."""
        fake_network.set_stub_network_methods(self.stubs)

        # Create server
        # Build the server data gradually, checking errors along the way
        server = {}
        good_server = self._build_minimal_create_server_request()

        post = {'server': server}

        # Without an imageRef, this throws 500.
        # TODO(justinsb): Check whatever the spec says should be thrown here
        self.assertRaises(client.OpenStackApiException,
                          self.api.post_server, post)

        # With an invalid imageRef, this throws 500.
        server['imageRef'] = self.get_invalid_image()
        # TODO(justinsb): Check whatever the spec says should be thrown here
        self.assertRaises(client.OpenStackApiException,
                          self.api.post_server, post)

        # Add a valid imageRef
        server['imageRef'] = good_server.get('imageRef')

        # Without flavorRef, this throws 500
        # TODO(justinsb): Check whatever the spec says should be thrown here
        self.assertRaises(client.OpenStackApiException,
                          self.api.post_server, post)

        server['flavorRef'] = good_server.get('flavorRef')

        # Without a name, this throws 500
        # TODO(justinsb): Check whatever the spec says should be thrown here
        self.assertRaises(client.OpenStackApiException,
                          self.api.post_server, post)

        # Set a valid server name
        server['name'] = good_server['name']

        created_server = self.api.post_server(post)
        LOG.debug("created_server: %s" % created_server)
        self.assertTrue(created_server['id'])
        created_server_id = created_server['id']

        # Check it's there
        found_server = self.api.get_server(created_server_id)
        self.assertEqual(created_server_id, found_server['id'])

        # It should also be in the all-servers list
        servers = self.api.get_servers()
        server_ids = [server['id'] for server in servers]
        self.assertTrue(created_server_id in server_ids)

        found_server = self._wait_for_state_change(found_server, 'BUILD')

        # It should be available...
        # TODO(justinsb): Mock doesn't yet do this...
        self.assertEqual('ACTIVE', found_server['status'])
        servers = self.api.get_servers(detail=True)
        for server in servers:
            self.assertTrue("image" in server)
            self.assertTrue("flavor" in server)

        self._delete_server(created_server_id)

    def test_deferred_delete(self):
        """Creates, deletes and waits for server to be reclaimed."""
        self.flags(reclaim_instance_interval=1)
        fake_network.set_stub_network_methods(self.stubs)

        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(
            periodic_interval=0.3, periodic_fuzzy_delay=0)

        # Create server
        server = self._build_minimal_create_server_request()

        created_server = self.api.post_server({'server': server})
        LOG.debug("created_server: %s" % created_server)
        self.assertTrue(created_server['id'])
        created_server_id = created_server['id']

        # Wait for it to finish being created
        found_server = self._wait_for_state_change(created_server, 'BUILD')

        # It should be available...
        self.assertEqual('ACTIVE', found_server['status'])

        # Cannot restore unless instance is deleted
        self.assertRaises(client.OpenStackApiException,
                          self.api.post_server_action, created_server_id,
                          {'restore': {}})

        # Cannot forceDelete unless instance is deleted
        self.assertRaises(client.OpenStackApiException,
                          self.api.post_server_action, created_server_id,
                          {'forceDelete': {}})

        # Delete the server
        self.api.delete_server(created_server_id)

        # Wait for queued deletion
        found_server = self._wait_for_state_change(found_server, 'ACTIVE')
        self.assertEqual('DELETED', found_server['status'])

        # Wait for real deletion
        self._wait_for_deletion(created_server_id)

    def test_deferred_delete_restore(self):
        """Creates, deletes and restores a server."""
        self.flags(reclaim_instance_interval=1)
        fake_network.set_stub_network_methods(self.stubs)

        # Create server
        server = self._build_minimal_create_server_request()

        created_server = self.api.post_server({'server': server})
        LOG.debug("created_server: %s" % created_server)
        self.assertTrue(created_server['id'])
        created_server_id = created_server['id']

        # Wait for it to finish being created
        found_server = self._wait_for_state_change(created_server, 'BUILD')

        # It should be available...
        self.assertEqual('ACTIVE', found_server['status'])

        # Delete the server
        self.api.delete_server(created_server_id)

        # Wait for queued deletion
        found_server = self._wait_for_state_change(found_server, 'ACTIVE')
        self.assertEqual('DELETED', found_server['status'])

        # Restore server
        self.api.post_server_action(created_server_id, {'restore': {}})

        # Wait for server to become active again
        found_server = self._wait_for_state_change(found_server, 'DELETED')
        self.assertEqual('ACTIVE', found_server['status'])

    def test_deferred_delete_force(self):
        """Creates, deletes and force deletes a server."""
        self.flags(reclaim_instance_interval=1)
        fake_network.set_stub_network_methods(self.stubs)

        # Create server
        server = self._build_minimal_create_server_request()

        created_server = self.api.post_server({'server': server})
        LOG.debug("created_server: %s" % created_server)
        self.assertTrue(created_server['id'])
        created_server_id = created_server['id']

        # Wait for it to finish being created
        found_server = self._wait_for_state_change(created_server, 'BUILD')

        # It should be available...
        self.assertEqual('ACTIVE', found_server['status'])

        # Delete the server
        self.api.delete_server(created_server_id)

        # Wait for queued deletion
        found_server = self._wait_for_state_change(found_server, 'ACTIVE')
        self.assertEqual('DELETED', found_server['status'])

        # Force delete server
        self.api.post_server_action(created_server_id, {'forceDelete': {}})

        # Wait for real deletion
        self._wait_for_deletion(created_server_id)

    def _wait_for_deletion(self, server_id):
        # Wait (briefly) for deletion
        for _retries in range(50):
            try:
                found_server = self.api.get_server(server_id)
            except client.OpenStackApiNotFoundException:
                found_server = None
                LOG.debug("Got 404, proceeding")
                break

            LOG.debug("Found_server=%s" % found_server)

            # TODO(justinsb): Mock doesn't yet do accurate state changes
            #if found_server['status'] != 'deleting':
            #    break
            time.sleep(.1)

        # Should be gone
        self.assertFalse(found_server)

    def _delete_server(self, server_id):
        # Delete the server
        self.api.delete_server(server_id)
        self._wait_for_deletion(server_id)

    def test_create_server_with_metadata(self):
        """Creates a server with metadata."""
        fake_network.set_stub_network_methods(self.stubs)

        # Build the server data gradually, checking errors along the way
        server = self._build_minimal_create_server_request()

        metadata = {}
        for i in range(30):
            metadata['key_%s' % i] = 'value_%s' % i

        server['metadata'] = metadata

        post = {'server': server}
        created_server = self.api.post_server(post)
        LOG.debug("created_server: %s" % created_server)
        self.assertTrue(created_server['id'])
        created_server_id = created_server['id']

        found_server = self.api.get_server(created_server_id)
        self.assertEqual(created_server_id, found_server['id'])
        self.assertEqual(metadata, found_server.get('metadata'))

        # The server should also be in the all-servers details list
        servers = self.api.get_servers(detail=True)
        server_map = dict((server['id'], server) for server in servers)
        found_server = server_map.get(created_server_id)
        self.assertTrue(found_server)
        # Details do include metadata
        self.assertEqual(metadata, found_server.get('metadata'))

        # The server should also be in the all-servers summary list
        servers = self.api.get_servers(detail=False)
        server_map = dict((server['id'], server) for server in servers)
        found_server = server_map.get(created_server_id)
        self.assertTrue(found_server)
        # Summary should not include metadata
        self.assertFalse(found_server.get('metadata'))

        # Cleanup
        self._delete_server(created_server_id)

    def test_create_and_rebuild_server(self):
        """Rebuild a server with metadata."""
        fake_network.set_stub_network_methods(self.stubs)

        # create a server with initially has no metadata
        server = self._build_minimal_create_server_request()
        server_post = {'server': server}

        metadata = {}
        for i in range(30):
            metadata['key_%s' % i] = 'value_%s' % i

        server_post['server']['metadata'] = metadata

        created_server = self.api.post_server(server_post)
        LOG.debug("created_server: %s" % created_server)
        self.assertTrue(created_server['id'])
        created_server_id = created_server['id']

        created_server = self._wait_for_state_change(created_server, 'BUILD')

        # rebuild the server with metadata and other server attributes
        post = {}
        post['rebuild'] = {
            "imageRef": "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6",
            "name": "blah",
            "accessIPv4": "172.19.0.2",
            "accessIPv6": "fe80::2",
            "metadata": {'some': 'thing'},
        }

        self.api.post_server_action(created_server_id, post)
        LOG.debug("rebuilt server: %s" % created_server)
        self.assertTrue(created_server['id'])

        found_server = self.api.get_server(created_server_id)
        self.assertEqual(created_server_id, found_server['id'])
        self.assertEqual({'some': 'thing'}, found_server.get('metadata'))
        self.assertEqual('blah', found_server.get('name'))
        self.assertEqual(post['rebuild']['imageRef'],
                         found_server.get('image')['id'])
        self.assertEqual('172.19.0.2', found_server['accessIPv4'])
        self.assertEqual('fe80::2', found_server['accessIPv6'])

        # rebuild the server with empty metadata and nothing else
        post = {}
        post['rebuild'] = {
            "imageRef": "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6",
            "metadata": {},
        }

        self.api.post_server_action(created_server_id, post)
        LOG.debug("rebuilt server: %s" % created_server)
        self.assertTrue(created_server['id'])

        found_server = self.api.get_server(created_server_id)
        self.assertEqual(created_server_id, found_server['id'])
        self.assertEqual({}, found_server.get('metadata'))
        self.assertEqual('blah', found_server.get('name'))
        self.assertEqual(post['rebuild']['imageRef'],
                         found_server.get('image')['id'])
        self.assertEqual('172.19.0.2', found_server['accessIPv4'])
        self.assertEqual('fe80::2', found_server['accessIPv6'])

        # Cleanup
        self._delete_server(created_server_id)

    def test_rename_server(self):
        """Test building and renaming a server."""
        fake_network.set_stub_network_methods(self.stubs)

        # Create a server
        server = self._build_minimal_create_server_request()
        created_server = self.api.post_server({'server': server})
        LOG.debug("created_server: %s" % created_server)
        server_id = created_server['id']
        self.assertTrue(server_id)

        # Rename the server to 'new-name'
        self.api.put_server(server_id, {'server': {'name': 'new-name'}})

        # Check the name of the server
        created_server = self.api.get_server(server_id)
        self.assertEqual(created_server['name'], 'new-name')

        # Cleanup
        self._delete_server(server_id)

    def test_create_multiple_servers(self):
        """Creates multiple servers and checks for reservation_id"""

        # Create 2 servers, setting 'return_reservation_id, which should
        # return a reservation_id
        server = self._build_minimal_create_server_request()
        server['min_count'] = 2
        server['return_reservation_id'] = True
        post = {'server': server}
        response = self.api.post_server(post)
        self.assertIn('reservation_id', response)
        reservation_id = response['reservation_id']
        self.assertNotIn(reservation_id, ['', None])

        # Create 1 more server, which should not return a reservation_id
        server = self._build_minimal_create_server_request()
        post = {'server': server}
        created_server = self.api.post_server(post)
        self.assertTrue(created_server['id'])
        created_server_id = created_server['id']

        # lookup servers created by the first request.
        servers = self.api.get_servers(detail=True,
                search_opts={'reservation_id': reservation_id})
        server_map = dict((server['id'], server) for server in servers)
        found_server = server_map.get(created_server_id)
        # The server from the 2nd request should not be there.
        self.assertEqual(found_server, None)
        # Should have found 2 servers.
        self.assertEqual(len(server_map), 2)

        # Cleanup
        self._delete_server(created_server_id)
        for server_id in server_map.iterkeys():
            self._delete_server(server_id)


if __name__ == "__main__":
    unittest.main()
from contextlib import suppress
from ctypes import byref, c_double

from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope
from django.contrib.gis.gdal.error import (
    GDALException, OGRIndexError, SRSException,
)
from django.contrib.gis.gdal.feature import Feature
from django.contrib.gis.gdal.field import OGRFieldTypes
from django.contrib.gis.gdal.geometries import OGRGeometry
from django.contrib.gis.gdal.geomtype import OGRGeomType
from django.contrib.gis.gdal.prototypes import (
    ds as capi, geom as geom_api, srs as srs_api,
)
from django.contrib.gis.gdal.srs import SpatialReference
from django.utils.encoding import force_bytes, force_text

# For more information, see the OGR C API source code:
#  http://www.gdal.org/ogr__api_8h.html
#
# The OGR_L_* routines are relevant here.


class Layer(GDALBase):
    "A class that wraps an OGR Layer, needs to be instantiated from a DataSource object."

    def __init__(self, layer_ptr, ds):
        """
        Initialize on an OGR C pointer to the Layer and the `DataSource` object
        that owns this layer.  The `DataSource` object is required so that a
        reference to it is kept with this Layer.  This prevents garbage
        collection of the `DataSource` while this Layer is still active.
        """
        if not layer_ptr:
            raise GDALException('Cannot create Layer, invalid pointer given')
        self.ptr = layer_ptr
        self._ds = ds
        # Layer definition handle; used by the field/geometry introspection
        # properties below.
        self._ldefn = capi.get_layer_defn(self._ptr)
        # Does the Layer support random reading?
        self._random_read = self.test_capability(b'RandomRead')

    def __getitem__(self, index):
        "Get the Feature at the specified index."
        if isinstance(index, int):
            # An integer index was given -- we cannot do a check based on the
            # number of features because the beginning and ending feature IDs
            # are not guaranteed to be 0 and len(layer)-1, respectively.
            if index < 0:
                raise OGRIndexError('Negative indices are not allowed on OGR Layers.')
            return self._make_feature(index)
        elif isinstance(index, slice):
            # A slice was given
            start, stop, stride = index.indices(self.num_feat)
            return [self._make_feature(fid) for fid in range(start, stop, stride)]
        else:
            raise TypeError('Integers and slices may only be used when indexing OGR Layers.')

    def __iter__(self):
        "Iterate over each Feature in the Layer."
        # ResetReading() must be called before iteration is to begin.
        capi.reset_reading(self._ptr)
        for i in range(self.num_feat):
            yield Feature(capi.get_next_feature(self._ptr), self)

    def __len__(self):
        "The length is the number of features."
        return self.num_feat

    def __str__(self):
        "The string name of the layer."
        return self.name

    def _make_feature(self, feat_id):
        """
        Helper routine for __getitem__ that constructs a Feature from the given
        Feature ID.  If the OGR Layer does not support random-access reading,
        then each feature of the layer will be incremented through until the
        a Feature is found matching the given feature ID.
        """
        if self._random_read:
            # If the Layer supports random reading, return.
            with suppress(GDALException):
                return Feature(capi.get_feature(self.ptr, feat_id), self)
        else:
            # Random access isn't supported, have to increment through
            # each feature until the given feature ID is encountered.
            for feat in self:
                if feat.fid == feat_id:
                    return feat
        # Should have returned a Feature, raise an OGRIndexError.
        raise OGRIndexError('Invalid feature id: %s.' % feat_id)

    # #### Layer properties ####
    @property
    def extent(self):
        "Return the extent (an Envelope) of this layer."
        env = OGREnvelope()
        # Third argument (force=1) makes OGR compute the extent even when it
        # is not cached on the layer.
        capi.get_extent(self.ptr, byref(env), 1)
        return Envelope(env)

    @property
    def name(self):
        "Return the name of this layer in the Data Source."
        name = capi.get_fd_name(self._ldefn)
        return force_text(name, self._ds.encoding, strings_only=True)

    @property
    def num_feat(self, force=1):
        "Return the number of features in the Layer."
        # NOTE: as a property the getter is always invoked with only `self`,
        # so `force` is effectively fixed at its default of 1.
        return capi.get_feature_count(self.ptr, force)

    @property
    def num_fields(self):
        "Return the number of fields in the Layer."
        return capi.get_field_count(self._ldefn)

    @property
    def geom_type(self):
        "Return the geometry type (OGRGeomType) of the Layer."
        return OGRGeomType(capi.get_fd_geom_type(self._ldefn))

    @property
    def srs(self):
        "Return the Spatial Reference used in this Layer."
        try:
            ptr = capi.get_layer_srs(self.ptr)
            # Clone so the SpatialReference owns its own handle, independent
            # of the layer's lifetime.
            return SpatialReference(srs_api.clone_srs(ptr))
        except SRSException:
            return None

    @property
    def fields(self):
        """
        Return a list of string names corresponding to each of the Fields
        available in this Layer.
        """
        return [force_text(capi.get_field_name(capi.get_field_defn(self._ldefn, i)),
                           self._ds.encoding, strings_only=True)
                for i in range(self.num_fields)]

    @property
    def field_types(self):
        """
        Return a list of the types of fields in this Layer.  For example,
        return the list [OFTInteger, OFTReal, OFTString] for an OGR layer that
        has an integer, a floating-point, and string fields.
        """
        return [OGRFieldTypes[capi.get_field_type(capi.get_field_defn(self._ldefn, i))]
                for i in range(self.num_fields)]

    @property
    def field_widths(self):
        "Return a list of the maximum field widths for the features."
        return [capi.get_field_width(capi.get_field_defn(self._ldefn, i))
                for i in range(self.num_fields)]

    @property
    def field_precisions(self):
        "Return the field precisions for the features."
        return [capi.get_field_precision(capi.get_field_defn(self._ldefn, i))
                for i in range(self.num_fields)]

    def _get_spatial_filter(self):
        # Returns a clone of the current spatial filter geometry, or None
        # when no filter is set (the C call raises via GDALException).
        try:
            return OGRGeometry(geom_api.clone_geom(capi.get_spatial_filter(self.ptr)))
        except GDALException:
            return None

    def _set_spatial_filter(self, filter):
        if isinstance(filter, OGRGeometry):
            capi.set_spatial_filter(self.ptr, filter.ptr)
        elif isinstance(filter, (tuple, list)):
            if not len(filter) == 4:
                raise ValueError('Spatial filter list/tuple must have 4 elements.')
            # Map c_double onto params -- if a bad type is passed in it
            # will be caught here.
            xmin, ymin, xmax, ymax = map(c_double, filter)
            capi.set_spatial_filter_rect(self.ptr, xmin, ymin, xmax, ymax)
        elif filter is None:
            capi.set_spatial_filter(self.ptr, None)
        else:
            raise TypeError('Spatial filter must be either an OGRGeometry instance, a 4-tuple, or None.')

    spatial_filter = property(_get_spatial_filter, _set_spatial_filter)

    # #### Layer Methods ####
    def get_fields(self, field_name):
        """
        Return a list containing the given field name for every Feature
        in the Layer.
        """
        if field_name not in self.fields:
            raise GDALException('invalid field name: %s' % field_name)
        return [feat.get(field_name) for feat in self]

    def get_geoms(self, geos=False):
        """
        Return a list containing the OGRGeometry for every Feature in
        the Layer.
        """
        if geos:
            from django.contrib.gis.geos import GEOSGeometry
            return [GEOSGeometry(feat.geom.wkb) for feat in self]
        else:
            return [feat.geom for feat in self]

    def test_capability(self, capability):
        """
        Return a bool indicating whether the this Layer supports the given
        capability (a string).  Valid capability strings include:
          'RandomRead', 'SequentialWrite', 'RandomWrite', 'FastSpatialFilter',
          'FastFeatureCount', 'FastGetExtent', 'CreateField', 'Transactions',
          'DeleteFeature', and 'FastSetNextByIndex'.
        """
        return bool(capi.test_capability(self.ptr, force_bytes(capability)))
import logging import traceback import pkg_resources from gi.repository import Gtk, GdkPixbuf from .devicestatusbar import DeviceStatusBar from .logtreeview import LogTreeView from ..accounting import UserManager, ProjectManager from ..core import ToolWindow, error_message, ToolFrame, question_message from ..devices import Motors, GeniX, TPG201, HaakePhoenix, Pilatus, DeviceConnections from ..diagnostics import ResourceUsage from ..measurement import ScanMeasurement, SingleExposure, TransmissionMeasurement, ScriptMeasurement, CommandHelpDialog from ..setup import EditConfig, SampleEdit, DefineGeometry, Calibration from ..toolframes import ResourceUsageFrame, NextFSN, ShutterBeamstop, AccountingFrame from ..tools import ExposureViewer, CapillaryMeasurement, ScanViewer, MaskEditor, DataReduction, OptimizeGeometry from ...core.commands.command import CommandError from ...core.instrument.instrument import Instrument from ...core.services.interpreter import Interpreter # initialize the logger for the main window level. 
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)


class CollectingHandler(logging.Handler):
    """Singleton logging handler that buffers records emitted before the main
    window (and its own log handler) exists, so they can be replayed later."""

    # Holds the single instance; a second construction raises RuntimeError.
    instance = None

    def __init__(self):
        self.collected = []
        if self.__class__.instance is not None:
            raise RuntimeError('This is a singleton class!')
        super().__init__()
        self.__class__.instance = self

    @classmethod
    def get_default(cls):
        """Return the singleton instance (None if never constructed)."""
        return cls.instance

    def emit(self, record):
        # Just buffer the record; MainWindow.__init__ replays the buffer.
        self.collected.append(record)


class MainWindow(object):
    """Top-level application window: hosts the log views, device status bar,
    tool frames, the command entry wired to the interpreter, and the registry
    of openable tool dialogs."""

    # (menu/tool item suffix, ToolWindow subclass, top-level widget id in the
    #  glade file, glade file name, extra {signal: handler-name} connections)
    toolwindow_registry = [
        ('sampleeditor', SampleEdit, 'samplesetup', 'setup_sampleedit.glade', {}),
        ('definegeometry', DefineGeometry, 'definegeometry', 'setup_definegeometry.glade', {}),
        ('editconfig', EditConfig, 'editconfig', 'setup_editconfig.glade', {}),
        ('calibration', Calibration, 'calibration', 'setup_calibration.glade', {}),
        ('xraysource', GeniX, 'genix', 'devices_genix.glade', {}),
        ('detector', Pilatus, 'pilatus', 'devices_pilatus.glade', {}),
        ('motors', Motors, 'motoroverview', 'devices_motors.glade', {}),
        ('vacgauge', TPG201, 'vacgauge', 'devices_tpg201.glade', {}),
        ('temperaturestage', HaakePhoenix, 'haakephoenix', 'devices_haakephoenix.glade', {}),
        ('connections', DeviceConnections, 'deviceconnections', 'devices_connection.glade', {}),
        ('scanmeasurement', ScanMeasurement, 'scan', 'measurement_scan.glade', {}),
        ('singleexposure', SingleExposure, 'singleexposure', 'measurement_singleexposure.glade', {}),
        ('transmission', TransmissionMeasurement, 'measuretransmission', 'measurement_transmission.glade', {}),
        ('scriptmeasurement', ScriptMeasurement, 'script', 'measurement_script.glade', {}),
        ('maskeditor', MaskEditor, 'maskeditor', 'tools_maskeditor.glade', {}),
        ('imgviewer', ExposureViewer, 'calibration', 'setup_calibration.glade', {}),
        ('viewscans', ScanViewer, 'scanviewer', 'tools_scanviewer.glade', {}),
        ('capillarymeasurement', CapillaryMeasurement, 'capillarymeasurement', 'tools_capillarymeasurement.glade', {}),
        ('datareduction', DataReduction, 'datareduction', 'tools_datareduction.glade', {}),
        ('resourceusage', ResourceUsage, 'resourceusagewindow', 'diagnostics_resourceusage.glade', {}),
        ('commandhelp', CommandHelpDialog, 'commandhelpbrowser', 'help_commandhelpbrowser.glade',
         {'insert': 'on_insert_command'}),
        ('users', UserManager, 'usermanager', 'accounting_usermanager.glade', {}),
        ('projects', ProjectManager, 'projectmanager', 'accounting_projectmanager.glade', {}),
        ('optimizegeometry', OptimizeGeometry, 'optimizegeometry', 'tools_optimizegeometry.glade', {}),
    ]

    class LogHandler(logging.Handler):
        """Log handler that forwards formatted records into the main window's
        log views via writelogline()."""

        def __init__(self, mainwindow):
            super().__init__()
            self.mw = mainwindow

        def emit(self, record):
            message = self.format(record)
            # GLib.idle_add(lambda msg=message, rec=record: self.mw.writelogline(msg, rec) and False)
            self.mw.writelogline(message, record)

    def __init__(self, instrument: Instrument):
        # initialize the main window
        self.builder = Gtk.Builder.new_from_file(
            pkg_resources.resource_filename('cct', 'resource/glade/mainwindow.glade'))
        self.builder.set_application(Gtk.Application.get_default())
        self.widget = self.builder.get_object('mainwindow')
        self.builder.connect_signals(self)
        self.widget.set_show_menubar(True)
        self.widget.connect('delete-event', self.on_delete_event)
        self.widget.set_default_icon_list([GdkPixbuf.Pixbuf.new_from_file_at_size(
            pkg_resources.resource_filename('cct', 'resource/icons/scalable/cctlogo.svg'), sz, sz)
            for sz in [16, 32, 48, 64, 128, 256]])
        self.widget.show_all()
        # Initialize the log textbuffer
        self._logtags = self.builder.get_object('log_texttags')
        self._logbuffer = self.builder.get_object('logbuffer')
        self._logbuffer.create_mark(
            'log_end', self._logbuffer.get_end_iter(), False)
        self._logview = self.builder.get_object('logtext')
        self._logview2 = LogTreeView()
        self.builder.get_object('logviewer_stack').add_titled(self._logview2.widget, 'treelogviewer', 'Log tree')
        # initialize custom log handler for the root logger. This is responsible for printing
        # all log records in the main window.
        self._loghandler = self.LogHandler(self)
        self._loghandler.setLevel(logging.DEBUG)
        logging.root.addHandler(self._loghandler)
        self._loghandler.setFormatter(logging.Formatter(
            '%(asctime)s: %(levelname)s: %(message)s (Origin: %(name)s:%(lineno)d)'))
        # Replay records buffered before this window existed, then retire the
        # collecting handler.
        ch = CollectingHandler.get_default()
        for record in ch.collected:
            self._loghandler.emit(record)
        logging.root.removeHandler(ch)
        del ch.collected
        self._toolwindows = {}
        self._toolwindow_connections = {}
        self.instrument = instrument
        self._instrumentconnections = [
            self.instrument.connect('shutdown', self.on_instrument_shutdown),
            self.instrument.connect('device-connected', lambda i, d: self.set_menu_sensitivity()),
            self.instrument.connect('device-disconnected', lambda i, d, b: self.set_menu_sensitivity()),
        ]
        if self.instrument.online:
            self.instrument.connect_devices()
            logger.debug('Mainwindow: devices connected.')
        self._devicestatus = DeviceStatusBar(self.instrument)
        logger.debug('DeviceStatusBar initialized')
        self.builder.get_object('devicestatus_box').pack_start(self._devicestatus, True, True, 0)
        self._toolframes = {}
        # Tool frames are best-effort: a frame that fails to construct is
        # logged and skipped, the rest of the UI still comes up.
        for framename, cls, gladefile, mainwidget in [
            ('resourceusage', ResourceUsageFrame, 'toolframe_telemetry.glade', 'telemetryframe'),
            ('nextfsn', NextFSN, 'toolframe_nextfsn.glade', 'nextfsnframe'),
            ('shutterbeamstop', ShutterBeamstop, 'toolframe_shutter.glade', 'shutterframe'),
            ('accounting', AccountingFrame, 'toolframe_accounting.glade', 'accountingframe')
        ]:
            try:
                self._toolframes[framename] = cls(gladefile, mainwidget, self.instrument)
                self.builder.get_object('toolbox').pack_end(self._toolframes[framename].widget, False, True, 0)
            except Exception:
                logger.error('Cannot open toolframe ' + framename)
        logger.debug('Initializing toolframes done.')
        self.widget.show_all()
        self.widget.set_title('Credo Control Tool v{}'.format(pkg_resources.get_distribution('cct').version))
        logger.debug('Connecting to interpreter')
        interpreter = self.instrument.services['interpreter']
        self._interpreterconnections = [
            interpreter.connect('cmd-return', self.on_interpreter_cmd_return),
            interpreter.connect('cmd-fail', self.on_interpreter_cmd_fail),
            interpreter.connect('pulse', self.on_interpreter_cmd_pulse),
            interpreter.connect('progress', self.on_interpreter_cmd_progress),
            interpreter.connect('cmd-message', self.on_interpreter_cmd_message),
            interpreter.connect('idle-changed', self.on_interpreter_idle_changed),
        ]
        self._commandhistory = []
        self._historyindex = None
        self.on_change_logviewer(self.builder.get_object('menuitem_advancedlogviewer'))
        self.set_menu_sensitivity()

    def on_change_logviewer(self, checkmenuitem: Gtk.CheckMenuItem):
        """Switch between the tree-based and the plain-text log view."""
        if checkmenuitem.get_active():
            self.builder.get_object('logviewer_stack').set_visible_child_name('treelogviewer')
        else:
            self.builder.get_object('logviewer_stack').set_visible_child_name('textlogviewer')

    def on_command_entry_keyevent(self, entry: Gtk.Entry, event):
        """Handle Up/Down keys in the command entry to walk the history."""
        if event.hardware_keycode == 111:
            # cursor up key
            if self._commandhistory:
                if self._historyindex is None:
                    self._historyindex = len(self._commandhistory)
                self._historyindex = max(0, self._historyindex - 1)
                entry.set_text(self._commandhistory[self._historyindex])
            return True  # inhibit further processing of this key event
        elif event.hardware_keycode == 116:
            # cursor down key
            if self._commandhistory:
                if self._historyindex is None:
                    self._historyindex = -1
                self._historyindex = min(self._historyindex + 1, len(self._commandhistory) - 1)
                entry.set_text(self._commandhistory[self._historyindex])
            return True  # inhibit further processing of this key event
        return False

    def on_interpreter_idle_changed(self, interpreter: Instrument, idle: bool):
        """Enable/disable the command entry and Execute button as the
        interpreter becomes idle/busy."""
        if not idle:
            self.builder.get_object('command_entry').set_sensitive(idle)
            if self.builder.get_object('execute_button').get_label() == 'Execute':
                self.builder.get_object('execute_button').set_sensitive(idle)
        if idle:
            self.builder.get_object('command_entry').set_sensitive(idle)
            self.builder.get_object('execute_button').set_sensitive(idle)

    def on_command_execute(self, button: Gtk.Button):
        """Execute the typed command (button reads 'Execute') or kill the
        running one (button reads 'Stop')."""
        if button.get_label() == 'Execute':
            cmd = self.builder.get_object('command_entry').get_text()
            try:
                self.instrument.services['interpreter'].execute_command(cmd)
            except CommandError as ce:
                error_message(self.widget, 'Cannot execute command', str(ce))
            else:
                button.set_label('Stop')
                # Append to history only if different from the last entry.
                if (not self._commandhistory) or (self._commandhistory and self._commandhistory[-1] != cmd):
                    self._commandhistory.append(self.builder.get_object('command_entry').get_text())
        elif button.get_label() == 'Stop':
            self.instrument.services['interpreter'].kill()
        else:
            raise ValueError(button.get_label())

    # noinspection PyUnusedLocal
    def on_interpreter_cmd_return(self, interpreter: Interpreter, commandname: str, returnvalue: object):
        """Reset the command entry/button state after a command finishes."""
        self.builder.get_object('command_entry').set_sensitive(True)
        self.builder.get_object('command_entry').set_progress_fraction(0)
        self.builder.get_object('command_entry').set_text('')
        self.builder.get_object('command_entry').grab_focus()
        self.builder.get_object('execute_button').set_label('Execute')
        self._historyindex = None
        self.builder.get_object('statusbar').pop(1)

    # noinspection PyUnusedLocal,PyMethodMayBeStatic
    def on_interpreter_cmd_fail(self, interpreter, commandname, exc, tb):
        """Log a failed interpreter command with its traceback."""
        logger.error('Command {} failed: {} {}'.format(commandname, str(exc), tb))

    # noinspection PyUnusedLocal
    def on_interpreter_cmd_message(self, interpreter, commandname, message):
        """Show a command's progress message in the statusbar and the log."""
        self.builder.get_object('statusbar').pop(1)
        self.builder.get_object('statusbar').push(1, message)
        logger.info('Command {} :: {}'.format(commandname, message))

    # noinspection PyUnusedLocal
    def on_interpreter_cmd_pulse(self, interpreter, commandname, message):
        """Pulse the command entry's progress bar (indeterminate progress)."""
        self.builder.get_object('command_entry').progress_pulse()
        self.builder.get_object('statusbar').pop(1)
        self.builder.get_object('statusbar').push(1, message)

    # noinspection PyUnusedLocal
    def on_interpreter_cmd_progress(self, interpreter, commandname, message, fraction):
        """Update the command entry's progress bar with a known fraction."""
        self.builder.get_object('command_entry').set_progress_fraction(fraction)
        self.builder.get_object('statusbar').pop(1)
        self.builder.get_object('statusbar').push(1, message)

    def on_delete_event(self, window, event):
        """Window-manager close request: route through on_quit()."""
        return self.on_quit()

    def writelogline(self, message: str, record: logging.LogRecord):
        """Append a formatted log record to the text view (tagged by severity),
        mirror INFO+ records to the statusbar, and feed the tree log view."""
        assert hasattr(record, 'message')
        if record.levelno >= logging.CRITICAL:
            tag = self._logtags.lookup('critical')
        elif record.levelno >= logging.ERROR:
            tag = self._logtags.lookup('error')
        elif record.levelno >= logging.WARNING:
            tag = self._logtags.lookup('warning')
        else:
            tag = self._logtags.lookup('normal')
        enditer = self._logbuffer.get_end_iter()
        self._logbuffer.insert_with_tags(enditer, message + '\n', tag)
        self._logview.scroll_to_mark(
            self._logbuffer.get_mark('log_end'), 0.1, False, 0, 0)
        if record.levelno >= logging.INFO:
            self.builder.get_object('statusbar').pop(0)
            self.builder.get_object('statusbar').push(0, record.message.split('\n')[0])
        self._logview2.add_logentry(record)
        return False

    def construct_and_run_dialog(self, windowclass, toplevelname, gladefile, windowtitle, connections):
        """Lazily construct (or re-use) the ToolWindow identified by
        (windowclass, toplevelname), wire up its signals, and present it."""
        assert issubclass(windowclass, ToolWindow)
        key = str(windowclass) + str(toplevelname)
        logger.debug('Construct & run dialog: ' + gladefile)
        if key not in self._toolwindows:
            logger.debug('Constructing needed for dialog ' + gladefile)
            try:
                self._toolwindows[key] = windowclass(gladefile, toplevelname, self.instrument, windowtitle)
            except ToolFrame.DeviceException as ex:
                error_message(self.widget, 'Could not open window {}'.format(windowtitle),
                              'Missing required device: {}'.format(ex.args[0]))
                return
            except Exception as exc:
                error_message(self.widget, 'Could not open window {}'.format(windowtitle),
                              '{}\n{}'.format(str(exc), traceback.format_exc()))
                return
            # if self._toolwindows[key].widget.destroyed():
            #    logger.error('Error while constructing dialog ' + gladefile)
            #    del self._toolwindows[key]
            logger.debug('Successful construction of dialog ' + gladefile)
            assert key not in self._toolwindow_connections
            logger.debug('Connecting signals for dialog ' + gladefile)
            try:
                self._toolwindow_connections[key] = [
                    self._toolwindows[key].connect('destroy', self.on_toolwindow_destroyed, key)]
                for signal in connections:
                    self._toolwindow_connections[key].append(
                        self._toolwindows[key].connect(signal, getattr(self, connections[signal])))
            except Exception as exc:
                # On failure, roll back: disconnect whatever was connected and
                # destroy the half-wired window, then re-raise.
                logger.error('Error connecting signals to dialog ' + gladefile)
                try:
                    for c in self._toolwindow_connections[key]:
                        self._toolwindows[key].disconnect(c)
                    self._toolwindows[key].destroy()
                    raise
                finally:
                    del self._toolwindow_connections[key]
                    del self._toolwindows[key]
        logger.debug('Dialog should be up and running: ' + gladefile)
        logger.debug('Presenting dialog ' + gladefile)
        return self._toolwindows[key].widget.present()

    def on_toolwindow_destroyed(self, toolwindow: ToolWindow, key):
        """Drop all references and signal connections for a destroyed dialog
        so it can be garbage collected and re-created on demand."""
        logger.debug('Dialog destroyed: ' + toolwindow.gladefile)
        assert key in self._toolwindow_connections
        for c in self._toolwindow_connections[key]:
            toolwindow.disconnect(c)
        del self._toolwindow_connections[key]
        del self._toolwindows[key]
        logger.debug('Mainwindow keeps no reference for dialog ' + toolwindow.gladefile)

    def on_quit(self):
        """Confirm (if busy), save state and start instrument shutdown.
        Always returns True so the delete-event is inhibited until the
        'shutdown' signal actually destroys the window."""
        if self.instrument.is_busy():
            if not question_message(self.widget, 'Confirm quit',
                                    'The instrument is busy. Do you still want to quit?'):
                return True
        logger.info('Shutdown requested.')
        self.instrument.save_state()
        self.instrument.shutdown()
        return True

    def on_instrument_shutdown(self, instrument):
        """Tear down signal connections and the log handler, then destroy the
        window and quit the Gtk application."""
        logger.info('Instrument shutdown finished.')
        for c in self._instrumentconnections:
            instrument.disconnect(c)
        self._instrumentconnections = []
        logging.root.removeHandler(self._loghandler)
        self.widget.destroy()
        Gtk.Application.get_default().quit()

    def on_menu(self, menuitem: Gtk.MenuItem):
        """Dispatch a menu/toolbar activation by widget name: built-in actions
        (quit/savesettings/about) or a registered tool window."""
        name = menuitem.get_name()
        if not (name.startswith('menuitem') or name.startswith('toolitem')):
            raise ValueError('Invalid menu item name: {}'.format(name))
        name = name.split('_', 1)[1]
        if name == 'quit':
            return self.on_quit()
        elif name == 'savesettings':
            self.instrument.save_state()
        elif name == 'about':
            builder = Gtk.Builder.new_from_file(
                pkg_resources.resource_filename('cct', 'resource/glade/help_about.glade'))
            ad = builder.get_object('aboutdialog')
            ad.set_version(pkg_resources.get_distribution('cct').version)
            ad.set_logo(GdkPixbuf.Pixbuf.new_from_file_at_size(
                pkg_resources.resource_filename('cct', 'resource/icons/scalable/cctlogo.svg'), 256, 256))
            ad.run()
            ad.destroy()
            del ad
        else:
            for nm, cls, toplevelname, gladefile, connections in self.toolwindow_registry:
                if nm != name:
                    continue
                self.construct_and_run_dialog(cls, toplevelname, gladefile,
                                              menuitem.get_label().replace('_', ''), connections)
                return False
            raise ValueError(name)

    def on_insert_command(self, commandhelpdialog: CommandHelpDialog, command: str):
        """Insert a command chosen in the help browser into the command entry."""
        self.builder.get_object('command_entry').set_text(command)

    def on_toolbar(self, toolbutton):
        """Toolbar buttons share the menu dispatch logic."""
        return self.on_menu(toolbutton)

    def set_menu_sensitivity(self):
        """Enable/disable each registered tool window's menu and toolbar item
        according to whether its device requirements are currently met."""
        for nm, cls, toplevelname, gladefile, connections in self.toolwindow_registry:
            requirementsmet = cls.requirements_met(self.instrument)
            for what in ['menuitem', 'toolitem']:
                try:
                    self.builder.get_object(what + '_' + nm).set_sensitive(requirementsmet)
                except AttributeError:
                    # Not every registry entry has both a menu and a tool item.
                    pass
# -*- coding: utf-8 -*-
# Copyright 2019 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Artifacts service.

This service houses the high level business logic for all created artifacts.
"""

from __future__ import print_function

import collections
import fnmatch
import glob
import json
import os
import shutil

from chromite.lib import autotest_util
from chromite.lib import constants
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import osutils
from chromite.lib import portage_util
from chromite.lib import toolchain_util
from chromite.lib.paygen import partition_lib
from chromite.lib.paygen import paygen_payload_lib
from chromite.lib.paygen import paygen_stateful_payload_lib


# Archive type constants.
ARCHIVE_CONTROL_FILES = 'control'
ARCHIVE_PACKAGES = 'packages'
ARCHIVE_SERVER_PACKAGES = 'server_packages'
ARCHIVE_TEST_SUITES = 'test_suites'

CPE_WARNINGS_FILE_TEMPLATE = 'cpe-warnings-chromeos-%s.txt'
CPE_RESULT_FILE_TEMPLATE = 'cpe-chromeos-%s.txt'

# The individual image archives for ArchiveImages.
IMAGE_TARS = {
    constants.TEST_IMAGE_BIN: constants.TEST_IMAGE_TAR,
    constants.TEST_GUEST_VM_DIR: constants.TEST_GUEST_VM_TAR,
    constants.BASE_GUEST_VM_DIR: constants.BASE_GUEST_VM_TAR
}

TAST_BUNDLE_NAME = 'tast_bundles.tar.bz2'
TAST_COMPRESSOR = cros_build_lib.COMP_BZIP2

CpeResult = collections.namedtuple('CpeResult', ['report', 'warnings'])

PinnedGuestImage = collections.namedtuple('PinnedGuestImage',
                                          ['filename', 'uri'])


class Error(Exception):
  """Base module error."""


class ArchiveBaseDirNotFound(Error):
  """Raised when the archive base directory does not exist.

  This error most likely indicates the board was not built.
  """


class CrosGenerateSysrootError(Error):
  """Error when running CrosGenerateSysroot."""


class NoFilesError(Error):
  """When there are no files to archive."""


def BuildFirmwareArchive(chroot, sysroot, output_directory):
  """Build firmware_from_source.tar.bz2 in chroot's sysroot firmware directory.

  Args:
    chroot (chroot_lib.Chroot): The chroot to be used.
    sysroot (sysroot_lib.Sysroot): The sysroot whose artifacts are being
      archived.
    output_directory (str): The path were the completed archives should be put.

  Returns:
    str|None - The archive file path if created, None otherwise.
  """
  firmware_root = os.path.join(chroot.path, sysroot.path.lstrip(os.sep),
                               'firmware')
  source_list = [os.path.relpath(f, firmware_root)
                 for f in glob.iglob(os.path.join(firmware_root, '*'))]
  # Nothing to archive -- e.g. the board ships no firmware from source.
  if not source_list:
    return None

  archive_file = os.path.join(output_directory,
                              constants.FIRMWARE_ARCHIVE_NAME)
  cros_build_lib.CreateTarball(
      archive_file,
      firmware_root,
      compression=cros_build_lib.COMP_BZIP2,
      chroot=chroot.path,
      inputs=source_list)

  return archive_file


def BundleAutotestFiles(chroot, sysroot, output_directory):
  """Create the Autotest Hardware Test archives.

  Args:
    chroot (chroot_lib.Chroot): The chroot containing the sysroot.
    sysroot (sysroot_lib.Sysroot): The sysroot whose artifacts are being
      archived.
    output_directory (str): The path were the completed archives should be put.

  Returns:
    dict - The paths of the files created in |output_directory| by their type.
  """
  assert sysroot.Exists(chroot=chroot)
  assert output_directory

  logging.debug('Inside artifacts_service BundleAutotestFiles (%s %s %s)',
                chroot.path, sysroot.path, output_directory)
  # archive_basedir is the base directory where the archive commands are run.
  # We want the folder containing the board's autotest folder.
  archive_basedir = chroot.full_path(sysroot.path,
                                     constants.AUTOTEST_BUILD_PATH)
  archive_basedir = os.path.dirname(archive_basedir)

  if not os.path.exists(archive_basedir):
    raise ArchiveBaseDirNotFound(
        'Archive base directory does not exist: %s' % archive_basedir)

  builder = autotest_util.AutotestTarballBuilder(archive_basedir,
                                                 output_directory)
  return {
      ARCHIVE_CONTROL_FILES: builder.BuildAutotestControlFilesTarball(),
      ARCHIVE_PACKAGES: builder.BuildAutotestPackagesTarball(),
      ARCHIVE_SERVER_PACKAGES: builder.BuildAutotestServerPackageTarball(),
      ARCHIVE_TEST_SUITES: builder.BuildAutotestTestSuitesTarball(),
  }


def BundleEBuildLogsTarball(chroot, sysroot, archive_dir):
  """Builds a tarball containing ebuild logs.

  Args:
    chroot (chroot_lib.Chroot): The chroot to be used.
    sysroot (sysroot_lib.Sysroot): Sysroot whose images are being fetched.
    archive_dir: The directory to drop the tarball in.

  Returns:
    The file name of the output tarball, None if no package found.
  """
  tarball_paths = []
  logs_path = chroot.full_path(sysroot.path, 'tmp/portage')

  if not os.path.isdir(logs_path):
    return None

  if not os.path.exists(os.path.join(logs_path, 'logs')):
    return None

  tarball_paths.append('logs')
  tarball_output = os.path.join(archive_dir, 'ebuild_logs.tar.xz')

  try:
    cros_build_lib.CreateTarball(
        tarball_output, cwd=logs_path, chroot=chroot.path,
        inputs=tarball_paths)
  except cros_build_lib.CreateTarballError:
    # Deliberately best-effort: a failed logs tarball must not fail the build.
    logging.warning('Unable to create logs tarball; ignoring until '
                    'https://crbug.com/999933 is sorted out.')
    return None
  return os.path.basename(tarball_output)


def BundleChromeOSConfig(chroot, sysroot, archive_dir):
  """Outputs the ChromeOS Config payload.

  Args:
    chroot (chroot_lib.Chroot): The chroot to be used.
    sysroot (sysroot_lib.Sysroot): Sysroot whose config is being fetched.
    archive_dir: The directory to drop the config in.

  Returns:
    The file name of the output config, None if no config found.
  """
  config_path = chroot.full_path(sysroot.path,
                                 'usr/share/chromeos-config/yaml/config.yaml')

  if not os.path.exists(config_path):
    return None

  config_output = os.path.join(archive_dir, 'config.yaml')
  shutil.copy(config_path, config_output)
  return os.path.basename(config_output)


def BundleSimpleChromeArtifacts(chroot, sysroot, build_target, output_dir):
  """Gather all of the simple chrome artifacts.

  Args:
    chroot (chroot_lib.Chroot): The chroot to be used.
    sysroot (sysroot_lib.Sysroot): The sysroot.
    build_target (build_target_lib.BuildTarget): The sysroot's build target.
    output_dir (str): Where all result files should be stored.
  """
  files = []
  files.extend(CreateChromeRoot(chroot, build_target, output_dir))
  files.append(ArchiveChromeEbuildEnv(sysroot, output_dir))

  return files


def BundleVmFiles(chroot, test_results_dir, output_dir):
  """Gather all of the VM files.

  Args:
    chroot (chroot_lib.Chroot): The chroot to be used.
    test_results_dir (str): Test directory relative to chroot.
    output_dir (str): Where all result files should be stored.
  """
  image_dir = chroot.full_path(test_results_dir)
  archives = ArchiveFilesFromImageDir(image_dir, output_dir)
  return archives


# TODO(mmortensen): Refactor ArchiveFilesFromImageDir to be part of a library
# module. I tried moving it to lib/vm.py but this causes a circular dependency.
def ArchiveFilesFromImageDir(images_dir, archive_path):
  """Archives the files into tarballs if they match a prefix from prefix_list.

  Create and return a list of tarballs from the images_dir of files that match
  VM disk and memory prefixes.

  Args:
    images_dir (str): The directory containing the images to archive.
    archive_path (str): The directory where the archives should be created.

  Returns:
    list[str] - The paths to the tarballs.
  """
  images = []
  for prefix in [constants.VM_DISK_PREFIX, constants.VM_MEM_PREFIX]:
    for path, _, filenames in os.walk(images_dir):
      images.extend([
          os.path.join(path, filename)
          for filename in fnmatch.filter(filenames, prefix + '*')
      ])

  tar_files = []
  for image_path in images:
    image_rel_path = os.path.relpath(image_path, images_dir)
    image_parent_dir = os.path.dirname(image_path)
    image_file = os.path.basename(image_path)
    tarball_path = os.path.join(archive_path,
                                '%s.tar' % image_rel_path.replace('/', '_'))
    # Note that tar will chdir to |image_parent_dir|, so that |image_file|
    # is at the top-level of the tar file.
    cros_build_lib.CreateTarball(tarball_path,
                                 image_parent_dir,
                                 compression=cros_build_lib.COMP_BZIP2,
                                 inputs=[image_file])
    tar_files.append(tarball_path)
  return tar_files


def ArchiveChromeEbuildEnv(sysroot, output_dir):
  """Generate Chrome ebuild environment.

  Args:
    sysroot (sysroot_lib.Sysroot): The sysroot where the original environment
      archive can be found.
    output_dir (str): Where the result should be stored.

  Returns:
    str: The path to the archive.

  Raises:
    NoFilesException: When the package cannot be found.
  """
  pkg_dir = os.path.join(sysroot.path, portage_util.VDB_PATH)
  files = glob.glob(os.path.join(pkg_dir, constants.CHROME_CP) + '-*')
  if not files:
    raise NoFilesError('Failed to find package %s' % constants.CHROME_CP)

  if len(files) > 1:
    logging.warning('Expected one package for %s, found %d',
                    constants.CHROME_CP, len(files))

  # Multiple versions may be installed; take the lexicographically last one.
  chrome_dir = sorted(files)[-1]
  env_bzip = os.path.join(chrome_dir, 'environment.bz2')
  result_path = os.path.join(output_dir, constants.CHROME_ENV_TAR)
  with osutils.TempDir() as tempdir:
    # Convert from bzip2 to tar format.
    bzip2 = cros_build_lib.FindCompressor(cros_build_lib.COMP_BZIP2)
    tempdir_tar_path = os.path.join(tempdir, constants.CHROME_ENV_FILE)
    cros_build_lib.run([bzip2, '-d', env_bzip, '-c'],
                       stdout=tempdir_tar_path)

    cros_build_lib.CreateTarball(result_path, tempdir)

  return result_path


def ArchiveImages(image_dir, output_dir):
  """Create a .tar.xz archive for each image that has been created.

  Args:
    image_dir (str): The directory where the images are located.
    output_dir (str): The location where the archives should be created.

  Returns:
    list[str]: The list of created file names.
  """
  files = os.listdir(image_dir)

  archives = []
  # Filter down to the ones that exist first.
  images = {img: tar for img, tar in IMAGE_TARS.items() if img in files}
  for img, tar in images.items():
    target = os.path.join(output_dir, tar)
    cros_build_lib.CreateTarball(target, image_dir, inputs=(img,),
                                 print_cmd=False)
    archives.append(tar)

  return archives


def BundleImageZip(output_dir, image_dir):
  """Bundle image.zip.

  Args:
    output_dir (str): The location outside the chroot where the files should
      be stored.
    image_dir (str): The directory containing the image.
  """
  filename = 'image.zip'
  zipfile = os.path.join(output_dir, filename)
  cros_build_lib.run(['zip', zipfile, '-r', '.'], cwd=image_dir,
                     capture_output=True)
  return filename


def CreateChromeRoot(chroot, build_target, output_dir):
  """Create the chrome sysroot.

  Args:
    chroot (chroot_lib.Chroot): The chroot in which the sysroot should be
      built.
    build_target (build_target_lib.BuildTarget): The build target.
    output_dir (str): The location outside the chroot where the files should
      be stored.

  Returns:
    list[str]: The list of created files.

  Raises:
    CrosGenerateSysrootError: When cros_generate_sysroot does not complete
      successfully.
  """
  chroot_args = chroot.get_enter_args()

  extra_env = {'USE': 'chrome_internal'}
  with chroot.tempdir() as tempdir:
    in_chroot_path = os.path.relpath(tempdir, chroot.path)
    cmd = ['cros_generate_sysroot', '--out-dir', in_chroot_path, '--board',
           build_target.name, '--deps-only', '--package', constants.CHROME_CP]

    try:
      cros_build_lib.run(cmd, enter_chroot=True, extra_env=extra_env,
                         chroot_args=chroot_args)
    except cros_build_lib.RunCommandError as e:
      raise CrosGenerateSysrootError(
          'Error encountered when running cros_generate_sysroot: %s' % e, e)

    files = []
    for path in osutils.DirectoryIterator(tempdir):
      if os.path.isfile(path):
        rel_path = os.path.relpath(path, tempdir)
        files.append(os.path.join(output_dir, rel_path))
    osutils.CopyDirContents(tempdir, output_dir, allow_nonempty=True)

  return files


def BundleTestUpdatePayloads(image_path, output_dir):
  """Generate the test update payloads.

  Args:
    image_path (str): The full path to an image file.
    output_dir (str): The path where the payloads should be generated.

  Returns:
    list[str] - The list of generated payloads.
  """
  payloads = GenerateTestPayloads(image_path, output_dir, full=True,
                                  stateful=True, delta=True)
  payloads.extend(GenerateQuickProvisionPayloads(image_path, output_dir))

  return payloads


def GenerateTestPayloads(target_image_path, archive_dir, full=False,
                         delta=False, stateful=False):
  """Generates the payloads for hw testing.

  Args:
    target_image_path (str): The path to the image to generate payloads to.
    archive_dir (str): Where to store payloads we generated.
    full (bool): Generate full payloads.
    delta (bool): Generate delta payloads.
    stateful (bool): Generate stateful payload.

  Returns:
    list[str] - The list of payloads that were generated.
""" real_target = os.path.realpath(target_image_path) # The path to the target should look something like this: # .../link/R37-5952.0.2014_06_12_2302-a1/chromiumos_test_image.bin board, os_version = real_target.split('/')[-3:-1] prefix = 'chromeos' suffix = 'dev.bin' generated = [] if full: # Names for full payloads look something like this: # chromeos_R37-5952.0.2014_06_12_2302-a1_link_full_dev.bin name = '_'.join([prefix, os_version, board, 'full', suffix]) payload_path = os.path.join(archive_dir, name) paygen_payload_lib.GenerateUpdatePayload(target_image_path, payload_path) generated.append(payload_path) if delta: # Names for delta payloads look something like this: # chromeos_R37-5952.0.2014_06_12_2302-a1_R37- # 5952.0.2014_06_12_2302-a1_link_delta_dev.bin name = '_'.join([prefix, os_version, os_version, board, 'delta', suffix]) payload_path = os.path.join(archive_dir, name) paygen_payload_lib.GenerateUpdatePayload( target_image_path, payload_path, src_image=target_image_path) generated.append(payload_path) if stateful: generated.append( paygen_stateful_payload_lib.GenerateStatefulPayload(target_image_path, archive_dir)) return generated def GenerateQuickProvisionPayloads(target_image_path, archive_dir): """Generates payloads needed for quick_provision script. Args: target_image_path (str): The path to the image to extract the partitions. archive_dir (str): Where to store partitions when generated. Returns: list[str]: The artifacts that were produced. """ payloads = [] with osutils.TempDir() as temp_dir: # These partitions are mainly used by quick_provision. 
kernel_part = 'kernel.bin' rootfs_part = 'rootfs.bin' partition_lib.ExtractKernel( target_image_path, os.path.join(temp_dir, kernel_part)) partition_lib.ExtractRoot(target_image_path, os.path.join(temp_dir, rootfs_part), truncate=False) for partition, payload in { kernel_part: constants.QUICK_PROVISION_PAYLOAD_KERNEL, rootfs_part: constants.QUICK_PROVISION_PAYLOAD_ROOTFS}.items(): source = os.path.join(temp_dir, partition) dest = os.path.join(archive_dir, payload) cros_build_lib.CompressFile(source, dest) payloads.append(dest) return payloads def BundleAFDOGenerationArtifacts(is_orderfile, chroot, chrome_root, build_target, output_dir): """Generate artifacts for toolchain-related AFDO artifacts. Args: is_orderfile (boolean): The generation is for orderfile (True) or for AFDO (False). chroot (chroot_lib.Chroot): The chroot in which the sysroot should be built. chrome_root (str): Path to Chrome root. build_target (build_target_lib.BuildTarget): The build target. output_dir (str): The location outside the chroot where the files should be stored. Returns: list[str]: The list of tarballs of artifacts. """ chroot_args = chroot.get_enter_args() with chroot.tempdir() as tempdir: if is_orderfile: generate_orderfile = toolchain_util.GenerateChromeOrderfile( board=build_target.name, output_dir=tempdir, chrome_root=chrome_root, chroot_path=chroot.path, chroot_args=chroot_args) generate_orderfile.Perform() else: generate_afdo = toolchain_util.GenerateBenchmarkAFDOProfile( board=build_target.name, output_dir=tempdir, chroot_path=chroot.path, chroot_args=chroot_args) generate_afdo.Perform() files = [] for path in osutils.DirectoryIterator(tempdir): if os.path.isfile(path): rel_path = os.path.relpath(path, tempdir) files.append(os.path.join(output_dir, rel_path)) osutils.CopyDirContents(tempdir, output_dir, allow_nonempty=True) return files def BundleTastFiles(chroot, sysroot, output_dir): """Tar up the Tast private test bundles. 
Args: chroot (chroot_lib.Chroot): Chroot containing the sysroot. sysroot (sysroot_lib.Sysroot): Sysroot whose files are being archived. output_dir: Location for storing the result tarball. Returns: Path of the generated tarball, or None if there is no private test bundles. """ cwd = os.path.join(chroot.path, sysroot.path.lstrip(os.sep), 'build') dirs = [] for d in ('libexec/tast', 'share/tast'): if os.path.exists(os.path.join(cwd, d)): dirs.append(d) if not dirs: return None tarball = os.path.join(output_dir, TAST_BUNDLE_NAME) cros_build_lib.CreateTarball(tarball, cwd, compression=TAST_COMPRESSOR, chroot=chroot.path, inputs=dirs) return tarball def FetchPinnedGuestImages(chroot, sysroot): """Fetch the file names and uris of Guest VM and Container images for testing. Args: chroot (chroot_lib.Chroot): Chroot where the sysroot lives. sysroot (sysroot_lib.Sysroot): Sysroot whose images are being fetched. Returns: list[PinnedGuestImage] - The pinned guest image uris. """ pins_root = os.path.abspath( os.path.join(chroot.path, sysroot.path.lstrip(os.sep), constants.GUEST_IMAGES_PINS_PATH)) pins = [] for pin_file in sorted(glob.iglob(os.path.join(pins_root, '*.json'))): with open(pin_file) as f: pin = json.load(f) filename = pin.get(constants.PIN_KEY_FILENAME) uri = pin.get(constants.PIN_KEY_GSURI) if not filename or not uri: logging.warning("Skipping invalid pin file: '%s'.", pin_file) logging.debug("'%s' data: filename='%s' uri='%s'", pin_file, filename, uri) continue pins.append(PinnedGuestImage(filename=filename, uri=uri)) return pins def GenerateCpeReport(chroot, sysroot, output_dir): """Generate CPE export. Args: chroot (chroot_lib.Chroot): The chroot where the command is being run. sysroot (sysroot_lib.Sysroot): The sysroot whose dependencies are being reported. output_dir (str): The path where the output files should be written. Returns: CpeResult: The CPE result instance with the full paths to the report and warnings files. 
""" # Call cros_extract_deps to create the report that the export produced. # We'll assume the basename for the board name to match how these were built # out in the old system. # TODO(saklein): Can we remove the board name from the report file names? build_target = os.path.basename(sysroot.path) report_path = os.path.join(output_dir, CPE_RESULT_FILE_TEMPLATE % build_target) # Build the command and its args. cmd = [ 'cros_extract_deps', '--sysroot', sysroot.path, '--format', 'cpe', 'virtual/target-os', '--output-path', report_path ] logging.info('Beginning CPE Export.') result = cros_build_lib.run( cmd, capture_output=True, enter_chroot=True, chroot_args=chroot.get_enter_args()) logging.info('CPE Export Complete.') # Write out the warnings the export produced. warnings_path = os.path.join(output_dir, CPE_WARNINGS_FILE_TEMPLATE % build_target) osutils.WriteFile(warnings_path, result.stderr, mode='wb') return CpeResult(report=report_path, warnings=warnings_path)
# Copyright (c) 2016, Meteotest
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of Meteotest nor the
#       names of its contributors may be used to endorse or promote products
#       derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""
SIGTERM handler. Makes sure that reader/writer synchronization remains
in a consistent state after process termination.

Note that deadlocks or data corruption may still occur if processes
are killed (SIGKILL / kill -9).

Special thanks to Giampaolo Rodola for this code:
http://code.activestate.com/recipes/577997-handle-exit-context-manager/
"""

import contextlib
import signal
import sys
import threading
import warnings


def _sigterm_handler(signum, frame):
    """Translate SIGTERM into a clean ``SystemExit(0)``."""
    sys.exit(0)
# Flag (stored on the function object) used to reject nested contexts.
_sigterm_handler.__enter_ctx__ = False


@contextlib.contextmanager
def handle_exit(callback=None, append=False):
    """A context manager which properly handles SIGTERM and SIGINT
    (KeyboardInterrupt) signals, registering a function which is
    guaranteed to be called after signals are received.
    Also, it makes sure to execute previously registered signal
    handlers as well (if any).

    >>> app = App()
    >>> with handle_exit(app.stop):
    ...     app.start()
    ...
    >>>

    If append == False raise RuntimeError if there's already a handler
    registered for SIGTERM, otherwise both new and old handlers are
    executed in this order.

    Args:
        callback: Optional no-argument callable, invoked exactly once when
            the context exits (normally or via signal).
        append: If True, chain a pre-existing SIGTERM handler instead of
            raising RuntimeError.

    Raises:
        RuntimeError: if a foreign SIGTERM handler is installed and
            ``append`` is False, or if contexts are nested.
    """
    t = threading.current_thread()
    if t.name != 'MainThread':
        # signal.signal() is only legal from the main thread; degrade to a
        # plain pass-through context and warn.
        warnings.warn("!!! h5pySWMR warning: SIGTERM handling does not (yet) "
                      "work in a threaded environment. Locks may not be "
                      "released after process termination.",
                      UserWarning)
        yield
        return

    old_handler = signal.signal(signal.SIGTERM, _sigterm_handler)
    # Only a real, user-installed Python handler counts as "already
    # registered". SIG_DFL, SIG_IGN and None (handler installed outside
    # Python) are not callable handlers: the original check
    # (old_handler != SIG_DFL) raised spuriously for SIG_IGN/None with
    # append=False, and with append=True wrapped them in a chain that would
    # crash with TypeError when the signal actually fired.
    if old_handler not in (None, signal.SIG_DFL, signal.SIG_IGN,
                           _sigterm_handler):
        if not append:
            # Restore the pre-existing handler before bailing out so a
            # failed entry does not silently clobber it.
            signal.signal(signal.SIGTERM, old_handler)
            raise RuntimeError("there is already a handler registered for "
                               "SIGTERM: %r" % old_handler)

        def handler(signum, frame):
            try:
                _sigterm_handler(signum, frame)
            finally:
                # Runs even though _sigterm_handler raises SystemExit, so
                # the old handler executes before our exit propagates.
                old_handler(signum, frame)
        signal.signal(signal.SIGTERM, handler)

    if _sigterm_handler.__enter_ctx__:
        raise RuntimeError("can't use nested contexts")
    _sigterm_handler.__enter_ctx__ = True

    try:
        yield
    except KeyboardInterrupt:
        # SIGINT: treated as a clean exit; callback still runs below.
        pass
    except SystemExit as err:
        # code != 0 refers to an application error (e.g. explicit
        # sys.exit('some error') call).
        # We don't want that to pass silently.
        # Nevertheless, the 'finally' clause below will always
        # be executed.
        if err.code != 0:
            raise
    finally:
        # NOTE(review): the SIGTERM handler installed above is deliberately
        # not restored on exit (original behavior, the suite relies on it).
        _sigterm_handler.__enter_ctx__ = False
        if callback is not None:
            callback()


if __name__ == '__main__':
    # ===============================================================
    # --- test suite
    # ===============================================================

    import unittest
    import time
    import os

    class TestOnExit(unittest.TestCase):

        def setUp(self):
            # reset signal handlers
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            self.flag = None

        def tearDown(self):
            # make sure we exited the ctx manager
            self.assertTrue(self.flag is not None)

        def test_base(self):
            with handle_exit():
                pass
            self.flag = True

        def test_callback(self):
            callback = []
            with handle_exit(lambda: callback.append(None)):
                pass
            self.flag = True
            self.assertEqual(callback, [None])

        def test_kinterrupt(self):
            with handle_exit():
                raise KeyboardInterrupt
            self.flag = True

        def test_sigterm(self):
            with handle_exit():
                os.kill(os.getpid(), signal.SIGTERM)
            self.flag = True

        def test_sigterm_complex(self):
            def handler():
                print("handler")
            with handle_exit(handler):
                time.sleep(2)
                os.kill(os.getpid(), signal.SIGTERM)
            self.flag = True

        def test_sigint(self):
            with handle_exit():
                os.kill(os.getpid(), signal.SIGINT)
            self.flag = True

        def test_sigterm_old(self):
            # make sure the old handler gets executed
            queue = []
            signal.signal(signal.SIGTERM, lambda s, f: queue.append('old'))
            with handle_exit(lambda: queue.append('new'), append=True):
                os.kill(os.getpid(), signal.SIGTERM)
            self.flag = True
            self.assertEqual(queue, ['old', 'new'])

        def test_sigint_old(self):
            # make sure the old handler gets executed
            queue = []
            signal.signal(signal.SIGINT, lambda s, f: queue.append('old'))
            with handle_exit(lambda: queue.append('new'), append=True):
                os.kill(os.getpid(), signal.SIGINT)
            self.flag = True
            self.assertEqual(queue, ['old', 'new'])

        def test_no_append(self):
            # make sure we can't use the context manager if there's
            # already a handler registered for SIGTERM
            signal.signal(signal.SIGTERM, lambda s, f: sys.exit(0))
            try:
                with handle_exit(lambda: self.flag.append(None)):
                    pass
            except RuntimeError:
                pass
            else:
                self.fail("exception not raised")
            finally:
                self.flag = True

        def test_nested_context(self):
            self.flag = True
            try:
                with handle_exit():
                    with handle_exit():
                        pass
            except RuntimeError:
                pass
            else:
                self.fail("exception not raised")

    unittest.main()
#!/usr/bin/env python
# Bookmark-file importer for the 'notebook' Django app.
# NOTE(review): Python 2 code (print statements, BeautifulSoup 3, unicode()).
import sys
sys.path.append('/home/leon/projects/notebookWebapp/')
# Bootstrap the Django settings before importing app models.
from django.core import management; import notebook; import notebook.settings as settings;management.setup_environ(settings)

from BeautifulSoup import BeautifulSoup, BeautifulStoneSoup
from notebook.bookmarks.models import Bookmark
from notebook.notes.views import getT, getW, getNote

from datetime import datetime


def import_only_a(username, bookmark_file, default_vote=0, common_tag=None, common_ws=None):
    """This only get urls, and ignore all the other infos such as folders, desc.

    Returns (count_urls_in_file, count_note_created, sorted duplicate list).
    """
    # Each tuple: (href, title, add_date, last_modified).
    urls = [(tag['href'], tag.string, tag.get('add_date'), tag.get('last_modified')) for tag in BeautifulSoup(bookmark_file).findAll('a')]
    count_urls_in_file = len(urls)
    print count_urls_in_file, ' urls found in the bookmark file.'
    duplicate = []
    count_note_created = 0
    for url in urls:
        n, created = build_one_bookmark(username, url, default_vote)
        if not created:
            # Already existed: remember (href, title) for reporting.
            duplicate.append((url[0], url[1]))
        else:
            count_note_created+=1
    print count_note_created, ' bookmarks created'
    print len(duplicate), ' duplicated bookmarks.'
    #print 'duplicate is:',duplicate
    duplicate.sort()
    return count_urls_in_file, count_note_created, duplicate


def import_delicious_with_no_desc(username, bookmark_file, default_vote=0, common_tag=None, common_ws=None):
    """This import from bookmark file exported from delicious, which has tags attr inside a tag."""
    # Each tuple additionally carries the delicious 'tags' attribute (url[4]).
    urls = [(tag['href'], tag.string, tag.get('add_date'), tag.get('last_modified'), tag.get('tags')) for tag in BeautifulSoup(bookmark_file).findAll('a')]
    count_urls_in_file = len(urls)
    print count_urls_in_file, ' urls found in the bookmark file.'
    duplicate = []
    count_note_created = 0
    count_tag_created = 0
    for url in urls:
        n, created = build_one_bookmark(username, url, default_vote)
        num_of_tags_created = n.add_tags(url[4]+','+common_tag)
        count_tag_created = count_tag_created + num_of_tags_created
        W = getW(username)
        w, created = W.objects.get_or_create(name=common_ws)
        # NOTE(review): 'created' is overwritten here by the workspace
        # get_or_create, so the duplicate/created accounting below reflects
        # the workspace, not the bookmark — likely a bug; verify intent.
        w.add_tags(url[4]+','+common_tag)
        w.save()
        if not created:
            duplicate.append((url[0], url[1]))
        else:
            count_note_created+=1
    print count_note_created, ' bookmarks created'
    print len(duplicate), ' duplicated bookmarks.'
    #print 'duplicate is:',duplicate
    duplicate.sort()
    return count_urls_in_file, count_note_created, duplicate, count_tag_created


def import_delicious(username, bookmark_file, default_vote=0, common_tag=None, common_ws=None):
    """This import from bookmark file exported from delicious, which has tags attr inside a tag.

    Unlike import_delicious_with_no_desc, this re-reads the file line by line
    so that <DD> description lines can be attached to the preceding bookmark.
    """
    urls = [(tag['href'], tag.string, tag.get('add_date'), tag.get('last_modified'), tag.get('tags')) for tag in BeautifulSoup(bookmark_file).findAll('a')]
    count_urls_in_file = len(urls)
    print count_urls_in_file, ' urls found in the bookmark file.'
    duplicate = []
    count_note_created = 0
    count_tag_created = 0
    # Rewind: the file object was consumed by the full-file parse above.
    bookmark_file.seek(0)
    n = None
    for line in bookmark_file:
        if line.find('<DT><A') != -1:
            url = [(tag['href'], tag.string, tag.get('add_date'), tag.get('last_modified'), tag.get('tags')) for tag in BeautifulSoup(line).findAll('a')][0]
            n, created = build_one_bookmark(username, url, default_vote)
            if not created:
                duplicate.append((url[0], url[1]))
            else:
                count_note_created+=1
            num_of_tags_created = n.add_tags(url[4]+','+common_tag)
            count_tag_created = count_tag_created + num_of_tags_created
            W = getW(username)
            w, created = W.objects.get_or_create(name=common_ws)
            # NOTE(review): 'created' is clobbered by the workspace lookup
            # above before the duplicate accounting — same caveat as in
            # import_delicious_with_no_desc.
            w.add_tags(url[4]+','+common_tag)
            w.save()
        if line.find('<DD>') != -1:
            # Description line belongs to the most recently created bookmark.
            if n:
                desc = line.strip('<DD>').strip('</DD>')
                n.desc = desc
                n.save()
                print 'n.desc:', n.desc
    print count_note_created, ' bookmarks created'
    print len(duplicate), ' duplicated bookmarks.'
    #print 'duplicate is:',duplicate
    duplicate.sort()
    return count_urls_in_file, count_note_created, duplicate, count_tag_created


def build_one_bookmark(username, url, default_vote=0):
    """Create (or fetch) one bookmark note from a parsed <A> tuple.

    url is (href, title, add_date, last_modified[, tags]).
    Returns (note, created) like get_or_create.
    """
    N = getNote(username, 'bookmarkbook')
    n, created = N.objects.get_or_create(url = url[0])
    if created:
        if url[1]==None:
            n.title = ""
        else:
            n.title = url[1]
        if url[2]:
            a = url[2]
            #Google bookmark file uses 16 digits for the datetime. We only need 10 digits.
            if len(a)==16:
                a = a[:-6]
            #TODO: catch possible exceptions
            n.init_date = datetime.fromtimestamp(int(a))
        else:
            n.init_date = datetime.now()
        if url[3]:
            a = url[3]
            if len(a)==16:
                a = a[:-6]
            n.last_modi_date = datetime.fromtimestamp(int(a))
        else:
            n.last_modi_date = datetime.now()
        n.vote = default_vote
        n.save()
        print 'A bookmark is saved:', n
    return n, created


#This method only works with exported google bookmark files correctly, since google bookmark has no folders, and google bookmark
#file puts tags in where browsers put folders. This function can be deleted, use import_with_tags2 instead, which works with google bookmark file as well.
from django.utils.encoding import smart_str, smart_unicode
def import_with_tags(username, bookmark_file, default_vote=0, common_tag=None, common_ws=None):
    """This not only gets all the urls, but also turns the folders in the file into tags"""
    T = getT(username)
    W = getW(username)
    urls = [(tag['href'], tag.string, tag.get('add_date'), tag.get('last_modified')) for tag in BeautifulSoup(bookmark_file).findAll('a')]
    count_urls_in_file = len(urls)
    print count_urls_in_file, ' urls found in the bookmark file.'
    bookmark_file.seek(0)
    # Folder headers (<H3>) become tag names.
    folders = [tag.string for tag in BeautifulSoup(bookmark_file).findAll('h3')]
    print 'folders:', folders
    print len(folders), ' folders found in the bookmark file.'
    count_tag_created = 0
    w = W.objects.get(name="bookmarks")
    #make each of them into a tag
    for folder in folders:
        print 'type(folder):', type(folder)
        print 'folder:', folder
        #some bug with BeautifulSoup's custom unicode. So upcast back to
        #unicode itself. See http://code.djangoproject.com/ticket/11932
        folderstr = unicode(folder)
        print 'type(folderstr):', type(folderstr)
        print 'folderstr:', folderstr
        # Skip browser-builtin pseudo-folders.
        if folderstr not in [u'Unsorted Bookmarks', u'[Folder Name]', u'Bookmarks Toolbar']:
            t, created = T.objects.get_or_create(name = folderstr)
            print 'tag ', t, ' created ', created
            print 't.name:', t.name
            if created:
                #print 'tag:', t, ' is created.'
                count_tag_created += 1
            w.tags.add(t)
            w.save()
    print count_tag_created, 'tags are created.'
    count_note_created = 0
    duplicate = []
    #move the ponter back to the beginning of the file
    bookmark_file.seek(0)
    t = None
    for line in bookmark_file:
        if line.find('<DT><A') != -1:
            url = [(tag['href'], tag.string, tag.get('add_date'), tag.get('last_modified')) for tag in BeautifulSoup(line).findAll('a')][0]
            n, created = build_one_bookmark(username, url, default_vote)
            if not created:
                duplicate.append((url[0], url[1]))
            else:
                count_note_created +=1
            # 't' is the tag for the folder most recently seen; bookmarks
            # before any folder header get no tag.
            if t:
                n.tags.add(t)
                n.save()
        elif line.find('<DT><H3') != -1:
            tname = [tag.string for tag in BeautifulSoup(line).findAll('h3')][0]
            if unicode(tname) not in [u'Unsorted Bookmarks', u'[Folder Name]', u'Bookmarks Toolbar']:
                print 'unicode(tname) is:', unicode(tname)
                t = T.objects.get(name__exact=unicode(tname))
            else:
                continue
    print count_note_created, ' bookmarks created'
    print len(duplicate), ' duplicated bookmarks.'
    #print 'duplicate is:', duplicate
    duplicate.sort()
    return count_urls_in_file, count_note_created, duplicate, count_tag_created


from django.utils.encoding import smart_str, smart_unicode
def import_with_tags2(username, bookmark_file, default_vote=0, common_tag=None, common_ws=None):
    """This not only gets all the urls, but also turns the folders in the file into tags

    Uses an explicit stack (folder_list) of {folder_name: [bookmarks]} dicts so
    that nested folders tag every bookmark beneath them, which also handles
    Google bookmark exports (flat folders).
    """
    T = getT(username)
    W = getW(username)
    urls = [(tag['href'], tag.string, tag.get('add_date'), tag.get('last_modified')) for tag in BeautifulSoup(bookmark_file).findAll('a')]
    count_urls_in_file = len(urls)
    #print count_urls_in_file, ' urls found in the bookmark file.'
    count_tag_created = 0
    w = W.objects.get(name="bookmarkbook")
    count_note_created = 0
    duplicate = []
    #move the pointer back to the beginning of the file
    bookmark_file.seek(0)
    b = None
    folder_list = []
    for line in bookmark_file:
        if line.find('<DT><H3') != -1:
            # Folder opens: push {name: []} onto the stack.
            tname = [tag.string for tag in BeautifulSoup(line).findAll('h3')][0]
            folder_list.append({tname:[]})
            #print 'one folder with folder name ',tname,' pushed to stack.'
        if line.find('</DL><P>') != -1 or line.find('</DL><p>') != -1:#FF and Chrome use <p> while Opera uses <P>
            #there is one extra '</DL><P>' at the end of the file for <H1>Bookmarks</H1>. So when it comes to the
            #it, just skip
            if len(folder_list) == 0:
                continue
            # Folder closes: pop it and tag all bookmarks collected under it.
            folder_of_urls = folder_list.pop()
            #print 'one folder ',folder_of_urls,' popped out of stack.'
            folder = folder_of_urls.keys()[0]
            urls = folder_of_urls.get(folder)
            folderstr = unicode(folder)
            if folderstr not in [u'Unsorted Bookmarks', u'[Folder Name]', u'Bookmarks Toolbar']:
                t, created = T.objects.get_or_create(name = folderstr)
                if created:
                    count_tag_created += 1
                w.tags.add(t)
                w.save()
                for url in urls:
                    #print 'url in the popped out stack is: ', url
                    url.tags.add(t)
                    num_of_tags_created = url.add_tags(common_tag, 'bookmarkbook')
                    count_tag_created = count_tag_created + num_of_tags_created
                    url.save()
        if line.find('<DT><A') != -1:
            u = [(tag['href'], tag.string, tag.get('add_date'), tag.get('last_modified')) for tag in BeautifulSoup(line).findAll('a')][0]
            b, created = build_one_bookmark(username, u, default_vote)
            if not created:
                duplicate.append((u[0], u[1]))
            else:
                count_note_created +=1
            #for url that is at the top, simply create the bookmark without adding it to any tag
            if len(folder_list) == 0:
                pass
            else:
                for i in range(len(folder_list)):#add this url to every folder on the stack
                    f_of_bs = folder_list[i]
                    f = f_of_bs.keys()[0]
                    bs = f_of_bs.get(f)
                    bs.append(b)
                    f_of_bs.update({f:bs})
                    folder_list[i] = f_of_bs
                    #print 'one url ', b, 'is added to a folder on stack ', f
        if line.find('<DD>') != -1:
            # Description line belongs to the most recently created bookmark.
            if b:
                desc = line.strip('<DD>').strip('</DD>')
                b.desc = desc
                b.save()
                print 'b.desc:', b.desc
    #print count_note_created, ' bookmarks created'
    #print len(duplicate), ' duplicated bookmarks.'
    #print 'duplicate is:', duplicate
    duplicate.sort()
    return count_urls_in_file, count_note_created, duplicate, count_tag_created


#TODO: more help for the command inputs. Also make ignoring_folder a command line argument
if __name__ == "__main__":
    username = sys.argv[1]
    bookmark_file = open(sys.argv[2])
    if len(sys.argv) > 3:
        default_vote = int(sys.argv[3])
    else:
        default_vote = 0
    import_with_tags2(username, bookmark_file, default_vote)
    #import_only_a(username, bookmark_file, default_vote)
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

# Unit tests for the v3 servers extension's server-action endpoints
# (reboot, rebuild, resize, confirm/revert resize, create_image).
# NOTE(review): Python 2 era code — uses mox and `iterator.next()`.

import uuid

import mox
from oslo.config import cfg
import webob

from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import servers
from nova.compute import api as compute_api
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova.image import glance
from nova.objects import instance as instance_obj
from nova.openstack.common import importutils
from nova.openstack.common import jsonutils
from nova.openstack.common import uuidutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_block_device
from nova.tests import fake_instance
from nova.tests.image import fake

CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')

FAKE_UUID = fakes.FAKE_UUID
# Maps the well-known fake uuid to a stub numeric instance id.
INSTANCE_IDS = {FAKE_UUID: 1}


def return_server_not_found(*arg, **kwarg):
    """Stub for db lookups that should behave as if the server is missing."""
    raise exception.InstanceNotFound(instance_id='42')


def instance_update_and_get_original(context, instance_uuid, values,
                                     update_cells=True,
                                     columns_to_join=None,
                                     ):
    """Stub for db.instance_update_and_get_original.

    Returns the same stub instance (merged with ``values``) as both the
    "old" and "new" record, which is sufficient for these tests.
    """
    inst = fakes.stub_instance(INSTANCE_IDS[instance_uuid], host='fake_host')
    inst = dict(inst, **values)
    return (inst, inst)


def instance_update(context, instance_uuid, kwargs, update_cells=True):
    """Stub for db.instance_update; ignores ``kwargs`` and returns a stub."""
    inst = fakes.stub_instance(INSTANCE_IDS[instance_uuid], host='fake_host')
    return inst


class MockSetAdminPassword(object):
    """Callable test double that records the uuid/password it was called with."""

    def __init__(self):
        self.instance_id = None
        self.password = None

    def __call__(self, context, instance, password):
        self.instance_id = instance['uuid']
        self.password = password


class ServerActionsControllerTest(test.TestCase):
    """Tests for the /servers/{id}/action handlers of the v3 API."""

    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    image_href = 'http://localhost/v3/images/%s' % image_uuid

    def setUp(self):
        """Stub out db/glance/network/compute layers and build the controller."""
        super(ServerActionsControllerTest, self).setUp()

        CONF.set_override('glance_host', 'localhost')
        # Default db stubs: an ACTIVE instance on a fake host.
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
                                               host='fake_host'))
        self.stubs.Set(db, 'instance_update_and_get_original',
                       instance_update_and_get_original)

        fakes.stub_out_glance(self.stubs)
        fakes.stub_out_nw_api(self.stubs)
        fakes.stub_out_compute_api_snapshot(self.stubs)
        fake.stub_out_image_service(self.stubs)
        service_class = 'nova.image.glance.GlanceImageService'
        self.service = importutils.import_object(service_class)
        self.sent_to_glance = {}
        fakes.stub_out_glanceclient_create(self.stubs, self.sent_to_glance)
        self.flags(allow_instance_snapshots=True,
                   enable_instance_password=True)
        self.uuid = FAKE_UUID
        self.url = '/servers/%s/action' % self.uuid
        self._image_href = '155d900f-4e14-4e4c-a73d-069cbf4541e6'

        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
        self.compute_api = self.controller.compute_api
        self.context = context.RequestContext('fake', 'fake')
        self.app = fakes.wsgi_app_v3(init_only=('servers',),
                                     fake_auth_context=self.context)

    def _make_request(self, url, body):
        """POST ``body`` as JSON to ``url`` through the full WSGI app."""
        req = webob.Request.blank('/v3' + url)
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.content_type = 'application/json'
        return req.get_response(self.app)

    def _stub_instance_get(self, uuid=None):
        """Record a mox expectation for compute_api.API.get and return the
        Instance object it will yield.  Caller is responsible for ReplayAll.
        """
        self.mox.StubOutWithMock(compute_api.API, 'get')
        if uuid is None:
            uuid = uuidutils.generate_uuid()
        instance = fake_instance.fake_db_instance(
            id=1, uuid=uuid, vm_state=vm_states.ACTIVE, task_state=None)
        instance = instance_obj.Instance._from_db_object(
            self.context, instance_obj.Instance(), instance)
        self.compute_api.get(self.context, uuid,
                             want_objects=True,
                             expected_attrs=['pci_devices']).AndReturn(
                                 instance)
        return instance

    def _test_locked_instance(self, action, method=None, body_map=None,
                              compute_api_args_map=None):
        """Verify ``action`` returns 409 when the compute API raises
        InstanceIsLocked.  ``method`` defaults to the action name; the maps
        supply the request body and the expected compute-API call arguments.
        """
        if method is None:
            method = action
        if body_map is None:
            body_map = {}
        if compute_api_args_map is None:
            compute_api_args_map = {}

        instance = self._stub_instance_get()
        args, kwargs = compute_api_args_map.get(action, ((), {}))

        getattr(compute_api.API, method)(self.context, instance,
                                         *args, **kwargs).AndRaise(
            exception.InstanceIsLocked(instance_uuid=instance['uuid']))

        self.mox.ReplayAll()

        res = self._make_request('/servers/%s/action' % instance['uuid'],
                                 {action: body_map.get(action)})
        self.assertEqual(409, res.status_int)
        # Do these here instead of tearDown because this method is called
        # more than once for the same test case
        self.mox.VerifyAll()
        self.mox.UnsetStubs()

    def test_actions_with_locked_instance(self):
        actions = ['resize', 'confirm_resize', 'revert_resize', 'reboot',
                   'rebuild']

        body_map = {'resize': {'flavor_ref': '2'},
                    'reboot': {'type': 'HARD'},
                    'rebuild': {'image_ref': self.image_uuid,
                                'admin_password': 'TNc53Dr8s7vw'}}

        # NOTE(review): ('2') is not a tuple (no trailing comma) — it is the
        # bare string '2', so resize is expected to be called with *'2'
        # unpacked... actually *args of a str iterates chars; presumably this
        # matches the original intent — confirm against the resize signature.
        args_map = {'resize': (('2'), {}),
                    'confirm_resize': ((), {}),
                    'reboot': (('HARD',), {}),
                    'rebuild': ((self.image_uuid, 'TNc53Dr8s7vw'), {})}

        for action in actions:
            self.mox.StubOutWithMock(compute_api.API, action)
            self._test_locked_instance(action, method=None,
                                       body_map=body_map,
                                       compute_api_args_map=args_map)

    def test_reboot_hard(self):
        body = dict(reboot=dict(type="HARD"))
        req = fakes.HTTPRequestV3.blank(self.url)
        self.controller._action_reboot(req, FAKE_UUID, body)

    def test_reboot_soft(self):
        body = dict(reboot=dict(type="SOFT"))
        req = fakes.HTTPRequestV3.blank(self.url)
        self.controller._action_reboot(req, FAKE_UUID, body)

    def test_reboot_incorrect_type(self):
        body = dict(reboot=dict(type="NOT_A_TYPE"))
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)

    def test_reboot_missing_type(self):
        body = dict(reboot=dict())
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)

    def test_reboot_none(self):
        body = dict(reboot=dict(type=None))
        # NOTE(review): uses HTTPRequest rather than HTTPRequestV3 like the
        # surrounding tests — possibly unintentional; verify.
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)

    def test_reboot_not_found(self):
        self.stubs.Set(db, 'instance_get_by_uuid',
                       return_server_not_found)

        body = dict(reboot=dict(type="HARD"))
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_reboot,
                          req, str(uuid.uuid4()), body)

    def test_reboot_raises_conflict_on_invalid_state(self):
        body = dict(reboot=dict(type="HARD"))

        def fake_reboot(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                state='fake_state', method='fake_method',
                instance_uuid='fake')

        self.stubs.Set(compute_api.API, 'reboot', fake_reboot)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)

    def test_reboot_soft_with_soft_in_progress_raises_conflict(self):
        body = dict(reboot=dict(type="SOFT"))
        req = fakes.HTTPRequestV3.blank(self.url)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(
                           vm_state=vm_states.ACTIVE,
                           task_state=task_states.REBOOTING))
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)

    def test_reboot_hard_with_soft_in_progress_does_not_raise(self):
        # A HARD reboot may pre-empt an in-flight SOFT reboot.
        body = dict(reboot=dict(type="HARD"))
        req = fakes.HTTPRequestV3.blank(self.url)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(
                           vm_state=vm_states.ACTIVE,
                           task_state=task_states.REBOOTING))
        self.controller._action_reboot(req, FAKE_UUID, body)

    def test_reboot_hard_with_hard_in_progress_raises_conflict(self):
        body = dict(reboot=dict(type="HARD"))
        req = fakes.HTTPRequestV3.blank(self.url)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(
                           vm_state=vm_states.ACTIVE,
                           task_state=task_states.REBOOTING_HARD))
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)

    def test_rebuild_accepted_minimum(self):
        return_server = fakes.fake_instance_get(image_ref='2',
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        self_href = 'http://localhost/v3/servers/%s' % FAKE_UUID

        body = {
            "rebuild": {
                "image_ref": self._image_href,
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        robj = self.controller._action_rebuild(req, FAKE_UUID, body=body)
        body = robj.obj

        self.assertEqual(body['server']['image']['id'], '2')
        # A generated admin password of the configured length is returned.
        self.assertEqual(len(body['server']['admin_password']),
                         CONF.password_length)

        self.assertEqual(robj['location'], self_href)

    def test_rebuild_instance_with_image_uuid(self):
        info = dict(image_href_in_call=None)

        def rebuild(self2, context, instance, image_href, *args, **kwargs):
            info['image_href_in_call'] = image_href

        self.stubs.Set(db, 'instance_get',
                fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
        self.stubs.Set(compute_api.API, 'rebuild', rebuild)

        # proper local hrefs must start with 'http://localhost/v3/'
        body = {
            'rebuild': {
                'image_ref': self.image_uuid,
            },
        }

        req = fakes.HTTPRequestV3.blank('/v3/servers/a/action')
        self.controller._action_rebuild(req, FAKE_UUID, body=body)
        self.assertEqual(info['image_href_in_call'], self.image_uuid)

    def test_rebuild_instance_with_image_href_uses_uuid(self):
        # A full image href is reduced to its uuid before reaching compute.
        info = dict(image_href_in_call=None)

        def rebuild(self2, context, instance, image_href, *args, **kwargs):
            info['image_href_in_call'] = image_href

        self.stubs.Set(db, 'instance_get',
                fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
        self.stubs.Set(compute_api.API, 'rebuild', rebuild)

        # proper local hrefs must start with 'http://localhost/v3/'
        body = {
            'rebuild': {
                'image_ref': self.image_href,
            },
        }

        req = fakes.HTTPRequestV3.blank('/v3/servers/a/action')
        self.controller._action_rebuild(req, FAKE_UUID, body=body)
        self.assertEqual(info['image_href_in_call'], self.image_uuid)

    def test_rebuild_accepted_minimum_pass_disabled(self):
        # run with enable_instance_password disabled to verify admin_password
        # is missing from response. See lp bug 921814
        self.flags(enable_instance_password=False)

        return_server = fakes.fake_instance_get(image_ref='2',
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        self_href = 'http://localhost/v3/servers/%s' % FAKE_UUID

        body = {
            "rebuild": {
                "image_ref": self._image_href,
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        robj = self.controller._action_rebuild(req, FAKE_UUID, body=body)
        body = robj.obj

        self.assertEqual(body['server']['image']['id'], '2')
        self.assertNotIn("admin_password", body['server'])

        self.assertEqual(robj['location'], self_href)

    def test_rebuild_raises_conflict_on_invalid_state(self):
        body = {
            "rebuild": {
                "image_ref": self._image_href,
            },
        }

        def fake_rebuild(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                state='fake_state', method='fake_method',
                instance_uuid='fake')

        self.stubs.Set(compute_api.API, 'rebuild', fake_rebuild)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body=body)

    def test_rebuild_accepted_with_metadata(self):
        metadata = {'new': 'metadata'}

        return_server = fakes.fake_instance_get(metadata=metadata,
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)

        body = {
            "rebuild": {
                "image_ref": self._image_href,
                "metadata": metadata,
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        body = self.controller._action_rebuild(req, FAKE_UUID,
                                               body=body).obj

        self.assertEqual(body['server']['metadata'], metadata)

    def test_rebuild_accepted_with_bad_metadata(self):
        # metadata must be a dict, not a string.
        body = {
            "rebuild": {
                "image_ref": self._image_href,
                "metadata": "stack",
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body=body)

    def test_rebuild_with_too_large_metadata(self):
        # A 256-char metadata key exceeds the allowed key length.
        body = {
            "rebuild": {
                "image_ref": self._image_href,
                "metadata": {
                    256 * "k": "value"
                }
            }
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body=body)

    def test_rebuild_bad_entity(self):
        # 'imageId' is not a valid key; the schema expects 'image_ref'.
        body = {
            "rebuild": {
                "imageId": self._image_href,
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body=body)

    def test_rebuild_admin_password(self):
        return_server = fakes.fake_instance_get(image_ref='2',
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)

        body = {
            "rebuild": {
                "image_ref": self._image_href,
                "admin_password": "asdf",
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        body = self.controller._action_rebuild(req, FAKE_UUID,
                                               body=body).obj

        self.assertEqual(body['server']['image']['id'], '2')
        self.assertEqual(body['server']['admin_password'], 'asdf')

    def test_rebuild_admin_password_pass_disabled(self):
        # run with enable_instance_password disabled to verify admin_password
        # is missing from response. See lp bug 921814
        self.flags(enable_instance_password=False)

        return_server = fakes.fake_instance_get(image_ref='2',
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)

        body = {
            "rebuild": {
                "image_ref": self._image_href,
                "admin_password": "asdf",
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        body = self.controller._action_rebuild(req, FAKE_UUID,
                                               body=body).obj

        self.assertEqual(body['server']['image']['id'], '2')
        self.assertNotIn('admin_password', body['server'])

    def test_rebuild_server_not_found(self):
        def server_not_found(self, instance_id,
                             columns_to_join=None, use_slave=False):
            raise exception.InstanceNotFound(instance_id=instance_id)
        self.stubs.Set(db, 'instance_get_by_uuid', server_not_found)

        body = {
            "rebuild": {
                "image_ref": self._image_href,
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body=body)

    def test_rebuild_with_bad_image(self):
        body = {
            "rebuild": {
                "image_ref": "foo",
            },
        }
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body=body)

    def test_rebuild_when_kernel_not_exists(self):
        # The target image references kernel_id 1, which is absent from the
        # stub image table, so the rebuild must be rejected.

        def return_image_meta(*args, **kwargs):
            image_meta_table = {
                '2': {'id': 2, 'status': 'active',
                      'container_format': 'ari'},
                '155d900f-4e14-4e4c-a73d-069cbf4541e6':
                     {'id': 3, 'status': 'active',
                      'container_format': 'raw',
                      'properties': {'kernel_id': 1, 'ramdisk_id': 2}},
            }
            image_id = args[2]
            try:
                image_meta = image_meta_table[str(image_id)]
            except KeyError:
                raise exception.ImageNotFound(image_id=image_id)

            return image_meta

        self.stubs.Set(fake._FakeImageService, 'show', return_image_meta)
        body = {
            "rebuild": {
                "image_ref": "155d900f-4e14-4e4c-a73d-069cbf4541e6",
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body=body)

    def test_rebuild_proper_kernel_ram(self):
        # After rebuild the instance must pick up the kernel_id/ramdisk_id
        # from the new image's properties.
        instance_meta = {'kernel_id': None, 'ramdisk_id': None}

        orig_get = compute_api.API.get

        def wrap_get(*args, **kwargs):
            # Capture the Instance object compute_api.API.get returns so the
            # fake save below can inspect what changed on it.
            inst = orig_get(*args, **kwargs)
            instance_meta['instance'] = inst
            return inst

        def fake_save(context, **kwargs):
            instance = instance_meta['instance']
            for key in instance_meta.keys():
                if key in instance.obj_what_changed():
                    instance_meta[key] = instance[key]

        def return_image_meta(*args, **kwargs):
            image_meta_table = {
                '1': {'id': 1, 'status': 'active',
                      'container_format': 'aki'},
                '2': {'id': 2, 'status': 'active',
                      'container_format': 'ari'},
                '155d900f-4e14-4e4c-a73d-069cbf4541e6':
                     {'id': 3, 'status': 'active',
                      'container_format': 'raw',
                      'properties': {'kernel_id': 1, 'ramdisk_id': 2}},
            }
            image_id = args[2]
            try:
                image_meta = image_meta_table[str(image_id)]
            except KeyError:
                raise exception.ImageNotFound(image_id=image_id)

            return image_meta

        self.stubs.Set(fake._FakeImageService, 'show', return_image_meta)
        self.stubs.Set(compute_api.API, 'get', wrap_get)
        self.stubs.Set(instance_obj.Instance, 'save', fake_save)
        body = {
            "rebuild": {
                "image_ref": "155d900f-4e14-4e4c-a73d-069cbf4541e6",
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        self.controller._action_rebuild(req, FAKE_UUID, body=body).obj

        self.assertEqual(instance_meta['kernel_id'], '1')
        self.assertEqual(instance_meta['ramdisk_id'], '2')

    def _test_rebuild_preserve_ephemeral(self, value=None):
        """Verify preserve_ephemeral is forwarded to compute_api.rebuild
        only when the request body supplies it (value=None means omitted).
        """
        return_server = fakes.fake_instance_get(image_ref='2',
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)

        body = {
            "rebuild": {
                "image_ref": self._image_href,
            },
        }
        if value is not None:
            body['rebuild']['preserve_ephemeral'] = value

        req = fakes.HTTPRequestV3.blank(self.url)
        context = req.environ['nova.context']

        self.mox.StubOutWithMock(compute_api.API, 'rebuild')
        if value is not None:
            compute_api.API.rebuild(context, mox.IgnoreArg(),
                                    self._image_href,
                                    mox.IgnoreArg(),
                                    preserve_ephemeral=value)
        else:
            compute_api.API.rebuild(context, mox.IgnoreArg(),
                                    self._image_href, mox.IgnoreArg())
        self.mox.ReplayAll()

        self.controller._action_rebuild(req, FAKE_UUID, body=body)

    def test_rebuild_preserve_ephemeral_true(self):
        self._test_rebuild_preserve_ephemeral(True)

    def test_rebuild_preserve_ephemeral_false(self):
        self._test_rebuild_preserve_ephemeral(False)

    def test_rebuild_preserve_ephemeral_default(self):
        self._test_rebuild_preserve_ephemeral()

    def test_resize_server(self):
        body = dict(resize=dict(flavor_ref="http://localhost/3"))

        self.resize_called = False

        def resize_mock(*args):
            self.resize_called = True

        self.stubs.Set(compute_api.API, 'resize', resize_mock)

        req = fakes.HTTPRequestV3.blank(self.url)
        body = self.controller._action_resize(req, FAKE_UUID, body)

        self.assertEqual(self.resize_called, True)

    def test_resize_server_no_flavor(self):
        body = dict(resize=dict())
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)

    def test_resize_server_no_flavor_ref(self):
        body = dict(resize=dict(flavor_ref=None))
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)

    def test_resize_with_server_not_found(self):
        body = dict(resize=dict(flavor_ref="http://localhost/3"))

        self.stubs.Set(compute_api.API, 'get', return_server_not_found)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)

    def test_resize_with_image_exceptions(self):
        # Each compute-layer image exception must map to the corresponding
        # HTTP error; iterate through the (raised, expected) pairs in order.
        body = dict(resize=dict(flavor_ref="http://localhost/3"))
        self.resize_called = 0
        image_id = 'fake_image_id'
        exceptions = [
            (exception.ImageNotAuthorized(image_id=image_id),
             webob.exc.HTTPUnauthorized),
            (exception.ImageNotFound(image_id=image_id),
             webob.exc.HTTPBadRequest),
            (exception.Invalid, webob.exc.HTTPBadRequest),
        ]

        raised, expected = map(iter, zip(*exceptions))

        def _fake_resize(obj, context, instance, flavor_id):
            self.resize_called += 1
            raise raised.next()

        self.stubs.Set(compute_api.API, 'resize', _fake_resize)

        for call_no in range(len(exceptions)):
            req = fakes.HTTPRequestV3.blank(self.url)
            self.assertRaises(expected.next(),
                              self.controller._action_resize,
                              req, FAKE_UUID, body)
            self.assertEqual(self.resize_called, call_no + 1)

    def test_resize_with_too_many_instances(self):
        body = dict(resize=dict(flavor_ref="http://localhost/3"))

        def fake_resize(*args, **kwargs):
            raise exception.TooManyInstances(message="TooManyInstance")

        self.stubs.Set(compute_api.API, 'resize', fake_resize)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)

    def test_resize_raises_conflict_on_invalid_state(self):
        body = dict(resize=dict(flavor_ref="http://localhost/3"))

        def fake_resize(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                state='fake_state', method='fake_method',
                instance_uuid='fake')

        self.stubs.Set(compute_api.API, 'resize', fake_resize)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)

    def test_confirm_resize_server(self):
        body = dict(confirm_resize=None)

        self.confirm_resize_called = False

        def cr_mock(*args):
            self.confirm_resize_called = True

        self.stubs.Set(compute_api.API, 'confirm_resize', cr_mock)

        req = fakes.HTTPRequestV3.blank(self.url)
        body = self.controller._action_confirm_resize(req, FAKE_UUID, body)

        self.assertEqual(self.confirm_resize_called, True)

    def test_confirm_resize_migration_not_found(self):
        body = dict(confirm_resize=None)

        def confirm_resize_mock(*args):
            raise exception.MigrationNotFoundByStatus(instance_id=1,
                                                      status='finished')

        self.stubs.Set(compute_api.API,
                       'confirm_resize',
                       confirm_resize_mock)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_confirm_resize,
                          req, FAKE_UUID, body)

    def test_confirm_resize_raises_conflict_on_invalid_state(self):
        body = dict(confirm_resize=None)

        def fake_confirm_resize(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                state='fake_state', method='fake_method',
                instance_uuid='fake')

        self.stubs.Set(compute_api.API, 'confirm_resize',
                       fake_confirm_resize)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_confirm_resize,
                          req, FAKE_UUID, body)

    def test_revert_resize_migration_not_found(self):
        body = dict(revertResize=None)

        def revert_resize_mock(*args):
            raise exception.MigrationNotFoundByStatus(instance_id=1,
                                                      status='finished')

        self.stubs.Set(compute_api.API,
                       'revert_resize',
                       revert_resize_mock)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_revert_resize,
                          req, FAKE_UUID, body)

    def test_revert_resize_server_not_found(self):
        body = dict(revertResize=None)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_revert_resize,
                          req, "bad_server_id", body)

    def test_revert_resize_server(self):
        body = dict(revertResize=None)

        self.revert_resize_called = False

        def revert_mock(*args):
            self.revert_resize_called = True

        self.stubs.Set(compute_api.API, 'revert_resize', revert_mock)

        req = fakes.HTTPRequestV3.blank(self.url)
        body = self.controller._action_revert_resize(req, FAKE_UUID, body)

        self.assertEqual(self.revert_resize_called, True)

    def test_revert_resize_raises_conflict_on_invalid_state(self):
        body = dict(revertResize=None)

        def fake_revert_resize(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                state='fake_state', method='fake_method',
                instance_uuid='fake')

        self.stubs.Set(compute_api.API, 'revert_resize',
                       fake_revert_resize)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_revert_resize,
                          req, FAKE_UUID, body)

    def test_create_image(self):
        body = {
            'create_image': {
                'name': 'Snapshot 1',
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        response = self.controller._action_create_image(req, FAKE_UUID,
                                                        body)

        location = response.headers['Location']
        self.assertEqual(glance.generate_image_url('123'), location)

    def test_create_image_name_too_long(self):
        long_name = 'a' * 260
        body = {
            'create_image': {
                'name': long_name,
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image, req,
                          FAKE_UUID, body)

    def _do_test_create_volume_backed_image(self, extra_properties):
        """Snapshot a volume-backed instance and verify the resulting image
        carries the bdm/kernel/ramdisk properties plus ``extra_properties``.
        """

        def _fake_id(x):
            # Builds a deterministic uuid-shaped string from one character.
            return '%s-%s-%s-%s' % (x * 8, x * 4, x * 4, x * 12)

        body = dict(create_image=dict(name='snapshot_of_volume_backed'))

        if extra_properties:
            body['create_image']['metadata'] = extra_properties

        image_service = glance.get_default_image_service()

        bdm = [dict(volume_id=_fake_id('a'),
                    volume_size=1,
                    device_name='vda',
                    delete_on_termination=False)]
        props = dict(kernel_id=_fake_id('b'),
                     ramdisk_id=_fake_id('c'),
                     root_device_name='/dev/vda',
                     block_device_mapping=bdm)
        original_image = dict(properties=props,
                              container_format='ami',
                              status='active',
                              is_public=True)

        image_service.create(None, original_image)

        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': _fake_id('a'),
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'vda',
                         'snapshot_id': 1,
                         'boot_index': 0,
                         'delete_on_termination': False,
                         'no_device': None})]

        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)

        instance = fakes.fake_instance_get(
            image_ref=original_image['id'],
            vm_state=vm_states.ACTIVE,
            root_device_name='/dev/vda')
        self.stubs.Set(db, 'instance_get_by_uuid', instance)

        volume = dict(id=_fake_id('a'),
                      size=1,
                      host='fake',
                      display_description='fake')
        snapshot = dict(id=_fake_id('d'))
        self.mox.StubOutWithMock(self.controller.compute_api,
                                 'volume_api')
        volume_api = self.controller.compute_api.volume_api
        volume_api.get(mox.IgnoreArg(), volume['id']).AndReturn(volume)
        volume_api.create_snapshot_force(mox.IgnoreArg(), volume['id'],
                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(snapshot)
        self.mox.ReplayAll()

        req = fakes.HTTPRequestV3.blank(self.url)
        response = self.controller._action_create_image(req, FAKE_UUID,
                                                        body)

        location = response.headers['Location']
        image_id = location.replace(glance.generate_image_url(''), '')
        image = image_service.show(None, image_id)

        self.assertEqual(image['name'], 'snapshot_of_volume_backed')
        properties = image['properties']
        self.assertEqual(properties['kernel_id'], _fake_id('b'))
        self.assertEqual(properties['ramdisk_id'], _fake_id('c'))
        self.assertEqual(properties['root_device_name'], '/dev/vda')
        self.assertEqual(properties['bdm_v2'], True)
        bdms = properties['block_device_mapping']
        self.assertEqual(len(bdms), 1)
        self.assertEqual(bdms[0]['boot_index'], 0)
        self.assertEqual(bdms[0]['source_type'], 'snapshot')
        self.assertEqual(bdms[0]['destination_type'], 'volume')
        self.assertEqual(bdms[0]['snapshot_id'], snapshot['id'])
        # Per-instance fields must be stripped from the snapshotted bdm.
        for fld in ('connection_info', 'id',
                    'instance_uuid', 'device_name'):
            self.assertTrue(fld not in bdms[0])
        for k in extra_properties.keys():
            self.assertEqual(properties[k], extra_properties[k])

    def test_create_volume_backed_image_no_metadata(self):
        self._do_test_create_volume_backed_image({})

    def test_create_volume_backed_image_with_metadata(self):
        self._do_test_create_volume_backed_image(dict(ImageType='Gold',
                                                      ImageVersion='2.0'))

    def test_create_volume_backed_image_with_metadata_from_volume(self):
        # Image metadata stored on the source volume must be copied into
        # the snapshot image's properties.

        def _fake_id(x):
            return '%s-%s-%s-%s' % (x * 8, x * 4, x * 4, x * 12)

        body = dict(create_image=dict(name='snapshot_of_volume_backed'))

        image_service = glance.get_default_image_service()

        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': _fake_id('a'),
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'vda',
                         'snapshot_id': 1,
                         'boot_index': 0,
                         'delete_on_termination': False,
                         'no_device': None})]

        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)

        instance = fakes.fake_instance_get(
            image_ref='',
            vm_state=vm_states.ACTIVE,
            root_device_name='/dev/vda')
        self.stubs.Set(db, 'instance_get_by_uuid', instance)

        fake_metadata = {'test_key1': 'test_value1',
                         'test_key2': 'test_value2'}
        volume = dict(id=_fake_id('a'),
                      size=1,
                      host='fake',
                      display_description='fake',
                      volume_image_metadata=fake_metadata)
        snapshot = dict(id=_fake_id('d'))
        self.mox.StubOutWithMock(self.controller.compute_api,
                                 'volume_api')
        volume_api = self.controller.compute_api.volume_api
        # get() is expected twice: once for metadata, once for the snapshot.
        volume_api.get(mox.IgnoreArg(), volume['id']).AndReturn(volume)
        volume_api.get(mox.IgnoreArg(), volume['id']).AndReturn(volume)
        volume_api.create_snapshot_force(mox.IgnoreArg(), volume['id'],
                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(snapshot)

        req = fakes.HTTPRequestV3.blank(self.url)
        self.mox.ReplayAll()

        response = self.controller._action_create_image(req, FAKE_UUID,
                                                        body)

        location = response.headers['Location']
        image_id = location.replace('http://localhost:9292/images/', '')
        image = image_service.show(None, image_id)

        properties = image['properties']
        self.assertEqual(properties['test_key1'], 'test_value1')
        self.assertEqual(properties['test_key2'], 'test_value2')

    def test_create_image_snapshots_disabled(self):
        """Don't permit a snapshot if the allow_instance_snapshots flag is
        False
        """
        self.flags(allow_instance_snapshots=False)
        body = {
            'create_image': {
                'name': 'Snapshot 1',
            },
        }
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)

    def test_create_image_with_metadata(self):
        body = {
            'create_image': {
                'name': 'Snapshot 1',
                'metadata': {'key': 'asdf'},
            },
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        response = self.controller._action_create_image(req, FAKE_UUID,
                                                        body)

        location = response.headers['Location']
        self.assertEqual(glance.generate_image_url('123'), location)

    def test_create_image_with_too_much_metadata(self):
        body = {
            'create_image': {
                'name': 'Snapshot 1',
                'metadata': {},
            },
        }
        # One item past the quota limit.
        for num in range(CONF.quota_metadata_items + 1):
            body['create_image']['metadata']['foo%i' % num] = "bar"

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)

    def test_create_image_no_name(self):
        body = {
            'create_image': {},
        }

        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)

    def test_create_image_blank_name(self):
        body = {
            'create_image': {
                'name': '',
            }
        }
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)

    def test_create_image_bad_metadata(self):
        body = {
            'create_image': {
                'name': 'geoff',
                'metadata': 'henry',
            },
        }
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)

    def test_create_image_raises_conflict_on_invalid_state(self):
        def snapshot(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                state='fake_state', method='fake_method',
                instance_uuid='fake')
        self.stubs.Set(compute_api.API, 'snapshot', snapshot)

        body = {
            "create_image": {
                "name": "test_snapshot",
            },
        }
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)
def make_fault(elem):
    """Attach the 'fault' sub-template (code/created/message/details)."""
    fault = xmlutil.SubTemplateElement(elem, 'fault', selector='fault')
    for attr in ('code', 'created'):
        fault.set(attr)
    msg = xmlutil.SubTemplateElement(fault, 'message')
    msg.text = 'message'
    det = xmlutil.SubTemplateElement(fault, 'details')
    det.text = 'details'


def make_server(elem, detailed=False):
    """Populate a server XML template element.

    With detailed=False only name/id plus links are emitted; detailed=True
    adds the full attribute set and the image, flavor, fault, metadata and
    addresses sub-nodes.
    """
    elem.set('name')
    elem.set('id')

    # Emit the deprecation warning only once per process.
    global XML_WARNING
    if not XML_WARNING:
        LOG.warn(_LW('XML support has been deprecated and may be removed '
                     'as early as the Juno release.'))
        XML_WARNING = True

    if detailed:
        elem.set('userId', 'user_id')
        elem.set('tenantId', 'tenant_id')
        for attr in ('updated', 'created', 'hostId', 'accessIPv4',
                     'accessIPv6', 'status', 'progress', 'reservation_id'):
            elem.set(attr)

        # Image reference with its links.
        image = xmlutil.SubTemplateElement(elem, 'image', selector='image')
        image.set('id')
        xmlutil.make_links(image, 'links')

        # Flavor reference with its links.
        flavor = xmlutil.SubTemplateElement(elem, 'flavor', selector='flavor')
        flavor.set('id')
        xmlutil.make_links(flavor, 'links')

        # Fault sub-node (rendered from the 'fault' selector).
        make_fault(elem)

        # Metadata and addresses sub-documents.
        elem.append(common.MetadataTemplate())
        elem.append(ips.AddressesTemplate())

    xmlutil.make_links(elem, 'links')
class ServerAdminPassTemplate(xmlutil.TemplateBuilder):
    """Slave template exposing only the adminPass attribute."""

    def construct(self):
        server_root = xmlutil.TemplateElement('server')
        server_root.set('adminPass')
        return xmlutil.SlaveTemplate(server_root, 1, nsmap=server_nsmap)


class ServerMultipleCreateTemplate(xmlutil.TemplateBuilder):
    """Master template for multiple-create responses (reservation_id)."""

    def construct(self):
        server_root = xmlutil.TemplateElement('server')
        server_root.set('reservation_id')
        return xmlutil.MasterTemplate(server_root, 1, nsmap=server_nsmap)


def FullServerTemplate():
    """Build the detailed server template with adminPass attached."""
    master = ServerTemplate()
    master.attach(ServerAdminPassTemplate())
    return master
metadata_node = self.find_first_child_named(server_node, "metadata") if metadata_node is not None: server["metadata"] = self.extract_metadata(metadata_node) user_data_node = self.find_first_child_named(server_node, "user_data") if user_data_node is not None: server["user_data"] = self.extract_text(user_data_node) personality = self._extract_personality(server_node) if personality is not None: server["personality"] = personality networks = self._extract_networks(server_node) if networks is not None: server["networks"] = networks security_groups = self._extract_security_groups(server_node) if security_groups is not None: server["security_groups"] = security_groups # NOTE(vish): this is not namespaced in json, so leave it without a # namespace for now block_device_mapping = self._extract_block_device_mapping(server_node) if block_device_mapping is not None: server["block_device_mapping"] = block_device_mapping block_device_mapping_v2 = self._extract_block_device_mapping_v2( server_node) if block_device_mapping_v2 is not None: server["block_device_mapping_v2"] = block_device_mapping_v2 # NOTE(vish): Support this incorrect version because it was in the code # base for a while and we don't want to accidentally break # anyone that might be using it. 
auto_disk_config = server_node.getAttribute('auto_disk_config') if auto_disk_config: server['OS-DCF:diskConfig'] = auto_disk_config auto_disk_config = server_node.getAttribute('OS-DCF:diskConfig') if auto_disk_config: server['OS-DCF:diskConfig'] = auto_disk_config config_drive = server_node.getAttribute('config_drive') if config_drive: server['config_drive'] = config_drive return server def _extract_block_device_mapping(self, server_node): """Marshal the block_device_mapping node of a parsed request.""" node = self.find_first_child_named(server_node, "block_device_mapping") if node: block_device_mapping = [] for child in self.extract_elements(node): if child.nodeName != "mapping": continue mapping = {} attributes = ["volume_id", "snapshot_id", "device_name", "virtual_name", "volume_size"] for attr in attributes: value = child.getAttribute(attr) if value: mapping[attr] = value attributes = ["delete_on_termination", "no_device"] for attr in attributes: value = child.getAttribute(attr) if value: mapping[attr] = strutils.bool_from_string(value) block_device_mapping.append(mapping) return block_device_mapping else: return None def _extract_block_device_mapping_v2(self, server_node): """Marshal the new block_device_mappings.""" node = self.find_first_child_named(server_node, "block_device_mapping_v2") if node: block_device_mapping = [] for child in self.extract_elements(node): if child.nodeName != "mapping": continue block_device_mapping.append( dict((attr, child.getAttribute(attr)) for attr in block_device.bdm_new_api_fields if child.getAttribute(attr))) return block_device_mapping def _extract_scheduler_hints(self, server_node): """Marshal the scheduler hints attribute of a parsed request.""" node = self.find_first_child_named_in_namespace(server_node, "http://docs.openstack.org/compute/ext/scheduler-hints/api/v2", "scheduler_hints") if node: scheduler_hints = {} for child in self.extract_elements(node): scheduler_hints.setdefault(child.nodeName, []) value = 
self.extract_text(child).strip() scheduler_hints[child.nodeName].append(value) return scheduler_hints else: return None def _extract_networks(self, server_node): """Marshal the networks attribute of a parsed request.""" node = self.find_first_child_named(server_node, "networks") if node is not None: networks = [] for network_node in self.find_children_named(node, "network"): item = {} if network_node.hasAttribute("uuid"): item["uuid"] = network_node.getAttribute("uuid") if network_node.hasAttribute("fixed_ip"): item["fixed_ip"] = network_node.getAttribute("fixed_ip") if network_node.hasAttribute("port"): item["port"] = network_node.getAttribute("port") networks.append(item) return networks else: return None def _extract_security_groups(self, server_node): """Marshal the security_groups attribute of a parsed request.""" node = self.find_first_child_named(server_node, "security_groups") if node is not None: security_groups = [] for sg_node in self.find_children_named(node, "security_group"): item = {} name = self.find_attribute_or_element(sg_node, 'name') if name: item["name"] = name security_groups.append(item) return security_groups else: return None class ActionDeserializer(CommonDeserializer): """Deserializer to handle xml-formatted server action requests. 
Handles standard server attributes as well as optional metadata and personality attributes """ def default(self, string): dom = xmlutil.safe_minidom_parse_string(string) action_node = dom.childNodes[0] action_name = action_node.tagName action_deserializer = { 'createImage': self._action_create_image, 'changePassword': self._action_change_password, 'reboot': self._action_reboot, 'rebuild': self._action_rebuild, 'resize': self._action_resize, 'confirmResize': self._action_confirm_resize, 'revertResize': self._action_revert_resize, }.get(action_name, super(ActionDeserializer, self).default) action_data = action_deserializer(action_node) return {'body': {action_name: action_data}} def _action_create_image(self, node): return self._deserialize_image_action(node, ('name',)) def _action_change_password(self, node): if not node.hasAttribute("adminPass"): raise AttributeError("No adminPass was specified in request") return {"adminPass": node.getAttribute("adminPass")} def _action_reboot(self, node): if not node.hasAttribute("type"): raise AttributeError("No reboot type was specified in request") return {"type": node.getAttribute("type")} def _action_rebuild(self, node): rebuild = {} if node.hasAttribute("name"): name = node.getAttribute("name") if not name: raise AttributeError("Name cannot be blank") rebuild['name'] = name if node.hasAttribute("auto_disk_config"): rebuild['OS-DCF:diskConfig'] = node.getAttribute( "auto_disk_config") if node.hasAttribute("OS-DCF:diskConfig"): rebuild['OS-DCF:diskConfig'] = node.getAttribute( "OS-DCF:diskConfig") metadata_node = self.find_first_child_named(node, "metadata") if metadata_node is not None: rebuild["metadata"] = self.extract_metadata(metadata_node) personality = self._extract_personality(node) if personality is not None: rebuild["personality"] = personality if not node.hasAttribute("imageRef"): raise AttributeError("No imageRef was specified in request") rebuild["imageRef"] = node.getAttribute("imageRef") if 
class CreateDeserializer(CommonDeserializer):
    """Deserializer to handle xml-formatted server create requests.

    Handles standard server attributes as well as optional metadata
    and personality attributes
    """

    def default(self, string):
        """Deserialize an xml-formatted server create request."""
        dom = xmlutil.safe_minidom_parse_string(string)
        server_attrs = self._extract_server(dom)
        return {'body': {'server': server_attrs}}
    def _get_servers(self, req, is_detail):
        """Returns a list of servers, based on any search options specified.

        :param req: the wsgi request; its query parameters become filters
        :param is_detail: True for the detailed view, False for the index
        """

        search_opts = {}
        search_opts.update(req.GET)

        context = req.environ['nova.context']
        # Drop any filter keys this caller is not allowed to use.
        remove_invalid_options(context, search_opts,
                self._get_server_search_options())

        # Verify search by 'status' contains a valid status.
        # Convert it to filter by vm_state or task_state for compute_api.
        search_opts.pop('status', None)
        if 'status' in req.GET.keys():
            statuses = req.GET.getall('status')
            states = common.task_and_vm_state_from_status(statuses)
            vm_state, task_state = states
            if not vm_state and not task_state:
                # No known state maps to the requested status: empty result.
                return {'servers': []}

            search_opts['vm_state'] = vm_state
            # When we search by vm state, task state will return 'default'.
            # So we don't need task_state search_opt.
            if 'default' not in task_state:
                search_opts['task_state'] = task_state

        if 'changes-since' in search_opts:
            try:
                parsed = timeutils.parse_isotime(search_opts['changes-since'])
            except ValueError:
                msg = _('Invalid changes-since value')
                raise exc.HTTPBadRequest(explanation=msg)
            search_opts['changes-since'] = parsed

        # By default, compute's get_all() will return deleted instances.
        # If an admin hasn't specified a 'deleted' search option, we need
        # to filter out deleted instances by setting the filter ourselves.
        # ... Unless 'changes-since' is specified, because 'changes-since'
        # should return recently deleted images according to the API spec.

        if 'deleted' not in search_opts:
            if 'changes-since' not in search_opts:
                # No 'changes-since', so we only want non-deleted servers
                search_opts['deleted'] = False

        if search_opts.get("vm_state") == ['deleted']:
            # Listing deleted instances is an admin-only operation.
            if context.is_admin:
                search_opts['deleted'] = True
            else:
                msg = _("Only administrators may list deleted instances")
                raise exc.HTTPForbidden(explanation=msg)

        # If all tenants is passed with 0 or false as the value
        # then remove it from the search options. Nothing passed as
        # the value for all_tenants is considered to enable the feature
        all_tenants = search_opts.get('all_tenants')
        if all_tenants:
            try:
                if not strutils.bool_from_string(all_tenants, True):
                    del search_opts['all_tenants']
            except ValueError as err:
                raise exception.InvalidInput(six.text_type(err))

        if 'all_tenants' in search_opts:
            # 'all_tenants' is policy-gated; the key itself is not a
            # filter understood by compute_api, so drop it after the check.
            policy.enforce(context, 'compute:get_all_tenants',
                           {'project_id': context.project_id,
                            'user_id': context.user_id})
            del search_opts['all_tenants']
        else:
            # Scope the listing to the caller's project (or user).
            if context.project_id:
                search_opts['project_id'] = context.project_id
            else:
                search_opts['user_id'] = context.user_id

        limit, marker = common.get_limit_and_marker(req)
        try:
            instance_list = self.compute_api.get_all(context,
                                                     search_opts=search_opts,
                                                     limit=limit,
                                                     marker=marker,
                                                     want_objects=True)
        except exception.MarkerNotFound:
            msg = _('marker [%s] not found') % marker
            raise exc.HTTPBadRequest(explanation=msg)
        except exception.FlavorNotFound:
            # An unknown flavor filter simply matches nothing.
            LOG.debug("Flavor '%s' could not be found", search_opts['flavor'])
            instance_list = objects.InstanceList()

        if is_detail:
            instance_list.fill_faults()
            response = self._view_builder.detail(req, instance_list)
        else:
            response = self._view_builder.index(req, instance_list)

        req.cache_db_instances(instance_list)
        return response
max_length=255) def _get_injected_files(self, personality): """Create a list of injected files from the personality attribute. At this time, injected_files must be formatted as a list of (file_path, file_content) pairs for compatibility with the underlying compute service. """ injected_files = [] for item in personality: try: path = item['path'] contents = item['contents'] except KeyError as key: expl = _('Bad personality format: missing %s') % key raise exc.HTTPBadRequest(explanation=expl) except TypeError: expl = _('Bad personality format') raise exc.HTTPBadRequest(explanation=expl) if self._decode_base64(contents) is None: expl = _('Personality content for %s cannot be decoded') % path raise exc.HTTPBadRequest(explanation=expl) injected_files.append((path, contents)) return injected_files def _get_requested_networks(self, requested_networks): """Create a list of requested networks from the networks attribute.""" networks = [] network_uuids = [] for network in requested_networks: request = objects.NetworkRequest() try: try: request.port_id = network.get('port', None) except ValueError: msg = _("Bad port format: port uuid is " "not in proper format " "(%s)") % network.get('port') raise exc.HTTPBadRequest(explanation=msg) if request.port_id: request.network_id = None if not utils.is_neutron(): # port parameter is only for neutron v2.0 msg = _("Unknown argument : port") raise exc.HTTPBadRequest(explanation=msg) else: request.network_id = network['uuid'] if (not request.port_id and not uuidutils.is_uuid_like(request.network_id)): br_uuid = request.network_id.split('-', 1)[-1] if not uuidutils.is_uuid_like(br_uuid): msg = _("Bad networks format: network uuid is " "not in proper format " "(%s)") % request.network_id raise exc.HTTPBadRequest(explanation=msg) # fixed IP address is optional # if the fixed IP address is not provided then # it will use one of the available IP address from the network try: request.address = network.get('fixed_ip', None) except ValueError: 
msg = _("Invalid fixed IP address (%s)") % request.address raise exc.HTTPBadRequest(explanation=msg) if (request.network_id and request.network_id in network_uuids): expl = (_("Duplicate networks" " (%s) are not allowed") % request.network_id) raise exc.HTTPBadRequest(explanation=expl) network_uuids.append(request.network_id) networks.append(request) except KeyError as key: expl = _('Bad network format: missing %s') % key raise exc.HTTPBadRequest(explanation=expl) except TypeError: expl = _('Bad networks format') raise exc.HTTPBadRequest(explanation=expl) return objects.NetworkRequestList(objects=networks) # NOTE(vish): Without this regex, b64decode will happily # ignore illegal bytes in the base64 encoded # data. B64_REGEX = re.compile('^(?:[A-Za-z0-9+\/]{4})*' '(?:[A-Za-z0-9+\/]{2}==' '|[A-Za-z0-9+\/]{3}=)?$') def _decode_base64(self, data): data = re.sub(r'\s', '', data) if not self.B64_REGEX.match(data): return None try: return base64.b64decode(data) except TypeError: return None def _validate_user_data(self, user_data): """Check if the user_data is encoded properly.""" if not user_data: return if self._decode_base64(user_data) is None: expl = _('Userdata content cannot be decoded') raise exc.HTTPBadRequest(explanation=expl) def _validate_access_ipv4(self, address): if not utils.is_valid_ipv4(address): expl = _('accessIPv4 is not proper IPv4 format') raise exc.HTTPBadRequest(explanation=expl) def _validate_access_ipv6(self, address): if not utils.is_valid_ipv6(address): expl = _('accessIPv6 is not proper IPv6 format') raise exc.HTTPBadRequest(explanation=expl) @wsgi.serializers(xml=ServerTemplate) def show(self, req, id): """Returns server details by server id.""" try: context = req.environ['nova.context'] instance = self.compute_api.get(context, id, want_objects=True) req.cache_db_instance(instance) return self._view_builder.show(req, instance) except exception.NotFound: msg = _("Instance could not be found") raise exc.HTTPNotFound(explanation=msg) 
@wsgi.response(202) @wsgi.serializers(xml=FullServerTemplate) @wsgi.deserializers(xml=CreateDeserializer) def create(self, req, body): """Creates a new server for a given user.""" if not self.is_valid_body(body, 'server'): raise exc.HTTPUnprocessableEntity() context = req.environ['nova.context'] server_dict = body['server'] password = self._get_server_admin_password(server_dict) if 'name' not in server_dict: msg = _("Server name is not defined") raise exc.HTTPBadRequest(explanation=msg) name = server_dict['name'] self._validate_server_name(name) name = name.strip() image_uuid = self._image_from_req_data(body) personality = server_dict.get('personality') config_drive = None if self.ext_mgr.is_loaded('os-config-drive'): config_drive = server_dict.get('config_drive') injected_files = [] if personality: injected_files = self._get_injected_files(personality) sg_names = [] if self.ext_mgr.is_loaded('os-security-groups'): security_groups = server_dict.get('security_groups') if security_groups is not None: sg_names = [sg['name'] for sg in security_groups if sg.get('name')] if not sg_names: sg_names.append('default') sg_names = list(set(sg_names)) requested_networks = None if (self.ext_mgr.is_loaded('os-networks') or utils.is_neutron()): requested_networks = server_dict.get('networks') if requested_networks is not None: if not isinstance(requested_networks, list): expl = _('Bad networks format') raise exc.HTTPBadRequest(explanation=expl) requested_networks = self._get_requested_networks( requested_networks) (access_ip_v4, ) = server_dict.get('accessIPv4'), if access_ip_v4 is not None: self._validate_access_ipv4(access_ip_v4) (access_ip_v6, ) = server_dict.get('accessIPv6'), if access_ip_v6 is not None: self._validate_access_ipv6(access_ip_v6) try: flavor_id = self._flavor_id_from_req_data(body) except ValueError as error: msg = _("Invalid flavorRef provided.") raise exc.HTTPBadRequest(explanation=msg) # optional openstack extensions: key_name = None if 
self.ext_mgr.is_loaded('os-keypairs'): key_name = server_dict.get('key_name') user_data = None if self.ext_mgr.is_loaded('os-user-data'): user_data = server_dict.get('user_data') self._validate_user_data(user_data) availability_zone = None if self.ext_mgr.is_loaded('os-availability-zone'): availability_zone = server_dict.get('availability_zone') block_device_mapping = None block_device_mapping_v2 = None legacy_bdm = True if self.ext_mgr.is_loaded('os-volumes'): block_device_mapping = server_dict.get('block_device_mapping', []) for bdm in block_device_mapping: try: block_device.validate_device_name(bdm.get("device_name")) block_device.validate_and_default_volume_size(bdm) except exception.InvalidBDMFormat as e: raise exc.HTTPBadRequest(explanation=e.format_message()) if 'delete_on_termination' in bdm: bdm['delete_on_termination'] = strutils.bool_from_string( bdm['delete_on_termination']) if self.ext_mgr.is_loaded('os-block-device-mapping-v2-boot'): # Consider the new data format for block device mapping block_device_mapping_v2 = server_dict.get( 'block_device_mapping_v2', []) # NOTE (ndipanov): Disable usage of both legacy and new # block device format in the same request if block_device_mapping and block_device_mapping_v2: expl = _('Using different block_device_mapping syntaxes ' 'is not allowed in the same request.') raise exc.HTTPBadRequest(explanation=expl) # Assume legacy format legacy_bdm = not bool(block_device_mapping_v2) try: block_device_mapping_v2 = [ block_device.BlockDeviceDict.from_api(bdm_dict) for bdm_dict in block_device_mapping_v2] except exception.InvalidBDMFormat as e: raise exc.HTTPBadRequest(explanation=e.format_message()) block_device_mapping = (block_device_mapping or block_device_mapping_v2) ret_resv_id = False # min_count and max_count are optional. If they exist, they may come # in as strings. Verify that they are valid integers and > 0. # Also, we want to default 'min_count' to 1, and default # 'max_count' to be 'min_count'. 
min_count = 1 max_count = 1 if self.ext_mgr.is_loaded('os-multiple-create'): ret_resv_id = server_dict.get('return_reservation_id', False) min_count = server_dict.get('min_count', 1) max_count = server_dict.get('max_count', min_count) try: min_count = utils.validate_integer( min_count, "min_count", min_value=1) max_count = utils.validate_integer( max_count, "max_count", min_value=1) except exception.InvalidInput as e: raise exc.HTTPBadRequest(explanation=e.format_message()) if min_count > max_count: msg = _('min_count must be <= max_count') raise exc.HTTPBadRequest(explanation=msg) auto_disk_config = False if self.ext_mgr.is_loaded('OS-DCF'): auto_disk_config = server_dict.get('auto_disk_config') scheduler_hints = {} if self.ext_mgr.is_loaded('OS-SCH-HNT'): scheduler_hints = server_dict.get('scheduler_hints', {}) check_server_group_quota = \ self.ext_mgr.is_loaded('os-server-group-quotas') try: _get_inst_type = flavors.get_flavor_by_flavor_id inst_type = _get_inst_type(flavor_id, ctxt=context, read_deleted="no") (instances, resv_id) = self.compute_api.create(context, inst_type, image_uuid, display_name=name, display_description=name, key_name=key_name, metadata=server_dict.get('metadata', {}), access_ip_v4=access_ip_v4, access_ip_v6=access_ip_v6, injected_files=injected_files, admin_password=password, min_count=min_count, max_count=max_count, requested_networks=requested_networks, security_group=sg_names, user_data=user_data, availability_zone=availability_zone, config_drive=config_drive, block_device_mapping=block_device_mapping, auto_disk_config=auto_disk_config, scheduler_hints=scheduler_hints, legacy_bdm=legacy_bdm, check_server_group_quota=check_server_group_quota) except (exception.QuotaError, exception.PortLimitExceeded) as error: raise exc.HTTPForbidden( explanation=error.format_message(), headers={'Retry-After': 0}) except exception.InvalidMetadataSize as error: raise exc.HTTPRequestEntityTooLarge( explanation=error.format_message()) except 
exception.ImageNotFound as error: msg = _("Can not find requested image") raise exc.HTTPBadRequest(explanation=msg) except exception.FlavorNotFound as error: msg = _("Invalid flavorRef provided.") raise exc.HTTPBadRequest(explanation=msg) except exception.KeypairNotFound as error: msg = _("Invalid key_name provided.") raise exc.HTTPBadRequest(explanation=msg) except exception.ConfigDriveInvalidValue: msg = _("Invalid config_drive provided.") raise exc.HTTPBadRequest(explanation=msg) except messaging.RemoteError as err: msg = "%(err_type)s: %(err_msg)s" % {'err_type': err.exc_type, 'err_msg': err.value} raise exc.HTTPBadRequest(explanation=msg) except UnicodeDecodeError as error: msg = "UnicodeError: %s" % unicode(error) raise exc.HTTPBadRequest(explanation=msg) except (exception.ImageNotActive, exception.FlavorDiskTooSmall, exception.FlavorMemoryTooSmall, exception.NetworkNotFound, exception.PortNotFound, exception.FixedIpAlreadyInUse, exception.SecurityGroupNotFound, exception.InstanceUserDataTooLarge, exception.InstanceUserDataMalformed) as error: raise exc.HTTPBadRequest(explanation=error.format_message()) except (exception.ImageNUMATopologyIncomplete, exception.ImageNUMATopologyForbidden, exception.ImageNUMATopologyAsymmetric, exception.ImageNUMATopologyCPUOutOfRange, exception.ImageNUMATopologyCPUDuplicates, exception.ImageNUMATopologyCPUsUnassigned, exception.ImageNUMATopologyMemoryOutOfRange) as error: raise exc.HTTPBadRequest(explanation=error.format_message()) except (exception.PortInUse, exception.InstanceExists, exception.NoUniqueMatch) as error: raise exc.HTTPConflict(explanation=error.format_message()) except exception.Invalid as error: raise exc.HTTPBadRequest(explanation=error.format_message()) # If the caller wanted a reservation_id, return it if ret_resv_id: return wsgi.ResponseObject({'reservation_id': resv_id}, xml=ServerMultipleCreateTemplate) req.cache_db_instances(instances) server = self._view_builder.create(req, instances[0]) if 
CONF.enable_instance_password: server['server']['adminPass'] = password robj = wsgi.ResponseObject(server) return self._add_location(robj) def _delete(self, context, req, instance_uuid): instance = self._get_server(context, req, instance_uuid) if CONF.reclaim_instance_interval: try: self.compute_api.soft_delete(context, instance) except exception.InstanceInvalidState: # Note(yufang521247): instance which has never been active # is not allowed to be soft_deleted. Thus we have to call # delete() to clean up the instance. self.compute_api.delete(context, instance) else: self.compute_api.delete(context, instance) @wsgi.serializers(xml=ServerTemplate) def update(self, req, id, body): """Update server then pass on to version-specific controller.""" if not self.is_valid_body(body, 'server'): raise exc.HTTPUnprocessableEntity() ctxt = req.environ['nova.context'] update_dict = {} if 'name' in body['server']: name = body['server']['name'] self._validate_server_name(name) update_dict['display_name'] = name.strip() if 'accessIPv4' in body['server']: access_ipv4 = body['server']['accessIPv4'] if access_ipv4: self._validate_access_ipv4(access_ipv4) update_dict['access_ip_v4'] = ( access_ipv4 and access_ipv4.strip() or None) if 'accessIPv6' in body['server']: access_ipv6 = body['server']['accessIPv6'] if access_ipv6: self._validate_access_ipv6(access_ipv6) update_dict['access_ip_v6'] = ( access_ipv6 and access_ipv6.strip() or None) if 'auto_disk_config' in body['server']: auto_disk_config = strutils.bool_from_string( body['server']['auto_disk_config']) update_dict['auto_disk_config'] = auto_disk_config if 'hostId' in body['server']: msg = _("HostId cannot be updated.") raise exc.HTTPBadRequest(explanation=msg) if 'personality' in body['server']: msg = _("Personality cannot be updated.") raise exc.HTTPBadRequest(explanation=msg) try: instance = self.compute_api.get(ctxt, id, want_objects=True) req.cache_db_instance(instance) policy.enforce(ctxt, 'compute:update', instance) 
@wsgi.response(204)
@wsgi.serializers(xml=FullServerTemplate)
@wsgi.deserializers(xml=ActionDeserializer)
@wsgi.action('confirmResize')
def _action_confirm_resize(self, req, id, body):
    """Confirm a pending resize, mapping compute errors to HTTP errors."""
    ctxt = req.environ['nova.context']
    server = self._get_server(ctxt, req, id)
    try:
        self.compute_api.confirm_resize(ctxt, server)
    except exception.MigrationNotFound:
        # No migration record means there is nothing to confirm.
        raise exc.HTTPBadRequest(
            explanation=_("Instance has not been resized."))
    except exception.InstanceIsLocked as err:
        raise exc.HTTPConflict(explanation=err.format_message())
    except exception.InstanceInvalidState as state_error:
        common.raise_http_conflict_for_instance_invalid_state(
            state_error, 'confirmResize', id)
def _resize(self, req, instance_id, flavor_id, **kwargs):
    """Begin the resize process with given instance/flavor.

    Delegates to ``compute_api.resize`` and translates each compute-layer
    failure into the appropriate HTTP error; on success returns 202 since
    the resize completes asynchronously.
    """
    context = req.environ["nova.context"]
    instance = self._get_server(context, req, instance_id)
    try:
        self.compute_api.resize(context, instance, flavor_id, **kwargs)
    except exception.QuotaError as error:
        # Over quota: 403 with Retry-After so clients can back off.
        raise exc.HTTPForbidden(
            explanation=error.format_message(),
            headers={'Retry-After': 0})
    except exception.FlavorNotFound:
        msg = _("Unable to locate requested flavor.")
        raise exc.HTTPBadRequest(explanation=msg)
    except exception.CannotResizeToSameFlavor:
        msg = _("Resize requires a flavor change.")
        raise exc.HTTPBadRequest(explanation=msg)
    except exception.CannotResizeDisk as e:
        raise exc.HTTPBadRequest(explanation=e.format_message())
    except exception.InstanceIsLocked as e:
        raise exc.HTTPConflict(explanation=e.format_message())
    except exception.InstanceInvalidState as state_error:
        common.raise_http_conflict_for_instance_invalid_state(state_error,
                'resize', instance_id)
    except exception.ImageNotAuthorized:
        msg = _("You are not authorized to access the image "
                "the instance was started with.")
        raise exc.HTTPUnauthorized(explanation=msg)
    except exception.ImageNotFound:
        msg = _("Image that the instance was started "
                "with could not be found.")
        raise exc.HTTPBadRequest(explanation=msg)
    except (exception.NoValidHost,
            exception.AutoDiskConfigDisabledByImage) as e:
        raise exc.HTTPBadRequest(explanation=e.format_message())
    except exception.Invalid:
        # Catch-all for remaining validation failures; must stay last so
        # the more specific Invalid subclasses above are matched first.
        msg = _("Invalid instance image.")
        raise exc.HTTPBadRequest(explanation=msg)
    return webob.Response(status_int=202)
def _validate_metadata(self, metadata):
    """Reject metadata that does not behave like a key/value mapping.

    Raises HTTPBadRequest when *metadata* has no ``iteritems`` method.
    """
    try:
        # EAFP probe: a mapping must expose iteritems() (py2 dict API).
        metadata.iteritems()
    except AttributeError:
        reason = _("Unable to parse metadata key/value pairs.")
        LOG.debug(reason)
        raise exc.HTTPBadRequest(explanation=reason)
@wsgi.response(202)
@wsgi.serializers(xml=FullServerTemplate)
@wsgi.deserializers(xml=ActionDeserializer)
@wsgi.action('rebuild')
def _action_rebuild(self, req, id, body):
    """Rebuild an instance with the given attributes.

    Validates the request body, maps API attribute names onto compute-API
    keyword arguments, invokes ``compute_api.rebuild`` and returns the
    refreshed server view (including adminPass when enabled).
    """
    body = body['rebuild']
    try:
        image_href = body["imageRef"]
    except (KeyError, TypeError):
        msg = _("Could not parse imageRef from request.")
        raise exc.HTTPBadRequest(explanation=msg)
    image_href = self._image_uuid_from_href(image_href)
    password = self._get_server_admin_password(body)
    context = req.environ['nova.context']
    instance = self._get_server(context, req, id)
    # Request-body attribute name -> compute_api.rebuild kwarg name.
    attr_map = {
        'personality': 'files_to_inject',
        'name': 'display_name',
        'accessIPv4': 'access_ip_v4',
        'accessIPv6': 'access_ip_v6',
        'metadata': 'metadata',
        'auto_disk_config': 'auto_disk_config',
    }
    kwargs = {}
    # take the preserve_ephemeral value into account only when the
    # corresponding extension is active
    if (self.ext_mgr.is_loaded('os-preserve-ephemeral-rebuild')
            and 'preserve_ephemeral' in body):
        kwargs['preserve_ephemeral'] = strutils.bool_from_string(
            body['preserve_ephemeral'], strict=True)
    if 'accessIPv4' in body:
        self._validate_access_ipv4(body['accessIPv4'])
    if 'accessIPv6' in body:
        self._validate_access_ipv6(body['accessIPv6'])
    if 'name' in body:
        self._validate_server_name(body['name'])
    # Copy over only the attributes actually present in the request.
    for request_attribute, instance_attribute in attr_map.items():
        try:
            kwargs[instance_attribute] = body[request_attribute]
        except (KeyError, TypeError):
            pass
    self._validate_metadata(kwargs.get('metadata', {}))
    # Personality files need decoding/validation before being passed on.
    if 'files_to_inject' in kwargs:
        personality = kwargs.pop('files_to_inject')
        files_to_inject = self._get_injected_files(personality)
    else:
        files_to_inject = None
    try:
        self.compute_api.rebuild(context,
                                 instance,
                                 image_href,
                                 password,
                                 files_to_inject=files_to_inject,
                                 **kwargs)
    except exception.InstanceIsLocked as e:
        raise exc.HTTPConflict(explanation=e.format_message())
    except exception.InstanceInvalidState as state_error:
        common.raise_http_conflict_for_instance_invalid_state(state_error,
                'rebuild', id)
    except exception.InstanceNotFound:
        msg = _("Instance could not be found")
        raise exc.HTTPNotFound(explanation=msg)
    except exception.InvalidMetadataSize as error:
        raise exc.HTTPRequestEntityTooLarge(
            explanation=error.format_message())
    except exception.ImageNotFound:
        msg = _("Cannot find image for rebuild")
        raise exc.HTTPBadRequest(explanation=msg)
    except exception.QuotaError as error:
        raise exc.HTTPForbidden(explanation=error.format_message())
    except (exception.ImageNotActive,
            exception.FlavorDiskTooSmall,
            exception.FlavorMemoryTooSmall,
            exception.InvalidMetadata,
            exception.AutoDiskConfigDisabledByImage) as error:
        raise exc.HTTPBadRequest(explanation=error.format_message())
    # Re-fetch so the view reflects post-rebuild state.
    instance = self._get_server(context, req, id)
    view = self._view_builder.show(req, instance)
    # Add on the adminPass attribute since the view doesn't do it
    # unless instance passwords are disabled
    if CONF.enable_instance_password:
        view['server']['adminPass'] = password
    robj = wsgi.ResponseObject(view)
    return self._add_location(robj)
metadata) try: props.update(metadata) except ValueError: msg = _("Invalid metadata") raise exc.HTTPBadRequest(explanation=msg) instance = self._get_server(context, req, id) bdms = objects.BlockDeviceMappingList.get_by_instance_uuid( context, instance.uuid) try: if self.compute_api.is_volume_backed_instance(context, instance, bdms): img = instance['image_ref'] if not img: properties = bdms.root_metadata( context, self.compute_api.image_api, self.compute_api.volume_api) image_meta = {'properties': properties} else: image_meta = self.compute_api.image_api.get(context, img) image = self.compute_api.snapshot_volume_backed( context, instance, image_meta, image_name, extra_properties=props) else: image = self.compute_api.snapshot(context, instance, image_name, extra_properties=props) except exception.InstanceInvalidState as state_error: common.raise_http_conflict_for_instance_invalid_state(state_error, 'createImage', id) except exception.Invalid as err: raise exc.HTTPBadRequest(explanation=err.format_message()) # build location of newly-created image entity image_id = str(image['id']) url_prefix = self._view_builder._update_glance_link_prefix( req.application_url) image_ref = os.path.join(url_prefix, context.project_id, 'images', image_id) resp = webob.Response(status_int=202) resp.headers['Location'] = image_ref return resp def _get_server_admin_password(self, server): """Determine the admin password for a server on creation.""" try: password = server['adminPass'] self._validate_admin_password(password) except KeyError: password = utils.generate_password() except ValueError: raise exc.HTTPBadRequest(explanation=_("Invalid adminPass")) return password def _validate_admin_password(self, password): if not isinstance(password, six.string_types): raise ValueError() def _get_server_search_options(self): """Return server search options allowed by non-admin.""" return ('reservation_id', 'name', 'status', 'image', 'flavor', 'ip', 'changes-since', 'all_tenants') def 
def remove_invalid_options(context, search_options, allowed_search_options):
    """Remove search options that are not valid for non-admin API/context.

    Mutates ``search_options`` in place. Admin contexts are allowed to
    filter on anything, so they are left untouched.
    """
    if context.is_admin:
        # Admins may use every option.
        return
    rejected = [name for name in search_options
                if name not in allowed_search_options]
    LOG.debug("Removing options '%s' from query",
              ", ".join(rejected))
    for name in rejected:
        search_options.pop(name, None)
from __future__ import unicode_literals import base64 import botocore.client import boto3 import hashlib import io import json import time import zipfile import sure # noqa from freezegun import freeze_time from moto import mock_lambda, mock_s3, mock_ec2, mock_sns, mock_logs, settings _lambda_region = 'us-west-2' def _process_lambda(func_str): zip_output = io.BytesIO() zip_file = zipfile.ZipFile(zip_output, 'w', zipfile.ZIP_DEFLATED) zip_file.writestr('lambda_function.py', func_str) zip_file.close() zip_output.seek(0) return zip_output.read() def get_test_zip_file1(): pfunc = """ def lambda_handler(event, context): return event """ return _process_lambda(pfunc) def get_test_zip_file2(): func_str = """ import boto3 def lambda_handler(event, context): ec2 = boto3.resource('ec2', region_name='us-west-2', endpoint_url='http://{base_url}') volume_id = event.get('volume_id') vol = ec2.Volume(volume_id) print('get volume details for %s\\nVolume - %s state=%s, size=%s' % (volume_id, volume_id, vol.state, vol.size)) return event """.format(base_url="motoserver:5000" if settings.TEST_SERVER_MODE else "ec2.us-west-2.amazonaws.com") return _process_lambda(func_str) def get_test_zip_file3(): pfunc = """ def lambda_handler(event, context): print("get_test_zip_file3 success") return event """ return _process_lambda(pfunc) @mock_lambda def test_list_functions(): conn = boto3.client('lambda', 'us-west-2') result = conn.list_functions() result['Functions'].should.have.length_of(0) @mock_lambda def test_invoke_requestresponse_function(): conn = boto3.client('lambda', 'us-west-2') conn.create_function( FunctionName='testFunction', Runtime='python2.7', Role='test-iam-role', Handler='lambda_function.lambda_handler', Code={ 'ZipFile': get_test_zip_file1(), }, Description='test lambda function', Timeout=3, MemorySize=128, Publish=True, ) in_data = {'msg': 'So long and thanks for all the fish'} success_result = conn.invoke(FunctionName='testFunction', InvocationType='RequestResponse', 
@mock_lambda
def test_invoke_event_function():
    """Invoke a function as an Event: 202 status, empty payload."""
    conn = boto3.client('lambda', 'us-west-2')
    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={
            'ZipFile': get_test_zip_file1(),
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    # Invoking a non-existent function must raise a client error.
    conn.invoke.when.called_with(
        FunctionName='notAFunction',
        InvocationType='Event',
        Payload='{}'
    ).should.throw(botocore.client.ClientError)
    in_data = {'msg': 'So long and thanks for all the fish'}
    success_result = conn.invoke(
        FunctionName='testFunction', InvocationType='Event',
        Payload=json.dumps(in_data))
    # Event (async) invocations are accepted with 202 and return no body.
    success_result["StatusCode"].should.equal(202)
    json.loads(success_result['Payload'].read().decode(
        'utf-8')).should.equal({})
@mock_logs
@mock_sns
@mock_ec2
@mock_lambda
def test_invoke_function_from_sns():
    """Publishing to an SNS topic triggers the subscribed Lambda.

    Success is detected by polling CloudWatch Logs (up to 30s) for the
    marker line printed by the get_test_zip_file3 handler.
    """
    logs_conn = boto3.client("logs", region_name="us-west-2")
    sns_conn = boto3.client("sns", region_name="us-west-2")
    sns_conn.create_topic(Name="some-topic")
    topics_json = sns_conn.list_topics()
    topics = topics_json["Topics"]
    topic_arn = topics[0]['TopicArn']
    conn = boto3.client('lambda', 'us-west-2')
    result = conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={
            'ZipFile': get_test_zip_file3(),
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    # Wire the Lambda up as an SNS subscriber, then publish a message.
    sns_conn.subscribe(TopicArn=topic_arn, Protocol="lambda",
                       Endpoint=result['FunctionArn'])
    result = sns_conn.publish(TopicArn=topic_arn, Message=json.dumps({}))
    # Poll the function's log group until the handler's marker shows up.
    start = time.time()
    while (time.time() - start) < 30:
        result = logs_conn.describe_log_streams(
            logGroupName='/aws/lambda/testFunction')
        log_streams = result.get('logStreams')
        if not log_streams:
            # Invocation has not produced a stream yet; wait and retry.
            time.sleep(1)
            continue
        assert len(log_streams) == 1
        result = logs_conn.get_log_events(
            logGroupName='/aws/lambda/testFunction',
            logStreamName=log_streams[0]['logStreamName'])
        for event in result.get('events'):
            if event['message'] == 'get_test_zip_file3 success':
                return
        time.sleep(1)
    # Timed out without seeing the marker: the Lambda never ran.
    assert False, "Test Failed"
@mock_lambda
@mock_s3
@freeze_time('2015-01-01 00:00:00')
def test_create_function_from_aws_bucket():
    """create_function with S3-hosted code returns full function metadata."""
    s3_conn = boto3.client('s3', 'us-west-2')
    s3_conn.create_bucket(Bucket='test-bucket')
    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket='test-bucket', Key='test.zip',
                       Body=zip_content)
    conn = boto3.client('lambda', 'us-west-2')
    result = conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={
            'S3Bucket': 'test-bucket',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
        VpcConfig={
            "SecurityGroupIds": ["sg-123abc"],
            "SubnetIds": ["subnet-123abc"],
        },
    )
    # this is hard to match against, so remove it
    result['ResponseMetadata'].pop('HTTPHeaders', None)
    # Botocore inserts retry attempts not seen in Python27
    result['ResponseMetadata'].pop('RetryAttempts', None)
    result.pop('LastModified')
    # Full-dict comparison pins every field moto is expected to return.
    result.should.equal({
        'FunctionName': 'testFunction',
        'FunctionArn': 'arn:aws:lambda:{}:123456789012:function:testFunction:$LATEST'.format(_lambda_region),
        'Runtime': 'python2.7',
        'Role': 'test-iam-role',
        'Handler': 'lambda_function.lambda_handler',
        "CodeSha256": hashlib.sha256(zip_content).hexdigest(),
        "CodeSize": len(zip_content),
        'Description': 'test lambda function',
        'Timeout': 3,
        'MemorySize': 128,
        'Version': '$LATEST',
        'VpcConfig': {
            "SecurityGroupIds": ["sg-123abc"],
            "SubnetIds": ["subnet-123abc"],
            "VpcId": "vpc-123abc"
        },
        'ResponseMetadata': {'HTTPStatusCode': 201},
    })
@mock_lambda
@mock_s3
@freeze_time('2015-01-01 00:00:00')
def test_get_function():
    """get_function returns code location plus full configuration."""
    s3_conn = boto3.client('s3', 'us-west-2')
    s3_conn.create_bucket(Bucket='test-bucket')
    zip_content = get_test_zip_file1()
    s3_conn.put_object(Bucket='test-bucket', Key='test.zip',
                       Body=zip_content)
    conn = boto3.client('lambda', 'us-west-2')
    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={
            'S3Bucket': 'test-bucket',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    result = conn.get_function(FunctionName='testFunction')
    # this is hard to match against, so remove it
    result['ResponseMetadata'].pop('HTTPHeaders', None)
    # Botocore inserts retry attempts not seen in Python27
    result['ResponseMetadata'].pop('RetryAttempts', None)
    result['Configuration'].pop('LastModified')
    result['Code']['Location'].should.equal('s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com/test.zip'.format(_lambda_region))
    result['Code']['RepositoryType'].should.equal('S3')
    result['Configuration']['CodeSha256'].should.equal(
        hashlib.sha256(zip_content).hexdigest())
    result['Configuration']['CodeSize'].should.equal(len(zip_content))
    result['Configuration']['Description'].should.equal(
        'test lambda function')
    result['Configuration'].should.contain('FunctionArn')
    result['Configuration']['FunctionName'].should.equal('testFunction')
    result['Configuration']['Handler'].should.equal(
        'lambda_function.lambda_handler')
    result['Configuration']['MemorySize'].should.equal(128)
    result['Configuration']['Role'].should.equal('test-iam-role')
    result['Configuration']['Runtime'].should.equal('python2.7')
    result['Configuration']['Timeout'].should.equal(3)
    result['Configuration']['Version'].should.equal('$LATEST')
    result['Configuration'].should.contain('VpcConfig')
    # Test get function with an explicit qualifier as well.
    result = conn.get_function(FunctionName='testFunction',
                               Qualifier='$LATEST')
    result['Configuration']['Version'].should.equal('$LATEST')
@mock_lambda
@mock_s3
def test_publish():
    """Publishing creates a numbered version next to $LATEST; deleting the
    published version must leave $LATEST intact."""
    s3_conn = boto3.client('s3', 'us-west-2')
    s3_conn.create_bucket(Bucket='test-bucket')

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content)
    conn = boto3.client('lambda', 'us-west-2')

    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={
            'S3Bucket': 'test-bucket',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    function_list = conn.list_functions()
    function_list['Functions'].should.have.length_of(1)
    latest_arn = function_list['Functions'][0]['FunctionArn']

    conn.publish_version(FunctionName='testFunction')

    function_list = conn.list_functions()
    function_list['Functions'].should.have.length_of(2)

    # #SetComprehension ;-)
    published_arn = list({f['FunctionArn'] for f in function_list['Functions']} - {latest_arn})[0]
    published_arn.should.contain('testFunction:1')

    conn.delete_function(FunctionName='testFunction', Qualifier='1')

    function_list = conn.list_functions()
    function_list['Functions'].should.have.length_of(1)
    function_list['Functions'][0]['FunctionArn'].should.contain('testFunction:$LATEST')


@mock_lambda
@mock_s3
@freeze_time('2015-01-01 00:00:00')
def test_list_create_list_get_delete_list():
    """
    test `list -> create -> list -> get -> delete -> list` integration
    """
    s3_conn = boto3.client('s3', 'us-west-2')
    s3_conn.create_bucket(Bucket='test-bucket')

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content)
    conn = boto3.client('lambda', 'us-west-2')

    conn.list_functions()['Functions'].should.have.length_of(0)

    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={
            'S3Bucket': 'test-bucket',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )
    expected_function_result = {
        "Code": {
            "Location": "s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com/test.zip".format(_lambda_region),
            "RepositoryType": "S3"
        },
        "Configuration": {
            "CodeSha256": hashlib.sha256(zip_content).hexdigest(),
            "CodeSize": len(zip_content),
            "Description": "test lambda function",
            "FunctionArn": 'arn:aws:lambda:{}:123456789012:function:testFunction:$LATEST'.format(_lambda_region),
            "FunctionName": "testFunction",
            "Handler": "lambda_function.lambda_handler",
            "MemorySize": 128,
            "Role": "test-iam-role",
            "Runtime": "python2.7",
            "Timeout": 3,
            "Version": '$LATEST',
            "VpcConfig": {
                "SecurityGroupIds": [],
                "SubnetIds": [],
            }
        },
        'ResponseMetadata': {'HTTPStatusCode': 200},
    }
    func = conn.list_functions()['Functions'][0]
    func.pop('LastModified')
    func.should.equal(expected_function_result['Configuration'])

    func = conn.get_function(FunctionName='testFunction')
    # this is hard to match against, so remove it
    func['ResponseMetadata'].pop('HTTPHeaders', None)
    # Botocore inserts retry attempts not seen in Python27
    func['ResponseMetadata'].pop('RetryAttempts', None)
    func['Configuration'].pop('LastModified')
    func.should.equal(expected_function_result)

    conn.delete_function(FunctionName='testFunction')

    conn.list_functions()['Functions'].should.have.length_of(0)


@mock_lambda
def test_invoke_lambda_error():
    """A handler that raises should surface FunctionError == 'Handled'."""
    lambda_fx = """
def lambda_handler(event, context):
    raise Exception('failsauce')
"""
    zip_output = io.BytesIO()
    zip_file = zipfile.ZipFile(zip_output, 'w', zipfile.ZIP_DEFLATED)
    zip_file.writestr('lambda_function.py', lambda_fx)
    zip_file.close()
    zip_output.seek(0)

    client = boto3.client('lambda', region_name='us-east-1')
    client.create_function(
        FunctionName='test-lambda-fx',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
        Code={
            'ZipFile': zip_output.read()
        },
    )

    result = client.invoke(
        FunctionName='test-lambda-fx',
        InvocationType='RequestResponse',
        LogType='Tail'
    )

    assert 'FunctionError' in result
    assert result['FunctionError'] == 'Handled'


@mock_lambda
@mock_s3
def test_tags():
    """
    test list_tags -> tag_resource -> list_tags -> tag_resource -> list_tags -> untag_resource -> list_tags integration
    """
    s3_conn = boto3.client('s3', 'us-west-2')
    s3_conn.create_bucket(Bucket='test-bucket')

    zip_content = get_test_zip_file2()
    s3_conn.put_object(Bucket='test-bucket', Key='test.zip', Body=zip_content)
    conn = boto3.client('lambda', 'us-west-2')

    function = conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'S3Bucket': 'test-bucket',
            'S3Key': 'test.zip',
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    # List tags when there are none
    conn.list_tags(
        Resource=function['FunctionArn']
    )['Tags'].should.equal(dict())

    # List tags when there is one
    conn.tag_resource(
        Resource=function['FunctionArn'],
        Tags=dict(spam='eggs')
    )['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
    conn.list_tags(
        Resource=function['FunctionArn']
    )['Tags'].should.equal(dict(spam='eggs'))

    # List tags when another has been added
    conn.tag_resource(
        Resource=function['FunctionArn'],
        Tags=dict(foo='bar')
    )['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
    conn.list_tags(
        Resource=function['FunctionArn']
    )['Tags'].should.equal(dict(spam='eggs', foo='bar'))

    # Untag resource
    conn.untag_resource(
        Resource=function['FunctionArn'],
        TagKeys=['spam', 'trolls']
    )['ResponseMetadata']['HTTPStatusCode'].should.equal(204)
    conn.list_tags(
        Resource=function['FunctionArn']
    )['Tags'].should.equal(dict(foo='bar'))

    # Untag a tag that does not exist (no error and no change)
    conn.untag_resource(
        Resource=function['FunctionArn'],
        TagKeys=['spam']
    )['ResponseMetadata']['HTTPStatusCode'].should.equal(204)


@mock_lambda
def test_tags_not_found():
    """
    Test list_tags and tag_resource when the lambda with the given arn does not exist
    """
    conn = boto3.client('lambda', 'us-west-2')
    conn.list_tags.when.called_with(
        Resource='arn:aws:lambda:123456789012:function:not-found'
    ).should.throw(botocore.client.ClientError)

    conn.tag_resource.when.called_with(
        Resource='arn:aws:lambda:123456789012:function:not-found',
        Tags=dict(spam='eggs')
    ).should.throw(botocore.client.ClientError)

    conn.untag_resource.when.called_with(
        Resource='arn:aws:lambda:123456789012:function:not-found',
        TagKeys=['spam']
    ).should.throw(botocore.client.ClientError)


@mock_lambda
def test_invoke_async_function():
    """invoke_async returns HTTP 202 (accepted)."""
    conn = boto3.client('lambda', 'us-west-2')
    conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={'ZipFile': get_test_zip_file1()},
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    success_result = conn.invoke_async(
        FunctionName='testFunction',
        InvokeArgs=json.dumps({'test': 'event'})
    )

    success_result['Status'].should.equal(202)


@mock_lambda
@freeze_time('2015-01-01 00:00:00')
def test_get_function_created_with_zipfile():
    """Functions created from an inline ZipFile still report an S3 code location."""
    conn = boto3.client('lambda', 'us-west-2')
    zip_content = get_test_zip_file1()
    result = conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'ZipFile': zip_content,
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    response = conn.get_function(
        FunctionName='testFunction'
    )
    response['Configuration'].pop('LastModified')

    response['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
    assert len(response['Code']) == 2
    assert response['Code']['RepositoryType'] == 'S3'
    assert response['Code']['Location'].startswith('s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com'.format(_lambda_region))
    response['Configuration'].should.equal(
        {
            "CodeSha256": hashlib.sha256(zip_content).hexdigest(),
            "CodeSize": len(zip_content),
            "Description": "test lambda function",
            "FunctionArn": 'arn:aws:lambda:{}:123456789012:function:testFunction:$LATEST'.format(_lambda_region),
            "FunctionName": "testFunction",
            "Handler": "lambda_function.handler",
            "MemorySize": 128,
            "Role": "test-iam-role",
            "Runtime": "python2.7",
            "Timeout": 3,
            "Version": '$LATEST',
            "VpcConfig": {
                "SecurityGroupIds": [],
                "SubnetIds": [],
            }
        },
    )


@mock_lambda
def test_add_function_permission():
    """add_permission returns the created policy statement.

    FIX: this was named `add_function_permission`, so test runners never
    collected it and its assertions never ran; renamed with the `test_` prefix.
    """
    conn = boto3.client('lambda', 'us-west-2')
    zip_content = get_test_zip_file1()
    result = conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'ZipFile': zip_content,
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    response = conn.add_permission(
        FunctionName='testFunction',
        StatementId='1',
        Action="lambda:InvokeFunction",
        Principal='432143214321',
        SourceArn="arn:aws:lambda:us-west-2:account-id:function:helloworld",
        SourceAccount='123412341234',
        EventSourceToken='blah',
        Qualifier='2'
    )
    assert 'Statement' in response
    res = json.loads(response['Statement'])
    assert res['Action'] == "lambda:InvokeFunction"


@mock_lambda
def test_get_function_policy():
    """get_policy returns the JSON policy containing previously added statements.

    FIX: this was named `get_function_policy`, so test runners never collected
    it and its assertions never ran; renamed with the `test_` prefix.
    """
    conn = boto3.client('lambda', 'us-west-2')
    zip_content = get_test_zip_file1()
    result = conn.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.handler',
        Code={
            'ZipFile': zip_content,
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    response = conn.add_permission(
        FunctionName='testFunction',
        StatementId='1',
        Action="lambda:InvokeFunction",
        Principal='432143214321',
        SourceArn="arn:aws:lambda:us-west-2:account-id:function:helloworld",
        SourceAccount='123412341234',
        EventSourceToken='blah',
        Qualifier='2'
    )

    response = conn.get_policy(
        FunctionName='testFunction'
    )

    assert 'Policy' in response
    assert isinstance(response['Policy'], str)
    res = json.loads(response['Policy'])
    assert res['Statement'][0]['Action'] == 'lambda:InvokeFunction'
# MIT License # # Copyright (c) 2016 Anders Steen Christensen # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
import numpy as np
import copy

from arad import ARAD
from aras import ARAS

from data import NUCLEAR_CHARGE

from representations import fgenerate_coulomb_matrix
from representations import fgenerate_unsorted_coulomb_matrix
from representations import fgenerate_local_coulomb_matrix
from representations import fgenerate_atomic_coulomb_matrix

# DFTB3 atomic reference energies, used to turn a DFTB3 total energy into a
# heat of formation (units follow the rest of this file — TODO confirm kcal/mol).
HOF_DFTB3 = dict()
HOF_DFTB3["H"] = -172.3145
HOF_DFTB3["C"] = -906.4342
HOF_DFTB3["N"] = -1327.2991
HOF_DFTB3["O"] = -1936.6161
HOF_DFTB3["S"] = -1453.3907


class Molecule:
    """Container for one molecule: geometry, nuclear charges, energies and
    the various ML descriptors generated on demand."""

    def __init__(self):
        self.natoms = -1                # -1 marks "not yet populated"
        self.energy = float("nan")
        self.molid = -1
        self.name = None
        self.dftb3_energy = float("nan")
        self.dftb3_hof = float("nan")
        self.atomtypes = []             # element symbols, e.g. "C"
        self.nuclear_charges = []       # Z for each atom, parallel to atomtypes
        self.coordinates = []           # one np.array([x, y, z]) per atom
        self.active_atoms = []
        self.unit_cell = None

        # Container for misc properties
        self.properties = []
        self.properties2 = []

    def generate_coulomb_matrix(self, size=23):
        """Generate the (sorted) Coulomb matrix, padded to `size` atoms."""
        self.coulomb_matrix = fgenerate_coulomb_matrix(self.nuclear_charges, \
                self.coordinates, self.natoms, size)

    def generate_unsorted_coulomb_matrix(self, size=23):
        """Generate the unsorted Coulomb matrix, padded to `size` atoms."""
        self.unsorted_coulomb_matrix = fgenerate_unsorted_coulomb_matrix(self.nuclear_charges, \
                self.coordinates, self.natoms, size)

    def generate_local_coulomb_matrix(self, calc="all", size=23):
        """Generate the local Coulomb matrix.

        NOTE(review): `calc` is accepted but never forwarded — verify whether
        fgenerate_local_coulomb_matrix should receive it.
        """
        self.local_coulomb_matrix = fgenerate_local_coulomb_matrix( \
                self.nuclear_charges, self.coordinates, self.natoms, size)

    def generate_atomic_coulomb_matrix(self, calc="all", size=23):
        """Generate the atomic Coulomb matrix.

        NOTE(review): `calc` is accepted but never forwarded — verify whether
        fgenerate_atomic_coulomb_matrix should receive it.
        """
        self.atomic_coulomb_matrix = fgenerate_atomic_coulomb_matrix( \
                self.nuclear_charges, self.coordinates, self.natoms, size)

    def generate_arad_descriptor(self, size=23):
        """Generate the ARAD descriptor and sanity-check its padded size."""
        arad_object = ARAD(maxMolSize=size, maxAts=size)
        self.arad_descriptor = arad_object.describe(np.array(self.coordinates), \
                np.array(self.nuclear_charges))

        assert (self.arad_descriptor).shape[0] == size, "ERROR: Check ARAD descriptor size!"
        assert (self.arad_descriptor).shape[2] == size, "ERROR: Check ARAD descriptor size!"

    def generate_arad_descriptor_periodic(self, size=23, unit_cell=None):
        """Generate the ARAD descriptor for a periodic system.

        Falls back to `self.unit_cell` when no cell is passed explicitly.
        """
        if unit_cell is None:
            unit_cell = self.unit_cell

        arad_object = ARAD(maxMolSize=size, maxAts=size)
        self.arad_descriptor = arad_object.describe(np.array(self.coordinates), \
                np.array(self.nuclear_charges), cell=unit_cell)

        assert (self.arad_descriptor).shape[0] == size, "ERROR: Check ARAD descriptor size!"
        assert (self.arad_descriptor).shape[2] == size, "ERROR: Check ARAD descriptor size!"

    def generate_aras_descriptor(self, size=23):
        """Generate the ARAS descriptor and sanity-check its padded size."""
        aras_object = ARAS(maxMolSize=size, maxAts=size)
        self.aras_descriptor = aras_object.describe(np.array(self.coordinates), \
                np.array(self.nuclear_charges))

        assert (self.aras_descriptor).shape[0] == size, "ERROR: Check ARAS descriptor size!"
        assert (self.aras_descriptor).shape[2] == size, "ERROR: Check ARAS descriptor size!"

    def read_xyz(self, filename):
        """Populate natoms, atomtypes, nuclear_charges and coordinates from an
        XYZ file. Parsing stops at the first line with fewer than 4 tokens."""
        f = open(filename, "r")
        lines = f.readlines()
        f.close()

        self.natoms = int(lines[0])

        for line in lines[2:]:
            tokens = line.split()

            if len(tokens) < 4:
                break

            self.atomtypes.append(tokens[0])
            self.nuclear_charges.append(NUCLEAR_CHARGE[tokens[0]])

            x = float(tokens[1])
            y = float(tokens[2])
            z = float(tokens[3])

            self.coordinates.append(np.array([x, y, z]))

        self.coordinates = np.array(self.coordinates)


def get_lines(filename):
    """Return all lines of `filename` as a list of strings."""
    f = open(filename, "r")
    lines = f.readlines()
    f.close()
    return lines


def parse_molecules(filename):
    """Parse a multi-molecule file into a list of Molecule objects.

    Line formats (by token count): 1 token = atom count starting a new
    molecule; 2 tokens = molid + energy; 7 tokens = an atom record whose
    element is token 0 and coordinates are tokens 4-6.
    """
    lines = get_lines(filename)

    mols = []

    mol = Molecule()

    for line in lines:
        tokens = line.split()

        if len(tokens) == 1:
            # A new molecule starts; flush the previous one (if any).
            if mol.natoms > 0:
                mols.append(mol)
            mol = Molecule()
            mol.natoms = int(tokens[0])

        if len(tokens) == 2:
            mol.molid = int(tokens[0])
            mol.energy = float(tokens[1])
            mol.dftb3_energy = parse_dft3_energy(mol.molid)

        if len(tokens) == 7:
            atom_type = tokens[0]
            mol.atomtypes.append(atom_type)
            mol.nuclear_charges.append(NUCLEAR_CHARGE[atom_type])

            x = float(tokens[4])
            y = float(tokens[5])
            z = float(tokens[6])

            mol.coordinates.append(np.array([x, y, z]))

            # Recompute the heat of formation from scratch on every atom line;
            # the final atom line leaves the correct total.
            mol.dftb3_hof = 0.0
            mol.dftb3_hof += mol.dftb3_energy
            for atom in ["H", "C", "N", "O", "S"]:
                n = mol.atomtypes.count(atom)
                mol.dftb3_hof -= n * HOF_DFTB3[atom]

    # BUG FIX: the last molecule in the file was previously dropped because it
    # was only appended when the *next* atom-count line appeared.
    if mol.natoms > 0:
        mols.append(mol)

    # for mol in mols:
    #     print mol.molid, mol.energy, mol.dftb3_hof

    return mols


def parse_dft3_energy(molid):
    """Read the 'Total Energy' from ../logfiles/<molid>.log and convert
    Hartree -> kcal/mol (factor 627.51). Returns NaN if no such line exists."""
    filename = "../logfiles/" + str(molid) + ".log"
    f = open(filename, "r")
    lines = f.readlines()
    f.close()

    energy = float("nan")
    for line in lines:
        if "Total Energy" in line:
            tokens = line.split()
            energy = float(tokens[2]) * 627.51

    return energy
# Copyright 2013 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Unittest for Ipset rendering module.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import unittest from lib import ipset from lib import nacaddr from lib import naming from lib import policy import mock GOOD_HEADER_1 = """ header { comment:: "this is a test acl" target:: ipset OUTPUT DROP } """ GOOD_TERM_1 = """ term good-term-1 { source-address:: INTERNAL action:: accept } """ GOOD_TERM_2 = """ term good-term-2 { destination-address:: EXTERNAL action:: accept } """ GOOD_TERM_3 = """ term good-term-3 { source-address:: INTERNAL destination-address:: EXTERNAL action:: accept } """ GOOD_TERM_4 = """ term good-term-4 { source-address:: INTERNAL destination-address:: EXTERNAL policer:: batman action:: accept } """ SUPPORTED_TOKENS = { 'action', 'comment', 'counter', 'destination_address', 'destination_address_exclude', 'destination_interface', 'destination_port', 'destination_prefix', 'expiration', 'fragment_offset', 'icmp_type', 'logging', 'name', 'option', 'owner', 'packet_length', 'platform', 'platform_exclude', 'protocol', 'routing_instance', 'source_address', 'source_address_exclude', 'source_interface', 'source_port', 'source_prefix', 'translated', 'verbatim', } SUPPORTED_SUB_TOKENS = { 'action': {'accept', 'deny', 'reject', 'next', 'reject-with-tcp-rst'}, 'icmp_type': { 
'alternate-address', 'certification-path-advertisement', 'certification-path-solicitation', 'conversion-error', 'destination-unreachable', 'echo-reply', 'echo-request', 'mobile-redirect', 'home-agent-address-discovery-reply', 'home-agent-address-discovery-request', 'icmp-node-information-query', 'icmp-node-information-response', 'information-request', 'inverse-neighbor-discovery-advertisement', 'inverse-neighbor-discovery-solicitation', 'mask-reply', 'mask-request', 'information-reply', 'mobile-prefix-advertisement', 'mobile-prefix-solicitation', 'multicast-listener-done', 'multicast-listener-query', 'multicast-listener-report', 'multicast-router-advertisement', 'multicast-router-solicitation', 'multicast-router-termination', 'neighbor-advertisement', 'neighbor-solicit', 'packet-too-big', 'parameter-problem', 'redirect', 'redirect-message', 'router-advertisement', 'router-renumbering', 'router-solicit', 'router-solicitation', 'source-quench', 'time-exceeded', 'timestamp-reply', 'timestamp-request', 'unreachable', 'version-2-multicast-listener-report', }, 'option': {'established', 'first-fragment', 'initial', 'sample', 'tcp-established', 'tcp-initial', 'syn', 'ack', 'fin', 'rst', 'urg', 'psh', 'all', 'none'} } # Print a info message when a term is set to expire in that many weeks. # This is normally passed from command line. 
EXP_INFO = 2 class IpsetTest(unittest.TestCase): def setUp(self): self.naming = mock.create_autospec(naming.Naming) def testMarkers(self): self.naming.GetNetAddr.return_value = [nacaddr.IPv4('10.0.0.0/8')] acl = ipset.Ipset(policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_1, self.naming), EXP_INFO) result = str(acl) self.assertIn('# begin:ipset-rules', result) self.assertIn('# end:ipset-rules', result) self.naming.GetNetAddr.assert_called_once_with('INTERNAL') def testGenerateSetName(self): # iptables superclass currently limits term name length to 26 characters, # but that could change policy_term = mock.MagicMock() policy_term.name = 'filter_name' policy_term.protocol = ['tcp'] term = ipset.Term(policy_term, 'filter_name', False, None) self.assertEqual(term._GenerateSetName('good-term-1', 'src'), 'good-term-1-src') self.assertEqual(term._GenerateSetName('good-but-way-too-long-term-name', 'src'), 'good-but-way-too-long-term--src') term = ipset.Term(policy_term, 'filter_name', False, None, 'inet6') self.assertEqual(term._GenerateSetName('good-term-1', 'src'), 'good-term-1-src-v6') self.assertEqual(term._GenerateSetName('good-but-way-too-long-term-name', 'src'), 'good-but-way-too-long-te-src-v6') def testOneSourceAddress(self): self.naming.GetNetAddr.return_value = [nacaddr.IPv4('10.0.0.0/8')] acl = ipset.Ipset(policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_1, self.naming), EXP_INFO) result = str(acl) self.assertIn('-s 10.0.0.0/8', result) self.assertNotIn('-m set --match-set good-term-3-src src', result) self.naming.GetNetAddr.assert_called_once_with('INTERNAL') def testOneDestinationAddress(self): self.naming.GetNetAddr.return_value = [nacaddr.IPv4('172.16.0.0/12')] acl = ipset.Ipset(policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_2, self.naming), EXP_INFO) result = str(acl) self.assertIn('-d 172.16.0.0/12', result) self.assertNotIn('-m set --match-set good-term-2-dst dst', result) self.naming.GetNetAddr.assert_called_once_with('EXTERNAL') def 
testOneSourceAndDestinationAddress(self): self.naming.GetNetAddr.side_effect = [ [nacaddr.IPv4('10.0.0.0/8')], [nacaddr.IPv4('172.16.0.0/12')]] acl = ipset.Ipset(policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_3, self.naming), EXP_INFO) result = str(acl) self.assertIn('-s 10.0.0.0/8', result) self.assertIn('-d 172.16.0.0/12', result) self.assertNotIn('-m set --match-set good-term-3-src src', result) self.assertNotIn('-m set --match-set good-term-3-dst dst', result) self.naming.GetNetAddr.assert_has_calls([ mock.call('INTERNAL'), mock.call('EXTERNAL')]) def testManySourceAddresses(self): self.naming.GetNetAddr.return_value = [ nacaddr.IPv4('10.0.0.0/24'), nacaddr.IPv4('10.1.0.0/24')] acl = ipset.Ipset(policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_1, self.naming), EXP_INFO) result = str(acl) self.assertIn('create good-term-1-src hash:net family inet hashsize' ' 4 maxelem 4', result) self.assertIn('add good-term-1-src 10.0.0.0/24', result) self.assertIn('add good-term-1-src 10.1.0.0/24', result) self.assertIn('-m set --match-set good-term-1-src src', result) self.assertNotIn('-s ', result) self.naming.GetNetAddr.assert_called_once_with('INTERNAL') def testManyDestinationAddresses(self): self.naming.GetNetAddr.return_value = [ nacaddr.IPv4('172.16.0.0/24'), nacaddr.IPv4('172.17.0.0/24')] acl = ipset.Ipset(policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_2, self.naming), EXP_INFO) result = str(acl) self.assertIn('create good-term-2-dst hash:net family inet hashsize ' '4 maxelem 4', result) self.assertIn('add good-term-2-dst 172.16.0.0/24', result) self.assertIn('add good-term-2-dst 172.17.0.0/24', result) self.assertIn('-m set --match-set good-term-2-dst dst', result) self.assertNotIn('-s ', result) self.naming.GetNetAddr.assert_called_once_with('EXTERNAL') def testManySourceAndDestinationAddresses(self): self.naming.GetNetAddr.side_effect = [ [nacaddr.IPv4('10.0.0.0/24'), nacaddr.IPv4('10.1.0.0/24')], [nacaddr.IPv4('172.16.0.0/24'), nacaddr.IPv4('172.17.0.0/24')]] acl = 
ipset.Ipset(policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_3, self.naming), EXP_INFO) result = str(acl) self.assertIn('create good-term-3-src hash:net family inet hashsize ' '4 maxelem 4', result) self.assertIn('create good-term-3-dst hash:net family inet hashsize ' '4 maxelem 4', result) self.assertIn('add good-term-3-src 10.0.0.0/24', result) self.assertIn('add good-term-3-src 10.1.0.0/24', result) self.assertIn('add good-term-3-dst 172.16.0.0/24', result) self.assertIn('add good-term-3-dst 172.17.0.0/24', result) self.assertIn('-m set --match-set good-term-3-src src', result) self.assertIn('-m set --match-set good-term-3-dst dst', result) self.assertNotIn('-s ', result) self.assertNotIn('-d ', result) self.naming.GetNetAddr.assert_has_calls([ mock.call('INTERNAL'), mock.call('EXTERNAL')]) def testBuildTokens(self): pol1 = ipset.Ipset(policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_3, self.naming), EXP_INFO) st, sst = pol1._BuildTokens() self.assertEquals(st, SUPPORTED_TOKENS) self.assertEquals(sst, SUPPORTED_SUB_TOKENS) def testBuildWarningTokens(self): pol1 = ipset.Ipset(policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_4, self.naming), EXP_INFO) st, sst = pol1._BuildTokens() self.assertEquals(st, SUPPORTED_TOKENS) self.assertEquals(sst, SUPPORTED_SUB_TOKENS) if __name__ == '__main__': unittest.main()
#!/usr/bin/env python # Written by Bram Cohen # modified for multitracker by John Hoffman # see LICENSE.txt for license information from BitTornado import PSYCO if PSYCO.psyco: try: import psyco assert psyco.__version__ >= 0x010100f0 psyco.full() except: pass from sys import argv, version from BitTornado.BT1.makemetafile import make_meta_file, completedir from threading import Event, Thread from BitTornado.bencode import bdecode import sys from os import getcwd from os.path import join, isdir try: from wxPython.wx import * except: print 'wxPython is either not installed or has not been installed properly.' sys.exit(1) try: True except: True = 1 False = 0 wxEVT_INVOKE = wxNewEventType() def EVT_INVOKE(win, func): win.Connect(-1, -1, wxEVT_INVOKE, func) class InvokeEvent(wxPyEvent): def __init__(self, func, args, kwargs): wxPyEvent.__init__(self) self.SetEventType(wxEVT_INVOKE) self.func = func self.args = args self.kwargs = kwargs class DownloadInfo: def __init__(self): frame = wxFrame(None, -1, 'BitTorrent Torrent File Maker', size = wxSize(550, 410)) self.frame = frame panel = wxPanel(frame, -1) gridSizer = wxFlexGridSizer(cols = 2, rows = 2, vgap = 0, hgap = 8) gridSizer.Add(wxStaticText(panel, -1, 'make torrent of:')) b = wxBoxSizer(wxHORIZONTAL) self.dirCtl = wxTextCtrl(panel, -1, '') b.Add(self.dirCtl, 1, wxEXPAND) # b.Add(10, 10, 0, wxEXPAND) button = wxButton(panel, -1, 'dir', size = (30,20)) EVT_BUTTON(frame, button.GetId(), self.selectdir) b.Add(button, 0) button2 = wxButton(panel, -1, 'file', size = (30,20)) EVT_BUTTON(frame, button2.GetId(), self.selectfile) b.Add(button2, 0) gridSizer.Add(b, 0, wxEXPAND) gridSizer.Add(wxStaticText(panel, -1, '')) gridSizer.Add(wxStaticText(panel, -1, '')) gridSizer.Add(wxStaticText(panel, -1, 'announce url:')) self.annCtl = wxTextCtrl(panel, -1, 'http://my.tracker:6969/announce') gridSizer.Add(self.annCtl, 0, wxEXPAND) gridSizer.Add(wxStaticText(panel, -1, '')) gridSizer.Add(wxStaticText(panel, -1, '')) a = 
wxFlexGridSizer(cols = 1) a.Add(wxStaticText(panel, -1, 'announce list:')) a.Add(wxStaticText(panel, -1, '')) abutton = wxButton(panel, -1, 'copy\nannounces\nfrom\ntorrent', size = (50,70)) EVT_BUTTON(frame, abutton.GetId(), self.announcecopy) a.Add(abutton, 0, wxEXPAND) gridSizer.Add(a, 0, wxEXPAND) self.annListCtl = wxTextCtrl(panel, -1, '\n\n\n\n\n', wxPoint(-1,-1), (400,120), wxTE_MULTILINE|wxHSCROLL|wxTE_DONTWRAP) gridSizer.Add(self.annListCtl, -1, wxEXPAND) gridSizer.Add(wxStaticText(panel, -1, '')) exptext = wxStaticText(panel, -1, "a list of announces separated by commas " + "or whitespace and on several lines -\n" + "trackers on the same line will be tried randomly," + "and all the trackers on one line\n" + "will be tried before the trackers on the next line.") exptext.SetFont(wxFont(6, wxDEFAULT, wxNORMAL, wxNORMAL, False)) gridSizer.Add(exptext) gridSizer.Add(wxStaticText(panel, -1, '')) gridSizer.Add(wxStaticText(panel, -1, '')) gridSizer.Add(wxStaticText(panel, -1, 'piece size:')) self.piece_length = wxChoice(panel, -1, choices = ['automatic', '2MiB', '1MiB', '512KiB', '256KiB', '128KiB', '64KiB', '32KiB']) self.piece_length_list = [0, 21, 20, 19, 18, 17, 16, 15] self.piece_length.SetSelection(0) gridSizer.Add(self.piece_length) gridSizer.Add(wxStaticText(panel, -1, '')) gridSizer.Add(wxStaticText(panel, -1, '')) gridSizer.Add(wxStaticText(panel, -1, 'comment:')) self.commentCtl = wxTextCtrl(panel, -1, '') gridSizer.Add(self.commentCtl, 0, wxEXPAND) gridSizer.AddGrowableCol(1) border = wxBoxSizer(wxVERTICAL) border.Add(gridSizer, 0, wxEXPAND | wxNORTH | wxEAST | wxWEST, 25) b2 = wxButton(panel, -1, 'make') # border.Add(10, 10, 1, wxEXPAND) border.Add(b2, 0, wxALIGN_CENTER | wxSOUTH, 20) EVT_BUTTON(frame, b2.GetId(), self.complete) panel.SetSizer(border) panel.SetAutoLayout(True) # panel.DragAcceptFiles(True) # EVT_DROP_FILES(panel, self.selectdrop) def selectdir(self, x): dl = wxDirDialog(self.frame, style = wxDD_DEFAULT_STYLE | wxDD_NEW_DIR_BUTTON) if 
dl.ShowModal() == wxID_OK: self.dirCtl.SetValue(dl.GetPath()) def selectfile(self, x): dl = wxFileDialog (self.frame, 'Choose file or directory to use', '', '', '', wxOPEN) if dl.ShowModal() == wxID_OK: self.dirCtl.SetValue(dl.GetPath()) def selectdrop(self, x): print x list = x.m_files self.dirCtl.SetValue(x[0]) def announcecopy(self, x): dl = wxFileDialog (self.frame, 'Choose .torrent file to use', '', '', '*.torrent', wxOPEN) if dl.ShowModal() == wxID_OK: try: h = open(dl.GetPath(), 'rb') metainfo = bdecode(h.read()) h.close() self.annCtl.SetValue(metainfo['announce']) if metainfo.has_key('announce-list'): list = [] for tier in metainfo['announce-list']: for tracker in tier: list += [tracker, ', '] del list[-1] list += ['\n'] liststring = '' for i in list: liststring += i self.annListCtl.SetValue(liststring+'\n\n') else: self.annListCtl.SetValue('') except: return def getannouncelist(self): list = [] for t in self.annListCtl.GetValue().split('\n'): tier = [] t = t.replace(',',' ') for tr in t.split(' '): if tr != '': tier += [tr] if len(tier)>0: list.append(tier) return list def complete(self, x): if self.dirCtl.GetValue() == '': dlg = wxMessageDialog(self.frame, message = 'You must select a\n file or directory', caption = 'Error', style = wxOK | wxICON_ERROR) dlg.ShowModal() dlg.Destroy() return params = {'piece_size_pow2': self.piece_length_list[self.piece_length.GetSelection()]} annlist = self.getannouncelist() if len(annlist)>0: params['real_announce_list'] = annlist comment = self.commentCtl.GetValue() if comment != '': params['comment'] = comment try: CompleteDir(self.dirCtl.GetValue(), self.annCtl.GetValue(), params) except: print_exc() from traceback import print_exc class CompleteDir: def __init__(self, d, a, params): self.d = d self.a = a self.params = params self.flag = Event() self.separatetorrents = False if isdir(d): self.choicemade = Event() frame = wxFrame(None, -1, 'BitTorrent make torrent', size = (1,1)) self.frame = frame panel = 
wxPanel(frame, -1) gridSizer = wxFlexGridSizer(cols = 1, vgap = 8, hgap = 8) gridSizer.AddGrowableRow(1) gridSizer.Add(wxStaticText(panel, -1, 'Do you want to make a separate .torrent'),0,wxALIGN_CENTER) gridSizer.Add(wxStaticText(panel, -1, 'for every item in this directory?'),0,wxALIGN_CENTER) gridSizer.Add(wxStaticText(panel, -1, '')) b = wxFlexGridSizer(cols = 3, hgap = 10) yesbut = wxButton(panel, -1, 'Yes') def saidyes(e, self = self): self.frame.Destroy() self.separatetorrents = True self.begin() EVT_BUTTON(frame, yesbut.GetId(), saidyes) b.Add(yesbut, 0) nobut = wxButton(panel, -1, 'No') def saidno(e, self = self): self.frame.Destroy() self.begin() EVT_BUTTON(frame, nobut.GetId(), saidno) b.Add(nobut, 0) cancelbut = wxButton(panel, -1, 'Cancel') def canceled(e, self = self): self.frame.Destroy() EVT_BUTTON(frame, cancelbut.GetId(), canceled) b.Add(cancelbut, 0) gridSizer.Add(b, 0, wxALIGN_CENTER) border = wxBoxSizer(wxHORIZONTAL) border.Add(gridSizer, 1, wxEXPAND | wxALL, 4) panel.SetSizer(border) panel.SetAutoLayout(True) frame.Show() border.Fit(panel) frame.Fit() else: self.begin() def begin(self): if self.separatetorrents: frame = wxFrame(None, -1, 'BitTorrent make directory', size = wxSize(550, 250)) else: frame = wxFrame(None, -1, 'BitTorrent make torrent', size = wxSize(550, 250)) self.frame = frame panel = wxPanel(frame, -1) gridSizer = wxFlexGridSizer(cols = 1, vgap = 15, hgap = 8) if self.separatetorrents: self.currentLabel = wxStaticText(panel, -1, 'checking file sizes') else: self.currentLabel = wxStaticText(panel, -1, 'building ' + self.d + '.torrent') gridSizer.Add(self.currentLabel, 0, wxEXPAND) self.gauge = wxGauge(panel, -1, range = 1000, style = wxGA_SMOOTH) gridSizer.Add(self.gauge, 0, wxEXPAND) gridSizer.Add((10, 10), 1, wxEXPAND) self.button = wxButton(panel, -1, 'cancel') gridSizer.Add(self.button, 0, wxALIGN_CENTER) gridSizer.AddGrowableRow(2) gridSizer.AddGrowableCol(0) g2 = wxFlexGridSizer(cols = 1, vgap = 15, hgap = 8) 
g2.Add(gridSizer, 1, wxEXPAND | wxALL, 25) g2.AddGrowableRow(0) g2.AddGrowableCol(0) panel.SetSizer(g2) panel.SetAutoLayout(True) EVT_BUTTON(frame, self.button.GetId(), self.done) EVT_CLOSE(frame, self.done) EVT_INVOKE(frame, self.onInvoke) frame.Show(True) Thread(target = self.complete).start() def complete(self): try: if self.separatetorrents: completedir(self.d, self.a, self.params, self.flag, self.valcallback, self.filecallback) else: make_meta_file(self.d, self.a, self.params, self.flag, self.valcallback, progress_percent = 1) if not self.flag.isSet(): self.currentLabel.SetLabel('Done!') self.gauge.SetValue(1000) self.button.SetLabel('Close') self.frame.Refresh() except (OSError, IOError), e: self.currentLabel.SetLabel('Error!') self.button.SetLabel('Close') dlg = wxMessageDialog(self.frame, message = 'Error - ' + str(e), caption = 'Error', style = wxOK | wxICON_ERROR) dlg.ShowModal() dlg.Destroy() def valcallback(self, amount): self.invokeLater(self.onval, [amount]) def onval(self, amount): self.gauge.SetValue(int(amount * 1000)) def filecallback(self, f): self.invokeLater(self.onfile, [f]) def onfile(self, f): self.currentLabel.SetLabel('building ' + join(self.d, f) + '.torrent') def onInvoke(self, event): if not self.flag.isSet(): apply(event.func, event.args, event.kwargs) def invokeLater(self, func, args = [], kwargs = {}): if not self.flag.isSet(): wxPostEvent(self.frame, InvokeEvent(func, args, kwargs)) def done(self, event): self.flag.set() self.frame.Destroy() class btWxApp(wxApp): def OnInit(self): d = DownloadInfo() d.frame.Show(True) self.SetTopWindow(d.frame) return True if __name__ == '__main__': btWxApp().MainLoop()
# -*- coding: utf-8 -*-
# South schema migration for djstripe: renames the Customer foreign-key
# column `user_id` to `subscriber_id`.  The `models` dict below is the
# auto-generated frozen-ORM snapshot and must not be edited by hand.
from south.db import db
from south.v2 import SchemaMigration

class Migration(SchemaMigration):

    def forwards(self, orm):
        # Rename 'user' field to 'subscriber'
        db.rename_column('djstripe_customer', 'user_id', 'subscriber_id')

    def backwards(self, orm):
        # Rename 'subscriber' field to 'user'
        db.rename_column('djstripe_customer', 'subscriber_id', 'user_id')

    # Frozen ORM state at the time of this migration (South-generated).
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'djstripe.charge': {
            'Meta': {'object_name': 'Charge'},
            'amount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '7', 'decimal_places': '2'}),
            'amount_refunded': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '7', 'decimal_places': '2'}),
            'card_kind': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'card_last_4': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'}),
            'charge_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'customer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'charges'", 'to': u"orm['djstripe.Customer']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'disputed': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'fee': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '7', 'decimal_places': '2'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'invoice': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'charges'", 'null': 'True', 'to': u"orm['djstripe.Invoice']"}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'paid': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'receipt_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'refunded': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'stripe_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
        },
        u'djstripe.currentsubscription': {
            'Meta': {'object_name': 'CurrentSubscription'},
            'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'cancel_at_period_end': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'canceled_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'current_period_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'current_period_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'customer': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'current_subscription'", 'unique': 'True', 'null': 'True', 'to': u"orm['djstripe.Customer']"}),
            'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'plan': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'quantity': ('django.db.models.fields.IntegerField', [], {}),
            'start': ('django.db.models.fields.DateTimeField', [], {}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
            'trial_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'trial_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        u'djstripe.customer': {
            'Meta': {'object_name': 'Customer'},
            'card_fingerprint': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'card_kind': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'card_last_4': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'date_purged': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'stripe_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'subscriber': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'null': 'True'})
        },
        u'djstripe.event': {
            'Meta': {'object_name': 'Event'},
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['djstripe.Customer']", 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kind': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'livemode': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'stripe_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'valid': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'validated_message': ('jsonfield.fields.JSONField', [], {'null': 'True'}),
            'webhook_message': ('jsonfield.fields.JSONField', [], {})
        },
        u'djstripe.eventprocessingexception': {
            'Meta': {'object_name': 'EventProcessingException'},
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'data': ('django.db.models.fields.TextField', [], {}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['djstripe.Event']", 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'traceback': ('django.db.models.fields.TextField', [], {})
        },
        u'djstripe.invoice': {
            'Meta': {'ordering': "[u'-date']", 'object_name': 'Invoice'},
            'attempted': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'attempts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
            'charge': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'customer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'invoices'", 'to': u"orm['djstripe.Customer']"}),
            'date': ('django.db.models.fields.DateTimeField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'paid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'period_end': ('django.db.models.fields.DateTimeField', [], {}),
            'period_start': ('django.db.models.fields.DateTimeField', [], {}),
            'stripe_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'subtotal': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'total': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'})
        },
        u'djstripe.invoiceitem': {
            'Meta': {'object_name': 'InvoiceItem'},
            'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'currency': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'invoice': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'items'", 'to': u"orm['djstripe.Invoice']"}),
            'line_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'period_end': ('django.db.models.fields.DateTimeField', [], {}),
            'period_start': ('django.db.models.fields.DateTimeField', [], {}),
            'plan': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'proration': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'quantity': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'stripe_id': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'djstripe.plan': {
            'Meta': {'object_name': 'Plan'},
            'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'currency': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'interval': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'interval_count': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'stripe_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'trial_period_days': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'djstripe.transfer': {
            'Meta': {'object_name': 'Transfer'},
            'adjustment_count': ('django.db.models.fields.IntegerField', [], {}),
            'adjustment_fees': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'adjustment_gross': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'charge_count': ('django.db.models.fields.IntegerField', [], {}),
            'charge_fees': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'charge_gross': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'collected_fee_count': ('django.db.models.fields.IntegerField', [], {}),
            'collected_fee_gross': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'date': ('django.db.models.fields.DateTimeField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'transfers'", 'to': u"orm['djstripe.Event']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'net': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'refund_count': ('django.db.models.fields.IntegerField', [], {}),
            'refund_fees': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'refund_gross': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
            'stripe_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'validation_count': ('django.db.models.fields.IntegerField', [], {}),
            'validation_fees': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'})
        },
        u'djstripe.transferchargefee': {
            'Meta': {'object_name': 'TransferChargeFee'},
            'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '7', 'decimal_places': '2'}),
            'application': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kind': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'transfer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'charge_fee_details'", 'to': u"orm['djstripe.Transfer']"})
        }
    }

    complete_apps = ['djstripe']
import datetime
from email.headerregistry import Address
from typing import Any, Dict, Iterable, List, Mapping, Optional, TypeVar, Union
from unittest import mock

import ujson
from django.conf import settings
from django.core.exceptions import ValidationError
from django.test import override_settings

from zerver.lib.actions import (
    create_users,
    do_change_user_role,
    do_create_user,
    do_deactivate_user,
    do_reactivate_user,
    do_set_realm_property,
    get_emails_from_user_ids,
    get_recipient_info,
)
from zerver.lib.avatar import avatar_url, get_gravatar_url
from zerver.lib.create_user import copy_user_settings
from zerver.lib.events import do_events_register
from zerver.lib.exceptions import JsonableError
from zerver.lib.send_email import clear_scheduled_emails, deliver_email, send_future_email
from zerver.lib.stream_topic import StreamTopicTarget
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
    get_subscription,
    queries_captured,
    reset_emails_in_zulip_realm,
    simulated_empty_cache,
    tornado_redirected_to_list,
)
from zerver.lib.topic_mutes import add_topic_mute
from zerver.lib.users import access_user_by_id, get_accounts_for_email, user_ids_to_users
from zerver.models import (
    CustomProfileField,
    InvalidFakeEmailDomain,
    Realm,
    RealmDomain,
    Recipient,
    ScheduledEmail,
    UserHotspot,
    UserProfile,
    check_valid_user_ids,
    get_client,
    get_fake_email_domain,
    get_realm,
    get_source_profile,
    get_stream,
    get_system_bot,
    get_user,
    get_user_by_delivery_email,
    get_user_by_id_in_realm_including_cross_realm,
)

K = TypeVar('K')
V = TypeVar('V')

def find_dict(lst: Iterable[Dict[K, V]], k: K, v: V) -> Dict[K, V]:
    """Return the first dict in *lst* whose value under key *k* equals *v*.

    Raises AssertionError when no matching dict exists.
    """
    for dct in lst:
        if dct[k] == v:
            return dct
    raise AssertionError(f'Cannot find element in list where key {k} == {v}')

class PermissionTest(ZulipTestCase):
    """Tests for role changes (owner/admin/member/guest) via the users API."""

    def test_role_setters(self) -> None:
        # The is_realm_admin / is_guest property setters must keep the
        # underlying `role` field consistent.
        user_profile = self.example_user('hamlet')

        user_profile.is_realm_admin = True
        self.assertEqual(user_profile.is_realm_admin, True)
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)

        user_profile.is_guest = False
        self.assertEqual(user_profile.is_guest, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)

        user_profile.is_realm_admin = False
        self.assertEqual(user_profile.is_realm_admin, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_MEMBER)

        user_profile.is_guest = True
        self.assertEqual(user_profile.is_guest, True)
        self.assertEqual(user_profile.role, UserProfile.ROLE_GUEST)

        user_profile.is_realm_admin = False
        self.assertEqual(user_profile.is_guest, True)
        self.assertEqual(user_profile.role, UserProfile.ROLE_GUEST)

        user_profile.is_guest = False
        self.assertEqual(user_profile.is_guest, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_MEMBER)

    def test_get_admin_users(self) -> None:
        # get_human_admin_users / get_admin_users_and_bots must track role changes.
        user_profile = self.example_user('hamlet')
        do_change_user_role(user_profile, UserProfile.ROLE_MEMBER)
        self.assertFalse(user_profile.is_realm_owner)
        admin_users = user_profile.realm.get_human_admin_users()
        self.assertFalse(user_profile in admin_users)
        admin_users = user_profile.realm.get_admin_users_and_bots()
        self.assertFalse(user_profile in admin_users)

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.assertFalse(user_profile.is_realm_owner)
        admin_users = user_profile.realm.get_human_admin_users()
        self.assertTrue(user_profile in admin_users)
        admin_users = user_profile.realm.get_admin_users_and_bots()
        self.assertTrue(user_profile in admin_users)

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_OWNER)
        self.assertTrue(user_profile.is_realm_owner)
        admin_users = user_profile.realm.get_human_admin_users()
        self.assertTrue(user_profile in admin_users)
        admin_users = user_profile.realm.get_admin_users_and_bots()
        self.assertTrue(user_profile in admin_users)

    def test_updating_non_existent_user(self) -> None:
        self.login('hamlet')
        admin = self.example_user('hamlet')
        do_change_user_role(admin, UserProfile.ROLE_REALM_ADMINISTRATOR)

        invalid_user_id = 1000
        result = self.client_patch(f'/json/users/{invalid_user_id}', {})
        self.assert_json_error(result, 'No such user')

    def test_owner_api(self) -> None:
        self.login('iago')

        desdemona = self.example_user('desdemona')
        othello = self.example_user('othello')
        iago = self.example_user('iago')
        realm = iago.realm

        do_change_user_role(iago, UserProfile.ROLE_REALM_OWNER)

        result = self.client_get('/json/users')
        self.assert_json_success(result)
        members = result.json()['members']
        iago_dict = find_dict(members, 'email', iago.email)
        self.assertTrue(iago_dict['is_owner'])
        othello_dict = find_dict(members, 'email', othello.email)
        self.assertFalse(othello_dict['is_owner'])

        req = dict(role=UserProfile.ROLE_REALM_OWNER)
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{othello.id}', req)
        self.assert_json_success(result)
        owner_users = realm.get_human_owner_users()
        self.assertTrue(othello in owner_users)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], othello.id)
        self.assertEqual(person['role'], UserProfile.ROLE_REALM_OWNER)

        req = dict(role=UserProfile.ROLE_MEMBER)
        events = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{othello.id}', req)
        self.assert_json_success(result)
        owner_users = realm.get_human_owner_users()
        self.assertFalse(othello in owner_users)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], othello.id)
        self.assertEqual(person['role'], UserProfile.ROLE_MEMBER)

        # Cannot take away from last owner
        self.login('desdemona')
        req = dict(role=UserProfile.ROLE_MEMBER)
        events = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{iago.id}', req)
        self.assert_json_success(result)
        owner_users = realm.get_human_owner_users()
        self.assertFalse(iago in owner_users)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], iago.id)
        self.assertEqual(person['role'], UserProfile.ROLE_MEMBER)
        with tornado_redirected_to_list([]):
            result = self.client_patch(f'/json/users/{desdemona.id}', req)
        self.assert_json_error(result, 'The owner permission cannot be removed from the only organization owner.')

        do_change_user_role(iago, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.login('iago')
        with tornado_redirected_to_list([]):
            result = self.client_patch(f'/json/users/{desdemona.id}', req)
        self.assert_json_error(result, 'Must be an organization owner')

    def test_admin_api(self) -> None:
        self.login('desdemona')

        hamlet = self.example_user('hamlet')
        othello = self.example_user('othello')
        desdemona = self.example_user('desdemona')
        realm = hamlet.realm

        # Make sure we see is_admin flag in /json/users
        result = self.client_get('/json/users')
        self.assert_json_success(result)
        members = result.json()['members']
        desdemona_dict = find_dict(members, 'email', desdemona.email)
        self.assertTrue(desdemona_dict['is_admin'])
        othello_dict = find_dict(members, 'email', othello.email)
        self.assertFalse(othello_dict['is_admin'])

        # Giveth
        req = dict(role=ujson.dumps(UserProfile.ROLE_REALM_ADMINISTRATOR))

        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{othello.id}', req)
        self.assert_json_success(result)
        admin_users = realm.get_human_admin_users()
        self.assertTrue(othello in admin_users)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], othello.id)
        self.assertEqual(person['role'], UserProfile.ROLE_REALM_ADMINISTRATOR)

        # Taketh away
        req = dict(role=ujson.dumps(UserProfile.ROLE_MEMBER))
        events = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{othello.id}', req)
        self.assert_json_success(result)
        admin_users = realm.get_human_admin_users()
        self.assertFalse(othello in admin_users)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], othello.id)
        self.assertEqual(person['role'], UserProfile.ROLE_MEMBER)

        # Make sure only admins can patch other user's info.
        self.login('othello')
        result = self.client_patch(f'/json/users/{hamlet.id}', req)
        self.assert_json_error(result, 'Insufficient permission')

    def test_admin_api_hide_emails(self) -> None:
        reset_emails_in_zulip_realm()

        user = self.example_user('hamlet')
        admin = self.example_user('iago')
        self.login_user(user)

        # First, verify client_gravatar works normally
        result = self.client_get('/json/users?client_gravatar=true')
        self.assert_json_success(result)
        members = result.json()['members']
        hamlet = find_dict(members, 'user_id', user.id)
        self.assertEqual(hamlet['email'], user.email)
        self.assertIsNone(hamlet['avatar_url'])
        self.assertNotIn('delivery_email', hamlet)

        # Also verify the /events code path. This is a bit hacky, but
        # we need to verify client_gravatar is not being overridden.
        with mock.patch('zerver.lib.events.request_event_queue', return_value=None) as mock_request_event_queue:
            with self.assertRaises(JsonableError):
                result = do_events_register(user, get_client("website"), client_gravatar=True)
            self.assertEqual(mock_request_event_queue.call_args_list[0][0][3], True)

        #############################################################
        # Now, switch email address visibility, check client_gravatar
        # is automatically disabled for the user.
        do_set_realm_property(user.realm, "email_address_visibility", Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        result = self.client_get('/json/users?client_gravatar=true')
        self.assert_json_success(result)
        members = result.json()['members']
        hamlet = find_dict(members, 'user_id', user.id)
        self.assertEqual(hamlet['email'], f"user{user.id}@zulip.testserver")
        # Note that the Gravatar URL should still be computed from the
        # `delivery_email`; otherwise, we won't be able to serve the
        # user's Gravatar.
        self.assertEqual(hamlet['avatar_url'], get_gravatar_url(user.delivery_email, 1))
        self.assertNotIn('delivery_email', hamlet)

        # Also verify the /events code path. This is a bit hacky, but
        # basically we want to verify client_gravatar is being
        # overridden.
        with mock.patch('zerver.lib.events.request_event_queue', return_value=None) as mock_request_event_queue:
            with self.assertRaises(JsonableError):
                result = do_events_register(user, get_client("website"), client_gravatar=True)
            self.assertEqual(mock_request_event_queue.call_args_list[0][0][3], False)

        # client_gravatar is still turned off for admins. In theory,
        # it doesn't need to be, but client-side changes would be
        # required in apps like the mobile apps.
        # delivery_email is sent for admins.
        admin.refresh_from_db()
        self.login_user(admin)
        result = self.client_get('/json/users?client_gravatar=true')
        self.assert_json_success(result)
        members = result.json()['members']
        hamlet = find_dict(members, 'user_id', user.id)
        self.assertEqual(hamlet['email'], f"user{user.id}@zulip.testserver")
        self.assertEqual(hamlet['avatar_url'], get_gravatar_url(user.email, 1))
        self.assertEqual(hamlet['delivery_email'], self.example_email("hamlet"))

    def test_user_cannot_promote_to_admin(self) -> None:
        self.login('hamlet')
        req = dict(role=ujson.dumps(UserProfile.ROLE_REALM_ADMINISTRATOR))
        result = self.client_patch('/json/users/{}'.format(self.example_user('hamlet').id), req)
        self.assert_json_error(result, 'Insufficient permission')

    def test_admin_user_can_change_full_name(self) -> None:
        new_name = 'new name'
        self.login('iago')
        hamlet = self.example_user('hamlet')
        req = dict(full_name=ujson.dumps(new_name))
        result = self.client_patch(f'/json/users/{hamlet.id}', req)
        self.assert_json_success(result)
        hamlet = self.example_user('hamlet')
        self.assertEqual(hamlet.full_name, new_name)

    def test_non_admin_cannot_change_full_name(self) -> None:
        self.login('hamlet')
        req = dict(full_name=ujson.dumps('new name'))
        result = self.client_patch('/json/users/{}'.format(self.example_user('othello').id), req)
        self.assert_json_error(result, 'Insufficient permission')

    def test_admin_cannot_set_long_full_name(self) -> None:
        new_name = 'a' * (UserProfile.MAX_NAME_LENGTH + 1)
        self.login('iago')
        req = dict(full_name=ujson.dumps(new_name))
        result = self.client_patch('/json/users/{}'.format(self.example_user('hamlet').id), req)
        self.assert_json_error(result, 'Name too long!')

    def test_admin_cannot_set_short_full_name(self) -> None:
        new_name = 'a'
        self.login('iago')
        req = dict(full_name=ujson.dumps(new_name))
        result = self.client_patch('/json/users/{}'.format(self.example_user('hamlet').id), req)
        self.assert_json_error(result, 'Name too short!')

    def test_not_allowed_format(self) -> None:
        # Name of format "Alice|999" breaks in markdown
        new_name = 'iago|72'
        self.login('iago')
        req = dict(full_name=ujson.dumps(new_name))
        result = self.client_patch('/json/users/{}'.format(self.example_user('hamlet').id), req)
        self.assert_json_error(result, 'Invalid format!')

    def test_allowed_format_complex(self) -> None:
        # Adding characters after r'|\d+' doesn't break markdown
        new_name = 'Hello- 12iago|72k'
        self.login('iago')
        req = dict(full_name=ujson.dumps(new_name))
        result = self.client_patch('/json/users/{}'.format(self.example_user('hamlet').id), req)
        self.assert_json_success(result)

    def test_not_allowed_format_complex(self) -> None:
        new_name = 'Hello- 12iago|72'
        self.login('iago')
        req = dict(full_name=ujson.dumps(new_name))
        result = self.client_patch('/json/users/{}'.format(self.example_user('hamlet').id), req)
        self.assert_json_error(result, 'Invalid format!')

    def test_admin_cannot_set_full_name_with_invalid_characters(self) -> None:
        new_name = 'Opheli*'
        self.login('iago')
        req = dict(full_name=ujson.dumps(new_name))
        result = self.client_patch('/json/users/{}'.format(self.example_user('hamlet').id), req)
        self.assert_json_error(result, 'Invalid characters in name!')

    def test_access_user_by_id(self) -> None:
        iago = self.example_user("iago")

        # Must be a valid user ID in the realm
        with self.assertRaises(JsonableError):
            access_user_by_id(iago, 1234)
        with self.assertRaises(JsonableError):
            access_user_by_id(iago, self.mit_user("sipbtest").id)

        # Can only access bot users if allow_bots is passed
        bot = self.example_user("default_bot")
        access_user_by_id(iago, bot.id, allow_bots=True)
        with self.assertRaises(JsonableError):
            access_user_by_id(iago, bot.id)

        # Can only access deactivated users if allow_deactivated is passed
        hamlet = self.example_user("hamlet")
        do_deactivate_user(hamlet)
        with self.assertRaises(JsonableError):
            access_user_by_id(iago, hamlet.id)
        access_user_by_id(iago, hamlet.id, allow_deactivated=True)

        # Non-admin user can't admin another user
        with self.assertRaises(JsonableError):
            access_user_by_id(self.example_user("cordelia"), self.example_user("aaron").id)
        # But does have read-only access to it.
        access_user_by_id(self.example_user("cordelia"), self.example_user("aaron").id, read_only=True)

    def test_change_regular_member_to_guest(self) -> None:
        iago = self.example_user("iago")
        self.login_user(iago)

        hamlet = self.example_user("hamlet")
        self.assertFalse(hamlet.is_guest)
        req = dict(role=ujson.dumps(UserProfile.ROLE_GUEST))
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{hamlet.id}', req)
        self.assert_json_success(result)

        hamlet = self.example_user("hamlet")
        self.assertTrue(hamlet.is_guest)
        self.assertFalse(hamlet.can_access_all_realm_members())
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], hamlet.id)
        # NOTE(review): two-argument assertTrue treats the second argument as a
        # failure message, so this line always passes; it should be assertEqual.
        self.assertTrue(person['role'], UserProfile.ROLE_GUEST)

    def test_change_guest_to_regular_member(self) -> None:
        iago = self.example_user("iago")
        self.login_user(iago)

        polonius = self.example_user("polonius")
        self.assertTrue(polonius.is_guest)
        req = dict(role=ujson.dumps(UserProfile.ROLE_MEMBER))
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{polonius.id}', req)
        self.assert_json_success(result)

        polonius = self.example_user("polonius")
        self.assertFalse(polonius.is_guest)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], polonius.id)
        self.assertEqual(person['role'], UserProfile.ROLE_MEMBER)

    def test_change_admin_to_guest(self) -> None:
        iago = self.example_user("iago")
        self.login_user(iago)
        hamlet = self.example_user("hamlet")
        do_change_user_role(hamlet, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.assertFalse(hamlet.is_guest)
        self.assertTrue(hamlet.is_realm_admin)

        # Test changing a user from admin to guest and revoking admin status
        hamlet = self.example_user("hamlet")
        self.assertFalse(hamlet.is_guest)
        req = dict(role=ujson.dumps(UserProfile.ROLE_GUEST))
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{hamlet.id}', req)
        self.assert_json_success(result)

        hamlet = self.example_user("hamlet")
        self.assertTrue(hamlet.is_guest)
        self.assertFalse(hamlet.is_realm_admin)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], hamlet.id)
        self.assertEqual(person['role'], UserProfile.ROLE_GUEST)

    def test_change_guest_to_admin(self) -> None:
        iago = self.example_user("iago")
        self.login_user(iago)
        polonius = self.example_user("polonius")
        self.assertTrue(polonius.is_guest)
        self.assertFalse(polonius.is_realm_admin)

        # Test changing a user from guest to admin and revoking guest status
        polonius = self.example_user("polonius")
        self.assertFalse(polonius.is_realm_admin)
        req = dict(role=ujson.dumps(UserProfile.ROLE_REALM_ADMINISTRATOR))
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{polonius.id}', req)
        self.assert_json_success(result)

        polonius = self.example_user("polonius")
        self.assertFalse(polonius.is_guest)
        self.assertTrue(polonius.is_realm_admin)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], polonius.id)
        self.assertEqual(person['role'], UserProfile.ROLE_REALM_ADMINISTRATOR)

    def test_change_owner_to_guest(self) -> None:
        self.login("desdemona")
        iago = self.example_user("iago")
        do_change_user_role(iago, UserProfile.ROLE_REALM_OWNER)
        self.assertFalse(iago.is_guest)
        self.assertTrue(iago.is_realm_owner)

        # Test changing a user from owner to guest and revoking owner status
        iago = self.example_user("iago")
        self.assertFalse(iago.is_guest)
        req = dict(role=UserProfile.ROLE_GUEST)
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{iago.id}', req)
        self.assert_json_success(result)

        iago = self.example_user("iago")
        self.assertTrue(iago.is_guest)
        self.assertFalse(iago.is_realm_owner)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], iago.id)
        self.assertEqual(person['role'], UserProfile.ROLE_GUEST)

    def test_change_guest_to_owner(self) -> None:
        desdemona = self.example_user("desdemona")
        self.login_user(desdemona)
        polonius = self.example_user("polonius")
        self.assertTrue(polonius.is_guest)
        self.assertFalse(polonius.is_realm_owner)

        # Test changing a user from guest to owner and revoking guest status
        polonius = self.example_user("polonius")
        self.assertFalse(polonius.is_realm_owner)
        req = dict(role=UserProfile.ROLE_REALM_OWNER)
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{polonius.id}', req)
        self.assert_json_success(result)

        polonius = self.example_user("polonius")
        self.assertFalse(polonius.is_guest)
        self.assertTrue(polonius.is_realm_owner)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], polonius.id)
        self.assertEqual(person['role'], UserProfile.ROLE_REALM_OWNER)

    def test_change_admin_to_owner(self) -> None:
        desdemona = self.example_user("desdemona")
        self.login_user(desdemona)
        iago = self.example_user("iago")
        self.assertTrue(iago.is_realm_admin)
        self.assertFalse(iago.is_realm_owner)

        # Test changing a user from admin to owner and revoking admin status
        iago = self.example_user("iago")
        self.assertFalse(iago.is_realm_owner)
        req = dict(role=UserProfile.ROLE_REALM_OWNER)
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{iago.id}', req)
        self.assert_json_success(result)

        iago = self.example_user("iago")
        self.assertTrue(iago.is_realm_owner)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], iago.id)
        self.assertEqual(person['role'], UserProfile.ROLE_REALM_OWNER)

    def test_change_owner_to_admin(self) -> None:
        desdemona = self.example_user("desdemona")
        self.login_user(desdemona)
        iago = self.example_user("iago")
        do_change_user_role(iago, UserProfile.ROLE_REALM_OWNER)
        self.assertTrue(iago.is_realm_owner)

        # Test changing a user from owner to admin and revoking owner status
        iago = self.example_user("iago")
        self.assertTrue(iago.is_realm_owner)
        req = dict(role=UserProfile.ROLE_REALM_ADMINISTRATOR)
        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_patch(f'/json/users/{iago.id}', req)
        self.assert_json_success(result)

        iago = self.example_user("iago")
        self.assertFalse(iago.is_realm_owner)
        person = events[0]['event']['person']
        self.assertEqual(person['user_id'], iago.id)
        self.assertEqual(person['role'], UserProfile.ROLE_REALM_ADMINISTRATOR)

    def test_admin_user_can_change_profile_data(self) -> None:
        realm = get_realm('zulip')
        self.login('iago')
        new_profile_data = []
        cordelia = self.example_user("cordelia")

        # Test for all type of data
        fields = {
            'Phone number': 'short text data',
            'Biography': 'long text data',
            'Favorite food': 'short text data',
            'Favorite editor': 'vim',
            'Birthday': '1909-3-5',
            'Favorite website': 'https://zulip.com',
            'Mentor': [cordelia.id],
            'GitHub': 'timabbott',
        }
        for field_name in fields:
            field = CustomProfileField.objects.get(name=field_name, realm=realm)
            new_profile_data.append({
                'id': field.id,
                'value': fields[field_name],
            })
        result = self.client_patch(f'/json/users/{cordelia.id}', {'profile_data': ujson.dumps(new_profile_data)})
        # NOTE(review): this method continues past the visible chunk boundary.
        self.assert_json_success(result)

        # Refetch cordelia and verify every field now carries the value we set.
        cordelia = self.example_user("cordelia")
        for field_dict in cordelia.profile_data:
            with self.subTest(field_name=field_dict['name']):
                self.assertEqual(field_dict['value'], fields[field_dict['name']])

        # Test admin user cannot set invalid profile data.
        # Tuples are (field name, invalid value, expected server error).
        invalid_fields = [
            ('Favorite editor', 'invalid choice', "'invalid choice' is not a valid choice for 'Favorite editor'."),
            ('Birthday', '1909-34-55', "Birthday is not a date"),
            ('Favorite website', 'not url', "Favorite website is not a URL"),
            ('Mentor', "not list of user ids", "User IDs is not a list"),
        ]
        for field_name, field_value, error_msg in invalid_fields:
            new_profile_data = []
            field = CustomProfileField.objects.get(name=field_name, realm=realm)
            new_profile_data.append({
                'id': field.id,
                'value': field_value,
            })
            result = self.client_patch(f'/json/users/{cordelia.id}',
                                       {'profile_data': ujson.dumps(new_profile_data)})
            self.assert_json_error(result, error_msg)

        # non-existent field and no data
        invalid_profile_data = [{
            'id': 9001,
            'value': '',
        }]
        result = self.client_patch(f'/json/users/{cordelia.id}',
                                   {'profile_data': ujson.dumps(invalid_profile_data)})
        self.assert_json_error(result, 'Field id 9001 not found.')

        # non-existent field and data
        invalid_profile_data = [{
            'id': 9001,
            'value': 'some data',
        }]
        result = self.client_patch(f'/json/users/{cordelia.id}',
                                   {'profile_data': ujson.dumps(invalid_profile_data)})
        self.assert_json_error(result, 'Field id 9001 not found.')

        # Test for clearing/resetting field values.
        empty_profile_data = []
        for field_name in fields:
            field = CustomProfileField.objects.get(name=field_name, realm=realm)
            # USER-type fields are cleared with an empty list, all other
            # field types with an empty string.
            value: Union[str, None, List[Any]] = ''
            if field.field_type == CustomProfileField.USER:
                value = []
            empty_profile_data.append({
                'id': field.id,
                'value': value,
            })
        result = self.client_patch(f'/json/users/{cordelia.id}',
                                   {'profile_data': ujson.dumps(empty_profile_data)})
        self.assert_json_success(result)
        # After clearing, every field should read back as None.
        for field_dict in cordelia.profile_data:
            with self.subTest(field_name=field_dict['name']):
                self.assertEqual(field_dict['value'], None)

        # Test adding some of the field values after removing all.
        hamlet = self.example_user("hamlet")
        new_fields = {
            'Phone number': None,
            'Biography': 'A test user',
            'Favorite food': None,
            'Favorite editor': None,
            'Birthday': None,
            'Favorite website': 'https://zulip.github.io',
            'Mentor': [hamlet.id],
            'GitHub': 'timabbott',
        }
        new_profile_data = []
        for field_name in fields:
            field = CustomProfileField.objects.get(name=field_name, realm=realm)
            value = None
            if new_fields[field_name]:
                value = new_fields[field_name]
            new_profile_data.append({
                'id': field.id,
                'value': value,
            })
        result = self.client_patch(f'/json/users/{cordelia.id}',
                                   {'profile_data': ujson.dumps(new_profile_data)})
        self.assert_json_success(result)
        for field_dict in cordelia.profile_data:
            with self.subTest(field_name=field_dict['name']):
                self.assertEqual(field_dict['value'], new_fields[str(field_dict['name'])])

    def test_non_admin_user_cannot_change_profile_data(self) -> None:
        """A regular member may not edit another user's custom profile
        fields (nor their own via this admin endpoint)."""
        self.login('cordelia')
        hamlet = self.example_user("hamlet")
        realm = get_realm("zulip")

        new_profile_data = []
        field = CustomProfileField.objects.get(name="Biography", realm=realm)
        new_profile_data.append({
            'id': field.id,
            'value': "New hamlet Biography",
        })
        result = self.client_patch(f'/json/users/{hamlet.id}',
                                   {'profile_data': ujson.dumps(new_profile_data)})
        self.assert_json_error(result, 'Insufficient permission')

        result = 
self.client_patch('/json/users/{}'.format(self.example_user("cordelia").id),
                                   {'profile_data': ujson.dumps(new_profile_data)})
        self.assert_json_error(result, 'Insufficient permission')

class BulkCreateUserTest(ZulipTestCase):
    def test_create_users(self) -> None:
        """create_users respects the realm's email_address_visibility:
        with ADMINS-only visibility .email becomes a generated dummy
        address, with EVERYONE it matches the delivery email."""
        realm = get_realm('zulip')
        realm.email_address_visibility = Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS
        realm.save()

        # NOTE(review): 'Flinstone' (sic) is deliberate-looking test data;
        # the assertions below only check the generated dummy email.
        name_list = [
            ('Fred Flinstone', 'fred@zulip.com'),
            ('Lisa Simpson', 'lisa@zulip.com'),
        ]

        create_users(realm, name_list)

        # With ADMINS-only visibility, .email is the dummy address, not the
        # delivery address.
        fred = get_user_by_delivery_email('fred@zulip.com', realm)
        self.assertEqual(
            fred.email,
            f'user{fred.id}@zulip.testserver',
        )

        lisa = get_user_by_delivery_email('lisa@zulip.com', realm)
        self.assertEqual(lisa.full_name, 'Lisa Simpson')
        self.assertEqual(lisa.is_bot, False)
        self.assertEqual(lisa.bot_type, None)

        realm.email_address_visibility = Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE
        realm.save()

        name_list = [
            ('Bono', 'bono@zulip.com'),
            ('Cher', 'cher@zulip.com'),
        ]

        create_users(realm, name_list)
        # With EVERYONE visibility, .email equals the delivery email.
        bono = get_user_by_delivery_email('bono@zulip.com', realm)
        self.assertEqual(bono.email, 'bono@zulip.com')
        self.assertEqual(bono.delivery_email, 'bono@zulip.com')
        cher = get_user_by_delivery_email('cher@zulip.com', realm)
        self.assertEqual(cher.full_name, 'Cher')

class AdminCreateUserTest(ZulipTestCase):
    def test_create_user_backend(self) -> None:

        # This test should give us complete coverage on
        # create_user_backend.  It mostly exercises error
        # conditions, and it also does a basic test of the success
        # path.
admin = self.example_user('hamlet') realm = admin.realm self.login_user(admin) do_change_user_role(admin, UserProfile.ROLE_REALM_ADMINISTRATOR) result = self.client_post("/json/users", dict()) self.assert_json_error(result, "Missing 'email' argument") result = self.client_post("/json/users", dict( email='romeo@not-zulip.com', )) self.assert_json_error(result, "Missing 'password' argument") result = self.client_post("/json/users", dict( email='romeo@not-zulip.com', password='xxxx', )) self.assert_json_error(result, "Missing 'full_name' argument") result = self.client_post("/json/users", dict( email='romeo@not-zulip.com', password='xxxx', full_name='Romeo Montague', )) self.assert_json_error(result, "Missing 'short_name' argument") result = self.client_post("/json/users", dict( email='broken', password='xxxx', full_name='Romeo Montague', short_name='Romeo', )) self.assert_json_error(result, "Bad name or username") do_set_realm_property(realm, 'emails_restricted_to_domains', True) result = self.client_post("/json/users", dict( email='romeo@not-zulip.com', password='xxxx', full_name='Romeo Montague', short_name='Romeo', )) self.assert_json_error(result, "Email 'romeo@not-zulip.com' not allowed in this organization") RealmDomain.objects.create(realm=get_realm('zulip'), domain='zulip.net') valid_params = dict( email='romeo@zulip.net', password='xxxx', full_name='Romeo Montague', short_name='Romeo', ) # Check can't use a bad password with zxcvbn enabled with self.settings(PASSWORD_MIN_LENGTH=6, PASSWORD_MIN_GUESSES=1000): result = self.client_post("/json/users", valid_params) self.assert_json_error(result, "The password is too weak.") result = self.client_post("/json/users", valid_params) self.assert_json_success(result) # Romeo is a newly registered user new_user = get_user_by_delivery_email('romeo@zulip.net', get_realm('zulip')) self.assertEqual(new_user.full_name, 'Romeo Montague') self.assertEqual(new_user.short_name, 'Romeo') # Make sure the recipient field is set 
correctly. self.assertEqual(new_user.recipient, Recipient.objects.get(type=Recipient.PERSONAL, type_id=new_user.id)) # we can't create the same user twice. result = self.client_post("/json/users", valid_params) self.assert_json_error(result, "Email 'romeo@zulip.net' already in use") # Don't allow user to sign up with disposable email. realm.emails_restricted_to_domains = False realm.disallow_disposable_email_addresses = True realm.save() valid_params["email"] = "abc@mailnator.com" result = self.client_post("/json/users", valid_params) self.assert_json_error(result, "Disposable email addresses are not allowed in this organization") # Don't allow creating a user with + in their email address when realm # is restricted to a domain. realm.emails_restricted_to_domains = True realm.save() valid_params["email"] = "iago+label@zulip.com" result = self.client_post("/json/users", valid_params) self.assert_json_error(result, "Email addresses containing + are not allowed.") # Users can be created with + in their email address when realm # is not restricted to a domain. 
realm.emails_restricted_to_domains = False realm.save() valid_params["email"] = "iago+label@zulip.com" result = self.client_post("/json/users", valid_params) self.assert_json_success(result) class UserProfileTest(ZulipTestCase): def test_get_emails_from_user_ids(self) -> None: hamlet = self.example_user('hamlet') othello = self.example_user('othello') dct = get_emails_from_user_ids([hamlet.id, othello.id]) self.assertEqual(dct[hamlet.id], hamlet.email) self.assertEqual(dct[othello.id], othello.email) def test_valid_user_id(self) -> None: realm = get_realm("zulip") hamlet = self.example_user('hamlet') othello = self.example_user('othello') bot = self.example_user("default_bot") # Invalid user ID invalid_uid: object = 1000 with self.assertRaisesRegex(ValidationError, r"User IDs is not a list"): check_valid_user_ids(realm.id, invalid_uid) with self.assertRaisesRegex(ValidationError, rf"Invalid user ID: {invalid_uid}"): check_valid_user_ids(realm.id, [invalid_uid]) invalid_uid = "abc" with self.assertRaisesRegex(ValidationError, r"User IDs\[0\] is not an integer"): check_valid_user_ids(realm.id, [invalid_uid]) invalid_uid = str(othello.id) with self.assertRaisesRegex(ValidationError, r"User IDs\[0\] is not an integer"): check_valid_user_ids(realm.id, [invalid_uid]) # User is in different realm with self.assertRaisesRegex(ValidationError, rf"Invalid user ID: {hamlet.id}"): check_valid_user_ids(get_realm("zephyr").id, [hamlet.id]) # User is not active hamlet.is_active = False hamlet.save() with self.assertRaisesRegex(ValidationError, rf"User with ID {hamlet.id} is deactivated"): check_valid_user_ids(realm.id, [hamlet.id]) check_valid_user_ids(realm.id, [hamlet.id], allow_deactivated=True) # User is a bot with self.assertRaisesRegex(ValidationError, rf"User with ID {bot.id} is a bot"): check_valid_user_ids(realm.id, [bot.id]) # Successfully get non-bot, active user belong to your realm check_valid_user_ids(realm.id, [othello.id]) def test_cache_invalidation(self) -> None: 
hamlet = self.example_user('hamlet') with mock.patch('zerver.lib.cache.delete_display_recipient_cache') as m: hamlet.full_name = 'Hamlet Junior' hamlet.save(update_fields=["full_name"]) self.assertTrue(m.called) with mock.patch('zerver.lib.cache.delete_display_recipient_cache') as m: hamlet.long_term_idle = True hamlet.save(update_fields=["long_term_idle"]) self.assertFalse(m.called) def test_user_ids_to_users(self) -> None: real_user_ids = [ self.example_user('hamlet').id, self.example_user('cordelia').id, ] self.assertEqual(user_ids_to_users([], get_realm("zulip")), []) self.assertEqual({user_profile.id for user_profile in user_ids_to_users(real_user_ids, get_realm("zulip"))}, set(real_user_ids)) with self.assertRaises(JsonableError): user_ids_to_users([1234], get_realm("zephyr")) with self.assertRaises(JsonableError): user_ids_to_users(real_user_ids, get_realm("zephyr")) def test_bulk_get_users(self) -> None: from zerver.lib.users import bulk_get_users hamlet = self.example_user("hamlet") cordelia = self.example_user("cordelia") webhook_bot = self.example_user("webhook_bot") result = bulk_get_users( [hamlet.email, cordelia.email], get_realm("zulip"), ) self.assertEqual(result[hamlet.email].email, hamlet.email) self.assertEqual(result[cordelia.email].email, cordelia.email) result = bulk_get_users( [hamlet.email, cordelia.email, webhook_bot.email], None, base_query=UserProfile.objects.all(), ) self.assertEqual(result[hamlet.email].email, hamlet.email) self.assertEqual(result[cordelia.email].email, cordelia.email) self.assertEqual(result[webhook_bot.email].email, webhook_bot.email) def test_get_accounts_for_email(self) -> None: reset_emails_in_zulip_realm() def check_account_present_in_accounts(user: UserProfile, accounts: List[Dict[str, Optional[str]]]) -> None: for account in accounts: realm = user.realm if account["avatar"] == avatar_url(user) and account["full_name"] == user.full_name \ and account["realm_name"] == realm.name and account["string_id"] == 
realm.string_id: return raise AssertionError("Account not found") lear_realm = get_realm("lear") cordelia_in_zulip = self.example_user("cordelia") cordelia_in_lear = get_user_by_delivery_email("cordelia@zulip.com", lear_realm) email = "cordelia@zulip.com" accounts = get_accounts_for_email(email) self.assert_length(accounts, 2) check_account_present_in_accounts(cordelia_in_zulip, accounts) check_account_present_in_accounts(cordelia_in_lear, accounts) email = "CORDelia@zulip.com" accounts = get_accounts_for_email(email) self.assert_length(accounts, 2) check_account_present_in_accounts(cordelia_in_zulip, accounts) check_account_present_in_accounts(cordelia_in_lear, accounts) email = "IAGO@ZULIP.COM" accounts = get_accounts_for_email(email) self.assert_length(accounts, 1) check_account_present_in_accounts(self.example_user("iago"), accounts) # We verify that get_accounts_for_email don't return deactivated users accounts user = self.example_user("hamlet") do_deactivate_user(user) email = self.example_email("hamlet") accounts = get_accounts_for_email(email) with self.assertRaises(AssertionError): check_account_present_in_accounts(user, accounts) def test_get_source_profile(self) -> None: reset_emails_in_zulip_realm() iago = get_source_profile("iago@zulip.com", "zulip") assert iago is not None self.assertEqual(iago.email, "iago@zulip.com") self.assertEqual(iago.realm, get_realm("zulip")) iago = get_source_profile("IAGO@ZULIP.com", "zulip") assert iago is not None self.assertEqual(iago.email, "iago@zulip.com") cordelia = get_source_profile("cordelia@zulip.com", "lear") assert cordelia is not None self.assertEqual(cordelia.email, "cordelia@zulip.com") self.assertIsNone(get_source_profile("iagod@zulip.com", "zulip")) self.assertIsNone(get_source_profile("iago@zulip.com", "ZULIP")) self.assertIsNone(get_source_profile("iago@zulip.com", "lear")) def test_copy_user_settings(self) -> None: iago = self.example_user("iago") cordelia = self.example_user("cordelia") hamlet = 
self.example_user("hamlet") hamlet.color_scheme = UserProfile.COLOR_SCHEME_LIGHT cordelia.default_language = "de" cordelia.emojiset = "twitter" cordelia.timezone = "America/Phoenix" cordelia.color_scheme = UserProfile.COLOR_SCHEME_NIGHT cordelia.enable_offline_email_notifications = False cordelia.enable_stream_push_notifications = True cordelia.enter_sends = False cordelia.save() UserHotspot.objects.filter(user=cordelia).delete() UserHotspot.objects.filter(user=iago).delete() hotspots_completed = ['intro_reply', 'intro_streams', 'intro_topics'] for hotspot in hotspots_completed: UserHotspot.objects.create(user=cordelia, hotspot=hotspot) copy_user_settings(cordelia, iago) # We verify that cordelia and iago match, but hamlet has the defaults. self.assertEqual(iago.full_name, "Cordelia Lear") self.assertEqual(cordelia.full_name, "Cordelia Lear") self.assertEqual(hamlet.full_name, "King Hamlet") self.assertEqual(iago.default_language, "de") self.assertEqual(cordelia.default_language, "de") self.assertEqual(hamlet.default_language, "en") self.assertEqual(iago.emojiset, "twitter") self.assertEqual(cordelia.emojiset, "twitter") self.assertEqual(hamlet.emojiset, "google-blob") self.assertEqual(iago.timezone, "America/Phoenix") self.assertEqual(cordelia.timezone, "America/Phoenix") self.assertEqual(hamlet.timezone, "") self.assertEqual(iago.color_scheme, UserProfile.COLOR_SCHEME_NIGHT) self.assertEqual(cordelia.color_scheme, UserProfile.COLOR_SCHEME_NIGHT) self.assertEqual(hamlet.color_scheme, UserProfile.COLOR_SCHEME_LIGHT) self.assertEqual(iago.enable_offline_email_notifications, False) self.assertEqual(cordelia.enable_offline_email_notifications, False) self.assertEqual(hamlet.enable_offline_email_notifications, True) self.assertEqual(iago.enable_stream_push_notifications, True) self.assertEqual(cordelia.enable_stream_push_notifications, True) self.assertEqual(hamlet.enable_stream_push_notifications, False) self.assertEqual(iago.enter_sends, False) 
self.assertEqual(cordelia.enter_sends, False) self.assertEqual(hamlet.enter_sends, True) hotspots = list(UserHotspot.objects.filter(user=iago).values_list('hotspot', flat=True)) self.assertEqual(hotspots, hotspots_completed) def test_get_user_by_id_in_realm_including_cross_realm(self) -> None: realm = get_realm('zulip') hamlet = self.example_user('hamlet') othello = self.example_user('othello') bot = get_system_bot(settings.WELCOME_BOT) # Pass in the ID of a cross-realm bot and a valid realm cross_realm_bot = get_user_by_id_in_realm_including_cross_realm( bot.id, realm) self.assertEqual(cross_realm_bot.email, bot.email) self.assertEqual(cross_realm_bot.id, bot.id) # Pass in the ID of a cross-realm bot but with a invalid realm, # note that the realm should be irrelevant here cross_realm_bot = get_user_by_id_in_realm_including_cross_realm( bot.id, None) self.assertEqual(cross_realm_bot.email, bot.email) self.assertEqual(cross_realm_bot.id, bot.id) # Pass in the ID of a non-cross-realm user with a realm user_profile = get_user_by_id_in_realm_including_cross_realm( othello.id, realm) self.assertEqual(user_profile.email, othello.email) self.assertEqual(user_profile.id, othello.id) # If the realm doesn't match, or if the ID is not that of a # cross-realm bot, UserProfile.DoesNotExist is raised with self.assertRaises(UserProfile.DoesNotExist): get_user_by_id_in_realm_including_cross_realm( hamlet.id, None) def test_get_user_subscription_status(self) -> None: self.login('hamlet') iago = self.example_user('iago') stream = get_stream('Rome', iago.realm) # Invalid User ID. result = self.client_get(f"/json/users/25/subscriptions/{stream.id}") self.assert_json_error(result, "No such user") # Invalid Stream ID. 
result = self.client_get(f"/json/users/{iago.id}/subscriptions/25") self.assert_json_error(result, "Invalid stream id") result = ujson.loads(self.client_get(f"/json/users/{iago.id}/subscriptions/{stream.id}").content) self.assertFalse(result['is_subscribed']) # Subscribe to the stream. self.subscribe(iago, stream.name) with queries_captured() as queries: result = ujson.loads(self.client_get(f"/json/users/{iago.id}/subscriptions/{stream.id}").content) self.assert_length(queries, 7) self.assertTrue(result['is_subscribed']) # Logging in with a Guest user. polonius = self.example_user("polonius") self.login('polonius') self.assertTrue(polonius.is_guest) result = self.client_get(f"/json/users/{iago.id}/subscriptions/{stream.id}") self.assert_json_error(result, "Invalid stream id") class ActivateTest(ZulipTestCase): def test_basics(self) -> None: user = self.example_user('hamlet') do_deactivate_user(user) self.assertFalse(user.is_active) do_reactivate_user(user) self.assertTrue(user.is_active) def test_api(self) -> None: admin = self.example_user('othello') do_change_user_role(admin, UserProfile.ROLE_REALM_ADMINISTRATOR) self.login('othello') user = self.example_user('hamlet') self.assertTrue(user.is_active) result = self.client_delete(f'/json/users/{user.id}') self.assert_json_success(result) user = self.example_user('hamlet') self.assertFalse(user.is_active) result = self.client_post(f'/json/users/{user.id}/reactivate') self.assert_json_success(result) user = self.example_user('hamlet') self.assertTrue(user.is_active) def test_api_with_nonexistent_user(self) -> None: self.login('iago') # Organization Administrator cannot deactivate organization owner. 
result = self.client_delete(f'/json/users/{self.example_user("desdemona").id}') self.assert_json_error(result, 'Must be an organization owner') iago = self.example_user('iago') desdemona = self.example_user('desdemona') do_change_user_role(iago, UserProfile.ROLE_REALM_OWNER) # Cannot deactivate a user with the bot api result = self.client_delete('/json/bots/{}'.format(self.example_user("hamlet").id)) self.assert_json_error(result, 'No such bot') # Cannot deactivate a nonexistent user. invalid_user_id = 1000 result = self.client_delete(f'/json/users/{invalid_user_id}') self.assert_json_error(result, 'No such user') result = self.client_delete('/json/users/{}'.format(self.example_user("webhook_bot").id)) self.assert_json_error(result, 'No such user') result = self.client_delete(f'/json/users/{desdemona.id}') self.assert_json_success(result) result = self.client_delete(f'/json/users/{iago.id}') self.assert_json_error(result, 'Cannot deactivate the only organization owner') # Cannot reactivate a nonexistent user. 
invalid_user_id = 1000 result = self.client_post(f'/json/users/{invalid_user_id}/reactivate') self.assert_json_error(result, 'No such user') def test_api_with_insufficient_permissions(self) -> None: non_admin = self.example_user('othello') do_change_user_role(non_admin, UserProfile.ROLE_MEMBER) self.login('othello') # Cannot deactivate a user with the users api result = self.client_delete('/json/users/{}'.format(self.example_user("hamlet").id)) self.assert_json_error(result, 'Insufficient permission') # Cannot reactivate a user result = self.client_post('/json/users/{}/reactivate'.format(self.example_user("hamlet").id)) self.assert_json_error(result, 'Insufficient permission') def test_clear_scheduled_jobs(self) -> None: user = self.example_user('hamlet') send_future_email('zerver/emails/followup_day1', user.realm, to_user_ids=[user.id], delay=datetime.timedelta(hours=1)) self.assertEqual(ScheduledEmail.objects.count(), 1) do_deactivate_user(user) self.assertEqual(ScheduledEmail.objects.count(), 0) def test_send_future_email_with_multiple_recipients(self) -> None: hamlet = self.example_user('hamlet') iago = self.example_user('iago') send_future_email('zerver/emails/followup_day1', iago.realm, to_user_ids=[hamlet.id, iago.id], delay=datetime.timedelta(hours=1)) self.assertEqual(ScheduledEmail.objects.filter(users__in=[hamlet, iago]).distinct().count(), 1) email = ScheduledEmail.objects.all().first() self.assertEqual(email.users.count(), 2) def test_clear_scheduled_emails_with_multiple_user_ids(self) -> None: hamlet = self.example_user('hamlet') iago = self.example_user('iago') send_future_email('zerver/emails/followup_day1', iago.realm, to_user_ids=[hamlet.id, iago.id], delay=datetime.timedelta(hours=1)) self.assertEqual(ScheduledEmail.objects.count(), 1) clear_scheduled_emails([hamlet.id, iago.id]) self.assertEqual(ScheduledEmail.objects.count(), 0) def test_clear_schedule_emails_with_one_user_id(self) -> None: hamlet = self.example_user('hamlet') iago = 
self.example_user('iago') send_future_email('zerver/emails/followup_day1', iago.realm, to_user_ids=[hamlet.id, iago.id], delay=datetime.timedelta(hours=1)) self.assertEqual(ScheduledEmail.objects.count(), 1) clear_scheduled_emails([hamlet.id]) self.assertEqual(ScheduledEmail.objects.count(), 1) self.assertEqual(ScheduledEmail.objects.filter(users=hamlet).count(), 0) self.assertEqual(ScheduledEmail.objects.filter(users=iago).count(), 1) def test_deliver_email(self) -> None: iago = self.example_user('iago') hamlet = self.example_user('hamlet') send_future_email('zerver/emails/followup_day1', iago.realm, to_user_ids=[hamlet.id, iago.id], delay=datetime.timedelta(hours=1)) self.assertEqual(ScheduledEmail.objects.count(), 1) email = ScheduledEmail.objects.all().first() deliver_email(email) from django.core.mail import outbox self.assertEqual(len(outbox), 1) for message in outbox: self.assertEqual( set(message.to), { str(Address(display_name=hamlet.full_name, addr_spec=hamlet.delivery_email)), str(Address(display_name=iago.full_name, addr_spec=iago.delivery_email)), }, ) self.assertEqual(ScheduledEmail.objects.count(), 0) class RecipientInfoTest(ZulipTestCase): def test_stream_recipient_info(self) -> None: hamlet = self.example_user('hamlet') cordelia = self.example_user('cordelia') othello = self.example_user('othello') # These tests were written with the old default for # enable_online_push_notifications; that default is better for # testing the full code path anyway. 
hamlet.enable_online_push_notifications = False cordelia.enable_online_push_notifications = False othello.enable_online_push_notifications = False hamlet.save() cordelia.save() othello.save() realm = hamlet.realm stream_name = 'Test Stream' topic_name = 'test topic' for user in [hamlet, cordelia, othello]: self.subscribe(user, stream_name) stream = get_stream(stream_name, realm) recipient = stream.recipient stream_topic = StreamTopicTarget( stream_id=stream.id, topic_name=topic_name, ) info = get_recipient_info( recipient=recipient, sender_id=hamlet.id, stream_topic=stream_topic, possible_wildcard_mention=False, ) all_user_ids = {hamlet.id, cordelia.id, othello.id} expected_info = dict( active_user_ids=all_user_ids, push_notify_user_ids=set(), stream_push_user_ids=set(), stream_email_user_ids=set(), wildcard_mention_user_ids=set(), um_eligible_user_ids=all_user_ids, long_term_idle_user_ids=set(), default_bot_user_ids=set(), service_bot_tuples=[], ) self.assertEqual(info, expected_info) cordelia.wildcard_mentions_notify = False cordelia.save() hamlet.enable_stream_push_notifications = True hamlet.save() info = get_recipient_info( recipient=recipient, sender_id=hamlet.id, stream_topic=stream_topic, possible_wildcard_mention=False, ) self.assertEqual(info['stream_push_user_ids'], {hamlet.id}) self.assertEqual(info['wildcard_mention_user_ids'], set()) info = get_recipient_info( recipient=recipient, sender_id=hamlet.id, stream_topic=stream_topic, possible_wildcard_mention=True, ) self.assertEqual(info['wildcard_mention_user_ids'], {hamlet.id, othello.id}) sub = get_subscription(stream_name, hamlet) sub.push_notifications = False sub.save() info = get_recipient_info( recipient=recipient, sender_id=hamlet.id, stream_topic=stream_topic, ) self.assertEqual(info['stream_push_user_ids'], set()) hamlet.enable_stream_push_notifications = False hamlet.save() sub = get_subscription(stream_name, hamlet) sub.push_notifications = True sub.save() info = get_recipient_info( 
recipient=recipient, sender_id=hamlet.id, stream_topic=stream_topic, ) self.assertEqual(info['stream_push_user_ids'], {hamlet.id}) # Now mute Hamlet to omit him from stream_push_user_ids. add_topic_mute( user_profile=hamlet, stream_id=stream.id, recipient_id=recipient.id, topic_name=topic_name, ) info = get_recipient_info( recipient=recipient, sender_id=hamlet.id, stream_topic=stream_topic, possible_wildcard_mention=False, ) self.assertEqual(info['stream_push_user_ids'], set()) self.assertEqual(info['wildcard_mention_user_ids'], set()) info = get_recipient_info( recipient=recipient, sender_id=hamlet.id, stream_topic=stream_topic, possible_wildcard_mention=True, ) self.assertEqual(info['stream_push_user_ids'], set()) # Since Hamlet has muted the stream and Cordelia has disabled # wildcard notifications, it should just be Othello here. self.assertEqual(info['wildcard_mention_user_ids'], {othello.id}) sub = get_subscription(stream_name, othello) sub.wildcard_mentions_notify = False sub.save() info = get_recipient_info( recipient=recipient, sender_id=hamlet.id, stream_topic=stream_topic, possible_wildcard_mention=True, ) self.assertEqual(info['stream_push_user_ids'], set()) # Verify that stream-level wildcard_mentions_notify=False works correctly. self.assertEqual(info['wildcard_mention_user_ids'], set()) # Verify that True works as expected as well sub = get_subscription(stream_name, othello) sub.wildcard_mentions_notify = True sub.save() info = get_recipient_info( recipient=recipient, sender_id=hamlet.id, stream_topic=stream_topic, possible_wildcard_mention=True, ) self.assertEqual(info['stream_push_user_ids'], set()) self.assertEqual(info['wildcard_mention_user_ids'], {othello.id}) # Add a service bot. 
        # (Continuation of a recipient-info test whose `def` lies above this
        # view.)  Register an embedded (service) bot and confirm it is
        # reported via `service_bot_tuples` when possibly mentioned.
        service_bot = do_create_user(
            email='service-bot@zulip.com',
            password='',
            realm=realm,
            full_name='',
            short_name='',
            bot_type=UserProfile.EMBEDDED_BOT,
        )
        info = get_recipient_info(
            recipient=recipient,
            sender_id=hamlet.id,
            stream_topic=stream_topic,
            possibly_mentioned_user_ids={service_bot.id},
        )
        self.assertEqual(info['service_bot_tuples'], [
            (service_bot.id, UserProfile.EMBEDDED_BOT),
        ])

        # Add a normal bot.
        normal_bot = do_create_user(
            email='normal-bot@zulip.com',
            password='',
            realm=realm,
            full_name='',
            short_name='',
            bot_type=UserProfile.DEFAULT_BOT,
        )
        info = get_recipient_info(
            recipient=recipient,
            sender_id=hamlet.id,
            stream_topic=stream_topic,
            possibly_mentioned_user_ids={service_bot.id, normal_bot.id},
        )
        # Default bots land in `default_bot_user_ids`, not service_bot_tuples.
        self.assertEqual(info['default_bot_user_ids'], {normal_bot.id})

    def test_get_recipient_info_invalid_recipient_type(self) -> None:
        """An unknown Recipient.type must make get_recipient_info raise."""
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm

        stream = get_stream('Rome', realm)
        stream_topic = StreamTopicTarget(
            stream_id=stream.id,
            topic_name='test topic',
        )

        # Make sure get_recipient_info asserts on invalid recipient types
        with self.assertRaisesRegex(ValueError, 'Bad recipient type'):
            invalid_recipient = Recipient(type=999)  # 999 is not a valid type
            get_recipient_info(
                recipient=invalid_recipient,
                sender_id=hamlet.id,
                stream_topic=stream_topic,
            )

class BulkUsersTest(ZulipTestCase):
    """Tests for the bulk /json/users listing endpoint."""

    def test_client_gravatar_option(self) -> None:
        reset_emails_in_zulip_realm()
        self.login('cordelia')

        hamlet = self.example_user('hamlet')

        def get_hamlet_avatar(client_gravatar: bool) -> Optional[str]:
            # Fetch the member list and pull out Hamlet's avatar_url field.
            data = dict(client_gravatar=ujson.dumps(client_gravatar))
            result = self.client_get('/json/users', data)
            self.assert_json_success(result)
            rows = result.json()['members']
            hamlet_data = [
                row for row in rows
                if row['user_id'] == hamlet.id
            ][0]
            return hamlet_data['avatar_url']

        self.assertEqual(
            get_hamlet_avatar(client_gravatar=True),
            None,
        )

        '''
        The main purpose of this test is to make sure we
        return None for avatar_url when client_gravatar is
        set to True.  And we do a sanity check for when it's
        False, but we leave it to other tests to validate
        the specific URL.
        '''
        self.assertIn(
            'gravatar.com',
            get_hamlet_avatar(client_gravatar=False),
        )

class GetProfileTest(ZulipTestCase):
    """Tests for /json/users/me and the per-user profile endpoints."""

    def common_update_pointer(self, user: UserProfile, pointer: int) -> None:
        # Helper: log in as `user` and POST a new pointer value.
        self.login_user(user)
        result = self.client_post("/json/users/me/pointer", {"pointer": pointer})
        self.assert_json_success(result)

    def common_get_pointer(self, user_id: str) -> Dict[str, Any]:
        # Helper: GET the pointer as the named example user.
        # NOTE(review): despite the name, `user_id` is an example-user *name*
        # (e.g. "othello"), as consumed by self.example_user().
        user_profile = self.example_user(user_id)
        result = self.api_get(user_profile, "/json/users/me/pointer")
        self.assert_json_success(result)
        json = result.json()
        return json

    def test_get_pointer(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)
        result = self.client_get("/json/users/me/pointer")
        self.assert_json_success(result)
        self.assertIn("pointer", result.json())

    def test_cache_behavior(self) -> None:
        """Tests whether fetching a user object the normal way, with
        `get_user`, makes 1 cache query and 1 database query.
        """
        realm = get_realm("zulip")
        email = self.example_user("hamlet").email
        with queries_captured() as queries:
            with simulated_empty_cache() as cache_queries:
                user_profile = get_user(email, realm)

        self.assert_length(queries, 1)
        self.assert_length(cache_queries, 1)
        self.assertEqual(user_profile.email, email)

    def test_get_user_profile(self) -> None:
        """Check role flags returned by /json/users/me for member/admin/owner."""
        hamlet = self.example_user('hamlet')
        iago = self.example_user('iago')
        desdemona = self.example_user('desdemona')
        # Plain member: no admin/owner/guest flags set.
        self.login('hamlet')
        result = ujson.loads(self.client_get('/json/users/me').content)
        self.assertEqual(result['email'], hamlet.email)
        self.assertEqual(result['full_name'], 'King Hamlet')
        self.assertIn("user_id", result)
        self.assertFalse(result['is_bot'])
        self.assertFalse(result['is_admin'])
        self.assertFalse(result['is_owner'])
        self.assertFalse(result['is_guest'])
        self.assertFalse('delivery_email' in result)
        # Realm administrator.
        self.login('iago')
        result = ujson.loads(self.client_get('/json/users/me').content)
        self.assertEqual(result['email'], iago.email)
        self.assertEqual(result['full_name'], 'Iago')
        self.assertFalse(result['is_bot'])
        self.assertTrue(result['is_admin'])
        self.assertFalse(result['is_owner'])
        self.assertFalse(result['is_guest'])
        # Realm owner (also an admin).
        self.login('desdemona')
        result = ujson.loads(self.client_get('/json/users/me').content)
        self.assertEqual(result['email'], desdemona.email)
        self.assertFalse(result['is_bot'])
        self.assertTrue(result['is_admin'])
        self.assertTrue(result['is_owner'])
        self.assertFalse(result['is_guest'])

        # Tests the GET ../users/{id} api endpoint.
        user = self.example_user('hamlet')
        result = ujson.loads(self.client_get(f'/json/users/{user.id}').content)
        self.assertEqual(result['user']['email'], user.email)
        self.assertEqual(result['user']['full_name'], user.full_name)
        self.assertIn("user_id", result['user'])
        self.assertNotIn("profile_data", result['user'])
        self.assertFalse(result['user']['is_bot'])
        self.assertFalse(result['user']['is_admin'])
        self.assertFalse(result['user']['is_owner'])

        # profile_data only appears when explicitly requested.
        result = ujson.loads(self.client_get(f'/json/users/{user.id}?include_custom_profile_fields=true').content)
        self.assertIn('profile_data', result['user'])

        # Nonexistent user id => error.
        result = self.client_get(f'/json/users/{30}?')
        self.assert_json_error(result, "No such user")

        bot = self.example_user("default_bot")
        result = ujson.loads(self.client_get(f'/json/users/{bot.id}').content)
        self.assertEqual(result['user']['email'], bot.email)
        self.assertTrue(result['user']['is_bot'])

    def test_api_get_empty_profile(self) -> None:
        """
        Ensure GET /users/me returns a max message id and returns successfully
        """
        json = self.common_get_pointer("othello")
        self.assertEqual(json['pointer'], -1)

    def test_profile_with_pointer(self) -> None:
        """
        Ensure GET /users/me returns a proper pointer id after the pointer is updated
        """

        id1 = self.send_stream_message(self.example_user("othello"), "Verona")
        id2 = self.send_stream_message(self.example_user("othello"), "Verona")

        hamlet = self.example_user('hamlet')
        self.common_update_pointer(hamlet, id2)
        json = self.common_get_pointer("hamlet")
        self.assertEqual(json["pointer"], id2)

        self.common_update_pointer(hamlet, id1)
        json = self.common_get_pointer("hamlet")
        self.assertEqual(json["pointer"], id2)  # pointer does not move backwards

        result = self.client_post("/json/users/me/pointer", {"pointer": 99999999})
        self.assert_json_error(result, "Invalid message ID")

    def test_get_all_profiles_avatar_urls(self) -> None:
        hamlet = self.example_user('hamlet')
        result = self.api_get(hamlet, "/api/v1/users")
        self.assert_json_success(result)

        # Exactly one member row should match Hamlet's email.
        (my_user,) = [
            user for user in result.json()['members']
            if user['email'] == hamlet.email
        ]

        self.assertEqual(
            my_user['avatar_url'],
            avatar_url(hamlet),
        )

class FakeEmailDomainTest(ZulipTestCase):
    """FAKE_EMAIL_DOMAIN must be a usable (non-IP, valid) domain name."""

    @override_settings(FAKE_EMAIL_DOMAIN="invaliddomain")
    def test_invalid_fake_email_domain(self) -> None:
        with self.assertRaises(InvalidFakeEmailDomain):
            get_fake_email_domain()

    @override_settings(FAKE_EMAIL_DOMAIN="127.0.0.1")
    def test_invalid_fake_email_domain_ip(self) -> None:
        with self.assertRaises(InvalidFakeEmailDomain):
            get_fake_email_domain()
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS,
#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
#    implied. See the License for the specific language governing
#    permissions and limitations under the License.

from heat.common import identifier
from heat.tests import common


class IdentifierTest(common.HeatTestCase):
    """Tests for HeatIdentifier: attribute/item access, ARN and URL-path
    serialization, parsing (including percent-decoding), and equality.
    """

    # Base URL used when exercising from_arn_url().
    url_prefix = 'http://1.2.3.4/foo/'

    def test_attrs(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p')
        self.assertEqual('t', hi.tenant)
        self.assertEqual('s', hi.stack_name)
        self.assertEqual('i', hi.stack_id)
        # Paths are normalized to carry a leading slash.
        self.assertEqual('/p', hi.path)

    def test_path_default(self):
        hi = identifier.HeatIdentifier('t', 's', 'i')
        self.assertEqual('', hi.path)

    def test_items(self):
        # The identifier also supports mapping-style item access.
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p')
        self.assertEqual('t', hi['tenant'])
        self.assertEqual('s', hi['stack_name'])
        self.assertEqual('i', hi['stack_id'])
        self.assertEqual('/p', hi['path'])

    def test_invalid_attr(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p')
        # Keys injected into the underlying dict are not exposed as attributes.
        hi.identity['foo'] = 'bar'
        self.assertRaises(AttributeError, getattr, hi, 'foo')

    def test_invalid_item(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p')
        # ...nor as items.
        hi.identity['foo'] = 'bar'
        self.assertRaises(KeyError, lambda o, k: o[k], hi, 'foo')

    def test_stack_path(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p')
        self.assertEqual('s/i', hi.stack_path())

    def test_arn(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p')
        self.assertEqual('arn:openstack:heat::t:stacks/s/i/p', hi.arn())

    def test_arn_url(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p')
        self.assertEqual('/arn%3Aopenstack%3Aheat%3A%3At%3Astacks%2Fs%2Fi%2Fp',
                         hi.arn_url_path())

    def test_arn_id_int(self):
        # A non-string stack_id must serialize cleanly into the ARN.
        hi = identifier.HeatIdentifier('t', 's', 42, 'p')
        self.assertEqual('arn:openstack:heat::t:stacks/s/42/p', hi.arn())

    def test_arn_parse(self):
        arn = 'arn:openstack:heat::t:stacks/s/i/p'
        hi = identifier.HeatIdentifier.from_arn(arn)
        self.assertEqual('t', hi.tenant)
        self.assertEqual('s', hi.stack_name)
        self.assertEqual('i', hi.stack_id)
        self.assertEqual('/p', hi.path)

    def test_arn_url_parse(self):
        url = self.url_prefix + 'arn%3Aopenstack%3Aheat%3A%3At%3Astacks/s/i/p'
        hi = identifier.HeatIdentifier.from_arn_url(url)
        self.assertEqual('t', hi.tenant)
        self.assertEqual('s', hi.stack_name)
        self.assertEqual('i', hi.stack_id)
        self.assertEqual('/p', hi.path)

    def test_arn_parse_path_default(self):
        arn = 'arn:openstack:heat::t:stacks/s/i'
        hi = identifier.HeatIdentifier.from_arn(arn)
        self.assertEqual('t', hi.tenant)
        self.assertEqual('s', hi.stack_name)
        self.assertEqual('i', hi.stack_id)
        self.assertEqual('', hi.path)

    def test_arn_url_parse_default(self):
        url = self.url_prefix + 'arn%3Aopenstack%3Aheat%3A%3At%3Astacks/s/i'
        hi = identifier.HeatIdentifier.from_arn_url(url)
        self.assertEqual('t', hi.tenant)
        self.assertEqual('s', hi.stack_name)
        self.assertEqual('i', hi.stack_id)
        self.assertEqual('', hi.path)

    def test_arn_parse_upper(self):
        # The 'arn' prefix is matched case-insensitively.
        # NOTE(review): unlike its siblings, this test does not assert
        # hi.tenant == 't' — possibly an oversight worth confirming.
        arn = 'ARN:openstack:heat::t:stacks/s/i/p'
        hi = identifier.HeatIdentifier.from_arn(arn)
        self.assertEqual('s', hi.stack_name)
        self.assertEqual('i', hi.stack_id)
        self.assertEqual('/p', hi.path)

    def test_arn_url_parse_upper(self):
        url = self.url_prefix + 'ARN%3Aopenstack%3Aheat%3A%3At%3Astacks/s/i/p'
        hi = identifier.HeatIdentifier.from_arn_url(url)
        self.assertEqual('t', hi.tenant)
        self.assertEqual('s', hi.stack_name)
        self.assertEqual('i', hi.stack_id)
        self.assertEqual('/p', hi.path)

    def test_arn_url_parse_qs(self):
        # A query string appended to the URL is ignored by the parser.
        url = (self.url_prefix +
               'arn%3Aopenstack%3Aheat%3A%3At%3Astacks/s/i/p?foo=bar')
        hi = identifier.HeatIdentifier.from_arn_url(url)
        self.assertEqual('t', hi.tenant)
        self.assertEqual('s', hi.stack_name)
        self.assertEqual('i', hi.stack_id)
        self.assertEqual('/p', hi.path)

    # -- Malformed ARN / URL inputs must raise ValueError -------------------

    def test_arn_parse_arn_invalid(self):
        arn = 'urn:openstack:heat::t:stacks/s/i'
        self.assertRaises(ValueError, identifier.HeatIdentifier.from_arn, arn)

    def test_arn_url_parse_arn_invalid(self):
        url = self.url_prefix + 'urn:openstack:heat::t:stacks/s/i/p'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    def test_arn_parse_os_invalid(self):
        arn = 'arn:aws:heat::t:stacks/s/i'
        self.assertRaises(ValueError, identifier.HeatIdentifier.from_arn, arn)

    def test_arn_url_parse_os_invalid(self):
        url = self.url_prefix + 'arn:aws:heat::t:stacks/s/i/p'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    def test_arn_parse_heat_invalid(self):
        arn = 'arn:openstack:cool::t:stacks/s/i'
        self.assertRaises(ValueError, identifier.HeatIdentifier.from_arn, arn)

    def test_arn_url_parse_heat_invalid(self):
        url = self.url_prefix + 'arn:openstack:cool::t:stacks/s/i/p'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    def test_arn_parse_stacks_invalid(self):
        arn = 'arn:openstack:heat::t:sticks/s/i'
        self.assertRaises(ValueError, identifier.HeatIdentifier.from_arn, arn)

    def test_arn_url_parse_stacks_invalid(self):
        url = self.url_prefix + 'arn%3Aopenstack%3Aheat%3A%3At%3Asticks/s/i/p'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    def test_arn_parse_missing_field(self):
        arn = 'arn:openstack:heat::t:stacks/s'
        self.assertRaises(ValueError, identifier.HeatIdentifier.from_arn, arn)

    def test_arn_url_parse_missing_field(self):
        url = self.url_prefix + 'arn%3Aopenstack%3Aheat%3A%3At%3Asticks/s/'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    def test_arn_parse_empty_field(self):
        arn = 'arn:openstack:heat::t:stacks//i'
        self.assertRaises(ValueError, identifier.HeatIdentifier.from_arn, arn)

    def test_arn_url_parse_empty_field(self):
        url = self.url_prefix + 'arn%3Aopenstack%3Aheat%3A%3At%3Asticks//i'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    def test_arn_url_parse_leading_char(self):
        url = self.url_prefix + 'Aarn%3Aopenstack%3Aheat%3A%3At%3Asticks/s/i/p'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    def test_arn_url_parse_leading_space(self):
        url = self.url_prefix + ' arn%3Aopenstack%3Aheat%3A%3At%3Asticks/s/i/p'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    def test_arn_url_parse_badurl_proto(self):
        url = 'htt://1.2.3.4/foo/arn%3Aopenstack%3Aheat%3A%3At%3Asticks/s/i/p'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    def test_arn_url_parse_badurl_host(self):
        url = 'http:///foo/arn%3Aopenstack%3Aheat%3A%3At%3Asticks/s/i/p'
        self.assertRaises(ValueError,
                          identifier.HeatIdentifier.from_arn_url, url)

    # -- Round-trips --------------------------------------------------------

    def test_arn_round_trip(self):
        hii = identifier.HeatIdentifier('t', 's', 'i', 'p')
        hio = identifier.HeatIdentifier.from_arn(hii.arn())
        self.assertEqual(hii.tenant, hio.tenant)
        self.assertEqual(hii.stack_name, hio.stack_name)
        self.assertEqual(hii.stack_id, hio.stack_id)
        self.assertEqual(hii.path, hio.path)

    def test_arn_parse_round_trip(self):
        arn = 'arn:openstack:heat::t:stacks/s/i/p'
        hi = identifier.HeatIdentifier.from_arn(arn)
        self.assertEqual(arn, hi.arn())

    def test_arn_url_parse_round_trip(self):
        arn = '/arn%3Aopenstack%3Aheat%3A%3At%3Astacks%2Fs%2Fi%2Fp'
        url = 'http://1.2.3.4/foo' + arn
        hi = identifier.HeatIdentifier.from_arn_url(url)
        self.assertEqual(arn, hi.arn_url_path())

    def test_dict_round_trip(self):
        hii = identifier.HeatIdentifier('t', 's', 'i', 'p')
        hio = identifier.HeatIdentifier(**dict(hii))
        self.assertEqual(hii.tenant, hio.tenant)
        self.assertEqual(hii.stack_name, hio.stack_name)
        self.assertEqual(hii.stack_id, hio.stack_id)
        self.assertEqual(hii.path, hio.path)

    def test_url_path(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p')
        self.assertEqual('t/stacks/s/i/p', hi.url_path())

    def test_url_path_default(self):
        hi = identifier.HeatIdentifier('t', 's', 'i')
        self.assertEqual('t/stacks/s/i', hi.url_path())

    def test_url_path_with_unicode_path(self):
        # Non-ASCII path components are UTF-8 percent-encoded.
        hi = identifier.HeatIdentifier('t', 's', 'i', u'\u5de5')
        self.assertEqual('t/stacks/s/i/%E5%B7%A5', hi.url_path())

    # -- Percent-escaping of reserved characters in each field --------------

    def test_tenant_escape(self):
        hi = identifier.HeatIdentifier(':/', 's', 'i')
        self.assertEqual(':/', hi.tenant)
        self.assertEqual('%3A%2F/stacks/s/i', hi.url_path())
        self.assertEqual('arn:openstack:heat::%3A%2F:stacks/s/i', hi.arn())

    def test_name_escape(self):
        hi = identifier.HeatIdentifier('t', ':%', 'i')
        self.assertEqual(':%', hi.stack_name)
        self.assertEqual('t/stacks/%3A%25/i', hi.url_path())
        self.assertEqual('arn:openstack:heat::t:stacks/%3A%25/i', hi.arn())

    def test_id_escape(self):
        hi = identifier.HeatIdentifier('t', 's', ':/')
        self.assertEqual(':/', hi.stack_id)
        self.assertEqual('t/stacks/s/%3A%2F', hi.url_path())
        self.assertEqual('arn:openstack:heat::t:stacks/s/%3A%2F', hi.arn())

    def test_id_contains(self):
        # `in` tests membership of *keys*, not values.
        hi = identifier.HeatIdentifier('t', 's', ':/')
        self.assertFalse("t" in hi)
        self.assertTrue("stack_id" in hi)

    def test_path_escape(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', ':/')
        self.assertEqual('/:/', hi.path)
        self.assertEqual('t/stacks/s/i/%3A/', hi.url_path())
        self.assertEqual('arn:openstack:heat::t:stacks/s/i/%3A/', hi.arn())

    # -- Percent-decoding when parsing --------------------------------------

    def test_tenant_decode(self):
        arn = 'arn:openstack:heat::%3A%2F:stacks/s/i'
        hi = identifier.HeatIdentifier.from_arn(arn)
        self.assertEqual(':/', hi.tenant)

    def test_url_tenant_decode(self):
        # URL form is double-encoded (%25xx), so decoding happens twice.
        enc_arn = 'arn%3Aopenstack%3Aheat%3A%3A%253A%252F%3Astacks%2Fs%2Fi'
        url = self.url_prefix + enc_arn
        hi = identifier.HeatIdentifier.from_arn_url(url)
        self.assertEqual(':/', hi.tenant)

    def test_name_decode(self):
        arn = 'arn:openstack:heat::t:stacks/%3A%25/i'
        hi = identifier.HeatIdentifier.from_arn(arn)
        self.assertEqual(':%', hi.stack_name)

    def test_url_name_decode(self):
        enc_arn = 'arn%3Aopenstack%3Aheat%3A%3At%3Astacks%2F%253A%2525%2Fi'
        url = self.url_prefix + enc_arn
        hi = identifier.HeatIdentifier.from_arn_url(url)
        self.assertEqual(':%', hi.stack_name)

    def test_id_decode(self):
        arn = 'arn:openstack:heat::t:stacks/s/%3A%2F'
        hi = identifier.HeatIdentifier.from_arn(arn)
        self.assertEqual(':/', hi.stack_id)

    def test_url_id_decode(self):
        enc_arn = 'arn%3Aopenstack%3Aheat%3A%3At%3Astacks%2Fs%2F%253A%252F'
        url = self.url_prefix + enc_arn
        hi = identifier.HeatIdentifier.from_arn_url(url)
        self.assertEqual(':/', hi.stack_id)

    def test_path_decode(self):
        arn = 'arn:openstack:heat::t:stacks/s/i/%3A%2F'
        hi = identifier.HeatIdentifier.from_arn(arn)
        self.assertEqual('/:/', hi.path)

    def test_url_path_decode(self):
        enc_arn = 'arn%3Aopenstack%3Aheat%3A%3At%3Astacks%2Fs%2Fi%2F%253A%252F'
        url = self.url_prefix + enc_arn
        hi = identifier.HeatIdentifier.from_arn_url(url)
        self.assertEqual('/:/', hi.path)

    def test_arn_escape_decode_round_trip(self):
        hii = identifier.HeatIdentifier(':/', ':%', ':/', ':/')
        hio = identifier.HeatIdentifier.from_arn(hii.arn())
        self.assertEqual(hii.tenant, hio.tenant)
        self.assertEqual(hii.stack_name, hio.stack_name)
        self.assertEqual(hii.stack_id, hio.stack_id)
        self.assertEqual(hii.path, hio.path)

    def test_arn_decode_escape_round_trip(self):
        arn = 'arn:openstack:heat::%3A%2F:stacks/%3A%25/%3A%2F/%3A/'
        hi = identifier.HeatIdentifier.from_arn(arn)
        self.assertEqual(arn, hi.arn())

    def test_arn_url_decode_escape_round_trip(self):
        enc_arn = "".join(['arn%3Aopenstack%3Aheat%3A%3A%253A%252F%3A',
                           'stacks%2F%253A%2525%2F%253A%252F%2F%253A'])
        url = self.url_prefix + enc_arn
        hi = identifier.HeatIdentifier.from_arn_url(url)
        hi2 = identifier.HeatIdentifier.from_arn_url(self.url_prefix +
                                                     hi.arn_url_path())
        self.assertEqual(hi, hi2)

    def test_stack_name_slash(self):
        # '/' is the path separator, so it may not appear in a stack name.
        self.assertRaises(ValueError, identifier.HeatIdentifier,
                          't', 's/s', 'i', 'p')

    # -- Equality semantics --------------------------------------------------

    def test_equal(self):
        hi1 = identifier.HeatIdentifier('t', 's', 'i', 'p')
        hi2 = identifier.HeatIdentifier('t', 's', 'i', 'p')
        self.assertTrue(hi1 == hi2)

    def test_equal_dict(self):
        # Identifiers compare equal to their plain-dict form, both ways.
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p')
        self.assertTrue(hi == dict(hi))
        self.assertTrue(dict(hi) == hi)

    def test_not_equal(self):
        hi1 = identifier.HeatIdentifier('t', 's', 'i', 'p')
        hi2 = identifier.HeatIdentifier('t', 's', 'i', 'q')
        self.assertFalse(hi1 == hi2)
        self.assertFalse(hi2 == hi1)

    def test_not_equal_dict(self):
        hi1 = identifier.HeatIdentifier('t', 's', 'i', 'p')
        hi2 = identifier.HeatIdentifier('t', 's', 'i', 'q')
        self.assertFalse(hi1 == dict(hi2))
        self.assertFalse(dict(hi1) == hi2)
        # A dict missing the 'path' key is not equal either.
        self.assertFalse(hi1 == {'tenant': 't',
                                 'stack_name': 's',
                                 'stack_id': 'i'})
        self.assertFalse({'tenant': 't',
                          'stack_name': 's',
                          'stack_id': 'i'} == hi1)

    def test_path_components(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', 'p1/p2/p3')
        self.assertEqual(['p1', 'p2', 'p3'], hi._path_components())


class ResourceIdentifierTest(common.HeatTestCase):
    """Tests for ResourceIdentifier: path composition and stack() access."""

    def test_resource_init_no_path(self):
        si = identifier.HeatIdentifier('t', 's', 'i')
        ri = identifier.ResourceIdentifier(resource_name='r', **si)
        self.assertEqual('/resources/r', ri.path)

    def test_resource_init_path(self):
        # Nesting a resource under another resource extends the path.
        si = identifier.HeatIdentifier('t', 's', 'i')
        pi = identifier.ResourceIdentifier(resource_name='p', **si)
        ri = identifier.ResourceIdentifier(resource_name='r', **pi)
        self.assertEqual('/resources/p/resources/r', ri.path)

    def test_resource_init_from_dict(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', '/resources/r')
        ri = identifier.ResourceIdentifier(**hi)
        self.assertEqual(hi, ri)

    def test_resource_stack(self):
        si = identifier.HeatIdentifier('t', 's', 'i')
        ri = identifier.ResourceIdentifier(resource_name='r', **si)
        self.assertEqual(si, ri.stack())

    def test_resource_id(self):
        ri = identifier.ResourceIdentifier('t', 's', 'i', '', 'r')
        self.assertEqual('r', ri.resource_name)

    def test_resource_name_slash(self):
        # Resource names may not contain '/'.
        self.assertRaises(ValueError, identifier.ResourceIdentifier,
                          't', 's', 'i', 'p', 'r/r')


class EventIdentifierTest(common.HeatTestCase):
    """Tests for EventIdentifier with both integer and UUID event ids."""

    def test_event_init_integer_id(self):
        self._test_event_init('42')

    def test_event_init_uuid_id(self):
        self._test_event_init('a3455d8c-9f88-404d-a85b-5315293e67de')

    def _test_event_init(self, event_id):
        # Helper: build an event under resource 'p' and check its path.
        si = identifier.HeatIdentifier('t', 's', 'i')
        pi = identifier.ResourceIdentifier(resource_name='p', **si)
        ei = identifier.EventIdentifier(event_id=event_id, **pi)
        self.assertEqual('/resources/p/events/{0}'.format(event_id), ei.path)

    def test_event_init_from_dict(self):
        hi = identifier.HeatIdentifier('t', 's', 'i', '/resources/p/events/42')
        ei = identifier.EventIdentifier(**hi)
        self.assertEqual(hi, ei)

    def test_event_stack(self):
        si = identifier.HeatIdentifier('t', 's', 'i')
        pi = identifier.ResourceIdentifier(resource_name='r', **si)
        ei = identifier.EventIdentifier(event_id='e', **pi)
        self.assertEqual(si, ei.stack())

    def test_event_resource(self):
        si = identifier.HeatIdentifier('t', 's', 'i')
        pi = identifier.ResourceIdentifier(resource_name='r', **si)
        ei = identifier.EventIdentifier(event_id='e', **pi)
        self.assertEqual(pi, ei.resource())

    def test_resource_name(self):
        ei = identifier.EventIdentifier('t', 's', 'i', '/resources/p', 'e')
        self.assertEqual('p', ei.resource_name)

    def test_event_id_integer(self):
        self._test_event_id('42')

    def test_event_id_uuid(self):
        self._test_event_id('a3455d8c-9f88-404d-a85b-5315293e67de')

    def _test_event_id(self, event_id):
        # Helper: the trailing path component is exposed as event_id.
        ei = identifier.EventIdentifier('t', 's', 'i', '/resources/p',
                                        event_id)
        self.assertEqual(event_id, ei.event_id)
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""An extensible ASCII table reader and writer.

daophot.py:
  Classes to read DAOphot table format

:Copyright: Smithsonian Astrophysical Observatory (2011)
:Author: Tom Aldcroft (aldcroft@head.cfa.harvard.edu)
"""

from __future__ import absolute_import, division, print_function

import re

import numpy as np

from . import core
from . import basic
from . import fixedwidth
from ...utils import OrderedDict


class DaophotHeader(core.BaseHeader):
    """Header parser for DAOphot tables.

    Understands the #K keyword records and the #N/#U/#F column-definition
    records, including multi-aperture files where the last column block is
    implicitly repeated once per aperture.
    """

    # Header keyword lines start with '#K'.
    comment = r'\s*#K'

    # Raw value of the APERTURES keyword, captured by update_meta() and
    # consumed by get_cols() to detect multi-aperture files.
    aperture_values = ''

    def update_meta(self, lines, meta):
        """
        Extract table-level keywords for DAOphot table.  These are indicated by
        a leading '#K ' prefix.
        """
        table_meta = meta['table']

        # Read keywords as a table embedded in the header comments
        comment_lines = [line for line in lines if line.startswith('#')]
        if len(comment_lines) > 0:
            # '#K <name> = <value> <units> <format>'; value may be empty.
            re_header_keyword = re.compile(r'[#]K'
                                           r'\s+ (?P<name> \w+)'
                                           r'\s* = (?P<stuff> .+) $',
                                           re.VERBOSE)

            table_meta['keywords'] = OrderedDict()
            for line in comment_lines:
                m = re_header_keyword.match(line)
                if m:
                    # Split from the right: the last two tokens are always
                    # units and format; everything before is the value.
                    vals = m.group('stuff').strip().rsplit(None, 2)
                    keyword_dict = {'units': vals[-2],
                                    'format': vals[-1]}
                    keyword_dict['value'] = (vals[0] if len(vals) > 2 else "")
                    table_meta['keywords'][m.group('name')] = keyword_dict
                    if m.group('name') == 'APERTURES':
                        self.aperture_values = keyword_dict['value']

    def get_cols(self, lines):
        """Initialize the header Column objects from the table ``lines`` for a
        DAOphot header.  The DAOphot header is specialized so that we just
        copy the entire BaseHeader get_cols routine and modify as needed.

        Parameters
        ----------
        lines : list
            List of table lines

        """
        # Parse a series of column definition lines like below.  There may be
        # several such blocks in a single file (where continuation characters
        # have already been stripped).
        # #N ID    XCENTER   YCENTER   MAG         MERR          MSKY           NITER
        # #U ##    pixels    pixels    magnitudes  magnitudes    counts         ##
        # #F %-9d  %-10.3f   %-10.3f   %-12.3f     %-14.3f       %-15.7g        %-6d
        coldef_lines = ['', '', '']
        starts = ('#N ', '#U ', '#F ')
        col_width = []
        col_len_def = re.compile(r'[0-9]+')
        re_colformat_def = re.compile(r'#F([^#]+)')
        last_coldef_line = ['', '', '']
        for line in lines:
            if not line.startswith('#'):
                break  # End of header lines
            else:
                formatmatch = re_colformat_def.search(line)
                if formatmatch:
                    form = formatmatch.group(1).split()
                    # Column width is the field width embedded in each
                    # printf-style format (e.g. '%-10.3f' -> 10).
                    width = ([int(col_len_def.search(s).group())
                              for s in form])
                    # original data format might be shorter than 80 characters
                    # and filled with spaces
                    width[-1] = 80 - sum(width[:-1])
                    col_width.extend(width)
                    last_width = width
                for i, start in enumerate(starts):
                    if line.startswith(start):
                        line_stripped = line[2:]
                        coldef_lines[i] = coldef_lines[i] + line_stripped
                        last_coldef_line[i] = line_stripped
                        break

        # We need to check whether daophot file has multiple aperture data,
        # in its keywords
        if (',' in self.aperture_values) or (':' in self.aperture_values):
            apertures = []
            for aper in self.aperture_values.split(','):
                if ':' in aper:
                    # Generate list of apertures from daophot's closed
                    # interval range syntax ap1:apN:apstep
                    ap1, apN, apstep = (float(i) for i in aper.split(':'))
                    apertures.extend(list(np.arange(ap1, apN, apstep)))
                    # np.arange excludes the stop value; append apN when the
                    # step divides the range exactly (closed interval).
                    # NOTE(review): exact float modulo — may miss apN for
                    # non-representable steps; confirm against sample files.
                    if (apN - ap1) % apstep == 0:
                        apertures.append(apN)
                else:
                    apertures.append(float(aper))
            # We shall now append the last header multiple times, suffixing
            # the repeated column names with 2..N (one block per aperture).
            for j in range(1, len(apertures)):
                col_width.extend(last_width)
                coldef_lines[0] = coldef_lines[0] + ' ' + ' '.join(
                    [name + str(j + 1)
                     for name in last_coldef_line[0].split()])
                for i in range(1, len(coldef_lines)):
                    coldef_lines[i] = coldef_lines[i] + last_coldef_line[i]

        # At this point colddef_lines has three lines corresponding to column
        # names, unit, and format.  Get the column names by splitting the
        # first line on whitespace.
        self.names = coldef_lines[0].split()
        if not self.names:
            raise core.InconsistentTableError(
                'No column names found in DAOphot header')

        # Fixed-width column boundaries from the accumulated widths.
        ends = np.cumsum(col_width)
        starts = ends - col_width

        # If there wasn't a #U defined (not sure of DAOphot specification),
        # then replace the empty line with the right number of ## indicators,
        # which matches the DAOphot "no unit" tag.
        for i, coldef_line in enumerate(coldef_lines):
            if not coldef_line:
                coldef_lines[i] = '## ' * len(self.names)

        # Read the three lines as a basic table.
        reader = core._get_reader(Reader=basic.Basic, comment=None)
        reader.header.comment = None
        coldefs = reader.read(coldef_lines)

        # Create the list of io.ascii column objects
        self._set_cols_from_names()

        # Set unit and format as needed.
        for col in self.cols:
            if coldefs[col.name][0] != '##':
                col.unit = coldefs[col.name][0]
            if coldefs[col.name][1] != '##':
                col.format = coldefs[col.name][1]

        # Set column start and end positions.
        for i, col in enumerate(self.cols):
            col.start = starts[i]
            col.end = ends[i]
            if hasattr(col, 'format'):
                # Infer the column type from the printf conversion character.
                if any(x in col.format for x in 'fg'):
                    col.type = core.FloatType
                elif 'd' in col.format:
                    col.type = core.IntType
                elif 's' in col.format:
                    col.type = core.StrType

        # INDEF is the missing value marker
        self.data.fill_values.append(('INDEF', '0'))


class DaophotData(core.BaseData):
    # Data rows are fixed-width; comment lines start with '#'.
    splitter_class = fixedwidth.FixedWidthSplitter
    start_line = 0
    comment = r'\s*#'


class DaophotInputter(core.ContinuationLinesInputter):
    # Header lines (starting with '#') never continue onto the next line.
    no_continue = r'\s*#'


class Daophot(core.BaseReader):
    """Read a DAOphot file.

    Example::

      #K MERGERAD   = INDEF                   scaleunit  %-23.7g
      #K IRAF = NOAO/IRAFV2.10EXPORT version %-23s
      #K USER = davis name %-23s
      #K HOST = tucana computer %-23s
      #
      #N ID    XCENTER   YCENTER   MAG         MERR          MSKY           NITER    \\
      #U ##    pixels    pixels    magnitudes  magnitudes    counts         ##       \\
      #F %-9d  %-10.3f   %-10.3f   %-12.3f     %-14.3f       %-15.7g        %-6d
      #
      #N         SHARPNESS   CHI         PIER  PERROR                                \\
      #U         ##          ##          ##    perrors                               \\
      #F         %-23.3f     %-12.3f     %-6d  %-13s
      #
      14       138.538   INDEF   15.461      0.003         34.85955       4        \\
      -0.032      0.802       0     No_error

    The keywords defined in the #K records are available via the output table
    ``meta`` attribute::

      >>> import os
      >>> from astropy.io import ascii
      >>> filename = os.path.join(ascii.__path__[0], 'tests/t/daophot.dat')
      >>> data = ascii.read(filename)
      >>> for name, keyword in data.meta['keywords'].items():
      ...     print(name, keyword['value'], keyword['units'], keyword['format'])
      ...
      MERGERAD INDEF scaleunit %-23.7g
      IRAF NOAO/IRAFV2.10EXPORT version %-23s
      USER  name %-23s
      ...

    The unit and formats are available in the output table columns::

      >>> for colname in data.colnames:
      ...     col = data[colname]
      ...     print(colname, col.unit, col.format)
      ...
      ID None %-9d
      XCENTER pixels %-10.3f
      YCENTER pixels %-10.3f
      ...

    Any column values of INDEF are interpreted as a missing value and will be
    masked out in the resultant table.

    In case of multi-aperture daophot files containing repeated entries for
    the last row of fields, extra unique column names will be created by
    suffixing corresponding field names with numbers starting from 2 to N
    (where N is the total number of apertures).
    For example, first aperture radius will be RAPERT and corresponding
    magnitude will be MAG, second aperture radius will be RAPERT2 and
    corresponding magnitude will be MAG2, third aperture radius will be
    RAPERT3 and corresponding magnitude will be MAG3, and so on.

    """
    _format_name = 'daophot'
    _io_registry_format_aliases = ['daophot']
    _io_registry_can_write = False
    _description = 'IRAF DAOphot format table'

    header_class = DaophotHeader
    data_class = DaophotData
    inputter_class = DaophotInputter

    def write(self, table=None):
        # DAOphot output is read-only in this reader.
        raise NotImplementedError
# Copyright (c) 2014 OpenStack Foundation.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS,
#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
#    implied. See the License for the specific language governing
#    permissions and limitations under the License.

# Runtime "sanity check" probes: each *_supported() function exercises a
# system capability (OVS, iproute2, dnsmasq, keepalived, ...) and returns a
# boolean, so the agent can verify its environment before use.

import re
import shutil
import tempfile

import netaddr
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import uuidutils

from neutron.agent.common import ovs_lib
from neutron.agent.l3 import ha_router
from neutron.agent.l3 import namespaces
from neutron.agent.linux import external_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import ip_link_support
from neutron.agent.linux import keepalived
from neutron.agent.linux import utils as agent_utils
from neutron.common import constants as n_consts
from neutron.common import utils
from neutron.i18n import _LE
from neutron.plugins.common import constants as const
from neutron.plugins.ml2.drivers.openvswitch.agent.common \
    import constants as ovs_const

LOG = logging.getLogger(__name__)

MINIMUM_DNSMASQ_VERSION = 2.67
MINIMUM_DIBBLER_VERSION = '1.0.1'


def ovs_vxlan_supported(from_ip='192.0.2.1', to_ip='192.0.2.2'):
    """Return True if OVS can create a VXLAN tunnel port."""
    name = "vxlantest-" + utils.get_random_string(6)
    with ovs_lib.OVSBridge(name) as br:
        port = br.add_tunnel_port(from_ip, to_ip, const.TYPE_VXLAN)
        return port != ovs_lib.INVALID_OFPORT


def ovs_geneve_supported(from_ip='192.0.2.3', to_ip='192.0.2.4'):
    """Return True if OVS can create a Geneve tunnel port."""
    name = "genevetest-" + utils.get_random_string(6)
    with ovs_lib.OVSBridge(name) as br:
        port = br.add_tunnel_port(from_ip, to_ip, const.TYPE_GENEVE)
        return port != ovs_lib.INVALID_OFPORT


def iproute2_vxlan_supported():
    """Return True if `ip link add ... type vxlan` works on this host."""
    ip = ip_lib.IPWrapper()
    name = "vxlantest-" + utils.get_random_string(4)
    port = ip.add_vxlan(name, 3000)
    ip.del_veth(name)
    return name == port.name


def patch_supported():
    """Return True if OVS patch ports can be created."""
    seed = utils.get_random_string(6)
    name = "patchtest-" + seed
    peer_name = "peertest0-" + seed
    patch_name = "peertest1-" + seed
    with ovs_lib.OVSBridge(name) as br:
        port = br.add_patch_port(patch_name, peer_name)
        return port != ovs_lib.INVALID_OFPORT


def nova_notify_supported():
    """Return True if the nova notifier (and its dependencies) import."""
    try:
        import neutron.notifiers.nova  # noqa since unused
        return True
    except ImportError:
        return False


def ofctl_arg_supported(cmd, **kwargs):
    """Verify if ovs-ofctl binary supports cmd with **kwargs.

    :param cmd: ovs-ofctl command to use for test.
    :param **kwargs: arguments to test with the command.
    :returns: a boolean if the supplied arguments are supported.
    """
    br_name = 'br-test-%s' % utils.get_random_string(6)
    with ovs_lib.OVSBridge(br_name) as test_br:
        full_args = ["ovs-ofctl", cmd, test_br.br_name,
                     ovs_lib._build_flow_expr_str(kwargs, cmd.split('-')[0])]
        try:
            agent_utils.execute(full_args, run_as_root=True)
        except RuntimeError as e:
            # Expected failure mode when the option is unsupported.
            LOG.debug("Exception while checking supported feature via "
                      "command %s. Exception: %s", full_args, e)
            return False
        except Exception:
            LOG.exception(_LE("Unexpected exception while checking supported"
                              " feature via command: %s"), full_args)
            return False
        else:
            return True


def arp_responder_supported():
    """Return True if ovs-ofctl accepts the ARP-responder flow actions."""
    mac = netaddr.EUI('dead:1234:beef', dialect=netaddr.mac_unix)
    ip = netaddr.IPAddress('240.0.0.1')
    actions = ovs_const.ARP_RESPONDER_ACTIONS % {'mac': mac, 'ip': ip}

    return ofctl_arg_supported(cmd='add-flow',
                               table=21,
                               priority=1,
                               proto='arp',
                               dl_vlan=42,
                               nw_dst='%s' % ip,
                               actions=actions)


def arp_header_match_supported():
    """Return True if ovs-ofctl can match on ARP header fields."""
    return ofctl_arg_supported(cmd='add-flow', table=24, priority=1,
                               proto='arp', arp_op='0x2',
                               arp_spa='1.1.1.1',
                               actions="NORMAL")


def icmpv6_header_match_supported():
    """Return True if ovs-ofctl can match on ICMPv6 ND header fields."""
    return ofctl_arg_supported(cmd='add-flow',
                               table=ovs_const.ARP_SPOOF_TABLE,
                               priority=1,
                               dl_type=n_consts.ETHERTYPE_IPV6,
                               nw_proto=n_consts.PROTO_NUM_ICMP_V6,
                               icmp_type=n_consts.ICMPV6_TYPE_NA,
                               nd_target='fdf8:f53b:82e4::10',
                               actions="NORMAL")


def vf_management_supported():
    """Return True if `ip link` supports the SR-IOV VF capabilities we need."""
    is_supported = True
    required_caps = (
        ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_STATE,
        ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_SPOOFCHK,
        ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_RATE)
    try:
        vf_section = ip_link_support.IpLinkSupport.get_vf_mgmt_section()
        for cap in required_caps:
            if not ip_link_support.IpLinkSupport.vf_mgmt_capability_supported(
                    vf_section, cap):
                is_supported = False
                # NOTE(review): '%(cap)s' is a mapping-style format but a
                # positional arg is passed; should be {'cap': cap} for the
                # message to render — confirm and fix separately.
                LOG.debug("ip link command does not support "
                          "vf capability '%(cap)s'", cap)
    except ip_link_support.UnsupportedIpLinkCommand:
        LOG.exception(_LE("Unexpected exception while checking supported "
                          "ip link command"))
        return False
    return is_supported


def netns_read_requires_helper():
    """Return True if reading netns state requires the root helper."""
    ipw = ip_lib.IPWrapper()
    nsname = "netnsreadtest-" + uuidutils.generate_uuid()
    ipw.netns.add(nsname)
    try:
        # read without root_helper. if exists, not required.
        ipw_nohelp = ip_lib.IPWrapper()
        exists = ipw_nohelp.netns.exists(nsname)
    finally:
        ipw.netns.delete(nsname)
    return not exists


def get_minimal_dnsmasq_version_supported():
    # Minimum version as a float (major.minor).
    return MINIMUM_DNSMASQ_VERSION


def dnsmasq_version_supported():
    """Return True if the installed dnsmasq meets the minimum version."""
    try:
        cmd = ['dnsmasq', '--version']
        env = {'LC_ALL': 'C'}
        out = agent_utils.execute(cmd, addl_env=env)
        m = re.search(r"version (\d+\.\d+)", out)
        ver = float(m.group(1)) if m else 0
        if ver < MINIMUM_DNSMASQ_VERSION:
            return False
    except (OSError, RuntimeError, IndexError, ValueError) as e:
        LOG.debug("Exception while checking minimal dnsmasq version. "
                  "Exception: %s", e)
        return False
    return True


class KeepalivedIPv6Test(object):
    """Context manager that spawns keepalived with an IPv6 VIP in a netns,
    used by keepalived_ipv6_supported() to probe IPv6 support.
    """

    def __init__(self, ha_port, gw_port, gw_vip, default_gw):
        self.ha_port = ha_port
        self.gw_port = gw_port
        self.gw_vip = gw_vip
        self.default_gw = default_gw
        self.manager = None
        self.config = None
        self.config_path = None
        self.nsname = "keepalivedtest-" + uuidutils.generate_uuid()
        self.pm = external_process.ProcessMonitor(cfg.CONF, 'router')
        # Saved so __exit__ can restore the monitoring interval.
        self.orig_interval = cfg.CONF.AGENT.check_child_processes_interval

    def configure(self):
        """Build the keepalived VRRP config with an IPv6 VIP and route."""
        config = keepalived.KeepalivedConf()
        instance1 = keepalived.KeepalivedInstance('MASTER', self.ha_port, 1,
                                                  ['169.254.192.0/18'],
                                                  advert_int=5)
        instance1.track_interfaces.append(self.ha_port)

        # Configure keepalived with an IPv6 address (gw_vip) on gw_port.
        vip_addr1 = keepalived.KeepalivedVipAddress(self.gw_vip, self.gw_port)
        instance1.vips.append(vip_addr1)

        # Configure keepalived with an IPv6 default route on gw_port.
        gateway_route = keepalived.KeepalivedVirtualRoute(n_consts.IPv6_ANY,
                                                          self.default_gw,
                                                          self.gw_port)
        instance1.virtual_routes.gateway_routes = [gateway_route]
        config.add_instance(instance1)
        self.config = config

    def start_keepalived_process(self):
        # Disable process monitoring for Keepalived process.
        cfg.CONF.set_override('check_child_processes_interval', 0, 'AGENT')

        # Create a temp directory to store keepalived configuration.
        self.config_path = tempfile.mkdtemp()

        # Instantiate keepalived manager with the IPv6 configuration.
        self.manager = keepalived.KeepalivedManager('router1', self.config,
                                                    namespace=self.nsname,
                                                    process_monitor=self.pm,
                                                    conf_path=self.config_path)
        self.manager.spawn()

    def verify_ipv6_address_assignment(self, gw_dev):
        """Block until keepalived has put the IPv6 VIP on gw_dev."""
        process = self.manager.get_process()
        agent_utils.wait_until_true(lambda: process.active)

        def _gw_vip_assigned():
            iface_ip = gw_dev.addr.list(ip_version=6, scope='global')
            if iface_ip:
                return self.gw_vip == iface_ip[0]['cidr']

        agent_utils.wait_until_true(_gw_vip_assigned)

    def __enter__(self):
        ip_lib.IPWrapper().netns.add(self.nsname)
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Tear down in reverse order: monitor, keepalived, config dir,
        # namespace; then restore the monitoring interval.
        self.pm.stop()
        if self.manager:
            self.manager.disable()
        if self.config_path:
            shutil.rmtree(self.config_path, ignore_errors=True)
        ip_lib.IPWrapper().netns.delete(self.nsname)
        cfg.CONF.set_override('check_child_processes_interval',
                              self.orig_interval, 'AGENT')


def keepalived_ipv6_supported():
    """Check if keepalived supports IPv6 functionality.

    Validation is done as follows.

    1. Create a namespace.
    2. Create OVS bridge with two ports (ha_port and gw_port)
    3. Move the ovs ports to the namespace.
    4. Spawn keepalived process inside the namespace with IPv6 configuration.
    5. Verify if IPv6 address is assigned to gw_port.
    6. Verify if IPv6 default route is configured by keepalived.
""" random_str = utils.get_random_string(6) br_name = "ka-test-" + random_str ha_port = ha_router.HA_DEV_PREFIX + random_str gw_port = namespaces.INTERNAL_DEV_PREFIX + random_str gw_vip = 'fdf8:f53b:82e4::10/64' expected_default_gw = 'fe80:f816::1' with ovs_lib.OVSBridge(br_name) as br: with KeepalivedIPv6Test(ha_port, gw_port, gw_vip, expected_default_gw) as ka: br.add_port(ha_port, ('type', 'internal')) br.add_port(gw_port, ('type', 'internal')) ha_dev = ip_lib.IPDevice(ha_port) gw_dev = ip_lib.IPDevice(gw_port) ha_dev.link.set_netns(ka.nsname) gw_dev.link.set_netns(ka.nsname) ha_dev.link.set_up() gw_dev.link.set_up() ka.configure() ka.start_keepalived_process() ka.verify_ipv6_address_assignment(gw_dev) default_gw = gw_dev.route.get_gateway(ip_version=6) if default_gw: default_gw = default_gw['gateway'] return expected_default_gw == default_gw def ovsdb_native_supported(): # Running the test should ensure we are configured for OVSDB native try: ovs = ovs_lib.BaseOVS() ovs.get_bridges() return True except ImportError as ex: LOG.error(_LE("Failed to import required modules. Ensure that the " "python-openvswitch package is installed. Error: %s"), ex) except Exception: LOG.exception(_LE("Unexpected exception occurred.")) return False def ebtables_supported(): try: cmd = ['ebtables', '--version'] agent_utils.execute(cmd) return True except (OSError, RuntimeError, IndexError, ValueError) as e: LOG.debug("Exception while checking for installed ebtables. " "Exception: %s", e) return False def get_minimal_dibbler_version_supported(): return MINIMUM_DIBBLER_VERSION def dibbler_version_supported(): try: cmd = ['dibbler-client', 'help'] out = agent_utils.execute(cmd) return '-w' in out except (OSError, RuntimeError, IndexError, ValueError) as e: LOG.debug("Exception while checking minimal dibbler version. " "Exception: %s", e) return False
# -*- coding: utf-8 -*-
"""
Various useful functions
"""

# Author: Remi Flamary <remi.flamary@unice.fr>
#
# License: MIT License

import multiprocessing
from functools import reduce
import time

import numpy as np
from scipy.spatial.distance import cdist
import sys
import warnings
try:
    from inspect import signature
except ImportError:
    from .externals.funcsigs import signature

# Module-level stopwatch shared by tic()/toc()/toq().
__time_tic_toc = time.time()


def tic():
    """ Python implementation of Matlab tic() function """
    global __time_tic_toc
    __time_tic_toc = time.time()


def toc(message='Elapsed time : {} s'):
    """ Python implementation of Matlab toc() function """
    t = time.time()
    print(message.format(t - __time_tic_toc))
    return t - __time_tic_toc


def toq():
    """ Python implementation of Julia toc() function """
    t = time.time()
    return t - __time_tic_toc


def kernel(x1, x2, method='gaussian', sigma=1, **kwargs):
    """Compute kernel matrix between samples in x1 and x2.

    Parameters
    ----------
    x1 : ndarray, shape (n1, d)
        First set of samples.
    x2 : ndarray, shape (n2, d)
        Second set of samples.
    method : str, optional
        Kernel name; one of 'gaussian', 'gauss' or 'rbf'
        (case-insensitive).
    sigma : float, optional
        Gaussian kernel bandwidth.

    Returns
    -------
    K : ndarray, shape (n1, n2)
        Kernel matrix.

    Raises
    ------
    ValueError
        If ``method`` is not a supported kernel name.  (The previous
        implementation fell through to ``return K`` with ``K`` unassigned
        and crashed with UnboundLocalError.)
    """
    if method.lower() in ['gaussian', 'gauss', 'rbf']:
        return np.exp(-dist(x1, x2) / (2 * sigma**2))
    raise ValueError("Unknown kernel method '{}'; supported methods are "
                     "'gaussian', 'gauss' and 'rbf'.".format(method))


def unif(n):
    """ return a uniform histogram of length n (simplex)

    Parameters
    ----------
    n : int
        number of bins in the histogram

    Returns
    -------
    h : np.array (n,)
        histogram of length n such that h_i=1/n for all i
    """
    return np.ones((n,)) / n


def clean_zeros(a, b, M):
    """ Remove all components with zeros weights in a and b
    """
    M2 = M[a > 0, :][:, b > 0].copy()  # copy force c style matrix (froemd)
    a2 = a[a > 0]
    b2 = b[b > 0]
    return a2, b2, M2


def euclidean_distances(X, Y, squared=False):
    """
    Considering the rows of X (and Y=X) as vectors, compute the
    distance matrix between each pair of vectors.

    Parameters
    ----------
    X : {array-like}, shape (n_samples_1, n_features)
    Y : {array-like}, shape (n_samples_2, n_features)
    squared : boolean, optional
        Return squared Euclidean distances.

    Returns
    -------
    distances : {array}, shape (n_samples_1, n_samples_2)
    """
    # ||x - y||^2 = ||x||^2 - 2 x.y + ||y||^2, computed without an
    # explicit pairwise loop.
    XX = np.einsum('ij,ij->i', X, X)[:, np.newaxis]
    YY = np.einsum('ij,ij->i', Y, Y)[np.newaxis, :]
    distances = np.dot(X, Y.T)
    distances *= -2
    distances += XX
    distances += YY
    # Clamp tiny negative values produced by floating point cancellation.
    np.maximum(distances, 0, out=distances)
    if X is Y:
        # Ensure that distances between vectors and themselves are set to 0.0.
        # This may not be the case due to floating point rounding errors.
        distances.flat[::distances.shape[0] + 1] = 0.0
    return distances if squared else np.sqrt(distances, out=distances)


def dist(x1, x2=None, metric='sqeuclidean'):
    """Compute distance between samples in x1 and x2 using function
    scipy.spatial.distance.cdist

    Parameters
    ----------
    x1 : ndarray, shape (n1,d)
        matrix with n1 samples of size d
    x2 : array, shape (n2,d), optional
        matrix with n2 samples of size d (if None then x2=x1)
    metric : str | callable, optional
        Name of the metric to be computed (full list in the doc of scipy),
        If a string, the distance function can be 'braycurtis', 'canberra',
        'chebyshev', 'cityblock', 'correlation', 'cosine', 'dice',
        'euclidean', 'hamming', 'jaccard', 'kulsinski', 'mahalanobis',
        'matching', 'minkowski', 'rogerstanimoto', 'russellrao',
        'seuclidean', 'sokalmichener', 'sokalsneath', 'sqeuclidean',
        'wminkowski', 'yule'.

    Returns
    -------
    M : np.array (n1,n2)
        distance matrix computed with given metric
    """
    if x2 is None:
        x2 = x1
    if metric == "sqeuclidean":
        # Fast path: avoids cdist for the most common OT cost.
        return euclidean_distances(x1, x2, squared=True)
    return cdist(x1, x2, metric=metric)


def dist0(n, method='lin_square'):
    """Compute standard cost matrices of size (n, n) for OT problems

    Parameters
    ----------
    n : int
        Size of the cost matrix.
    method : str, optional
        Type of loss matrix chosen from:

        * 'lin_square' : linear sampling between 0 and n-1, quadratic loss

    Returns
    -------
    M : ndarray, shape (n1,n2)
        Distance matrix computed with given metric.
    """
    res = 0
    if method == 'lin_square':
        x = np.arange(n, dtype=np.float64).reshape((n, 1))
        res = dist(x, x)
    return res


def cost_normalization(C, norm=None):
    """ Apply normalization to the loss matrix

    Parameters
    ----------
    C : ndarray, shape (n1, n2)
        The cost matrix to normalize.
    norm : str
        Type of normalization from 'median', 'max', 'log', 'loglog'. Any
        other value do not normalize.

    Returns
    -------
    C : ndarray, shape (n1, n2)
        The input cost matrix normalized according to given norm.
    """
    if norm is None:
        pass
    elif norm == "median":
        C /= float(np.median(C))
    elif norm == "max":
        C /= float(np.max(C))
    elif norm == "log":
        C = np.log(1 + C)
    elif norm == "loglog":
        C = np.log1p(np.log1p(C))
    else:
        raise ValueError('Norm %s is not a valid option.\n'
                         'Valid options are:\n'
                         'median, max, log, loglog' % norm)
    return C


def dots(*args):
    """ dots function for multiple matrix multiply """
    return reduce(np.dot, args)


def label_normalization(y, start=0):
    """ Transform labels to start at a given value

    Parameters
    ----------
    y : array-like, shape (n, )
        The vector of labels to be normalized.
    start : int
        Desired value for the smallest label in y (default=0)

    Returns
    -------
    y : array-like, shape (n1, )
        The input vector of labels normalized according to given start value.
    """
    # NOTE: shifts y in place when diff != 0 (callers get the same array
    # back, mutated).
    diff = np.min(np.unique(y)) - start
    if diff != 0:
        y -= diff
    return y


def fun(f, q_in, q_out):
    """ Utility function for parmap with no serializing problems """
    # Worker loop: pull (index, item) pairs until the (None, None) sentinel.
    while True:
        i, x = q_in.get()
        if i is None:
            break
        q_out.put((i, f(x)))


def parmap(f, X, nprocs=multiprocessing.cpu_count()):
    """ paralell map for multiprocessing (only map on windows)"""
    if not sys.platform.endswith('win32'):

        q_in = multiprocessing.Queue(1)
        q_out = multiprocessing.Queue()

        proc = [multiprocessing.Process(target=fun, args=(f, q_in, q_out))
                for _ in range(nprocs)]
        for p in proc:
            p.daemon = True
            p.start()

        sent = [q_in.put((i, x)) for i, x in enumerate(X)]
        # One sentinel per worker terminates the pool.
        [q_in.put((None, None)) for _ in range(nprocs)]
        res = [q_out.get() for _ in range(len(sent))]
        [p.join() for p in proc]

        # Results arrive out of order; restore input order by index.
        return [x for i, x in sorted(res)]
    else:
        return list(map(f, X))


def check_params(**kwargs):
    """check_params: check whether some parameters are missing
    """

    missing_params = []
    check = True

    for param in kwargs:
        if kwargs[param] is None:
            missing_params.append(param)

    if len(missing_params) > 0:
        print("POT - Warning: following necessary parameters are missing")
        for p in missing_params:
            print("\n", p)

        check = False

    return check


def check_random_state(seed):
    """Turn seed into a np.random.RandomState instance

    Parameters
    ----------
    seed : None | int | instance of RandomState
        If seed is None, return the RandomState singleton used by np.random.
        If seed is an int, return a new RandomState instance seeded with seed.
        If seed is already a RandomState instance, return it.
        Otherwise raise ValueError.
    """
    if seed is None or seed is np.random:
        return np.random.mtrand._rand
    if isinstance(seed, (int, np.integer)):
        return np.random.RandomState(seed)
    if isinstance(seed, np.random.RandomState):
        return seed
    raise ValueError('{} cannot be used to seed a numpy.random.RandomState'
                     ' instance'.format(seed))


class deprecated(object):
    """Decorator to mark a function or class as deprecated.

    deprecated class from scikit-learn package
    https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/utils/deprecation.py
    Issue a warning when the function is called/the class is instantiated and
    adds a warning to the docstring.
    The optional extra argument will be appended to the deprecation message
    and the docstring. Note: to use this with the default value for extra,
    put in an empty of parentheses:
    >>> from ot.deprecation import deprecated  # doctest: +SKIP
    >>> @deprecated()  # doctest: +SKIP
    ... def some_function(): pass  # doctest: +SKIP

    Parameters
    ----------
    extra : str
        To be added to the deprecation messages.
    """

    # Adapted from http://wiki.python.org/moin/PythonDecoratorLibrary,
    # but with many changes.

    def __init__(self, extra=''):
        self.extra = extra

    def __call__(self, obj):
        """Call method

        Parameters
        ----------
        obj : object
        """
        if isinstance(obj, type):
            return self._decorate_class(obj)
        else:
            return self._decorate_fun(obj)

    def _decorate_class(self, cls):
        """Wrap cls.__init__ so instantiation emits a DeprecationWarning."""
        msg = "Class %s is deprecated" % cls.__name__
        if self.extra:
            msg += "; %s" % self.extra

        # FIXME: we should probably reset __new__ for full generality
        init = cls.__init__

        def wrapped(*args, **kwargs):
            warnings.warn(msg, category=DeprecationWarning)
            return init(*args, **kwargs)

        cls.__init__ = wrapped

        wrapped.__name__ = '__init__'
        wrapped.__doc__ = self._update_doc(init.__doc__)
        # Kept so _get_param_names() can introspect the real constructor.
        wrapped.deprecated_original = init

        return cls

    def _decorate_fun(self, fun):
        """Decorate function fun"""
        msg = "Function %s is deprecated" % fun.__name__
        if self.extra:
            msg += "; %s" % self.extra

        def wrapped(*args, **kwargs):
            warnings.warn(msg, category=DeprecationWarning)
            return fun(*args, **kwargs)

        wrapped.__name__ = fun.__name__
        wrapped.__dict__ = fun.__dict__
        wrapped.__doc__ = self._update_doc(fun.__doc__)

        return wrapped

    def _update_doc(self, olddoc):
        """Prepend a DEPRECATED banner (plus extra) to a docstring."""
        newdoc = "DEPRECATED"
        if self.extra:
            newdoc = "%s: %s" % (newdoc, self.extra)
        if olddoc:
            newdoc = "%s\n\n%s" % (newdoc, olddoc)
        return newdoc


def _is_deprecated(func):
    """Helper to check if func is wraped by our deprecated decorator"""
    if sys.version_info < (3, 5):
        raise NotImplementedError("This is only available for python3.5 "
                                  "or above")
    closures = getattr(func, '__closure__', [])
    if closures is None:
        closures = []
    is_deprecated = ('deprecated' in
                     ''.join([c.cell_contents for c in closures
                              if isinstance(c.cell_contents, str)]))
    return is_deprecated


class BaseEstimator(object):
    """Base class for most objects in POT

    Code adapted from sklearn BaseEstimator class

    Notes
    -----
    All estimators should specify all the parameters that can be set
    at the class level in their ``__init__`` as explicit keyword
    arguments (no ``*args`` or ``**kwargs``).
    """

    @classmethod
    def _get_param_names(cls):
        """Get parameter names for the estimator"""

        # fetch the constructor or the original constructor before
        # deprecation wrapping if any
        init = getattr(cls.__init__, 'deprecated_original', cls.__init__)
        if init is object.__init__:
            # No explicit constructor to introspect
            return []

        # introspect the constructor arguments to find the model parameters
        # to represent
        init_signature = signature(init)
        # Consider the constructor parameters excluding 'self'
        parameters = [p for p in init_signature.parameters.values()
                      if p.name != 'self' and p.kind != p.VAR_KEYWORD]
        for p in parameters:
            if p.kind == p.VAR_POSITIONAL:
                raise RuntimeError("POT estimators should always "
                                   "specify their parameters in the signature"
                                   " of their __init__ (no varargs)."
                                   " %s with constructor %s doesn't "
                                   " follow this convention."
                                   % (cls, init_signature))
        # Extract and sort argument names excluding 'self'
        return sorted([p.name for p in parameters])

    def get_params(self, deep=True):
        """Get parameters for this estimator.

        Parameters
        ----------
        deep : bool, optional
            If True, will return the parameters for this estimator and
            contained subobjects that are estimators.

        Returns
        -------
        params : mapping of string to any
            Parameter names mapped to their values.
        """
        out = dict()
        for key in self._get_param_names():
            # We need deprecation warnings to always be on in order to
            # catch deprecated param values.
            # This is set in utils/__init__.py but it gets overwritten
            # when running under python3 somehow.
            warnings.simplefilter("always", DeprecationWarning)
            try:
                with warnings.catch_warnings(record=True) as w:
                    value = getattr(self, key, None)
                if len(w) and w[0].category == DeprecationWarning:
                    # if the parameter is deprecated, don't show it
                    continue
            finally:
                warnings.filters.pop(0)

            # XXX: should we rather test if instance of estimator?
            if deep and hasattr(value, 'get_params'):
                deep_items = value.get_params().items()
                out.update((key + '__' + k, val) for k, val in deep_items)
            out[key] = value
        return out

    def set_params(self, **params):
        """Set the parameters of this estimator.

        The method works on simple estimators as well as on nested objects
        (such as pipelines). The latter have parameters of the form
        ``<component>__<parameter>`` so that it's possible to update each
        component of a nested object.

        Returns
        -------
        self
        """
        if not params:
            # Simple optimisation to gain speed (inspect is slow)
            return self
        valid_params = self.get_params(deep=True)
        # for key, value in iteritems(params):
        for key, value in params.items():
            split = key.split('__', 1)
            if len(split) > 1:
                # nested objects case
                name, sub_name = split
                if name not in valid_params:
                    raise ValueError('Invalid parameter %s for estimator %s. '
                                     'Check the list of available parameters '
                                     'with `estimator.get_params().keys()`.' %
                                     (name, self))
                sub_object = valid_params[name]
                sub_object.set_params(**{sub_name: value})
            else:
                # simple objects case
                if key not in valid_params:
                    raise ValueError('Invalid parameter %s for estimator %s. '
                                     'Check the list of available parameters '
                                     'with `estimator.get_params().keys()`.' %
                                     (key, self.__class__.__name__))
                setattr(self, key, value)
        return self


class UndefinedParameter(Exception):
    """
    Aim at raising an Exception when a undefined parameter is called
    """
    pass
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the v1 `summary` ops: tag scoping, `family=`, and export."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from six.moves import xrange  # pylint: disable=redefined-builtin

from tensorflow.core.framework import summary_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import meta_graph
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.summary import summary as summary_lib


class ScalarSummaryTest(test.TestCase):
  # Each test runs a summary op in a session and parses the serialized
  # Summary proto to check tags and values.

  def testScalarSummary(self):
    # Tag of a scalar summary is prefixed with the enclosing name scope.
    with self.cached_session() as s:
      i = constant_op.constant(3)
      with ops.name_scope('outer'):
        im = summary_lib.scalar('inner', i)
      summary_str = s.run(im)
    summary = summary_pb2.Summary()
    summary.ParseFromString(summary_str)
    values = summary.value
    self.assertEqual(len(values), 1)
    self.assertEqual(values[0].tag, 'outer/inner')
    self.assertEqual(values[0].simple_value, 3.0)

  def testScalarSummaryWithFamily(self):
    # family= prefixes both the op name ('outer/family/...') and the tag
    # ('family/outer/family/...'); a second identical summary gets '_1'.
    with self.cached_session() as s:
      i = constant_op.constant(7)
      with ops.name_scope('outer'):
        im1 = summary_lib.scalar('inner', i, family='family')
        self.assertEquals(im1.op.name, 'outer/family/inner')
        im2 = summary_lib.scalar('inner', i, family='family')
        self.assertEquals(im2.op.name, 'outer/family/inner_1')
      sm1, sm2 = s.run([im1, im2])
    summary = summary_pb2.Summary()
    summary.ParseFromString(sm1)
    values = summary.value
    self.assertEqual(len(values), 1)
    self.assertEqual(values[0].tag, 'family/outer/family/inner')
    self.assertEqual(values[0].simple_value, 7.0)

    summary.ParseFromString(sm2)
    values = summary.value
    self.assertEqual(len(values), 1)
    self.assertEqual(values[0].tag, 'family/outer/family/inner_1')
    self.assertEqual(values[0].simple_value, 7.0)

  def testSummarizingVariable(self):
    # A Variable input must be initialized before the summary can read it.
    with self.cached_session() as s:
      c = constant_op.constant(42.0)
      v = variables.Variable(c)
      ss = summary_lib.scalar('summary', v)
      init = variables.global_variables_initializer()
      s.run(init)
      summ_str = s.run(ss)
    summary = summary_pb2.Summary()
    summary.ParseFromString(summ_str)
    self.assertEqual(len(summary.value), 1)
    value = summary.value[0]
    self.assertEqual(value.tag, 'summary')
    self.assertEqual(value.simple_value, 42.0)

  def testImageSummary(self):
    # An image summary emits one value per output image, tagged
    # '<scope>/<name>/image/<index>'.
    with self.cached_session() as s:
      i = array_ops.ones((5, 4, 4, 3))
      with ops.name_scope('outer'):
        im = summary_lib.image('inner', i, max_outputs=3)
      summary_str = s.run(im)
    summary = summary_pb2.Summary()
    summary.ParseFromString(summary_str)
    values = summary.value
    self.assertEqual(len(values), 3)
    tags = sorted(v.tag for v in values)
    expected = sorted('outer/inner/image/{}'.format(i) for i in xrange(3))
    self.assertEqual(tags, expected)

  def testImageSummaryWithFamily(self):
    with self.cached_session() as s:
      i = array_ops.ones((5, 2, 3, 1))
      with ops.name_scope('outer'):
        im = summary_lib.image('inner', i, max_outputs=3, family='family')
        self.assertEquals(im.op.name, 'outer/family/inner')
      summary_str = s.run(im)
    summary = summary_pb2.Summary()
    summary.ParseFromString(summary_str)
    values = summary.value
    self.assertEqual(len(values), 3)
    tags = sorted(v.tag for v in values)
    expected = sorted('family/outer/family/inner/image/{}'.format(i)
                      for i in xrange(3))
    self.assertEqual(tags, expected)

  def testHistogramSummary(self):
    with self.cached_session() as s:
      i = array_ops.ones((5, 4, 4, 3))
      with ops.name_scope('outer'):
        summ_op = summary_lib.histogram('inner', i)
      summary_str = s.run(summ_op)
    summary = summary_pb2.Summary()
    summary.ParseFromString(summary_str)
    self.assertEqual(len(summary.value), 1)
    self.assertEqual(summary.value[0].tag, 'outer/inner')

  def testHistogramSummaryWithFamily(self):
    with self.cached_session() as s:
      i = array_ops.ones((5, 4, 4, 3))
      with ops.name_scope('outer'):
        summ_op = summary_lib.histogram('inner', i, family='family')
        self.assertEquals(summ_op.op.name, 'outer/family/inner')
      summary_str = s.run(summ_op)
    summary = summary_pb2.Summary()
    summary.ParseFromString(summary_str)
    self.assertEqual(len(summary.value), 1)
    self.assertEqual(summary.value[0].tag, 'family/outer/family/inner')

  def testAudioSummary(self):
    # Audio summaries mirror image summaries: one value per clip, tagged
    # '<scope>/<name>/audio/<index>'.
    with self.cached_session() as s:
      i = array_ops.ones((5, 3, 4))
      with ops.name_scope('outer'):
        aud = summary_lib.audio('inner', i, 0.2, max_outputs=3)
      summary_str = s.run(aud)
    summary = summary_pb2.Summary()
    summary.ParseFromString(summary_str)
    values = summary.value
    self.assertEqual(len(values), 3)
    tags = sorted(v.tag for v in values)
    expected = sorted('outer/inner/audio/{}'.format(i) for i in xrange(3))
    self.assertEqual(tags, expected)

  def testAudioSummaryWithFamily(self):
    with self.cached_session() as s:
      i = array_ops.ones((5, 3, 4))
      with ops.name_scope('outer'):
        aud = summary_lib.audio('inner', i, 0.2, max_outputs=3,
                                family='family')
        self.assertEquals(aud.op.name, 'outer/family/inner')
      summary_str = s.run(aud)
    summary = summary_pb2.Summary()
    summary.ParseFromString(summary_str)
    values = summary.value
    self.assertEqual(len(values), 3)
    tags = sorted(v.tag for v in values)
    expected = sorted('family/outer/family/inner/audio/{}'.format(i)
                      for i in xrange(3))
    self.assertEqual(tags, expected)

  def testSummaryNameConversion(self):
    # Illegal tag characters are replaced with '_'; a leading '/' is dropped.
    c = constant_op.constant(3)
    s = summary_lib.scalar('name with spaces', c)
    self.assertEqual(s.op.name, 'name_with_spaces')

    s2 = summary_lib.scalar('name with many $#illegal^: characters!', c)
    self.assertEqual(s2.op.name, 'name_with_many___illegal___characters_')

    s3 = summary_lib.scalar('/name/with/leading/slash', c)
    self.assertEqual(s3.op.name, 'name/with/leading/slash')

  def testSummaryWithFamilyMetaGraphExport(self):
    # Exporting/importing a meta graph renames ops (new import scope) but
    # leaves the serialized summary tags unchanged.
    with ops.name_scope('outer'):
      i = constant_op.constant(11)
      summ = summary_lib.scalar('inner', i)
      self.assertEquals(summ.op.name, 'outer/inner')
      summ_f = summary_lib.scalar('inner', i, family='family')
      self.assertEquals(summ_f.op.name, 'outer/family/inner')

    metagraph_def, _ = meta_graph.export_scoped_meta_graph(export_scope='outer')

    with ops.Graph().as_default() as g:
      meta_graph.import_scoped_meta_graph(metagraph_def, graph=g,
                                          import_scope='new_outer')
      # The summaries should exist, but with outer scope renamed.
      new_summ = g.get_tensor_by_name('new_outer/inner:0')
      new_summ_f = g.get_tensor_by_name('new_outer/family/inner:0')

      # However, the tags are unaffected.
      with self.cached_session() as s:
        new_summ_str, new_summ_f_str = s.run([new_summ, new_summ_f])
        new_summ_pb = summary_pb2.Summary()
        new_summ_pb.ParseFromString(new_summ_str)
        self.assertEquals('outer/inner', new_summ_pb.value[0].tag)
        new_summ_f_pb = summary_pb2.Summary()
        new_summ_f_pb.ParseFromString(new_summ_f_str)
        self.assertEquals('family/outer/family/inner',
                          new_summ_f_pb.value[0].tag)


if __name__ == '__main__':
  test.main()
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Tests for lstm_object_detection.lstm.rnn_decoder."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
import numpy as np
from lstm_object_detection.lstm import rnn_decoder


class MockRnnCell(tf.contrib.rnn.RNNCell):
  """Deterministic stand-in cell: output = concat(input, state[0]) along the
  channel axis, next state = (2 * state[0], state[1]), which lets the tests
  below compute the expected outputs/states with plain numpy."""

  def __init__(self, input_size, num_units):
    self._input_size = input_size
    self._num_units = num_units
    self._filter_size = [3, 3]

  def __call__(self, inputs, state_tuple):
    outputs = tf.concat([inputs, state_tuple[0]], axis=3)
    new_state_tuple = (tf.multiply(state_tuple[0], 2), state_tuple[1])
    return outputs, new_state_tuple

  def state_size(self):
    return self._num_units

  def output_size(self):
    return self._input_size + self._num_units

  def pre_bottleneck(self, inputs, state, input_index):
    # Projects concat(input, state) down to _input_size channels; one
    # variable scope per input stream so multi-input decoding can reuse it.
    with tf.variable_scope('bottleneck_%d' % input_index, reuse=tf.AUTO_REUSE):
      inputs = tf.contrib.layers.separable_conv2d(
          tf.concat([inputs, state], 3),
          self._input_size,
          self._filter_size,
          depth_multiplier=1,
          activation_fn=tf.nn.relu6,
          normalizer_fn=None)
    return inputs


class RnnDecoderTest(tf.test.TestCase):
  # Exercises rnn_decoder.rnn_decoder with the mock cell and checks each
  # step's output/state against the numpy-computed expectation.

  def test_rnn_decoder_single_unroll(self):
    batch_size = 2
    num_unroll = 1
    num_units = 64
    width = 8
    height = 10
    input_channels = 128

    initial_state = tf.random_normal((batch_size, width, height, num_units))
    inputs = tf.random_normal([batch_size, width, height, input_channels])

    rnn_cell = MockRnnCell(input_channels, num_units)
    outputs, states = rnn_decoder.rnn_decoder(
        decoder_inputs=[inputs] * num_unroll,
        initial_state=(initial_state, initial_state),
        cell=rnn_cell)

    self.assertEqual(len(outputs), num_unroll)
    self.assertEqual(len(states), num_unroll)
    with tf.Session() as sess:
      sess.run(tf.global_variables_initializer())
      results = sess.run((outputs, states, inputs, initial_state))
      outputs_results = results[0]
      states_results = results[1]
      inputs_results = results[2]
      initial_states_results = results[3]
      self.assertEqual(outputs_results[0].shape,
                       (batch_size, width, height,
                        input_channels + num_units))
      self.assertAllEqual(
          outputs_results[0],
          np.concatenate((inputs_results, initial_states_results), axis=3))
      self.assertEqual(states_results[0][0].shape,
                       (batch_size, width, height, num_units))
      self.assertEqual(states_results[0][1].shape,
                       (batch_size, width, height, num_units))
      self.assertAllEqual(states_results[0][0],
                          np.multiply(initial_states_results, 2.0))
      self.assertAllEqual(states_results[0][1], initial_states_results)

  def test_rnn_decoder_multiple_unroll(self):
    batch_size = 2
    num_unroll = 3
    num_units = 64
    width = 8
    height = 10
    input_channels = 128

    initial_state = tf.random_normal((batch_size, width, height, num_units))
    inputs = tf.random_normal([batch_size, width, height, input_channels])

    rnn_cell = MockRnnCell(input_channels, num_units)
    outputs, states = rnn_decoder.rnn_decoder(
        decoder_inputs=[inputs] * num_unroll,
        initial_state=(initial_state, initial_state),
        cell=rnn_cell)

    self.assertEqual(len(outputs), num_unroll)
    self.assertEqual(len(states), num_unroll)
    with tf.Session() as sess:
      sess.run(tf.global_variables_initializer())
      results = sess.run((outputs, states, inputs, initial_state))
      outputs_results = results[0]
      states_results = results[1]
      inputs_results = results[2]
      initial_states_results = results[3]
      # Each step i consumes the state produced by step i - 1.
      for i in range(num_unroll):
        previous_state = ([initial_states_results, initial_states_results]
                          if i == 0 else states_results[i - 1])
        self.assertEqual(
            outputs_results[i].shape,
            (batch_size, width, height, input_channels + num_units))
        self.assertAllEqual(
            outputs_results[i],
            np.concatenate((inputs_results, previous_state[0]), axis=3))
        self.assertEqual(states_results[i][0].shape,
                         (batch_size, width, height, num_units))
        self.assertEqual(states_results[i][1].shape,
                         (batch_size, width, height, num_units))
        self.assertAllEqual(states_results[i][0],
                            np.multiply(previous_state[0], 2.0))
        self.assertAllEqual(states_results[i][1], previous_state[1])


class MultiInputRnnDecoderTest(tf.test.TestCase):
  # Exercises rnn_decoder.multi_input_rnn_decoder with two input streams
  # (large/small channel counts) run through pre_bottleneck.

  def test_rnn_decoder_single_unroll(self):
    batch_size = 2
    num_unroll = 1
    num_units = 12
    width = 8
    height = 10
    input_channels_large = 24
    input_channels_small = 12
    bottleneck_channels = 20

    initial_state_c = tf.random_normal((batch_size, width, height, num_units))
    initial_state_h = tf.random_normal((batch_size, width, height, num_units))
    initial_state = (initial_state_c, initial_state_h)
    inputs_large = tf.random_normal(
        [batch_size, width, height, input_channels_large])
    inputs_small = tf.random_normal(
        [batch_size, width, height, input_channels_small])

    rnn_cell = MockRnnCell(bottleneck_channels, num_units)
    outputs, states = rnn_decoder.multi_input_rnn_decoder(
        decoder_inputs=[[inputs_large] * num_unroll,
                        [inputs_small] * num_unroll],
        initial_state=initial_state,
        cell=rnn_cell,
        sequence_step=tf.zeros([batch_size]),
        pre_bottleneck=True)

    self.assertEqual(len(outputs), num_unroll)
    self.assertEqual(len(states), num_unroll)
    with tf.Session() as sess:
      sess.run(tf.global_variables_initializer())
      results = sess.run(
          (outputs, states, inputs_large, inputs_small, initial_state))
      outputs_results = results[0]
      states_results = results[1]
      inputs_large_results = results[2]
      inputs_small_results = results[3]
      initial_states_results = results[4]
      # The bottleneck output is not deterministic (learned conv weights),
      # so only shapes and the state-update rule are checked here.
      self.assertEqual(
          outputs_results[0].shape,
          (batch_size, width, height, bottleneck_channels + num_units))
      self.assertEqual(states_results[0][0].shape,
                       (batch_size, width, height, num_units))
      self.assertEqual(states_results[0][1].shape,
                       (batch_size, width, height, num_units))
      # The first step should always update state.
      self.assertAllEqual(states_results[0][0],
                          np.multiply(initial_states_results[0], 2))
      self.assertAllEqual(states_results[0][1], initial_states_results[1])

  def test_rnn_decoder_multiple_unroll(self):
    batch_size = 2
    num_unroll = 3
    num_units = 12
    width = 8
    height = 10
    input_channels_large = 24
    input_channels_small = 12
    bottleneck_channels = 20

    initial_state_c = tf.random_normal((batch_size, width, height, num_units))
    initial_state_h = tf.random_normal((batch_size, width, height, num_units))
    initial_state = (initial_state_c, initial_state_h)
    inputs_large = tf.random_normal(
        [batch_size, width, height, input_channels_large])
    inputs_small = tf.random_normal(
        [batch_size, width, height, input_channels_small])

    rnn_cell = MockRnnCell(bottleneck_channels, num_units)
    outputs, states = rnn_decoder.multi_input_rnn_decoder(
        decoder_inputs=[[inputs_large] * num_unroll,
                        [inputs_small] * num_unroll],
        initial_state=initial_state,
        cell=rnn_cell,
        sequence_step=tf.zeros([batch_size]),
        pre_bottleneck=True)

    self.assertEqual(len(outputs), num_unroll)
    self.assertEqual(len(states), num_unroll)
    with tf.Session() as sess:
      sess.run(tf.global_variables_initializer())
      results = sess.run(
          (outputs, states, inputs_large, inputs_small, initial_state))
      outputs_results = results[0]
      states_results = results[1]
      inputs_large_results = results[2]
      inputs_small_results = results[3]
      initial_states_results = results[4]

      # The first step should always update state.
      self.assertAllEqual(states_results[0][0],
                          np.multiply(initial_states_results[0], 2))
      self.assertAllEqual(states_results[0][1], initial_states_results[1])
      for i in range(num_unroll):
        self.assertEqual(
            outputs_results[i].shape,
            (batch_size, width, height, bottleneck_channels + num_units))
        self.assertEqual(states_results[i][0].shape,
                         (batch_size, width, height, num_units))
        self.assertEqual(states_results[i][1].shape,
                         (batch_size, width, height, num_units))

  def test_rnn_decoder_multiple_unroll_with_skip(self):
    batch_size = 2
    num_unroll = 5
    num_units = 12
    width = 8
    height = 10
    input_channels_large = 24
    input_channels_small = 12
    bottleneck_channels = 20
    skip = 2

    initial_state_c = tf.random_normal((batch_size, width, height, num_units))
    initial_state_h = tf.random_normal((batch_size, width, height, num_units))
    initial_state = (initial_state_c, initial_state_h)
    inputs_large = tf.random_normal(
        [batch_size, width, height, input_channels_large])
    inputs_small = tf.random_normal(
        [batch_size, width, height, input_channels_small])

    rnn_cell = MockRnnCell(bottleneck_channels, num_units)
    outputs, states = rnn_decoder.multi_input_rnn_decoder(
        decoder_inputs=[[inputs_large] * num_unroll,
                        [inputs_small] * num_unroll],
        initial_state=initial_state,
        cell=rnn_cell,
        sequence_step=tf.zeros([batch_size]),
        pre_bottleneck=True,
        selection_strategy='SKIP%d' % skip)

    self.assertEqual(len(outputs), num_unroll)
    self.assertEqual(len(states), num_unroll)
    with tf.Session() as sess:
      sess.run(tf.global_variables_initializer())
      results = sess.run(
          (outputs, states, inputs_large, inputs_small, initial_state))
      outputs_results = results[0]
      states_results = results[1]
      inputs_large_results = results[2]
      inputs_small_results = results[3]
      initial_states_results = results[4]
      for i in range(num_unroll):
        self.assertEqual(
            outputs_results[i].shape,
            (batch_size, width, height, bottleneck_channels + num_units))
        self.assertEqual(states_results[i][0].shape,
                         (batch_size, width, height, num_units))
        self.assertEqual(states_results[i][1].shape,
                         (batch_size, width, height, num_units))

        previous_state = (
            initial_states_results if i == 0 else states_results[i - 1])
        # State only updates during key frames
        if i % (skip + 1) == 0:
          self.assertAllEqual(states_results[i][0],
                              np.multiply(previous_state[0], 2))
          self.assertAllEqual(states_results[i][1], previous_state[1])
        else:
          self.assertAllEqual(states_results[i][0], previous_state[0])
          self.assertAllEqual(states_results[i][1], previous_state[1])


if __name__ == '__main__':
  tf.test.main()
#!/usr/bin/env python
# coding:utf-8
"""Local web-control HTTP server for the php_proxy module.

Serves a small control API on the loopback interface: log viewing
(/log), configuration get/set (/config), a readiness probe (/is_ready)
and shutdown (/quit).  Python 2 only (BaseHTTPServer / urllib2 /
ConfigParser).
"""

import sys
import os
current_path = os.path.dirname(os.path.abspath(__file__))

if __name__ == "__main__":
    # When run as a script, make the bundled python27 libraries importable.
    python_path = os.path.abspath(
        os.path.join(current_path, os.pardir, os.pardir, 'python27', '1.0'))

    noarch_lib = os.path.abspath(os.path.join(python_path, 'lib', 'noarch'))
    sys.path.append(noarch_lib)

    if sys.platform == "win32":
        win32_lib = os.path.abspath(os.path.join(python_path, 'lib', 'win32'))
        sys.path.append(win32_lib)
    elif sys.platform == "linux" or sys.platform == "linux2":
        win32_lib = os.path.abspath(os.path.join(python_path, 'lib', 'linux'))
        sys.path.append(win32_lib)

import platform
import BaseHTTPServer
import urlparse
import json
import os
import re
import subprocess
import cgi
import urllib2
import sys
# NOTE(review): several calls below (set_buffer_size, get_last_lines,
# get_new_lines) do not exist on the stdlib logging module; this assumes the
# bundled noarch path above provides a replacement 'logging' module — confirm.
import logging
import ConfigParser

# Never route our own control requests through a system proxy.
os.environ['HTTPS_PROXY'] = ''

current_path = os.path.dirname(os.path.abspath(__file__))
root_path = os.path.abspath(os.path.join(current_path, os.pardir, os.pardir))


class User_config(object):
    """User-editable php_proxy configuration, persisted as an INI file."""

    # Defaults, used when no config file exists yet.
    php_enable = '1'
    php_password = '123456'
    php_server = ''

    proxy_enable = "0"
    proxy_host = ""
    proxy_port = ""
    proxy_user = ""
    proxy_passwd = ""

    CONFIG_USER_FILENAME = os.path.abspath(
        os.path.join(root_path, os.pardir, os.pardir, 'data', 'php_proxy', 'config.ini'))

    def __init__(self):
        self.load()

    def load(self):
        """Read settings from CONFIG_USER_FILENAME, keeping defaults on error."""
        # Accept only '=' (not ':') as the option/value separator, so values
        # containing ':' (e.g. URLs) parse correctly.
        ConfigParser.RawConfigParser.OPTCRE = re.compile(
            r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')
        CONFIG = ConfigParser.ConfigParser()
        try:
            if os.path.isfile(self.CONFIG_USER_FILENAME):
                CONFIG.read(self.CONFIG_USER_FILENAME)
            else:
                return

            # The [php] section is optional; keep the defaults if absent.
            try:
                self.php_enable = CONFIG.get('php', 'enable')
                self.php_password = CONFIG.get('php', 'password')
                self.php_server = CONFIG.get('php', 'fetchserver')
            except Exception:
                pass

            self.proxy_enable = CONFIG.get('proxy', 'enable')
            self.proxy_host = CONFIG.get('proxy', 'host')
            self.proxy_port = CONFIG.get('proxy', 'port')
            self.proxy_user = CONFIG.get('proxy', 'username')
            self.proxy_passwd = CONFIG.get('proxy', 'password')
        except Exception as e:
            logging.warn("User_config.load except:%s", e)

    def save(self):
        """Write the current settings back to CONFIG_USER_FILENAME."""
        try:
            with open(self.CONFIG_USER_FILENAME, 'w') as f:
                f.write("[php]\n")
                f.write("enable = %s\n" % self.php_enable)
                f.write("password = %s\n" % self.php_password)
                f.write("fetchserver = %s\n\n" % self.php_server)

                f.write("[proxy]\n")
                f.write("enable = %s\n" % self.proxy_enable)
                f.write("host = %s\n" % self.proxy_host)
                f.write("port = %s\n" % self.proxy_port)
                f.write("username = %s\n" % self.proxy_user)
                f.write("password = %s\n" % self.proxy_passwd)
        except Exception:
            logging.exception("PHP config save user config fail:%s",
                              self.CONFIG_USER_FILENAME)


user_config = User_config()


def http_request(url, method="GET"):
    """Fire-and-forget HTTP GET to a local service, bypassing any proxy."""
    proxy_handler = urllib2.ProxyHandler({})
    opener = urllib2.build_opener(proxy_handler)
    try:
        req = opener.open(url)
    except Exception as e:
        logging.exception("web_control http_request:%s fail:%s", url, e)
    return


class RemoteContralServerHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Request handler implementing the local control API."""

    deploy_proc = None

    def address_string(self):
        return '%s:%s' % self.client_address[:2]

    def do_CONNECT(self):
        # This is a control server, not a tunnel endpoint.
        self.wfile.write(b'HTTP/1.1 403\r\nConnection: close\r\n\r\n')

    def _referer_allowed(self):
        """Return False only when a Referer is present and not local."""
        try:
            refer = self.headers.getheader('Referer')
            netloc = urlparse.urlparse(refer).netloc
            # BUGFIX: was 'startswitch' (typo) — the AttributeError it raised
            # was swallowed below, silently disabling this check.
            if not netloc.startswith("127.0.0.1") and not netloc.startswith("localhost"):
                logging.warn("web control ref:%s refuse", netloc)
                return False
        except Exception:
            # No/unparsable Referer: keep the historical permissive behavior.
            pass
        return True

    def do_GET(self):
        if not self._referer_allowed():
            return

        path = urlparse.urlparse(self.path).path
        if path == "/log":
            return self.req_log_handler()
        elif path == "/config":
            return self.req_config_handler()
        elif path == "/is_ready":
            return self.req_is_ready_handler()
        elif path == "/quit":
            # NOTE(review): 'common' is not imported in this module — confirm
            # it is injected by the launcher before this endpoint is used.
            common.keep_run = False
            data = "Quit"
            self.wfile.write(('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % ('text/plain', len(data))).encode())
            self.wfile.write(data)
            sys.exit()
            return
        else:
            logging.debug('PHP Web_control %s %s %s ', self.address_string(), self.command, self.path)
            self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
            logging.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)

    def do_POST(self):
        if not self._referer_allowed():
            return

        logging.debug('PHP web_control %s %s %s ', self.address_string(), self.command, self.path)
        try:
            ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
            if ctype == 'multipart/form-data':
                self.postvars = cgi.parse_multipart(self.rfile, pdict)
            elif ctype == 'application/x-www-form-urlencoded':
                length = int(self.headers.getheader('content-length'))
                self.postvars = urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
            else:
                self.postvars = {}
        except Exception:
            self.postvars = {}

        path = urlparse.urlparse(self.path).path
        if path == "/config":
            return self.req_config_handler()
        else:
            self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
            logging.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)

    def send_response(self, mimetype, data):
        # NOTE: deliberately shadows BaseHTTPRequestHandler.send_response with
        # a different signature; writes a full raw 200 response.
        self.wfile.write(('HTTP/1.1 200\r\nAccess-Control-Allow-Origin: *\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % (mimetype, len(data))).encode())
        self.wfile.write(data)

    def send_file(self, filename, mimetype):
        """Serve a local file verbatim with the given MIME type."""
        data = ''
        with open(filename, 'rb') as fp:
            data = fp.read()
        if data:
            self.send_response(mimetype, data)

    def req_log_handler(self):
        """GET /log?cmd=get_last|get_new|set_buffer_size — log-buffer access."""
        req = urlparse.urlparse(self.path).query
        reqs = urlparse.parse_qs(req, keep_blank_values=True)
        data = ''

        cmd = "get_last"
        # BUGFIX: reqs["cmd"] raised KeyError when the parameter was absent.
        if reqs.get("cmd"):
            cmd = reqs["cmd"][0]
        if cmd == "set_buffer_size":
            # BUGFIX: same KeyError hazard as above for "buffer_size".
            if not reqs.get("buffer_size"):
                data = '{"res":"fail", "reason":"size not set"}'
                mimetype = 'text/plain'
                self.send_response(mimetype, data)
                return

            buffer_size = reqs["buffer_size"][0]
            logging.set_buffer_size(buffer_size)
        elif cmd == "get_last":
            max_line = int(reqs["max_line"][0])
            data = logging.get_last_lines(max_line)
        elif cmd == "get_new":
            last_no = int(reqs["last_no"][0])
            data = logging.get_new_lines(last_no)
        else:
            logging.error('PAC %s %s %s ', self.address_string(), self.command, self.path)

        mimetype = 'text/plain'
        self.send_response(mimetype, data)

    def req_config_handler(self):
        """GET/POST /config?cmd=get_config|set_config — read/update settings."""
        req = urlparse.urlparse(self.path).query
        reqs = urlparse.parse_qs(req, keep_blank_values=True)
        data = ''

        try:
            if reqs['cmd'] == ['get_config']:
                data = json.dumps(user_config, default=lambda o: o.__dict__)
            elif reqs['cmd'] == ['set_config']:
                user_config.php_password = self.postvars['php_password'][0]
                user_config.php_server = self.postvars['php_server'][0]
                user_config.proxy_enable = self.postvars['proxy_enable'][0]
                user_config.proxy_host = self.postvars['proxy_host'][0]
                user_config.proxy_port = self.postvars['proxy_port'][0]
                # BUGFIX: previously assigned to user_config.proxy_username /
                # proxy_password, attributes that save() never writes, so the
                # submitted credentials were silently dropped.
                user_config.proxy_user = self.postvars['proxy_username'][0]
                user_config.proxy_passwd = self.postvars['proxy_password'][0]
                user_config.save()

                data = '{"res":"success"}'
                self.send_response('text/html', data)
                # Ask the launcher to restart php_proxy so new settings apply.
                http_request("http://127.0.0.1:8085/init_module?module=php_proxy&cmd=restart")
                return
        except Exception as e:
            logging.exception("req_config_handler except:%s", e)
            data = '{"res":"fail", "except":"%s"}' % e
        self.send_response('text/html', data)

    def req_is_ready_handler(self):
        """GET /is_ready — liveness probe; always answers "True"."""
        data = "True"
        mimetype = 'text/plain'
        self.send_response(mimetype, data)


if __name__ == "__main__":
    pass
# -*- test-case-name: twisted.web.test.test_proxy -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Simplistic HTTP proxy support.

This comes in two main variants - the Proxy and the ReverseProxy.

When a Proxy is in use, a browser trying to connect to a server (say,
www.yahoo.com) will be intercepted by the Proxy, and the proxy will covertly
connect to the server, and return the result.

When a ReverseProxy is in use, the client connects directly to the ReverseProxy
(say, www.yahoo.com) which farms off the request to one of a pool of servers,
and returns the result.

Normally, a Proxy is used on the client end of an Internet connection, while a
ReverseProxy is used on the server end.
"""

import urlparse
from urllib import quote as urlquote

from twisted.internet import reactor
from twisted.internet.protocol import ClientFactory
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from twisted.web.http import HTTPClient, Request, HTTPChannel


class ProxyClient(HTTPClient):
    """
    Used by ProxyClientFactory to implement a simple web proxy.

    @ivar _finished: A flag which indicates whether or not the original request
        has been finished yet.
    """
    _finished = False

    def __init__(self, command, rest, version, headers, data, father):
        # 'father' is the incoming Request this client answers on behalf of.
        self.father = father
        self.command = command
        self.rest = rest
        # Hop-by-hop headers must not be forwarded; force the upstream
        # connection closed so handleResponseEnd fires deterministically.
        if "proxy-connection" in headers:
            del headers["proxy-connection"]
        headers["connection"] = "close"
        headers.pop('keep-alive', None)
        self.headers = headers
        self.data = data

    def connectionMade(self):
        # Replay the original request line, headers and body upstream.
        self.sendCommand(self.command, self.rest)
        for header, value in self.headers.items():
            self.sendHeader(header, value)
        self.endHeaders()
        self.transport.write(self.data)

    def handleStatus(self, version, code, message):
        # Mirror the upstream status onto the original request.
        self.father.setResponseCode(int(code), message)

    def handleHeader(self, key, value):
        # t.web.server.Request sets default values for these headers in its
        # 'process' method. When these headers are received from the remote
        # server, they ought to override the defaults, rather than append to
        # them.
        if key.lower() in ['server', 'date', 'content-type']:
            self.father.responseHeaders.setRawHeaders(key, [value])
        else:
            self.father.responseHeaders.addRawHeader(key, value)

    def handleResponsePart(self, buffer):
        # Stream the upstream body straight through to the client.
        self.father.write(buffer)

    def handleResponseEnd(self):
        """
        Finish the original request, indicating that the response has been
        completely written to it, and disconnect the outgoing transport.
        """
        if not self._finished:
            self._finished = True
            self.father.finish()
            self.transport.loseConnection()


class ProxyClientFactory(ClientFactory):
    """
    Used by ProxyRequest to implement a simple web proxy.
    """

    protocol = ProxyClient

    def __init__(self, command, rest, version, headers, data, father):
        self.father = father
        self.command = command
        self.rest = rest
        self.headers = headers
        self.data = data
        self.version = version

    def buildProtocol(self, addr):
        # Hand the captured request details to each new ProxyClient.
        return self.protocol(self.command, self.rest, self.version,
                             self.headers, self.data, self.father)

    def clientConnectionFailed(self, connector, reason):
        """
        Report a connection failure in a response to the incoming request as
        an error.
        """
        self.father.setResponseCode(501, "Gateway error")
        self.father.responseHeaders.addRawHeader("Content-Type", "text/html")
        self.father.write("<H1>Could not connect</H1>")
        self.father.finish()


class ProxyRequest(Request):
    """
    Used by Proxy to implement a simple web proxy.

    @ivar reactor: the reactor used to create connections.
    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
    """

    protocols = {'http': ProxyClientFactory}
    ports = {'http': 80}

    def __init__(self, channel, queued, reactor=reactor):
        Request.__init__(self, channel, queued)
        self.reactor = reactor

    def process(self):
        # The request-URI of a proxied request is absolute; split it into
        # host[:port] and the path/query to forward upstream.
        parsed = urlparse.urlparse(self.uri)
        protocol = parsed[0]
        host = parsed[1]
        port = self.ports[protocol]
        if ':' in host:
            host, port = host.split(':')
            port = int(port)
        rest = urlparse.urlunparse(('', '') + parsed[2:])
        if not rest:
            rest = rest + '/'
        class_ = self.protocols[protocol]
        headers = self.getAllHeaders().copy()
        if 'host' not in headers:
            headers['host'] = host
        self.content.seek(0, 0)
        s = self.content.read()
        clientFactory = class_(self.method, rest, self.clientproto, headers,
                               s, self)
        self.reactor.connectTCP(host, port, clientFactory)


class Proxy(HTTPChannel):
    """
    This class implements a simple web proxy.

    Since it inherits from L{twisted.web.http.HTTPChannel}, to use it you
    should do something like this::

        from twisted.web import http
        f = http.HTTPFactory()
        f.protocol = Proxy

    Make the HTTPFactory a listener on a port as per usual, and you have
    a fully-functioning web proxy!
    """

    requestFactory = ProxyRequest


class ReverseProxyRequest(Request):
    """
    Used by ReverseProxy to implement a simple reverse proxy.

    @ivar proxyClientFactoryClass: a proxy client factory class, used to create
        new connections.
    @type proxyClientFactoryClass: L{ClientFactory}

    @ivar reactor: the reactor used to create connections.
    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
    """

    proxyClientFactoryClass = ProxyClientFactory

    def __init__(self, channel, queued, reactor=reactor):
        Request.__init__(self, channel, queued)
        self.reactor = reactor

    def process(self):
        """
        Handle this request by connecting to the proxied server and forwarding
        it there, then forwarding the response back as the response to this
        request.
        """
        # Rewrite the Host header to the configured backend before forwarding.
        self.received_headers['host'] = self.factory.host
        clientFactory = self.proxyClientFactoryClass(
            self.method, self.uri, self.clientproto, self.getAllHeaders(),
            self.content.read(), self)
        self.reactor.connectTCP(self.factory.host, self.factory.port,
                                clientFactory)


class ReverseProxy(HTTPChannel):
    """
    Implements a simple reverse proxy.

    For details of usage, see the file examples/proxy.py.
    """

    requestFactory = ReverseProxyRequest


class ReverseProxyResource(Resource):
    """
    Resource that renders the results gotten from another server

    Put this resource in the tree to cause everything below it to be relayed
    to a different server.

    @ivar proxyClientFactoryClass: a proxy client factory class, used to create
        new connections.
    @type proxyClientFactoryClass: L{ClientFactory}

    @ivar reactor: the reactor used to create connections.
    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
    """

    proxyClientFactoryClass = ProxyClientFactory

    def __init__(self, host, port, path, reactor=reactor):
        """
        @param host: the host of the web server to proxy.
        @type host: C{str}

        @param port: the port of the web server to proxy.
        @type port: C{port}

        @param path: the base path to fetch data from. Note that you shouldn't
            put any trailing slashes in it, it will be added automatically in
            request. For example, if you put B{/foo}, a request on B{/bar} will
            be proxied to B{/foo/bar}.  Any required encoding of special
            characters (such as " " or "/") should have been done already.
        @type path: C{str}
        """
        Resource.__init__(self)
        self.host = host
        self.port = port
        self.path = path
        self.reactor = reactor

    def getChild(self, path, request):
        """
        Create and return a proxy resource with the same proxy configuration
        as this one, except that its path also contains the segment given by
        C{path} at the end.
        """
        return ReverseProxyResource(
            self.host, self.port, self.path + '/' + urlquote(path, safe=""),
            self.reactor)

    def render(self, request):
        """
        Render a request by forwarding it to the proxied server.
        """
        # RFC 2616 tells us that we can omit the port if it's the default port,
        # but we have to provide it otherwise
        if self.port == 80:
            host = self.host
        else:
            host = "%s:%d" % (self.host, self.port)
        request.received_headers['host'] = host
        request.content.seek(0, 0)
        qs = urlparse.urlparse(request.uri)[4]
        if qs:
            rest = self.path + '?' + qs
        else:
            rest = self.path
        clientFactory = self.proxyClientFactoryClass(
            request.method, rest, request.clientproto, request.getAllHeaders(),
            request.content.read(), request)
        self.reactor.connectTCP(self.host, self.port, clientFactory)
        # NOT_DONE_YET: the ProxyClient finishes the request asynchronously.
        return NOT_DONE_YET
#!/usr/bin/env python

"""
Runs a simulation under NPT conditions.

Outputs a portable state (.xml) file with positions and velocities,
to allow restarting and/or continuation.

.2019. joaor@stanford.edu
"""

from __future__ import print_function, division

import argparse
import logging
import math
import os
import random
import re
import sys

import numpy as np

import simtk.openmm.app as app
import simtk.openmm as mm
import simtk.unit as units

import _utils
import _restraints

# Format logger
logging.basicConfig(stream=sys.stdout,
                    level=logging.INFO,
                    format='[%(asctime)s] %(message)s',
                    datefmt='%Y/%m/%d %H:%M:%S')

##
# Parse user input and options
ap_fmt = argparse.ArgumentDefaultsHelpFormatter
ap = argparse.ArgumentParser(description=__doc__, formatter_class=ap_fmt)

# Mandatory
ap.add_argument('structure', help='Input coordinate file (.cif)')

# Options
ap.add_argument('--output', type=str, default=None,
                help='Root name for output files. Default is input file name.')
ap.add_argument('--forcefield', type=str, default='amber14-all.xml',
                help='Force field to build the system with (XML format).')
ap.add_argument('--solvent', type=str, default='amber14/tip3p.xml',
                help='Solvent model to use in minimization (XML format).')
ap.add_argument('--xyz-frequency', dest='xyz_freq', type=int, default=5000,
                help='Frequency (number of steps) to write coordinates.')
ap.add_argument('--log-frequency', dest='log_freq', type=int, default=5000,
                help='Frequency (number of steps) to log run parameters.')
ap.add_argument('--platform', type=str, default=None,
                choices=('OpenCL', 'CUDA', 'CPU', 'Reference'),
                help='Platform to run calculations on.')
ap.add_argument('--state', type=str,
                help='Checkpoint/XML file to read positions/velocities from.')
ap.add_argument('--seed', type=int, default=917,
                help='Seed number for random number generator(s).')
ap.add_argument('--temperature', default=310, type=float,
                help='Target temperature, in Kelvin.')
ap.add_argument('--pressure', default=1.0, type=float,
                help='Target pressure, in bar.')
ap.add_argument('--barostat', default='isotropic',
                choices=('isotropic', 'membrane'),
                help='Type of barostat.')
ap.add_argument('--runtime', default=5, type=float,
                help='Simulation length in nanoseconds. Default 5.')
ap.add_argument('--continuation', action='store_true',
                help='Reads elapsed run time from checkpoint/state files.')
ap.add_argument('--restraint-heavy-atom', action='store_true', default=False,
                help='Apply position restraints to non-solvent heavy atoms')
ap.add_argument('--restraint-lipids', action='store_true', default=False,
                help='Apply position restraints to lipid head groups')
ap.add_argument('--restraint-heavy-atom-k', default=500, type=int,
                help='Force constant for heavy atom restraints.')
ap.add_argument('--restraint-lipids-k', default=500, type=int,
                help='Force constant for lipid restraints.')
ap.add_argument('--hmr', action='store_true', default=False,
                help='Use Hydrogen Mass Repartitioning.')

cmd = ap.parse_args()

logging.info('Started')

# Set random seed for reproducibility
random.seed(cmd.seed)

# Figure out platform
platform, plat_properties = _utils.get_platform(cmd.platform)

logging.info('Simulation Details:')
logging.info(f' random seed : {cmd.seed}')
logging.info(f' structure : {cmd.structure}')
logging.info(f' force field : {cmd.forcefield}')
logging.info(f' solvent model: {cmd.solvent}')
logging.info(f' temperature : {cmd.temperature} K')
logging.info(f' barostat : {cmd.barostat}')
logging.info(f' pressure : {cmd.pressure} bar')
logging.info(f' runtime : {cmd.runtime} ns')
logging.info(f' heavy-atom restraints : {cmd.restraint_heavy_atom}')
if cmd.restraint_heavy_atom:
    logging.info(f' K = {cmd.restraint_heavy_atom_k} kJ/mol/nm^2')
logging.info(f' lipid restraints : {cmd.restraint_lipids}')
if cmd.restraint_lipids:
    logging.info(f' K = {cmd.restraint_lipids_k} kJ/mol/nm^2')
logging.info(f' HMR : {cmd.hmr}')

# Make rootname for output files
basename = os.path.basename(cmd.structure)
fname, fext = os.path.splitext(basename)
if cmd.output is None:
    # NOTE(review): suffix says NVT but this script runs NPT — confirm the
    # default output root name is intentional.
    rootname = fname + '_EqNVT'
else:
    rootname = cmd.output

# Read in structure data and setup OpenMM system
structure = app.PDBxFile(cmd.structure)

# Remove dummy atoms (mass 0) just in case
model = app.Modeller(structure.topology, structure.positions)
dummy_idx = [a for a in model.topology.atoms() if a.element is None]
n_dummies = len(dummy_idx)
if n_dummies:
    logging.info(f'Removing {n_dummies} dummy atoms from input')
    model.delete(dummy_idx)
    structure.topology = model.topology
    structure.positions = model.positions

forcefield = app.ForceField(cmd.forcefield, cmd.solvent)

# Baseline MD parameters: 2 fs step, 1/ps friction, 1 nm cutoff, H-bond
# constraints; overridden below when HMR is requested.
md_temp = cmd.temperature * units.kelvin
md_step = 2.0*units.femtosecond
md_fric = 1.0/units.picosecond
md_nbct = 1.0*units.nanometer
md_hamu = None
md_cstr = app.HBonds

if cmd.hmr:  # adapt for HMR if necessary
    md_step *= 2.5  # make 5 fs
    md_hamu = 4*units.amu
    md_cstr = app.AllBonds

# Build system & integrator
logging.info('Setting up system and integrator')
system = forcefield.createSystem(structure.topology,
                                 nonbondedMethod=app.PME,
                                 nonbondedCutoff=md_nbct,
                                 constraints=md_cstr,
                                 hydrogenMass=md_hamu,
                                 ewaldErrorTolerance=0.0005,
                                 rigidWater=True)

# Setup pressure
md_pres = cmd.pressure * units.bar
if cmd.barostat == 'isotropic':
    b = mm.MonteCarloBarostat(md_pres, md_temp, 25)
elif cmd.barostat == 'membrane':
    surface_tension = 0*units.bar*units.nanometer  # amber lipids = tensionless
    b = mm.MonteCarloMembraneBarostat(md_pres, surface_tension, md_temp,
                                      mm.MonteCarloMembraneBarostat.XYIsotropic,
                                      mm.MonteCarloMembraneBarostat.ZFree, 25)
system.addForce(b)

# Setup integrator and temperature coupling
integrator = mm.LangevinIntegrator(md_temp, md_fric, md_step)
integrator.setRandomNumberSeed(cmd.seed)
integrator.setConstraintTolerance(0.00001)

# Restrain heavy atoms
if cmd.restraint_heavy_atom:
    # force = _restraints.make_heavy_atom_restraints(structure,
    #                                                cmd.restraint_heavy_atom_k)
    force = _restraints.make_heavy_atom_restraints_v2(system, structure,
                                                      cmd.restraint_heavy_atom_k)
    system.addForce(force)

# Restrain lipid headgroups in Z
if cmd.restraint_lipids:
    # force = _restraints.make_lipid_restraints(structure,
    #                                           cmd.restraint_lipids_k)
    force = _restraints.make_lipid_restraints_v2(system, structure,
                                                 cmd.restraint_lipids_k)
    system.addForce(force)

# Setup simulation
simulation = app.Simulation(structure.topology, system, integrator,
                            platform, plat_properties)
simulation.context.setPositions(structure.positions)
simulation.context.setVelocitiesToTemperature(md_temp)

# Load checkpoint/state file
# XML states always restart the clock; binary checkpoints honor --continuation.
if cmd.state:
    if cmd.state.endswith('.xml'):  # is XML state file
        logging.info(f'Loading XML state file: {cmd.state}')
        simulation.loadState(cmd.state)
        logging.info(f' resetting simulation time')
        simulation.context.setTime(0.0)  # resets simulation time
        cmd.runtime = cmd.runtime * units.nanosecond
    elif cmd.state.endswith('.cpt'):  # is binary checkpoint
        logging.info(f'Loading binary checkpoint file: {cmd.state}')
        simulation.loadCheckpoint(cmd.state)

        if cmd.continuation:
            # Adjust remaining running time
            run_time = simulation.context.getState().getTime()
            run_time_val = run_time.value_in_unit(units.nanosecond)
            logging.info(f' {run_time_val:8.2f}/{cmd.runtime:8.2f} ns completed')

            expected_t = cmd.runtime * units.nanosecond
            cmd.runtime = (expected_t - run_time).in_units_of(units.nanosecond)
        else:  # restart from 0
            simulation.context.setTime(0.0)
            cmd.runtime = cmd.runtime * units.nanosecond
    else:
        raise Exception(f'State file format not recognized: {cmd.state}')
else:
    cmd.runtime = cmd.runtime * units.nanosecond

# Assert we actually have to run something.
if cmd.runtime <= 0.00001 * units.nanosecond:
    logging.info('Equilibration completed. Apparently. Maybe ask for more?')
    logging.info('Finished')
    sys.exit(0)

# Setup writer/logger frequencies
# Default: 0.01 ns
if cmd.hmr:  # Time step is 5 fs vs 2fs
    cmd.xyz_freq = int(cmd.xyz_freq // 2.5)
    cmd.log_freq = int(cmd.log_freq // 2.5)

# Calculate total simulation length in steps
n_steps = int(math.ceil(cmd.runtime / md_step.in_units_of(units.nanoseconds)))
# n_steps is dimensionless (ns/ns)

# Setup Reporters
dcd_fname = _utils.make_fname_serial(rootname + '.dcd')
cpt_fname = _utils.make_fname_serial(rootname + '.cpt')
log_fname = _utils.make_fname_serial(rootname + '.log')

dcd = app.DCDReporter(dcd_fname, cmd.xyz_freq)
cpt = app.CheckpointReporter(cpt_fname, cmd.xyz_freq)
state = app.StateDataReporter(log_fname, cmd.log_freq,
                              step=True, time=True,
                              potentialEnergy=True, kineticEnergy=True,
                              temperature=True, progress=True,
                              remainingTime=True, volume=True,
                              totalSteps=n_steps, speed=True,
                              separator='\t')

simulation.reporters.append(dcd)
simulation.reporters.append(cpt)
simulation.reporters.append(state)

logging.info(f'Writing coordinates to \'{dcd_fname}\'')
logging.info(f'Writing checkpoint file to \'{cpt_fname}\'')
logging.info(f'Writing simulation log to \'{log_fname}\'')

# Run simulation
simulation.step(n_steps)

# Write state file (without restraining forces)
xml_fname = _utils.make_fname_serial(rootname + '.xml')
logging.info(f'Writing state file to \'{xml_fname}\'')

# Restraint forces were appended last, so pop them off the end of the system.
system = simulation.system
n_rest_forces = sum([cmd.restraint_heavy_atom, cmd.restraint_lipids])
while n_rest_forces:
    system.removeForce(system.getNumForces() - 1)
    n_rest_forces -= 1

# Reinitialize context. Keep velocities, positions.
state = simulation.context.getState(getPositions=True, getVelocities=True)
vx, vy, vz = state.getPeriodicBoxVectors()
xyz, vel = state.getPositions(), state.getVelocities()
simulation.context.reinitialize(preserveState=False)
simulation.context.setPositions(xyz)
simulation.context.setVelocities(vel)
simulation.context.setPeriodicBoxVectors(vx, vy, vz)
simulation.saveState(xml_fname)

# Write last frame as mmCIF
cif_fname = _utils.make_fname_serial(rootname + '.cif')
logging.info(f'Writing final structure to \'{cif_fname}\'')
with open(cif_fname, 'w') as handle:
    app.PDBxFile.writeFile(structure.topology, xyz, handle, keepIds=True)

# Write system without dummy atoms
# Easier to redo system object
# and set positions/velocities manually.
model = app.Modeller(structure.topology, structure.positions)
dummy = [c for c in model.topology.chains() if c.id.startswith('DUM')]
model.delete(dummy)  # delete entire chains
# NOTE(review): assumes dummy chains sort last, so slicing xyz/vel with
# [:n_ini_atoms] keeps exactly the non-dummy atoms — confirm chain ordering.
n_ini_atoms = model.topology.getNumAtoms()

logging.info('Writing system without dummy (restraint) atoms')
system = forcefield.createSystem(model.topology,
                                 nonbondedMethod=app.PME,
                                 nonbondedCutoff=md_nbct,
                                 constraints=md_cstr,
                                 hydrogenMass=md_hamu,
                                 ewaldErrorTolerance=0.0005,
                                 rigidWater=True)
integrator = mm.LangevinIntegrator(md_temp, md_fric, md_step)
simulation = app.Simulation(model.topology, system, integrator)
simulation.context.setPositions(xyz[:n_ini_atoms])
simulation.context.setVelocities(vel[:n_ini_atoms])
simulation.context.setPeriodicBoxVectors(vx, vy, vz)

xml_fname = _utils.make_fname(rootname + '_noDUM' + '.xml')
logging.info(f'Writing dummy-less state to \'{xml_fname}\'')
simulation.saveState(xml_fname)

# Write last frame as mmCIF
cif_fname = _utils.make_fname(rootname + '_noDUM' + '.cif')
logging.info(f'Writing dummy-less structure to \'{cif_fname}\'')
with open(cif_fname, 'w') as handle:
    app.PDBxFile.writeFile(model.topology, xyz[:n_ini_atoms], handle,
                           keepIds=True)

logging.info('Finished')
"""GO terms Resources: geneontology.org and godb Python package from endrebak user (Bakken). https://github.com/endrebak/godb/ """ import urllib import os from collections import defaultdict from biokit import biokitPATH import pandas as pd __all__ = ["num2goid", "GOId", "GODB", "GOTerm"] def num2goid(value): """Returns GO identifier string correctly formatted""" return GOId(value).identifier class GOId(object): """Representation of a GO identifier A GO identifier simply takes the form GO:XXXXXXX where X are digits so this is the string "GO:" followed by 7 digits. This class ease the construction and validation of GO identifiers. As an input, one can provide a valid GO identifiers or a number, which is then transformed into the proper formatted identifier :: >>> o = GOId(5) >>> o.iddentifier 'GO:0000005' >>> o = GOId('GO:0000005') >>> o.identifier 'GO:0000005' >>> print(o) 'GO:0000005' """ def __init__(self, value, verbose=True): """converts the numbers into Gene Ontology IDs. :param list numbers: a list of integer or strings or just a string or int. The numbers must be :return: list of GO Ids IDs are seven-digit numbers preceded by the prefix GO: (Gene Ontology database standard). """ if verbose: print( "Will be deprecated after v0.5 please checkout sequana.readtheodocs.io for an alternative" ) self._identifier = None self.identifier = value def _get_identifier(self): return self._identifier def _set_identifier(self, identifier): self._identifier = self._num2goid(identifier) identifier = property(_get_identifier, _set_identifier) def _num2goid(self, value): msg = "GO identifier format must be GO:XXXXXXX with X being numbers" if isinstance(value, str): if value.startswith("GO:"): prefix, suffix = value.split(":", 1) else: suffix = value suffix = int(suffix) elif isinstance(value, int): suffix = value else: raise TypeError("value must be a string or an integer. 
Provided %s" % value) if suffix < 1e7: goId = "GO:%07d" % suffix else: raise ValueError(msg) return goId def __repr__(self): return self.identifier def __str__(self): return self.identifier class GOTerm(object): """Transform a text-version of a GO Term into data structures A text contain information about a GO term looks like (obo format):: [Term] id: GO:0000003 name: reproduction namespace: biological_process This can be parsed with this class. You can retrieve these OBO text with other tools from BioServices:: from bioservices import QuickGO q = QuickGO() data = q.Term("GO:0003824", frmt='obo') from biokit import GOTerm term = GOTerm(data) term.to_dict() If an item appears several times (e.g., xref), then the values stored is a list, otherwise, it is the raw text found after the item name. Here is a brief overview of the structure of an ontology (see geneontology.org for details). Unique identifier **id** and term name: every term (e.g., mitochondrion) has a unique zero-padded seven digit identifier (often called the term accession or term accession number) prefixed by GO:, e.g. GO:0005125. Namespace (**namespace**) denotes which of the three sub-ontologies-cellular component, biological process or molecular function-the term belongs to. Definition: a textual description of what the term represents, plus reference(s) to the source of the information. Relationships to other terms: One or more links that capture how the term relates to other terms in the ontology. All terms have an "is a" sub-class relationship to another term. Gene Ontology employs a number of other relations, including 'part of' , and 'regulates'. Synonyms **synonym** are alternative words or phrases closely related in meaning to the term name, with indication of the relationship between the name and synonym given by the synonym scope. The scopes for GO synonyms are EXACT, BROAD, NARROW and RELATED. 
Database cross-references or dbxrefs, refer to identical or very similar objects in other databases. For instance, the molecular function term retinal isomerase activity is cross-referenced with the Enzyme Commission entry EC:5.2.1.3; the biological process term sulfate assimilation has the cross-reference MetaCyc:PWY-781. Comment: An y extra information about the term and its usage. Subset (*) : Indicates that the term belongs to a designated subset of terms, e.g. one of the GO slims. Obsolet tag: Indicates that the term has been deprecated and should not be used. **id** and **name** are compulsary and unique. optional tags are **is_ .. seealso:: :class:`GODB` and QuickGO from bioservices. For instance, from QuickGO, you may get more information about cross references as compared to :class:`GODB` that relies on geneontologies.org snapshot. """ def __init__(self, text, remove_comments=True): if text.startswith("<obo>"): raise NotImplementedError("obo XML format, use obo plain text instead") elif "id:" in text and "name:" in text: # assume that the text is a OBO formatted text self.text = text[:] else: # Assume the input text is a valid GO identifier goid = GOId(text).identifier # try to retrieve Term for this GO identifier with QuickGO from bioservices import QuickGO q = QuickGO() text = q.Term(goid, frmt="obo") self.text = text self.remove_comments = remove_comments def _remove_comments(self, term): # Let us get rid of all comments, this will make further parsing easier for k, v in term.items(): if isinstance(v, list): term[k] = [x.split("!")[0].strip() for x in v] else: if "!" 
in v: term[k] = v.split("!")[0].strip() return term def to_dict(self): # We assume all tags are list, which is not true d = defaultdict(list) for this in self.text.strip().split("\n"): if ":" in this: key, value = this.split(":", 1) d[key.strip()].append(value.strip()) # Based on http://oboformat.googlecode.com/svn/trunk/doc/GO.format.obo-1_2.html # we can clean up the lists converting some of them to # single item. We also issue a warning with deprecated ones. lists = [ "alt_id", "synonym", "is_a", "xref", "intersection_of", "relationship", "union_of", "disjoint_from", "consider", "replaced_by", "subset", "property_value", ] nonlist = [ "id", "name", "is_anonymous", "is_obsolete", "def", "comment", "created_by", "creation_date", "namespace", ] deprecated = { "exact_synonym": "synonym", "narrow_synonym": "synonym", "broad_synonym": "synonym", "xref_analog": "xref", "xref_unk": "xref", "use_term": "consider", } # missing: namespace, subset, dd = {} for k, v in d.items(): if k in lists: # nothing to do dd[k] = v elif k in nonlist: if len(v) == 1: dd[k] = v[0] else: raise ValueError( "%s must be found only once. Check %s" % (k, d["id"]) ) elif k in deprecated.keys(): print("%s deprecated. Kept and assuming non unique tag" % (k, d["id"])) dd[k] = v else: print("%s not handled in %s. Assuming non unique tag" % (k, d["id"])) dd[k] = v if self.remove_comments is True: dd = self._remove_comments(dd) # IN OBO/GO format, id and name must be provided # others are optional if "id" not in dd.keys() and "name" not in dd.keys(): raise ValueError("'id' tag and 'name' must be provided") return dd class GODB(object): """Simple handler to get list of GO terms from geneontology website The Gene Ontology project provides controlled vocabularies of defined terms epresenting gene product properties. 
These cover three domains: Cellular omponent, the parts of a cell or its extracellular environment; Molecular unction, the elemental activities of a gene product at the molecular level, uch as binding or catalysis; and Biological Process, operations or sets of olecular events with a defined beginning and end, pertinent to the functioning of integrated living units: cells, tissues, organs, and organisms. .. seealso:: bioservices QuickGO class """ def __init__(self, name="go.obo", drop_obsolet=True, local=False): """ Searches for a file on geneontology.org exce :param name: name of the go OBO file to be downloaded :param drop_obsolet: drop obsolet GO terms from the entire DB :param local: read the OBO file locally (False) or downloads from geneontology.org if not present in the biokit directory. """ self.name = "go.obo" if local is False: self.filename = biokitPATH + os.sep + self.name else: self.filename = self.name self._init() if drop_obsolet is True: self.df = self.df[self.df.is_obsolete != True] self.df.pop("is_obsolete") # split relationships self.mapping = { "id": "GO id", "namespace": "Ontology", "name": "Term", "def": "Definition", } # TODO: split relationshio into has_part and part_of def _init(self): if os.path.exists(biokitPATH + os.sep + self.name) is False: self._download_godb() # Read the original OBO file self.read_goterms() # Parse the GO terms one by one and transform into a DataFrame self.df = self._terms2df() def _download_godb(self): print("Downloading go db once for all") urllib.urlretrieve( "http://geneontology.org/ontology/%s" % self.name, self.filename ) def read_goterms(self): # lines starting with ! can be ignored # any line may end with a comment starting with ! 
with open(self.filename) as fhdata: data = "".join(fhdata.readlines()).split("\n\n[Term]")[1:-1] self.obo_terms = data def _term2dict(self, term): term = GOTerm(term) return term.to_dict() def _terms2df(self): terms = [self._term2dict(term) for term in self.obo_terms] df = pd.DataFrame(terms) df.replace("true", True, inplace=True) df.replace("false", False, inplace=True) df.set_index("id", inplace=True) return df def __len__(self): return len(self.df) def get_annotations(self): # replace some columns df = self.df.copy() df = df[["namespace", "name", "synonym", "def", "relationship"]] df.columns = ["Ontology", "Term", "Synonym", "Definition", "relationship"] return df def search(self, search, where="name", method="in"): if method == "in": selection = self.df[where].apply(lambda x: search in x) elif method == "is": selection = self.df[where].apply(lambda x: search == x) elif method == "startswith": selection = self.df[where].apply(lambda x: x.startswith(search)) return self.df[selection].copy() # The is a relation is transitive, which means that if A is a B, and B is a # C, we can infer that A is a C. # B part of A does not mean that A have B systematically (think of # replication fork part of chromosome but chromosome do not have a # replication fork systemtically. # Like is a, part of is transitive: if A part of B part of C # then A part of C # # The logical complement to the part of relation is has part, which # represents a part-whole relationship from the perspective of the parent. # As with part of, the GO relation has part is only used in cases where A # always has B as a part, i.e. where A necessarily has part B. If A exists, # B will always exist; however, if B exists, we cannot say for certain that # A exists. i.e. all A have part B; some B part of A. 
def get_children(self, ontology="CC", relations=["is_a", "part_of", "has_part"]): """http://geneontology.org/page/ontology-relations""" if ontology in ["CC", "cellular_component"]: ontology = "cellular_component" elif ontology in ["BP", "biological_process"]: ontology = "biological_process" elif ontology in ["MF", "molecular_function"]: ontology = "molecular_function" else: raise ValueError("ontology must be one of %s" % set(self.df.namespace)) newdf = self.df.loc[self.df.namespace == ontology] children = [] parents = [] vrelations = [] # this part is slow and maybe improved. for name, row in newdf.iterrows(): if str(row.is_a) == "nan": pass elif "is_a" in relations: for this in row.is_a: children.append(name) parents.append(this) vrelations.append("is_a") if str(row.relationship) == "nan": pass else: for this in row.relationship: relation, goid = this.split() if relation == "part_of" and relation in relations: children.append(name) parents.append(goid) vrelations.append(relation) elif relation == "has_part" and relation in relations: # not that children/parent are swapped as compared # to the part_of relation. 
children.append(goid) parents.append(name) vrelations.append(relation) df = pd.DataFrame( {"Child": children, "Parent": parents, "Relation": vrelations} ) df.sort("Relation", inplace=True, ascending=False) return df def get_offspring(self): # again, based on # https://github.com/endrebak/godb/ # slower but no need for now offspring_df = self.get_children().copy()[["Child", "Parent"]] offspring_df.columns = ["Offspring", "Parent"] df = offspring_df new_df = self._compute_transitive_closure(df) while not df.equals(new_df): df = df.append(new_df).drop_duplicates() new_df = self._compute_transitive_closure(df) df = df.drop_duplicates().sort() new_df = new_df.drop_duplicates().sort() return new_df def _compute_transitive_closure(self, df): """Computes the transitive closure from a two-col parent/child map.""" df_temp = df[df["Parent"].isin(df["Offspring"])] df2 = df.merge( df_temp, left_on="Offspring", right_on="Parent", suffixes=["_1_gen", "_2_gen"], ) df2 = df2.drop(["Offspring_1_gen", "Parent_2_gen"], axis=1) df2.columns = ["Parent", "Offspring"] concat_df = pd.concat([df, df2]).drop_duplicates() return concat_df
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import sys
import warnings

# On Python 3 there is no basestring/unicode; alias them so the rest of the
# module can use the Python 2 names uniformly.
if sys.version >= '3':
    basestring = unicode = str

from pyspark import since, _NoValue
from pyspark.rdd import ignore_unicode_prefix
from pyspark.sql.session import _monkey_patch_RDD, SparkSession
from pyspark.sql.dataframe import DataFrame
from pyspark.sql.readwriter import DataFrameReader
from pyspark.sql.streaming import DataStreamReader
from pyspark.sql.types import IntegerType, Row, StringType
from pyspark.sql.udf import UDFRegistration
from pyspark.sql.utils import install_exception_handler

__all__ = ["SQLContext", "HiveContext"]


class SQLContext(object):
    """The entry point for working with structured data (rows and columns) in Spark, in Spark 1.x.

    As of Spark 2.0, this is replaced by :class:`SparkSession`. However, we are keeping the class
    here for backward compatibility.

    A SQLContext can be used create :class:`DataFrame`, register :class:`DataFrame` as
    tables, execute SQL over tables, cache tables, and read parquet files.

    :param sparkContext: The :class:`SparkContext` backing this SQLContext.
    :param sparkSession: The :class:`SparkSession` around which this SQLContext wraps.
    :param jsqlContext: An optional JVM Scala SQLContext. If set, we do not instantiate a new
        SQLContext in the JVM, instead we make all calls to this object.
    """

    # Cache of the first live SQLContext so getOrCreate() can reuse it
    # (reset when the underlying SparkContext has been stopped).
    _instantiatedContext = None

    @ignore_unicode_prefix
    def __init__(self, sparkContext, sparkSession=None, jsqlContext=None):
        """Creates a new SQLContext.

        .. note:: Deprecated in 3.0.0. Use :func:`SparkSession.builder.getOrCreate()` instead.

        >>> from datetime import datetime
        >>> sqlContext = SQLContext(sc)
        >>> allTypes = sc.parallelize([Row(i=1, s="string", d=1.0, l=1,
        ...     b=True, list=[1, 2, 3], dict={"s": 0}, row=Row(a=1),
        ...     time=datetime(2014, 8, 1, 14, 1, 5))])
        >>> df = allTypes.toDF()
        >>> df.createOrReplaceTempView("allTypes")
        >>> sqlContext.sql('select i+1, d+1, not b, list[1], dict["s"], time, row.a '
        ...            'from allTypes where b and i > 0').collect()
        [Row((i + CAST(1 AS BIGINT))=2, (d + CAST(1 AS DOUBLE))=2.0, (NOT b)=False, list[1]=2, \
            dict[s]=0, time=datetime.datetime(2014, 8, 1, 14, 1, 5), a=1)]
        >>> df.rdd.map(lambda x: (x.i, x.s, x.d, x.l, x.b, x.time, x.row.a, x.list)).collect()
        [(1, u'string', 1.0, 1, True, datetime.datetime(2014, 8, 1, 14, 1, 5), 1, [1, 2, 3])]
        """
        warnings.warn(
            "Deprecated in 3.0.0. Use SparkSession.builder.getOrCreate() instead.",
            DeprecationWarning)
        self._sc = sparkContext
        self._jsc = self._sc._jsc
        self._jvm = self._sc._jvm
        # Lazily create (or reuse) the SparkSession and its JVM wrapper
        # when they were not supplied by the caller.
        if sparkSession is None:
            sparkSession = SparkSession.builder.getOrCreate()
        if jsqlContext is None:
            jsqlContext = sparkSession._jwrapped
        self.sparkSession = sparkSession
        self._jsqlContext = jsqlContext
        _monkey_patch_RDD(self.sparkSession)
        install_exception_handler()
        # Remember this context for getOrCreate() unless a live one already
        # exists (a stopped SparkContext, with _jsc None, does not count).
        if (SQLContext._instantiatedContext is None
                or SQLContext._instantiatedContext._sc._jsc is None):
            SQLContext._instantiatedContext = self

    @property
    def _ssql_ctx(self):
        """Accessor for the JVM Spark SQL context.

        Subclasses can override this property to provide their own
        JVM Contexts.
        """
        return self._jsqlContext

    @property
    def _conf(self):
        """Accessor for the JVM SQL-specific configurations"""
        return self.sparkSession._jsparkSession.sessionState().conf()

    @classmethod
    @since(1.6)
    def getOrCreate(cls, sc):
        """
        Get the existing SQLContext or create a new one with given SparkContext.

        :param sc: SparkContext

        .. note:: Deprecated in 3.0.0. Use :func:`SparkSession.builder.getOrCreate()` instead.
        """
        warnings.warn(
            "Deprecated in 3.0.0. Use SparkSession.builder.getOrCreate() instead.",
            DeprecationWarning)

        # Recreate the context when none was cached yet or when the cached
        # one's SparkContext has been stopped; the constructor updates the
        # cache as a side effect.
        if (cls._instantiatedContext is None
                or SQLContext._instantiatedContext._sc._jsc is None):
            jsqlContext = sc._jvm.SparkSession.builder().sparkContext(
                sc._jsc.sc()).getOrCreate().sqlContext()
            sparkSession = SparkSession(sc, jsqlContext.sparkSession())
            cls(sc, sparkSession, jsqlContext)
        return cls._instantiatedContext

    @since(1.6)
    def newSession(self):
        """
        Returns a new SQLContext as new session, that has separate SQLConf,
        registered temporary views and UDFs, but shared SparkContext and
        table cache.
        """
        return self.__class__(self._sc, self.sparkSession.newSession())

    @since(1.3)
    def setConf(self, key, value):
        """Sets the given Spark SQL configuration property.
        """
        self.sparkSession.conf.set(key, value)

    @ignore_unicode_prefix
    @since(1.3)
    def getConf(self, key, defaultValue=_NoValue):
        """Returns the value of Spark SQL configuration property for the given key.

        If the key is not set and defaultValue is set, return
        defaultValue. If the key is not set and defaultValue is not set, return
        the system default value.

        >>> sqlContext.getConf("spark.sql.shuffle.partitions")
        u'200'
        >>> sqlContext.getConf("spark.sql.shuffle.partitions", u"10")
        u'10'
        >>> sqlContext.setConf("spark.sql.shuffle.partitions", u"50")
        >>> sqlContext.getConf("spark.sql.shuffle.partitions", u"10")
        u'50'
        """
        return self.sparkSession.conf.get(key, defaultValue)

    @property
    @since("1.3.1")
    def udf(self):
        """Returns a :class:`UDFRegistration` for UDF registration.

        :return: :class:`UDFRegistration`
        """
        return self.sparkSession.udf

    @since(1.4)
    def range(self, start, end=None, step=1, numPartitions=None):
        """
        Create a :class:`DataFrame` with single :class:`pyspark.sql.types.LongType` column named
        ``id``, containing elements in a range from ``start`` to ``end`` (exclusive) with
        step value ``step``.

        :param start: the start value
        :param end: the end value (exclusive)
        :param step: the incremental step (default: 1)
        :param numPartitions: the number of partitions of the DataFrame
        :return: :class:`DataFrame`

        >>> sqlContext.range(1, 7, 2).collect()
        [Row(id=1), Row(id=3), Row(id=5)]

        If only one argument is specified, it will be used as the end value.

        >>> sqlContext.range(3).collect()
        [Row(id=0), Row(id=1), Row(id=2)]
        """
        return self.sparkSession.range(start, end, step, numPartitions)

    @since(1.2)
    def registerFunction(self, name, f, returnType=None):
        """An alias for :func:`spark.udf.register`.
        See :meth:`pyspark.sql.UDFRegistration.register`.

        .. note:: Deprecated in 2.3.0. Use :func:`spark.udf.register` instead.
        """
        warnings.warn(
            "Deprecated in 2.3.0. Use spark.udf.register instead.",
            DeprecationWarning)
        return self.sparkSession.udf.register(name, f, returnType)

    @since(2.1)
    def registerJavaFunction(self, name, javaClassName, returnType=None):
        """An alias for :func:`spark.udf.registerJavaFunction`.
        See :meth:`pyspark.sql.UDFRegistration.registerJavaFunction`.

        .. note:: Deprecated in 2.3.0. Use :func:`spark.udf.registerJavaFunction` instead.
        """
        warnings.warn(
            "Deprecated in 2.3.0. Use spark.udf.registerJavaFunction instead.",
            DeprecationWarning)
        return self.sparkSession.udf.registerJavaFunction(name, javaClassName, returnType)

    # TODO(andrew): delete this once we refactor things to take in SparkSession
    def _inferSchema(self, rdd, samplingRatio=None):
        """
        Infer schema from an RDD of Row or tuple.

        :param rdd: an RDD of Row or tuple
        :param samplingRatio: sampling ratio, or no sampling (default)
        :return: :class:`pyspark.sql.types.StructType`
        """
        return self.sparkSession._inferSchema(rdd, samplingRatio)

    @since(1.3)
    @ignore_unicode_prefix
    def createDataFrame(self, data, schema=None, samplingRatio=None, verifySchema=True):
        """
        Creates a :class:`DataFrame` from an :class:`RDD`, a list or a :class:`pandas.DataFrame`.

        When ``schema`` is a list of column names, the type of each column
        will be inferred from ``data``.

        When ``schema`` is ``None``, it will try to infer the schema (column names and types)
        from ``data``, which should be an RDD of :class:`Row`,
        or :class:`namedtuple`, or :class:`dict`.

        When ``schema`` is :class:`pyspark.sql.types.DataType` or a datatype string it must match
        the real data, or an exception will be thrown at runtime. If the given schema is not
        :class:`pyspark.sql.types.StructType`, it will be wrapped into a
        :class:`pyspark.sql.types.StructType` as its only field, and the field name will be "value",
        each record will also be wrapped into a tuple, which can be converted to row later.

        If schema inference is needed, ``samplingRatio`` is used to determined the ratio of
        rows used for schema inference. The first row will be used if ``samplingRatio`` is ``None``.

        :param data: an RDD of any kind of SQL data representation(e.g. :class:`Row`,
            :class:`tuple`, ``int``, ``boolean``, etc.), or :class:`list`, or
            :class:`pandas.DataFrame`.
        :param schema: a :class:`pyspark.sql.types.DataType` or a datatype string or a list of
            column names, default is None.  The data type string format equals to
            :class:`pyspark.sql.types.DataType.simpleString`, except that top level struct type can
            omit the ``struct<>`` and atomic types use ``typeName()`` as their format, e.g. use
            ``byte`` instead of ``tinyint`` for :class:`pyspark.sql.types.ByteType`.
            We can also use ``int`` as a short name for :class:`pyspark.sql.types.IntegerType`.
        :param samplingRatio: the sample ratio of rows used for inferring
        :param verifySchema: verify data types of every row against schema.
        :return: :class:`DataFrame`

        .. versionchanged:: 2.0
           The ``schema`` parameter can be a :class:`pyspark.sql.types.DataType` or a
           datatype string after 2.0.
           If it's not a :class:`pyspark.sql.types.StructType`, it will be wrapped into a
           :class:`pyspark.sql.types.StructType` and each record will also be wrapped into a tuple.

        .. versionchanged:: 2.1
           Added verifySchema.

        >>> l = [('Alice', 1)]
        >>> sqlContext.createDataFrame(l).collect()
        [Row(_1=u'Alice', _2=1)]
        >>> sqlContext.createDataFrame(l, ['name', 'age']).collect()
        [Row(name=u'Alice', age=1)]

        >>> d = [{'name': 'Alice', 'age': 1}]
        >>> sqlContext.createDataFrame(d).collect()
        [Row(age=1, name=u'Alice')]

        >>> rdd = sc.parallelize(l)
        >>> sqlContext.createDataFrame(rdd).collect()
        [Row(_1=u'Alice', _2=1)]
        >>> df = sqlContext.createDataFrame(rdd, ['name', 'age'])
        >>> df.collect()
        [Row(name=u'Alice', age=1)]

        >>> from pyspark.sql import Row
        >>> Person = Row('name', 'age')
        >>> person = rdd.map(lambda r: Person(*r))
        >>> df2 = sqlContext.createDataFrame(person)
        >>> df2.collect()
        [Row(name=u'Alice', age=1)]

        >>> from pyspark.sql.types import *
        >>> schema = StructType([
        ...    StructField("name", StringType(), True),
        ...    StructField("age", IntegerType(), True)])
        >>> df3 = sqlContext.createDataFrame(rdd, schema)
        >>> df3.collect()
        [Row(name=u'Alice', age=1)]

        >>> sqlContext.createDataFrame(df.toPandas()).collect()  # doctest: +SKIP
        [Row(name=u'Alice', age=1)]
        >>> sqlContext.createDataFrame(pandas.DataFrame([[1, 2]])).collect()  # doctest: +SKIP
        [Row(0=1, 1=2)]

        >>> sqlContext.createDataFrame(rdd, "a: string, b: int").collect()
        [Row(a=u'Alice', b=1)]
        >>> rdd = rdd.map(lambda row: row[1])
        >>> sqlContext.createDataFrame(rdd, "int").collect()
        [Row(value=1)]
        >>> sqlContext.createDataFrame(rdd, "boolean").collect() # doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
            ...
        Py4JJavaError: ...
        """
        return self.sparkSession.createDataFrame(data, schema, samplingRatio, verifySchema)

    @since(1.3)
    def registerDataFrameAsTable(self, df, tableName):
        """Registers the given :class:`DataFrame` as a temporary table in the catalog.

        Temporary tables exist only during the lifetime of this instance of :class:`SQLContext`.

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        """
        df.createOrReplaceTempView(tableName)

    @since(1.6)
    def dropTempTable(self, tableName):
        """ Remove the temporary table from catalog.

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> sqlContext.dropTempTable("table1")
        """
        self.sparkSession.catalog.dropTempView(tableName)

    @since(1.3)
    def createExternalTable(self, tableName, path=None, source=None, schema=None, **options):
        """Creates an external table based on the dataset in a data source.

        It returns the DataFrame associated with the external table.

        The data source is specified by the ``source`` and a set of ``options``.
        If ``source`` is not specified, the default data source configured by
        ``spark.sql.sources.default`` will be used.

        Optionally, a schema can be provided as the schema of the returned :class:`DataFrame` and
        created external table.

        :return: :class:`DataFrame`
        """
        return self.sparkSession.catalog.createExternalTable(
            tableName, path, source, schema, **options)

    @ignore_unicode_prefix
    @since(1.0)
    def sql(self, sqlQuery):
        """Returns a :class:`DataFrame` representing the result of the given query.

        :return: :class:`DataFrame`

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> df2 = sqlContext.sql("SELECT field1 AS f1, field2 as f2 from table1")
        >>> df2.collect()
        [Row(f1=1, f2=u'row1'), Row(f1=2, f2=u'row2'), Row(f1=3, f2=u'row3')]
        """
        return self.sparkSession.sql(sqlQuery)

    @since(1.0)
    def table(self, tableName):
        """Returns the specified table or view as a :class:`DataFrame`.

        :return: :class:`DataFrame`

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> df2 = sqlContext.table("table1")
        >>> sorted(df.collect()) == sorted(df2.collect())
        True
        """
        return self.sparkSession.table(tableName)

    @ignore_unicode_prefix
    @since(1.3)
    def tables(self, dbName=None):
        """Returns a :class:`DataFrame` containing names of tables in the given database.

        If ``dbName`` is not specified, the current database will be used.

        The returned DataFrame has two columns: ``tableName`` and ``isTemporary``
        (a column with :class:`BooleanType` indicating if a table is a temporary one or not).

        :param dbName: string, name of the database to use.
        :return: :class:`DataFrame`

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> df2 = sqlContext.tables()
        >>> df2.filter("tableName = 'table1'").first()
        Row(database=u'', tableName=u'table1', isTemporary=True)
        """
        if dbName is None:
            return DataFrame(self._ssql_ctx.tables(), self)
        else:
            return DataFrame(self._ssql_ctx.tables(dbName), self)

    @since(1.3)
    def tableNames(self, dbName=None):
        """Returns a list of names of tables in the database ``dbName``.

        :param dbName: string, name of the database to use. Default to the current database.
        :return: list of table names, in string

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> "table1" in sqlContext.tableNames()
        True
        >>> "table1" in sqlContext.tableNames("default")
        True
        """
        if dbName is None:
            return [name for name in self._ssql_ctx.tableNames()]
        else:
            return [name for name in self._ssql_ctx.tableNames(dbName)]

    @since(1.0)
    def cacheTable(self, tableName):
        """Caches the specified table in-memory."""
        self._ssql_ctx.cacheTable(tableName)

    @since(1.0)
    def uncacheTable(self, tableName):
        """Removes the specified table from the in-memory cache."""
        self._ssql_ctx.uncacheTable(tableName)

    @since(1.3)
    def clearCache(self):
        """Removes all cached tables from the in-memory cache.
        """
        self._ssql_ctx.clearCache()

    @property
    @since(1.4)
    def read(self):
        """
        Returns a :class:`DataFrameReader` that can be used to read data
        in as a :class:`DataFrame`.

        :return: :class:`DataFrameReader`
        """
        return DataFrameReader(self)

    @property
    @since(2.0)
    def readStream(self):
        """
        Returns a :class:`DataStreamReader` that can be used to read data streams
        as a streaming :class:`DataFrame`.

        .. note:: Evolving.

        :return: :class:`DataStreamReader`

        >>> text_sdf = sqlContext.readStream.text(tempfile.mkdtemp())
        >>> text_sdf.isStreaming
        True
        """
        return DataStreamReader(self)

    @property
    @since(2.0)
    def streams(self):
        """Returns a :class:`StreamingQueryManager` that allows managing all the
        :class:`StreamingQuery` StreamingQueries active on `this` context.

        .. note:: Evolving.
        """
        from pyspark.sql.streaming import StreamingQueryManager
        return StreamingQueryManager(self._ssql_ctx.streams())


class HiveContext(SQLContext):
    """A variant of Spark SQL that integrates with data stored in Hive.

    Configuration for Hive is read from ``hive-site.xml`` on the classpath.
    It supports running both SQL and HiveQL commands.

    :param sparkContext: The SparkContext to wrap.
    :param jhiveContext: An optional JVM Scala HiveContext. If set, we do not instantiate a new
        :class:`HiveContext` in the JVM, instead we make all calls to this object.

    .. note:: Deprecated in 2.0.0. Use SparkSession.builder.enableHiveSupport().getOrCreate().
    """

    def __init__(self, sparkContext, jhiveContext=None):
        warnings.warn(
            "HiveContext is deprecated in Spark 2.0.0. Please use " +
            "SparkSession.builder.enableHiveSupport().getOrCreate() instead.",
            DeprecationWarning)
        # Without an existing JVM HiveContext, build a Hive-enabled
        # SparkSession; otherwise wrap the session owned by the JVM object.
        if jhiveContext is None:
            sparkContext._conf.set("spark.sql.catalogImplementation", "hive")
            sparkSession = SparkSession.builder._sparkContext(sparkContext).getOrCreate()
        else:
            sparkSession = SparkSession(sparkContext, jhiveContext.sparkSession())
        SQLContext.__init__(self, sparkContext, sparkSession, jhiveContext)

    @classmethod
    def _createForTesting(cls, sparkContext):
        """(Internal use only) Create a new HiveContext for testing.

        All test code that touches HiveContext *must* go through this method. Otherwise,
        you may end up launching multiple derby instances and encounter with incredibly
        confusing error messages.
        """
        jsc = sparkContext._jsc.sc()
        jtestHive = sparkContext._jvm.org.apache.spark.sql.hive.test.TestHiveContext(jsc, False)
        return cls(sparkContext, jtestHive)

    def refreshTable(self, tableName):
        """Invalidate and refresh all the cached the metadata of the given
        table. For performance reasons, Spark SQL or the external data source
        library it uses might cache certain metadata about a table, such as the
        location of blocks. When those change outside of Spark SQL, users should
        call this function to invalidate the cache.
        """
        self._ssql_ctx.refreshTable(tableName)


def _test():
    # Run this module's doctests against a local 4-thread SparkContext with
    # a prepared set of globals (sc, sqlContext, sample rdd/df/json data).
    import os
    import doctest
    import tempfile
    from pyspark.context import SparkContext
    from pyspark.sql import Row, SQLContext
    import pyspark.sql.context

    os.chdir(os.environ["SPARK_HOME"])

    globs = pyspark.sql.context.__dict__.copy()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['tempfile'] = tempfile
    globs['os'] = os
    globs['sc'] = sc
    globs['sqlContext'] = SQLContext(sc)
    globs['rdd'] = rdd = sc.parallelize(
        [Row(field1=1, field2="row1"),
         Row(field1=2, field2="row2"),
         Row(field1=3, field2="row3")]
    )
    globs['df'] = rdd.toDF()
    jsonStrings = [
        '{"field1": 1, "field2": "row1", "field3":{"field4":11}}',
        '{"field1" : 2, "field3":{"field4":22, "field5": [10, 11]},"field6":[{"field7": "row2"}]}',
        '{"field1" : null, "field2": "row3", "field3":{"field4":33, "field5": []}}'
    ]
    globs['jsonStrings'] = jsonStrings
    globs['json'] = sc.parallelize(jsonStrings)
    (failure_count, test_count) = doctest.testmod(
        pyspark.sql.context, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    globs['sc'].stop()
    if failure_count:
        sys.exit(-1)


if __name__ == "__main__":
    _test()
#!/usr/bin/env python

"""ctypes wrapper around a bundled libsvm shared library.

This is a modified libsvm build: the kernel list includes CHI2 and HI
(histogram-intersection) and the library exports ``svm_get_kernel_value``.
The shared object is loaded from ./lib/<platform>/ at import time.
"""

from ctypes import *
from ctypes.util import find_library
import sys
import os
import platform

# Locate and load the platform-specific shared library shipped next to
# this file under ./lib.
LIB_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'lib')
if sys.platform.startswith('linux'):
    filename = os.path.join(LIB_PATH, 'linux', 'libsvm.so.2')
    libsvm = CDLL(filename)
elif sys.platform.startswith('darwin'):
    filename = os.path.join(LIB_PATH, 'mac', 'libsvm.so.2')
    libsvm = CDLL(filename)
else:
    libdir = 'win32' if platform.architecture()[0] == '32bit' else 'win64'
    filename = os.path.join(LIB_PATH, libdir, 'libsvm.dll')
    libsvm = cdll.LoadLibrary(filename)

# Construct constants: C_SVC = 0, NU_SVC = 1, ...; LINEAR = 0, POLY = 1, ...
SVM_TYPE = ['C_SVC', 'NU_SVC', 'ONE_CLASS', 'EPSILON_SVR', 'NU_SVR']
KERNEL_TYPE = ['LINEAR', 'POLY', 'RBF', 'SIGMOID', 'PRECOMPUTED', 'CHI2', 'HI']
# Direct globals() assignment instead of exec(): same effect (module-level
# constants), no string evaluation.
for i, s in enumerate(SVM_TYPE):
    globals()[s] = i
for i, s in enumerate(KERNEL_TYPE):
    globals()[s] = i

PRINT_STRING_FUN = CFUNCTYPE(None, c_char_p)


def print_null(s):
    """Swallow libsvm console output (installed by the -q option)."""
    return


def genFields(names, types):
    """Pair field names with ctypes types for a Structure's _fields_."""
    return list(zip(names, types))


def fillprototype(f, restype, argtypes):
    """Declare the C signature (restype/argtypes) of a libsvm function."""
    f.restype = restype
    f.argtypes = argtypes


class svm_node(Structure):
    """One (index, value) feature entry; arrays are -1-index terminated."""
    _names = ["index", "value"]
    _types = [c_int, c_double]
    _fields_ = genFields(_names, _types)


def gen_svm_nodearray(xi, feature_max=None, issparse=None):
    """Convert one sample into a C array of svm_node.

    :param xi: dict {index: value}, or list/tuple of values (indexed 0..n-1).
    :param feature_max: if truthy, drop features with index > feature_max.
    :param issparse: if truthy, drop zero-valued features.
    :return: (node_array, max_index); node_array ends with a sentinel whose
             index is -1, as libsvm expects.
    :raises TypeError: if xi is not a dict, list or tuple.
    """
    if isinstance(xi, dict):
        index_range = xi.keys()
    elif isinstance(xi, (list, tuple)):
        index_range = range(len(xi))
    else:
        raise TypeError('xi should be a dictionary, list or tuple')

    if feature_max:
        assert isinstance(feature_max, int)
        index_range = filter(lambda j: j <= feature_max, index_range)
    if issparse:
        index_range = filter(lambda j: xi[j] != 0, index_range)

    index_range = sorted(index_range)
    ret = (svm_node * (len(index_range) + 1))()
    ret[-1].index = -1  # sentinel terminator expected by libsvm
    for idx, j in enumerate(index_range):
        ret[idx].index = j
        ret[idx].value = xi[j]
    max_idx = 0
    if index_range:
        max_idx = index_range[-1]
    return ret, max_idx


class svm_problem(Structure):
    """The training problem: l samples, labels y, feature vectors x."""
    _names = ["l", "y", "x"]
    _types = [c_int, POINTER(c_double), POINTER(POINTER(svm_node))]
    _fields_ = genFields(_names, _types)

    def __init__(self, y, x):
        """Build the C-side problem from Python labels y and samples x.

        Keeps the node arrays alive on self.x_space so the C pointers in
        self.x stay valid for the lifetime of the problem.
        """
        if len(y) != len(x):
            raise ValueError("len(y) != len(x)")
        self.l = l = len(y)

        max_idx = 0
        x_space = self.x_space = []
        for i, xi in enumerate(x):
            tmp_xi, tmp_idx = gen_svm_nodearray(xi)
            x_space += [tmp_xi]
            max_idx = max(max_idx, tmp_idx)
        self.n = max_idx  # highest feature index seen

        self.y = (c_double * l)()
        for i, yi in enumerate(y):
            self.y[i] = yi
        self.x = (POINTER(svm_node) * l)()
        for i, xi in enumerate(self.x_space):
            self.x[i] = xi


class svm_parameter(Structure):
    """Training parameters, settable from an svm-train-style option string."""
    _names = ["svm_type", "kernel_type", "degree", "gamma", "coef0",
              "cache_size", "eps", "C", "nr_weight", "weight_label",
              "weight", "nu", "p", "shrinking", "probability"]
    _types = [c_int, c_int, c_int, c_double, c_double, c_double, c_double,
              c_double, c_int, POINTER(c_int), POINTER(c_double),
              c_double, c_double, c_int, c_int]
    _fields_ = genFields(_names, _types)

    def __init__(self, options=None):
        # Fixed: identity comparison with None (was `options == None`).
        if options is None:
            options = ''
        self.parse_options(options)

    def show(self):
        """Print every C field and every extra Python attribute."""
        # Fixed for Python 3: dict.keys() is a view and cannot be
        # concatenated to a list without list().
        attrs = svm_parameter._names + list(self.__dict__.keys())
        values = map(lambda attr: getattr(self, attr), attrs)
        for attr, val in zip(attrs, values):
            print(' %s: %s' % (attr, val))

    def set_to_default_values(self):
        """Reset all parameters to the libsvm defaults."""
        self.svm_type = C_SVC
        self.kernel_type = RBF
        self.degree = 3
        self.gamma = 0
        self.coef0 = 0
        self.nu = 0.5
        self.cache_size = 100
        self.C = 1
        self.eps = 0.001
        self.p = 0.1
        self.shrinking = 1
        self.probability = 0
        self.nr_weight = 0
        self.weight_label = (c_int * 0)()
        self.weight = (c_double * 0)()
        self.cross_validation = False
        self.nr_fold = 0
        self.print_func = None

    def parse_options(self, options):
        """Parse an svm-train-style option string (e.g. '-c 1 -t 2 -q').

        :raises ValueError: on an unknown option or an invalid -v fold count.
        """
        argv = options.split()
        self.set_to_default_values()
        self.print_func = cast(None, PRINT_STRING_FUN)
        weight_label = []
        weight = []

        i = 0
        while i < len(argv):
            if argv[i] == "-s":
                i = i + 1
                self.svm_type = int(argv[i])
            elif argv[i] == "-t":
                i = i + 1
                self.kernel_type = int(argv[i])
            elif argv[i] == "-d":
                i = i + 1
                self.degree = int(argv[i])
            elif argv[i] == "-g":
                i = i + 1
                self.gamma = float(argv[i])
            elif argv[i] == "-r":
                i = i + 1
                self.coef0 = float(argv[i])
            elif argv[i] == "-n":
                i = i + 1
                self.nu = float(argv[i])
            elif argv[i] == "-m":
                i = i + 1
                self.cache_size = float(argv[i])
            elif argv[i] == "-c":
                i = i + 1
                self.C = float(argv[i])
            elif argv[i] == "-e":
                i = i + 1
                self.eps = float(argv[i])
            elif argv[i] == "-p":
                i = i + 1
                self.p = float(argv[i])
            elif argv[i] == "-h":
                i = i + 1
                self.shrinking = int(argv[i])
            elif argv[i] == "-b":
                i = i + 1
                self.probability = int(argv[i])
            elif argv[i] == "-q":
                self.print_func = PRINT_STRING_FUN(print_null)
            elif argv[i] == "-v":
                i = i + 1
                self.cross_validation = 1
                self.nr_fold = int(argv[i])
                if self.nr_fold < 2:
                    raise ValueError("n-fold cross validation: n must >= 2")
            elif argv[i].startswith("-w"):
                # -wN VALUE: per-class weight; the label N is embedded in
                # the option itself.
                i = i + 1
                self.nr_weight += 1
                weight_label += [int(argv[i - 1][2:])]
                weight += [float(argv[i])]
            else:
                raise ValueError("Wrong options")
            i += 1

        libsvm.svm_set_print_string_function(self.print_func)
        self.weight_label = (c_int * self.nr_weight)()
        self.weight = (c_double * self.nr_weight)()
        for i in range(self.nr_weight):
            self.weight[i] = weight[i]
            self.weight_label[i] = weight_label[i]


class svm_model(Structure):
    """Opaque trained model; its fields live on the C side."""

    def __init__(self):
        self.__createfrom__ = 'python'

    def __del__(self):
        # Free memory created by C to avoid a leak; Python-created models
        # own no C memory.
        if hasattr(self, '__createfrom__') and self.__createfrom__ == 'C':
            libsvm.svm_free_and_destroy_model(pointer(self))

    def get_svm_type(self):
        return libsvm.svm_get_svm_type(self)

    def get_nr_class(self):
        return libsvm.svm_get_nr_class(self)

    def get_svr_probability(self):
        return libsvm.svm_get_svr_probability(self)

    def get_labels(self):
        nr_class = self.get_nr_class()
        labels = (c_int * nr_class)()
        libsvm.svm_get_labels(self, labels)
        return labels[:nr_class]

    def is_probability_model(self):
        return (libsvm.svm_check_probability_model(self) == 1)


def toPyModel(model_ptr):
    """
    toPyModel(model_ptr) -> svm_model

    Convert a ctypes POINTER(svm_model) to a Python svm_model.

    :raises ValueError: if model_ptr is NULL.
    """
    # Fixed idiom: was `if bool(model_ptr) == False:`.
    if not model_ptr:
        raise ValueError("Null pointer")
    m = model_ptr.contents
    m.__createfrom__ = 'C'  # mark for C-side cleanup in __del__
    return m


# Declare the C prototypes of every exported libsvm function we use.
fillprototype(libsvm.svm_train, POINTER(svm_model),
              [POINTER(svm_problem), POINTER(svm_parameter)])
fillprototype(libsvm.svm_cross_validation, None,
              [POINTER(svm_problem), POINTER(svm_parameter), c_int,
               POINTER(c_double)])
fillprototype(libsvm.svm_save_model, c_int, [c_char_p, POINTER(svm_model)])
fillprototype(libsvm.svm_load_model, POINTER(svm_model), [c_char_p])
fillprototype(libsvm.svm_get_svm_type, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_nr_class, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_labels, None,
              [POINTER(svm_model), POINTER(c_int)])
fillprototype(libsvm.svm_get_svr_probability, c_double, [POINTER(svm_model)])
fillprototype(libsvm.svm_predict_values, c_double,
              [POINTER(svm_model), POINTER(svm_node), POINTER(c_double)])
fillprototype(libsvm.svm_predict, c_double,
              [POINTER(svm_model), POINTER(svm_node)])
fillprototype(libsvm.svm_predict_probability, c_double,
              [POINTER(svm_model), POINTER(svm_node), POINTER(c_double)])
fillprototype(libsvm.svm_free_model_content, None, [POINTER(svm_model)])
fillprototype(libsvm.svm_free_and_destroy_model, None,
              [POINTER(POINTER(svm_model))])
fillprototype(libsvm.svm_destroy_param, None, [POINTER(svm_parameter)])
fillprototype(libsvm.svm_check_parameter, c_char_p,
              [POINTER(svm_problem), POINTER(svm_parameter)])
fillprototype(libsvm.svm_check_probability_model, c_int,
              [POINTER(svm_model)])
fillprototype(libsvm.svm_set_print_string_function, None, [PRINT_STRING_FUN])
fillprototype(libsvm.svm_get_kernel_value, c_double,
              [POINTER(svm_node), POINTER(svm_node), POINTER(svm_parameter)])


def svm_predict(model, x):
    """Predict one sample with a two-class model.

    :param model: a trained svm_model (must have exactly 2 classes).
    :param x: one sample, as accepted by gen_svm_nodearray.
    :return: (label, score) — score is a probability if the model supports
             probability estimates, otherwise a signed decision value
             oriented by the first model label.
    """
    nr_class = model.get_nr_class()
    assert nr_class == 2
    dec_values = (c_double * nr_class)()
    node_x = gen_svm_nodearray(x)[0]
    if model.is_probability_model():
        label = libsvm.svm_predict_probability(model, node_x, dec_values)
        # Orient the score so it is the probability of the positive class.
        if model.get_labels()[0] == -1:
            score = dec_values[1]
        else:
            score = dec_values[0]
    else:
        label = libsvm.svm_predict_values(model, node_x, dec_values)
        score = model.get_labels()[0] * dec_values[0]
    return (label, score)
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import datetime from typing import Dict, List, Optional, Union from azure.core.exceptions import HttpResponseError import msrest.serialization from ._key_vault_client_enums import * class Attributes(msrest.serialization.Model): """The object attributes managed by the KeyVault service. Variables are only populated by the server, and will be ignored when sending a request. :ivar enabled: Determines whether the object is enabled. :vartype enabled: bool :ivar not_before: Not before date in UTC. :vartype not_before: ~datetime.datetime :ivar expires: Expiry date in UTC. :vartype expires: ~datetime.datetime :ivar created: Creation time in UTC. :vartype created: ~datetime.datetime :ivar updated: Last updated time in UTC. :vartype updated: ~datetime.datetime """ _validation = { 'created': {'readonly': True}, 'updated': {'readonly': True}, } _attribute_map = { 'enabled': {'key': 'enabled', 'type': 'bool'}, 'not_before': {'key': 'nbf', 'type': 'unix-time'}, 'expires': {'key': 'exp', 'type': 'unix-time'}, 'created': {'key': 'created', 'type': 'unix-time'}, 'updated': {'key': 'updated', 'type': 'unix-time'}, } def __init__( self, *, enabled: Optional[bool] = None, not_before: Optional[datetime.datetime] = None, expires: Optional[datetime.datetime] = None, **kwargs ): """ :keyword enabled: Determines whether the object is enabled. :paramtype enabled: bool :keyword not_before: Not before date in UTC. :paramtype not_before: ~datetime.datetime :keyword expires: Expiry date in UTC. 
:paramtype expires: ~datetime.datetime """ super(Attributes, self).__init__(**kwargs) self.enabled = enabled self.not_before = not_before self.expires = expires self.created = None self.updated = None class BackupKeyResult(msrest.serialization.Model): """The backup key result, containing the backup blob. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: The backup blob containing the backed up key. :vartype value: bytes """ _validation = { 'value': {'readonly': True}, } _attribute_map = { 'value': {'key': 'value', 'type': 'base64'}, } def __init__( self, **kwargs ): """ """ super(BackupKeyResult, self).__init__(**kwargs) self.value = None class KeyBundle(msrest.serialization.Model): """A KeyBundle consisting of a WebKey plus its attributes. Variables are only populated by the server, and will be ignored when sending a request. :ivar key: The Json web key. :vartype key: ~azure.keyvault.v7_2.models.JsonWebKey :ivar attributes: The key management attributes. :vartype attributes: ~azure.keyvault.v7_2.models.KeyAttributes :ivar tags: A set of tags. Application specific metadata in the form of key-value pairs. :vartype tags: dict[str, str] :ivar managed: True if the key's lifetime is managed by key vault. If this is a key backing a certificate, then managed will be true. :vartype managed: bool """ _validation = { 'managed': {'readonly': True}, } _attribute_map = { 'key': {'key': 'key', 'type': 'JsonWebKey'}, 'attributes': {'key': 'attributes', 'type': 'KeyAttributes'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'managed': {'key': 'managed', 'type': 'bool'}, } def __init__( self, *, key: Optional["JsonWebKey"] = None, attributes: Optional["KeyAttributes"] = None, tags: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword key: The Json web key. :paramtype key: ~azure.keyvault.v7_2.models.JsonWebKey :keyword attributes: The key management attributes. 
:paramtype attributes: ~azure.keyvault.v7_2.models.KeyAttributes :keyword tags: A set of tags. Application specific metadata in the form of key-value pairs. :paramtype tags: dict[str, str] """ super(KeyBundle, self).__init__(**kwargs) self.key = key self.attributes = attributes self.tags = tags self.managed = None class DeletedKeyBundle(KeyBundle): """A DeletedKeyBundle consisting of a WebKey plus its Attributes and deletion info. Variables are only populated by the server, and will be ignored when sending a request. :ivar key: The Json web key. :vartype key: ~azure.keyvault.v7_2.models.JsonWebKey :ivar attributes: The key management attributes. :vartype attributes: ~azure.keyvault.v7_2.models.KeyAttributes :ivar tags: A set of tags. Application specific metadata in the form of key-value pairs. :vartype tags: dict[str, str] :ivar managed: True if the key's lifetime is managed by key vault. If this is a key backing a certificate, then managed will be true. :vartype managed: bool :ivar recovery_id: The url of the recovery object, used to identify and recover the deleted key. :vartype recovery_id: str :ivar scheduled_purge_date: The time when the key is scheduled to be purged, in UTC. :vartype scheduled_purge_date: ~datetime.datetime :ivar deleted_date: The time when the key was deleted, in UTC. 
:vartype deleted_date: ~datetime.datetime """ _validation = { 'managed': {'readonly': True}, 'scheduled_purge_date': {'readonly': True}, 'deleted_date': {'readonly': True}, } _attribute_map = { 'key': {'key': 'key', 'type': 'JsonWebKey'}, 'attributes': {'key': 'attributes', 'type': 'KeyAttributes'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'managed': {'key': 'managed', 'type': 'bool'}, 'recovery_id': {'key': 'recoveryId', 'type': 'str'}, 'scheduled_purge_date': {'key': 'scheduledPurgeDate', 'type': 'unix-time'}, 'deleted_date': {'key': 'deletedDate', 'type': 'unix-time'}, } def __init__( self, *, key: Optional["JsonWebKey"] = None, attributes: Optional["KeyAttributes"] = None, tags: Optional[Dict[str, str]] = None, recovery_id: Optional[str] = None, **kwargs ): """ :keyword key: The Json web key. :paramtype key: ~azure.keyvault.v7_2.models.JsonWebKey :keyword attributes: The key management attributes. :paramtype attributes: ~azure.keyvault.v7_2.models.KeyAttributes :keyword tags: A set of tags. Application specific metadata in the form of key-value pairs. :paramtype tags: dict[str, str] :keyword recovery_id: The url of the recovery object, used to identify and recover the deleted key. :paramtype recovery_id: str """ super(DeletedKeyBundle, self).__init__(key=key, attributes=attributes, tags=tags, **kwargs) self.recovery_id = recovery_id self.scheduled_purge_date = None self.deleted_date = None class KeyItem(msrest.serialization.Model): """The key item containing key metadata. Variables are only populated by the server, and will be ignored when sending a request. :ivar kid: Key identifier. :vartype kid: str :ivar attributes: The key management attributes. :vartype attributes: ~azure.keyvault.v7_2.models.KeyAttributes :ivar tags: A set of tags. Application specific metadata in the form of key-value pairs. :vartype tags: dict[str, str] :ivar managed: True if the key's lifetime is managed by key vault. If this is a key backing a certificate, then managed will be true. 
:vartype managed: bool """ _validation = { 'managed': {'readonly': True}, } _attribute_map = { 'kid': {'key': 'kid', 'type': 'str'}, 'attributes': {'key': 'attributes', 'type': 'KeyAttributes'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'managed': {'key': 'managed', 'type': 'bool'}, } def __init__( self, *, kid: Optional[str] = None, attributes: Optional["KeyAttributes"] = None, tags: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword kid: Key identifier. :paramtype kid: str :keyword attributes: The key management attributes. :paramtype attributes: ~azure.keyvault.v7_2.models.KeyAttributes :keyword tags: A set of tags. Application specific metadata in the form of key-value pairs. :paramtype tags: dict[str, str] """ super(KeyItem, self).__init__(**kwargs) self.kid = kid self.attributes = attributes self.tags = tags self.managed = None class DeletedKeyItem(KeyItem): """The deleted key item containing the deleted key metadata and information about deletion. Variables are only populated by the server, and will be ignored when sending a request. :ivar kid: Key identifier. :vartype kid: str :ivar attributes: The key management attributes. :vartype attributes: ~azure.keyvault.v7_2.models.KeyAttributes :ivar tags: A set of tags. Application specific metadata in the form of key-value pairs. :vartype tags: dict[str, str] :ivar managed: True if the key's lifetime is managed by key vault. If this is a key backing a certificate, then managed will be true. :vartype managed: bool :ivar recovery_id: The url of the recovery object, used to identify and recover the deleted key. :vartype recovery_id: str :ivar scheduled_purge_date: The time when the key is scheduled to be purged, in UTC. :vartype scheduled_purge_date: ~datetime.datetime :ivar deleted_date: The time when the key was deleted, in UTC. 
:vartype deleted_date: ~datetime.datetime """ _validation = { 'managed': {'readonly': True}, 'scheduled_purge_date': {'readonly': True}, 'deleted_date': {'readonly': True}, } _attribute_map = { 'kid': {'key': 'kid', 'type': 'str'}, 'attributes': {'key': 'attributes', 'type': 'KeyAttributes'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'managed': {'key': 'managed', 'type': 'bool'}, 'recovery_id': {'key': 'recoveryId', 'type': 'str'}, 'scheduled_purge_date': {'key': 'scheduledPurgeDate', 'type': 'unix-time'}, 'deleted_date': {'key': 'deletedDate', 'type': 'unix-time'}, } def __init__( self, *, kid: Optional[str] = None, attributes: Optional["KeyAttributes"] = None, tags: Optional[Dict[str, str]] = None, recovery_id: Optional[str] = None, **kwargs ): """ :keyword kid: Key identifier. :paramtype kid: str :keyword attributes: The key management attributes. :paramtype attributes: ~azure.keyvault.v7_2.models.KeyAttributes :keyword tags: A set of tags. Application specific metadata in the form of key-value pairs. :paramtype tags: dict[str, str] :keyword recovery_id: The url of the recovery object, used to identify and recover the deleted key. :paramtype recovery_id: str """ super(DeletedKeyItem, self).__init__(kid=kid, attributes=attributes, tags=tags, **kwargs) self.recovery_id = recovery_id self.scheduled_purge_date = None self.deleted_date = None class DeletedKeyListResult(msrest.serialization.Model): """A list of keys that have been deleted in this vault. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A response message containing a list of deleted keys in the vault along with a link to the next page of deleted keys. :vartype value: list[~azure.keyvault.v7_2.models.DeletedKeyItem] :ivar next_link: The URL to get the next set of deleted keys. 
:vartype next_link: str """ _validation = { 'value': {'readonly': True}, 'next_link': {'readonly': True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[DeletedKeyItem]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): """ """ super(DeletedKeyListResult, self).__init__(**kwargs) self.value = None self.next_link = None class Error(msrest.serialization.Model): """The key vault server error. Variables are only populated by the server, and will be ignored when sending a request. :ivar code: The error code. :vartype code: str :ivar message: The error message. :vartype message: str :ivar inner_error: The key vault server error. :vartype inner_error: ~azure.keyvault.v7_2.models.Error """ _validation = { 'code': {'readonly': True}, 'message': {'readonly': True}, 'inner_error': {'readonly': True}, } _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'inner_error': {'key': 'innererror', 'type': 'Error'}, } def __init__( self, **kwargs ): """ """ super(Error, self).__init__(**kwargs) self.code = None self.message = None self.inner_error = None class JsonWebKey(msrest.serialization.Model): """As of http://tools.ietf.org/html/draft-ietf-jose-json-web-key-18. :ivar kid: Key identifier. :vartype kid: str :ivar kty: JsonWebKey Key Type (kty), as defined in https://tools.ietf.org/html/draft-ietf-jose-json-web-algorithms-40. Possible values include: "EC", "EC-HSM", "RSA", "RSA-HSM", "oct", "oct-HSM". :vartype kty: str or ~azure.keyvault.v7_2.models.JsonWebKeyType :ivar key_ops: :vartype key_ops: list[str] :ivar n: RSA modulus. :vartype n: bytes :ivar e: RSA public exponent. :vartype e: bytes :ivar d: RSA private exponent, or the D component of an EC private key. :vartype d: bytes :ivar dp: RSA private key parameter. :vartype dp: bytes :ivar dq: RSA private key parameter. :vartype dq: bytes :ivar qi: RSA private key parameter. :vartype qi: bytes :ivar p: RSA secret prime. 
:vartype p: bytes :ivar q: RSA secret prime, with p < q. :vartype q: bytes :ivar k: Symmetric key. :vartype k: bytes :ivar t: Protected Key, used with 'Bring Your Own Key'. :vartype t: bytes :ivar crv: Elliptic curve name. For valid values, see JsonWebKeyCurveName. Possible values include: "P-256", "P-384", "P-521", "P-256K". :vartype crv: str or ~azure.keyvault.v7_2.models.JsonWebKeyCurveName :ivar x: X component of an EC public key. :vartype x: bytes :ivar y: Y component of an EC public key. :vartype y: bytes """ _attribute_map = { 'kid': {'key': 'kid', 'type': 'str'}, 'kty': {'key': 'kty', 'type': 'str'}, 'key_ops': {'key': 'key_ops', 'type': '[str]'}, 'n': {'key': 'n', 'type': 'base64'}, 'e': {'key': 'e', 'type': 'base64'}, 'd': {'key': 'd', 'type': 'base64'}, 'dp': {'key': 'dp', 'type': 'base64'}, 'dq': {'key': 'dq', 'type': 'base64'}, 'qi': {'key': 'qi', 'type': 'base64'}, 'p': {'key': 'p', 'type': 'base64'}, 'q': {'key': 'q', 'type': 'base64'}, 'k': {'key': 'k', 'type': 'base64'}, 't': {'key': 'key_hsm', 'type': 'base64'}, 'crv': {'key': 'crv', 'type': 'str'}, 'x': {'key': 'x', 'type': 'base64'}, 'y': {'key': 'y', 'type': 'base64'}, } def __init__( self, *, kid: Optional[str] = None, kty: Optional[Union[str, "JsonWebKeyType"]] = None, key_ops: Optional[List[str]] = None, n: Optional[bytes] = None, e: Optional[bytes] = None, d: Optional[bytes] = None, dp: Optional[bytes] = None, dq: Optional[bytes] = None, qi: Optional[bytes] = None, p: Optional[bytes] = None, q: Optional[bytes] = None, k: Optional[bytes] = None, t: Optional[bytes] = None, crv: Optional[Union[str, "JsonWebKeyCurveName"]] = None, x: Optional[bytes] = None, y: Optional[bytes] = None, **kwargs ): """ :keyword kid: Key identifier. :paramtype kid: str :keyword kty: JsonWebKey Key Type (kty), as defined in https://tools.ietf.org/html/draft-ietf-jose-json-web-algorithms-40. Possible values include: "EC", "EC-HSM", "RSA", "RSA-HSM", "oct", "oct-HSM". 
:paramtype kty: str or ~azure.keyvault.v7_2.models.JsonWebKeyType :keyword key_ops: :paramtype key_ops: list[str] :keyword n: RSA modulus. :paramtype n: bytes :keyword e: RSA public exponent. :paramtype e: bytes :keyword d: RSA private exponent, or the D component of an EC private key. :paramtype d: bytes :keyword dp: RSA private key parameter. :paramtype dp: bytes :keyword dq: RSA private key parameter. :paramtype dq: bytes :keyword qi: RSA private key parameter. :paramtype qi: bytes :keyword p: RSA secret prime. :paramtype p: bytes :keyword q: RSA secret prime, with p < q. :paramtype q: bytes :keyword k: Symmetric key. :paramtype k: bytes :keyword t: Protected Key, used with 'Bring Your Own Key'. :paramtype t: bytes :keyword crv: Elliptic curve name. For valid values, see JsonWebKeyCurveName. Possible values include: "P-256", "P-384", "P-521", "P-256K". :paramtype crv: str or ~azure.keyvault.v7_2.models.JsonWebKeyCurveName :keyword x: X component of an EC public key. :paramtype x: bytes :keyword y: Y component of an EC public key. :paramtype y: bytes """ super(JsonWebKey, self).__init__(**kwargs) self.kid = kid self.kty = kty self.key_ops = key_ops self.n = n self.e = e self.d = d self.dp = dp self.dq = dq self.qi = qi self.p = p self.q = q self.k = k self.t = t self.crv = crv self.x = x self.y = y class KeyAttributes(Attributes): """The attributes of a key managed by the key vault service. Variables are only populated by the server, and will be ignored when sending a request. :ivar enabled: Determines whether the object is enabled. :vartype enabled: bool :ivar not_before: Not before date in UTC. :vartype not_before: ~datetime.datetime :ivar expires: Expiry date in UTC. :vartype expires: ~datetime.datetime :ivar created: Creation time in UTC. :vartype created: ~datetime.datetime :ivar updated: Last updated time in UTC. :vartype updated: ~datetime.datetime :ivar recoverable_days: softDelete data retention days. 
Value should be >=7 and <=90 when softDelete enabled, otherwise 0. :vartype recoverable_days: int :ivar recovery_level: Reflects the deletion recovery level currently in effect for keys in the current vault. If it contains 'Purgeable' the key can be permanently deleted by a privileged user; otherwise, only the system can purge the key, at the end of the retention interval. Possible values include: "Purgeable", "Recoverable+Purgeable", "Recoverable", "Recoverable+ProtectedSubscription", "CustomizedRecoverable+Purgeable", "CustomizedRecoverable", "CustomizedRecoverable+ProtectedSubscription". :vartype recovery_level: str or ~azure.keyvault.v7_2.models.DeletionRecoveryLevel """ _validation = { 'created': {'readonly': True}, 'updated': {'readonly': True}, 'recoverable_days': {'readonly': True}, 'recovery_level': {'readonly': True}, } _attribute_map = { 'enabled': {'key': 'enabled', 'type': 'bool'}, 'not_before': {'key': 'nbf', 'type': 'unix-time'}, 'expires': {'key': 'exp', 'type': 'unix-time'}, 'created': {'key': 'created', 'type': 'unix-time'}, 'updated': {'key': 'updated', 'type': 'unix-time'}, 'recoverable_days': {'key': 'recoverableDays', 'type': 'int'}, 'recovery_level': {'key': 'recoveryLevel', 'type': 'str'}, } def __init__( self, *, enabled: Optional[bool] = None, not_before: Optional[datetime.datetime] = None, expires: Optional[datetime.datetime] = None, **kwargs ): """ :keyword enabled: Determines whether the object is enabled. :paramtype enabled: bool :keyword not_before: Not before date in UTC. :paramtype not_before: ~datetime.datetime :keyword expires: Expiry date in UTC. :paramtype expires: ~datetime.datetime """ super(KeyAttributes, self).__init__(enabled=enabled, not_before=not_before, expires=expires, **kwargs) self.recoverable_days = None self.recovery_level = None class KeyCreateParameters(msrest.serialization.Model): """The key create parameters. All required parameters must be populated in order to send to Azure. :ivar kty: Required. 
The type of key to create. For valid values, see JsonWebKeyType. Possible values include: "EC", "EC-HSM", "RSA", "RSA-HSM", "oct", "oct-HSM". :vartype kty: str or ~azure.keyvault.v7_2.models.JsonWebKeyType :ivar key_size: The key size in bits. For example: 2048, 3072, or 4096 for RSA. :vartype key_size: int :ivar public_exponent: The public exponent for a RSA key. :vartype public_exponent: int :ivar key_ops: :vartype key_ops: list[str or ~azure.keyvault.v7_2.models.JsonWebKeyOperation] :ivar key_attributes: The attributes of a key managed by the key vault service. :vartype key_attributes: ~azure.keyvault.v7_2.models.KeyAttributes :ivar tags: A set of tags. Application specific metadata in the form of key-value pairs. :vartype tags: dict[str, str] :ivar curve: Elliptic curve name. For valid values, see JsonWebKeyCurveName. Possible values include: "P-256", "P-384", "P-521", "P-256K". :vartype curve: str or ~azure.keyvault.v7_2.models.JsonWebKeyCurveName """ _validation = { 'kty': {'required': True}, } _attribute_map = { 'kty': {'key': 'kty', 'type': 'str'}, 'key_size': {'key': 'key_size', 'type': 'int'}, 'public_exponent': {'key': 'public_exponent', 'type': 'int'}, 'key_ops': {'key': 'key_ops', 'type': '[str]'}, 'key_attributes': {'key': 'attributes', 'type': 'KeyAttributes'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'curve': {'key': 'crv', 'type': 'str'}, } def __init__( self, *, kty: Union[str, "JsonWebKeyType"], key_size: Optional[int] = None, public_exponent: Optional[int] = None, key_ops: Optional[List[Union[str, "JsonWebKeyOperation"]]] = None, key_attributes: Optional["KeyAttributes"] = None, tags: Optional[Dict[str, str]] = None, curve: Optional[Union[str, "JsonWebKeyCurveName"]] = None, **kwargs ): """ :keyword kty: Required. The type of key to create. For valid values, see JsonWebKeyType. Possible values include: "EC", "EC-HSM", "RSA", "RSA-HSM", "oct", "oct-HSM". 
:paramtype kty: str or ~azure.keyvault.v7_2.models.JsonWebKeyType :keyword key_size: The key size in bits. For example: 2048, 3072, or 4096 for RSA. :paramtype key_size: int :keyword public_exponent: The public exponent for a RSA key. :paramtype public_exponent: int :keyword key_ops: :paramtype key_ops: list[str or ~azure.keyvault.v7_2.models.JsonWebKeyOperation] :keyword key_attributes: The attributes of a key managed by the key vault service. :paramtype key_attributes: ~azure.keyvault.v7_2.models.KeyAttributes :keyword tags: A set of tags. Application specific metadata in the form of key-value pairs. :paramtype tags: dict[str, str] :keyword curve: Elliptic curve name. For valid values, see JsonWebKeyCurveName. Possible values include: "P-256", "P-384", "P-521", "P-256K". :paramtype curve: str or ~azure.keyvault.v7_2.models.JsonWebKeyCurveName """ super(KeyCreateParameters, self).__init__(**kwargs) self.kty = kty self.key_size = key_size self.public_exponent = public_exponent self.key_ops = key_ops self.key_attributes = key_attributes self.tags = tags self.curve = curve class KeyImportParameters(msrest.serialization.Model): """The key import parameters. All required parameters must be populated in order to send to Azure. :ivar hsm: Whether to import as a hardware key (HSM) or software key. :vartype hsm: bool :ivar key: Required. The Json web key. :vartype key: ~azure.keyvault.v7_2.models.JsonWebKey :ivar key_attributes: The key management attributes. :vartype key_attributes: ~azure.keyvault.v7_2.models.KeyAttributes :ivar tags: A set of tags. Application specific metadata in the form of key-value pairs. 
:vartype tags: dict[str, str] """ _validation = { 'key': {'required': True}, } _attribute_map = { 'hsm': {'key': 'Hsm', 'type': 'bool'}, 'key': {'key': 'key', 'type': 'JsonWebKey'}, 'key_attributes': {'key': 'attributes', 'type': 'KeyAttributes'}, 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__( self, *, key: "JsonWebKey", hsm: Optional[bool] = None, key_attributes: Optional["KeyAttributes"] = None, tags: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword hsm: Whether to import as a hardware key (HSM) or software key. :paramtype hsm: bool :keyword key: Required. The Json web key. :paramtype key: ~azure.keyvault.v7_2.models.JsonWebKey :keyword key_attributes: The key management attributes. :paramtype key_attributes: ~azure.keyvault.v7_2.models.KeyAttributes :keyword tags: A set of tags. Application specific metadata in the form of key-value pairs. :paramtype tags: dict[str, str] """ super(KeyImportParameters, self).__init__(**kwargs) self.hsm = hsm self.key = key self.key_attributes = key_attributes self.tags = tags class KeyListResult(msrest.serialization.Model): """The key list result. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A response message containing a list of keys in the key vault along with a link to the next page of keys. :vartype value: list[~azure.keyvault.v7_2.models.KeyItem] :ivar next_link: The URL to get the next set of keys. :vartype next_link: str """ _validation = { 'value': {'readonly': True}, 'next_link': {'readonly': True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[KeyItem]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): """ """ super(KeyListResult, self).__init__(**kwargs) self.value = None self.next_link = None class KeyOperationResult(msrest.serialization.Model): """The key operation result. Variables are only populated by the server, and will be ignored when sending a request. :ivar kid: Key identifier. 
:vartype kid: str :ivar result: :vartype result: bytes :ivar iv: :vartype iv: bytes :ivar authentication_tag: :vartype authentication_tag: bytes :ivar additional_authenticated_data: :vartype additional_authenticated_data: bytes """ _validation = { 'kid': {'readonly': True}, 'result': {'readonly': True}, 'iv': {'readonly': True}, 'authentication_tag': {'readonly': True}, 'additional_authenticated_data': {'readonly': True}, } _attribute_map = { 'kid': {'key': 'kid', 'type': 'str'}, 'result': {'key': 'value', 'type': 'base64'}, 'iv': {'key': 'iv', 'type': 'base64'}, 'authentication_tag': {'key': 'tag', 'type': 'base64'}, 'additional_authenticated_data': {'key': 'aad', 'type': 'base64'}, } def __init__( self, **kwargs ): """ """ super(KeyOperationResult, self).__init__(**kwargs) self.kid = None self.result = None self.iv = None self.authentication_tag = None self.additional_authenticated_data = None class KeyOperationsParameters(msrest.serialization.Model): """The key operations parameters. All required parameters must be populated in order to send to Azure. :ivar algorithm: Required. algorithm identifier. Possible values include: "RSA-OAEP", "RSA-OAEP-256", "RSA1_5", "A128GCM", "A192GCM", "A256GCM", "A128KW", "A192KW", "A256KW", "A128CBC", "A192CBC", "A256CBC", "A128CBCPAD", "A192CBCPAD", "A256CBCPAD". :vartype algorithm: str or ~azure.keyvault.v7_2.models.JsonWebKeyEncryptionAlgorithm :ivar value: Required. :vartype value: bytes :ivar iv: Initialization vector for symmetric algorithms. :vartype iv: bytes :ivar aad: Additional data to authenticate but not encrypt/decrypt when using authenticated crypto algorithms. :vartype aad: bytes :ivar tag: The tag to authenticate when performing decryption with an authenticated algorithm. 
:vartype tag: bytes """ _validation = { 'algorithm': {'required': True}, 'value': {'required': True}, } _attribute_map = { 'algorithm': {'key': 'alg', 'type': 'str'}, 'value': {'key': 'value', 'type': 'base64'}, 'iv': {'key': 'iv', 'type': 'base64'}, 'aad': {'key': 'aad', 'type': 'base64'}, 'tag': {'key': 'tag', 'type': 'base64'}, } def __init__( self, *, algorithm: Union[str, "JsonWebKeyEncryptionAlgorithm"], value: bytes, iv: Optional[bytes] = None, aad: Optional[bytes] = None, tag: Optional[bytes] = None, **kwargs ): """ :keyword algorithm: Required. algorithm identifier. Possible values include: "RSA-OAEP", "RSA-OAEP-256", "RSA1_5", "A128GCM", "A192GCM", "A256GCM", "A128KW", "A192KW", "A256KW", "A128CBC", "A192CBC", "A256CBC", "A128CBCPAD", "A192CBCPAD", "A256CBCPAD". :paramtype algorithm: str or ~azure.keyvault.v7_2.models.JsonWebKeyEncryptionAlgorithm :keyword value: Required. :paramtype value: bytes :keyword iv: Initialization vector for symmetric algorithms. :paramtype iv: bytes :keyword aad: Additional data to authenticate but not encrypt/decrypt when using authenticated crypto algorithms. :paramtype aad: bytes :keyword tag: The tag to authenticate when performing decryption with an authenticated algorithm. :paramtype tag: bytes """ super(KeyOperationsParameters, self).__init__(**kwargs) self.algorithm = algorithm self.value = value self.iv = iv self.aad = aad self.tag = tag class KeyProperties(msrest.serialization.Model): """Properties of the key pair backing a certificate. :ivar exportable: Not supported in this version. Indicates if the private key can be exported. :vartype exportable: bool :ivar key_type: The type of key pair to be used for the certificate. Possible values include: "EC", "EC-HSM", "RSA", "RSA-HSM", "oct", "oct-HSM". :vartype key_type: str or ~azure.keyvault.v7_2.models.JsonWebKeyType :ivar key_size: The key size in bits. For example: 2048, 3072, or 4096 for RSA. 
:vartype key_size: int :ivar reuse_key: Indicates if the same key pair will be used on certificate renewal. :vartype reuse_key: bool :ivar curve: Elliptic curve name. For valid values, see JsonWebKeyCurveName. Possible values include: "P-256", "P-384", "P-521", "P-256K". :vartype curve: str or ~azure.keyvault.v7_2.models.JsonWebKeyCurveName """ _attribute_map = { 'exportable': {'key': 'exportable', 'type': 'bool'}, 'key_type': {'key': 'kty', 'type': 'str'}, 'key_size': {'key': 'key_size', 'type': 'int'}, 'reuse_key': {'key': 'reuse_key', 'type': 'bool'}, 'curve': {'key': 'crv', 'type': 'str'}, } def __init__( self, *, exportable: Optional[bool] = None, key_type: Optional[Union[str, "JsonWebKeyType"]] = None, key_size: Optional[int] = None, reuse_key: Optional[bool] = None, curve: Optional[Union[str, "JsonWebKeyCurveName"]] = None, **kwargs ): """ :keyword exportable: Not supported in this version. Indicates if the private key can be exported. :paramtype exportable: bool :keyword key_type: The type of key pair to be used for the certificate. Possible values include: "EC", "EC-HSM", "RSA", "RSA-HSM", "oct", "oct-HSM". :paramtype key_type: str or ~azure.keyvault.v7_2.models.JsonWebKeyType :keyword key_size: The key size in bits. For example: 2048, 3072, or 4096 for RSA. :paramtype key_size: int :keyword reuse_key: Indicates if the same key pair will be used on certificate renewal. :paramtype reuse_key: bool :keyword curve: Elliptic curve name. For valid values, see JsonWebKeyCurveName. Possible values include: "P-256", "P-384", "P-521", "P-256K". :paramtype curve: str or ~azure.keyvault.v7_2.models.JsonWebKeyCurveName """ super(KeyProperties, self).__init__(**kwargs) self.exportable = exportable self.key_type = key_type self.key_size = key_size self.reuse_key = reuse_key self.curve = curve class KeyRestoreParameters(msrest.serialization.Model): """The key restore parameters. All required parameters must be populated in order to send to Azure. 
:ivar key_bundle_backup: Required. The backup blob associated with a key bundle. :vartype key_bundle_backup: bytes """ _validation = { 'key_bundle_backup': {'required': True}, } _attribute_map = { 'key_bundle_backup': {'key': 'value', 'type': 'base64'}, } def __init__( self, *, key_bundle_backup: bytes, **kwargs ): """ :keyword key_bundle_backup: Required. The backup blob associated with a key bundle. :paramtype key_bundle_backup: bytes """ super(KeyRestoreParameters, self).__init__(**kwargs) self.key_bundle_backup = key_bundle_backup class KeySignParameters(msrest.serialization.Model): """The key operations parameters. All required parameters must be populated in order to send to Azure. :ivar algorithm: Required. The signing/verification algorithm identifier. For more information on possible algorithm types, see JsonWebKeySignatureAlgorithm. Possible values include: "PS256", "PS384", "PS512", "RS256", "RS384", "RS512", "RSNULL", "ES256", "ES384", "ES512", "ES256K". :vartype algorithm: str or ~azure.keyvault.v7_2.models.JsonWebKeySignatureAlgorithm :ivar value: Required. :vartype value: bytes """ _validation = { 'algorithm': {'required': True}, 'value': {'required': True}, } _attribute_map = { 'algorithm': {'key': 'alg', 'type': 'str'}, 'value': {'key': 'value', 'type': 'base64'}, } def __init__( self, *, algorithm: Union[str, "JsonWebKeySignatureAlgorithm"], value: bytes, **kwargs ): """ :keyword algorithm: Required. The signing/verification algorithm identifier. For more information on possible algorithm types, see JsonWebKeySignatureAlgorithm. Possible values include: "PS256", "PS384", "PS512", "RS256", "RS384", "RS512", "RSNULL", "ES256", "ES384", "ES512", "ES256K". :paramtype algorithm: str or ~azure.keyvault.v7_2.models.JsonWebKeySignatureAlgorithm :keyword value: Required. 
:paramtype value: bytes """ super(KeySignParameters, self).__init__(**kwargs) self.algorithm = algorithm self.value = value class KeyUpdateParameters(msrest.serialization.Model): """The key update parameters. :ivar key_ops: Json web key operations. For more information on possible key operations, see JsonWebKeyOperation. :vartype key_ops: list[str or ~azure.keyvault.v7_2.models.JsonWebKeyOperation] :ivar key_attributes: The attributes of a key managed by the key vault service. :vartype key_attributes: ~azure.keyvault.v7_2.models.KeyAttributes :ivar tags: A set of tags. Application specific metadata in the form of key-value pairs. :vartype tags: dict[str, str] """ _attribute_map = { 'key_ops': {'key': 'key_ops', 'type': '[str]'}, 'key_attributes': {'key': 'attributes', 'type': 'KeyAttributes'}, 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__( self, *, key_ops: Optional[List[Union[str, "JsonWebKeyOperation"]]] = None, key_attributes: Optional["KeyAttributes"] = None, tags: Optional[Dict[str, str]] = None, **kwargs ): """ :keyword key_ops: Json web key operations. For more information on possible key operations, see JsonWebKeyOperation. :paramtype key_ops: list[str or ~azure.keyvault.v7_2.models.JsonWebKeyOperation] :keyword key_attributes: The attributes of a key managed by the key vault service. :paramtype key_attributes: ~azure.keyvault.v7_2.models.KeyAttributes :keyword tags: A set of tags. Application specific metadata in the form of key-value pairs. :paramtype tags: dict[str, str] """ super(KeyUpdateParameters, self).__init__(**kwargs) self.key_ops = key_ops self.key_attributes = key_attributes self.tags = tags class KeyVaultError(msrest.serialization.Model): """The key vault error exception. Variables are only populated by the server, and will be ignored when sending a request. :ivar error: The key vault server error. 
:vartype error: ~azure.keyvault.v7_2.models.Error """ _validation = { 'error': {'readonly': True}, } _attribute_map = { 'error': {'key': 'error', 'type': 'Error'}, } def __init__( self, **kwargs ): """ """ super(KeyVaultError, self).__init__(**kwargs) self.error = None class KeyVerifyParameters(msrest.serialization.Model): """The key verify parameters. All required parameters must be populated in order to send to Azure. :ivar algorithm: Required. The signing/verification algorithm. For more information on possible algorithm types, see JsonWebKeySignatureAlgorithm. Possible values include: "PS256", "PS384", "PS512", "RS256", "RS384", "RS512", "RSNULL", "ES256", "ES384", "ES512", "ES256K". :vartype algorithm: str or ~azure.keyvault.v7_2.models.JsonWebKeySignatureAlgorithm :ivar digest: Required. The digest used for signing. :vartype digest: bytes :ivar signature: Required. The signature to be verified. :vartype signature: bytes """ _validation = { 'algorithm': {'required': True}, 'digest': {'required': True}, 'signature': {'required': True}, } _attribute_map = { 'algorithm': {'key': 'alg', 'type': 'str'}, 'digest': {'key': 'digest', 'type': 'base64'}, 'signature': {'key': 'value', 'type': 'base64'}, } def __init__( self, *, algorithm: Union[str, "JsonWebKeySignatureAlgorithm"], digest: bytes, signature: bytes, **kwargs ): """ :keyword algorithm: Required. The signing/verification algorithm. For more information on possible algorithm types, see JsonWebKeySignatureAlgorithm. Possible values include: "PS256", "PS384", "PS512", "RS256", "RS384", "RS512", "RSNULL", "ES256", "ES384", "ES512", "ES256K". :paramtype algorithm: str or ~azure.keyvault.v7_2.models.JsonWebKeySignatureAlgorithm :keyword digest: Required. The digest used for signing. :paramtype digest: bytes :keyword signature: Required. The signature to be verified. 
:paramtype signature: bytes """ super(KeyVerifyParameters, self).__init__(**kwargs) self.algorithm = algorithm self.digest = digest self.signature = signature class KeyVerifyResult(msrest.serialization.Model): """The key verify result. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: True if the signature is verified, otherwise false. :vartype value: bool """ _validation = { 'value': {'readonly': True}, } _attribute_map = { 'value': {'key': 'value', 'type': 'bool'}, } def __init__( self, **kwargs ): """ """ super(KeyVerifyResult, self).__init__(**kwargs) self.value = None
import logging
import xml.etree.ElementTree as etree

import pytest
import sdk_cmd
import sdk_hosts
import sdk_install
import sdk_marathon
import sdk_metrics
import sdk_plan
import sdk_tasks
import sdk_upgrade
import sdk_utils
import shakedown

from tests import config

log = logging.getLogger(__name__)


@pytest.fixture(scope='module', autouse=True)
def configure_package(configure_security):
    """Install (or upgrade-test) the foldered HDFS service for this module,
    and tear it down when the module's tests are done."""
    try:
        foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
        sdk_install.uninstall(config.PACKAGE_NAME, foldered_name)
        if sdk_utils.dcos_version_less_than("1.9"):
            # HDFS upgrade in 1.8 is not supported.
            sdk_install.install(
                config.PACKAGE_NAME,
                foldered_name,
                config.DEFAULT_TASK_COUNT,
                additional_options={"service": {"name": foldered_name}},
                timeout_seconds=30 * 60)
        else:
            sdk_upgrade.test_upgrade(
                config.PACKAGE_NAME,
                foldered_name,
                config.DEFAULT_TASK_COUNT,
                additional_options={"service": {"name": foldered_name}},
                timeout_seconds=30 * 60)

        yield  # let the test session execute
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, foldered_name)


@pytest.fixture(autouse=True)
def pre_test_setup():
    """Verify the service is healthy before every test in this module."""
    config.check_healthy(service_name=sdk_utils.get_foldered_name(config.SERVICE_NAME))


@pytest.mark.sanity
@pytest.mark.smoke
@pytest.mark.mesos_v0
def test_mesos_v0_api():
    service_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    prior_api_version = sdk_marathon.get_mesos_api_version(service_name)
    # BUG FIX: the original used `is not "V0"`, which compares object identity
    # and only works by accident of CPython string interning. Compare by value.
    if prior_api_version != "V0":
        sdk_marathon.set_mesos_api_version(service_name, "V0")
        sdk_marathon.set_mesos_api_version(service_name, prior_api_version)


@pytest.mark.sanity
def test_endpoints():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    # check that we can reach the scheduler via admin router, and that returned endpoints are sanitized:
    core_site = etree.fromstring(
        sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'endpoints core-site.xml'))
    check_properties(core_site, {
        'ha.zookeeper.parent-znode': '/{}/hadoop-ha'.format(
            sdk_utils.get_zk_path(foldered_name))
    })

    hdfs_site = etree.fromstring(
        sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'endpoints hdfs-site.xml'))
    expect = {
        'dfs.namenode.shared.edits.dir': 'qjournal://{}/hdfs'.format(';'.join([
            sdk_hosts.autoip_host(foldered_name, 'journal-{}-node'.format(i), 8485)
            for i in range(3)])),
    }
    for i in range(2):
        name_node = 'name-{}-node'.format(i)
        expect['dfs.namenode.rpc-address.hdfs.{}'.format(name_node)] = sdk_hosts.autoip_host(
            foldered_name, name_node, 9001)
        expect['dfs.namenode.http-address.hdfs.{}'.format(name_node)] = sdk_hosts.autoip_host(
            foldered_name, name_node, 9002)
    check_properties(hdfs_site, expect)


def check_properties(xml, expect):
    """Assert that the <property> entries found in `xml` match `expect` exactly.

    Only property names present in `expect` are collected, so extra properties
    in the XML are ignored, while missing or mismatched values fail the assert.
    """
    found = {}
    for prop in xml.findall('property'):
        name = prop.find('name').text
        if name in expect:
            found[name] = prop.find('value').text
    log.info('expect: {}\nfound: {}'.format(expect, found))
    assert expect == found


@pytest.mark.recovery
def test_kill_journal_node():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal-0')
    name_ids = sdk_tasks.get_task_ids(foldered_name, 'name')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    sdk_cmd.kill_task_with_pattern('journalnode', sdk_hosts.system_host(foldered_name, 'journal-0-node'))

    config.expect_recovery(service_name=foldered_name)
    sdk_tasks.check_tasks_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'name', name_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'data', data_ids)


@pytest.mark.sanity
@pytest.mark.recovery
def test_kill_name_node():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    name_ids = sdk_tasks.get_task_ids(foldered_name, 'name-0')
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    sdk_cmd.kill_task_with_pattern('namenode', sdk_hosts.system_host(foldered_name, 'name-0-node'))

    config.expect_recovery(service_name=foldered_name)
    sdk_tasks.check_tasks_updated(foldered_name, 'name', name_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'data', data_ids)


@pytest.mark.sanity
@pytest.mark.recovery
def test_kill_data_node():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data-0')
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    name_ids = sdk_tasks.get_task_ids(foldered_name, 'name')

    sdk_cmd.kill_task_with_pattern('datanode', sdk_hosts.system_host(foldered_name, 'data-0-node'))

    config.expect_recovery(service_name=foldered_name)
    sdk_tasks.check_tasks_updated(foldered_name, 'data', data_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'name', name_ids)


@pytest.mark.sanity
@pytest.mark.recovery
def test_kill_scheduler():
    sdk_cmd.kill_task_with_pattern('hdfs.scheduler.Main', shakedown.get_service_ips('marathon').pop())
    config.check_healthy(service_name=sdk_utils.get_foldered_name(config.SERVICE_NAME))


@pytest.mark.sanity
@pytest.mark.recovery
def test_kill_all_journalnodes():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    for journal_pod in config.get_pod_type_instances("journal", foldered_name):
        sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'pod restart {}'.format(journal_pod))

    config.expect_recovery(service_name=foldered_name)
    # name nodes fail and restart, so don't check those
    sdk_tasks.check_tasks_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'data', data_ids)


@pytest.mark.sanity
@pytest.mark.recovery
def test_kill_all_namenodes():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    name_ids = sdk_tasks.get_task_ids(foldered_name, 'name')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    for name_pod in config.get_pod_type_instances("name", foldered_name):
        sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'pod restart {}'.format(name_pod))

    config.expect_recovery(service_name=foldered_name)
    sdk_tasks.check_tasks_updated(foldered_name, 'name', name_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'data', data_ids)


@pytest.mark.sanity
@pytest.mark.recovery
def test_kill_all_datanodes():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    name_ids = sdk_tasks.get_task_ids(foldered_name, 'name')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    for data_pod in config.get_pod_type_instances("data", foldered_name):
        sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'pod restart {}'.format(data_pod))

    config.expect_recovery(service_name=foldered_name)
    sdk_tasks.check_tasks_updated(foldered_name, 'data', data_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'name', name_ids)


@pytest.mark.sanity
@pytest.mark.recovery
def test_permanently_replace_namenodes():
    # Replace each name node in turn (0 is replaced twice) to exercise
    # permanent replacement of both HA name nodes.
    replace_name_node(0)
    replace_name_node(1)
    replace_name_node(0)


@pytest.mark.sanity
@pytest.mark.recovery
def test_permanent_and_transient_namenode_failures_0_1():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    config.check_healthy(service_name=foldered_name)
    name_0_ids = sdk_tasks.get_task_ids(foldered_name, 'name-0')
    name_1_ids = sdk_tasks.get_task_ids(foldered_name, 'name-1')
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'pod replace name-0')
    sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'pod restart name-1')

    config.expect_recovery(service_name=foldered_name)
    sdk_tasks.check_tasks_updated(foldered_name, 'name-0', name_0_ids)
    sdk_tasks.check_tasks_updated(foldered_name, 'name-1', name_1_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'data', data_ids)


@pytest.mark.sanity
@pytest.mark.recovery
def test_permanent_and_transient_namenode_failures_1_0():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    config.check_healthy(service_name=foldered_name)
    name_0_ids = sdk_tasks.get_task_ids(foldered_name, 'name-0')
    name_1_ids = sdk_tasks.get_task_ids(foldered_name, 'name-1')
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'pod replace name-1')
    sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'pod restart name-0')

    config.expect_recovery(service_name=foldered_name)
    sdk_tasks.check_tasks_updated(foldered_name, 'name-0', name_0_ids)
    sdk_tasks.check_tasks_updated(foldered_name, 'name-1', name_1_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'data', data_ids)


@pytest.mark.smoke
def test_install():
    config.check_healthy(service_name=sdk_utils.get_foldered_name(config.SERVICE_NAME))


@pytest.mark.sanity
def test_bump_journal_cpus():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    name_ids = sdk_tasks.get_task_ids(foldered_name, 'name')
    log.info('journal ids: ' + str(journal_ids))

    sdk_marathon.bump_cpu_count_config(foldered_name, 'JOURNAL_CPUS')

    sdk_tasks.check_tasks_updated(foldered_name, 'journal', journal_ids)
    # journal node update should not cause any of the name nodes to crash
    # if the name nodes crashed, then it implies the journal nodes were updated in parallel,
    # when they should've been updated serially.
    # for journal nodes, the deploy plan is parallel, while the update plan is serial.
    # maybe the deploy plan was mistakenly used?
    sdk_tasks.check_tasks_not_updated(foldered_name, 'name', name_ids)

    config.check_healthy(service_name=foldered_name)


@pytest.mark.sanity
def test_bump_data_nodes():
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')
    log.info('data ids: ' + str(data_ids))

    sdk_marathon.bump_task_count_config(foldered_name, 'DATA_COUNT')

    config.check_healthy(service_name=foldered_name, count=config.DEFAULT_TASK_COUNT + 1)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'data', data_ids)


@pytest.mark.readiness_check
@pytest.mark.sanity
def test_modify_app_config():
    """This tests checks that the modification of the app config does not trigger
    a recovery."""
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    sdk_plan.wait_for_completed_recovery(foldered_name)
    old_recovery_plan = sdk_plan.get_plan(foldered_name, "recovery")

    app_config_field = 'TASKCFG_ALL_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_EXPIRY_MS'
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    name_ids = sdk_tasks.get_task_ids(foldered_name, 'name')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    marathon_config = sdk_marathon.get_config(foldered_name)
    log.info('marathon config: ')
    log.info(marathon_config)
    expiry_ms = int(marathon_config['env'][app_config_field])
    marathon_config['env'][app_config_field] = str(expiry_ms + 1)
    sdk_marathon.update_app(foldered_name, marathon_config, timeout=15 * 60)

    # All tasks should be updated because hdfs-site.xml has changed
    config.check_healthy(service_name=foldered_name)
    sdk_tasks.check_tasks_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_updated(foldered_name, 'name', name_ids)
    sdk_tasks.check_tasks_updated(foldered_name, 'data', data_ids)

    sdk_plan.wait_for_completed_recovery(foldered_name)
    new_recovery_plan = sdk_plan.get_plan(foldered_name, "recovery")
    assert old_recovery_plan == new_recovery_plan


@pytest.mark.sanity
def test_modify_app_config_rollback():
    app_config_field = 'TASKCFG_ALL_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_EXPIRY_MS'
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)

    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    old_config = sdk_marathon.get_config(foldered_name)
    marathon_config = sdk_marathon.get_config(foldered_name)
    log.info('marathon config: ')
    log.info(marathon_config)
    expiry_ms = int(marathon_config['env'][app_config_field])
    log.info('expiry ms: ' + str(expiry_ms))
    marathon_config['env'][app_config_field] = str(expiry_ms + 1)
    sdk_marathon.update_app(foldered_name, marathon_config, timeout=15 * 60)

    # Wait for journal nodes to be affected by the change
    sdk_tasks.check_tasks_updated(foldered_name, 'journal', journal_ids)
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')

    log.info('old config: ')
    log.info(old_config)
    # Put the old config back (rollback)
    sdk_marathon.update_app(foldered_name, old_config)

    # Wait for the journal nodes to return to their old configuration
    sdk_tasks.check_tasks_updated(foldered_name, 'journal', journal_ids)
    config.check_healthy(service_name=foldered_name)

    marathon_config = sdk_marathon.get_config(foldered_name)
    assert int(marathon_config['env'][app_config_field]) == expiry_ms

    # Data tasks should not have been affected
    sdk_tasks.check_tasks_not_updated(foldered_name, 'data', data_ids)


@pytest.mark.sanity
@pytest.mark.metrics
@pytest.mark.dcos_min_version('1.9')
def test_metrics():
    expected_metrics = [
        "JournalNode.jvm.JvmMetrics.ThreadsRunnable",
        "null.rpc.rpc.RpcQueueTimeNumOps",
        "null.metricssystem.MetricsSystem.PublishAvgTime"
    ]

    def expected_metrics_exist(emitted_metrics):
        # HDFS metric names need sanitation as they're dynamic.
        # For eg: ip-10-0-0-139.null.rpc.rpc.RpcQueueTimeNumOps
        # This is consistent across all HDFS metric names.
        metric_names = set(['.'.join(metric_name.split(".")[1:]) for metric_name in emitted_metrics])
        return sdk_metrics.check_metrics_presence(metric_names, expected_metrics)

    sdk_metrics.wait_for_service_metrics(
        config.PACKAGE_NAME,
        sdk_utils.get_foldered_name(config.SERVICE_NAME),
        "journal-0-node",
        config.DEFAULT_HDFS_TIMEOUT,
        expected_metrics_exist
    )


def replace_name_node(index):
    """Permanently replace name-<index> and verify only that pod was updated."""
    foldered_name = sdk_utils.get_foldered_name(config.SERVICE_NAME)
    config.check_healthy(service_name=foldered_name)
    name_node_name = 'name-' + str(index)
    name_id = sdk_tasks.get_task_ids(foldered_name, name_node_name)
    journal_ids = sdk_tasks.get_task_ids(foldered_name, 'journal')
    data_ids = sdk_tasks.get_task_ids(foldered_name, 'data')

    sdk_cmd.svc_cli(config.PACKAGE_NAME, foldered_name, 'pod replace {}'.format(name_node_name))

    config.expect_recovery(service_name=foldered_name)
    sdk_tasks.check_tasks_updated(foldered_name, name_node_name, name_id)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'journal', journal_ids)
    sdk_tasks.check_tasks_not_updated(foldered_name, 'data', data_ids)
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Stubouts, mocks and fixtures for the test suite"""

import pickle
import random

from nova.openstack.common import jsonutils
from nova import test
import nova.tests.image.fake
from nova.virt.xenapi import driver as xenapi_conn
from nova.virt.xenapi import fake
from nova.virt.xenapi import vm_utils
from nova.virt.xenapi import vmops


def stubout_firewall_driver(stubs, conn):
    """Stub out the firewall driver's instance-filter hooks as no-ops."""
    def fake_none(self, *args):
        return

    _vmops = conn._vmops
    stubs.Set(_vmops.firewall_driver, 'prepare_instance_filter', fake_none)
    stubs.Set(_vmops.firewall_driver, 'instance_filter_exists', fake_none)


def stubout_instance_snapshot(stubs):
    """Stub out image fetch and VHD coalesce so snapshots use fake VDIs."""
    def fake_fetch_image(context, session, instance, name_label, image, type):
        return {'root': dict(uuid=_make_fake_vdi(), file=None),
                'kernel': dict(uuid=_make_fake_vdi(), file=None),
                'ramdisk': dict(uuid=_make_fake_vdi(), file=None)}

    stubs.Set(vm_utils, '_fetch_image', fake_fetch_image)

    def fake_wait_for_vhd_coalesce(*args):
        #TODO(sirp): Should we actually fake out the data here
        return "fakeparent", "fakebase"

    stubs.Set(vm_utils, '_wait_for_vhd_coalesce', fake_wait_for_vhd_coalesce)


def stubout_session(stubs, cls, product_version=(5, 6, 2),
                    product_brand='XenServer', **opt_args):
    """Stubs out methods from XenAPISession"""
    stubs.Set(xenapi_conn.XenAPISession, '_create_session',
              lambda s, url: cls(url, **opt_args))
    stubs.Set(xenapi_conn.XenAPISession, '_get_product_version_and_brand',
              lambda s: (product_version, product_brand))


def stubout_get_this_vm_uuid(stubs):
    """Return the fake control domain's uuid as "this" VM's uuid."""
    def f():
        vms = [rec['uuid'] for ref, rec
               in fake.get_all_records('VM').iteritems()
               if rec['is_control_domain']]
        return vms[0]
    stubs.Set(vm_utils, 'get_this_vm_uuid', f)


def stubout_image_service_download(stubs):
    def fake_download(*args, **kwargs):
        pass
    stubs.Set(nova.tests.image.fake._FakeImageService, 'download',
              fake_download)


def stubout_stream_disk(stubs):
    def fake_stream_disk(*args, **kwargs):
        pass
    stubs.Set(vm_utils, '_stream_disk', fake_stream_disk)


def stubout_is_vdi_pv(stubs):
    def f(_1):
        return False
    stubs.Set(vm_utils, '_is_vdi_pv', f)


def stubout_determine_is_pv_objectstore(stubs):
    # NOTE(review): original docstring read "Assumes VMs stu have PV kernels"
    # (garbled); the stub makes _determine_is_pv_objectstore always report
    # non-PV.
    """Assumes VMs don't have PV kernels"""
    def f(*args):
        return False
    stubs.Set(vm_utils, '_determine_is_pv_objectstore', f)


def stubout_is_snapshot(stubs):
    """Always returns true

    xenapi fake driver does not create vmrefs for snapshots.
    """
    def f(*args):
        return True
    stubs.Set(vm_utils, 'is_snapshot', f)


def stubout_lookup_image(stubs):
    """Simulates a failure in lookup image."""
    def f(_1, _2, _3, _4):
        raise Exception("Test Exception raised by fake lookup_image")
    stubs.Set(vm_utils, 'lookup_image', f)


def stubout_fetch_disk_image(stubs, raise_failure=False):
    """Simulates a failure in fetch image_glance_disk."""

    def _fake_fetch_disk_image(context, session, instance, name_label, image,
                               image_type):
        if raise_failure:
            raise fake.Failure("Test Exception raised by "
                               "fake fetch_image_glance_disk")
        elif image_type == vm_utils.ImageType.KERNEL:
            filename = "kernel"
        elif image_type == vm_utils.ImageType.RAMDISK:
            filename = "ramdisk"
        else:
            filename = "unknown"

        vdi_type = vm_utils.ImageType.to_string(image_type)
        return {vdi_type: dict(uuid=None, file=filename)}

    stubs.Set(vm_utils, '_fetch_disk_image', _fake_fetch_disk_image)


def stubout_create_vm(stubs):
    """Simulates a failure in create_vm."""
    def f(*args):
        raise fake.Failure("Test Exception raised by fake create_vm")
    stubs.Set(vm_utils, 'create_vm', f)


def stubout_attach_disks(stubs):
    """Simulates a failure in _attach_disks."""
    def f(*args):
        raise fake.Failure("Test Exception raised by fake _attach_disks")
    stubs.Set(vmops.VMOps, '_attach_disks', f)


def _make_fake_vdi():
    """Create a fake VDI on the first fake SR and return its uuid."""
    sr_ref = fake.get_all('SR')[0]
    vdi_ref = fake.create_vdi('', sr_ref)
    vdi_rec = fake.get_record('VDI', vdi_ref)
    return vdi_rec['uuid']


class FakeSessionForVMTests(fake.SessionBase):
    """Stubs out a XenAPISession for VM tests"""

    _fake_iptables_save_output = ("# Generated by iptables-save v1.4.10 on "
                                  "Sun Nov 6 22:49:02 2011\n"
                                  "*filter\n"
                                  ":INPUT ACCEPT [0:0]\n"
                                  ":FORWARD ACCEPT [0:0]\n"
                                  ":OUTPUT ACCEPT [0:0]\n"
                                  "COMMIT\n"
                                  "# Completed on Sun Nov 6 22:49:02 2011\n")

    def host_call_plugin(self, _1, _2, plugin, method, _5):
        if (plugin, method) == ('glance', 'download_vhd'):
            root_uuid = _make_fake_vdi()
            return pickle.dumps(dict(root=dict(uuid=root_uuid)))
        elif (plugin, method) == ("xenhost", "iptables_config"):
            return fake.as_json(out=self._fake_iptables_save_output, err='')
        else:
            return (super(FakeSessionForVMTests, self).
                    host_call_plugin(_1, _2, plugin, method, _5))

    def VM_start(self, _1, ref, _2, _3):
        vm = fake.get_record('VM', ref)
        if vm['power_state'] != 'Halted':
            raise fake.Failure(['VM_BAD_POWER_STATE', ref, 'Halted',
                                vm['power_state']])
        vm['power_state'] = 'Running'
        vm['is_a_template'] = False
        vm['is_control_domain'] = False
        vm['domid'] = random.randrange(1, 1 << 16)
        return vm

    def VM_start_on(self, _1, vm_ref, host_ref, _2, _3):
        vm_rec = self.VM_start(_1, vm_ref, _2, _3)
        vm_rec['resident_on'] = host_ref

    def VDI_snapshot(self, session_ref, vm_ref, _1):
        sr_ref = "fakesr"
        return fake.create_vdi('fakelabel', sr_ref, read_only=True)

    def SR_scan(self, session_ref, sr_ref):
        pass


class FakeSessionForFirewallTests(FakeSessionForVMTests):
    """Stubs out a XenApi Session for doing IPTable Firewall tests"""

    def __init__(self, uri, test_case=None):
        super(FakeSessionForFirewallTests, self).__init__(uri)
        if hasattr(test_case, '_in_filter_rules'):
            self._in_filter_rules = test_case._in_filter_rules
        if hasattr(test_case, '_in6_filter_rules'):
            self._in6_filter_rules = test_case._in6_filter_rules
        if hasattr(test_case, '_in_nat_rules'):
            self._in_nat_rules = test_case._in_nat_rules
        self._test_case = test_case

    def host_call_plugin(self, _1, _2, plugin, method, args):
        """Mock method for host_call_plugin to be used in unit tests
           for the dom0 iptables Firewall drivers for XenAPI

        """
        # (typo fix: original docstring read "Mock method four ...")
        if plugin == "xenhost" and method == "iptables_config":
            # The command to execute is a json-encoded list
            cmd_args = args.get('cmd_args', None)
            cmd = jsonutils.loads(cmd_args)
            if not cmd:
                ret_str = ''
            else:
                output = ''
                process_input = args.get('process_input', None)
                if cmd == ['ip6tables-save', '-c', '-t', 'filter']:
                    output = '\n'.join(self._in6_filter_rules)
                if cmd == ['iptables-save', '-c', '-t', 'filter']:
                    output = '\n'.join(self._in_filter_rules)
                if cmd == ['iptables-save', '-c', '-t', 'nat']:
                    output = '\n'.join(self._in_nat_rules)
                if cmd == ['iptables-restore', '-c', ]:
                    lines = process_input.split('\n')
                    if '*filter' in lines:
                        if self._test_case is not None:
                            self._test_case._out_rules = lines
                        output = '\n'.join(lines)
                if cmd == ['ip6tables-restore', '-c', ]:
                    lines = process_input.split('\n')
                    if '*filter' in lines:
                        output = '\n'.join(lines)
                ret_str = fake.as_json(out=output, err='')
            return ret_str


def stub_out_vm_methods(stubs):
    def fake_acquire_bootlock(self, vm):
        pass

    def fake_release_bootlock(self, vm):
        pass

    def fake_generate_ephemeral(*args):
        pass

    def fake_wait_for_device(dev):
        pass

    stubs.Set(vmops.VMOps, "_acquire_bootlock", fake_acquire_bootlock)
    stubs.Set(vmops.VMOps, "_release_bootlock", fake_release_bootlock)
    stubs.Set(vm_utils, 'generate_ephemeral', fake_generate_ephemeral)
    stubs.Set(vm_utils, '_wait_for_device', fake_wait_for_device)


class FakeSessionForVolumeTests(fake.SessionBase):
    """Stubs out a XenAPISession for Volume tests"""
    def VDI_introduce(self, _1, uuid, _2, _3, _4, _5,
                      _6, _7, _8, _9, _10, _11):
        valid_vdi = False
        refs = fake.get_all('VDI')
        for ref in refs:
            rec = fake.get_record('VDI', ref)
            if rec['uuid'] == uuid:
                valid_vdi = True
        if not valid_vdi:
            raise fake.Failure([['INVALID_VDI', 'session', self._session]])


class FakeSessionForVolumeFailedTests(FakeSessionForVolumeTests):
    """Stubs out a XenAPISession for Volume tests: it injects failures"""
    def VDI_introduce(self, _1, uuid, _2, _3, _4, _5,
                      _6, _7, _8, _9, _10, _11):
        # This is for testing failure
        raise fake.Failure([['INVALID_VDI', 'session', self._session]])

    def PBD_unplug(self, _1, ref):
        rec = fake.get_record('PBD', ref)
        rec['currently-attached'] = False

    def SR_forget(self, _1, ref):
        pass


def stub_out_migration_methods(stubs):
    fakesr = fake.create_sr()

    def fake_move_disks(self, instance, disk_info):
        vdi_ref = fake.create_vdi(instance['name'], fakesr)
        vdi_rec = fake.get_record('VDI', vdi_ref)
        vdi_rec['other_config']['nova_disk_type'] = 'root'
        return {'uuid': vdi_rec['uuid'], 'ref': vdi_ref}

    def fake_get_vdi(session, vm_ref):
        vdi_ref_parent = fake.create_vdi('derp-parent', fakesr)
        vdi_rec_parent = fake.get_record('VDI', vdi_ref_parent)
        vdi_ref = fake.create_vdi('derp', fakesr,
                sm_config={'vhd-parent': vdi_rec_parent['uuid']})
        vdi_rec = session.call_xenapi("VDI.get_record", vdi_ref)
        return vdi_ref, vdi_rec

    def fake_sr(session, *args):
        return fakesr

    def fake_get_sr_path(*args):
        return "fake"

    def fake_destroy(*args, **kwargs):
        pass

    def fake_generate_ephemeral(*args):
        pass

    stubs.Set(vmops.VMOps, '_destroy', fake_destroy)
    stubs.Set(vm_utils, 'move_disks', fake_move_disks)
    stubs.Set(vm_utils, 'scan_default_sr', fake_sr)
    stubs.Set(vm_utils, 'get_vdi_for_vm_safely', fake_get_vdi)
    stubs.Set(vm_utils, 'get_sr_path', fake_get_sr_path)
    stubs.Set(vm_utils, 'generate_ephemeral', fake_generate_ephemeral)


class FakeSessionForFailedMigrateTests(FakeSessionForVMTests):
    """Session whose migration-related calls always fail."""
    def VM_assert_can_migrate(self, session, vmref, migrate_data,
                              live, vdi_map, vif_map, options):
        raise fake.Failure("XenAPI VM.assert_can_migrate failed")

    def host_migrate_receive(self, session, hostref, networkref, options):
        raise fake.Failure("XenAPI host.migrate_receive failed")

    def VM_migrate_send(self, session, vmref, migrate_data, islive,
                        vdi_map, vif_map, options):
        raise fake.Failure("XenAPI VM.migrate_send failed")


class XenAPITestBase(test.TestCase):
    """Base TestCase that swaps in the fake XenAPI module and resets it."""
    def setUp(self):
        super(XenAPITestBase, self).setUp()
        self.useFixture(test.ReplaceModule('XenAPI', fake))
        fake.reset()
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Lookup/fetch helpers for Telemetry's binary dependencies.

Wraps py_utils.binary_manager with Telemetry's own dependency configs and
keeps a single process-wide BinaryManager instance.
"""

from __future__ import print_function
from __future__ import absolute_import
import contextlib
import logging
import os

import py_utils
from py_utils import binary_manager
from py_utils import cloud_storage
from py_utils import dependency_util
import dependency_manager
from dependency_manager import base_config

from devil import devil_env

from telemetry.core import exceptions
from telemetry.core import util


TELEMETRY_PROJECT_CONFIG = os.path.join(
    util.GetTelemetryDir(), 'telemetry', 'binary_dependencies.json')

CHROME_BINARY_CONFIG = os.path.join(
    util.GetCatapultDir(), 'common', 'py_utils', 'py_utils',
    'chrome_binaries.json')

SUPPORTED_DEP_PLATFORMS = (
    'linux_aarch64', 'linux_x86_64', 'linux_armv7l', 'linux_mips',
    'mac_x86_64', 'mac_arm64',
    'win_x86', 'win_AMD64',
    'android_arm64-v8a', 'android_armeabi-v7a', 'android_arm',
    'android_x64', 'android_x86'
)

PLATFORMS_TO_DOWNLOAD_FOLDER_MAP = {
    'linux_aarch64': 'bin/linux/aarch64',
    'linux_x86_64': 'bin/linux/x86_64',
    'linux_armv7l': 'bin/linux/armv7l',
    'linux_mips': 'bin/linux/mips',
    'mac_x86_64': 'bin/mac/x86_64',
    'mac_arm64': 'bin/mac/arm64',
    'win_x86': 'bin/win/x86',
    'win_AMD64': 'bin/win/AMD64',
    'android_arm64-v8a': 'bin/android/arm64-v8a',
    'android_armeabi-v7a': 'bin/android/armeabi-v7a',
    'android_arm': 'bin/android/arm',
    'android_x64': 'bin/android/x64',
    'android_x86': 'bin/android/x86',
}

# Re-exported so callers can catch these without importing
# dependency_manager directly.
NoPathFoundError = dependency_manager.NoPathFoundError
CloudStorageError = dependency_manager.CloudStorageError

# Process-wide singleton manager, plus the set of helper binaries already
# pushed to devices, keyed by (device serial, install path).
_binary_manager = None
_installed_helpers = set()


TELEMETRY_BINARY_BASE_CS_FOLDER = 'binary_dependencies'
TELEMETRY_BINARY_CS_BUCKET = cloud_storage.PUBLIC_BUCKET


def NeedsInit():
  """Returns True when the binary manager singleton is not yet set up."""
  return not _binary_manager


def InitDependencyManager(client_configs):
  """Creates the singleton BinaryManager from client + Telemetry configs.

  Args:
    client_configs: optional list of extra config paths; they take
        precedence over Telemetry's own configs.

  Raises:
    exceptions.InitializationError: if the manager is already initialized.
  """
  if GetBinaryManager():
    raise exceptions.InitializationError(
        'Trying to re-initialize the binary manager with config %s'
        % client_configs)
  configs = []
  if client_configs:
    configs += client_configs
  configs += [TELEMETRY_PROJECT_CONFIG, CHROME_BINARY_CONFIG]
  SetBinaryManager(binary_manager.BinaryManager(configs))

  devil_env.config.Initialize()


@contextlib.contextmanager
def TemporarilyReplaceBinaryManager(manager):
  """Context manager that swaps in |manager| and restores the old one."""
  old_manager = GetBinaryManager()
  try:
    SetBinaryManager(manager)
    yield
  finally:
    SetBinaryManager(old_manager)


def GetBinaryManager():
  return _binary_manager


def SetBinaryManager(manager):
  global _binary_manager  # pylint: disable=global-statement
  _binary_manager = manager


def _IsChromeOSLocalMode(os_name):
  """Determines if we're running telemetry on a Chrome OS device.

  Used to differentiate local mode (telemetry running on the CrOS DUT) from
  remote mode (running telemetry on another platform that communicates with
  the CrOS DUT over SSH).
  """
  return os_name == 'chromeos' and py_utils.GetHostOsName() == 'chromeos'


def FetchPath(binary_name, os_name, arch, os_version=None):
  """ Return a path to the appropriate executable for <binary_name>, downloading
      from cloud storage if needed, or None if it cannot be found.
  """
  if GetBinaryManager() is None:
    raise exceptions.InitializationError(
        'Called FetchPath with uninitialized binary manager.')
  return GetBinaryManager().FetchPath(
      binary_name, 'linux' if _IsChromeOSLocalMode(os_name) else os_name,
      arch, os_version)


def LocalPath(binary_name, os_name, arch, os_version=None):
  """ Return a local path to the given binary name, or None if an executable
      cannot be found. Will not download the executable.
  """
  if GetBinaryManager() is None:
    raise exceptions.InitializationError(
        'Called LocalPath with uninitialized binary manager.')
  return GetBinaryManager().LocalPath(binary_name, os_name, arch, os_version)


def FetchBinaryDependencies(
    platform, client_configs, fetch_reference_chrome_binary):
  """ Fetch all binary dependencies for the given |platform|.

  Note: we don't fetch browser binaries by default because the size of the
  binary is about 2Gb, and it requires cloud storage permission to
  chrome-telemetry bucket.

  Args:
    platform: an instance of telemetry.core.platform
    client_configs: A list of paths (string) to dependencies json files.
    fetch_reference_chrome_binary: whether to fetch reference chrome binary for
      the given platform.
  """
  configs = [
      dependency_manager.BaseConfig(TELEMETRY_PROJECT_CONFIG),
  ]
  dep_manager = dependency_manager.DependencyManager(configs)
  os_name = platform.GetOSName()
  # If we're running directly on a Chrome OS device, fetch the binaries for
  # linux instead, which should be compatible with CrOS. Otherwise, if we're
  # running remotely on CrOS, fetch the binaries for the host platform like
  # we do with android below.
  if _IsChromeOSLocalMode(os_name):
    os_name = 'linux'
  target_platform = '%s_%s' % (os_name, platform.GetArchName())
  dep_manager.PrefetchPaths(target_platform)

  host_platform = None
  fetch_devil_deps = False
  if os_name in ('android', 'chromeos'):
    host_platform = '%s_%s' % (
        py_utils.GetHostOsName(), py_utils.GetHostArchName())
    dep_manager.PrefetchPaths(host_platform)
    if os_name == 'android':
      if host_platform == 'linux_x86_64':
        fetch_devil_deps = True
      else:
        logging.error('Devil only supports 64 bit linux as a host platform. '
                      'Android tests may fail.')

  if fetch_reference_chrome_binary:
    _FetchReferenceBrowserBinary(platform)

  # For now, handle client config separately because the BUILD.gn & .isolate of
  # telemetry tests in chromium src failed to include the files specified in
  # its client config.
  # (https://github.com/catapult-project/catapult/issues/2192)
  # For now this is ok because the client configs usually don't include cloud
  # storage infos.
  # TODO(crbug.com/1111556): remove the logic of swallowing exception once the
  # issue is fixed on Chromium side.
  if client_configs:
    manager = dependency_manager.DependencyManager(
        list(dependency_manager.BaseConfig(c) for c in client_configs))
    try:
      manager.PrefetchPaths(target_platform)
      if host_platform is not None:
        manager.PrefetchPaths(host_platform)
    except dependency_manager.NoPathFoundError as e:
      logging.error('Error when trying to prefetch paths for %s: %s',
                    target_platform, e)

  if fetch_devil_deps:
    devil_env.config.Initialize()
    devil_env.config.PrefetchPaths(arch=platform.GetArchName())
    devil_env.config.PrefetchPaths()


def ReinstallAndroidHelperIfNeeded(binary_name, install_path, device):
  """ Install a binary helper to a specific location.

  Args:
    binary_name: (str) The name of the binary from binary_dependencies.json
    install_path: (str) The path to install the binary at
    device: (device_utils.DeviceUtils) a device to install the helper to

  Raises:
    Exception: When the binary could not be fetched or could not be pushed to
        the device.
  """
  if (device.serial, install_path) in _installed_helpers:
    return
  host_path = FetchPath(binary_name, 'android', device.GetABI())
  if not host_path:
    # BUG FIX: the original passed logging-style lazy args to Exception(),
    # leaving the message unformatted; interpolate explicitly instead.
    raise Exception(
        '%s binary could not be fetched as %s' % (binary_name, host_path))
  device.PushChangedFiles([(host_path, install_path)])
  device.RunShellCommand(['chmod', '777', install_path], check_return=True)
  _installed_helpers.add((device.serial, install_path))


def _FetchReferenceBrowserBinary(platform):
  """Downloads the reference chrome binary for |platform|."""
  os_name = platform.GetOSName()
  if _IsChromeOSLocalMode(os_name):
    os_name = 'linux'
  arch_name = platform.GetArchName()
  manager = binary_manager.BinaryManager(
      [CHROME_BINARY_CONFIG])
  if os_name == 'android':
    os_version = dependency_util.GetChromeApkOsVersion(
        platform.GetOSVersionName())
    manager.FetchPath(
        'chrome_stable', os_name, arch_name, os_version)
  else:
    manager.FetchPath(
        'chrome_stable', os_name, arch_name)


def UpdateDependency(dependency, dep_local_path, version,
                     os_name=None, arch_name=None):
  """Updates a cloud-storage dependency entry in binary_dependencies.json.

  os_name and arch_name must be given together or not at all; when omitted,
  the host platform is used.

  Raises:
    RuntimeError: when the config has no valid entry for the platform.
  """
  config = os.path.join(
      util.GetTelemetryDir(), 'telemetry', 'binary_dependencies.json')

  if not os_name:
    assert not arch_name, 'arch_name is specified but not os_name'
    os_name = py_utils.GetHostOsName()
    arch_name = py_utils.GetHostArchName()
  else:
    assert arch_name, 'os_name is specified but not arch_name'

  dep_platform = '%s_%s' % (os_name, arch_name)

  c = base_config.BaseConfig(config, writable=True)
  try:
    old_version = c.GetVersion(dependency, dep_platform)
    print('Updating from version: {}'.format(old_version))
  except ValueError:
    raise RuntimeError(
        ('binary_dependencies.json entry for %s missing or invalid; please add '
         'it first! (need download_path and path_within_archive)') %
        dep_platform)

  if dep_local_path:
    c.AddCloudStorageDependencyUpdateJob(
        dependency, dep_platform, dep_local_path, version=version,
        execute_job=True)
# Unit tests for the Conan CMake build helper: verifies the generated
# cmake command lines, build configs and definitions for many
# os/compiler/arch combinations. Expected strings must stay byte-exact.
import os
import shutil
import sys
import unittest
import platform

from collections import namedtuple
from conans import tools
from conans.model.conan_file import ConanFile
from conans.model.settings import Settings
from conans.client.conf import default_settings_yml
from conans.client.build.cmake import CMake
from conans.test.utils.tools import TestBufferConanOutput
from conans.tools import cpu_count
from conans.util.files import save
from conans.test.utils.test_files import temp_folder
from conans.model.options import Options, PackageOptions
from conans.errors import ConanException


class CMakeTest(unittest.TestCase):
    """Exercises CMake helper command-line generation and build/test calls."""

    def setUp(self):
        # Spaces in the temp path would change the quoting in expected strings.
        self.tempdir = temp_folder(path_with_spaces=False)

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def build_type_ovewrite_test(self):
        # Overriding build_type away from settings must warn; single-config
        # generators put it in the command line, multi-config in build_config.
        settings = Settings.loads(default_settings_yml)
        settings.os = "Linux"
        settings.compiler = "gcc"
        settings.compiler.version = "6.3"
        settings.arch = "x86"
        settings.build_type = "Release"
        conan_file = ConanFileMock()
        conan_file.settings = settings
        cmake = CMake(conan_file)
        cmake.build_type = "Debug"
        self.assertIn('WARN: Set CMake build type "Debug" is different than the '
                      'settings build_type "Release"', conan_file.output)
        self.assertEquals(cmake.build_type, "Debug")
        self.assertIn('-DCMAKE_BUILD_TYPE="Debug"', cmake.command_line)

        # No override: no warning, build_type comes from settings.
        conan_file = ConanFileMock()
        conan_file.settings = settings
        cmake = CMake(conan_file)
        self.assertNotIn('WARN: Set CMake build type ', conan_file.output)
        self.assertEquals(cmake.build_type, "Release")

        # Now with visual, (multiconfig)
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "15"
        settings.arch = "x86"
        settings.build_type = "Release"
        conan_file = ConanFileMock()
        conan_file.settings = settings
        cmake = CMake(conan_file)
        cmake.build_type = "Debug"
        self.assertIn('WARN: Set CMake build type "Debug" is different than the '
                      'settings build_type "Release"', conan_file.output)
        self.assertEquals(cmake.build_type, "Debug")
        # Multi-config generator: build type goes to --config, not -D.
        self.assertNotIn('-DCMAKE_BUILD_TYPE="Debug"', cmake.command_line)
        self.assertIn("--config Debug", cmake.build_config)
        cmake = CMake(conan_file)
        cmake.build_type = "Release"
        self.assertIn("--config Release", cmake.build_config)

    def loads_default_test(self):
        # Checks the full generated command line for a matrix of platforms,
        # with and without CMAKE_SYSTEM_NAME cross-build injection.
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "12"
        settings.arch = "x86"
        conan_file = ConanFileMock()
        conan_file.settings = settings

        def check(text, build_config, generator=None):
            # Builds the expected cross-compile prefix and /MP flags, then
            # compares against the helper's command_line/build_config.
            os = str(settings.os)
            os_ver = str(settings.os.version) if settings.get_safe('os.version') else None
            for cmake_system_name in (True, False):
                cross_ver = ("-DCMAKE_SYSTEM_VERSION=\"%s\" " % os_ver) if os_ver else ""
                cross = ("-DCMAKE_SYSTEM_NAME=\"%s\" %s-DCMAKE_SYSROOT=\"/path/to/sysroot\" "
                         % ({"Macos": "Darwin"}.get(os, os), cross_ver)
                         if (platform.system() != os and cmake_system_name) else "")
                cmake = CMake(conan_file, generator=generator,
                              cmake_system_name=cmake_system_name)
                new_text = text.replace("-DCONAN_EXPORTED", "%s-DCONAN_EXPORTED" % cross)
                if "Visual Studio" in text:
                    cores = ('-DCONAN_CXX_FLAGS="/MP{0}" '
                             '-DCONAN_C_FLAGS="/MP{0}" '.format(tools.cpu_count()))
                    new_text = new_text.replace("-Wno-dev", "%s-Wno-dev" % cores)
                self.assertEqual(new_text, cmake.command_line)
                self.assertEqual(build_config, cmake.build_config)

        check('-G "Visual Studio 12 2013" -DCONAN_EXPORTED="1" '
              '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev',
              "")
        check('-G "Custom Generator" -DCONAN_EXPORTED="1" '
              '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev',
              '', generator="Custom Generator")
        settings.build_type = "Debug"
        check('-G "Visual Studio 12 2013" -DCONAN_EXPORTED="1" '
              '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev',
              '--config Debug')
        settings.arch = "x86_64"
        check('-G "Visual Studio 12 2013 Win64" -DCONAN_EXPORTED="1" '
              '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev',
              '--config Debug')
        settings.compiler = "gcc"
        settings.compiler.version = "4.8"
        check('-G "MinGW Makefiles" -DCMAKE_BUILD_TYPE="Debug" -DCONAN_EXPORTED="1" '
              '-DCONAN_COMPILER="gcc" -DCONAN_COMPILER_VERSION="4.8" -Wno-dev', "")
        settings.os = "Linux"
        settings.arch = "x86"
        check('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE="Debug"'
              ' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="gcc" '
              '-DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS="-m32" '
              '-DCONAN_SHARED_LINKER_FLAGS="-m32" -DCONAN_C_FLAGS="-m32" -Wno-dev',
              "")
        settings.arch = "x86_64"
        check('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE="Debug"'
              ' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="gcc" '
              '-DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS="-m64" '
              '-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" -Wno-dev',
              "")
        settings.os = "FreeBSD"
        settings.compiler = "clang"
        settings.compiler.version = "3.8"
        settings.arch = "x86"
        check('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE="Debug"'
              ' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="clang" '
              '-DCONAN_COMPILER_VERSION="3.8" -DCONAN_CXX_FLAGS="-m32" '
              '-DCONAN_SHARED_LINKER_FLAGS="-m32" -DCONAN_C_FLAGS="-m32" -Wno-dev',
              "")
        settings.arch = "x86_64"
        check('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE="Debug"'
              ' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="clang" '
              '-DCONAN_COMPILER_VERSION="3.8" -DCONAN_CXX_FLAGS="-m64" '
              '-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" -Wno-dev',
              "")
        settings.os = "SunOS"
        settings.compiler = "sun-cc"
        settings.compiler.version = "5.10"
        settings.arch = "x86"
        check('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE="Debug"'
              ' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="sun-cc" '
              '-DCONAN_COMPILER_VERSION="5.10" -DCONAN_CXX_FLAGS="-m32" '
              '-DCONAN_SHARED_LINKER_FLAGS="-m32" -DCONAN_C_FLAGS="-m32" -Wno-dev',
              "")
        settings.arch = "x86_64"
        check('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE="Debug"'
              ' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="sun-cc" '
              '-DCONAN_COMPILER_VERSION="5.10" -DCONAN_CXX_FLAGS="-m64" '
              '-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" -Wno-dev',
              "")
        settings.arch = "sparc"
        check('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE="Debug" -DCONAN_EXPORTED="1" '
              '-DCONAN_COMPILER="sun-cc" '
              '-DCONAN_COMPILER_VERSION="5.10" -DCONAN_CXX_FLAGS="-m32" '
              '-DCONAN_SHARED_LINKER_FLAGS="-m32" -DCONAN_C_FLAGS="-m32" -Wno-dev',
              "")
        settings.arch = "sparcv9"
        check('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE="Debug" -DCONAN_EXPORTED="1" '
              '-DCONAN_COMPILER="sun-cc" '
              '-DCONAN_COMPILER_VERSION="5.10" -DCONAN_CXX_FLAGS="-m64" '
              '-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" -Wno-dev',
              "")
        settings.compiler = "Visual Studio"
        settings.compiler.version = "12"
        settings.os = "WindowsStore"
        settings.os.version = "8.1"
        settings.build_type = "Debug"
        check('-G "Visual Studio 12 2013" -DCONAN_EXPORTED="1" '
              '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev',
              "--config Debug")
        settings.os.version = "10.0"
        check('-G "Visual Studio 12 2013" -DCONAN_EXPORTED="1" '
              '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev',
              "--config Debug")

    def deleted_os_test(self):
        # A settings.yml with only a subset of fields must still work.
        partial_settings = """
os: [Linux]
arch: [x86_64]
compiler:
    gcc:
        version: ["4.9"]
build_type: [ Release]
"""
        settings = Settings.loads(partial_settings)
        settings.os = "Linux"
        settings.compiler = "gcc"
        settings.compiler.version = "4.9"
        settings.arch = "x86_64"
        conan_file = ConanFileMock()
        conan_file.settings = settings
        cmake = CMake(conan_file)
        cross = "-DCMAKE_SYSTEM_NAME=\"Linux\" -DCMAKE_SYSROOT=\"/path/to/sysroot\" " if platform.system() != "Linux" else ""
        self.assertEqual('-G "Unix Makefiles" %s-DCONAN_EXPORTED="1" -DCONAN_COMPILER="gcc" '
                         '-DCONAN_COMPILER_VERSION="4.9" -DCONAN_CXX_FLAGS="-m64" '
                         '-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" -Wno-dev'
                         % cross,
                         cmake.command_line)

    def test_sysroot(self):
        settings = Settings.loads(default_settings_yml)
        conan_file = ConanFileMock()
        conan_file.settings = settings
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "12"
        settings.arch = "x86"
        settings.os = "Windows"
        cmake = CMake(conan_file)
        # NOTE(review): this conditional expression is a no-op on non-Windows
        # hosts (the assert only runs when platform.system() == "Windows");
        # kept as-is to preserve behavior.
        self.assertNotIn("-DCMAKE_SYSROOT=", cmake.flags) if platform.system() == "Windows" else ""

        # Now activate cross build and check sysroot
        with(tools.environment_append({"CONAN_CMAKE_SYSTEM_NAME": "Android"})):
            cmake = CMake(conan_file)
            self.assertEquals(cmake.definitions["CMAKE_SYSROOT"], "/path/to/sysroot")

    def test_deprecated_behaviour(self):
        """"Remove when deprecate the old settings parameter to CMake and
        conanfile to configure/build/test"""
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        conan_file = ConanFileMock()
        conan_file.settings = settings
        with self.assertRaises(ConanException):
            CMake(settings)

    def convenient_functions_test(self):
        # End-to-end checks of configure()/build()/test() command assembly,
        # including argument escaping differences between win32 and POSIX.
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "12"
        settings.compiler.runtime = "MDd"
        settings.arch = "x86"
        settings.build_type = None

        if sys.platform == 'win32':
            dot_dir = "."
            tempdir = self.tempdir
        else:
            dot_dir = "'.'"
            tempdir = "'" + self.tempdir + "'"

        conan_file = ConanFileMock()
        conan_file.settings = settings
        cmake = CMake(conan_file)

        cross = "-DCMAKE_SYSTEM_NAME=\"Windows\" -DCMAKE_SYSROOT=\"/path/to/sysroot\" " if platform.system() != "Windows" else ""
        target_test = CMakeTest.scape('--target RUN_TESTS')

        cmake.configure()
        cores = '-DCONAN_CXX_FLAGS="/MP{0}" -DCONAN_C_FLAGS="/MP{0}" '.format(tools.cpu_count())
        self.assertEqual('cd {0} && cmake -G "Visual Studio 12 2013" -DCONAN_LINK_RUNTIME="/MDd" {1}-DCONAN_EXPORTED="1"'
                         ' -DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" {2}'
                         '-Wno-dev {0}'.format(dot_dir, cross, cores),
                         conan_file.command)

        cmake.build()
        self.assertEqual('cmake --build %s %s'
                         % (dot_dir, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        cmake.test()
        self.assertEqual('cmake --build %s %s %s'
                         % (dot_dir, target_test, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        settings.build_type = "Debug"
        cmake = CMake(conan_file)
        cmake.build()
        self.assertEqual('cmake --build %s --config Debug %s'
                         % (dot_dir, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        cmake.test()
        self.assertEqual('cmake --build %s --config Debug %s %s'
                         % (dot_dir, target_test, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        cmake.configure(source_dir="/source", build_dir=self.tempdir,
                        args=['--foo "bar"'], defs={"SHARED": True})
        if sys.platform == 'win32':
            escaped_args = r'"--foo \"bar\"" -DSHARED="True" /source'
        else:
            escaped_args = "'--foo \"bar\"' -DSHARED=\"True\" '/source'"
        self.assertEqual('cd %s && cmake -G "Visual Studio 12 2013" -DCONAN_LINK_RUNTIME="/MDd" %s-DCONAN_EXPORTED="1" '
                         '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" %s'
                         '-Wno-dev %s' % (tempdir, cross, cores, escaped_args),
                         conan_file.command)

        cmake.build(args=["--bar 'foo'"], target="install")
        if sys.platform == 'win32':
            escaped_args = '--target install "--bar \'foo\'"'
        else:
            escaped_args = r"'--target' 'install' '--bar '\''foo'\'''"
        self.assertEqual('cmake --build %s --config Debug %s %s'
                         % (tempdir, escaped_args, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        cmake.test(args=["--bar 'foo'"])
        if sys.platform == 'win32':
            escaped_args = '%s "--bar \'foo\'"' % target_test
        else:
            escaped_args = r"%s '--bar '\''foo'\'''" % target_test
        self.assertEqual('cmake --build %s --config Debug %s %s'
                         % (tempdir, escaped_args, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        settings.build_type = "Release"
        cmake = CMake(conan_file)
        cmake.build()
        self.assertEqual('cmake --build %s --config Release %s'
                         % (dot_dir, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        cmake.test()
        self.assertEqual('cmake --build %s --config Release %s %s'
                         % (dot_dir, target_test, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        cmake.build(build_dir=self.tempdir)
        self.assertEqual('cmake --build %s --config Release %s'
                         % (tempdir, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        cmake.test(build_dir=self.tempdir)
        self.assertEqual('cmake --build %s --config Release %s %s'
                         % (tempdir, target_test, (CMakeTest.scape('-- /m:%i' % cpu_count()))),
                         conan_file.command)

        # Single-config generator (gcc): parallelism uses -jN after --.
        settings.compiler = "gcc"
        settings.compiler.version = "5.4"
        cmake = CMake(conan_file)
        cmake.build()
        self.assertEqual('cmake --build %s'
                         % (CMakeTest.scape('. -- -j%i' % cpu_count())),
                         conan_file.command)

        cmake.test()
        self.assertEqual('cmake --build %s'
                         % (CMakeTest.scape('. --target test -- -j%i' % cpu_count())),
                         conan_file.command)

        cmake.build(args=['foo', '--', 'bar'])
        self.assertEqual('cmake --build %s'
                         % (CMakeTest.scape('. foo -- bar -j%i' % cpu_count())),
                         conan_file.command)

        cmake.test(args=['foo', '--', 'bar'])
        self.assertEqual('cmake --build %s'
                         % (CMakeTest.scape('. --target test foo -- bar -j%i' % cpu_count())),
                         conan_file.command)

        # parallel=False disables the -j/-m flags entirely.
        cmake = CMake(conan_file, parallel=False)
        cmake.build()
        self.assertEqual('cmake --build %s' % CMakeTest.scape('.'), conan_file.command)

        cmake.test()
        self.assertEqual('cmake --build %s' % CMakeTest.scape('. --target test'),
                         conan_file.command)

    def test_run_tests(self):
        # The test target name depends on the generator: RUN_TESTS for VS,
        # "test" for Ninja/NMake makefile generators.
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "12"
        settings.compiler.runtime = "MDd"
        settings.arch = "x86"
        settings.build_type = None

        conan_file = ConanFileMock()
        conan_file.settings = settings
        cmake = CMake(conan_file)
        cmake.test()
        self.assertIn('cmake --build %s'
                      % CMakeTest.scape('. --target RUN_TESTS -- /m:%i' % cpu_count()),
                      conan_file.command)

        cmake.generator = "Ninja Makefiles"
        cmake.test()
        self.assertEqual('cmake --build %s'
                         % CMakeTest.scape('. --target test -- -j%i' % cpu_count()),
                         conan_file.command)

        cmake.generator = "NMake Makefiles"
        cmake.test()
        self.assertEqual('cmake --build %s'
                         % CMakeTest.scape('. --target test -- -j%i' % cpu_count()),
                         conan_file.command)

    def test_clean_sh_path(self):
        # MinGW generators must strip sh.exe from PATH; VS must not. Only
        # meaningful on Windows hosts.
        if platform.system() != "Windows":
            return
        os.environ["PATH"] = os.environ.get("PATH", "") + os.pathsep + self.tempdir
        save(os.path.join(self.tempdir, "sh.exe"), "Fake sh")
        conanfile = ConanFileMock()
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "12"
        settings.arch = "x86"
        conanfile.settings = settings

        cmake = CMake(conanfile)
        cmake.configure()
        self.assertIn(self.tempdir, conanfile.path)

        cmake.generator = "MinGW Makefiles"
        cmake.configure()
        self.assertNotIn(self.tempdir, conanfile.path)

        # Automatic gcc
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "gcc"
        settings.compiler.version = "5.4"
        settings.arch = "x86"
        conanfile.settings = settings

        cmake = CMake(conanfile)
        cmake.configure()
        self.assertNotIn(self.tempdir, conanfile.path)

    def test_shared(self):
        # BUILD_SHARED_LIBS tracks the conanfile "shared" option tri-state.
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "12"
        settings.arch = "x86"
        settings.os = "Windows"

        conan_file = ConanFileMock(shared=True)
        conan_file.settings = settings
        cmake = CMake(conan_file)
        self.assertEquals(cmake.definitions["BUILD_SHARED_LIBS"], "ON")

        conan_file = ConanFileMock(shared=False)
        conan_file.settings = settings
        cmake = CMake(conan_file)
        self.assertEquals(cmake.definitions["BUILD_SHARED_LIBS"], "OFF")

        conan_file = ConanFileMock(shared=None)
        conan_file.settings = settings
        cmake = CMake(conan_file)
        self.assertNotIn("BUILD_SHARED_LIBS", cmake.definitions)

    def test_verbose(self):
        # The verbose property and the CMAKE_VERBOSE_MAKEFILE definition must
        # stay in sync in both directions.
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "12"
        settings.arch = "x86"

        conan_file = ConanFileMock()
        conan_file.settings = settings
        cmake = CMake(conan_file)

        self.assertNotIn("CMAKE_VERBOSE_MAKEFILE", cmake.definitions)

        cmake.verbose = True
        self.assertEquals(cmake.definitions["CMAKE_VERBOSE_MAKEFILE"], "ON")

        cmake.verbose = False
        self.assertEquals(cmake.definitions["CMAKE_VERBOSE_MAKEFILE"], "OFF")

        cmake.definitions["CMAKE_VERBOSE_MAKEFILE"] = True
        self.assertTrue(cmake.verbose)

        cmake.definitions["CMAKE_VERBOSE_MAKEFILE"] = False
        self.assertFalse(cmake.verbose)

        del cmake.definitions["CMAKE_VERBOSE_MAKEFILE"]
        self.assertFalse(cmake.verbose)

    def set_toolset_test(self):
        # Explicit toolset parameter wins over settings; the CONAN_CMAKE_TOOLSET
        # env var is deprecated and no longer honored.
        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "15"
        settings.arch = "x86"
        settings.compiler.toolset = "v140"  # Will be overwritten by parameter

        conan_file = ConanFileMock()
        conan_file.settings = settings

        cmake = CMake(conan_file, toolset="v141")
        self.assertIn('-T "v141"', cmake.command_line)

        # DEPRECATED VARIABLE, NOT MODIFY ANYMORE THE TOOLSET
        with tools.environment_append({"CONAN_CMAKE_TOOLSET": "v141"}):
            cmake = CMake(conan_file)
            self.assertNotIn('-T "v141"', cmake.command_line)

        settings = Settings.loads(default_settings_yml)
        settings.os = "Windows"
        settings.compiler = "Visual Studio"
        settings.compiler.version = "15"
        settings.arch = "x86"
        settings.compiler.toolset = "v140"

        conan_file = ConanFileMock()
        conan_file.settings = settings
        cmake = CMake(conan_file)
        self.assertIn('-T "v140"', cmake.command_line)

    @staticmethod
    def scape(args):
        # Quote each whitespace-separated token like the platform shell would.
        pattern = "%s" if sys.platform == "win32" else r"'%s'"
        return ' '.join(pattern % i for i in args.split())


class ConanFileMock(ConanFile):
    """Minimal ConanFile stand-in that records the last run() command/PATH."""

    def __init__(self, shared=None):
        self.command = None
        self.path = None
        self.conanfile_directory = "."
        self.source_folder = self.build_folder = "."
        self.settings = None
        self.options = Options(PackageOptions.loads(""))
        self.deps_cpp_info = namedtuple("deps_cpp_info", "sysroot")("/path/to/sysroot")
        self.output = TestBufferConanOutput()
        if shared is not None:
            self.options = namedtuple("options", "shared")(shared)

    def run(self, command):
        # Capture instead of executing, so tests can assert on the command.
        self.command = command
        self.path = os.environ["PATH"]
""" Master configuration file for Evennia. NOTE: NO MODIFICATIONS SHOULD BE MADE TO THIS FILE! All settings changes should be done by copy-pasting the variable and its value to game/settings.py. An empty game/settings.py can be auto-generated by running game/manage.py without any arguments. Hint: Don't copy&paste over more from this file than you actually want to change. Anything you don't copy&paste will thus retain its default value - which may change as Evennia is developed. This way you can always be sure of what you have changed and what is default behaviour. """ import os ###################################################################### # Evennia base server config ###################################################################### # This is the name of your game. Make it catchy! SERVERNAME = "Evennia" # Activate telnet service TELNET_ENABLED = True # A list of ports the Evennia telnet server listens on # Can be one or many. TELNET_PORTS = [4000] # Interface addresses to listen to. If 0.0.0.0, listen to all. TELNET_INTERFACES = ['0.0.0.0'] # OOB (out-of-band) telnet communication allows Evennia to communicate # special commands and data with enabled Telnet clients. This is used # to create custom client interfaces over a telnet connection. To make # full use of OOB, you need to prepare functions to handle the data # server-side (see OOB_FUNC_MODULE). TELNET_ENABLED is required for this # to work. TELNET_OOB_ENABLED = False # OBS - currently not fully implemented - do not use! # Start the evennia django+twisted webserver so you can # browse the evennia website and the admin interface # (Obs - further web configuration can be found below # in the section 'Config for Django web features') WEBSERVER_ENABLED = True # This is a security setting protecting against host poisoning # attacks. It defaults to allowing all. In production, make # sure to change this to your actual host addresses/IPs. 
ALLOWED_HOSTS = ["*"] # A list of ports the Evennia webserver listens on WEBSERVER_PORTS = [8000] # Interface addresses to listen to. If 0.0.0.0, listen to all. WEBSERVER_INTERFACES = ['0.0.0.0'] # IP addresses that may talk to the server in a reverse proxy configuration, # like NginX. UPSTREAM_IPS = ['127.0.0.1'] # Start the evennia ajax client on /webclient # (the webserver must also be running) WEBCLIENT_ENABLED = True # Activate SSH protocol (SecureShell) SSH_ENABLED = False # Ports to use for SSH SSH_PORTS = [8022] # Interface addresses to listen to. If 0.0.0.0, listen to all. SSH_INTERFACES = ['0.0.0.0'] # Actiave SSL protocol (SecureSocketLibrary) SSL_ENABLED = False # Ports to use for SSL SSL_PORTS = [4001] # Interface addresses to listen to. If 0.0.0.0, listen to all. SSL_INTERFACES = ['0.0.0.0'] # The path that contains this settings.py file (no trailing slash). BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Path to the src directory containing the bulk of the codebase's code. SRC_DIR = os.path.join(BASE_PATH, 'src') # Path to the game directory (containing the database file if using sqlite). GAME_DIR = os.path.join(BASE_PATH, 'game') # Place to put log files LOG_DIR = os.path.join(GAME_DIR, 'logs') SERVER_LOG_FILE = os.path.join(LOG_DIR, 'server.log') PORTAL_LOG_FILE = os.path.join(LOG_DIR, 'portal.log') HTTP_LOG_FILE = os.path.join(LOG_DIR, 'http_requests.log') # Rotate log files when server and/or portal stops. This will keep log file sizes down. # Turn off to get ever growing log files and never loose log info. CYCLE_LOGFILES = True # Local time zone for this installation. All choices can be found here: # http://www.postgresql.org/docs/8.0/interactive/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE TIME_ZONE = 'UTC' # Authentication backends. This is the code used to authenticate a user. AUTHENTICATION_BACKENDS = ('src.web.backends.CaseInsensitiveModelBackend',) # Language code for this installation. 
All choices can be found here: # http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes LANGUAGE_CODE = 'en-us' # Should the default MUX help files be imported? This might be # interesting to developers for reference, but is frustrating to users # since it creates a lot of help entries that has nothing to do # with what is actually available in the game. IMPORT_MUX_HELP = False # How long time (in seconds) a user may idle before being logged # out. This can be set as big as desired. A user may avoid being # thrown off by sending the empty system command 'idle' to the server # at regular intervals. Set <=0 to deactivate idle timout completely. IDLE_TIMEOUT = 3600 # The idle command can be sent to keep your session active without actually # having to spam normal commands regularly. It gives no feedback, only updates # the idle timer. IDLE_COMMAND = "idle" # The set of encodings tried. A Player object may set an attribute "encoding" on # itself to match the client used. If not set, or wrong encoding is # given, this list is tried, in order, aborting on the first match. # Add sets for languages/regions your players are likely to use. # (see http://en.wikipedia.org/wiki/Character_encoding) ENCODINGS = ["utf-8", "latin-1", "ISO-8859-1"] # The game server opens an AMP port so that the portal can # communicate with it. This is an internal functionality of Evennia, usually # operating between two processes on the same machine. You usually don't need to # change this unless you cannot use the default AMP port/host for whatever reason. AMP_HOST = 'localhost' AMP_PORT = 5000 AMP_INTERFACE = '127.0.0.1' # Caching speeds up all forms of database access, often considerably. There # are (currently) only two settings, "local" or None, the latter of which turns # off all caching completely. Local caching stores data in the process. 
It's very # fast but will go out of sync if more than one process writes to the database (such # as when using procpool or an extensice web precense). GAME_CACHE_TYPE = "local" # Attributes on objects are cached aggressively for speed. If the number of # objects is large (and their attributes are often accessed) this can use up a lot of # memory. So every now and then Evennia checks the size of this cache and resets # it if it's too big. This variable sets the maximum size (in MB). ATTRIBUTE_CACHE_MAXSIZE = 100 ###################################################################### # Evennia Database config ###################################################################### # Database config syntax for Django 1.2+. # ENGINE - path to the the database backend. Possible choices are: # 'django.db.backends.sqlite3', (default) # 'django.db.backends.mysql', # 'django.db.backends.'postgresql_psycopg2' (see Issue 241), # 'django.db.backends.oracle' (untested). # NAME - database name, or path to the db file for sqlite3 # USER - db admin (unused in sqlite3) # PASSWORD - db admin password (unused in sqlite3) # HOST - empty string is localhost (unused in sqlite3) # PORT - empty string defaults to localhost (unused in sqlite3) DATABASES = { 'default':{ 'ENGINE':'django.db.backends.sqlite3', 'NAME':os.path.join(GAME_DIR, 'evennia.db3'), 'USER':'', 'PASSWORD':'', 'HOST':'', 'PORT':'' }} # Engine Config style for Django versions < 1.2 only. See above. DATABASE_ENGINE = 'sqlite3' DATABASE_NAME = os.path.join(GAME_DIR, 'evennia.db3') DATABASE_USER = '' DATABASE_PASSWORD = '' DATABASE_HOST = '' DATABASE_PORT = '' ###################################################################### # Evennia pluggable modules ###################################################################### # Plugin modules extend Evennia in various ways. In the cases with no # existing default, there are examples of many of these modules # in game/gamesrc/conf/examples. # The command parser module to use. 
######################################################################
# Evennia pluggable modules
######################################################################

# The command parser module.
COMMAND_PARSER = "src.commands.cmdparser.cmdparser"
# Handler producing output for failed object.search() lookups.
SEARCH_AT_RESULT = "src.commands.cmdparser.at_search_result"
# Parser separating multiple same-named object matches without dbrefs.
SEARCH_AT_MULTIMATCH_INPUT = "src.commands.cmdparser.at_multimatch_input"
# Module holding the connection-screen text strings.
CONNECTION_SCREEN_MODULE = "src.commands.connection_screen"
# Optional module with at_initial_setup(); fails quietly if missing.
AT_INITIAL_SETUP_HOOK_MODULE = ""
# Module with at_server_start()/at_server_reload()/at_server_stop() hooks.
AT_SERVER_STARTSTOP_MODULE = ""
# Modules with start_plugin_services(application), called last in startup
# for the Server and the Portal respectively.
SERVER_SERVICES_PLUGIN_MODULES = []
PORTAL_SERVICES_PLUGIN_MODULES = []
# MSSP meta data module, used by MUD crawlers.
MSSP_META_MODULE = ""
# Server-side out-of-band functions (needs OOB_ENABLED; not yet available).
OOB_FUNC_MODULE = ""
# Modules whose callables become available as lock functions.
LOCK_FUNC_MODULES = ("src.locks.lockfuncs",)

######################################################################
# Default command sets
######################################################################
# Except for the unloggedin set, changing these only affects newly
# created characters/objects, not those already in play.

CMDSET_UNLOGGEDIN = "src.commands.default.cmdset_unloggedin.UnloggedinCmdSet"
CMDSET_CHARACTER = "src.commands.default.cmdset_character.CharacterCmdSet"
CMDSET_PLAYER = "src.commands.default.cmdset_player.PlayerCmdSet"

######################################################################
# Typeclasses
######################################################################

# Search paths (relative evennia's root), tried in order, for resolving
# relative typeclass paths.
OBJECT_TYPECLASS_PATHS = ["game.gamesrc.objects", "game.gamesrc.objects.examples", "contrib"]
SCRIPT_TYPECLASS_PATHS = ["game.gamesrc.scripts", "game.gamesrc.scripts.examples", "contrib"]
PLAYER_TYPECLASS_PATHS = ["game.gamesrc.objects", "contrib"]

# Fallback typeclasses.
BASE_PLAYER_TYPECLASS = "src.players.player.Player"
BASE_OBJECT_TYPECLASS = "src.objects.objects.Object"
BASE_CHARACTER_TYPECLASS = "src.objects.objects.Character"
BASE_ROOM_TYPECLASS = "src.objects.objects.Room"
BASE_EXIT_TYPECLASS = "src.objects.objects.Exit"
BASE_SCRIPT_TYPECLASS = "src.scripts.scripts.DoNothing"

# Home location dbref for new characters (default is Limbo, #2).
CHARACTER_DEFAULT_HOME = "#2"

######################################################################
# Batch processors
######################################################################

# Search paths for .ev and/or .py batch scripts.
BASE_BATCHPROCESS_PATHS = ['game.gamesrc.world', 'contrib']
######################################################################
# Game Time setup
######################################################################

# Speed of game time relative to real time (>1 faster, <1 slower).
TIME_FACTOR = 2.0
# Smallest unit of game time, in seconds (minimum is 1s).
TIME_TICK = 1.0
# In-game calendar units; reinterpret as desired for your world.
TIME_MIN_PER_HOUR = 60
TIME_HOUR_PER_DAY = 24
TIME_DAY_PER_WEEK = 7
TIME_WEEK_PER_MONTH = 4
TIME_MONTH_PER_YEAR = 12

######################################################################
# Default Player setup and access
######################################################################

# 0: one session, one char (new session kicks the old);
# 1: many sessions, one char (all sessions share data);
# 2: many sessions, many chars (up to MAX_NR_CHARACTERS).
MULTISESSION_MODE = 0
# Max characters for MULTISESSION_MODE 2; forced to 1 in modes 0 and 1.
MAX_NR_CHARACTERS = 1

# Access hierarchy in climbing order; a higher permission includes all
# levels below it (used by the perm()/pperm() lock functions).
PERMISSION_HIERARCHY = ("Players","PlayerHelpers","Builders", "Wizards", "Immortals")
# Permission given to all new players.
PERMISSION_PLAYER_DEFAULT = "Players"

######################################################################
# In-game Channels created from server start
######################################################################
# Each channel is (name, aliases, description, lockstring); aliases may
# be a tuple.

CHANNEL_PUBLIC = ("Public", ('ooc',), 'Public discussion',
                  "control:perm(Wizards);listen:all();send:all()")
CHANNEL_MUDINFO = ("MUDinfo", '', 'Informative messages',
                   "control:perm(Immortals);listen:perm(Immortals);send:false()")
CHANNEL_CONNECTINFO = ("MUDconnections", '', 'Connection log',
                       "control:perm(Immortals);listen:perm(Wizards);send:false()")

######################################################################
# External Channel connections
######################################################################
# These work with just an internet connection; IRC and IMC2 require
# twisted.words.

# Bridge Evennia channels to IRC; enables @irc2chan in-game.
# Make sure the IRC network allows bots.
IRC_ENABLED = False

# IMC2 inter-MUD communication; register your MUD on the network first
# (http://www.mudbytes.net/imc2-intermud-join-network). Enables @imc2chan.
IMC2_ENABLED = False
IMC2_NETWORK = "server01.mudbytes.net"
IMC2_PORT = 5000  # this is the imc2 port, not on localhost
IMC2_CLIENT_PWD = ""
IMC2_SERVER_PWD = ""

# RSS feed -> in-game channel bridge (@rss2chan); requires the
# python-feedparser package.
RSS_ENABLED = False
RSS_UPDATE_INTERVAL = 60 * 10  # 10 minutes

######################################################################
# Django web features
######################################################################

# When False, show a plain error page and email ADMINS the traceback;
# when True, show detailed tracebacks in the browser. True leaks memory,
# so keep it off on a production server!
DEBUG = False
# While true, show "pretty" error messages for template syntax errors.
TEMPLATE_DEBUG = DEBUG
# Emails go to these people when DEBUG is False; leave empty to disable.
ADMINS = ()  # e.g. (('Your Name', 'your_email@domain.com'),)
# These get broken-link notifications when SEND_BROKEN_LINK_EMAILS is True.
MANAGERS = ADMINS

# Absolute path to the media directory (no trailing slash).
MEDIA_ROOT = os.path.join(SRC_DIR, 'web', 'media')
# Django admin media; created and linked by Evennia on first start if the
# target directory does not exist.
ADMIN_MEDIA_ROOT = os.path.join(MEDIA_ROOT, 'admin')

# Django sites framework id; a half-used dependency, safe to disregard.
SITE_ID = 1

# Session cookie settings.
SESSION_COOKIE_AGE = 1209600  # 2 weeks, in seconds
SESSION_COOKIE_DOMAIN = None
SESSION_COOKIE_NAME = 'sessionid'
SESSION_EXPIRE_AT_BROWSER_CLOSE = False

# Internationalization machinery (off saves some work for Django).
USE_I18N = False
LOCALE_PATHS = ["../locale/"]

# Only needed for tests with Django's own dev webserver; Evennia
# normally runs its own server.
SERVE_MEDIA = False

# Master urlconf containing all of the application sub-branches.
ROOT_URLCONF = 'src.web.urls'

# Auth redirect targets.
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/accounts/login'
LOGOUT_URL = '/accounts/login'

# URL serving MEDIA_ROOT; Django 1.4+ looks for admin files under
# STATIC_URL/admin.
MEDIA_URL = '/media/'
STATIC_URL = '/media/'

# Currently selected web template (a directory under webtemplates).
ACTIVE_TEMPLATE = 'prosimii'

# Template locations for the website and the admin site.
TEMPLATE_DIRS = (
    os.path.join(SRC_DIR, "web", "templates", ACTIVE_TEMPLATE),
    os.path.join(SRC_DIR, "web", "templates"),)

# Template source loaders.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',)

# Semi-transparent extensions to Django's functionality; see
# http://www.djangoproject.com/documentation/middleware/.
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',  # 1.4?
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.middleware.doc.XViewMiddleware',
    'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',)

# Context variables, generally for the template system.
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.i18n',
    'django.core.context_processors.request',
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.media',
    'django.core.context_processors.debug',
    'src.web.utils.general_context.general_context',)

######################################################################
# Evennia components
######################################################################

# Global and Evennia-specific apps; ties everything together so app
# models can be referenced and DB syncs performed.
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sites',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.admin',
    'django.contrib.admindocs',
    'django.contrib.flatpages',
    'src.server',
    'src.players',
    'src.objects',
    'src.comms',
    'src.help',
    'src.scripts',
    'src.web.news',
    'src.web.website',)

# Profile model extending the User object; usually left unchanged.
AUTH_PROFILE_MODULE = "players.PlayerDB"

# Custom test runner restricted to Evennia-specific apps.
TEST_RUNNER = 'src.utils.test_utils.EvenniaTestSuiteRunner'

######################################################################
# Django extensions
######################################################################

# Useful third-party tools, enabled only when importable.
try:
    import django_extensions
    INSTALLED_APPS = INSTALLED_APPS + ('django_extensions',)
except ImportError:
    pass

# South handles automatic database schema migrations on evennia updates.
try:
    import south
    INSTALLED_APPS = INSTALLED_APPS + ('south',)
except ImportError:
    pass

#######################################################################
# SECRET_KEY
#######################################################################

# Salt for Django's cryptographic hashing; a fallback for the randomly
# seeded SECRET_KEY in settings.py. If copying from here, change it!
SECRET_KEY = 'changeme!(*#&*($&*(#*(&SDFKJJKLS*(@#KJAS'
# 6.00 Problem Set 4
#
# Caesar Cipher
#
# Name: Shouvik Roy
# Collaborators: None
# Time: 6 hours 30 minutes
#
# NOTE(review): ported from Python 2 to Python 3, keeping the logic
# unchanged: print statements -> print(); dict.has_key -> `in`;
# types.IntType (via `from types import *`, removed) -> int;
# `except TypeError, e` -> `except TypeError`; open(name, 'r', 0)
# -> open(name, 'r') since unbuffered text mode is invalid in Python 3.
# File handles are now closed via `with` (they leaked before).

import string
import random

WORDLIST_FILENAME = "words.txt"


# -----------------------------------
# Helper code
# (you don't need to understand this helper code)
def load_words():
    """Return a list of valid words (lowercase strings) read from
    WORDLIST_FILENAME. May take a while for a large word list."""
    print("Loading word list from file...")
    with open(WORDLIST_FILENAME, 'r') as in_file:
        # The whole list sits on the first line, space-separated.
        line = in_file.readline()
    wordlist = line.split()
    print(" ", len(wordlist), "words loaded.")
    return wordlist

wordlist = load_words()


def is_word(wordlist, word):
    """Return True if `word` is in `wordlist`, ignoring case and
    surrounding punctuation.

    wordlist: list of words in the dictionary.
    word: a possible word.
    """
    word = word.lower()
    # Strip punctuation from both ends only (interior chars untouched).
    word = word.strip(" !@#$%^&*()-_+={}[]|\\:;'<>?,./\"")
    return word in wordlist


def random_word(wordlist):
    """Return a word chosen from `wordlist` at random."""
    return random.choice(wordlist)


def random_string(wordlist, n):
    """Return a string of n random words from `wordlist`, space-separated."""
    return " ".join([random_word(wordlist) for _ in range(n)])


def random_scrambled(wordlist, n):
    """Return an n-word random string encrypted with a random shift
    starting at each word boundary.

    NOTE: only works once apply_shifts is implemented.
    """
    s = random_string(wordlist, n) + " "
    # i==0 checks s[-1] (the appended trailing space), so position 0 is
    # always included; every other shift starts right after a space.
    shifts = [(i, random.randint(0, 26)) for i in range(len(s)) if s[i - 1] == ' ']
    return apply_shifts(s, shifts)[:-1]


def get_fable_string():
    """Return the encrypted fable text read from fable.txt."""
    with open("fable.txt", "r") as f:
        return f.read()


# (end of helper code)
# -----------------------------------

#
# Problem 1: Encryption
#
def build_coder(shift):
    """Return a dict applying a Caesar cipher of `shift` to letters.

    The space counts as the 27th letter; lowercase and uppercase
    alphabets rotate independently. Non-letter characters are not
    mapped and so pass through apply_coder unchanged.

    shift: -27 < int < 27
    returns: dict of char -> shifted char
    """
    assert isinstance(shift, int) and -27 < shift < 27
    small_rotator = string.ascii_lowercase + " "
    cap_rotator = string.ascii_uppercase + " "
    translator = {}
    # Space occurs in both rotators; the first (lowercase) mapping wins
    # because already-mapped characters are skipped below.
    for char in small_rotator:
        if char not in translator:
            translator[char] = small_rotator[(small_rotator.index(char) + shift) % len(small_rotator)]
    for char in cap_rotator:
        if char not in translator:
            translator[char] = cap_rotator[(cap_rotator.index(char) + shift) % len(cap_rotator)]
    return translator


def build_encoder(shift):
    """Return an encoding dict for `shift` (0 <= int < 27).

    HINT: uses build_coder.
    """
    assert isinstance(shift, int) and 0 <= shift < 27
    return build_coder(shift)


def build_decoder(shift):
    """Return a decoding dict: the inverse of build_encoder(shift).

    shift: 0 <= int < 27
    """
    assert isinstance(shift, int) and 0 <= shift < 27
    return build_coder(-shift)


def apply_coder(text, coder):
    """Apply `coder` to `text`; characters without a mapping pass through.

    text: string
    coder: dict mapping characters to shifted characters
    returns: the mapped text
    """
    return "".join(coder.get(char, char) for char in text)


def apply_shift(text, shift):
    """Return `text` Caesar-shifted by `shift`.

    Space is the 27th letter, so spaces may become letters and vice
    versa; case is preserved and punctuation is untouched. Positive
    shifts encode, negative shifts decode.
    """
    if shift > 0:
        return apply_coder(text, build_encoder(shift))
    return apply_coder(text, build_decoder(-shift))


#
# Problem 2: Codebreaking.
#
def find_best_shift(wordlist, text):
    """Return the shift in 1..26 whose decoding of `text` yields the
    greatest number of valid words.

    text: string
    returns: int in [0, 27)
    """
    best_shift = 0
    best_count = 0
    for shift_key in range(1, 27):
        plaintext = apply_coder(text, build_decoder(shift_key))
        valid_count = sum(1 for word in plaintext.split(" ") if is_word(wordlist, word))
        if valid_count > best_count:
            best_count = valid_count
            best_shift = shift_key
    return best_shift


#
# Problem 3: Multi-level encryption.
#
def apply_shifts(text, shifts):
    """Apply a sequence of layered shifts to `text`.

    shifts: list of (position, shift) tuples; each shift applies from
    its starting position through the end of the (current) string.
    """
    cipher_text = text
    for position, offset in shifts:
        cipher_text = cipher_text[:position] + apply_shift(cipher_text[position:], offset)
    return cipher_text


#
# Problem 4: Multi-level decryption.
#
def find_best_shifts(wordlist, text):
    """Return a shift key (list of (position, shift) tuples) decoding
    `text` to words in `wordlist`, or None if there is no such key."""
    return find_best_shifts_rec(wordlist, text, 0)


def find_best_shifts_rec(wordlist, text, start):
    """Recursively find shifts that decode text[start:] word by word.

    For each candidate shift, collect the leading run of valid words,
    record (start, -shift), and recurse past those words; a recursion
    returning None raises TypeError on extend, which backtracks to the
    next shift. Returns None implicitly when no shift works.
    """
    if start > len(text):
        return []
    for shift_key in range(27):
        decoded_text = apply_coder(text[start:], build_decoder(shift_key))
        valid_words = []
        for word in decoded_text.split(" "):
            if is_word(wordlist, word):
                valid_words.append(word)
            else:
                # First invalid word ends this candidate prefix.
                break
        if len(valid_words) > 0:
            s = " ".join(valid_words)
            new_start = start + len(s) + 1
            result = [(start, -shift_key)]
            print(text[:start] + decoded_text, new_start)
            try:
                result.extend(find_best_shifts_rec(wordlist, text[:start] + decoded_text, new_start))
                return result
            except TypeError:
                # Recursion returned None: this shift was a false
                # positive, try the next one.
                continue
    # All shifts tried without finding a valid word run -> implicit None.


def decrypt_fable():
    """Decrypt the fable from get_fable_string() and return plain text."""
    fable = get_fable_string()
    shifts = find_best_shifts(wordlist, fable)
    return apply_shifts(fable, shifts)


decrypt_fable()

# What is the moral of the story?
#
# Though theoretical work is important, successful practical
# implementation based upon the theory is of utmost importance.
# -*- coding: utf-8 -*-
# Helpers for scraping stream-player pages: locate embedded JS/PHP
# players, RTMP urls, frames and refresh/redirect links.
# NOTE(review): Python 2 module (urllib.unquote / urlparse module).
# regexUtils.findall presumably wraps re.findall, returning a list of
# matches or a falsy value -- confirm against regexUtils.
import regexUtils
import re
import urllib
import urlparse


def findJS(data):
    """Find <script id=...>...<script src=...js> pairs in `data`.

    Returns a list of (id-value, js-url) tuples with twitter scripts
    filtered out, or None when nothing matches.
    """
    # The stream id may be named id/fid/ffid/... or ch.
    idName = '(?:f*id|ch)'
    jsName = '([^\"\']+?\.js[^\"\']*?)'
    # "scr'+'ipt" variants defeat naive obfuscation splitting.
    regex = "(?:java)?scr(?:'\+')?ipt.*?" + idName + "\s*=\s*[\"']([^\"']+)[\"'][^<]*</scr(?:'\+')?ipt\s*>[^<]*<scr(?:'\+')?ipt[^<]*src=[\"']" + jsName + "[\"']"
    jscript = regexUtils.findall(data, regex)
    if jscript:
        # Drop twitter widget scripts.
        jscript = filter(lambda x: x[1].find('twitter') == -1, jscript)
        return jscript
    return None


def findPHP(data, streamId):
    """Find a document.write()'d .php/.html player url in `data` and
    substitute `streamId` for the concatenated-in id variable.

    Returns the resolved url string, or None.
    """
    regex = "document.write\('.*?src=['\"]*(.*?.(?:php|html)[^&\"]*).*?['\" ]*.*?\)"
    php = regexUtils.findall(data, regex)
    if php:
        # Replace the "'+ id +'" splice with the actual stream id.
        return re.sub(r"\'\+\s*(?:[fc]*id|ch)\s*\+\'", "%s" % streamId,php[0])
    regex = "document.write\('.*?src=['\"]*(.*?(?:f*id|ch)\s*\+'\.html*).*?['\" ]*.*?\)"
    html = regexUtils.findall(data, regex)
    if html:
        return re.sub(r"\'\+\s*(?:f*id|ch)\s*\+\'", "%s" % streamId,html[0])
    return None


def findRTMP(url, data):
    """Extract an RTMP stream from a player page.

    url: page url, used to absolutize a relative swf url.
    data: page source.
    returns: [rtmp-url, playpath, swf-url] or None.
    """
    #if data.lower().find('rtmp') == -1:
    #    return None
    try:
        text = str(data)
    except:
        text = data
    #method 1
    #["'=](http://[^'" ]*.swf[^'" ]*file=([^&"']+)[^'" ]*&streamer=([^"'&]+))
    #streamer=([^&"]+).*?file=([^&"]+).*?src="([^"]+.swf)"
    # method 2
    #"([^"]+.swf\?.*?file=(rtmp[^&]+)&.*?id=([^&"]+)[^"]*)"
    # Building blocks: key/value separators and the captured value.
    sep1 = '[\'"&\? ]'
    sep2 = '(?:[\'"]\s*(?:,|\:)\s*[\'"]|=)'
    value = '([^\'"&]+)'
    # Method 1: streamer=<rtmp>, file=<playpath>.
    # Method 2: file=<rtmp>, id=<playpath>.
    method1 = True
    method2 = False
    # Chars of surrounding context searched for file/id/swf values.
    radius = 400
    playpath = ''
    swfUrl = ''
    rtmp = regexUtils.findall(text, sep1 + 'streamer' + sep2 + value)
    if not rtmp:
        tryMethod2 = regexUtils.findall(text, sep1 + 'file' + sep2 + value)
        if tryMethod2 and tryMethod2[0].startswith('rtmp'):
            method1 = False
            method2 = True
            rtmp = tryMethod2
    if rtmp:
        for r in rtmp:
            tmpRtmp = r.replace('/&','').replace('&','')
            idx = text.find(tmpRtmp)
            min_idx = 0
            max_idx = len(text) - 1
            # Clamp the context window around the match to the text.
            start = idx-radius
            if start < min_idx:
                start = min_idx
            end = idx+radius
            if end > max_idx:
                end = max_idx
            area = text[start:end]
            # Continue subsequent searches past this candidate.
            clipStart = idx+len(tmpRtmp)
            if clipStart < max_idx:
                text = text[clipStart:]
            if method1:
                playpath = regexUtils.findall(area, sep1 + 'file' + sep2 + value)
            if method2:
                playpath = regexUtils.findall(area, sep1 + 'id' + sep2 + value)
            if playpath:
                tmpRtmp = tmpRtmp + '/' + playpath[0]
            if playpath:
                # Locate the player swf: SWFObject ctor arg first, then
                # any .swf nearby, then anywhere in the page.
                swfUrl = regexUtils.findall(area, 'SWFObject\([\'"]([^\'"]+)[\'"]')
                if not swfUrl:
                    swfUrl = regexUtils.findall(area, sep1 + '([^\'"& ]+\.swf)')
                if not swfUrl:
                    swfUrl = regexUtils.findall(data, sep1 + '([^\'"& ]+\.swf)')
                if swfUrl:
                    finalSwfUrl = swfUrl[0]
                    if not finalSwfUrl.startswith('http'):
                        finalSwfUrl = urlparse.urljoin(url, finalSwfUrl)
                    # Default the rtmp port to 1935 when none is given.
                    regex = '://(.*?)/'
                    server = regexUtils.findall(tmpRtmp, regex)
                    if server:
                        if server[0].find(':') == -1:
                            tmpRtmp = tmpRtmp.replace(server[0], server[0] + ':1935')
                    return [tmpRtmp, playpath[0], finalSwfUrl]
    return None


def getHostName(url):
    """Return the host of `url` without a leading 'www.', or None."""
    scheme = urlparse.urlparse(url)
    if scheme:
        return scheme.netloc.replace('www.','')
    return None


def findFrames(data):
    """Return all (i)frame tag bodies in `data`, or None when absent."""
    if data.lower().find('frame') == -1:
        return None
    return regexUtils.findall(data, "(frame[^>]*)>")


def findContentRefreshLink(page, data):
    """Find a redirect target in `data`: meta refresh, window.location,
    a specific noresize frame, or a known full-page link pattern.

    page: current page url (base for relative links).
    returns: the target url string, or None.
    """
    # <meta http-equiv="refresh" content="0; url=...">
    regex = '0;\s*url=([^\'" ]+)'
    links = regexUtils.findall(data, regex)
    if links:
        return links[0]
    regex = 'window.location\s*=\s*[\'"]([^\'"]+)[\'"]'
    links = regexUtils.findall(data, regex)
    if links:
        return links[0]
    regex = 'frame\s*scrolling=\"auto\"\s*noresize\s*src\s*=\s*[\'"]([^\'"]+)[\'"]'
    links = regexUtils.findall(data, regex)
    if links:
        return links[0]
    #hd**ee.fv/cr**hd.fv/sp**ts4u.tv
    regex = '<a\s*href="([^"]+)"\s*target="_blank"><img\s*(?:src="[^"]+"\s*height="\d+"\s*width="\d+"\s*longdesc="[^"]+"|class="alignnone"\s*src="[^"]*"\s*alt="[^"]*"\s*width="\d\d\d"\s*height="\d\d\d")'
    links = regexUtils.findall(data, regex)
    if links:
        return urlparse.urljoin(urllib.unquote(page), links[0]).strip()
    return None


def findEmbedPHPLink(data):
    """Find an embedded player script url (...php?...), skipping known
    ad/localtime scripts. Returns the url or None."""
    regex = '<script type="text/javascript" src="((?![^"]+localtimes)(?![^"]+adcash)[^"]+\.php\?[^"]+)"\s*>\s*</script>'
    links = regexUtils.findall(data, regex)
    if links:
        return links[0]
    return None


def findVideoFrameLink(page, data):
    """Find the src of the main video (i)frame in `data`.

    A frame is considered the video player when both its width and
    height exceed 300 (or are '100%'); frames matching known ad/chat
    hosts are excluded via negative lookaheads. Falls back to
    CSS-styled frames, a full-page frameset, and a playStream() call.

    page: current page url (base for relative srcs).
    returns: absolute frame url, or None.
    """
    minheight=300
    minwidth=300
    frames = findFrames(data)
    if not frames:
        return None
    # Frames with an explicit height attribute, ad hosts excluded.
    iframes = regexUtils.findall(data, "(frame(?![^>]*cbox\.ws)(?![^>]*Publi)(?![^>]*dailymotion)(?![^>]*blacktvlive\.)(?![^>]*chat\d*\.\w+)(?![^>]*ad122m)(?![^>]*adshell)(?![^>]*capacanal)(?![^>]*waframedia)(?![^>]*Beba.tv/embed)(?![^>]*maxtags)(?![^>]*s/a1\.php)(?![^>]*right-sidebar)[^>]*\sheight\s*=\s*[\"']*([\%\d]+)(?:px)?[\"']*[^>]*>)")
    if iframes:
        for iframe in iframes:
            # '100%' always passes the size check.
            if iframe[1] == '100%':
                height = minheight+1
            else:
                height = int(iframe[1])
            if height > minheight:
                m = regexUtils.findall(iframe[0], "[\"' ]width\s*=\s*[\"']*(\d+[%]*)(?:px)?[\"']*")
                if m:
                    if m[0] == '100%':
                        width = minwidth+1
                    else:
                        width = int(m[0])
                    if width > minwidth:
                        m = regexUtils.findall(iframe[0], '[\'"\s]+(?:src|SRC)\s*=\s*["\']*\s*([^>"\' ]+)\s*[>"\']*')
                        if m:
                            if 'premiertv' in page:
                                page = page+'/'
                            return urlparse.urljoin(urllib.unquote(page), m[0]).strip()
    # Alternative 1: size given via CSS style (height:NNN;width:NNN).
    iframes = regexUtils.findall(data, "(frame(?![^>]*cbox\.ws)(?![^>]*capacanal)(?![^>]*dailymotion)[^>]*[\"; ]height:\s*(\d+)[^>]*>)")
    if iframes:
        for iframe in iframes:
            height = int(iframe[1])
            if height > minheight:
                m = regexUtils.findall(iframe[0], "[\"; ]width:\s*(\d+)")
                if m:
                    width = int(m[0])
                    if width > minwidth:
                        m = regexUtils.findall(iframe[0], '[\"; ](?:src|SRC)=["\']*\s*([^>"\' ]+)\s*[>"\']*')
                        if m:
                            return urlparse.urljoin(urllib.unquote(page), m[0]).strip()
    # Alternative 2 (Frameset): a full-size frameset's first frame.
    m = regexUtils.findall(data, '<(?:FRAMESET|frameset)[^>]+100%[^>]+>\s*<(?:FRAME|frame)[^>]+src="([^"]+)"')
    if m:
        return urlparse.urljoin(urllib.unquote(page), m[0]).strip()
    # Alternative 3: playStream('iframe', '<url>') javascript call.
    m = regexUtils.findall(data, r'playStream\(\'iframe\', \'[^\']*(https*:[^\']+)\'\)')
    if m:
        return urlparse.urljoin(urllib.unquote(page), m[0]).strip()
    return None
# -*- coding: utf-8 -*-
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the text plugin summary API."""

import glob
import os

import numpy as np
import tensorflow as tf

from tensorboard.compat import tf2
from tensorboard.compat.proto import summary_pb2
from tensorboard.plugins.text import metadata
from tensorboard.plugins.text import summary
from tensorboard.util import tensor_util

try:
    tf2.__version__  # Force lazy import to resolve
except ImportError:
    tf2 = None

try:
    tf.compat.v1.enable_eager_execution()
except AttributeError:
    # TF 2.0 doesn't have this symbol because eager is the default.
    pass


class SummaryBaseTest(object):
    """Shared test cases for every variant of the text summary API.

    Subclasses override ``text`` to produce a Summary proto through the
    particular API under test; these cases then check tags, metadata and
    string/bytes payload handling.
    """

    def text(self, *args, **kwargs):
        # Each concrete subclass routes this through the API it exercises.
        raise NotImplementedError()

    def test_tag(self):
        self.assertEqual("a", self.text("a", "foo").value[0].tag)
        self.assertEqual("a/b", self.text("a/b", "foo").value[0].tag)

    def test_metadata(self):
        pb = self.text("do", "A deer. A female deer.")
        summary_metadata = pb.value[0].metadata
        plugin_data = summary_metadata.plugin_data
        self.assertEqual(summary_metadata.summary_description, "")
        self.assertEqual(plugin_data.plugin_name, metadata.PLUGIN_NAME)
        content = summary_metadata.plugin_data.content
        # There's no content, so successfully parsing is fine.
        metadata.parse_plugin_metadata(content)

    def test_explicit_description(self):
        description = "A whole step above do."
        pb = self.text("re", "A drop of golden sun.", description=description)
        summary_metadata = pb.value[0].metadata
        self.assertEqual(summary_metadata.summary_description, description)
        plugin_data = summary_metadata.plugin_data
        self.assertEqual(plugin_data.plugin_name, metadata.PLUGIN_NAME)
        content = summary_metadata.plugin_data.content
        # There's no content, so successfully parsing is fine.
        metadata.parse_plugin_metadata(content)

    def test_bytes_value(self):
        pb = self.text("mi", b"A name\xe2\x80\xa6I call myself")
        value = tensor_util.make_ndarray(pb.value[0].tensor).item()
        self.assertIsInstance(value, bytes)
        self.assertEqual(b"A name\xe2\x80\xa6I call myself", value)

    def test_unicode_value(self):
        # Unicode input should round-trip through the tensor as UTF-8 bytes.
        pb = self.text("mi", "A name\u2026I call myself")
        value = tensor_util.make_ndarray(pb.value[0].tensor).item()
        self.assertIsInstance(value, bytes)
        self.assertEqual(b"A name\xe2\x80\xa6I call myself", value)

    def test_np_array_bytes_value(self):
        pb = self.text(
            "fa",
            np.array(
                [[b"A", b"long", b"long"], [b"way", b"to", b"run \xe2\x80\xbc"]]
            ),
        )
        values = tensor_util.make_ndarray(pb.value[0].tensor).tolist()
        self.assertEqual(
            [[b"A", b"long", b"long"], [b"way", b"to", b"run \xe2\x80\xbc"]],
            values,
        )
        # Check that all entries are byte strings.
        for vectors in values:
            for value in vectors:
                self.assertIsInstance(value, bytes)

    def test_np_array_unicode_value(self):
        pb = self.text(
            "fa",
            np.array([["A", "long", "long"], ["way", "to", "run \u203C"]]),
        )
        values = tensor_util.make_ndarray(pb.value[0].tensor).tolist()
        self.assertEqual(
            [[b"A", b"long", b"long"], [b"way", b"to", b"run \xe2\x80\xbc"]],
            values,
        )
        # Check that all entries are byte strings.
        for vectors in values:
            for value in vectors:
                self.assertIsInstance(value, bytes)

    def test_non_string_value(self):
        with self.assertRaisesRegex(TypeError, r"must be of type.*string"):
            self.text("la", np.array(range(42)))


class SummaryV1PbTest(SummaryBaseTest, tf.test.TestCase):
    """Exercises the V1 `summary.pb` (direct proto) API."""

    def text(self, *args, **kwargs):
        return summary.pb(*args, **kwargs)

    def test_tag(self):
        # V1 appends a "/text_summary" suffix to the user-supplied tag.
        self.assertEqual("a/text_summary", self.text("a", "foo").value[0].tag)
        self.assertEqual(
            "a/b/text_summary", self.text("a/b", "foo").value[0].tag
        )

    def test_non_string_value(self):
        # V1 reports a different error type/message than the base expectation.
        with self.assertRaisesRegex(
            ValueError, r"Expected binary or unicode string, got 0"
        ):
            self.text("la", np.array(range(42)))


class SummaryV1OpTest(SummaryBaseTest, tf.test.TestCase):
    """Exercises the V1 `summary.op` (graph op) API, run eagerly."""

    def text(self, *args, **kwargs):
        return summary_pb2.Summary.FromString(
            summary.op(*args, **kwargs).numpy()
        )

    def test_tag(self):
        self.assertEqual("a/text_summary", self.text("a", "foo").value[0].tag)
        self.assertEqual(
            "a/b/text_summary", self.text("a/b", "foo").value[0].tag
        )

    def test_scoped_tag(self):
        with tf.name_scope("scope"):
            self.assertEqual(
                "scope/a/text_summary", self.text("a", "foo").value[0].tag
            )


class SummaryV2PbTest(SummaryBaseTest, tf.test.TestCase):
    """Exercises the V2 `summary.text_pb` (direct proto) API."""

    def text(self, *args, **kwargs):
        return summary.text_pb(*args, **kwargs)


class SummaryV2OpTest(SummaryBaseTest, tf.test.TestCase):
    """Exercises the V2 `summary.text` op by writing a real event file and
    reading the summary back out of it."""

    def setUp(self):
        super(SummaryV2OpTest, self).setUp()
        if tf2 is None:
            self.skipTest("TF v2 summary API not available")

    def text(self, *args, **kwargs):
        return self.text_event(*args, **kwargs).summary

    def text_event(self, *args, **kwargs):
        """Write one summary and return the resulting Event proto."""
        self.write_text_event(*args, **kwargs)
        event_files = sorted(glob.glob(os.path.join(self.get_temp_dir(), "*")))
        self.assertEqual(len(event_files), 1)
        events = list(tf.compat.v1.train.summary_iterator(event_files[0]))
        # Expect a boilerplate event for the file_version, then the summary one.
        self.assertEqual(len(events), 2)
        # Delete the event file to reset to an empty directory for later calls.
        # TODO(nickfelt): use a unique subdirectory per writer instead.
        os.remove(event_files[0])
        return events[1]

    def write_text_event(self, *args, **kwargs):
        kwargs.setdefault("step", 1)
        writer = tf2.summary.create_file_writer(self.get_temp_dir())
        with writer.as_default():
            summary.text(*args, **kwargs)
        writer.close()

    def test_scoped_tag(self):
        with tf.name_scope("scope"):
            self.assertEqual("scope/a", self.text("a", "foo").value[0].tag)

    def test_step(self):
        event = self.text_event("a", "foo", step=333)
        self.assertEqual(333, event.step)

    def test_default_step(self):
        try:
            tf2.summary.experimental.set_step(333)
            # TODO(nickfelt): change test logic so we can just omit `step` entirely.
            event = self.text_event("a", "foo", step=None)
            self.assertEqual(333, event.step)
        finally:
            # Reset to default state for other tests.
            tf2.summary.experimental.set_step(None)


class SummaryV2OpGraphTest(SummaryV2OpTest, tf.test.TestCase):
    """Same as SummaryV2OpTest but writes from inside a tf.function graph."""

    def write_text_event(self, *args, **kwargs):
        kwargs.setdefault("step", 1)
        # Hack to extract current scope since there's no direct API for it.
        with tf.name_scope("_") as temp_scope:
            scope = temp_scope.rstrip("/_")

        @tf2.function
        def graph_fn():
            # Recreate the active scope inside the defun since it won't propagate.
            with tf.name_scope(scope):
                summary.text(*args, **kwargs)

        writer = tf2.summary.create_file_writer(self.get_temp_dir())
        with writer.as_default():
            graph_fn()
        writer.close()


if __name__ == "__main__":
    tf.test.main()
#!/usr/bin/env python
# encoding: utf-8
"""
CustomQGraphicsView.py

.. module:: CustomQGraphicsView
   :platform: Unix, Windows, Mac OS X
   :synopsis: A Custom QGraphicsView module to allow focus input events
   like mouse clicks and panning and zooming
"""
from bin import app
from cadnano.gui.views.pathview import pathstyles as styles
import cadnano.util as util
from PyQt5.QtCore import pyqtSignal, Qt, QTimer
from PyQt5.QtGui import QPaintEngine
from PyQt5.QtWidgets import qApp, QGraphicsView
# for OpenGL mode
try:
    from OpenGL import GL
    from PyQt5.QtOpenGL import QGLWidget, QGLFormat, QGL
except:
    GL = False
# NOTE(review): this unconditional assignment disables the OpenGL viewport
# path even when PyOpenGL imported successfully — presumably intentional
# (GL path kept for debugging); confirm before removing.
GL = False


class CustomQGraphicsView(QGraphicsView):
    """
    Base class for QGraphicsViews with Mouse Zoom and Pan support via the
    Control/Command shortcut key.

    A QGraphics View stores info on the view and handles mouse events for
    zooming and panning

    Ctrl-MidMouseButton = Pan
    Ctrl-RightMouseButton = Dolly Zoom
    MouseWheel = Zoom

    Parameters
    ----------
    parent: type of QWidget, such as QWidget.main_splitter() for the type of
    View its has

    See Also
    --------

    Examples
    --------

    For details on these and other miscellaneous methods, see below.
    """
    def __init__(self, parent=None):
        """
        On initialization, we need to bind the Ctrl/command key to
        enable manipulation of the view.
        """
        QGraphicsView.__init__(self, parent)
        self.setTransformationAnchor(QGraphicsView.AnchorUnderMouse)
        self.setRubberBandSelectionMode(Qt.IntersectsItemShape)
        self.setStyleSheet("QGraphicsView { background-color: rgb(96.5%, 96.5%, 96.5%); }")
        self._no_drag = QGraphicsView.RubberBandDrag
        self._yes_drag = QGraphicsView.ScrollHandDrag
        # reset things that are state dependent
        self.clearGraphicsView()
        # last mouse position, used to compute pan deltas
        self._x0 = 0
        self._y0 = 0
        self._scale_size = 1.0
        self._scale_limit_max = 4.0
        self._scale_limit_min = 0.15
        self._scale_up_rate = 0.01
        self._scale_down_rate = 0.01
        self._scale_fit_factor = 1  # sets initial zoom level
        self._show_details = True
        self._last_scale_factor = 0.0
        self.scene_root_item = None  # the item to transform
        # Keyboard panning
        self._key_pan_delta_x = styles.PATH_BASE_WIDTH * 21
        self._key_pan_delta_y = styles.PATH_HELIX_HEIGHT + styles.PATH_HELIX_PADDING/2
        # Modifier keys and buttons
        self._key_mod = Qt.Key_Control
        self._key_select = Qt.Key_Shift
        self._button_pan = Qt.LeftButton
        self._button_pan_alt = Qt.MidButton
        self._button_zoom = Qt.RightButton
        self.toolbar = None  # custom hack for the paint tool palette
        self._name = None
        if GL:
            self.setViewport(QGLWidget(QGLFormat(QGL.SampleBuffers)))
            self.setViewportUpdateMode(QGraphicsView.FullViewportUpdate)
        else:
            self.setViewportUpdateMode(QGraphicsView.MinimalViewportUpdate)
        # self.setViewportUpdateMode(QGraphicsView.SmartViewportUpdate)
        # self.setFocusPolicy(Qt.ClickFocus)
    # end def

    # Emitted by resetGL() when crossing the zoom threshold: True = zoomed in
    # (show details), False = zoomed out.
    levelOfDetailChangedSignal = pyqtSignal(bool)

    def __repr__(self):
        clsName = self.__class__.__name__
        objId = self._name if self._name else str(id(self))[-4:]
        return "<%s %s>" % (clsName, objId)

    def setName(self, name):
        """Set the debug name used by __repr__."""
        self._name = name
    # end def

    def setViewportUpdateOn(self, is_enabled):
        """Toggle viewport repainting (NoViewportUpdate suppresses redraws)."""
        if is_enabled:
            self.setViewportUpdateMode(QGraphicsView.MinimalViewportUpdate)
        else:
            self.setViewportUpdateMode(QGraphicsView.NoViewportUpdate)
    # end def

    def activateSelection(self, is_active):
        """Enable/disable rubber-band selection; clears any current selection."""
        if self._selection_lock:
            self._selection_lock.clearSelection(False)
        self.clearSelectionLockAndCallbacks()
        if is_active:
            self._no_drag = QGraphicsView.RubberBandDrag
        else:
            self._no_drag = QGraphicsView.NoDrag
        if self.dragMode() != self._yes_drag:
            self.setDragMode(self._no_drag)
    # end def

    def clearGraphicsView(self):
        """Reset all state-dependent flags (focus, selection, pan/zoom)."""
        # Event handling
        self._has_focus = False
        # Misc
        self.clearSelectionLockAndCallbacks()
        # Pan and dolly defaults
        self._transform_enable = False
        self._dolly_zoom_enable = False
        self.setDragMode(self._no_drag)
    # end def

    def clearSelectionLockAndCallbacks(self):
        self._selection_lock = None  # a selection group to limit types of items selected
        self._press_list = []  # bookkeeping to handle passing mouseReleaseEvents to QGraphicsItems that don't get them
    # end def

    def setGLView(self, boolval):
        """Switch between GL and non-GL viewport (GL branch currently commented out)."""
        scene = self.scene()
        if boolval and self.is_GL == False:
            self.is_GL = True
            # scene.drawBackground = self.drawBackgroundGL
            # self.setViewport(QGLWidget(QGLFormat(QGL.SampleBuffers)))
            # self.setViewportUpdateMode(QGraphicsView.FullViewportUpdate)
        elif not boolval and self.is_GL == True:
            self.is_GL = False
            # scene.drawBackground = self.drawBackgroundNonGL
            # self.setViewport(QWidget())
            # self.setViewportUpdateMode(QGraphicsView.MinimalViewportUpdate)
    # end def

    def setupGL(self, mainWindow):
        """Initialize GL-related flags and the debounce timer used by resetGL."""
        scene = self.scene()
        win = mainWindow
        self.is_GL = True
        self.is_GL_switch_allowed = True
        self.qTimer = QTimer()
        # self.drawBackgroundNonGL = scene.drawBackground
        # scene.drawBackground = self.drawBackgroundGL
        # format = QGLFormat(QGL.SampleBuffers)
        # format.setSamples(16)
        # print "# of samples", format.samples(), format.sampleBuffers()
        # self.setViewport(QGLWidget(format))
        # self.setViewportUpdateMode(QGraphicsView.FullViewportUpdate)
    # end def

    def resetGL(self):
        """Flip detail level / GL mode based on the current zoom factor.

        Below 0.15 we hide details (zoomed out); above 0.2 we show them.
        The gap between the thresholds provides hysteresis.
        """
        scale_factor = self.transform().m11()
        # print "scale_factor", scale_factor
        self.scene_root_item.window().statusBar().showMessage("%0.2f" % scale_factor)
        if scale_factor < .15:  # and self.is_GL_switch_allowed:
            # self.is_GL_switch_allowed = False
            self.setGLView(True)
            self._show_details = False
            self.levelOfDetailChangedSignal.emit(False)  # zoomed out
            self.qTimer.singleShot(500, self.allowGLSwitch)
        elif scale_factor > .2:  # and self.is_GL_switch_allowed:
            # self.is_GL_switch_allowed = False
            self.setGLView(False)
            self._show_details = True
            self.levelOfDetailChangedSignal.emit(True)  # zoomed in
            self.qTimer.singleShot(500, self.allowGLSwitch)
    # end def

    def shouldShowDetails(self):
        return self._show_details
    # end def

    def allowGLSwitch(self):
        self.is_GL_switch_allowed = True
    # end def

    def drawBackgroundGL(self, painter, rect):
        """
        This method is for overloading the QGraphicsScene.
        """
        # NOTE(review): qWarning is not imported in this module — this branch
        # would raise NameError if ever taken; confirm intended import.
        if painter.paintEngine().type() != QPaintEngine.OpenGL and \
           painter.paintEngine().type() != QPaintEngine.OpenGL2:
            qWarning("OpenGLScene: drawBackground needs a QGLWidget to be set as viewport on the graphics view");
            return
        # end if
        painter.beginNativePainting()
        GL.glDisable(GL.GL_DEPTH_TEST)  # disable for 2D drawing
        GL.glClearColor(1.0, 1.0, 1.0, 1.0)
        GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)
        painter.endNativePainting()
    # end def

    def focusInEvent(self, event):
        self._has_focus = True

    def focusOutEvent(self, event):
        self._transform_enable = False
        self._dolly_zoom_enable = False
        self._has_focus = False
        self._transform_enable = False
    # end def

    def setSelectionLock(self, selection_lock):
        self._selection_lock = selection_lock
    # end def

    def selectionLock(self):
        return self._selection_lock
    # end def

    def setScaleFitFactor(self, value):
        """docstring for setScaleFitFactor"""
        self._scale_fit_factor = value
    # end def

    def setKeyPan(self, button):
        """Set the class pan button remotely"""
        self._button_pan = button
    # end def

    def addToPressList(self, item):
        """Register *item* to receive a customMouseRelease on mouse-up."""
        # self._press_list[self._press_list_idx].append(item)
        self._press_list.append(item)
    # end def

    def keyPanDeltaX(self):
        """Returns the distance in scene space to move the scene_root_item when
        panning left or right."""
        # PyQt isn't aware that QGraphicsObject isa QGraphicsItem and so
        # it returns a separate python object if, say, childItems() returns
        # a QGraphicsObject casted to a QGraphicsItem. If this is the case,
        # we can still find the QGraphicsObject thusly:
        candidateDxDeciders = list(self.scene_root_item.childItems())
        candidateDxDeciders = candidateDxDeciders +\
                           [cd.toGraphicsObject() for cd in candidateDxDeciders]
        for cd in candidateDxDeciders:
            if cd == None:
                continue
            keyPanDXMethod = getattr(cd, 'keyPanDeltaX', None)
            if keyPanDXMethod != None:
                return keyPanDXMethod()
        return 100  # fallback pan distance when no child supplies one

    def keyPanDeltaY(self):
        """Returns the distance in scene space to move the scene_root_item when
        panning left or right."""
        candidateDyDeciders = list(self.scene_root_item.childItems())
        candidateDyDeciders = candidateDyDeciders +\
                           [cd.toGraphicsObject() for cd in candidateDyDeciders]
        for cd in candidateDyDeciders:
            if cd == None:
                continue
            keyPanDYMethod = getattr(cd, 'keyPanDeltaY', None)
            if keyPanDYMethod != None:
                return keyPanDYMethod()
        return 100

    def keyPressEvent(self, event):
        """
        Handle key presses for mouse-drag transforms and arrow-key panning.
        """
        if not self._has_focus:  # we don't have focus -> ignore keypress
            return
        if event.key() == self._key_mod:
            self._transform_enable = True
            QGraphicsView.keyPressEvent(self, event)
        elif event.key() == Qt.Key_Left:
            transform = self.scene_root_item.transform()
            transform.translate(self.keyPanDeltaX(), 0)
            self.scene_root_item.setTransform(transform)
        elif event.key() == Qt.Key_Up:
            transform = self.scene_root_item.transform()
            transform.translate(0, self.keyPanDeltaY())
            self.scene_root_item.setTransform(transform)
        elif event.key() == Qt.Key_Right:
            transform = self.scene_root_item.transform()
            transform.translate(-self.keyPanDeltaX(), 0)
            self.scene_root_item.setTransform(transform)
        elif event.key() == Qt.Key_Down:
            transform = self.scene_root_item.transform()
            transform.translate(0, -self.keyPanDeltaY())
            self.scene_root_item.setTransform(transform)
        elif event.key() == Qt.Key_Plus:
            self.zoomIn(0.3)
        elif event.key() == Qt.Key_Minus:
            # NOTE(review): Key_Minus also calls zoomIn (with a tiny fraction)
            # rather than zoomOut — looks intentional (zoom toward minimum via
            # a small fraction_of_max) but worth confirming.
            self.zoomIn(0.03)
        else:
            return QGraphicsView.keyPressEvent(self, event)
        # end else
    # end def

    def keyReleaseEvent(self, event):
        """docstring for keyReleaseEvent"""
        if event.key() == self._key_mod:
            self._transform_enable = False
            self._dolly_zoom_enable = False
            self._panDisable()
        # end if
        else:
            QGraphicsView.keyReleaseEvent(self, event)
        # end else
    # end def

    def enterEvent(self, event):
        # self.setFocus() # this call robs selection from key focus
        self.setDragMode(self._no_drag)
        QGraphicsView.enterEvent(self, event)

    def leaveEvent(self, event):
        self.clearFocus()
        QGraphicsView.leaveEvent(self, event)

    def mouseMoveEvent(self, event):
        """
        Must reimplement mouseMoveEvent of QGraphicsView to allow
        ScrollHandDrag due to the fact that events are intercepted
        breaks this feature.
        """
        if self._transform_enable == True:
            if self.dragMode() == self._yes_drag:
                # Add stuff to handle the pan event
                posf = event.localPos()
                xf = posf.x()
                yf = posf.y()
                factor = self.transform().m11()
                transform = self.scene_root_item.transform()
                # Translate in scene coordinates: divide the pixel delta by
                # the current scale factor.
                transform.translate((xf - self._x0)/factor,\
                                    (yf - self._y0)/factor)
                self.scene_root_item.setTransform(transform)
                self._x0 = xf
                self._y0 = yf
            elif self._dolly_zoom_enable == True:
                self.dollyZoom(event)
        # adding this allows events to be passed to items underneath
        QGraphicsView.mouseMoveEvent(self, event)
    # end def

    def mousePressEvent(self, event):
        """Start a pan or dolly-zoom gesture while the modifier key is held."""
        if self._transform_enable == True and qApp.keyboardModifiers():
            which_buttons = event.buttons()
            if which_buttons in [self._button_pan, self._button_pan_alt]:
                self._panEnable()
                posf = event.localPos()
                self._x0 = posf.x()
                self._y0 = posf.y()
            elif which_buttons == self._button_zoom:
                self._dolly_zoom_enable = True
                self._last_scale_factor = 0
                # QMouseEvent.y() returns the position of the mouse cursor
                # relative to the widget
                self._y0 = event.localPos().y()
            else:
                QGraphicsView.mousePressEvent(self, event)
        else:
            QGraphicsView.mousePressEvent(self, event)
    #end def

    def mouseReleaseEvent(self, event):
        """If panning, stop. If handles were pressed, release them."""
        if self._transform_enable == True:
            # QMouseEvent.button() returns the button that triggered the event
            which_button = event.button()
            if which_button in [self._button_pan, self._button_pan_alt]:
                self._panDisable()
            elif which_button == self._button_zoom:
                self._dolly_zoom_enable = False
            else:
                return QGraphicsView.mouseReleaseEvent(self, event)
        # end if
        else:
            if len(self._press_list):  # Notify any pressed items to release
                event_pos = event.pos()
                for item in self._press_list:
                    #try:
                    # print("item release", item)
                    item.customMouseRelease(event)
                    #except:
                    #    item.mouseReleaseEvent(event)
                #end for
                self._press_list = []
            # end if
            if self._selection_lock:
                self._selection_lock.processPendingToAddList()
            return QGraphicsView.mouseReleaseEvent(self, event)
    #end def

    def _panEnable(self):
        """Enable ScrollHandDrag Mode in QGraphicsView (displays a hand
        pointer)"""
        self.setDragMode(self._yes_drag)
    # end def

    def _panDisable(self):
        """Disable ScrollHandDrag Mode in QGraphicsView (displays a hand
        pointer)"""
        self.setDragMode(self._no_drag)
    # end def

    def fname(self):
        """docstring for fname"""
        pass

    def wheelEvent(self, event):
        self.safeScale(event.angleDelta().y())
    # end def

    def safeScale(self, delta):
        """Scale the view by a wheel/drag *delta*, clamped to the zoom limits."""
        current_scale_level = self.transform().m11()
        scale_factor = 1 + delta * \
           (self._scale_down_rate if delta < 0 else self._scale_up_rate) * \
           (app().prefs.zoom_speed/100.)
        # NOTE(review): the next assignment is immediately overwritten by the
        # clamped version below — redundant but harmless.
        new_scale_level = current_scale_level * scale_factor
        new_scale_level = util.clamp(current_scale_level * scale_factor,\
                              self._scale_limit_min,\
                              self._scale_limit_max)
        scale_change = new_scale_level / current_scale_level
        self.scale(scale_change, scale_change)
        self.resetGL()
    # end def

    def zoomIn(self, fraction_of_max=0.5):
        current_scale_level = self.transform().m11()
        scale_change = (fraction_of_max * self._scale_limit_max) / current_scale_level
        self.scale(scale_change, scale_change)
    # end def

    def zoomOut(self, fraction_of_min=1):
        current_scale_level = self.transform().m11()
        scale_change = (fraction_of_min * self._scale_limit_min) / current_scale_level
        self.scale(scale_change, scale_change)
    # end def

    def dollyZoom(self, event):
        """Zoom proportionally to vertical mouse travel during a drag-zoom."""
        # QMouseEvent.y() returns the position of the mouse cursor relative
        # to the widget
        yf = event.y()
        denom = abs(yf - self._y0)
        if denom > 0:
            # NOTE(review): `%` (modulo) here looks suspicious — a ratio
            # (`/`) would be the conventional dolly factor; confirm intent.
            scale_factor = (self.height() / 2) % denom
            if self._last_scale_factor != scale_factor:
                self._last_scale_factor = scale_factor
                # zoom in if mouse y position is getting bigger
                if yf - self._y0 > 0:
                    self.safeScale(yf - self._y0)
                # end else
                else:  # else id smaller zoom out
                    self.safeScale(yf - self._y0)
                # end else
        # end if
    # end def

    def resetScale(self):
        """reset the scale to 1"""
        # use the transform value if you want to get how much the view
        # has been scaled
        self._scale_size = self.transform().m11()
        # self._scale_limit_min = 0.41*self._scale_size
        # make it so fitting in view is zoomed minimum
        # still gives you one zoom level out before violates limit
        self._scale_limit_min = self._scale_size*self._scale_fit_factor
        # use this if you want to reset the zoom in limit
        # self._scale_limit_max = 3.0*self._scale_size
        self._last_scale_factor = 0.0
    # end def

    def zoomToFit(self):
        """Fit the whole scene in the view, then back off by the fit factor."""
        # print("zoom to fit", self._name)
        # Auto zoom to center the scene
        thescene = self.scene_root_item.scene()
        # order matters?
        self.scene_root_item.resetTransform()  # zero out translations
        self.resetTransform()  # zero out scaling
        if self.toolbar:  # HACK: move toolbar so it doesn't affect sceneRect
            self.toolbar.setPos(0, 0)
        thescene.setSceneRect(thescene.itemsBoundingRect())
        scene_rect = thescene.sceneRect()
        if self.toolbar:  # HACK, pt2: move toolbar back
            self.toolbar.setPos(self.mapToScene(0, 0))
        self.fitInView(scene_rect, Qt.KeepAspectRatio)  # fit in view
        self.resetScale()  # adjust scaling so that translation works
        # adjust scaling so that the items don't fill 100% of the view
        # this is good for selection
        self.scale(self._scale_fit_factor, self._scale_fit_factor)
        self._scale_size *= self._scale_fit_factor
        self.resetGL()
    # end def

    def paintEvent(self, event):
        if self.toolbar:  # HACK: keep the toolbar pinned to the view corner
            self.toolbar.setPos(self.mapToScene(0, 0))
        QGraphicsView.paintEvent(self, event)
#end class
# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
"""Scrapy item pipelines mirroring scraped app items into Elasticsearch,
Solr and MongoDB, each with duplicate handling (delete-then-reinsert)."""

import pymongo
from elasticsearch import Elasticsearch, helpers
from scrapy.utils.project import get_project_settings
from scrapy.exceptions import DropItem
from datetime import datetime
import types
import pysolr
import logging


class XiaomiappPipeline(object):
    """No-op pipeline: passes every item through unchanged."""

    def process_item(self, item, spider):
        return item


class XiaomiElasticSearchPipeline(object):
    """Indexes items into Elasticsearch via buffered bulk requests."""

    # Pending bulk actions; flushed when the buffer reaches
    # ELASTICSEARCH_BUFFER_LENGTH and again when the spider closes.
    items_buffer = []

    def __init__(self):
        self.settings = get_project_settings()
        uri = "{}:{}".format(self.settings['ELASTICSEARCH_SERVER'],
                             self.settings['ELASTICSEARCH_PORT'])
        self.es = Elasticsearch([uri])

    def index_item(self, item):
        """Queue one item as a bulk index action; flush when the buffer fills."""
        index_name = self.settings['ELASTICSEARCH_INDEX']
        index_suffix_format = self.settings.get('ELASTICSEARCH_INDEX_DATE_FORMAT', None)
        if index_suffix_format:
            # Date-suffixed index, e.g. "apps-2017.01.31".
            index_name += "-" + datetime.strftime(datetime.now(), index_suffix_format)
        index_action = {
            '_index': index_name,
            '_type': self.settings['ELASTICSEARCH_TYPE'],
            '_source': dict(item)
        }
        self.items_buffer.append(index_action)
        if len(self.items_buffer) == self.settings.get('ELASTICSEARCH_BUFFER_LENGTH', 500):
            self.send_items()
            self.items_buffer = []

    def send_items(self):
        """Flush the buffered actions with a single bulk request."""
        helpers.bulk(self.es, self.items_buffer)

    def process_item(self, item, spider):
        """Index *item*; a generator/list of items is processed recursively."""
        if isinstance(item, (types.GeneratorType, list)):
            for each in item:
                self.process_item(each, spider)
        else:
            self.index_item(item)
            # NOTE(review): in addition to the buffered bulk indexing above,
            # the item is also deleted and re-indexed immediately here keyed
            # by appid — this double-writes each item; confirm whether the
            # bulk path or the direct path is the intended one.
            logging.info("Remove old values in Elasticsearch if exit")
            self.es.delete(self.settings['ELASTICSEARCH_INDEX'],
                           self.settings['ELASTICSEARCH_TYPE'],
                           id=item['appid'],
                           ignore=[400, 404])
            self.es.index(self.settings['ELASTICSEARCH_INDEX'],
                          self.settings['ELASTICSEARCH_TYPE'],
                          dict(item),
                          id=item['appid'],
                          op_type='create', )
        return item

    def close_spider(self, spider):
        """Flush any leftover buffered items when the spider shuts down.

        BUG FIX: the condition was inverted (`if not self.items_buffer`),
        which only "flushed" an empty buffer and silently dropped every item
        still buffered at shutdown.
        """
        if self.items_buffer:
            self.send_items()


class XiaomiSolrPipeline(object):
    """Indexes items into Solr, optionally replacing duplicates first."""

    def __init__(self):
        settings = get_project_settings()
        # (solr_field_name, item_field_name) pairs.
        self.mapping = settings['SOLR_MAPPING'].items()
        self.ignore = settings['SOLR_IGNORE_DUPLICATES'] or False
        self.keys = settings['SOLR_DUPLICATES_KEY_FIELDS']
        if self.ignore and not self.keys:
            raise RuntimeError('SOLR_DUPLICATES_KEY_FIELDS has to be defined')
        self.solr = pysolr.Solr(settings['SOLR_URL'], timeout=10)
        print(self.mapping)

    def process_item(self, item, spider):
        if self.ignore:
            # Build a query over the duplicate-key fields and remove any
            # matching existing documents before adding the new one.
            duplicates = [str(name) + ':' + '"' + self.get_value(item, value) + '"'
                          for name, value in self.mapping if name in self.keys]
            query = " ".join(duplicates)
            result = self.solr.search(query)
            print(query)
            element = [self.get_value(item, value) for name, value in self.mapping]
            print(element)
            if result:
                logging.info("Remove old values in Solr")
                self.solr.delete(q=query)
        results = {}
        for name, value in self.mapping:
            results[name] = self.get_value(item, value)
        self.solr.add([results])
        return item

    def get_value(self, item, value):
        """Return item[value] or None when the field is absent."""
        return item[value] if value in item else None


class XiaomiMongoDBPipeline(object):
    """Stores items in MongoDB, replacing any document with the same key."""

    def __init__(self):
        settings = get_project_settings()
        connection = pymongo.MongoClient(
            settings['MONGODB_SERVER'],
            settings['MONGODB_PORT']
        )
        db = connection[settings['MONGODB_DB']]
        self.collection = db[settings['MONGODB_COLLECTION']]
        # Field used to detect duplicates (e.g. appid).
        self.key = settings['MONGODB_UNIQUE_KEY']

    def process_item(self, item, spider):
        valid = True
        for data in item:
            if not data:
                valid = False
                raise DropItem("Missing {0}!".format(data))
        if valid:
            # Delete any existing document with the same unique key, then insert.
            result = {}
            result[self.key] = item[self.key]
            search_result = self.collection.find_one(result)
            if search_result:
                logging.info("Remove old values in MongoDB")
                self.collection.delete_one(result)
            logging.debug("Item added to MongoDB database!")
            self.collection.insert(dict(item))
        return item
import xadmin
from xadmin import views
from .models import *
from xadmin.layout import Main, TabHolder, Tab, Fieldset, Row, Col, AppendedText, Side
from xadmin.plugins.inline import Inline
from xadmin.plugins.batch import BatchChangeAction


class MainDashboard(object):
    """Widget layout for the xadmin index (dashboard) page."""
    widgets = [
        [
            {"type": "html", "title": "Welcome!", "content": "<h3>Welcome to our system!</h3>"},
            {"type": "list", "model": "app.papers"},
        ],
        [
            {"type": "qbutton", "title": "Quick Start", "btns": [{'model': Papers}, {'model': Keywords}]},
            {"type": "addform", "model": Papers}
            #{"type": "chart", "model": "app.ViewEachPerson", 'chart': 'user_count', 'params': {'_p_date__gte': '2013-01-08', 'p': 1, '_p_date__lt': '2013-01-29'}},
        ]
    ]
xadmin.site.register(views.website.IndexView, MainDashboard)


class BaseSetting(object):
    """Theme options applied to every admin view."""
    enable_themes = False
    use_bootswatch = True
xadmin.site.register(views.BaseAdminView, BaseSetting)


class GlobalSetting(object):
    """Site-wide search targets, model icons and menu style."""
    global_search_models = [People, Papers, Conferences, Organizations, Journals]  ##todo
    global_models_icon = {
        Host: 'fa fa-laptop', IDC: 'fa fa-cloud'
    }
    # possible values: accordion, default
    menu_style = 'accordion'  #'default'
xadmin.site.register(views.CommAdminView, GlobalSetting)


class MaintainInline(object):
    """Inline editor for MaintainLog rows shown inside HostAdmin."""
    model = MaintainLog
    extra = 1
    style = 'accordion'


class IDCAdmin(object):
    """Admin options for IDC (data-center) records."""
    list_display = ('name', 'description', 'create_time')
    list_display_links = ('name',)
    wizard_form_list = [
        ('First\'s Form', ('name', 'description')),
        ('Second Form', ('contact', 'telphone', 'address')),
        ('Thread Form', ('customer_id',))
    ]
    search_fields = ['name']
    relfield_style = 'fk-ajax'
    reversion_enable = True
    actions = [BatchChangeAction, ]
    batch_fields = ('contact', 'create_time')


class HostAdmin(object):
    """Admin options for Host records, including a custom 'open' column."""

    def open_web(self, instance):
        # Extra list column rendering a link to the host's web interface.
        return "<a href='http://%s' target='_blank'>Open</a>" % instance.ip
    open_web.short_description = "Acts"
    open_web.allow_tags = True
    open_web.is_column = True

    list_display = ('name', 'idc', 'guarantee_date', 'service_type',
                    'status', 'open_web', 'description')
    list_display_links = ('name',)
    raw_id_fields = ('idc',)
    style_fields = {'system': "radio-inline"}
    search_fields = ['name', 'ip', 'description']
    list_filter = ['idc', 'guarantee_date', 'status', 'brand', 'model',
                   'cpu', 'core_num', 'hard_disk', 'memory',
                   ('service_type', xadmin.filters.MultiSelectFieldListFilter)]
    list_quick_filter = ['service_type', {'field': 'idc__name', 'limit': 10}]
    list_bookmarks = [{'title': "Need Guarantee", 'query': {'status__exact': 2},
                       'order': ('-guarantee_date',),
                       'cols': ('brand', 'guarantee_date', 'service_type')}]
    show_detail_fields = ('idc',)
    list_editable = (
        'name', 'idc', 'guarantee_date', 'service_type', 'description')
    save_as = True
    aggregate_fields = {"guarantee_date": "min"}
    grid_layouts = ('table', 'thumbnails')

    form_layout = (
        Main(
            TabHolder(
                Tab('Comm Fields',
                    Fieldset('Company data',
                             'name', 'idc',
                             description="some comm fields, required"
                             ),
                    Inline(MaintainLog),
                    ),
                Tab('Extend Fields',
                    Fieldset('Contact details',
                             'service_type',
                             Row('brand', 'model'),
                             Row('cpu', 'core_num'),
                             Row(AppendedText('hard_disk', 'G'),
                                 AppendedText('memory', "G")),
                             'guarantee_date'
                             ),
                    ),
            ),
        ),
        Side(
            Fieldset('Status data',
                     'status', 'ssh_port', 'ip'
                     ),
        )
    )
    inlines = [MaintainInline]
    reversion_enable = True

    data_charts = {
        "host_service_type_counts": {
            'title': u"Host service type count",
            "x-field": "service_type",
            "y-field": ("service_type",),
            "option": {
                "series": {"bars": {"align": "center", "barWidth": 0.8, 'show': True}},
                "xaxis": {"aggregate": "count", "mode": "categories"},
            },
        },
    }


class HostGroupAdmin(object):
    """Admin options for host groups."""
    list_display = ('name', 'description')
    list_display_links = ('name',)
    search_fields = ['name']
    style_fields = {'hosts': 'checkbox-inline'}


class MaintainLogAdmin(object):
    """Admin options for host maintenance logs."""
    list_display = (
        'host', 'maintain_type', 'hard_type', 'time', 'operator', 'note')
    list_display_links = ('host',)
    list_filter = ['host', 'maintain_type', 'hard_type', 'time', 'operator']
    search_fields = ['note']
    form_layout = (
        Col("col2",
            Fieldset('Record data',
                     'time', 'note',
                     css_class='unsort short_label no_title'
                     ),
            span=9, horizontal=True
            ),
        Col("col1",
            Fieldset('Comm data',
                     'host', 'maintain_type'
                     ),
            Fieldset('Maintain details',
                     'hard_type', 'operator'
                     ),
            span=3
            )
    )
    reversion_enable = True


class AccessRecordAdmin(object):
    """Admin options for daily access statistics, with derived avg column."""

    def avg_count(self, instance):
        # Average views per user for the row's date.
        return int(instance.view_count / instance.user_count)
    avg_count.short_description = "Avg Count"
    avg_count.allow_tags = True
    avg_count.is_column = True

    list_display = ('date', 'user_count', 'view_count', 'avg_count')
    list_display_links = ('date',)
    list_filter = ['date', 'user_count', 'view_count']
    actions = None
    aggregate_fields = {"user_count": "sum", 'view_count': "sum"}
    refresh_times = (3, 5, 10)
    data_charts = {
        "user_count": {'title': u"User Report", "x-field": "date",
                       "y-field": ("user_count", "view_count"),
                       "order": ('date',)},
        "avg_count": {'title': u"Avg Report", "x-field": "date",
                      "y-field": ('avg_count',), "order": ('date',)},
        "per_month": {'title': u"Monthly Users", "x-field": "_chart_month",
                      "y-field": ("user_count", ),
                      "option": {
                          "series": {"bars": {"align": "center", "barWidth": 0.8, 'show': True}},
                          "xaxis": {"aggregate": "sum", "mode": "categories"},
                      },
                      },
    }

    def _chart_month(self, obj):
        # Bucket key for the per-month chart: full month name.
        return obj.date.strftime("%B")

#xadmin.site.register(Host, HostAdmin)
#xadmin.site.register(HostGroup, HostGroupAdmin)
#xadmin.site.register(MaintainLog, MaintainLogAdmin)
#xadmin.site.register(IDC, IDCAdmin)
#xadmin.site.register(AccessRecord, AccessRecordAdmin)

########################################
# User-defined area
########################################


def register_admin(model):
    """Class decorator: register the decorated admin class for *model*.

    BUGFIX: the inner function now returns the class; previously it returned
    None, so every decorated admin name was rebound to None at module level.
    Registration behavior is unchanged.
    """
    def handle_func(o):
        xadmin.site.register(model, o)
        return o
    return handle_func


@register_admin(Affiliation)
class AffiliationAdmin(object):
    fields = ['personid', 'organizationid', 'startyear', 'title']
    list_display = ['get_people_name', 'get_organization_name', 'startyear', 'title']
    list_filter = ['title', 'startyear']
    list_editable = ['startyear', 'title']
    show_detail_fields = ['startyear', 'title']
    search_fields = ['personid__firstname', 'personid__middlename',
                     'personid__lastname', 'organizationid__name',
                     'startyear', 'title']

    def get_people_name(self, obj):
        return obj.personid
    get_people_name.short_description = 'People Name'

    def get_organization_name(self, obj):
        return obj.organizationid
    get_organization_name.short_description = 'Organization Name'


@register_admin(Analysis)
class AnalysisAdmin(object):
    fields = ['personid', 'paperid', 'surveyid']
    list_display = ['get_people_name', 'get_paper_title', 'get_survey_name']
    list_filter = ['surveyid']
    search_fields = ['personid__firstname', 'personid__middlename',
                     'personid__lastname', 'paperid__title', 'surveyid__name']

    def get_people_name(self, obj):
        return obj.personid
    get_people_name.short_description = 'People Name'

    def get_paper_title(self, obj):
        return obj.paperid
    get_paper_title.short_description = 'Paper Title'

    def get_survey_name(self, obj):
        return obj.surveyid
    get_survey_name.short_description = 'Survey Name'


@register_admin(Answer)
class AnswerAdmin(object):
    fields = ['surveyid', 'questionid', 'content']
    list_display = ['get_survey_name', 'get_question_name', 'content']
    list_filter = ['surveyid']
    search_fields = ['content', 'surveyid__name', 'questionid__description']

    def get_survey_name(self, obj):
        return obj.surveyid
    get_survey_name.short_description = 'Survey Name'

    def get_question_name(self, obj):
        return obj.questionid
    get_question_name.short_description = 'Question'


@register_admin(Authorship)
class AuthorshipAdmin(object):
    fields = ['personid', 'paperid', 'authororder']
    list_display = ['get_people_name', 'get_paper_title', 'authororder']
    list_filter = ['authororder']
    search_fields = ['authororder', 'personid__firstname',
                     'personid__middlename', 'personid__lastname',
                     'paperid__title']

    def get_people_name(self, obj):
        return obj.personid
    get_people_name.short_description = 'People Name'

    def get_paper_title(self, obj):
        return obj.paperid
    get_paper_title.short_description = 'Paper Title'


@register_admin(BelongTo)
class BelongToAdmin(object):
    fields = ['paperid', 'indexid']
    list_display = ['get_paper_title', 'indexid']
    list_filter = ['indexid']
    search_fields = ['paperid__title', 'indexid__name']

    def get_paper_title(self, obj):
        return obj.paperid
    get_paper_title.short_description = 'Paper Title'


@register_admin(Cite)
class CiteAdmin(object):
    fields = ['citingpaperid', 'citedpaperid']
    list_display = ['get_citing_paper', 'get_cited_paper']
    #list_filter = ['citedpaperid']
    search_fields = ['citedpaperid__title', 'citingpaperid__title']

    def get_citing_paper(self, obj):
        return obj.citingpaperid
    get_citing_paper.short_description = 'Citing Paper'

    def get_cited_paper(self, obj):
        return obj.citedpaperid
    get_cited_paper.short_description = 'Cited Paper'


@register_admin(Committee)
class CommitteeAdmin(object):
    fields = ['personid', 'conferenceid', 'position']
    list_display = ['get_people_name', 'get_conference_name', 'position']
    list_filter = ['position', 'conferenceid']
    search_fields = ['personid__firstname', 'personid__middlename',
                     'personid__lastname', 'position',
                     'conferenceid__confid__name']

    def get_people_name(self, obj):
        return obj.personid
    get_people_name.short_description = 'People Name'

    def get_conference_name(self, obj):
        return obj.conferenceid
    get_conference_name.short_description = 'Conference Name'


@register_admin(ConferenceInstances)
class ConferenceInstancesAdmin(object):
    fields = ['confid', 'startdate', 'theme', 'city', 'country']
    list_display = ['get_conference_name', 'startdate', 'theme', 'city', 'country']
    list_filter = ['startdate', 'theme', 'city', 'country']
    search_fields = ['confid__name', 'startdate', 'theme', 'city', 'country']

    def get_conference_name(self, obj):
        return obj.confid
    get_conference_name.short_description = 'Conference Name'


@register_admin(ConferencePapers)
class ConferencePapersAdmin(object):
    #fields = ['paperid','track','conferenceid']
    list_display = ['paperid', 'track', 'conferenceid']
    list_filter = ['conferenceid']
    search_fields = ['paperid__title', 'track', 'conferenceid__confid__name']


@register_admin(Conferences)
class ConferencesAdmin(object):
    list_display = ['name', 'description']
    list_filter = ['name']
    # BUGFIX: was 'descripton' (typo); list_display above shows the field
    # is spelled 'description'.
    search_fields = ['name', 'description']


@register_admin(Editor)
class EditorAdmin(object):
    list_display = ['personid', 'jourid', 'boardposition']
    list_filter = ['jourid', 'boardposition']
    search_fields = ['personid__firstname', 'personid__middlename',
                     'personid__lastname', 'boardposition', 'jourid__name']


@register_admin(Employment)
class EmploymentAdmin(object):
    list_display = ['personid', 'jourid', 'position']
    list_filter = ['jourid', 'position']
    search_fields = ['personid__firstname', 'personid__middlename',
                     'personid__lastname', 'position', 'jourid__name']


@register_admin(ExperimentSummaries)
class ExperimentSummariesAdmin(object):
    list_display = ['name', 'description']
    list_filter = []
    search_fields = ['name', 'description']


@register_admin(ExpertAt)
class ExpertAtAdmin(object):
    list_display = ['personid', 'expertiseid']
    list_filter = ['expertiseid']
    search_fields = ['personid__firstname', 'personid__middlename',
                     'personid__lastname', 'expertiseid__name']


@register_admin(Expertise)
class ExpertiseAdmin(object):
    list_display = ['name', 'description']
    list_filter = []
    search_fields = ['name', 'description']


@register_admin(Funding)
class FundingAdmin(object):
    list_display = ['paperid', 'organizationid', 'fundingname',
                    'fundingamount', 'startdate']
    list_filter = ['startdate']
    search_fields = ['paperid__title', 'organizationid__name', 'fundingname',
                     'fundingamount', 'startdate']


@register_admin(Implement)
class ImplementAdmin(object):
    #fields = ['']
    list_display = ['personid', 'paperid', 'experimentid']
    list_filter = []
    search_fields = ['personid__firstname', 'personid__middlename',
                     'personid__lastname', 'paperid__title',
                     'experimentid__name']


@register_admin(Include)
class IncludeAdmin(object):
    list_display = ['paperid', 'keywordid']
    list_filter = ['keywordid']
    search_fields = ['paperid__title', 'keywordid__name']


@register_admin(Indexes)
class IndexesAdmin(object):
    list_display = ['name']
    list_filter = ['name']
    search_fields = ['name']


@register_admin(JournalIssues)
class JournalIssuesAdmin(object):
    list_display = ['volumenumber', 'issuenumber', 'year', 'jourid']
    list_filter = ['year', 'jourid']
    search_fields = ['volumenumber', 'issuenumber', 'year', 'jourid__name']


@register_admin(JournalPapers)
class JournalPapersAdmin(object):
    list_display = ['paperid', 'issueid']
    list_filter = []
    search_fields = ['paperid__title', 'issueid__jourid__name']


@register_admin(Journals)
class JournalsAdmin(object):
    list_display = ['name', 'description', 'organizationid']
    list_filter = []
    search_fields = ['name', 'description', 'organizationid__name']


@register_admin(Keywords)
class KeywordsAdmin(object):
    list_display = ['name', 'description']
    list_filter = []
    search_fields = ['name', 'description']


@register_admin(Methodologies)
class MethodologiesAdmin(object):
    fields = ['name', 'description']
    list_display = ['name', 'description']
    list_filter = []
    search_fields = ['name', 'description']


@register_admin(OrganizationType)
class OrganizationTypeAdmin(object):
    list_display = ['organizationid', 'type']
    list_filter = ['type']
    search_fields = ['organizationid__name', 'type']


@register_admin(Organizations)
class OrganizationsAdmin(object):
    list_display = ['name', 'description', 'city', 'country']
    list_filter = ['city', 'country']
    search_fields = ['name', 'description', 'city', 'country']


@register_admin(PaperSummaries)
class PaperSummariesAdmin(object):
    list_display = ['personid', 'paperid', 'result', 'attitudetoecig']
    list_filter = ['attitudetoecig']
    # BUGFIX: was 'paper__title'; the FK field is 'paperid' (see list_display
    # and every sibling admin class), so the lookup is 'paperid__title'.
    search_fields = ['personid__firstname', 'personid__middlename',
                     'personid__lastname', 'paperid__title', 'result',
                     'attitudetoecig']


@register_admin(PaperTypes)
class PaperTypesAdmin(object):
    list_display = ['name', 'description']
    list_filter = []
    search_fields = ['name', 'description']


@register_admin(Papers)
class PapersAdmin(object):
    list_display = ['title', 'typeid', 'startpage', 'endpage']
    list_filter = ['typeid']
    search_fields = ['title', 'typeid__name', 'startpage', 'endpage']
@register_admin(People) class PeopleAdmin(object): list_display = ['firstname','middlename','lastname'] list_filter = [] search_fields = ['firstname','middlename','lastname'] list_editable = ['firstname','lastname'] show_detail_fields = ['firstname','lastname'] @register_admin(PeopleType) class PeopleTypeAdmin(object): list_display = ['personid','type'] list_filter = ['type'] search_fields = ['personid__firstname','personid__middlename','personid__lastname','type'] @register_admin(Questions) class QuestionsAdmin(object): list_display = ['description'] list_filter = [] search_fields = ['description'] @register_admin(Speech) class SpeechAdmin(object): list_display = ['personid','conferenceid','speechid'] list_filter = ['conferenceid'] search_fields = ['personid__firstname','personid__middlename','personid__lastname','conferenceid__confid__name','speechid'] @register_admin(Sponsor) class SponsorAdmin(object): list_display = ['conferenceid','organizationid','fundingamount'] list_filter = ['conferenceid'] search_fields = ['conferenceid__confid__name','organizationid__name','fundingamount'] @register_admin(SurveySummaries) class SurveySummariesAdmin(object): list_display = ['name','description'] list_filter = [] search_fields = ['name','description'] @register_admin(UseMethodology) class UseMethodologyAdmin(object): list_display = ['personid','paperid','methodologyid'] list_filter = ['methodologyid'] search_fields = ['personid__firstname','personid__middlename','personid__lastname','paperid__title','methodologyid__name'] ################ #view area ################ @register_admin(ViewEachInstitution) class ViewEachInstitutionAdmin(object): list_display = ['univ_name','univ_city','univ_country','paper_num','survey_num','experiment_num'] list_filter = ['univ_name','univ_city','univ_country','paper_num','survey_num','experiment_num'] search_fields = ['univ_name','univ_city','univ_country','paper_num','survey_num','experiment_num'] list_display_links = ('id') 
# NOTE: all of the View* admin classes below had `list_display_links = ('id')`
# — a plain string, not a tuple, which the admin would iterate character by
# character.  Fixed to the one-element tuple ('id',), matching the ('name',)
# convention used elsewhere in this file.


@register_admin(ViewEachAttitude)
class ViewEachAttitudeAdmin(object):
    list_display = ['attitudetoecig', 'paper_num', 'research_num',
                    'review_num', 'comments_num', 'jour_num', 'conf_num']
    list_filter = ['attitudetoecig', 'paper_num', 'research_num',
                   'review_num', 'comments_num', 'jour_num', 'conf_num']
    search_fields = ['attitudetoecig', 'paper_num', 'research_num',
                     'review_num', 'comments_num', 'jour_num', 'conf_num']
    list_display_links = ('id',)


@register_admin(ViewEachJournal)
class ViewEachJournalAdmin(object):
    list_display = ['jour_name', 'description', 'paper_num', 'research_num',
                    'review_num', 'comments_num']
    list_filter = ['jour_name', 'description', 'paper_num', 'research_num',
                   'review_num', 'comments_num']
    search_fields = ['jour_name', 'description', 'paper_num', 'research_num',
                     'review_num', 'comments_num']
    list_display_links = ('id',)


@register_admin(ViewEachKeyword)
class ViewEachKeywordAdmin(object):
    list_display = ['keyword', 'description', 'paper_num', 'jour_num',
                    'conf_num', 'research_num', 'review_num', 'comments_num']
    list_filter = ['keyword', 'description', 'paper_num', 'jour_num',
                   'conf_num', 'research_num', 'review_num', 'comments_num']
    search_fields = ['keyword', 'description', 'paper_num', 'jour_num',
                     'conf_num', 'research_num', 'review_num', 'comments_num']
    list_display_links = ('id',)


@register_admin(ViewEachMethodology)
class ViewEachMethodologyAdmin(object):
    list_display = ['method_name', 'description', 'paper_num', 'jour_num',
                    'conf_num']
    list_filter = ['method_name', 'description', 'paper_num', 'jour_num',
                   'conf_num']
    search_fields = ['method_name', 'description', 'paper_num', 'jour_num',
                     'conf_num']
    list_display_links = ('id',)


@register_admin(ViewEachPaper)
class ViewEachPaperAdmin(object):
    list_display = ['paperid', 'title', 'paper_type', 'type_desc',
                    'author_name', 'univ_name', 'result', 'attitudetoecig']
    list_filter = ['paperid', 'title', 'paper_type', 'type_desc',
                   'author_name', 'univ_name', 'result', 'attitudetoecig']
    search_fields = ['paperid', 'title', 'paper_type', 'type_desc',
                     'author_name', 'univ_name', 'result', 'attitudetoecig']
    list_display_links = ('id',)


@register_admin(ViewEachPerson)
class ViewEachPersonAdmin(object):
    list_display = ['name', 'title', 'univ_name', 'paper',
                    'research_article', 'review_article', 'comments']
    list_filter = ['name', 'title', 'univ_name', 'paper',
                   'research_article', 'review_article', 'comments']
    search_fields = ['name', 'title', 'univ_name', 'paper',
                     'research_article', 'review_article', 'comments']
    list_display_links = ('id',)


@register_admin(ViewEachPublisher)
class ViewEachPublisherAmdin(object):
    # NOTE(review): class name misspells "Admin" as "Amdin"; kept as-is since
    # renaming a public name could break external references.
    list_display = ['publisher', 'pub_desc', 'city', 'country',
                    'research_num', 'review_num', 'comments_num']
    list_filter = ['publisher', 'pub_desc', 'city', 'country',
                   'research_num', 'review_num', 'comments_num']
    search_fields = ['publisher', 'pub_desc', 'city', 'country',
                     'research_num', 'review_num', 'comments_num']
    list_display_links = ('id',)


@register_admin(ViewEachSponsor)
class ViewEachSponsor(object):
    list_display = ['org_name', 'sponsor_desc', 'city', 'country',
                    'conf_name', 'conf_desc', 'funding']
    list_filter = ['org_name', 'sponsor_desc', 'city', 'country',
                   'conf_name', 'conf_desc', 'funding']
    search_fields = ['org_name', 'sponsor_desc', 'city', 'country',
                     'conf_name', 'conf_desc', 'funding']
    list_display_links = ('id',)


@register_admin(ViewFrequentExpertise)
class ViewFrequentExpertiseAdmin(object):
    list_display = ['expertise_name', 'researcher_num']
    list_filter = ['expertise_name', 'researcher_num']
    search_fields = ['expertise_name', 'researcher_num']
    list_display_links = ('id',)


@register_admin(ViewMostProductiveEditor)
class ViewMostProductiveEditorAdmin(object):
    list_display = ['personid', 'fullname', 'jour_name', 'paper_num']
    list_filter = ['personid', 'fullname', 'jour_name', 'paper_num']
    search_fields = ['personid', 'fullname', 'jour_name', 'paper_num']
    list_display_links = ('id',)
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import copy

from boto3.compat import collections_abc
from boto3.dynamodb.types import TypeSerializer, TypeDeserializer
from boto3.dynamodb.conditions import ConditionBase
from boto3.dynamodb.conditions import ConditionExpressionBuilder
from boto3.docs.utils import DocumentModifiedShape


def register_high_level_interface(base_classes, **kwargs):
    """Event handler: prepend the high-level DynamoDB mixin to the resource
    class hierarchy so its __init__ runs first and installs the transforms."""
    base_classes.insert(0, DynamoDBHighLevelResource)


def copy_dynamodb_params(params, **kwargs):
    """Deep-copy user-provided request params so later in-place
    transformations never mutate the caller's objects."""
    return copy.deepcopy(params)


class DynamoDBHighLevelResource(object):
    """Resource mixin that wires DynamoDB request/response transformations
    (condition expressions, type serialization/deserialization) and the
    matching documentation rewrites into the client's event system."""

    def __init__(self, *args, **kwargs):
        super(DynamoDBHighLevelResource, self).__init__(*args, **kwargs)

        # Apply handler that creates a copy of the user provided dynamodb
        # item such that it can be modified.
        self.meta.client.meta.events.register(
            'provide-client-params.dynamodb',
            copy_dynamodb_params,
            unique_id='dynamodb-create-params-copy'
        )

        self._injector = TransformationInjector()
        # Apply the handler that generates condition expressions including
        # placeholders.
        self.meta.client.meta.events.register(
            'before-parameter-build.dynamodb',
            self._injector.inject_condition_expressions,
            unique_id='dynamodb-condition-expression')

        # Apply the handler that serializes the request from python
        # types to dynamodb types.
        self.meta.client.meta.events.register(
            'before-parameter-build.dynamodb',
            self._injector.inject_attribute_value_input,
            unique_id='dynamodb-attr-value-input')

        # Apply the handler that deserializes the response from dynamodb
        # types to python types.
        self.meta.client.meta.events.register(
            'after-call.dynamodb',
            self._injector.inject_attribute_value_output,
            unique_id='dynamodb-attr-value-output')

        # Apply the documentation customizations to account for
        # the transformations.
        attr_value_shape_docs = DocumentModifiedShape(
            'AttributeValue',
            new_type='valid DynamoDB type',
            new_description=(
                '- The value of the attribute. The valid value types are '
                'listed in the '
                ':ref:`DynamoDB Reference Guide<ref_valid_dynamodb_types>`.'
            ),
            new_example_value=(
                '\'string\'|123|Binary(b\'bytes\')|True|None|set([\'string\'])'
                '|set([123])|set([Binary(b\'bytes\')])|[]|{}')
        )

        key_expression_shape_docs = DocumentModifiedShape(
            'KeyExpression',
            new_type=(
                'condition from :py:class:`boto3.dynamodb.conditions.Key` '
                'method'
            ),
            new_description=(
                'The condition(s) a key(s) must meet. Valid conditions are '
                'listed in the '
                ':ref:`DynamoDB Reference Guide<ref_dynamodb_conditions>`.'
            ),
            new_example_value='Key(\'mykey\').eq(\'myvalue\')'
        )

        con_expression_shape_docs = DocumentModifiedShape(
            'ConditionExpression',
            new_type=(
                'condition from :py:class:`boto3.dynamodb.conditions.Attr` '
                'method'
            ),
            new_description=(
                'The condition(s) an attribute(s) must meet. Valid conditions '
                'are listed in the '
                ':ref:`DynamoDB Reference Guide<ref_dynamodb_conditions>`.'
            ),
            new_example_value='Attr(\'myattribute\').eq(\'myvalue\')'
        )

        self.meta.client.meta.events.register(
            'docs.*.dynamodb.*.complete-section',
            attr_value_shape_docs.replace_documentation_for_matching_shape,
            unique_id='dynamodb-attr-value-docs')

        self.meta.client.meta.events.register(
            'docs.*.dynamodb.*.complete-section',
            key_expression_shape_docs.replace_documentation_for_matching_shape,
            unique_id='dynamodb-key-expression-docs')

        self.meta.client.meta.events.register(
            'docs.*.dynamodb.*.complete-section',
            con_expression_shape_docs.replace_documentation_for_matching_shape,
            unique_id='dynamodb-cond-expression-docs')


class TransformationInjector(object):
    """Injects the transformations into the user provided parameters."""
    def __init__(self, transformer=None, condition_builder=None,
                 serializer=None, deserializer=None):
        # Each collaborator is injectable for testing; default to the
        # standard implementation when not supplied.
        self._transformer = transformer
        if transformer is None:
            self._transformer = ParameterTransformer()

        self._condition_builder = condition_builder
        if condition_builder is None:
            self._condition_builder = ConditionExpressionBuilder()

        self._serializer = serializer
        if serializer is None:
            self._serializer = TypeSerializer()

        self._deserializer = deserializer
        if deserializer is None:
            self._deserializer = TypeDeserializer()

    def inject_condition_expressions(self, params, model, **kwargs):
        """Injects the condition expression transformation into the parameters

        This injection includes transformations for ConditionExpression shapes
        and KeyExpression shapes. It also handles any placeholder names and
        values that are generated when transforming the condition expressions.
        """
        # Reset placeholder counters so names/values are deterministic per
        # request.
        self._condition_builder.reset()
        generated_names = {}
        generated_values = {}

        # Create and apply the Condition Expression transformation.
        transformation = ConditionExpressionTransformation(
            self._condition_builder,
            placeholder_names=generated_names,
            placeholder_values=generated_values,
            is_key_condition=False
        )
        self._transformer.transform(
            params, model.input_shape, transformation,
            'ConditionExpression')

        # Create and apply the Key Condition Expression transformation.
        transformation = ConditionExpressionTransformation(
            self._condition_builder,
            placeholder_names=generated_names,
            placeholder_values=generated_values,
            is_key_condition=True
        )
        self._transformer.transform(
            params, model.input_shape, transformation,
            'KeyExpression')

        expr_attr_names_input = 'ExpressionAttributeNames'
        expr_attr_values_input = 'ExpressionAttributeValues'

        # Now that all of the condition expression transformation are done,
        # update the placeholder dictionaries in the request.
        if expr_attr_names_input in params:
            params[expr_attr_names_input].update(generated_names)
        else:
            if generated_names:
                params[expr_attr_names_input] = generated_names

        if expr_attr_values_input in params:
            params[expr_attr_values_input].update(generated_values)
        else:
            if generated_values:
                params[expr_attr_values_input] = generated_values

    def inject_attribute_value_input(self, params, model, **kwargs):
        """Injects DynamoDB serialization into parameter input"""
        self._transformer.transform(
            params, model.input_shape, self._serializer.serialize,
            'AttributeValue')

    def inject_attribute_value_output(self, parsed, model, **kwargs):
        """Injects DynamoDB deserialization into responses"""
        # Some operations (e.g. DeleteTable) have no output shape to walk.
        if model.output_shape is not None:
            self._transformer.transform(
                parsed, model.output_shape, self._deserializer.deserialize,
                'AttributeValue'
            )


class ConditionExpressionTransformation(object):
    """Provides a transformation for condition expressions

    The ``ParameterTransformer`` class can call this class directly
    to transform the condition expressions in the parameters provided.
    """
    def __init__(self, condition_builder, placeholder_names,
                 placeholder_values, is_key_condition=False):
        self._condition_builder = condition_builder
        # Shared dicts: accumulated placeholders are merged into the request
        # by the caller (TransformationInjector).
        self._placeholder_names = placeholder_names
        self._placeholder_values = placeholder_values
        self._is_key_condition = is_key_condition

    def __call__(self, value):
        if isinstance(value, ConditionBase):
            # Create a conditional expression string with placeholders
            # for the provided condition.
            built_expression = self._condition_builder.build_expression(
                value, is_key_condition=self._is_key_condition)

            self._placeholder_names.update(
                built_expression.attribute_name_placeholders)
            self._placeholder_values.update(
                built_expression.attribute_value_placeholders)

            return built_expression.condition_expression
        # Use the user provided value if it is not a ConditonBase object.
        return value


class ParameterTransformer(object):
    """Transforms the input to and output from botocore based on shape"""

    def transform(self, params, model, transformation, target_shape):
        """Transforms the dynamodb input to or output from botocore

        It applies a specified transformation whenever a specific shape name
        is encountered while traversing the parameters in the dictionary.

        :param params: The parameters structure to transform.
        :param model: The operation model.
        :param transformation: The function to apply the parameter
        :param target_shape: The name of the shape to apply the
            transformation to
        """
        self._transform_parameters(
            model, params, transformation, target_shape)

    def _transform_parameters(self, model, params, transformation,
                              target_shape):
        # Recursive dispatch on the shape's type; scalar shapes need no
        # traversal.
        type_name = model.type_name
        if type_name in ['structure', 'map', 'list']:
            getattr(self, '_transform_%s' % type_name)(
                model, params, transformation, target_shape)

    def _transform_structure(self, model, params, transformation,
                             target_shape):
        if not isinstance(params, collections_abc.Mapping):
            return
        for param in params:
            if param in model.members:
                member_model = model.members[param]
                member_shape = member_model.name
                if member_shape == target_shape:
                    params[param] = transformation(params[param])
                else:
                    self._transform_parameters(
                        member_model, params[param], transformation,
                        target_shape)

    def _transform_map(self, model, params, transformation, target_shape):
        if not isinstance(params, collections_abc.Mapping):
            return
        value_model = model.value
        value_shape = value_model.name
        for key, value in params.items():
            if value_shape == target_shape:
                params[key] = transformation(value)
            else:
                self._transform_parameters(
                    value_model, params[key], transformation, target_shape)

    def _transform_list(self, model, params, transformation, target_shape):
        if not isinstance(params, collections_abc.MutableSequence):
            return
        member_model = model.member
        member_shape = member_model.name
        for i, item in enumerate(params):
            if member_shape == target_shape:
                params[i] = transformation(item)
            else:
                self._transform_parameters(
                    member_model, params[i], transformation, target_shape)
import sys import os import argparse import json from pytz import timezone, utc from datetime import datetime from dateutil import parser, relativedelta from simple_salesforce import Salesforce, SalesforceLogin, SFType # define arguments # will read default from ~/.hosd.yml argp = argparse.ArgumentParser(description='Clone Occurrence.') argp.add_argument('config', nargs='+', help='json file that contain occurrences config') argp.add_argument('--username', help='username USERNAME') argp.add_argument('--password', help='password PASSWORD') argp.add_argument('--token', help='token SECURITY_TOKEN') argp.add_argument('--occurrence', help='occurrence OCCURRENCE_NAME') argp.add_argument('--date', help='date OCCURRENCE_DATE(yyyy-mm-dd)') argp.add_argument('--timezone', help='timezone TIMEZONE(eg.US/Pacific)') argp.add_argument('--dry', help='dry run', action='store_true') argp.add_argument('--debug', help='debug', action='store_true') #sf = Salesforce(username='myemail@example.com', password='password', security_token='token') def clone_occurrence(sf, oc_name, new_date, tz, dry_run=False, debug=False): # query for occurrence_name qres = sf.query("SELECT Id FROM HOC__Occurrence__c where Name = '%s'" % (oc_name)) if ('records' not in qres) or (len(qres['records']) < 1) or ('Id' not in qres['records'][0]): print "Occurence %s not found !" 
% (oc_name) return -1 # create Occurrence data type Occurrence = SFType('HOC__Occurrence__c', sf.session_id, sf.sf_instance) oc = Occurrence.get(qres['records'][0]['Id']) if not oc: print "Failed to retrieve Occurrence %s" % (qres['records'][0]['Id']) return # get Opportunity VolunteerOpportunity = SFType('HOC__Volunteer_Opportunity__c', sf.session_id, sf.sf_instance) op = VolunteerOpportunity.get(oc['HOC__Volunteer_Opportunity__c']) if not op: print "Failed to retrieve Volunteer Opportunity%s" % (oc['HOC__Volunteer_Opportunity__c']) return # do date calculation old_start_datetime = parser.parse(oc['HOC__Start_Date_Time__c']) old_end_datetime = parser.parse(oc['HOC__End_Date_Time__c']) # need to make sure we calculate delta date in the right timezone, # otherwise it can mess up the calculation delta = new_date - old_start_datetime.astimezone(tz).date() # this weird formula is to add delta while maintaining the correct timezone # first add delta, then remove timezone (to maintain the same hour) # then add back the timezone, so we can calculate utc timezone correctly afterward new_start_datetime = tz.localize((old_start_datetime.astimezone(tz) + delta).replace(tzinfo=None)).astimezone(utc) new_start_datetime_str = new_start_datetime.isoformat() new_end_datetime = tz.localize((old_end_datetime.astimezone(tz) + delta).replace(tzinfo=None)).astimezone(utc) new_end_datetime_str = new_end_datetime.isoformat() new_start_tz = new_start_datetime.astimezone(tz) print "=========================================" print "Occurrence Id: " + oc_name print "Project Name: " + op['Name'] print "Volunteer Coordinator Name: " + oc['HOC__Volunteer_Coordinator_Name__c'] print "Volunteer Coordinator Email: " + oc['HOC__Volunteer_Coordinator_Email__c'] print "Days Time Needed: " + oc['HOC__Days_Times_Needed__c'] print "Clone to date (UTC): " + str(new_start_datetime) if debug: print "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" print "Original Data" print 
"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" print json.dumps(oc, sort_keys=True, indent=4, separators=(',',':')) print "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" # process properties new_oc = {} # we will go through the oc, and calling it one by one, for every key in the oc we will # check the modifier. # If the value exists: # accept lambda to do some processing to it, will be passed the oc and new_oc as parameter # lambda (key, old_oc, new_oc) # otherwise if it is None, it will be removed # if the value doesn't exist, it will be copied as is oc_modifier = { u'attributes': None, u'Id': None, # u'OwnerId': None, u'IsDeleted': None, u'Name': None, u'CreatedDate': None, u'CreatedById': None, u'LastModifiedDate': None, u'LastModifiedById': None, u'SystemModstamp': None, u'LastActivityDate': None, u'ConnectionReceivedId': None, u'ConnectionSentId': None, u'HOC__City__c': None, u'HOC__Country__c': None, #u'HOC__Days_Times_Needed__c': None, u'HOC__End_Date_Time__c': lambda k,ooc,noc:new_end_datetime_str, u'HOC__Google_Map_URL__c': None, u'HOC__HOC_Domain_Name__c': None, u'HOC__HOC_ID__c': None, u'HOC__Import_ID__c': None, #u'HOC__Location__c': None, u'HOC__Managing_Organization_Name__c': None, #u'HOC__Maximum_Attendance__c': None, #u'HOC__Minimum_Attendance__c': None, u'HOC__Occurrence_URL__c': None, #u'HOC__Opportunity_Approval_Manager_Email__c': None, #u'HOC__Partner_Staff_Email__c': None, u'HOC__Posting_Status__c': None, #u'HOC__Recurrence__c': None, u'HOC__Registration_Deadline__c': None, u'HOC__Registration_Start_Date__c': None, u'HOC__Schedule_Type__c': None, u'HOC__Serial_Number__c': None, u'HOC__Start_Date_Time__c': lambda k,ooc,noc:new_start_datetime_str, u'HOC__State_Province__c': None, #u'HOC__Status__c': None, u'HOC__Street__c': None, u'HOC__Total_Attended__c': None, u'HOC__Total_Hours_Served__c': None, #u'HOC__Volunteer_Coordinator_Email__c': None, #u'HOC__Volunteer_Coordinator_Name__c': None, #u'HOC__Volunteer_Leader_Needed__c': None, 
u'HOC__Volunteer_Opportunity_Type__c': None, #u'HOC__Volunteer_Opportunity__c': None, u'HOC__Volunteers_Still_Needed__c': None, u'HOC__Zip_Postal_Code__c': None, u'HOC__Guest_Volunteer_Hours_Served__c': None, u'HOC__Guest_Volunteers_Attended__c': None, u'HOC__Total_Confirmed__c': None, u'HOC__Total_Connections__c': None, u'HOC__Total_Declined__c': None, u'HOC__Total_Not_Attended__c': None, u'HOC__Total_Pending__c': None, u'HOC__Total_Unreported__c': None, u'HOC__Volunteer_Hours_Served__c': None, u'HOC__Volunteers_Attended__c': None, u'HOC__Guest_Volunteer_Number_Hours_Served__c': None, #u'HOC__Opportunity_Coordinator__c': None, u'HOC__Total_Number_Hours_Served__c': None, u'HOC__Update_Connections_Status__c': None, u'HOC__Volunteer_Number_Hours_Served__c': None, u'HOC__CreationSource__c': None, u'HOC__Number_of_Occurrences__c': None, u'HOC__HOC_Backend_Domain_Name__c': None, u'HOC__LastModifiedByV2__c': None, u'HOC__OwnerIdV2__c': None, u'HOC__Grouped_Occurrences__c': None, #u'HOC__Include_Pending_for_Max_Attendance__c': None, u'HOC__Locations_Details_Page__c': None, #u'HOC__Maximum_Waitlist__c': None, #u'HOC__Turn_off_teams__c': None, #u'HOC__Turn_off_waitlist__c' # IMPACT "Additional_Impact__c": None, "Animals_Served_Cared_For__c": None, "ConnectionReceivedId": None, "ConnectionSentId": None, "Craft_Items_Created_Constructed__c": None, "Facilities_Maintained_Revitalized__c": None, "For_Follow_Up__c": None, "Gardens_Maintained_Created__c": None, "Individuals_Received_Donations__c": None, "Individuals_Served_Engaged__c": None, "Mi_Trail_Beach_Park_Maintained_Created__c": None, "Potential_Volunteer_Leaders__c": None, "Pounds_of_Trash_Debris_Collected__c": None, "Share_a_Story__c": None, } for k in oc.keys(): if k in oc_modifier: if oc_modifier[k] is None: # skip the data pass else: # assume this is lambda new_oc[k] = oc_modifier[k](k, oc, new_oc) else: new_oc[k] = oc[k] if debug: print "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" print "Modified Data" print 
"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" print json.dumps(new_oc, sort_keys=True, indent=4, separators=(',',':')) print "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" # double check the time, there should be no two occurence within the same date - to make this Idempotent check = sf.query("SELECT Id FROM HOC__Occurrence__c where HOC__Volunteer_Opportunity__c = '%s' and HOC__Start_Date_Time__c = %s" % ( oc['HOC__Volunteer_Opportunity__c'], new_start_datetime_str)) if check['totalSize'] > 0: print "Skipping - duplicate record found for %s, "%(new_start_tz.strftime('%A')) + str(new_start_tz) print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" return else: print "Clone to date: %s, "%(new_start_tz.strftime('%A')) + str(new_start_tz) print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" if dry_run: print("DRY RUN ..") print "=========================================" else: print("CREATING OCCURRENCE ..") result = Occurrence.create(new_oc) print result print "=========================================" return 0 def main(argv=None): if argv is None: argv = sys.argv[1:] config = {} args = argp.parse_args(argv) try: for cfg in args.config: with open(cfg) as datafile: config.update(json.load(datafile)) except IOError as e: pass config['schedule']=config.get('schedule',[]) # ensure schedule exists username = args.username or config.get('username', 'UNKNOWN') password = args.password or config.get('password', 'UNKNOWN') token = args.token or config.get('token', 'UNKNOWN') mytz = timezone(args.timezone or config.get('timezone', 'US/Pacific')) dry_run = args.dry debug = args.debug if args.occurrence is not None and args.date is not None: config['schedule'].append({ 'occurence':args.occurence, 'date':datetime.strptime(args.date, '%Y-%m-%d') }) if len(config['schedule'])==0: print 'No occurence scheduled ..' 
return try: print 'Logging in as %s'%(username) session_id, instance = SalesforceLogin(username=username, password=password, security_token=token) except Exception, e: print 'Failed to login : %s' % (str(e)) return 1 sf = Salesforce(instance=instance, session_id=session_id) for sched in config['schedule']: new_date = datetime.strptime(str(sched['date']), '%Y-%m-%d').date() clone_occurrence(sf, sched['occurence'], new_date, mytz, dry_run, debug) return 0 if __name__ == "__main__": sys.exit(main())
#!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Titan version control system, including atomic commits of groups of files.

Documentation:
  http://code.google.com/p/titan-files/wiki/VersionsService
"""

import logging
import re
from google.appengine.ext import ndb
from titan.common import strong_counters
from titan.common import utils
from titan.files import files

# Lifecycle states of a changeset.
CHANGESET_NEW = 'new'
CHANGESET_PRE_SUBMIT = 'pre-submit'
CHANGESET_SUBMITTED = 'submitted'
CHANGESET_DELETED = 'deleted'
CHANGESET_DELETED_BY_SUBMIT = 'deleted-by-submit'

# Edit types recorded for each committed file version.
FILE_CREATED = 'created'
FILE_EDITED = 'edited'
FILE_DELETED = 'deleted'

# Captures the changeset number prefix of a versioned path.
VERSIONS_PATH_BASE_REGEX = re.compile('^/_titan/ver/([0-9]+)')
# For formating "/_titan/ver/123/some/file/path"
VERSIONS_PATH_FORMAT = '/_titan/ver/%d%s'

# Name of the sharded strong counter used to allocate changeset numbers.
_CHANGESET_COUNTER_NAME = 'num_changesets'


class Error(Exception):
  pass


class ChangesetError(Error):
  pass


class InvalidChangesetError(ChangesetError):
  pass


class FileVersionError(Error):
  pass


class CommitError(Error):
  pass


class FileVersioningMixin(files.File):
  """Mixin to provide versioned file handling.

  If created without an associated changeset, this object will dynamically
  determine the real file location from its latest committed changeset.
  """

  @utils.ComposeMethodKwargs
  def __init__(self, **kwargs):
    # If given, this File represents the file at the given changeset.
    # If not, this File represents the latest committed file version,
    # but it cannot be written or changed (since that must happen with an
    # associated changeset).
    self.changeset = kwargs.pop('changeset', None)
    self._disable_root_copy = kwargs.pop('_disable_root_copy', False)
    super(FileVersioningMixin, self).__init__(**kwargs)

    # Support initing with a /_titan/ver path instead of a changeset number.
    versioned_path_match = VERSIONS_PATH_BASE_REGEX.match(kwargs['path'])
    if versioned_path_match:
      self.changeset = int(versioned_path_match.group(1))
      # Replace the path argument.
      kwargs['path'] = VERSIONS_PATH_BASE_REGEX.sub('', kwargs['path'])
      self._path = kwargs['path']
      self._real_path = None

    if self.changeset and isinstance(self.changeset, int):
      # Support integer changeset argument.
      self.changeset = Changeset(self.changeset)

  def __repr__(self):
    return '<File %s (cs:%r)>' % (self._path,
                                  getattr(self, 'changeset', None))

  @property
  def _file(self):
    """Handle dynamic determination of correct file entity."""
    if not self.changeset:
      # No associated changeset. Dynamically pick the file entity based on
      # the latest FilePointers.
      root_file_pointer = _FilePointer.GetRootKey()
      file_pointer = _FilePointer.get_by_id(self.path,
                                            parent=root_file_pointer)
      if file_pointer:
        # Associate to the committed changeset.
        self.changeset = Changeset(file_pointer.changeset_num)
      else:
        raise files.BadFileError('File does not exist: %s' % self._path)

    # A changeset exists, so real_path will resolve correctly. Fall through
    # to finding the file entity normally.
    return super(FileVersioningMixin, self)._file

  @property
  def real_path(self):
    """Override the storage location of the file to the versioned path."""
    if not self.changeset:
      raise InvalidChangesetError(
          'File modification requires an associated changeset.')
    if not self._real_path:
      self._real_path = _MakeVersionedPath(self._path, self.changeset)
    return self._real_path

  @property
  def versioned_path(self):
    # Alias: the storage path is always the versioned path.
    return self.real_path

  @utils.ComposeMethodKwargs
  def Write(self, **kwargs):
    """Write method. See superclass docstring."""
    if not self.changeset:
      raise InvalidChangesetError(
          'File modification requires an associated changeset.')
    delete = kwargs.pop('delete', False)
    # Writes are only allowed into a 'new' (staging) changeset.
    _VerifyIsNewChangeset(self.changeset)
    self.changeset.AssociateFile(self)

    # Update meta data.
    kwargs['meta'] = kwargs.get('meta') or {}
    if delete:
      kwargs['content'] = ''
      kwargs['meta']['status'] = FILE_DELETED
      # This will orphan blobs if a large file is uploaded many times in a
      # changeset without committing, but that's better than losing the data.
      # TODO(user): add a flag to entities signifying if they have been
      # copied or deleted, so that we can notice and delete orphaned blobs.
      kwargs['_delete_old_blob'] = False
    else:
      # The first time the versioned file is created (or un-deleted), we have
      # to branch all content and properties from the current root file
      # version.
      if not self._disable_root_copy:
        _CopyFileFromRoot(self.path, self.changeset)
      kwargs['meta']['status'] = FILE_EDITED
      kwargs['_delete_old_blob'] = False
    return super(FileVersioningMixin, self).Write(**kwargs)

  @utils.ComposeMethodKwargs
  def Delete(self, **kwargs):
    """Revert the file from its staging changeset (not a versioned delete)."""
    if not self.changeset:
      raise InvalidChangesetError(
          'File modification requires an associated changeset.')
    # A delete in the files world is a revert in the versions world.
    # The file should be removed entirely from the staging changeset.
    _VerifyIsNewChangeset(self.changeset)
    self.changeset.DisassociateFile(self)
    return super(FileVersioningMixin, self).Delete(**kwargs)


# ------------------------------------------------------------------------------


class Changeset(object):
  """Unit of consistency over a group of files.

  Attributes:
    num: An integer of the changeset number.
    created: datetime.datetime object of when the changeset was created.
    created_by: The User object of who created this changeset.
    status: One of the CHANGESET_* string constants.
    base_path: The path prefix for all files in this changeset,
        for example: '/_titan/ver/123'
    linked_changeset_base_path: Same as base_path, but for the linked
        changeset.
    exists: If the given changeset exists.
  """

  def __init__(self, num, changeset_ent=None):
    # The backing _Changeset entity is lazy-loaded (see changeset_ent).
    self._changeset_ent = changeset_ent
    self._num = int(num)
    # Files registered via AssociateFile/DisassociateFile before commit.
    self._associated_files = []
    self._finalized_files = False

  def __eq__(self, other):
    """Compare equality of two Changeset objects."""
    return isinstance(other, Changeset) and self.num == other.num

  def __repr__(self):
    return '<Changeset %d evaluated: %s>' % (self._num,
                                             bool(self._changeset_ent))

  @property
  def changeset_ent(self):
    """Lazy-load the _Changeset entity."""
    if not self._changeset_ent:
      self._changeset_ent = _Changeset.get_by_id(
          str(self._num), parent=_Changeset.GetRootKey())
      if not self._changeset_ent:
        raise ChangesetError('Changeset %s does not exist.' % self._num)
    return self._changeset_ent

  @property
  def num(self):
    return self._num

  @property
  def created(self):
    return self.changeset_ent.created

  @property
  def status(self):
    return self.changeset_ent.status

  @property
  def base_path(self):
    return VERSIONS_PATH_FORMAT % (self.num, '')

  @property
  def linked_changeset_base_path(self):
    # Returns None implicitly when there is no linked changeset.
    if self.linked_changeset:
      return VERSIONS_PATH_FORMAT % (self.linked_changeset_num, '')

  @property
  def linked_changeset(self):
    if self.linked_changeset_num:
      return Changeset(num=self.linked_changeset_num)

  @property
  def linked_changeset_num(self):
    # Only submitted/pre-submit/deleted-by-submit changesets carry a link.
    if self.status not in (CHANGESET_NEW, CHANGESET_DELETED):
      return int(self.changeset_ent.linked_changeset.id())

  @property
  def created_by(self):
    return self.changeset_ent.created_by

  @property
  def exists(self):
    try:
      return bool(self.changeset_ent)
    except ChangesetError:
      return False

  def GetFiles(self):
    """Get all files associated with this changeset.

    Guarantees strong consistency, but requires that associated file paths
    have been finalized on this specific Changeset instance.

    Raises:
      ChangesetError: If associated file paths have not been finalized.
    Returns:
      A populated files.Files object.
    """
    if not self._finalized_files:
      raise ChangesetError(
          'Cannot guarantee strong consistency when associated file paths '
          'have not been finalized. Perhaps you want ListFiles?')
    return files.Files(files=self._associated_files)

  def ListFiles(self):
    """Queries and returns a Files object containing this changeset's files.

    This method is always eventually consistent and may not contain
    recently changed files.

    Returns:
      A populated files.Files object.
    """
    changeset = self
    if changeset.status == CHANGESET_SUBMITTED:
      # The files stored for submitted changesets are actually stored under
      # the staging changeset's number, since they are never moved.
      changeset = changeset.linked_changeset
    versioned_files = files.Files.List(changeset.base_path, recursive=True)
    versioned_files.Load()
    # Recreate a Files object to get rid of versioned paths in the keys:
    return files.Files(files=versioned_files.values())

  def Serialize(self):
    """Serializes changeset data into simple types."""
    data = {
        'num': self.num,
        'created': self.created,
        'status': self.status,
        'base_path': self.base_path,
        'linked_changeset_base_path': self.linked_changeset_base_path,
        'linked_changeset_num': self.linked_changeset_num,
        'created_by': str(self.created_by) if self.created_by else None,
    }
    return data

  def AssociateFile(self, titan_file):
    """Associate a file temporally to this changeset object before commit.

    Args:
      titan_file: File object.
    """
    self._associated_files.append(titan_file)
    # Any change invalidates a previous FinalizeAssociatedFiles() call.
    self._finalized_files = False

  def DisassociateFile(self, titan_file):
    """Disassociate a file from this changeset object before commit.

    Args:
      titan_file: File object.
    """
    self._associated_files.remove(titan_file)
    self._finalized_files = False

  def FinalizeAssociatedFiles(self):
    """Indicate that this specific Changeset object was used for all operations.

    This flag is used during commit to indicate if this object can be trusted
    for strong consistency guarantees of which files paths will be committed.
    Only call this method if you are sure that this same Changeset instance
    was passed in for all file operations associated with this changeset.

    Raises:
      ChangesetError: if no files have been associated.
    """
    if not self._associated_files:
      raise ChangesetError('Cannot finalize: no associated file objects.')
    self._finalized_files = True


class _Changeset(ndb.Model):
  """Model representing a changeset.

  Attributes:
    num: Integer of the entity's key.id().
    created: datetime.datetime object of when this entity was created.
    status: A string status of the changeset.
    linked_changeset: A reference between staging and finalized changesets.
    created_by: A users.User object of the user who created the changeset.
  """
  num = ndb.IntegerProperty(required=True)
  created = ndb.DateTimeProperty(auto_now_add=True)
  status = ndb.StringProperty(choices=[CHANGESET_NEW,
                                       CHANGESET_PRE_SUBMIT,
                                       CHANGESET_SUBMITTED,
                                       CHANGESET_DELETED,
                                       CHANGESET_DELETED_BY_SUBMIT])
  linked_changeset = ndb.KeyProperty(kind='_Changeset')
  created_by = ndb.UserProperty(auto_current_user_add=True)

  def __repr__(self):
    return '<_Changeset %d status:%s>' % (self.num, self.status)

  @staticmethod
  def GetRootKey():
    """Get the root key, the parent of all changeset entities."""
    # All changesets are in the same entity group by being children of the
    # arbitrary, non-existent "0" changeset.
    return ndb.Key('_Changeset', '0')


class FileVersion(object):
  """Metadata about a committed file version.

  NOTE: Always trust FileVersions as the canonical source of a file's
  revision history metadata. Don't use the 'status' meta property or other
  properties of File objects as authoritative.

  Attributes:
    path: The committed file path. Example: /foo.html
    versioned_path: The path of the versioned file.
        Ex: /_titan/ver/123/foo.html
    changeset: A final Changeset object.
    created: datetime.datetime object of when the file version was created.
    status: The edit type of the affected file.
  """

  def __init__(self, path, changeset, file_version_ent=None):
    self._path = path
    self._file_version_ent = file_version_ent
    self._changeset = changeset
    if isinstance(changeset, int):
      # Support integer changeset argument.
      self._changeset = Changeset(changeset)

  @property
  def _file_version(self):
    """Lazy-load the _FileVersion entity."""
    if not self._file_version_ent:
      file_version_id = _FileVersion.MakeKeyName(self._changeset, self._path)
      self._file_version_ent = _FileVersion.get_by_id(
          file_version_id, parent=self._changeset.changeset_ent.key)
      if not self._file_version_ent:
        raise FileVersionError('No file version of %s at %s.'
                               % (self._path, self._changeset.num))
    return self._file_version_ent

  def __repr__(self):
    return ('<FileVersion path: %s versioned_path: %s created: %s '
            'status: %s>' % (self.path, self.versioned_path, self.created,
                             self.status))

  @property
  def path(self):
    return self._path

  @property
  def versioned_path(self):
    # Committed files physically live under the *staging* (linked) changeset
    # number; they are never copied on commit.
    return VERSIONS_PATH_FORMAT % (self._changeset.linked_changeset_num,
                                   self._path)

  @property
  def changeset(self):
    return self._changeset

  @property
  def changeset_created_by(self):
    return self._file_version.changeset_created_by

  @property
  def created(self):
    return self._file_version.created

  @property
  def status(self):
    return self._file_version.status

  def Serialize(self):
    """Serializes a FileVersion into native types."""
    created_by = self.changeset_created_by
    result = {
        'path': self.path,
        'versioned_path': self.versioned_path,
        'created': self.created,
        'status': self.status,
        'changeset_num': self._changeset.num,
        'changeset_created_by': str(created_by) if created_by else None,
        'linked_changeset_num': self.changeset.linked_changeset_num,
    }
    return result


class _FileVersion(ndb.Model):
  """Model representing metadata about a committed file version.

  A _FileVersion entity will only exist for committed file changes.

  Attributes:
    key.id(): '<changeset num>:<path>', such as '123:/foo.html'.
    path: The Titan File path.
    changeset_num: The changeset number in which the file was changed.
    changeset_created_by: A users.User object of who created the changeset.
    created: datetime.datetime object of when the entity was created.
    status: The edit type of the file at this version.
  """
  # NOTE: This model should be kept as lightweight as possible. Anything
  # else added here increases the amount of time that Commit() will take,
  # and decreases the number of files that can be committed at once.
  path = ndb.StringProperty()
  changeset_num = ndb.IntegerProperty()
  changeset_created_by = ndb.UserProperty()
  created = ndb.DateTimeProperty(auto_now_add=True)
  status = ndb.StringProperty(required=True,
                              choices=[FILE_CREATED,
                                       FILE_EDITED,
                                       FILE_DELETED])

  def __repr__(self):
    return ('<_FileVersion id:%s path:%s changeset_num:%s created:%s '
            'status:%s>' % (self.key.id(), self.path, self.changeset_num,
                            self.created, self.status))

  @staticmethod
  def MakeKeyName(changeset, path):
    # Key format: '<changeset num>:<path>'.
    return ':'.join([str(changeset.num), path])


class _FilePointer(ndb.Model):
  """Pointer from a root file path to its current file version.

  All _FilePointers are in the same entity group. As such, the entities
  are updated atomically to point a set of files at new versions.

  Attributes:
    key.id(): Root file path string. Example: '/foo.html'
    changeset_num: An integer pointing to the file's latest committed
        changeset.
    versioned_path: Versioned file path. Example: '/_titan/ver/1/foo.html'
  """
  # NOTE: This model should be kept as lightweight as possible. Anything
  # else added here increases the amount of time that Commit() will take,
  # and decreases the number of files that can be committed at once.
  changeset_num = ndb.IntegerProperty()

  def __repr__(self):
    return '<_FilePointer %s Current changeset: %s>' % (self.key.id(),
                                                        self.changeset_num)

  @property
  def versioned_path(self):
    return VERSIONS_PATH_FORMAT % (self.changeset_num, self.key.id())

  @staticmethod
  def GetRootKey():
    # The parent of all _FilePointers is a non-existent _FilePointer
    # arbitrarily named '/', since no file path can be a single slash.
    return ndb.Key('_FilePointer', '/')


class VersionControlService(object):
  """A service object providing version control methods."""

  def NewStagingChangeset(self, created_by=None):
    """Create a new staging changeset with a unique number ID.

    Args:
      created_by: A users.User object, will default to the current user.
    Returns:
      A Changeset.
    """
    return self._NewChangeset(status=CHANGESET_NEW, created_by=created_by)

  def _NewChangeset(self, status, created_by):
    """Create a changeset with the given status."""
    # Changeset numbers are allocated by a sharded strong counter.
    new_changeset_num = strong_counters.Increment(_CHANGESET_COUNTER_NAME)
    changeset_ent = _Changeset(
        # NDB can support integer keys, but this needs to be a string for
        # support of legacy IDs created when using db.
        id=str(new_changeset_num),
        num=new_changeset_num, status=status,
        parent=_Changeset.GetRootKey())
    if created_by:
      changeset_ent.created_by = created_by
    changeset_ent.put()
    return Changeset(num=new_changeset_num, changeset_ent=changeset_ent)

  def GetLastSubmittedChangeset(self):
    """Returns a Changeset object of the last submitted changeset."""
    changeset_root_key = _Changeset.GetRootKey()
    # Use an ancestor query to maintain strong consistency.
    changeset_query = _Changeset.query(ancestor=changeset_root_key)
    changeset_query = changeset_query.filter(
        _Changeset.status == CHANGESET_SUBMITTED)
    changeset_query = changeset_query.order(-_Changeset.num)
    latest_changeset = list(changeset_query.fetch(1))
    if not latest_changeset:
      raise ChangesetError('No changesets have been submitted')
    return Changeset(num=latest_changeset[0].num)

  def GetFileVersions(self, path, limit=1000):
    """Get FileVersion objects of the revisions of this file path.

    Args:
      path: An absolute file path.
      limit: The limit to the number of objects returned.
    Returns:
      A list of FileVersion objects, ordered from latest to earliest.
    """
    file_version_ents = _FileVersion.query()
    file_version_ents = file_version_ents.filter(_FileVersion.path == path)
    # Order in descending chronological order, which will also happen to
    # order by changeset_num.
    file_version_ents = file_version_ents.order(-_FileVersion.created)
    # Encapsulate all the _FileVersion objects in public FileVersion objects.
    file_versions = []
    for file_version_ent in file_version_ents.fetch(limit=limit):
      file_versions.append(
          FileVersion(path=file_version_ent.path,
                      changeset=Changeset(file_version_ent.changeset_num),
                      file_version_ent=file_version_ent))
    return file_versions

  def Commit(self, staged_changeset, force=False):
    """Commit the given changeset.

    Args:
      staged_changeset: A Changeset object with a status of CHANGESET_NEW.
      force: Commit a changeset even if using an eventually-consistent query.
          This could cause files recently added to the changeset to be
          missed on commit.
    Raises:
      CommitError: If a changeset contains no files or it is already
          committed.
    Returns:
      The final Changeset object.
    """
    if staged_changeset.status != CHANGESET_NEW:
      raise CommitError('Cannot commit changeset with status "%s".'
                        % staged_changeset.status)

    try:
      staged_files = staged_changeset.GetFiles()
    except ChangesetError:
      if not force:
        raise
      # Got force=True, get files with an eventually-consistent query.
      staged_files = staged_changeset.ListFiles()
    if not staged_files:
      raise CommitError('Changeset %d contains no file changes.'
                        % staged_changeset.num)

    # Can't nest transactions, so we get a unique final changeset number
    # here. This has the potential to orphan a changeset number (if this
    # submit works but the following transaction does not). However, we
    # don't care.
    final_changeset = self._NewChangeset(
        status=CHANGESET_PRE_SUBMIT, created_by=staged_changeset.created_by)

    transaction_func = (
        lambda: self._Commit(staged_changeset, final_changeset, staged_files))
    ndb.transaction(transaction_func, xg=True)

    return final_changeset

  @staticmethod
  def _Commit(staged_changeset, final_changeset, staged_files):
    """Commit a staged changeset."""
    manifest = ['%s: %s' % (f.meta.status, f.path)
                for f in staged_files.values()]
    logging.info('Submitting changeset %d as changeset %d with %d files:\n%s',
                 staged_changeset.num, final_changeset.num,
                 len(staged_files), '\n'.join(manifest))

    # Update status of the staging and final changesets.
    staged_changeset_ent = staged_changeset.changeset_ent
    staged_changeset_ent.status = CHANGESET_DELETED_BY_SUBMIT
    staged_changeset_ent.linked_changeset = final_changeset.changeset_ent.key
    final_changeset_ent = final_changeset.changeset_ent
    final_changeset_ent.status = CHANGESET_SUBMITTED
    final_changeset_ent.linked_changeset = staged_changeset.changeset_ent.key
    ndb.put_multi([
        staged_changeset.changeset_ent,
        final_changeset.changeset_ent,
    ])

    # Get a mapping of paths to current _FilePointers (or None).
    file_pointers = {}
    root_file_pointer = _FilePointer.GetRootKey()
    ordered_paths = staged_files.keys()
    file_pointer_keys = [ndb.Key(_FilePointer, path, parent=root_file_pointer)
                         for path in ordered_paths]
    file_pointer_ents = ndb.get_multi(file_pointer_keys)
    for i, file_pointer_ent in enumerate(file_pointer_ents):
      file_pointers[ordered_paths[i]] = file_pointer_ent

    new_file_versions = []
    updated_file_pointers = []
    deleted_file_pointers = []
    for path, titan_file in staged_files.iteritems():
      file_pointer = file_pointers[titan_file.path]
      # Update "edited" status to be "created" on commit if file doesn't
      # exist.
      status = titan_file.meta.status
      if titan_file.meta.status == FILE_EDITED and not file_pointer:
        status = FILE_CREATED

      # Create a _FileVersion entity containing revision metadata.
      new_file_version = _FileVersion(
          id=_FileVersion.MakeKeyName(final_changeset, titan_file.path),
          path=titan_file.path,
          changeset_num=final_changeset.num,
          changeset_created_by=final_changeset.created_by,
          status=status,
          parent=final_changeset.changeset_ent.key)
      new_file_versions.append(new_file_version)

      # Create or change the _FilePointer for this file.
      if not file_pointer and status != FILE_DELETED:
        # New file, setup the pointer.
        file_pointer = _FilePointer(id=titan_file.path,
                                    parent=root_file_pointer)
      if file_pointer:
        # Important: the file pointer is pointed to the staged changeset
        # number, since a file is not copied on commit from ver/1/file to
        # ver/2/file.
        file_pointer.changeset_num = staged_changeset.num

      # Files versions marked as "deleted" should delete the _FilePointer.
      if status == FILE_DELETED:
        # Only delete file_pointer if it exists.
        if file_pointer:
          deleted_file_pointers.append(file_pointer)
      else:
        updated_file_pointers.append(file_pointer)

    # For all file changes and updated pointers, do the RPCs.
    if new_file_versions:
      ndb.put_multi(new_file_versions)
    if updated_file_pointers:
      ndb.put_multi(updated_file_pointers)
    if deleted_file_pointers:
      ndb.delete_multi([p.key for p in deleted_file_pointers])

    logging.info('Submitted changeset %d as changeset %d.',
                 staged_changeset.num, final_changeset.num)


def _MakeVersionedPath(path, changeset):
  """Return the versioned storage path for a path in the given changeset."""
  # Make sure we're not accidentally using non-strings,
  # which could create a path like /_titan/ver/123<Some object>
  if not isinstance(path, basestring):
    raise TypeError('path argument must be a string: %r' % path)
  return VERSIONS_PATH_FORMAT % (changeset.num, path)


def _VerifyIsNewChangeset(changeset):
  """If changeset is committed, don't allow files to be changed."""
  if changeset.status != CHANGESET_NEW:
    raise ChangesetError('Cannot write files in a "%s" changeset.'
                         % changeset.status)


def _VerifyRootPaths(paths):
  """Make sure all given paths are not versioned paths."""
  is_multiple = hasattr(paths, '__iter__')
  for path in paths if is_multiple else [paths]:
    if VERSIONS_PATH_BASE_REGEX.match(path):
      raise ValueError('Not a root file path: %s' % path)


def _VerifyVersionedPaths(paths):
  """Make sure all given paths are versioned paths."""
  is_multiple = hasattr(paths, '__iter__')
  for path in paths if is_multiple else [paths]:
    if not VERSIONS_PATH_BASE_REGEX.match(path):
      raise ValueError('Not a versioned file path: %s' % path)


def _CopyFileFromRoot(root_path, changeset):
  """Copy a root file (if it exists) to a new versioned path.

  Args:
    root_path: An absolute filename.
    changeset: A Changeset object.
  Returns:
    The newly created files.File object or None (if the root path
    didn't exist).
  """
  root_file = files.File(root_path)
  versioned_file = files.File(root_path, changeset=changeset,
                              _disable_root_copy=True)
  if not root_file.exists:
    return
  # Copy the root file to the versioned path if:
  #   1) The root file exists.
  #   2) The versioned file doesn't exist or it is being un-deleted.
  if not versioned_file.exists or versioned_file.meta.status == FILE_DELETED:
    root_file.CopyTo(versioned_file)
  return versioned_file
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
from scipy import stats
import tensorflow as tf


class LaplaceTest(tf.test.TestCase):
  """Tests tf.contrib.distributions.Laplace against scipy.stats.laplace."""

  def testLaplaceShape(self):
    with self.test_session():
      loc = tf.constant([3.0] * 5)
      scale = tf.constant(11.0)
      laplace = tf.contrib.distributions.Laplace(loc=loc, scale=scale)

      self.assertEqual(laplace.batch_shape().eval(), (5,))
      self.assertEqual(laplace.get_batch_shape(), tf.TensorShape([5]))
      self.assertAllEqual(laplace.event_shape().eval(), [])
      self.assertEqual(laplace.get_event_shape(), tf.TensorShape([]))

  def testLaplaceLogPDF(self):
    with self.test_session():
      batch_size = 6
      loc = tf.constant([2.0] * batch_size)
      scale = tf.constant([3.0] * batch_size)
      loc_v = 2.0
      scale_v = 3.0
      x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float32)
      laplace = tf.contrib.distributions.Laplace(loc=loc, scale=scale)
      # scipy.stats.laplace is the reference implementation.
      expected_log_pdf = stats.laplace.logpdf(x, loc_v, scale=scale_v)
      log_pdf = laplace.log_pdf(x)
      self.assertEqual(log_pdf.get_shape(), (6,))
      self.assertAllClose(log_pdf.eval(), expected_log_pdf)

      pdf = laplace.pdf(x)
      self.assertEqual(pdf.get_shape(), (6,))
      self.assertAllClose(pdf.eval(), np.exp(expected_log_pdf))

  def testLaplaceLogPDFMultidimensional(self):
    with self.test_session():
      batch_size = 6
      loc = tf.constant([[2.0, 4.0]] * batch_size)
      scale = tf.constant([[3.0, 4.0]] * batch_size)
      loc_v = np.array([2.0, 4.0])
      scale_v = np.array([3.0, 4.0])
      x = np.array([[2.5, 2.5, 4.0, 0.1, 1.0, 2.0]], dtype=np.float32).T
      laplace = tf.contrib.distributions.Laplace(loc=loc, scale=scale)
      expected_log_pdf = stats.laplace.logpdf(x, loc_v, scale=scale_v)
      log_pdf = laplace.log_pdf(x)
      log_pdf_values = log_pdf.eval()
      self.assertEqual(log_pdf.get_shape(), (6, 2))
      self.assertAllClose(log_pdf_values, expected_log_pdf)

      pdf = laplace.pdf(x)
      pdf_values = pdf.eval()
      self.assertEqual(pdf.get_shape(), (6, 2))
      self.assertAllClose(pdf_values, np.exp(expected_log_pdf))

  def testLaplaceLogPDFMultidimensionalBroadcasting(self):
    with self.test_session():
      batch_size = 6
      loc = tf.constant([[2.0, 4.0]] * batch_size)
      # Scalar scale broadcasts against the (6, 2) loc.
      scale = tf.constant(3.0)
      loc_v = np.array([2.0, 4.0])
      scale_v = 3.0
      x = np.array([[2.5, 2.5, 4.0, 0.1, 1.0, 2.0]], dtype=np.float32).T
      laplace = tf.contrib.distributions.Laplace(loc=loc, scale=scale)
      expected_log_pdf = stats.laplace.logpdf(x, loc_v, scale=scale_v)
      log_pdf = laplace.log_pdf(x)
      log_pdf_values = log_pdf.eval()
      self.assertEqual(log_pdf.get_shape(), (6, 2))
      self.assertAllClose(log_pdf_values, expected_log_pdf)

      pdf = laplace.pdf(x)
      pdf_values = pdf.eval()
      self.assertEqual(pdf.get_shape(), (6, 2))
      self.assertAllClose(pdf_values, np.exp(expected_log_pdf))

  def testLaplaceCDF(self):
    with self.test_session():
      batch_size = 6
      loc = tf.constant([2.0] * batch_size)
      scale = tf.constant([3.0] * batch_size)
      loc_v = 2.0
      scale_v = 3.0
      x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float32)

      laplace = tf.contrib.distributions.Laplace(loc=loc, scale=scale)
      expected_cdf = stats.laplace.cdf(x, loc_v, scale=scale_v)

      cdf = laplace.cdf(x)
      self.assertEqual(cdf.get_shape(), (6,))
      self.assertAllClose(cdf.eval(), expected_cdf)

  def testLaplaceMean(self):
    with self.test_session():
      loc_v = np.array([1.0, 3.0, 2.5])
      scale_v = np.array([1.0, 4.0, 5.0])
      laplace = tf.contrib.distributions.Laplace(loc=loc_v, scale=scale_v)
      expected_means = stats.laplace.mean(loc_v, scale=scale_v)
      self.assertEqual(laplace.mean().get_shape(), (3,))
      self.assertAllClose(laplace.mean().eval(), expected_means)

  def testLaplaceMode(self):
    with self.test_session():
      loc_v = np.array([0.5, 3.0, 2.5])
      scale_v = np.array([1.0, 4.0, 5.0])
      laplace = tf.contrib.distributions.Laplace(loc=loc_v, scale=scale_v)
      # Mode of a Laplace distribution equals its location parameter.
      self.assertEqual(laplace.mode().get_shape(), (3,))
      self.assertAllClose(laplace.mode().eval(), loc_v)

  def testLaplaceVariance(self):
    with self.test_session():
      loc_v = np.array([1.0, 3.0, 2.5])
      scale_v = np.array([1.0, 4.0, 5.0])
      laplace = tf.contrib.distributions.Laplace(loc=loc_v, scale=scale_v)
      expected_variances = stats.laplace.var(loc_v, scale=scale_v)
      self.assertEqual(laplace.variance().get_shape(), (3,))
      self.assertAllClose(laplace.variance().eval(), expected_variances)

  def testLaplaceStd(self):
    with self.test_session():
      loc_v = np.array([1.0, 3.0, 2.5])
      scale_v = np.array([1.0, 4.0, 5.0])
      laplace = tf.contrib.distributions.Laplace(loc=loc_v, scale=scale_v)
      expected_std = stats.laplace.std(loc_v, scale=scale_v)
      self.assertEqual(laplace.std().get_shape(), (3,))
      self.assertAllClose(laplace.std().eval(), expected_std)

  def testLaplaceEntropy(self):
    with self.test_session():
      loc_v = np.array([1.0, 3.0, 2.5])
      scale_v = np.array([1.0, 4.0, 5.0])
      expected_entropy = stats.laplace.entropy(loc_v, scale=scale_v)
      laplace = tf.contrib.distributions.Laplace(loc=loc_v, scale=scale_v)
      self.assertEqual(laplace.entropy().get_shape(), (3,))
      self.assertAllClose(laplace.entropy().eval(), expected_entropy)

  def testLaplaceSample(self):
    with tf.Session():
      loc_v = 4.0
      scale_v = 3.0
      loc = tf.constant(loc_v)
      scale = tf.constant(scale_v)
      n = 100000
      laplace = tf.contrib.distributions.Laplace(loc=loc, scale=scale)
      # Fixed seed keeps the statistical checks deterministic.
      samples = laplace.sample_n(n, seed=137)
      sample_values = samples.eval()
      self.assertEqual(samples.get_shape(), (n,))
      self.assertEqual(sample_values.shape, (n,))
      self.assertAllClose(sample_values.mean(),
                          stats.laplace.mean(loc_v, scale=scale_v),
                          rtol=0.05, atol=0.)
      self.assertAllClose(sample_values.var(),
                          stats.laplace.var(loc_v, scale=scale_v),
                          rtol=0.05, atol=0.)
      self.assertTrue(self._kstest(loc_v, scale_v, sample_values))

  def testLaplaceSampleMultiDimensional(self):
    with tf.Session():
      loc_v = np.array([np.arange(1, 101, dtype=np.float32)])  # 1 x 100
      scale_v = np.array([np.arange(1, 11, dtype=np.float32)]).T  # 10 x 1
      laplace = tf.contrib.distributions.Laplace(loc=loc_v, scale=scale_v)
      n = 10000
      samples = laplace.sample_n(n, seed=137)
      sample_values = samples.eval()
      self.assertEqual(samples.get_shape(), (n, 10, 100))
      self.assertEqual(sample_values.shape, (n, 10, 100))
      zeros = np.zeros_like(loc_v + scale_v)  # 10 x 100
      loc_bc = loc_v + zeros
      scale_bc = scale_v + zeros
      self.assertAllClose(
          sample_values.mean(axis=0),
          stats.laplace.mean(loc_bc, scale=scale_bc),
          rtol=0.35, atol=0.)
      self.assertAllClose(
          sample_values.var(axis=0),
          stats.laplace.var(loc_bc, scale=scale_bc),
          rtol=0.10, atol=0.)
      fails = 0
      trials = 0
      for ai, a in enumerate(np.reshape(loc_v, [-1])):
        for bi, b in enumerate(np.reshape(scale_v, [-1])):
          s = sample_values[:, bi, ai]
          trials += 1
          fails += 0 if self._kstest(a, b, s) else 1
      # Allow a small fraction of KS-test failures across the grid.
      self.assertLess(fails, trials * 0.03)

  def _kstest(self, loc, scale, samples):
    # Uses the Kolmogorov-Smirnov test for goodness of fit.
    ks, _ = stats.kstest(samples, stats.laplace(loc, scale=scale).cdf)
    # Return True when the test passes.
    return ks < 0.02

  def testLaplacePdfOfSampleMultiDims(self):
    with tf.Session() as sess:
      laplace = tf.contrib.distributions.Laplace(
          loc=[7., 11.], scale=[[5.], [6.]])
      num = 50000
      samples = laplace.sample_n(num, seed=137)
      pdfs = laplace.pdf(samples)
      sample_vals, pdf_vals = sess.run([samples, pdfs])
      self.assertEqual(samples.get_shape(), (num, 2, 2))
      self.assertEqual(pdfs.get_shape(), (num, 2, 2))
      self.assertAllClose(
          stats.laplace.mean([[7., 11.], [7., 11.]],
                             scale=np.array([[5., 5.], [6., 6.]])),
          sample_vals.mean(axis=0),
          rtol=0.05, atol=0.)
      self.assertAllClose(
          stats.laplace.var([[7., 11.], [7., 11.]],
                            scale=np.array([[5., 5.], [6., 6.]])),
          sample_vals.var(axis=0),
          rtol=0.05, atol=0.)
      self._assertIntegral(sample_vals[:, 0, 0], pdf_vals[:, 0, 0], err=0.02)
      self._assertIntegral(sample_vals[:, 0, 1], pdf_vals[:, 0, 1], err=0.02)
      self._assertIntegral(sample_vals[:, 1, 0], pdf_vals[:, 1, 0], err=0.02)
      self._assertIntegral(sample_vals[:, 1, 1], pdf_vals[:, 1, 1], err=0.02)

  def _assertIntegral(self, sample_vals, pdf_vals, err=1e-3):
    # Trapezoid-rule integral of the pdf over the sampled support; should
    # be approximately 1.
    s_p = zip(sample_vals, pdf_vals)
    prev = (0, 0)
    total = 0
    for k in sorted(s_p, key=lambda x: x[0]):
      pair_pdf = (k[1] + prev[1]) / 2
      total += (k[0] - prev[0]) * pair_pdf
      prev = k
    self.assertNear(1., total, err=err)

  def testLaplaceNonPositiveInitializationParamsRaises(self):
    with self.test_session():
      # scale must be strictly positive; both negative and zero must raise.
      loc_v = tf.constant(0.0, name='loc')
      scale_v = tf.constant(-1.0, name='scale')
      laplace = tf.contrib.distributions.Laplace(loc=loc_v, scale=scale_v)
      with self.assertRaisesOpError('scale'):
        laplace.mean().eval()
      loc_v = tf.constant(1.0, name='loc')
      scale_v = tf.constant(0.0, name='scale')
      laplace = tf.contrib.distributions.Laplace(loc=loc_v, scale=scale_v)
      with self.assertRaisesOpError('scale'):
        laplace.mean().eval()


if __name__ == '__main__':
  tf.test.main()
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for various tensorflow.ops.tf.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.core.framework import node_def_pb2 from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors_impl from tensorflow.python.framework import importer from tensorflow.python.framework import sparse_tensor from tensorflow.python.ops import array_ops from tensorflow.python.ops import gradient_checker from tensorflow.python.ops import gradients_impl from tensorflow.python.platform import test # TODO(zongheng): it'd be great to factor out this function and various random # SparseTensor gen funcs. 
def _sparsify(x, thresh=0.5, index_dtype=np.int64):
  """Zero out entries of `x` below `thresh`; return (SparseTensor, nnz).

  NOTE(review): mutates `x` in place before extracting non-zero entries.
  """
  x[x < thresh] = 0

  non_zero = np.where(x)
  x_indices = np.vstack(non_zero).astype(index_dtype).T
  x_values = x[non_zero]
  x_shape = x.shape

  return sparse_tensor.SparseTensor(
      indices=x_indices, values=x_values, dense_shape=x_shape), len(x_values)


class ShapeOpsTest(test.TestCase):
  """Tests shape/rank/size/expand_dims/squeeze against NumPy references."""

  def _compareShape(self, x, use_gpu=False):
    # Check both the default int32 and the int64 out_type of tf.shape.
    np_ans = np.array(np.shape(x))
    with self.test_session(use_gpu=use_gpu):
      tf_ans = array_ops.shape(x)
      tf_ans_64 = array_ops.shape(x, out_type=dtypes.int64)
      result = tf_ans.eval()
      result_64 = tf_ans_64.eval()
      self.assertAllEqual(np_ans, result)
      self.assertAllEqual(np_ans, result_64)
      self.assertShapeEqual(np_ans, tf_ans)

  def _compareShapeSparse(self, x_np, use_gpu=False):
    np_ans = np.array(np.shape(x_np))
    x_tf, unused_nnz = _sparsify(x_np)
    with self.test_session(use_gpu=use_gpu):
      tf_ans = array_ops.shape(x_tf)
      result = tf_ans.eval()
      self.assertAllEqual(np_ans, result)
      self.assertShapeEqual(np_ans, tf_ans)

  def _compareShapeN(self, x, use_gpu=False):
    np_ans = np.array(np.shape(x))
    with self.test_session(use_gpu=use_gpu) as sess:
      tf_ans = array_ops.shape_n([x, x, x])
      tf_ans_64 = array_ops.shape_n([x, x, x], out_type=dtypes.int64)
      result = sess.run(tf_ans)
      result_64 = sess.run(tf_ans_64)
      for i in range(3):
        self.assertAllEqual(np_ans, result[i])
        self.assertAllEqual(np_ans, result_64[i])
        self.assertShapeEqual(np_ans, tf_ans[i])

  def _compareRank(self, x, use_gpu=False):
    np_ans = np.asarray(np.ndim(x))
    with self.test_session(use_gpu=use_gpu):
      tf_ans = array_ops.rank(x)
      result = tf_ans.eval()
      self.assertAllEqual(np_ans, result)
      self.assertShapeEqual(np_ans, tf_ans)

  def _compareRankSparse(self, x_np, use_gpu=False):
    np_ans = np.asarray(np.ndim(x_np))
    x_tf, unused_nnz = _sparsify(x_np)
    with self.test_session(use_gpu=use_gpu):
      tf_ans = array_ops.rank(x_tf)
      result = tf_ans.eval()
      self.assertAllEqual(np_ans, result)
      self.assertShapeEqual(np_ans, tf_ans)

  def _compareSize(self, x, use_gpu=False):
    np_ans = np.asarray(np.size(x))
    with self.test_session(use_gpu=use_gpu):
      tf_ans = array_ops.size(x)
      result = tf_ans.eval()
      tf_ans_64 = array_ops.size(x, out_type=dtypes.int64)
      result_64 = tf_ans_64.eval()
      self.assertAllEqual(np_ans, result)
      self.assertAllEqual(np_ans, result_64)
      self.assertShapeEqual(np_ans, tf_ans)

  def _compareSizeSparse(self, x_np, use_gpu=False):
    np_ans = np.asarray(np.size(x_np))
    x_tf, unused_nnz = _sparsify(x_np)
    with self.test_session(use_gpu=use_gpu):
      tf_ans = array_ops.size(x_tf)
      result = tf_ans.eval()
      self.assertAllEqual(np_ans, result)
      self.assertShapeEqual(np_ans, tf_ans)

  def _testCpu(self, x):
    self._compareShape(x, use_gpu=False)
    self._compareShapeN(x, use_gpu=False)
    self._compareRank(x, use_gpu=False)
    self._compareSize(x, use_gpu=False)
    self._compareShapeSparse(x, use_gpu=False)
    self._compareRankSparse(x, use_gpu=False)
    self._compareSizeSparse(x, use_gpu=False)

  def _testGpu(self, x):
    self._compareShape(x, use_gpu=True)
    self._compareShapeN(x, use_gpu=True)
    self._compareRank(x, use_gpu=True)
    self._compareSize(x, use_gpu=True)
    self._compareShapeSparse(x, use_gpu=True)
    self._compareRankSparse(x, use_gpu=True)
    self._compareSizeSparse(x, use_gpu=True)

  def _testAll(self, x):
    self._testCpu(x)
    self._testGpu(x)

  def testBasic(self):
    # Ranks 1 through 6.
    self._testAll(np.random.randn(2))
    self._testAll(np.random.randn(2, 3))
    self._testAll(np.random.randn(2, 3, 5))
    self._testAll(np.random.randn(2, 3, 5, 7))
    self._testAll(np.random.randn(2, 3, 5, 7, 11))
    self._testAll(np.random.randn(2, 3, 5, 7, 11, 13))

  def testBool(self):
    self._testAll(np.random.choice((False, True), size=(2,)))
    self._testAll(np.random.choice((False, True), size=(2, 3)))
    self._testAll(np.random.choice((False, True), size=(2, 3, 5)))
    self._testAll(np.random.choice((False, True), size=(2, 3, 5, 7)))
    self._testAll(np.random.choice((False, True), size=(2, 3, 5, 7, 11)))
    self._testAll(np.random.choice((False, True), size=(2, 3, 5, 7, 11, 13)))

  # Disabled because it takes too long to run, but manually verified
  # as passing at time of writing.
  def _test64BitOutput(self):
    with self.test_session():
      inp = array_ops.zeros([2**31])
      num_elements = array_ops.size_internal(
          inp, optimize=False, out_type=dtypes.int64)
      self.assertEqual(2**31, num_elements.eval())

    # Too large for tf.int32 output.
    with self.assertRaises(errors_impl.InvalidArgumentError):
      with self.test_session():
        inp = array_ops.zeros([2**31])
        num_elements = array_ops.size_internal(
            inp, optimize=False, out_type=dtypes.int32)
        self.assertEqual(2**31, num_elements.eval())

  def _compareExpandDims(self, x, dim, use_gpu):
    np_ans = np.expand_dims(x, axis=dim)
    with self.test_session(use_gpu=use_gpu):
      tensor = array_ops.expand_dims(x, dim)
      tf_ans = tensor.eval()
      self.assertShapeEqual(np_ans, tensor)
      self.assertAllEqual(np_ans, tf_ans)

  def _compareExpandDimsAll(self, x, dim):
    self._compareExpandDims(x, dim, False)
    self._compareExpandDims(x, dim, True)

  def testExpandDims(self):
    self._compareExpandDimsAll(np.zeros([2]), 0)
    self._compareExpandDimsAll(np.zeros([2]), 1)
    self._compareExpandDimsAll(np.zeros([2]), -1)

    self._compareExpandDimsAll(np.zeros([2, 3]), 0)
    self._compareExpandDimsAll(np.zeros([2, 3]), 1)
    self._compareExpandDimsAll(np.zeros([2, 3]), 2)
    self._compareExpandDimsAll(np.zeros([2, 3]), -1)
    self._compareExpandDimsAll(np.zeros([2, 3]), -2)

    self._compareExpandDimsAll(np.zeros([2, 3, 5]), 0)
    self._compareExpandDimsAll(np.zeros([2, 3, 5]), 1)
    self._compareExpandDimsAll(np.zeros([2, 3, 5]), 2)
    self._compareExpandDimsAll(np.zeros([2, 3, 5]), 3)

    self._compareExpandDimsAll(np.zeros([2, 3, 5]), -1)
    self._compareExpandDimsAll(np.zeros([2, 3, 5]), -2)
    self._compareExpandDimsAll(np.zeros([2, 3, 5]), -3)
    self._compareExpandDimsAll(np.zeros([2, 3, 5]), -4)

  def testExpandDimsBool(self):
    choice = lambda s: np.random.choice((False, True), size=s)
    self._compareExpandDimsAll(choice([2]), 0)
    self._compareExpandDimsAll(choice([2]), 1)
    self._compareExpandDimsAll(choice([2]), -1)

    self._compareExpandDimsAll(choice([2, 3]), 0)
    self._compareExpandDimsAll(choice([2, 3]), 1)
    self._compareExpandDimsAll(choice([2, 3]), 2)
    self._compareExpandDimsAll(choice([2, 3]), -1)
    self._compareExpandDimsAll(choice([2, 3]), -2)

    self._compareExpandDimsAll(choice([2, 3, 5]), 0)
    self._compareExpandDimsAll(choice([2, 3, 5]), 1)
    self._compareExpandDimsAll(choice([2, 3, 5]), 2)
    self._compareExpandDimsAll(choice([2, 3, 5]), 3)

    self._compareExpandDimsAll(choice([2, 3, 5]), -1)
    self._compareExpandDimsAll(choice([2, 3, 5]), -2)
    self._compareExpandDimsAll(choice([2, 3, 5]), -3)
    self._compareExpandDimsAll(choice([2, 3, 5]), -4)

  def testExpandDimsErrors(self):
    # Axis outside [-rank-1, rank] must raise.
    with self.test_session():
      self.assertRaises(ValueError, array_ops.expand_dims,
                        np.zeros([2, 3, 5]), -5)
      self.assertRaises(ValueError, array_ops.expand_dims,
                        [False, True, True], -5)
      self.assertRaises(ValueError, array_ops.expand_dims,
                        np.zeros([2, 3, 5]), 4)
      self.assertRaises(ValueError, array_ops.expand_dims,
                        [False, True, True], 4)

  def testExpandDimsGradient(self):
    with self.test_session():
      inp = constant_op.constant(
          np.random.rand(4, 2).astype("f"), dtype=dtypes.float32)
      squeezed = array_ops.expand_dims(inp, 1)

      err = gradient_checker.compute_gradient_error(inp, [4, 2], squeezed,
                                                    [4, 1, 2])
      self.assertLess(err, 1e-3)

  def testExpandDimsScalar(self):
    with self.test_session():
      inp = constant_op.constant(7)
      self.assertAllEqual([7], array_ops.expand_dims(inp, 0).eval())
      self.assertAllEqual([7], array_ops.expand_dims(inp, -1).eval())

      inp = constant_op.constant(True)
      self.assertAllEqual([True], array_ops.expand_dims(inp, 0).eval())
      self.assertAllEqual([True], array_ops.expand_dims(inp, -1).eval())

  def testExpandDimsDimType(self):
    # The axis argument may be an int32 or int64 tensor.
    for dtype in [dtypes.int32, dtypes.int64]:
      x = np.zeros([2])
      np_ans = np.expand_dims(x, axis=0)
      with self.test_session(use_gpu=True):
        tensor = array_ops.expand_dims(x, constant_op.constant(0, dtype))
        tf_ans = tensor.eval()
      self.assertShapeEqual(np_ans, tensor)
      self.assertAllEqual(np_ans, tf_ans)

  def _compareSqueeze(self, x, squeeze_dims, use_gpu):
    with self.test_session(use_gpu=use_gpu):
      if squeeze_dims:
        np_ans = np.squeeze(x, axis=tuple(squeeze_dims))
        tensor = array_ops.squeeze(x, squeeze_dims)
        tf_ans = tensor.eval()
      else:
        np_ans = np.squeeze(x)
        tensor = array_ops.squeeze(x)
        tf_ans = tensor.eval()
      self.assertShapeEqual(np_ans, tensor)
      self.assertAllEqual(np_ans, tf_ans)

  def _compareSqueezeAll(self, x, squeeze_dims=None):
    if squeeze_dims is None:
      squeeze_dims = []
    self._compareSqueeze(x, squeeze_dims, False)
    self._compareSqueeze(x, squeeze_dims, True)

  def testSqueeze(self):
    # Nothing to squeeze.
    self._compareSqueezeAll(np.zeros([2]))
    self._compareSqueezeAll(np.zeros([2, 3]))

    # Squeeze the middle element away.
    self._compareSqueezeAll(np.zeros([2, 1, 2]))

    # Squeeze on both ends.
    self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]))

  def testSqueezeBool(self):
    choice = lambda s: np.random.choice((False, True), size=s)
    # Nothing to squeeze.
    self._compareSqueezeAll(choice([2]))
    self._compareSqueezeAll(choice([2, 3]))

    # Squeeze the middle element away.
    self._compareSqueezeAll(choice([2, 1, 2]))

    # Squeeze on both ends.
    self._compareSqueezeAll(choice([1, 2, 1, 3, 1]))

  def testSqueezeSpecificDimension(self):
    # Positive squeeze dim index.
    self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [0])
    self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [2, 4])
    self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [0, 4, 2])

    # Negative squeeze dim index.
    self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [-1])
    self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [-3, -5])
    self._compareSqueezeAll(np.zeros([1, 2, 1, 3, 1]), [-3, -5, -1])

  def testSqueezeSpecificDimensionBool(self):
    choice = lambda s: np.random.choice((False, True), size=s)
    # Positive squeeze dim index.
    self._compareSqueezeAll(choice([1, 2, 1, 3, 1]), [0])
    self._compareSqueezeAll(choice([1, 2, 1, 3, 1]), [2, 4])
    self._compareSqueezeAll(choice([1, 2, 1, 3, 1]), [0, 4, 2])

    # Negative squeeze dim index.
    self._compareSqueezeAll(choice([1, 2, 1, 3, 1]), [-1])
    self._compareSqueezeAll(choice([1, 2, 1, 3, 1]), [-3, -5])
    self._compareSqueezeAll(choice([1, 2, 1, 3, 1]), [-3, -5, -1])

  def testSqueezeAllOnes(self):
    # Numpy squeezes a 1 element tensor into a zero dimensional tensor.
    # Verify that we do the same.
    for use_gpu in [False, True]:
      with self.test_session(use_gpu=use_gpu):
        tensor = array_ops.squeeze(np.zeros([1, 1, 1]), [])
        self.assertEqual(np.shape(1), tensor.get_shape())
        tf_ans = tensor.eval()
        self.assertEqual(np.shape(1), tf_ans.shape)

  def testSqueezeAllOnesBool(self):
    # Numpy squeezes a 1 element tensor into a zero dimensional tensor.
    # Verify that we do the same.
    for use_gpu in [False, True]:
      with self.test_session(use_gpu=use_gpu):
        tensor = array_ops.squeeze([[[False]]], [])
        self.assertEqual(np.shape(1), tensor.get_shape())
        tf_ans = tensor.eval()
        self.assertEqual(np.shape(1), tf_ans.shape)

  def testSqueezeOnlyOnes(self):
    for use_gpu in [False, True]:
      with self.test_session(use_gpu=use_gpu):
        input_1x1x3 = np.zeros([1, 1, 3])
        self._compareSqueezeAll(input_1x1x3)
        self._compareSqueezeAll(input_1x1x3, [0])
        self._compareSqueezeAll(input_1x1x3, [1])
        # Dim 2 has size 3, so it cannot be squeezed.
        self.assertRaises(ValueError, array_ops.squeeze, input_1x1x3, [2])

  def testSqueezeErrors(self):
    for use_gpu in [False, True]:
      with self.test_session(use_gpu=use_gpu):
        self.assertRaises(ValueError, array_ops.squeeze,
                          np.zeros([1, 2, 1]), [-4])
        self.assertRaises(ValueError, array_ops.squeeze,
                          np.zeros([1, 2, 1]), [0, -4])
        self.assertRaises(ValueError, array_ops.squeeze,
                          np.zeros([1, 2, 1]), [3])
        self.assertRaises(ValueError, array_ops.squeeze,
                          np.zeros([1, 2, 1]), [2, 3])

  def testSqueezeGradient(self):
    with self.test_session():
      inp = np.random.rand(4, 2).astype("f")
      a = array_ops.reshape(inp, [4, 1, 2])
      squeezed = array_ops.squeeze(a, [])

      err = gradient_checker.compute_gradient_error(a, [4, 1, 2], squeezed,
                                                    [4, 2])
      self.assertLess(err, 1e-3)

  def testSqueezeGradientWithSqueezeDims(self):
    with self.test_session():
      inp = np.random.rand(4, 2).astype("f")
      a = array_ops.reshape(inp, [4, 1, 2, 1])
      squeezed = array_ops.squeeze(a, [1])

      err = gradient_checker.compute_gradient_error(a, [4, 1, 2, 1], squeezed,
                                                    [4, 2, 1])
      self.assertLess(err, 1e-3)

  def testSqueezeWithUnknownShape(self):
    with self.test_session():
      a = array_ops.placeholder(dtypes.float32, shape=[2, None])

      squeezed = array_ops.squeeze(a, [1])
      self.assertEqual([2], squeezed.get_shape().as_list())

      squeezed = array_ops.squeeze(a)
      self.assertEqual(None, squeezed.get_shape())

      self.assertRaises(ValueError, array_ops.squeeze, a, [0])
      self.assertRaises(ValueError, array_ops.squeeze, a, [100])


class TileTest(test.TestCase):
  """Tests tf.tile forward results, shape inference, and gradients."""

  def testScalar(self):
    for use_gpu in False, True:
      with self.test_session(use_gpu=use_gpu):
        a = constant_op.constant(7, shape=[], dtype=dtypes.float32)
        tiled = array_ops.tile(a, [])
        result = tiled.eval()
      self.assertEqual(result.shape, ())
      self.assertEqual([], tiled.get_shape())
      self.assertEqual(7, result)

  def testSimple(self):
    # multiples could be int32 or int64
    for dtype in [dtypes.int32, dtypes.int64]:
      with self.test_session(use_gpu=True):
        inp = np.random.rand(4, 1).astype(np.float32)
        a = constant_op.constant(inp)
        tiled = array_ops.tile(a, constant_op.constant([1, 4], dtype=dtype))
        result = tiled.eval()
      self.assertEqual(result.shape, (4, 4))
      self.assertEqual([4, 4], tiled.get_shape())
      self.assertTrue((result == np.tile(inp, (1, 4))).all())

  def testIdentityTileAndGrad(self):
    with self.test_session():
      inp = np.random.rand(4, 1).astype(np.float32)
      a = constant_op.constant(inp)
      tiled = array_ops.tile(a, [1, 1])
      result = tiled.eval()
    self.assertEqual(result.shape, (4, 1))
    self.assertEqual([4, 1], tiled.get_shape())
    self.assertTrue((result == np.tile(inp, (1, 1))).all())

  def testEmpty(self):
    # A zero multiple produces an empty dimension.
    with self.test_session():
      inp = np.random.rand(2, 3).astype(np.float32)
      a = constant_op.constant(inp)
      tiled = array_ops.tile(a, [5, 0])
      result = tiled.eval()
    self.assertEqual(result.shape, (10, 0))
    self.assertEqual([10, 0], tiled.get_shape())

  def testUnknownInputShape(self):
    """Importing can call _TileShape without shape of <multiples> known."""
    with self.test_session():
      inp = array_ops.placeholder(dtypes.float32)  # unknown shape
      multiples = constant_op.constant([1, 2, 3, 4], dtype=np.int32)
      tiled = array_ops.tile(inp, multiples)
      gdef = tiled.graph.as_graph_def()

      # Move the tile op to the start of the graph so that shapes of its inputs
      # are not available when the shape function runs on import.
      swapped = False
      for i, n in enumerate(gdef.node):
        if n.op == "Tile":
          # Swap tile op to be first in gdef.node
          assert i != 0
          new_node = node_def_pb2.NodeDef()
          new_node.CopyFrom(gdef.node[i])
          gdef.node[i].CopyFrom(gdef.node[0])
          gdef.node[0].CopyFrom(new_node)
          swapped = True
      assert swapped

      tiled_imported, = importer.import_graph_def(
          gdef, return_elements=[tiled.name])
      self.assertEqual(4, tiled_imported.get_shape().ndims)

  def testTypes(self):
    types_to_test = {
        "bool": (dtypes.bool, bool),
        "float32": (dtypes.float32, float),
        "float64": (dtypes.float64, float),
        "complex64": (dtypes.complex64, complex),
        "complex128": (dtypes.complex128, complex),
        "uint8": (dtypes.uint8, int),
        "int32": (dtypes.int32, int),
        "int64": (dtypes.int64, int),
        bytes: (dtypes.string, bytes)
    }
    for dtype_np, (dtype_tf, cast) in types_to_test.items():
      with self.test_session(use_gpu=True):
        inp = np.random.rand(4, 1).astype(dtype_np)
        a = constant_op.constant(
            [cast(x) for x in inp.ravel(order="C")],
            shape=[4, 1],
            dtype=dtype_tf)
        tiled = array_ops.tile(a, [1, 4])
        result = tiled.eval()
      self.assertEqual(result.shape, (4, 4))
      self.assertEqual([4, 4], tiled.get_shape())
      self.assertAllEqual(result, np.tile(inp, (1, 4)))

  def testInvalidDim(self):
    with self.test_session():
      inp = np.random.rand(4, 1).astype("f")
      a = constant_op.constant(
          [float(x) for x in inp.ravel(order="C")],
          shape=[4, 1],
          dtype=dtypes.float32)
      # Wrong length of multiples.
      with self.assertRaises(ValueError):
        array_ops.tile(a, [1, 4, 2])
      # Wrong rank for multiples.
      with self.assertRaises(ValueError):
        array_ops.tile(a, [[2, 3], [3, 4]]).eval()

  def _RunAndVerifyResult(self, rank, use_gpu):
    with self.test_session(use_gpu=use_gpu):
      # Random dims of given rank
      input_shape = np.random.randint(1, 4, size=rank)
      inp = np.random.rand(*input_shape).astype("f")
      a = constant_op.constant(
          [float(x) for x in inp.ravel(order="C")],
          shape=input_shape,
          dtype=dtypes.float32)
      multiples = np.random.randint(1, 4, size=rank).astype(np.int32)
      tiled = array_ops.tile(a, multiples)
      result = tiled.eval()
    self.assertTrue((np.array(multiples) * np.array(inp.shape) == np.array(
        result.shape)).all())
    self.assertAllEqual(result, np.tile(inp, tuple(multiples)))
    self.assertShapeEqual(result, tiled)

  def testRandom(self):
    # test low rank, like 5
    for _ in range(5):
      self._RunAndVerifyResult(5, use_gpu=False)
    for _ in range(5):
      self._RunAndVerifyResult(5, use_gpu=True)
    # test high rank, like 10
    for _ in range(5):
      self._RunAndVerifyResult(10, use_gpu=False)
    for _ in range(5):
      self._RunAndVerifyResult(10, use_gpu=True)

  def testGradientSimpleReduction(self):
    with self.test_session():
      inp = np.random.rand(4, 1).astype("f")
      a = constant_op.constant(
          [float(x) for x in inp.flatten()], shape=[4, 1],
          dtype=dtypes.float32)
      tiled = array_ops.tile(a, [1, 4])
      grad_shape = [4, 4]
      grad_inp = np.random.rand(*grad_shape).astype("f")
      grad_tensor = constant_op.constant(
          [float(x) for x in grad_inp.flatten()], shape=grad_shape)
      grad = gradients_impl.gradients([tiled], [a], [grad_tensor])[0]
      self.assertShapeEqual(inp, grad)
      result = grad.eval()
      # Tile gradient sums the incoming gradient over the tiled copies.
      self.assertAllClose(np.sum(grad_inp, axis=1).reshape(4, 1), result, 1e-3)

  def testGradientStridedReduction(self):
    with self.test_session():
      inp = np.random.rand(4, 2).astype("f")
      a = constant_op.constant(
          [float(x) for x in inp.flatten()], shape=[4, 2],
          dtype=dtypes.float32)
      tiled = array_ops.tile(a, [1, 2])
      grad_shape = [4, 4]
      grad_inp = np.random.rand(*grad_shape).astype("f")
      grad_tensor = constant_op.constant(
          [float(x) for x in grad_inp.flatten()], shape=grad_shape)
      grad = gradients_impl.gradients([tiled], [a], [grad_tensor])[0]
      self.assertShapeEqual(inp, grad)
      result = grad.eval()
      expected_shape = [4, 2]
      expected = np.zeros(expected_shape)
      expected[:, 0] = grad_inp[:, 0] + grad_inp[:, 2]
      expected[:, 1] = grad_inp[:, 1] + grad_inp[:, 3]
      self.assertTrue((np.abs(expected - result) < 1e-3).all())

  def testGradientSimpleReductionOnGPU(self):
    with self.test_session(use_gpu=True):
      inp = np.random.rand(4, 1).astype("f")
      a = constant_op.constant(
          [float(x) for x in inp.flatten()], shape=[4, 1],
          dtype=dtypes.float32)
      tiled = array_ops.tile(a, [1, 4])
      grad_shape = [4, 4]
      grad_inp = np.random.rand(*grad_shape).astype("f")
      grad_tensor = constant_op.constant(
          [float(x) for x in grad_inp.flatten()], shape=grad_shape)
      grad = gradients_impl.gradients([tiled], [a], [grad_tensor])[0]
      result = grad.eval()
      self.assertAllClose(np.sum(grad_inp, axis=1).reshape(4, 1), result, 1e-3)

  def testGradientStridedReductionOnGPU(self):
    with self.test_session(use_gpu=True):
      inp = np.random.rand(4, 2).astype("f")
      a = constant_op.constant(
          [float(x) for x in inp.flatten()], shape=[4, 2],
          dtype=dtypes.float32)
      tiled = array_ops.tile(a, [1, 2])
      grad_shape = [4, 4]
      grad_inp = np.random.rand(*grad_shape).astype("f")
      grad_tensor = constant_op.constant(
          [float(x) for x in grad_inp.flatten()], shape=grad_shape)
      grad = gradients_impl.gradients([tiled], [a], [grad_tensor])[0]
      result = grad.eval()
      expected_shape = [4, 2]
      expected = np.zeros(expected_shape)
      expected[:, 0] = grad_inp[:, 0] + grad_inp[:, 2]
      expected[:, 1] = grad_inp[:, 1] + grad_inp[:, 3]
      self.assertAllClose(expected, result, 1e-3)

  def _RunAndVerifyGradientResult(self, input_shape, multiples):
    for use_gpu in False, True:
      with self.test_session(use_gpu=use_gpu):
        # Random values
        inp = np.asarray(np.random.rand(*input_shape))
        a = constant_op.constant(inp, dtype=dtypes.float64)
        tiled = array_ops.tile(a, multiples)
        grad_shape = list(np.array(multiples) * np.array(inp.shape))
        err = gradient_checker.compute_gradient_error(
            a, list(input_shape), tiled, grad_shape, x_init_value=inp)
      print("tile(float) error = ", err)
      self.assertLess(err, 1e-3)

  def testGradientRandomScalar(self):
    self._RunAndVerifyGradientResult([], [])

  def testGradientRandom(self):
    self._RunAndVerifyGradientResult([2, 2, 1, 1, 3], [1, 1, 1, 1, 1])
    self._RunAndVerifyGradientResult([2, 2, 1, 1, 3], [1, 2, 1, 3, 1])
    self._RunAndVerifyGradientResult([2, 3, 1, 1, 3], [3, 1, 1, 2, 2])
    self._RunAndVerifyGradientResult([2, 1, 3, 3, 2], [1, 3, 3, 1, 2])

  def testGradientStridedReductionGC(self):
    with self.test_session():
      inp = np.random.rand(4, 2).astype("f")
      a = constant_op.constant(
          [float(x) for x in inp.flatten()], shape=[4, 2],
          dtype=dtypes.float32)
      tiled = array_ops.tile(a, [1, 2])
      err = gradient_checker.compute_gradient_error(a, [4, 2], tiled, [4, 4])
    self.assertLess(err, 1e-3)

  def testShapeFunctionEdgeCases(self):
    # Unknown multiples shape.
    inp = constant_op.constant(0.0, shape=[4, 4, 4, 4])
    tiled = array_ops.tile(inp, array_ops.placeholder(dtypes.int32))
    self.assertEqual([None, None, None, None], tiled.get_shape().as_list())

    # Unknown input shape.
    inp = array_ops.placeholder(dtypes.float32)
    tiled = array_ops.tile(inp, [2, 2, 2, 2])
    self.assertEqual([None, None, None, None], tiled.get_shape().as_list())

    # Unknown input and multiples shape.
    inp = array_ops.placeholder(dtypes.float32)
    tiled = array_ops.tile(inp, array_ops.placeholder(dtypes.int32))
    self.assertIs(None, tiled.get_shape().ndims)

    # Known input and partially known multiples.
    inp = constant_op.constant(0.0, shape=[1, 1])
    tiled = array_ops.tile(inp, [array_ops.placeholder(dtypes.int32), 7])
    self.assertEqual([None, 7], tiled.get_shape().as_list())

    # Mismatched input rank and multiples length.
    inp = array_ops.placeholder(dtypes.float32, shape=[None, None])
    with self.assertRaises(ValueError):
      tiled = array_ops.tile(
          inp, array_ops.placeholder(
              dtypes.int32, shape=[3]))


if __name__ == "__main__":
  test.main()
#! /usr/bin/env python
"""Extract annotated gene and intergenic regions from GenBank files into per-region fasta files.

Each region record produced by get_seqs() is a 6-element list:
    [name_tuple, start, end, strand, sequence, annotation_type]
where name_tuple is (gene_name, "__copyN" or "", "__exonN" or "") for genes, and
(anchor1, anchor2, "__copyN" or "") for intergenic regions.
"""
import os
import time
import sys
try:
    from Bio import SeqIO, SeqFeature
except ImportError:
    sys.stdout.write("Python package biopython not found!\n"
                     "You could use \"pip install biopython\" to install it.\n")
    sys.exit()
from optparse import OptionParser
from platform import system
from glob import glob

# Copyright(C) 2017 Jianjun Jin

major_version, minor_version = sys.version_info[:2]
if major_version == 2 and minor_version >= 7:
    python_version = "2.7+"
elif major_version == 3 and minor_version >= 5:
    python_version = "3.5+"
else:
    sys.stdout.write("Python version have to be 2.7+ or 3.5+")
    sys.exit(0)


def get_options():
    """Parse command line options; return (options, list_of_gb_files)."""
    usage = "Usage: get_annotated_regions_from_gb.py gb_files -o out_dir"
    parser = OptionParser(usage=usage)
    parser.add_option("-o", dest="out_put",
                      help="Output.")
    parser.add_option("-t", dest="gene_types", default="CDS,tRNA,rRNA",
                      help="Annotation type taken as gene. Default: CDS,tRNA,rRNA")
    parser.add_option("--separate-copy", dest="one_copy", default=True, action="store_false",
                      help="By default, only keep one copy (see '--copy-mode' for more) "
                           "if there are several regions with the same name. "
                           "Exception: if there are one copy with intron(s) and another copy without intron, "
                           "they would be both kept. This exception was specially made for the convenience of "
                           "commonly-incorrectly-annotated rps12 gene of plastome.")
    parser.add_option("--copy-mode", dest="copy_mode", default="leastN_longest",
                      help="first|longest|leastN|leastN_longest (default).")
    parser.add_option("--separate-exon", dest="combine_exon", default=True, action="store_false",
                      help="By default, combining exons.")
    parser.add_option("--keys", dest="gene_keys", default="gene,label,product,note",
                      help="The key to the gene name: gene, label, product or other keys in the qualifiers region."
                           "Default: %default.")
    parser.add_option("--mix", dest="mix", default=False, action="store_true",
                      help="Mix different genes into a single fasta file. "
                           "In this mode, the sequence header will be >gene_name - gb_info")
    parser.add_option("--case-mode", dest="case_treatment", default="first",
                      help="first: Gene name case-non-sensitive. Consistent to the first appearance. \n"
                           "lower: Gene name case-non-sensitive. All gene name set to lower case. \n"
                           "upper: Gene name case-non-sensitive. All gene name set to Upper case. \n"
                           "raw: Gene name case-sensitive. ")
    parser.add_option("--ignore-format-error", dest="ignore_format_error", default=False, action="store_true",
                      help="Skip the Error: key \"*\" not found in annotation. Not suggested.")
    # NOTE(review): the help text says "Default: False" but dest defaults to True and the
    # flag stores False — i.e. translation to product-style names is ON by default and this
    # flag turns it OFF. Left unchanged here; confirm intent before renaming the flag.
    parser.add_option("--translate-to-product", dest="product_to_gene", default=True, action="store_false",
                      help="Translate the tRNA gene name to the form of their product. Default: False")
    parser.add_option("--overwrite", dest="overwrite", default=False, action="store_true",
                      help="Choose to overwrite previous result.")
    options, argv = parser.parse_args()
    if not (options.out_put and bool(len(argv))):
        parser.print_help()
        sys.exit()
    if system() == "Windows":
        # Windows shells do not expand wildcards; do it manually.
        new_argv = []
        for input_fn_pattern in argv:
            new_argv.extend(glob(input_fn_pattern))
        argv = new_argv
    if options.copy_mode not in {"longest", "first", "leastN", "leastN_longest"}:
        sys.stdout.write("Error: invalid value " + options.copy_mode + " for '--copy-mode'!\n")
        sys.exit()
    if options.case_treatment not in {"first", "upper", "lower", "raw"}:
        sys.stdout.write("Error: invalid value " + options.case_treatment + " for '--case-mode'!\n")
        sys.exit()
    return options, argv


if python_version == "2.7+":
    # python2
    import string
    translator = string.maketrans("ATGCRMYKHBDVatgcrmykhbdv", "TACGYKRMDVHBtacgykrmdvhb")

    def complementary_seq(input_seq):
        """Return the reverse complement of a DNA string (IUPAC-degenerate-aware)."""
        return string.translate(input_seq, translator)[::-1]
else:
    # python3
    translator = str.maketrans("ATGCRMYKHBDVatgcrmykhbdv", "TACGYKRMDVHBtacgykrmdvhb")

    def complementary_seq(input_seq):
        """Return the reverse complement of a DNA string (IUPAC-degenerate-aware)."""
        return str.translate(input_seq, translator)[::-1]


def complementary_seqs(input_seq_iter):
    """Return a tuple of reverse complements for an iterable of sequences."""
    return tuple([complementary_seq(seq) for seq in input_seq_iter])


missing_base = {"N", "?", "n"}
# direction markers used in intergenic-region anchor names
head_ = "_+_"
tail_ = "_-_"


def count_n(seq):
    """Count missing/ambiguous bases (N/?/n) in a sequence."""
    return len([base for base in seq if base in missing_base])


def parse_bio_gb_locations(location_feature):
    """Flatten a Biopython location into a list of (start, end, strand) tuples."""
    if type(location_feature) == SeqFeature.CompoundLocation:
        return [parse_bio_gb_locations(location)[0] for location in location_feature.parts]
    elif type(location_feature) == SeqFeature.FeatureLocation:
        return [(int(location_feature.start), int(location_feature.end), location_feature.strand)]
    else:
        raise ValueError(str(type(location_feature)))


def embed_in(candidate_small, candidate_large):
    """Return True if (start, end) candidate_small lies inside candidate_large.

    A start >= end pair denotes a region crossing the origin of a circular record.
    """
    small_start, small_end = candidate_small
    large_start, large_end = candidate_large
    # both circular
    if small_start >= small_end and large_start >= large_end:
        return small_end <= large_end
    elif small_start >= small_end:
        return False
    elif large_start >= large_end:
        return True
    else:
        return small_end <= large_end


trna_translate_table = {"Ala": "A", "Arg": "R", "Asn": "N", "Asp": "D", "Cys": "C",
                        "Gln": "Q", "Glu": "E", "Gly": "G", "His": "H", "Ile": "I",
                        "Leu": "L", "Lys": "K", "Met": "M", "fMet": "fM", "Phe": "F",
                        "Pro": "P", "Ser": "S", "Thr": "T", "Trp": "W", "Tyr": "Y", "Val": "V"}
# remembers the first-seen spelling of each gene name for case_mode == "first"
gene_name_lower_to_first = {}


def modify_gene_name(product_name, translate_product_to_gene_name, case_mode):
    """Normalize a gene/product name according to case mode and tRNA/rRNA conventions."""
    # treat case
    if case_mode == "upper":
        product_name = product_name.upper()
    elif case_mode == "lower":
        product_name = product_name.lower()
    elif case_mode == "first":
        if product_name.lower() in gene_name_lower_to_first:
            product_name = gene_name_lower_to_first[product_name.lower()]
        else:
            gene_name_lower_to_first[product_name.lower()] = product_name
    elif case_mode == "raw":
        pass
    # treat product name
    if translate_product_to_gene_name:
        if product_name.startswith("tRNA-") or product_name.startswith("trna-"):
            short_name = product_name.replace("tRNA-", "").replace("trna-", "")
            if short_name[:3] in trna_translate_table:
                return "trn" + trna_translate_table[short_name[:3]] + \
                       "-" + short_name[3:].replace("(", "").replace(")", "")
            elif short_name[:4] in trna_translate_table:
                # 4-letter amino acid code, e.g. fMet
                return "trn" + trna_translate_table[short_name[:4]] + \
                       "-" + short_name[4:].replace("(", "").replace(")", "")
            else:
                return product_name
        elif "rrna" in product_name or "rRNA" in product_name:
            res_name = "rrn" + product_name.replace("rrna", "").replace("rRNA", "").replace(" ", "").replace("_", "")
            if "S" in res_name:
                if res_name[res_name.index("S") - 1].isdigit():
                    return res_name.replace("S", "")
            return res_name
        else:
            return product_name
    else:
        return product_name


def get_seqs(seq_record, accepted_types, gene_keys, ignore_format_error=False,
             trans_product_to_gene=True, case_m="first"):
    """Collect gene and intergenic regions from one SeqRecord.

    Returns (gene_regions, intergenic_regions); each item is
    [name_tuple, start, end, strand, sequence, annotation_type].
    Raises NotImplementedError when none of gene_keys is present in a feature's
    qualifiers and ignore_format_error is False.
    """
    original_seq = str(seq_record.seq)

    def get_seq_with_gb_loc(in_location):
        # start >= end means the region wraps around the origin (circular record)
        in_start, in_end, in_strand = in_location
        if in_start >= in_end:
            in_seq = original_seq[in_start:] + original_seq[:in_end]
        else:
            in_seq = original_seq[in_start: in_end]
        if in_strand == 1:
            return in_seq
        else:
            return complementary_seq(in_seq)

    gene_regions = []
    name_counter = {}
    taken_loc = set()
    for feature in seq_record.features:
        if feature.type in accepted_types:
            this_key_found = False
            location_error = False
            for gene_key in gene_keys:
                if gene_key in feature.qualifiers:
                    try:
                        locations = parse_bio_gb_locations(feature.location)
                    except ValueError as e:
                        location_error = True
                        sys.stdout.write("Warning: " + str(e) + "\n")
                        break
                    else:
                        this_name = [modify_gene_name(feature.qualifiers[gene_key][0],
                                                      trans_product_to_gene, case_m), "", ""]
                        # the first copy of a region carries no "__copy" label
                        if this_name[0] not in name_counter:
                            name_counter[this_name[0]] = 1
                        else:
                            name_counter[this_name[0]] += 1
                            this_name[1] = "__copy" + str(name_counter[this_name[0]])
                        if len(locations) > 1:
                            for i, loc in enumerate(locations):
                                this_name[2] = "__exon" + str(i + 1)
                                if loc not in taken_loc:
                                    gene_regions.append(
                                        [tuple(this_name)] + list(loc) +
                                        [get_seq_with_gb_loc(loc), feature.type])
                                    taken_loc.add(loc)
                        else:
                            gene_regions.append(
                                [tuple(this_name)] + list(locations[0]) +
                                [get_seq_with_gb_loc(locations[0]), feature.type])
                        this_key_found = True
                        break
            if not location_error and not this_key_found and not ignore_format_error:
                sys.stdout.write("\nError: ")
                sys.stdout.write("Present key(s) \"" + ",".join(gene_keys) + "\" not found in annotation:\n")
                sys.stdout.write(str(feature))
                raise NotImplementedError
    gene_regions.sort(key=lambda x: (x[1], -x[2], x[0]))
    intergenic_regions = []
    end_of_last_region = 0
    if len(gene_regions) == 1:
        if gene_regions[0][1] == gene_regions[0][2]:
            # single annotation covering the whole record: no intergenic region
            pass
        else:
            anchor1 = [gene_regions[0][0][0], gene_regions[0][0][2],
                       tail_ if gene_regions[0][3] == 1 else head_]
            anchor2 = [gene_regions[0][0][0], gene_regions[0][0][2],
                       head_ if gene_regions[0][3] == 1 else tail_]
            this_name = sorted([tuple(anchor1), tuple(anchor2)]) + [""]
            if tuple(this_name[:2]) not in name_counter:
                name_counter[tuple(this_name[:2])] = 1
            else:
                name_counter[tuple(this_name[:2])] += 1
                this_name[2] = "__copy" + str(name_counter[tuple(this_name[:2])])
            this_loc = [gene_regions[0][2], gene_regions[0][1],
                        1 * int(2 * ((anchor1 <= anchor2) - 0.5))]
            intergenic_regions.append([tuple(this_name)] + this_loc +
                                      [get_seq_with_gb_loc(this_loc), "noncoding"])
    elif len(gene_regions) > 1:
        first_region = gene_regions[0]
        circular_regions = [in_region for in_region in gene_regions if in_region[1] >= in_region[2]]
        if circular_regions:
            last_region = sorted(circular_regions, key=lambda x: (-x[2], x[1], x[0]))[0]
            end_of_last_region = last_region[2]
        else:
            last_region = gene_regions[-1]
        # if both of the terminal annotations across the ends (circular), they apparently overlapped
        if first_region[1] >= first_region[2] and last_region[1] >= last_region[2]:
            pass
        # elif embedded
        elif first_region[1] >= first_region[2]:
            pass
        elif last_region[1] >= last_region[2]:
            if last_region[2] >= first_region[1]:
                pass
            else:
                anchor1 = [last_region[0][0], last_region[0][2],
                           tail_ if last_region[3] == 1 else head_]
                anchor2 = [first_region[0][0], first_region[0][2],
                           head_ if first_region[3] == 1 else tail_]
                this_name = sorted([tuple(anchor1), tuple(anchor2)]) + [""]
                if tuple(this_name[:2]) not in name_counter:
                    name_counter[tuple(this_name[:2])] = 1
                else:
                    name_counter[tuple(this_name[:2])] += 1
                    this_name[2] = "__copy" + str(name_counter[tuple(this_name[:2])])
                this_loc = [last_region[2], first_region[1],
                            1 * int(2 * ((anchor1 <= anchor2) - 0.5))]
                intergenic_regions.append([tuple(this_name)] + this_loc +
                                          [get_seq_with_gb_loc(this_loc), "noncoding"])
        else:
            anchor1 = [last_region[0][0], last_region[0][2],
                       tail_ if last_region[3] == 1 else head_]
            anchor2 = [first_region[0][0], first_region[0][2],
                       head_ if first_region[3] == 1 else tail_]
            this_name = sorted([tuple(anchor1), tuple(anchor2)]) + [""]
            if tuple(this_name[:2]) not in name_counter:
                name_counter[tuple(this_name[:2])] = 1
            else:
                name_counter[tuple(this_name[:2])] += 1
                this_name[2] = "__copy" + str(name_counter[tuple(this_name[:2])])
            this_loc = [last_region[2], first_region[1],
                        1 * int(2 * ((anchor1 <= anchor2) - 0.5))]
            intergenic_regions.append([tuple(this_name)] + this_loc +
                                      [get_seq_with_gb_loc(this_loc), "noncoding"])
        go2 = 0
        while go2 < len(gene_regions) - 1:
            # skip regions embedded in the current one
            go_add = 1
            while go2 + go_add < len(gene_regions) and \
                    embed_in(gene_regions[go2 + go_add][1:3], gene_regions[go2][1:3]):
                go_add += 1
            if go2 + go_add == len(gene_regions):
                break
            this_region, next_region = gene_regions[go2], gene_regions[go2 + go_add]
            if this_region[1] >= this_region[2] and next_region[1] >= next_region[2]:
                pass
            elif this_region[2] < next_region[1] and end_of_last_region < next_region[1]:
                anchor1 = [this_region[0][0], this_region[0][2],
                           tail_ if this_region[3] == 1 else head_]
                anchor2 = [next_region[0][0], next_region[0][2],
                           head_ if next_region[3] == 1 else tail_]
                this_loc = [this_region[2], next_region[1],
                            1 * int(2 * ((anchor1 <= anchor2) - 0.5))]
                this_name = sorted([tuple(anchor1), tuple(anchor2)]) + [""]
                if tuple(this_name[:2]) not in name_counter:
                    name_counter[tuple(this_name[:2])] = 1
                else:
                    name_counter[tuple(this_name[:2])] += 1
                    this_name[2] = "__copy" + str(name_counter[tuple(this_name[:2])])
                intergenic_regions.append([tuple(this_name)] + this_loc +
                                          [get_seq_with_gb_loc(this_loc), "noncoding"])
            go2 += go_add
    return gene_regions, intergenic_regions


def write_fasta(out_file, seq_dict, overwrite=False):
    """Write {name: seq} to a fasta file; append when the file already exists and not overwriting."""
    names = sorted(list(seq_dict))
    if not overwrite and os.path.exists(out_file):
        # only happens on a case-non-sensitive filesystem: a differently-cased name collided
        existed_f = [x for x in os.listdir(os.path.split(out_file)[0])
                     if x.lower() == os.path.split(out_file)[-1].lower()]
        sys.stdout.write("Warning: running on a case-non-sensitive disk. "
                         "Cannot create " + out_file + " while " + existed_f[0] + " exists! "
                         "Appending seqs to " + existed_f[0] + " ... \n")
        with open(out_file, "a") as out_put_handler:
            for name in names:
                out_put_handler.write(">" + name + "\n" + seq_dict[name] + "\n\n")
    else:
        with open(out_file, "w") as out_put_handler:
            for name in names:
                out_put_handler.write(">" + name + "\n" + seq_dict[name] + "\n\n")


def write_statistics(out_file, base_name_list, gene_dict, intergene_dict):
    """Write a tab-separated length table: one row per gb record, one column per region."""
    gene_names = sorted(list(gene_dict))
    str_gene_names = ["".join(n).replace(" ", "_") for n in gene_names]
    inter_names = sorted(list(intergene_dict))
    str_inter_names = ["--".join(["".join(x) for x in n[:2]]).replace(" ", "_") + n[2] for n in inter_names]
    with open(out_file, "w") as out_put_handler:
        out_put_handler.write("\t".join(["gb_name"] + str_gene_names + str_inter_names) + "\n")
        for gb_name in base_name_list:
            out_put_handler.write(gb_name)
            for loci_name in gene_names:
                if gb_name in gene_dict[loci_name]:
                    out_put_handler.write("\t" + str(len(gene_dict[loci_name][gb_name])))
                else:
                    out_put_handler.write("\t-")
            for loci_name in inter_names:
                if gb_name in intergene_dict[loci_name]:
                    out_put_handler.write("\t" + str(len(intergene_dict[loci_name][gb_name])))
                else:
                    out_put_handler.write("\t-")
            out_put_handler.write("\n")


def main():
    time0 = time.time()
    options, argv = get_options()
    options.gene_keys = options.gene_keys.split(",")
    gene_dir = os.path.join(options.out_put, "gene")
    intergenic_dir = os.path.join(options.out_put, "intergene")
    if not os.path.exists(options.out_put):
        os.mkdir(options.out_put)
        os.mkdir(gene_dir)
        os.mkdir(intergenic_dir)
    else:
        if options.overwrite:
            if not os.path.exists(gene_dir):
                os.mkdir(gene_dir)
            if not os.path.exists(intergenic_dir):
                os.mkdir(intergenic_dir)
        else:
            # raise FileExistsError(options.out_put + " exists!")
            raise IOError(options.out_put + " exists!")
    # accept the requested annotation types in any common capitalization
    types = set()
    for this_t in options.gene_types.split(","):
        types.add(this_t)
        types.add(this_t.capitalize())
        types.add(this_t.lower())
        types.add(this_t.upper())
    out_gene_dict = {}
    out_intergenic_dict = {}
    base_name_list = []
    region_name_to_type = {}
    for this_gb in argv:
        if os.path.exists(this_gb):
            gb_base_name = os.path.basename(this_gb).replace(".gb", "").replace(".genbank", "")
            # base_name_list.append(gb_base_name)
            try:
                this_records = list(SeqIO.parse(this_gb, "genbank"))
            except ValueError as e:
                sys.stdout.write("Err loc: file " + this_gb + "\n")
                sys.stdout.write(str(e) + "\n")
                sys.exit()
            for go_record, seq_record in enumerate(this_records):
                try:
                    this_description = seq_record.description.replace("\n", " ").replace("\t", " ").strip()
                    this_seq_name = gb_base_name + \
                                    ("--" + str(go_record + 1)) * int(bool(len(this_records) > 1)) + \
                                    ("--" + this_description) * int(bool(this_description))
                    gene_regions, intergenic_regions = get_seqs(seq_record, types, options.gene_keys,
                                                                options.ignore_format_error,
                                                                options.product_to_gene,
                                                                options.case_treatment)
                    if options.one_copy:
                        temp_gene_dict = {}
                        for region_name, start, end, strand, this_seq, region_type in gene_regions:
                            # if region_name not in temp_gene_dict:
                            #     temp_gene_dict[region_name] = {}
                            temp_gene_dict[region_name] = this_seq
                            region_name_to_type[region_name] = region_type
                        temp_inter_dict = {}
                        for region_name, start, end, strand, this_seq, region_type in intergenic_regions:
                            # if region_name not in temp_inter_dict:
                            #     temp_inter_dict[region_name] = {}
                            region_name_to_type[region_name] = region_type
                            temp_inter_dict[region_name] = this_seq
                        # processing gene
                        go_to = 0
                        sorted_region_names = sorted(list(temp_gene_dict), key=lambda x: (x[0], x[2], x[1]))
                        while go_to < len(sorted_region_names):
                            region_name = sorted_region_names[go_to]
                            go_plus = 1
                            # if bool(next_loci[1]) == True, multiple copies exist.
                            while go_to + go_plus < len(sorted_region_names):
                                next_loci = sorted_region_names[go_to + go_plus]
                                if (next_loci[0], next_loci[2]) != (region_name[0], region_name[2]):
                                    if next_loci[1]:
                                        # if next_loci[0], next_loci[2])!=(region_name[0], region_name[2]
                                        # then next_loci seemed to be the first copy of a new region
                                        # but the first copy of a new region should not have __copy label (next_loci[1])
                                        # so next_loci is not a new region, but the same region with different exon
                                        sys.stdout.write("Warning: cannot find " + "".join(
                                            [next_loci[0], next_loci[2]]) + " while there's " + "".join(next_loci) +
                                            " in " + this_seq_name + "\n")
                                    break
                                else:
                                    go_plus += 1
                            if go_plus > 1:
                                this_seqs = []
                                for go_candidate in range(go_to, go_to + go_plus):
                                    this_seqs.append(temp_gene_dict[sorted_region_names[go_candidate]])
                                if len(set(this_seqs)) > 1:
                                    sys.stdout.write("Warning: distinct copies of " + "".join(
                                        region_name) + " in " + this_seq_name + "\n")
                                # choose the representative copy ("first" keeps the one already stored)
                                if options.copy_mode == "longest":
                                    temp_gene_dict[region_name] = sorted(this_seqs, key=lambda x: -len(x))[0]
                                elif options.copy_mode == "leastN":
                                    temp_gene_dict[region_name] = sorted(this_seqs, key=lambda x: count_n(x))[0]
                                elif options.copy_mode == "leastN_longest":
                                    temp_gene_dict[region_name] = sorted(
                                        this_seqs, key=lambda x: (count_n(x), -len(x)))[0]
                                for go_del in range(go_to + 1, go_to + go_plus):
                                    del temp_gene_dict[sorted_region_names[go_del]]
                            go_to += go_plus
                        # processing intergene
                        go_to = 0
                        sorted_inter_names = sorted(list(temp_inter_dict), key=lambda x: (x[:2], x[2]))
                        while go_to < len(sorted_inter_names):
                            inter_name = sorted_inter_names[go_to]
                            go_plus = 1
                            while go_to + go_plus < len(sorted_inter_names):
                                next_inter = sorted_inter_names[go_to + go_plus]
                                if inter_name[:2] != next_inter[:2]:
                                    if next_inter[2]:
                                        sys.stdout.write("Warning: cannot find " +
                                                         "".join(next_inter[0]) + "--" + "".join(next_inter[1]) +
                                                         " while there's " +
                                                         "".join(next_inter[0]) + "--" + "".join(next_inter[1]) +
                                                         next_inter[2] + " in " + this_seq_name + "\n")
                                    break
                                else:
                                    go_plus += 1
                            if go_plus > 1:
                                this_seqs = []
                                for go_candidate in range(go_to, go_to + go_plus):
                                    this_seqs.append(temp_inter_dict[sorted_inter_names[go_candidate]])
                                if len(set(this_seqs)) > 1:
                                    sys.stdout.write(
                                        "Warning: distinct copies of " + "".join(inter_name[0]) + "--" +
                                        "".join(inter_name[1]) + " in " + this_seq_name + "\n")
                                if options.copy_mode == "longest":
                                    temp_inter_dict[inter_name] = sorted(this_seqs, key=lambda x: -len(x))[0]
                                elif options.copy_mode == "leastN":
                                    temp_inter_dict[inter_name] = sorted(this_seqs, key=lambda x: count_n(x))[0]
                                elif options.copy_mode == "leastN_longest":
                                    temp_inter_dict[inter_name] = sorted(
                                        this_seqs, key=lambda x: (count_n(x), -len(x)))[0]
                                for go_del in range(go_to + 1, go_to + go_plus):
                                    del temp_inter_dict[sorted_inter_names[go_del]]
                            go_to += go_plus
                        # transfer temp to main dict
                        for region_name in temp_gene_dict:
                            if region_name not in out_gene_dict:
                                out_gene_dict[region_name] = {}
                            out_gene_dict[region_name][this_seq_name] = temp_gene_dict[region_name]
                        for region_name in temp_inter_dict:
                            if region_name not in out_intergenic_dict:
                                out_intergenic_dict[region_name] = {}
                            out_intergenic_dict[region_name][this_seq_name] = temp_inter_dict[region_name]
                    else:
                        # FIX: region records carry 6 fields (name, start, end, strand, seq, type);
                        # the original unpacked only 5 here, raising ValueError in --separate-copy mode,
                        # and never filled region_name_to_type (KeyError later when combining exons).
                        for region_name, start, end, strand, this_seq, region_type in gene_regions:
                            if region_name not in out_gene_dict:
                                out_gene_dict[region_name] = {}
                            out_gene_dict[region_name][this_seq_name] = this_seq
                            region_name_to_type[region_name] = region_type
                        for region_name, start, end, strand, this_seq, region_type in intergenic_regions:
                            if region_name not in out_intergenic_dict:
                                out_intergenic_dict[region_name] = {}
                            out_intergenic_dict[region_name][this_seq_name] = this_seq
                            region_name_to_type[region_name] = region_type
                    base_name_list.append(this_seq_name)
                except NotImplementedError:
                    sys.stdout.write("Err loc: " + str(go_record + 1) + "th record in file " + this_gb + "\n")
                    sys.stdout.write("\nSolutions: "
                                     "\n1. Add suitable key (in above qualifiers part) to \"--keys\".")
                    sys.stdout.write("\n2. Add suitable present key and its value to the problematic annotation record.")
                    sys.stdout.write("\n3. Use \"--ignore-format-error\" to skip this annotation record.\n")
                    sys.exit()
        else:
            # FIX: was a silent sys.stdout.write("") no-op; report the missing input file.
            sys.stdout.write("Warning: " + this_gb + " not found!\n")
    if options.combine_exon:
        # merge "__exonN" pieces of the same (gene, copy) back into one record
        regions_with_exon = [x for x in list(out_gene_dict) if x[2]]
        region_set_dict = {}
        region_set_types = {}
        for region_name in regions_with_exon:
            region_set_name = region_name[:2]
            exon_num = int(region_name[2].replace("__exon", ""))
            if region_set_name not in region_set_dict:
                region_set_dict[region_set_name] = []
                region_set_types[region_set_name] = set()
            region_set_dict[region_set_name].append(exon_num)
            region_set_types[region_set_name].add(region_name_to_type[region_name])
        for region_set_name in region_set_dict:
            region_set_dict[region_set_name].sort()
            seq_names = set()
            for exon_num in region_set_dict[region_set_name]:
                for gb_name in out_gene_dict[tuple(list(region_set_name) + ["__exon" + str(exon_num)])]:
                    seq_names.add(gb_name)
            new_name = tuple(list(region_set_name) + [""])
            if new_name not in out_gene_dict:
                out_gene_dict[new_name] = {}
            region_name_to_type[new_name] = "_".join(sorted(region_set_types[region_set_name]))
            for gb_name in seq_names:
                out_gene_dict[new_name][gb_name] = ""
                for exon_num in region_set_dict[region_set_name]:
                    out_gene_dict[new_name][gb_name] += \
                        out_gene_dict[tuple(list(region_set_name) + ["__exon" + str(exon_num)])].get(gb_name, "")
            for exon_num in region_set_dict[region_set_name]:
                del out_gene_dict[tuple(list(region_set_name) + ["__exon" + str(exon_num)])]
    if options.mix:
        gene_mixture = dict()
        for region_name in out_gene_dict:
            region_name_str = "".join(region_name).replace(" ", "_")
            for this_seq_name_ in out_gene_dict[region_name]:
                new_seq_name_ = region_name_str + " " + region_name_to_type.get(region_name, "region") + \
                                " - " + this_seq_name_.replace(" ", "_").replace(",", "_")
                gene_mixture[new_seq_name_] = out_gene_dict[region_name][this_seq_name_]
        write_fasta(os.path.join(gene_dir, "gene.fasta"), gene_mixture, overwrite=options.overwrite)
        intergene_mixture = dict()
        for region_name in out_intergenic_dict:
            region_name_str = "--".join(["".join(x) for x in region_name[:2]]).replace(" ", "_") + region_name[2]
            for this_seq_name_ in out_intergenic_dict[region_name]:
                new_seq_name_ = region_name_str + " " + region_name_to_type.get(region_name, "region") + \
                                " - " + this_seq_name_.replace(" ", "_").replace(",", "_")
                intergene_mixture[new_seq_name_] = out_intergenic_dict[region_name][this_seq_name_]
        write_fasta(os.path.join(intergenic_dir, "intergene.fasta"), intergene_mixture,
                    overwrite=options.overwrite)
    else:
        for region_name in out_gene_dict:
            write_fasta(os.path.join(gene_dir, "".join(region_name).replace(" ", "_") + ".fasta"),
                        out_gene_dict[region_name], overwrite=options.overwrite)
        for region_name in out_intergenic_dict:
            write_fasta(os.path.join(
                intergenic_dir,
                "--".join(["".join(x) for x in region_name[:2]]).replace(" ", "_") + region_name[2] + ".fasta"),
                out_intergenic_dict[region_name], overwrite=options.overwrite)
    write_statistics(os.path.join(options.out_put, "statistics.txt"),
                     base_name_list, out_gene_dict, out_intergenic_dict)
    sys.stdout.write("Time cost: " + str(time.time() - time0) + "\n")


if __name__ == '__main__':
    sys.stdout.write("By jinjianjun@mail.kib.ac.cn 2017\n")
    main()
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: waves/order.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from . import amount_pb2 as waves_dot_amount__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='waves/order.proto', package='waves', syntax='proto3', serialized_pb=_b('\n\x11waves/order.proto\x12\x05waves\x1a\x12waves/amount.proto\"<\n\tAssetPair\x12\x17\n\x0f\x61mount_asset_id\x18\x01 \x01(\x0c\x12\x16\n\x0eprice_asset_id\x18\x02 \x01(\x0c\"\xc3\x02\n\x05Order\x12\x10\n\x08\x63hain_id\x18\x01 \x01(\x05\x12\x19\n\x11sender_public_key\x18\x02 \x01(\x0c\x12\x1a\n\x12matcher_public_key\x18\x03 \x01(\x0c\x12$\n\nasset_pair\x18\x04 \x01(\x0b\x32\x10.waves.AssetPair\x12%\n\norder_side\x18\x05 \x01(\x0e\x32\x11.waves.Order.Side\x12\x0e\n\x06\x61mount\x18\x06 \x01(\x03\x12\r\n\x05price\x18\x07 \x01(\x03\x12\x11\n\ttimestamp\x18\x08 \x01(\x03\x12\x12\n\nexpiration\x18\t \x01(\x03\x12\"\n\x0bmatcher_fee\x18\n \x01(\x0b\x32\r.waves.Amount\x12\x0f\n\x07version\x18\x0b \x01(\x05\x12\x0e\n\x06proofs\x18\x0c \x03(\x0c\"\x19\n\x04Side\x12\x07\n\x03\x42UY\x10\x00\x12\x08\n\x04SELL\x10\x01\x42*\n com.wavesplatform.protobuf.order\xaa\x02\x05Wavesb\x06proto3') , dependencies=[waves_dot_amount__pb2.DESCRIPTOR,]) _ORDER_SIDE = _descriptor.EnumDescriptor( name='Side', full_name='waves.Order.Side', filename=None, file=DESCRIPTOR, values=[ _descriptor.EnumValueDescriptor( name='BUY', index=0, number=0, options=None, type=None), _descriptor.EnumValueDescriptor( name='SELL', index=1, number=1, options=None, type=None), ], containing_type=None, options=None, serialized_start=409, 
serialized_end=434, ) _sym_db.RegisterEnumDescriptor(_ORDER_SIDE) _ASSETPAIR = _descriptor.Descriptor( name='AssetPair', full_name='waves.AssetPair', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='amount_asset_id', full_name='waves.AssetPair.amount_asset_id', index=0, number=1, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='price_asset_id', full_name='waves.AssetPair.price_asset_id', index=1, number=2, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=48, serialized_end=108, ) _ORDER = _descriptor.Descriptor( name='Order', full_name='waves.Order', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='chain_id', full_name='waves.Order.chain_id', index=0, number=1, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='sender_public_key', full_name='waves.Order.sender_public_key', index=1, number=2, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='matcher_public_key', full_name='waves.Order.matcher_public_key', index=2, number=3, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='asset_pair', full_name='waves.Order.asset_pair', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='order_side', full_name='waves.Order.order_side', index=4, number=5, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='amount', full_name='waves.Order.amount', index=5, number=6, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='price', full_name='waves.Order.price', index=6, number=7, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='timestamp', full_name='waves.Order.timestamp', index=7, number=8, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='expiration', full_name='waves.Order.expiration', index=8, number=9, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='matcher_fee', full_name='waves.Order.matcher_fee', index=9, number=10, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='version', full_name='waves.Order.version', index=10, number=11, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='proofs', full_name='waves.Order.proofs', index=11, number=12, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_types=[ _ORDER_SIDE, ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=111, serialized_end=434, ) _ORDER.fields_by_name['asset_pair'].message_type = _ASSETPAIR _ORDER.fields_by_name['order_side'].enum_type = _ORDER_SIDE _ORDER.fields_by_name['matcher_fee'].message_type = waves_dot_amount__pb2._AMOUNT _ORDER_SIDE.containing_type = _ORDER DESCRIPTOR.message_types_by_name['AssetPair'] = _ASSETPAIR DESCRIPTOR.message_types_by_name['Order'] = _ORDER _sym_db.RegisterFileDescriptor(DESCRIPTOR) AssetPair = _reflection.GeneratedProtocolMessageType('AssetPair', (_message.Message,), dict( DESCRIPTOR = _ASSETPAIR, __module__ = 'waves.order_pb2' # @@protoc_insertion_point(class_scope:waves.AssetPair) )) _sym_db.RegisterMessage(AssetPair) Order = _reflection.GeneratedProtocolMessageType('Order', (_message.Message,), dict( DESCRIPTOR = _ORDER, __module__ = 'waves.order_pb2' # @@protoc_insertion_point(class_scope:waves.Order) )) _sym_db.RegisterMessage(Order) DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n com.wavesplatform.protobuf.order\252\002\005Waves')) # @@protoc_insertion_point(module_scope)
# Licensed to the Software Freedom Conservancy (SFC) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The SFC licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """The WebDriver implementation.""" from abc import ABCMeta import base64 import copy from contextlib import (contextmanager, asynccontextmanager) import importlib import pkgutil import warnings import sys from .command import Command from .errorhandler import ErrorHandler from .file_detector import FileDetector, LocalFileDetector from .mobile import Mobile from .remote_connection import RemoteConnection from .script_key import ScriptKey from .switch_to import SwitchTo from .webelement import WebElement from selenium.common.exceptions import (InvalidArgumentException, JavascriptException, WebDriverException, NoSuchCookieException, UnknownMethodException) from selenium.webdriver.common.by import By from selenium.webdriver.common.timeouts import Timeouts from selenium.webdriver.common.html5.application_cache import ApplicationCache from selenium.webdriver.support.relative_locator import RelativeBy from six import add_metaclass try: str = basestring except NameError: pass cdp = None def import_cdp(): global cdp if cdp is None: cdp = importlib.import_module("selenium.webdriver.common.bidi.cdp") _W3C_CAPABILITY_NAMES = frozenset([ 'acceptInsecureCerts', 'browserName', 'browserVersion', 'platformName', 
    'pageLoadStrategy',
    'proxy',
    'setWindowRect',
    'timeouts',
    'unhandledPromptBehavior',
    'strictFileInteractability'
])

# Mapping from legacy (OSS/JSONWire) capability names to their W3C
# equivalents, applied during capability conversion.
_OSS_W3C_CONVERSION = {
    'acceptSslCerts': 'acceptInsecureCerts',
    'version': 'browserVersion',
    'platform': 'platformName'
}

# Lazily bound devtools module for the CDP-based listeners further below.
devtools = None


def _make_w3c_caps(caps):
    """Makes a W3C alwaysMatch capabilities object.

    Filters out capability names that are not in the W3C spec. Spec-compliant
    drivers will reject requests containing unknown capability names.

    Moves the Firefox profile, if present, from the old location to the new Firefox
    options object.

    :Args:
     - caps - A dictionary of capabilities requested by the caller.
    """
    caps = copy.deepcopy(caps)
    profile = caps.get('firefox_profile')
    always_match = {}
    # W3C requires a lowercase proxyType value.
    if caps.get('proxy') and caps['proxy'].get('proxyType'):
        caps['proxy']['proxyType'] = caps['proxy']['proxyType'].lower()
    for k, v in caps.items():
        # Translate legacy names; 'platform' values were uppercase in OSS.
        if v and k in _OSS_W3C_CONVERSION:
            always_match[_OSS_W3C_CONVERSION[k]] = v.lower() if k == 'platform' else v
        # Keep spec-defined names and vendor-prefixed ('prefix:name') ones.
        if k in _W3C_CAPABILITY_NAMES or ':' in k:
            always_match[k] = v
    if profile:
        moz_opts = always_match.get('moz:firefoxOptions', {})
        # If it's already present, assume the caller did that intentionally.
        if 'profile' not in moz_opts:
            # Don't mutate the original capabilities.
            new_opts = copy.deepcopy(moz_opts)
            new_opts['profile'] = profile
            always_match['moz:firefoxOptions'] = new_opts
    return {"firstMatch": [{}], "alwaysMatch": always_match}


def get_remote_connection(capabilities, command_executor, keep_alive, ignore_local_proxy=False):
    """Pick the RemoteConnection subclass matching the requested browserName,
    falling back to the generic RemoteConnection when no candidate matches."""
    # Imported locally to avoid circular imports at module load time.
    from selenium.webdriver.chromium.remote_connection import ChromiumRemoteConnection
    from selenium.webdriver.safari.remote_connection import SafariRemoteConnection
    from selenium.webdriver.firefox.remote_connection import FirefoxRemoteConnection

    candidates = [RemoteConnection] + [ChromiumRemoteConnection, SafariRemoteConnection, FirefoxRemoteConnection]
    handler = next(
        (c for c in candidates if c.browser_name == capabilities.get('browserName')),
        RemoteConnection
    )

    return handler(command_executor, keep_alive=keep_alive, ignore_proxy=ignore_local_proxy)


@add_metaclass(ABCMeta)
class BaseWebDriver(object):
    """
    Abstract Base Class for all Webdriver subtypes.
    ABC's allow custom implementations of Webdriver to be registered so that isinstance type checks will succeed.
    """
    # TODO: After dropping Python 2, use ABC instead of ABCMeta and remove metaclass decorator.


class WebDriver(BaseWebDriver):
    """
    Controls a browser by sending commands to a remote server.
    This server is expected to be running the WebDriver wire protocol
    as defined at
    https://github.com/SeleniumHQ/selenium/wiki/JsonWireProtocol

    :Attributes:
     - session_id - String ID of the browser session started and controlled by this WebDriver.
     - capabilities - Dictionary of effective capabilities of this browser session as returned
         by the remote server. See https://github.com/SeleniumHQ/selenium/wiki/DesiredCapabilities
     - command_executor - remote_connection.RemoteConnection object used to execute commands.
     - error_handler - errorhandler.ErrorHandler object used to handle errors.
    """

    # Element class instantiated by create_web_element(); subclasses may
    # override to return a specialised WebElement type.
    _web_element_cls = WebElement

    def __init__(self, command_executor='http://127.0.0.1:4444',
                 desired_capabilities=None, browser_profile=None, proxy=None,
                 keep_alive=True, file_detector=None, options=None):
        """
        Create a new driver that will issue commands using the wire protocol.

        :Args:
         - command_executor - Either a string representing URL of the remote server or a custom
             remote_connection.RemoteConnection object. Defaults to 'http://127.0.0.1:4444/wd/hub'.
         - desired_capabilities - A dictionary of capabilities to request when
             starting the browser session. Required parameter.
         - browser_profile - A selenium.webdriver.firefox.firefox_profile.FirefoxProfile object.
             Only used if Firefox is requested. Optional.
         - proxy - A selenium.webdriver.common.proxy.Proxy object. The browser session will
             be started with given proxy settings, if possible. Optional.
         - keep_alive - Whether to configure remote_connection.RemoteConnection to use
             HTTP keep-alive. Defaults to True.
         - file_detector - Pass custom file detector object during instantiation. If None,
             then default LocalFileDetector() will be used.
         - options - instance of a driver options.Options class
        """
        capabilities = {}
        _ignore_local_proxy = False
        if options is not None:
            capabilities = options.to_capabilities()
            _ignore_local_proxy = options._ignore_local_proxy
        # Explicit desired_capabilities take precedence over options-derived ones.
        if desired_capabilities is not None:
            if not isinstance(desired_capabilities, dict):
                raise WebDriverException("Desired Capabilities must be a dictionary")
            else:
                capabilities.update(desired_capabilities)
        self.command_executor = command_executor
        # A plain URL string means we must build a RemoteConnection ourselves.
        if isinstance(self.command_executor, (str, bytes)):
            self.command_executor = get_remote_connection(capabilities, command_executor=command_executor,
                                                          keep_alive=keep_alive,
                                                          ignore_local_proxy=_ignore_local_proxy)
        self._is_remote = True
        self.session_id = None
        self.caps = {}
        self.pinned_scripts = {}
        self.error_handler = ErrorHandler()
        self.start_client()
        self.start_session(capabilities, browser_profile)
        self._switch_to = SwitchTo(self)
        self._mobile = Mobile(self)
        self.file_detector = file_detector or LocalFileDetector()

    def __repr__(self):
        return '<{0.__module__}.{0.__name__} (session="{1}")>'.format(
            type(self), self.session_id)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Ensure the session is torn down when used as a context manager.
        self.quit()

    @contextmanager
    def file_detector_context(self, file_detector_class, *args, **kwargs):
        """
        Overrides the current file detector (if necessary) in limited context.
        Ensures the original file detector is set afterwards.

        Example:

        with webdriver.file_detector_context(UselessFileDetector):
            someinput.send_keys('/etc/hosts')

        :Args:
         - file_detector_class - Class of the desired file detector. If the class is different
             from the current file_detector, then the class is instantiated with args and kwargs
             and used as a file detector during the duration of the context manager.
         - args - Optional arguments that get passed to the file detector class during
             instantiation.
         - kwargs - Keyword arguments, passed the same way as args.
        """
        last_detector = None
        # Only swap the detector if the requested class is not already active.
        if not isinstance(self.file_detector, file_detector_class):
            last_detector = self.file_detector
            self.file_detector = file_detector_class(*args, **kwargs)
        try:
            yield
        finally:
            # Restore the previous detector even if the body raised.
            if last_detector is not None:
                self.file_detector = last_detector

    @property
    def mobile(self):
        return self._mobile

    @property
    def name(self):
        """Returns the name of the underlying browser for this instance.

        :Usage:
            ::

                name = driver.name
        """
        if 'browserName' in self.caps:
            return self.caps['browserName']
        else:
            raise KeyError('browserName not specified in session capabilities')

    def start_client(self):
        """
        Called before starting a new session. This method may be overridden
        to define custom startup behavior.
        """
        pass

    def stop_client(self):
        """
        Called after executing a quit command. This method may be overridden
        to define custom shutdown behavior.
        """
        pass

    def start_session(self, capabilities, browser_profile=None):
        """
        Creates a new session with the desired capabilities.

        :Args:
         - browser_name - The name of the browser to request.
         - version - Which browser version to request.
         - platform - Which platform to request the browser on.
         - javascript_enabled - Whether the new session should support JavaScript.
         - browser_profile - A selenium.webdriver.firefox.firefox_profile.FirefoxProfile object. Only used if Firefox is requested.
        """
        if not isinstance(capabilities, dict):
            raise InvalidArgumentException("Capabilities must be a dictionary")
        if browser_profile:
            if "moz:firefoxOptions" in capabilities:
                capabilities["moz:firefoxOptions"]["profile"] = browser_profile.encoded
            else:
                capabilities.update({'firefox_profile': browser_profile.encoded})
        # Send both W3C and legacy (JSONWire) payloads; the server answers in
        # whichever dialect it speaks and we detect that below.
        w3c_caps = _make_w3c_caps(capabilities)
        parameters = {"capabilities": w3c_caps,
                      "desiredCapabilities": capabilities}
        response = self.execute(Command.NEW_SESSION, parameters)
        if 'sessionId' not in response:
            response = response['value']
        self.session_id = response['sessionId']
        self.caps = response.get('value')

        # if capabilities is none we are probably speaking to
        # a W3C endpoint
        if self.caps is None:
            self.caps = response.get('capabilities')

        # Double check to see if we have a W3C Compliant browser
        self.w3c = response.get('status') is None
        self.command_executor.w3c = self.w3c

    def _wrap_value(self, value):
        # Recursively convert WebElement instances into the wire-protocol
        # element reference dicts (both legacy 'ELEMENT' and W3C keys).
        if isinstance(value, dict):
            converted = {}
            for key, val in value.items():
                converted[key] = self._wrap_value(val)
            return converted
        elif isinstance(value, self._web_element_cls):
            return {'ELEMENT': value.id, 'element-6066-11e4-a52e-4f735466cecf': value.id}
        elif isinstance(value, list):
            return list(self._wrap_value(item) for item in value)
        else:
            return value

    def create_web_element(self, element_id):
        """Creates a web element with the specified `element_id`."""
        return self._web_element_cls(self, element_id, w3c=self.w3c)

    def _unwrap_value(self, value):
        # Inverse of _wrap_value: turn element reference dicts coming back
        # from the server into live WebElement objects.
        if isinstance(value, dict):
            if 'ELEMENT' in value or 'element-6066-11e4-a52e-4f735466cecf' in value:
                wrapped_id = value.get('ELEMENT', None)
                if wrapped_id:
                    return self.create_web_element(value['ELEMENT'])
                else:
                    return self.create_web_element(value['element-6066-11e4-a52e-4f735466cecf'])
            else:
                for key, val in value.items():
                    value[key] = self._unwrap_value(val)
                return value
        elif isinstance(value, list):
            return list(self._unwrap_value(item) for item in value)
        else:
            return value

    def execute(self, driver_command,
params=None):
        """
        Sends a command to be executed by a command.CommandExecutor.

        :Args:
         - driver_command: The name of the command to execute as a string.
         - params: A dictionary of named parameters to send with the command.

        :Returns:
          The command's JSON response loaded into a dictionary object.
        """
        # Every command after session creation must carry the session id.
        if self.session_id is not None:
            if not params:
                params = {'sessionId': self.session_id}
            elif 'sessionId' not in params:
                params['sessionId'] = self.session_id

        params = self._wrap_value(params)
        response = self.command_executor.execute(driver_command, params)
        if response:
            self.error_handler.check_response(response)
            response['value'] = self._unwrap_value(
                response.get('value', None))
            return response
        # If the server doesn't send a response, assume the command was
        # a success
        return {'success': 0, 'value': None, 'sessionId': self.session_id}

    def get(self, url):
        """
        Loads a web page in the current browser session.
        """
        self.execute(Command.GET, {'url': url})

    @property
    def title(self):
        """Returns the title of the current page.

        :Usage:
            ::

                title = driver.title
        """
        resp = self.execute(Command.GET_TITLE)
        return resp['value'] if resp['value'] is not None else ""

    def find_element_by_id(self, id_):
        """Finds an element by id.

        :Args:
         - id\\_ - The id of the element to be found.

        :Returns:
         - WebElement - the element if it was found

        :Raises:
         - NoSuchElementException - if the element wasn't found

        :Usage:
            ::

                element = driver.find_element_by_id('foo')
        """
        warnings.warn("find_element_by_* commands are deprecated. Please use find_element() instead")
        return self.find_element(by=By.ID, value=id_)

    def find_elements_by_id(self, id_):
        """
        Finds multiple elements by id.

        :Args:
         - id\\_ - The id of the elements to be found.

        :Returns:
         - list of WebElement - a list with elements if any was found.  An
           empty list if not

        :Usage:
            ::

                elements = driver.find_elements_by_id('foo')
        """
        warnings.warn("find_elements_by_* commands are deprecated. Please use find_elements() instead")
        return self.find_elements(by=By.ID, value=id_)

    def find_element_by_xpath(self, xpath):
        """
        Finds an element by xpath.

        :Args:
         - xpath - The xpath locator of the element to find.

        :Returns:
         - WebElement - the element if it was found

        :Raises:
         - NoSuchElementException - if the element wasn't found

        :Usage:
            ::

                element = driver.find_element_by_xpath('//div/td[1]')
        """
        warnings.warn("find_element_by_* commands are deprecated. Please use find_element() instead")
        return self.find_element(by=By.XPATH, value=xpath)

    def find_elements_by_xpath(self, xpath):
        """
        Finds multiple elements by xpath.

        :Args:
         - xpath - The xpath locator of the elements to be found.

        :Returns:
         - list of WebElement - a list with elements if any was found.  An
           empty list if not

        :Usage:
            ::

                elements = driver.find_elements_by_xpath("//div[contains(@class, 'foo')]")
        """
        warnings.warn("find_elements_by_* commands are deprecated. Please use find_elements() instead")
        return self.find_elements(by=By.XPATH, value=xpath)

    def find_element_by_link_text(self, link_text):
        """
        Finds an element by link text.

        :Args:
         - link_text: The text of the element to be found.

        :Returns:
         - WebElement - the element if it was found

        :Raises:
         - NoSuchElementException - if the element wasn't found

        :Usage:
            ::

                element = driver.find_element_by_link_text('Sign In')
        """
        warnings.warn("find_element_by_* commands are deprecated. Please use find_element() instead")
        return self.find_element(by=By.LINK_TEXT, value=link_text)

    def find_elements_by_link_text(self, text):
        """
        Finds elements by link text.

        :Args:
         - text: The text of the elements to be found.

        :Returns:
         - list of webelement - a list with elements if any was found.  an
           empty list if not

        :Usage:
            ::

                elements = driver.find_elements_by_link_text('Sign In')
        """
        warnings.warn("find_elements_by_* commands are deprecated. Please use find_elements() instead")
        return self.find_elements(by=By.LINK_TEXT, value=text)

    def find_element_by_partial_link_text(self, link_text):
        """
        Finds an element by a partial match of its link text.

        :Args:
         - link_text: The text of the element to partially match on.

        :Returns:
         - WebElement - the element if it was found

        :Raises:
         - NoSuchElementException - if the element wasn't found

        :Usage:
            ::

                element = driver.find_element_by_partial_link_text('Sign')
        """
        warnings.warn("find_element_by_* commands are deprecated. Please use find_element() instead")
        return self.find_element(by=By.PARTIAL_LINK_TEXT, value=link_text)

    def find_elements_by_partial_link_text(self, link_text):
        """
        Finds elements by a partial match of their link text.

        :Args:
         - link_text: The text of the element to partial match on.

        :Returns:
         - list of webelement - a list with elements if any was found.  an
           empty list if not

        :Usage:
            ::

                elements = driver.find_elements_by_partial_link_text('Sign')
        """
        warnings.warn("find_elements_by_* commands are deprecated. Please use find_elements() instead")
        return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text)

    def find_element_by_name(self, name):
        """
        Finds an element by name.

        :Args:
         - name: The name of the element to find.

        :Returns:
         - WebElement - the element if it was found

        :Raises:
         - NoSuchElementException - if the element wasn't found

        :Usage:
            ::

                element = driver.find_element_by_name('foo')
        """
        warnings.warn("find_element_by_* commands are deprecated. Please use find_element() instead")
        return self.find_element(by=By.NAME, value=name)

    def find_elements_by_name(self, name):
        """
        Finds elements by name.

        :Args:
         - name: The name of the elements to find.

        :Returns:
         - list of webelement - a list with elements if any was found.  an
           empty list if not

        :Usage:
            ::

                elements = driver.find_elements_by_name('foo')
        """
        warnings.warn("find_elements_by_* commands are deprecated. Please use find_elements() instead")
        return self.find_elements(by=By.NAME, value=name)

    def find_element_by_tag_name(self, name):
        """
        Finds an element by tag name.

        :Args:
         - name - name of html tag (eg: h1, a, span)

        :Returns:
         - WebElement - the element if it was found

        :Raises:
         - NoSuchElementException - if the element wasn't found

        :Usage:
            ::

                element = driver.find_element_by_tag_name('h1')
        """
        warnings.warn("find_element_by_* commands are deprecated. Please use find_element() instead")
        return self.find_element(by=By.TAG_NAME, value=name)

    def find_elements_by_tag_name(self, name):
        """
        Finds elements by tag name.

        :Args:
         - name - name of html tag (eg: h1, a, span)

        :Returns:
         - list of WebElement - a list with elements if any was found.  An
           empty list if not

        :Usage:
            ::

                elements = driver.find_elements_by_tag_name('h1')
        """
        warnings.warn("find_elements_by_* commands are deprecated. Please use find_elements() instead")
        return self.find_elements(by=By.TAG_NAME, value=name)

    def find_element_by_class_name(self, name):
        """
        Finds an element by class name.

        :Args:
         - name: The class name of the element to find.

        :Returns:
         - WebElement - the element if it was found

        :Raises:
         - NoSuchElementException - if the element wasn't found

        :Usage:
            ::

                element = driver.find_element_by_class_name('foo')
        """
        warnings.warn("find_element_by_* commands are deprecated. Please use find_element() instead")
        return self.find_element(by=By.CLASS_NAME, value=name)

    def find_elements_by_class_name(self, name):
        """
        Finds elements by class name.

        :Args:
         - name: The class name of the elements to find.

        :Returns:
         - list of WebElement - a list with elements if any was found.  An
           empty list if not

        :Usage:
            ::

                elements = driver.find_elements_by_class_name('foo')
        """
        warnings.warn("find_elements_by_* commands are deprecated. Please use find_elements() instead")
        return self.find_elements(by=By.CLASS_NAME, value=name)

    def find_element_by_css_selector(self, css_selector):
        """
        Finds an element by css selector.
        :Args:
         - css_selector - CSS selector string, ex: 'a.nav#home'

        :Returns:
         - WebElement - the element if it was found

        :Raises:
         - NoSuchElementException - if the element wasn't found

        :Usage:
            ::

                element = driver.find_element_by_css_selector('#foo')
        """
        warnings.warn("find_element_by_* commands are deprecated. Please use find_element() instead")
        return self.find_element(by=By.CSS_SELECTOR, value=css_selector)

    def find_elements_by_css_selector(self, css_selector):
        """
        Finds elements by css selector.

        :Args:
         - css_selector - CSS selector string, ex: 'a.nav#home'

        :Returns:
         - list of WebElement - a list with elements if any was found.  An
           empty list if not

        :Usage:
            ::

                elements = driver.find_elements_by_css_selector('.foo')
        """
        warnings.warn("find_elements_by_* commands are deprecated. Please use find_elements() instead")
        return self.find_elements(by=By.CSS_SELECTOR, value=css_selector)

    def pin_script(self, script):
        """
        Store a JavaScript snippet under a new ScriptKey so it can be
        re-executed later via execute_script(script_key).
        """
        script_key = ScriptKey()
        self.pinned_scripts[script_key.id] = script
        return script_key

    def unpin(self, script_key):
        """
        Remove a previously pinned script from this session.
        """
        self.pinned_scripts.pop(script_key.id)

    def get_pinned_scripts(self):
        """
        Return the ids of all currently pinned scripts.
        """
        return list(self.pinned_scripts.keys())

    def execute_script(self, script, *args):
        """
        Synchronously Executes JavaScript in the current window/frame.

        :Args:
         - script: The JavaScript to execute.
         - \\*args: Any applicable arguments for your JavaScript.

        :Usage:
            ::

                driver.execute_script('return document.title;')
        """
        # A ScriptKey means "run the pinned script stored under this key".
        if isinstance(script, ScriptKey):
            try:
                script = self.pinned_scripts[script.id]
            except KeyError:
                raise JavascriptException("Pinned script could not be found")

        converted_args = list(args)
        command = None
        if self.w3c:
            command = Command.W3C_EXECUTE_SCRIPT
        else:
            command = Command.EXECUTE_SCRIPT

        return self.execute(command, {
            'script': script,
            'args': converted_args})['value']

    def execute_async_script(self, script, *args):
        """
        Asynchronously Executes JavaScript in the current window/frame.

        :Args:
         - script: The JavaScript to execute.
         - \\*args: Any applicable arguments for your JavaScript.

        :Usage:
            ::

                script = "var callback = arguments[arguments.length - 1]; " \\
                         "window.setTimeout(function(){ callback('timeout') }, 3000);"
                driver.execute_async_script(script)
        """
        converted_args = list(args)
        if self.w3c:
            command = Command.W3C_EXECUTE_SCRIPT_ASYNC
        else:
            command = Command.EXECUTE_ASYNC_SCRIPT

        return self.execute(command, {
            'script': script,
            'args': converted_args})['value']

    @property
    def current_url(self):
        """
        Gets the URL of the current page.

        :Usage:
            ::

                driver.current_url
        """
        return self.execute(Command.GET_CURRENT_URL)['value']

    @property
    def page_source(self):
        """
        Gets the source of the current page.

        :Usage:
            ::

                driver.page_source
        """
        return self.execute(Command.GET_PAGE_SOURCE)['value']

    def close(self):
        """
        Closes the current window.

        :Usage:
            ::

                driver.close()
        """
        self.execute(Command.CLOSE)

    def quit(self):
        """
        Quits the driver and closes every associated window.

        :Usage:
            ::

                driver.quit()
        """
        try:
            self.execute(Command.QUIT)
        finally:
            # Always release the client and the HTTP connection, even if
            # the QUIT command itself failed.
            self.stop_client()
            self.command_executor.close()

    @property
    def current_window_handle(self):
        """
        Returns the handle of the current window.

        :Usage:
            ::

                driver.current_window_handle
        """
        if self.w3c:
            return self.execute(Command.W3C_GET_CURRENT_WINDOW_HANDLE)['value']
        else:
            return self.execute(Command.GET_CURRENT_WINDOW_HANDLE)['value']

    @property
    def window_handles(self):
        """
        Returns the handles of all windows within the current session.
:Usage: :: driver.window_handles """ if self.w3c: return self.execute(Command.W3C_GET_WINDOW_HANDLES)['value'] else: return self.execute(Command.GET_WINDOW_HANDLES)['value'] def maximize_window(self): """ Maximizes the current window that webdriver is using """ params = None command = Command.W3C_MAXIMIZE_WINDOW if not self.w3c: command = Command.MAXIMIZE_WINDOW params = {'windowHandle': 'current'} self.execute(command, params) def fullscreen_window(self): """ Invokes the window manager-specific 'full screen' operation """ self.execute(Command.FULLSCREEN_WINDOW) def minimize_window(self): """ Invokes the window manager-specific 'minimize' operation """ self.execute(Command.MINIMIZE_WINDOW) def print_page(self, print_options = None): """ Takes PDF of the current page. The driver makes a best effort to return a PDF based on the provided parameters. """ options = {} if print_options: options = print_options.to_dict() return self.execute(Command.PRINT_PAGE, options)['value'] @property def switch_to(self): """ :Returns: - SwitchTo: an object containing all options to switch focus into :Usage: :: element = driver.switch_to.active_element alert = driver.switch_to.alert driver.switch_to.default_content() driver.switch_to.frame('frame_name') driver.switch_to.frame(1) driver.switch_to.frame(driver.find_elements_by_tag_name("iframe")[0]) driver.switch_to.parent_frame() driver.switch_to.window('main') """ return self._switch_to # Navigation def back(self): """ Goes one step backward in the browser history. :Usage: :: driver.back() """ self.execute(Command.GO_BACK) def forward(self): """ Goes one step forward in the browser history. :Usage: :: driver.forward() """ self.execute(Command.GO_FORWARD) def refresh(self): """ Refreshes the current page. :Usage: :: driver.refresh() """ self.execute(Command.REFRESH) # Options def get_cookies(self): """ Returns a set of dictionaries, corresponding to cookies visible in the current session. 
:Usage: :: driver.get_cookies() """ return self.execute(Command.GET_ALL_COOKIES)['value'] def get_cookie(self, name): """ Get a single cookie by name. Returns the cookie if found, None if not. :Usage: :: driver.get_cookie('my_cookie') """ if self.w3c: try: return self.execute(Command.GET_COOKIE, {'name': name})['value'] except NoSuchCookieException: return None else: cookies = self.get_cookies() for cookie in cookies: if cookie['name'] == name: return cookie return None def delete_cookie(self, name): """ Deletes a single cookie with the given name. :Usage: :: driver.delete_cookie('my_cookie') """ self.execute(Command.DELETE_COOKIE, {'name': name}) def delete_all_cookies(self): """ Delete all cookies in the scope of the session. :Usage: :: driver.delete_all_cookies() """ self.execute(Command.DELETE_ALL_COOKIES) def add_cookie(self, cookie_dict): """ Adds a cookie to your current session. :Args: - cookie_dict: A dictionary object, with required keys - "name" and "value"; optional keys - "path", "domain", "secure", "expiry", "sameSite" Usage: driver.add_cookie({'name' : 'foo', 'value' : 'bar'}) driver.add_cookie({'name' : 'foo', 'value' : 'bar', 'path' : '/'}) driver.add_cookie({'name' : 'foo', 'value' : 'bar', 'path' : '/', 'secure':True}) driver.add_cookie({'name': 'foo', 'value': 'bar', 'sameSite': 'Strict'}) """ if 'sameSite' in cookie_dict: assert cookie_dict['sameSite'] in ['Strict', 'Lax'] self.execute(Command.ADD_COOKIE, {'cookie': cookie_dict}) else: self.execute(Command.ADD_COOKIE, {'cookie': cookie_dict}) # Timeouts def implicitly_wait(self, time_to_wait): """ Sets a sticky timeout to implicitly wait for an element to be found, or a command to complete. This method only needs to be called one time per session. To set the timeout for calls to execute_async_script, see set_script_timeout. 
        :Args:
         - time_to_wait: Amount of time to wait (in seconds)

        :Usage:
            ::

                driver.implicitly_wait(30)
        """
        # W3C takes milliseconds under 'implicit'; legacy takes 'ms'.
        if self.w3c:
            self.execute(Command.SET_TIMEOUTS, {
                'implicit': int(float(time_to_wait) * 1000)})
        else:
            self.execute(Command.IMPLICIT_WAIT, {
                'ms': float(time_to_wait) * 1000})

    def set_script_timeout(self, time_to_wait):
        """
        Set the amount of time that the script should wait during an
        execute_async_script call before throwing an error.

        :Args:
         - time_to_wait: The amount of time to wait (in seconds)

        :Usage:
            ::

                driver.set_script_timeout(30)
        """
        if self.w3c:
            self.execute(Command.SET_TIMEOUTS, {
                'script': int(float(time_to_wait) * 1000)})
        else:
            self.execute(Command.SET_SCRIPT_TIMEOUT, {
                'ms': float(time_to_wait) * 1000})

    def set_page_load_timeout(self, time_to_wait):
        """
        Set the amount of time to wait for a page load to complete
        before throwing an error.

        :Args:
         - time_to_wait: The amount of time to wait

        :Usage:
            ::

                driver.set_page_load_timeout(30)
        """
        # Try the W3C payload first and fall back to the legacy one if the
        # remote end rejects it.
        try:
            self.execute(Command.SET_TIMEOUTS, {
                'pageLoad': int(float(time_to_wait) * 1000)})
        except WebDriverException:
            self.execute(Command.SET_TIMEOUTS, {
                'ms': float(time_to_wait) * 1000,
                'type': 'page load'})

    @property
    def timeouts(self):
        """
        Get all the timeouts that have been set on the current session

        :Usage:
            ::

                driver.timeouts
        :rtype: Timeout
        """
        timeouts = self.execute(Command.GET_TIMEOUTS)['value']
        # Wire protocol reports milliseconds; Timeouts works in seconds.
        timeouts["implicit_wait"] = timeouts.pop("implicit") / 1000
        timeouts["page_load"] = timeouts.pop("pageLoad") / 1000
        timeouts["script"] = timeouts.pop("script") / 1000
        return Timeouts(**timeouts)

    @timeouts.setter
    def timeouts(self, timeouts):
        """
        Set all timeouts for the session. This will override any previously
        set timeouts.

        :Usage:
            ::

                my_timeouts = Timeouts()
                my_timeouts.implicit_wait = 10
                driver.timeouts = my_timeouts
        """
        self.execute(Command.SET_TIMEOUTS, timeouts._to_json())['value']

    def find_element(self, by=By.ID, value=None):
        """
        Find an element given a By strategy and locator.

        :Usage:
            ::

                element = driver.find_element(By.ID, 'foo')

        :rtype: WebElement
        """
        # W3C endpoints only accept a subset of locator strategies; rewrite
        # ID/TAG_NAME/CLASS_NAME/NAME lookups as CSS selectors.
        if self.w3c:
            if by == By.ID:
                by = By.CSS_SELECTOR
                value = '[id="%s"]' % value
            elif by == By.TAG_NAME:
                by = By.CSS_SELECTOR
            elif by == By.CLASS_NAME:
                by = By.CSS_SELECTOR
                value = ".%s" % value
            elif by == By.NAME:
                by = By.CSS_SELECTOR
                value = '[name="%s"]' % value
        return self.execute(Command.FIND_ELEMENT, {
            'using': by,
            'value': value})['value']

    def find_elements(self, by=By.ID, value=None):
        """
        Find elements given a By strategy and locator.

        :Usage:
            ::

                elements = driver.find_elements(By.CLASS_NAME, 'foo')

        :rtype: list of WebElement
        """
        # Relative locators are resolved client-side by injected JavaScript.
        if isinstance(by, RelativeBy):
            _pkg = '.'.join(__name__.split('.')[:-1])
            raw_function = pkgutil.get_data(_pkg, 'findElements.js').decode('utf8')
            find_element_js = "return ({}).apply(null, arguments);".format(raw_function)
            return self.execute_script(find_element_js, by.to_dict())

        # Same W3C locator rewriting as in find_element above.
        if self.w3c:
            if by == By.ID:
                by = By.CSS_SELECTOR
                value = '[id="%s"]' % value
            elif by == By.TAG_NAME:
                by = By.CSS_SELECTOR
            elif by == By.CLASS_NAME:
                by = By.CSS_SELECTOR
                value = ".%s" % value
            elif by == By.NAME:
                by = By.CSS_SELECTOR
                value = '[name="%s"]' % value

        # Return empty list if driver returns null
        # See https://github.com/SeleniumHQ/selenium/issues/4555
        return self.execute(Command.FIND_ELEMENTS, {
            'using': by,
            'value': value})['value'] or []

    @property
    def desired_capabilities(self):
        """
        returns the drivers current desired capabilities being used
        """
        warnings.warn("desired_capabilities is deprecated. Please call capabilities.",
                      DeprecationWarning, stacklevel=2)
        return self.caps

    @property
    def capabilities(self):
        """
        returns the drivers current capabilities being used.
        """
        return self.caps

    def get_screenshot_as_file(self, filename):
        """
        Saves a screenshot of the current window to a PNG image file. Returns
           False if there is any IOError, else returns True. Use full paths in
           your filename.

        :Args:
         - filename: The full path you wish to save your screenshot to.
           This should end with a `.png` extension.

        :Usage:
            ::

                driver.get_screenshot_as_file('/Screenshots/foo.png')
        """
        if not filename.lower().endswith('.png'):
            warnings.warn("name used for saved screenshot does not match file "
                          "type. It should end with a `.png` extension", UserWarning)
        png = self.get_screenshot_as_png()
        try:
            with open(filename, 'wb') as f:
                f.write(png)
        except IOError:
            return False
        finally:
            # Drop the decoded image data promptly; screenshots can be large.
            del png
        return True

    def save_screenshot(self, filename):
        """
        Saves a screenshot of the current window to a PNG image file. Returns
           False if there is any IOError, else returns True. Use full paths in
           your filename.

        :Args:
         - filename: The full path you wish to save your screenshot to. This
           should end with a `.png` extension.

        :Usage:
            ::

                driver.save_screenshot('/Screenshots/foo.png')
        """
        return self.get_screenshot_as_file(filename)

    def get_screenshot_as_png(self):
        """
        Gets the screenshot of the current window as a binary data.

        :Usage:
            ::

                driver.get_screenshot_as_png()
        """
        return base64.b64decode(self.get_screenshot_as_base64().encode('ascii'))

    def get_screenshot_as_base64(self):
        """
        Gets the screenshot of the current window as a base64 encoded string
           which is useful in embedded images in HTML.

        :Usage:
            ::

                driver.get_screenshot_as_base64()
        """
        return self.execute(Command.SCREENSHOT)['value']

    def set_window_size(self, width, height, windowHandle='current'):
        """
        Sets the width and height of the current window.
        (window.resizeTo)

        :Args:
         - width: the width in pixels to set the window to
         - height: the height in pixels to set the window to

        :Usage:
            ::

                driver.set_window_size(800,600)
        """
        # W3C drivers only operate on the current window, via the rect API.
        if self.w3c:
            if windowHandle != 'current':
                warnings.warn("Only 'current' window is supported for W3C compatibile browsers.")
            self.set_window_rect(width=int(width), height=int(height))
        else:
            self.execute(Command.SET_WINDOW_SIZE, {
                'width': int(width),
                'height': int(height),
                'windowHandle': windowHandle})

    def get_window_size(self, windowHandle='current'):
        """
        Gets the width and height of the current window.

        :Usage:
            ::

                driver.get_window_size()
        """
        command = Command.GET_WINDOW_SIZE
        if self.w3c:
            if windowHandle != 'current':
                warnings.warn("Only 'current' window is supported for W3C compatibile browsers.")
            size = self.get_window_rect()
        else:
            size = self.execute(command, {'windowHandle': windowHandle})

        # Legacy responses wrap the payload in a 'value' key.
        if size.get('value', None) is not None:
            size = size['value']

        return {k: size[k] for k in ('width', 'height')}

    def set_window_position(self, x, y, windowHandle='current'):
        """
        Sets the x,y position of the current window. (window.moveTo)

        :Args:
         - x: the x-coordinate in pixels to set the window position
         - y: the y-coordinate in pixels to set the window position

        :Usage:
            ::

                driver.set_window_position(0,0)
        """
        if self.w3c:
            if windowHandle != 'current':
                warnings.warn("Only 'current' window is supported for W3C compatibile browsers.")
            return self.set_window_rect(x=int(x), y=int(y))
        else:
            self.execute(Command.SET_WINDOW_POSITION, {
                'x': int(x),
                'y': int(y),
                'windowHandle': windowHandle
            })

    def get_window_position(self, windowHandle='current'):
        """
        Gets the x,y position of the current window.
        :Usage:
            ::

                driver.get_window_position()
        """
        if self.w3c:
            if windowHandle != 'current':
                warnings.warn("Only 'current' window is supported for W3C compatibile browsers.")
            position = self.get_window_rect()
        else:
            position = self.execute(Command.GET_WINDOW_POSITION,
                                    {'windowHandle': windowHandle})['value']

        return {k: position[k] for k in ('x', 'y')}

    def get_window_rect(self):
        """
        Gets the x, y coordinates of the window as well as height and width of
        the current window.

        :Usage:
            ::

                driver.get_window_rect()
        """
        return self.execute(Command.GET_WINDOW_RECT)['value']

    def set_window_rect(self, x=None, y=None, width=None, height=None):
        """
        Sets the x, y coordinates of the window as well as height and width of
        the current window. This method is only supported for W3C compatible
        browsers; other browsers should use `set_window_position` and
        `set_window_size`.

        :Usage:
            ::

                driver.set_window_rect(x=10, y=10)
                driver.set_window_rect(width=100, height=200)
                driver.set_window_rect(x=10, y=10, width=100, height=200)
        """
        if not self.w3c:
            raise UnknownMethodException("set_window_rect is only supported for W3C compatible browsers")

        # At least one complete pair (x,y) or (width,height) must be given.
        if (x is None and y is None) and (height is None and width is None):
            raise InvalidArgumentException("x and y or height and width need values")

        return self.execute(Command.SET_WINDOW_RECT, {"x": x, "y": y,
                                                      "width": width,
                                                      "height": height})['value']

    @property
    def file_detector(self):
        return self._file_detector

    @file_detector.setter
    def file_detector(self, detector):
        """
        Set the file detector to be used when sending keyboard input.
        By default, this is set to a file detector that does nothing.

        see FileDetector
        see LocalFileDetector
        see UselessFileDetector

        :Args:
         - detector: The detector to use. Must not be None.
        """
        if detector is None:
            raise WebDriverException("You may not set a file detector that is null")
        if not isinstance(detector, FileDetector):
            raise WebDriverException("Detector has to be instance of FileDetector")
        self._file_detector = detector

    @property
    def orientation(self):
        """
        Gets the current orientation of the device

        :Usage:
            ::

                orientation = driver.orientation
        """
        return self.execute(Command.GET_SCREEN_ORIENTATION)['value']

    @orientation.setter
    def orientation(self, value):
        """
        Sets the current orientation of the device

        :Args:
         - value: orientation to set it to.

        :Usage:
            ::

                driver.orientation = 'landscape'
        """
        allowed_values = ['LANDSCAPE', 'PORTRAIT']
        if value.upper() in allowed_values:
            self.execute(Command.SET_SCREEN_ORIENTATION, {'orientation': value})
        else:
            raise WebDriverException("You can only set the orientation to 'LANDSCAPE' and 'PORTRAIT'")

    @property
    def application_cache(self):
        """ Returns a ApplicationCache Object to interact with the browser app cache"""
        return ApplicationCache(self)

    @property
    def log_types(self):
        """
        Gets a list of the available log types. This only works with w3c compliant browsers.
:Usage: :: driver.log_types """ return self.execute(Command.GET_AVAILABLE_LOG_TYPES)['value'] if self.w3c else [] def get_log(self, log_type): """ Gets the log for a given log type :Args: - log_type: type of log that which will be returned :Usage: :: driver.get_log('browser') driver.get_log('driver') driver.get_log('client') driver.get_log('server') """ return self.execute(Command.GET_LOG, {'type': log_type})['value'] @asynccontextmanager async def add_js_error_listener(self): """ Listens for JS errors and when the contextmanager exits check if there were JS Errors :Usage: :: async with driver.add_js_error_listener() as error: driver.find_element(By.ID, "throwing-mouseover").click() assert error is not None assert error.exception_details.stack_trace.call_frames[0].function_name == "onmouseover" """ assert sys.version_info >= (3, 7) global cdp async with self._get_bidi_connection(): global devtools session = cdp.get_session_context('page.enable') await session.execute(devtools.page.enable()) session = cdp.get_session_context('runtime.enable') await session.execute(devtools.runtime.enable()) js_exception = devtools.runtime.ExceptionThrown(None, None) async with session.wait_for(devtools.runtime.ExceptionThrown) as exception: yield js_exception js_exception.timestamp = exception.value.timestamp js_exception.exception_details = exception.value.exception_details @asynccontextmanager async def add_listener(self, event_type): ''' Listens for certain events that are passed in. :Args: - event_type: The type of event that we want to look at. 
:Usage: :: async with driver.add_listener(Console.log) as messages: driver.execute_script("console.log('I like cheese')") assert messages["message"] == "I love cheese" ''' assert sys.version_info >= (3, 7) global cdp from selenium.webdriver.common.bidi.console import Console async with self._get_bidi_connection(): global devtools session = cdp.get_session_context('page.enable') await session.execute(devtools.page.enable()) session = cdp.get_session_context('runtime.enable') await session.execute(devtools.runtime.enable()) console = { "message": None, "level": None } async with session.wait_for(devtools.runtime.ConsoleAPICalled) as messages: yield console if event_type == Console.ERROR: console["message"] = messages.value.args[0].value console["level"] = messages.value.args[0].type_ if event_type == Console.ALL: console["message"] = messages.value.args[0].value console["level"] = messages.value.args[0].type_ @asynccontextmanager async def _get_bidi_connection(self): global cdp import_cdp() ws_url = None if self.caps.get("se:options"): ws_url = self.caps.get("se:options").get("cdp") else: version, ws_url = self._get_cdp_details() if ws_url is None: raise WebDriverException("Unable to find url to connect to from capabilities") cdp.import_devtools(version) global devtools devtools = importlib.import_module("selenium.webdriver.common.devtools.v{}".format(version)) async with cdp.open_cdp(ws_url) as conn: targets = await conn.execute(devtools.target.get_targets()) target_id = targets[0].target_id async with conn.open_session(target_id) as session: yield session def _get_cdp_details(self): import json import urllib3 http = urllib3.PoolManager() if self.caps.get("browserName") == "chrome": debugger_address = self.caps.get(f"{self.vendor_prefix}:{self.caps.get('browserName')}Options").get("debuggerAddress") else: debugger_address = self.caps.get("moz:debuggerAddress") res = http.request('GET', f"http://{debugger_address}/json/version") data = json.loads(res.data) 
browser_version = data.get("Browser") websocket_url = data.get("webSocketDebuggerUrl") import re version = re.search(r".*/(\d+)\.", browser_version).group(1) return version, websocket_url
from typing import Any, Dict, Mapping, Optional, Text, Union

from django.conf import settings
from django.utils.timezone import now as timezone_now
from django.utils.translation import ugettext as _
from django.contrib.sessions.models import Session as djSession
try:
    from django.middleware.csrf import _compare_salted_tokens
except ImportError:
    # This function was added in Django 1.10.
    def _compare_salted_tokens(token1: str, token2: str) -> bool:
        return token1 == token2

import sockjs.tornado
from sockjs.tornado.session import ConnectionInfo
import tornado.ioloop
import ujson
import logging
import time

from zerver.models import UserProfile, get_user_profile_by_id, get_client
from zerver.lib.queue import queue_json_publish
from zerver.lib.actions import check_send_message, extract_recipients
from zerver.decorator import JsonableError
from zerver.middleware import record_request_start_data, record_request_stop_data, \
    record_request_restart_data, write_log_line, format_timedelta
from zerver.lib.redis_utils import get_redis_client
from zerver.lib.sessions import get_session_user
from zerver.tornado.event_queue import get_client_descriptor
from zerver.tornado.exceptions import BadEventQueueIdError

logger = logging.getLogger('zulip.socket')

def get_user_profile(session_id: Optional[Text]) -> Optional[UserProfile]:
    # Resolve a Django session cookie value to the logged-in UserProfile.
    # Returns None for a missing/expired session or an unknown user.
    if session_id is None:
        return None

    try:
        djsession = djSession.objects.get(expire_date__gt=timezone_now(),
                                          session_key=session_id)
    except djSession.DoesNotExist:
        return None

    try:
        return get_user_profile_by_id(get_session_user(djsession))
    except (UserProfile.DoesNotExist, KeyError):
        return None

# Active socket connections, keyed by the event-queue id they
# authenticated with (see register_connection).
connections = dict()  # type: Dict[Union[int, str], 'SocketConnection']

def get_connection(id: Union[int, str]) -> Optional['SocketConnection']:
    return connections.get(id)

def register_connection(id: Union[int, str], conn: 'SocketConnection') -> None:
    # Kill any old connections if they exist
    if id in connections:
        connections[id].close()

    conn.client_id = id
    connections[conn.client_id] = conn

def deregister_connection(conn: 'SocketConnection') -> None:
    assert conn.client_id is not None
    del connections[conn.client_id]

redis_client = get_redis_client()

def req_redis_key(req_id: Text) -> Text:
    # Redis key under which the processing status of a request is tracked.
    return u'socket_req_status:%s' % (req_id,)

class CloseErrorInfo:
    # Records why a connection is being closed, so on_close() can log it.
    def __init__(self, status_code: int, err_msg: str) -> None:
        self.status_code = status_code
        self.err_msg = err_msg

class SocketConnection(sockjs.tornado.SockJSConnection):
    # Event-queue id this connection authenticated as; set by
    # register_connection() once authentication succeeds.
    client_id = None  # type: Optional[Union[int, str]]

    def on_open(self, info: ConnectionInfo) -> None:
        log_data = dict(extra='[transport=%s]' % (self.session.transport_name,))
        record_request_start_data(log_data)

        ioloop = tornado.ioloop.IOLoop.instance()

        self.authenticated = False
        self.session.user_profile = None
        self.close_info = None  # type: Optional[CloseErrorInfo]
        self.did_close = False

        try:
            self.browser_session_id = info.get_cookie(settings.SESSION_COOKIE_NAME).value
            self.csrf_token = info.get_cookie(settings.CSRF_COOKIE_NAME).value
        except AttributeError:
            # The request didn't contain the necessary cookie values. We can't
            # close immediately because sockjs-tornado doesn't expect a close
            # inside on_open(), so do it on the next tick.
            self.close_info = CloseErrorInfo(403, "Initial cookie lacked required values")
            ioloop.add_callback(self.close)
            return

        def auth_timeout() -> None:
            self.close_info = CloseErrorInfo(408, "Timeout while waiting for authentication")
            self.close()

        # Give the client 10 seconds to authenticate before disconnecting it;
        # the timeout is cancelled in authenticate_client() on success.
        self.timeout_handle = ioloop.call_later(10, auth_timeout)
        write_log_line(log_data, path='/socket/open', method='SOCKET',
                       remote_ip=info.ip, email='unknown', client_name='?')

    def authenticate_client(self, msg: Dict[str, Any]) -> None:
        # Validate the client's session cookie, CSRF token, and event-queue
        # ownership; on success, register this connection under the queue id.
        if self.authenticated:
            self.session.send_message({'req_id': msg['req_id'],
                                       'type': 'response',
                                       'response': {'result': 'error',
                                                    'msg': 'Already authenticated'}})
            return

        user_profile = get_user_profile(self.browser_session_id)
        if user_profile is None:
            raise JsonableError(_('Unknown or missing session'))
        self.session.user_profile = user_profile

        if 'csrf_token' not in msg['request']:
            # Debugging code to help with understanding #6961
            logging.error("Invalid websockets auth request: %s" % (msg['request'],))
            raise JsonableError(_('CSRF token entry missing from request'))
        if not _compare_salted_tokens(msg['request']['csrf_token'], self.csrf_token):
            raise JsonableError(_('CSRF token does not match that in cookie'))

        if 'queue_id' not in msg['request']:
            raise JsonableError(_("Missing 'queue_id' argument"))

        queue_id = msg['request']['queue_id']
        client = get_client_descriptor(queue_id)
        if client is None:
            raise BadEventQueueIdError(queue_id)
        if user_profile.id != client.user_profile_id:
            raise JsonableError(_("You are not the owner of the queue with id '%s'") % (queue_id,))

        self.authenticated = True
        register_connection(queue_id, self)

        response = {'req_id': msg['req_id'],
                    'type': 'response',
                    'response': {'result': 'success', 'msg': ''}}

        # Report back the redis-tracked processing status of any requests the
        # (possibly reconnecting) client asked about.
        status_inquiries = msg['request'].get('status_inquiries')
        if status_inquiries is not None:
            results = {}  # type: Dict[str, Dict[str, str]]
            for inquiry in status_inquiries:
                status = redis_client.hgetall(req_redis_key(inquiry))  # type: Dict[bytes, bytes]
                if len(status) == 0:
                    result = {'status': 'not_received'}
                elif b'response' not in status:
                    result = {'status': status[b'status'].decode('utf-8')}
                else:
                    result = {'status': status[b'status'].decode('utf-8'),
                              'response': ujson.loads(status[b'response'])}
                results[str(inquiry)] = result
            response['response']['status_inquiries'] = results

        self.session.send_message(response)
        # Authentication succeeded; cancel the auth_timeout scheduled in on_open().
        ioloop = tornado.ioloop.IOLoop.instance()
        ioloop.remove_timeout(self.timeout_handle)

    def on_message(self, msg_raw: str) -> None:
        # NOTE: '[transport=...' is deliberately left unclosed here; the
        # matching ']' (plus timing info) is appended to 'extra' later.
        log_data = dict(extra='[transport=%s' % (self.session.transport_name,))
        record_request_start_data(log_data)
        msg = ujson.loads(msg_raw)

        if self.did_close:
            user_email = 'unknown'
            if self.session.user_profile is not None:
                user_email = self.session.user_profile.email
            logger.info("Received message on already closed socket! transport=%s user=%s client_id=%s"
                        % (self.session.transport_name, user_email, self.client_id))

        # Acknowledge receipt immediately, before doing any processing.
        self.session.send_message({'req_id': msg['req_id'], 'type': 'ack'})

        if msg['type'] == 'auth':
            log_data['extra'] += ']'
            try:
                self.authenticate_client(msg)
                # TODO: Fill in the correct client
                write_log_line(log_data, path='/socket/auth', method='SOCKET',
                               remote_ip=self.session.conn_info.ip,
                               email=self.session.user_profile.email,
                               client_name='?')
            except JsonableError as e:
                response = e.to_json()
                self.session.send_message({'req_id': msg['req_id'],
                                           'type': 'response',
                                           'response': response})
                write_log_line(log_data, path='/socket/auth', method='SOCKET',
                               remote_ip=self.session.conn_info.ip,
                               email='unknown', client_name='?',
                               status_code=403, error_content=ujson.dumps(response))
            return
        else:
            if not self.authenticated:
                response = {'result': 'error', 'msg': "Not yet authenticated"}
                self.session.send_message({'req_id': msg['req_id'],
                                           'type': 'response',
                                           'response': response})
                write_log_line(log_data, path='/socket/service_request', method='SOCKET',
                               remote_ip=self.session.conn_info.ip,
                               email='unknown', client_name='?',
                               status_code=403, error_content=ujson.dumps(response))
                return

        # Mark the request as received in redis (so status_inquiries can find
        # it after a reconnect); expire the key after a day.
        redis_key = req_redis_key(msg['req_id'])
        with redis_client.pipeline() as pipeline:
            pipeline.hmset(redis_key, {'status': 'received'})
            pipeline.expire(redis_key, 60 * 60 * 24)
            pipeline.execute()

        record_request_stop_data(log_data)
        request_environ = dict(REMOTE_ADDR=self.session.conn_info.ip)

        # Hand the actual work off to the message_sender queue worker; the
        # response comes back via respond_send_message() below.
        queue_json_publish("message_sender",
                           dict(request=msg['request'],
                                req_id=msg['req_id'],
                                server_meta=dict(user_id=self.session.user_profile.id,
                                                 client_id=self.client_id,
                                                 return_queue="tornado_return",
                                                 log_data=log_data,
                                                 request_environ=request_environ)))

    def on_close(self) -> None:
        log_data = dict(extra='[transport=%s]' % (self.session.transport_name,))
        record_request_start_data(log_data)
        if self.close_info is not None:
            # Server-initiated close (bad cookies or auth timeout).
            write_log_line(log_data, path='/socket/close', method='SOCKET',
                           remote_ip=self.session.conn_info.ip, email='unknown',
                           client_name='?', status_code=self.close_info.status_code,
                           error_content=self.close_info.err_msg)
        else:
            deregister_connection(self)
            email = self.session.user_profile.email \
                if self.session.user_profile is not None else 'unknown'
            write_log_line(log_data, path='/socket/close', method='SOCKET',
                           remote_ip=self.session.conn_info.ip, email=email,
                           client_name='?')

        self.did_close = True

def respond_send_message(data: Mapping[str, Any]) -> None:
    # Deliver the queue worker's response for a message-send request back over
    # the originating socket connection, recording queueing/service latencies.
    log_data = data['server_meta']['log_data']
    record_request_restart_data(log_data)

    worker_log_data = data['server_meta']['worker_log_data']
    forward_queue_delay = worker_log_data['time_started'] - log_data['time_stopped']
    return_queue_delay = log_data['time_restarted'] - data['server_meta']['time_request_finished']
    service_time = data['server_meta']['time_request_finished'] - worker_log_data['time_started']
    # This closes the '[transport=...' bracket left open in on_message().
    log_data['extra'] += ', queue_delay: %s/%s, service_time: %s]' % (
        format_timedelta(forward_queue_delay), format_timedelta(return_queue_delay),
        format_timedelta(service_time))

    client_id = data['server_meta']['client_id']
    connection = get_connection(client_id)
    if connection is None:
        # The client disconnected while the worker was processing the request.
        logger.info("Could not find connection to send response to! client_id=%s" % (client_id,))
    else:
        connection.session.send_message({'req_id': data['req_id'],
                                         'type': 'response',
                                         'response': data['response']})

        # TODO: Fill in client name
        # TODO: Maybe fill in the status code correctly
        write_log_line(log_data, path='/socket/service_request', method='SOCKET',
                       remote_ip=connection.session.conn_info.ip,
                       email=connection.session.user_profile.email, client_name='?')

# We disable the eventsource and htmlfile transports because they cannot
# securely send us the zulip.com cookie, which we use as part of our
# authentication scheme.
sockjs_url = '%s/static/third/sockjs/sockjs-0.3.4.js' % (settings.ROOT_DOMAIN_URI,)
sockjs_router = sockjs.tornado.SockJSRouter(SocketConnection, "/sockjs",
                                            {'sockjs_url': sockjs_url,
                                             'disabled_transports': ['eventsource', 'htmlfile']})

def get_sockjs_router() -> sockjs.tornado.SockJSRouter:
    return sockjs_router
from websockets.datastructures import Headers
from websockets.exceptions import SecurityError
from websockets.http11 import *
from websockets.http11 import parse_headers
from websockets.streams import StreamReader

from .utils import GeneratorTestCase


class RequestTests(GeneratorTestCase):
    """Tests for parsing and serializing HTTP/1.1 requests."""

    def setUp(self):
        super().setUp()
        self.reader = StreamReader()

    def parse(self):
        # Request.parse is a generator; tests drive it with the
        # GeneratorTestCase helpers or next().
        return Request.parse(self.reader.read_line)

    def test_parse(self):
        # Example from the protocol overview in RFC 6455
        self.reader.feed_data(
            b"GET /chat HTTP/1.1\r\n"
            b"Host: server.example.com\r\n"
            b"Upgrade: websocket\r\n"
            b"Connection: Upgrade\r\n"
            b"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n"
            b"Origin: http://example.com\r\n"
            b"Sec-WebSocket-Protocol: chat, superchat\r\n"
            b"Sec-WebSocket-Version: 13\r\n"
            b"\r\n"
        )
        request = self.assertGeneratorReturns(self.parse())
        self.assertEqual(request.path, "/chat")
        self.assertEqual(request.headers["Upgrade"], "websocket")

    def test_parse_empty(self):
        self.reader.feed_eof()
        with self.assertRaises(EOFError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "connection closed while reading HTTP request line",
        )

    def test_parse_invalid_request_line(self):
        # Missing HTTP version in the request line.
        self.reader.feed_data(b"GET /\r\n\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "invalid HTTP request line: GET /",
        )

    def test_parse_unsupported_method(self):
        # Only GET is meaningful for a websocket handshake.
        self.reader.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "unsupported HTTP method: OPTIONS",
        )

    def test_parse_unsupported_version(self):
        self.reader.feed_data(b"GET /chat HTTP/1.0\r\n\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "unsupported HTTP version: HTTP/1.0",
        )

    def test_parse_invalid_header(self):
        self.reader.feed_data(b"GET /chat HTTP/1.1\r\nOops\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "invalid HTTP header line: Oops",
        )

    def test_parse_body(self):
        # Handshake requests with a body are rejected.
        self.reader.feed_data(b"GET / HTTP/1.1\r\nContent-Length: 3\r\n\r\nYo\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "unsupported request body",
        )

    def test_parse_body_with_transfer_encoding(self):
        self.reader.feed_data(b"GET / HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n")
        with self.assertRaises(NotImplementedError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "transfer codings aren't supported",
        )

    def test_serialize(self):
        # Example from the protocol overview in RFC 6455
        request = Request(
            "/chat",
            Headers(
                [
                    ("Host", "server.example.com"),
                    ("Upgrade", "websocket"),
                    ("Connection", "Upgrade"),
                    ("Sec-WebSocket-Key", "dGhlIHNhbXBsZSBub25jZQ=="),
                    ("Origin", "http://example.com"),
                    ("Sec-WebSocket-Protocol", "chat, superchat"),
                    ("Sec-WebSocket-Version", "13"),
                ]
            ),
        )
        self.assertEqual(
            request.serialize(),
            b"GET /chat HTTP/1.1\r\n"
            b"Host: server.example.com\r\n"
            b"Upgrade: websocket\r\n"
            b"Connection: Upgrade\r\n"
            b"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n"
            b"Origin: http://example.com\r\n"
            b"Sec-WebSocket-Protocol: chat, superchat\r\n"
            b"Sec-WebSocket-Version: 13\r\n"
            b"\r\n",
        )


class ResponseTests(GeneratorTestCase):
    """Tests for parsing and serializing HTTP/1.1 responses."""

    def setUp(self):
        super().setUp()
        self.reader = StreamReader()

    def parse(self):
        # Response.parse also needs read_exact / read_to_eof because
        # responses may carry a body.
        return Response.parse(
            self.reader.read_line,
            self.reader.read_exact,
            self.reader.read_to_eof,
        )

    def test_parse(self):
        # Example from the protocol overview in RFC 6455
        self.reader.feed_data(
            b"HTTP/1.1 101 Switching Protocols\r\n"
            b"Upgrade: websocket\r\n"
            b"Connection: Upgrade\r\n"
            b"Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n"
            b"Sec-WebSocket-Protocol: chat\r\n"
            b"\r\n"
        )
        response = self.assertGeneratorReturns(self.parse())
        self.assertEqual(response.status_code, 101)
        self.assertEqual(response.reason_phrase, "Switching Protocols")
        self.assertEqual(response.headers["Upgrade"], "websocket")
        self.assertIsNone(response.body)

    def test_parse_empty(self):
        self.reader.feed_eof()
        with self.assertRaises(EOFError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "connection closed while reading HTTP status line",
        )

    def test_parse_invalid_status_line(self):
        self.reader.feed_data(b"Hello!\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "invalid HTTP status line: Hello!",
        )

    def test_parse_unsupported_version(self):
        self.reader.feed_data(b"HTTP/1.0 400 Bad Request\r\n\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "unsupported HTTP version: HTTP/1.0",
        )

    def test_parse_invalid_status(self):
        # Status code isn't numeric.
        self.reader.feed_data(b"HTTP/1.1 OMG WTF\r\n\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "invalid HTTP status code: OMG",
        )

    def test_parse_unsupported_status(self):
        # Numeric but outside the supported range.
        self.reader.feed_data(b"HTTP/1.1 007 My name is Bond\r\n\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "unsupported HTTP status code: 007",
        )

    def test_parse_invalid_reason(self):
        # Control characters aren't allowed in the reason phrase.
        self.reader.feed_data(b"HTTP/1.1 200 \x7f\r\n\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "invalid HTTP reason phrase: \x7f",
        )

    def test_parse_invalid_header(self):
        self.reader.feed_data(b"HTTP/1.1 500 Internal Server Error\r\nOops\r\n")
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "invalid HTTP header line: Oops",
        )

    def test_parse_body_with_content_length(self):
        self.reader.feed_data(
            b"HTTP/1.1 200 OK\r\nContent-Length: 13\r\n\r\nHello world!\n"
        )
        response = self.assertGeneratorReturns(self.parse())
        self.assertEqual(response.body, b"Hello world!\n")

    def test_parse_body_without_content_length(self):
        # Without Content-Length the body extends until EOF, so the parser
        # stays suspended until feed_eof().
        self.reader.feed_data(b"HTTP/1.1 200 OK\r\n\r\nHello world!\n")
        gen = self.parse()
        self.assertGeneratorRunning(gen)
        self.reader.feed_eof()
        response = self.assertGeneratorReturns(gen)
        self.assertEqual(response.body, b"Hello world!\n")

    def test_parse_body_with_content_length_too_long(self):
        # One byte over the 1 MiB anti-DoS limit.
        self.reader.feed_data(b"HTTP/1.1 200 OK\r\nContent-Length: 1048577\r\n\r\n")
        with self.assertRaises(SecurityError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "body too large: 1048577 bytes",
        )

    def test_parse_body_without_content_length_too_long(self):
        self.reader.feed_data(b"HTTP/1.1 200 OK\r\n\r\n" + b"a" * 1048577)
        with self.assertRaises(SecurityError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "body too large: over 1048576 bytes",
        )

    def test_parse_body_with_transfer_encoding(self):
        self.reader.feed_data(b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n")
        with self.assertRaises(NotImplementedError) as raised:
            next(self.parse())
        self.assertEqual(
            str(raised.exception),
            "transfer codings aren't supported",
        )

    def test_parse_body_no_content(self):
        # 204 responses never carry a body.
        self.reader.feed_data(b"HTTP/1.1 204 No Content\r\n\r\n")
        response = self.assertGeneratorReturns(self.parse())
        self.assertIsNone(response.body)

    def test_parse_body_not_modified(self):
        # 304 responses never carry a body.
        self.reader.feed_data(b"HTTP/1.1 304 Not Modified\r\n\r\n")
        response = self.assertGeneratorReturns(self.parse())
        self.assertIsNone(response.body)

    def test_serialize(self):
        # Example from the protocol overview in RFC 6455
        response = Response(
            101,
            "Switching Protocols",
            Headers(
                [
                    ("Upgrade", "websocket"),
                    ("Connection", "Upgrade"),
                    ("Sec-WebSocket-Accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="),
                    ("Sec-WebSocket-Protocol", "chat"),
                ]
            ),
        )
        self.assertEqual(
            response.serialize(),
            b"HTTP/1.1 101 Switching Protocols\r\n"
            b"Upgrade: websocket\r\n"
            b"Connection: Upgrade\r\n"
            b"Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n"
            b"Sec-WebSocket-Protocol: chat\r\n"
            b"\r\n",
        )

    def test_serialize_with_body(self):
        response = Response(
            200,
            "OK",
            Headers([("Content-Length", "13"), ("Content-Type", "text/plain")]),
            b"Hello world!\n",
        )
        self.assertEqual(
            response.serialize(),
            b"HTTP/1.1 200 OK\r\n"
            b"Content-Length: 13\r\n"
            b"Content-Type: text/plain\r\n"
            b"\r\n"
            b"Hello world!\n",
        )


class HeadersTests(GeneratorTestCase):
    """Tests for low-level header-block parsing."""

    def setUp(self):
        super().setUp()
        self.reader = StreamReader()

    def parse_headers(self):
        return parse_headers(self.reader.read_line)

    def test_parse_invalid_name(self):
        # Spaces aren't allowed in header names.
        self.reader.feed_data(b"foo bar: baz qux\r\n\r\n")
        with self.assertRaises(ValueError):
            next(self.parse_headers())

    def test_parse_invalid_value(self):
        # Control characters aren't allowed in header values.
        self.reader.feed_data(b"foo: \x00\x00\x0f\r\n\r\n")
        with self.assertRaises(ValueError):
            next(self.parse_headers())

    def test_parse_too_long_value(self):
        # 257 header lines exceeds the anti-DoS limit on header count.
        self.reader.feed_data(b"foo: bar\r\n" * 257 + b"\r\n")
        with self.assertRaises(SecurityError):
            next(self.parse_headers())

    def test_parse_too_long_line(self):
        # Header line contains 5 + 4105 + 2 = 4112 bytes.
        self.reader.feed_data(b"foo: " + b"a" * 4105 + b"\r\n\r\n")
        with self.assertRaises(SecurityError):
            next(self.parse_headers())

    def test_parse_invalid_line_ending(self):
        # Bare LF line endings are rejected (read_line never finds CRLF).
        self.reader.feed_data(b"foo: bar\n\n")
        with self.assertRaises(EOFError):
            next(self.parse_headers())
import json

import pytest

from zarr.attrs import Attributes
from zarr.tests.util import CountingDict
from zarr.storage import KVStore


class TestAttributes():
    """Tests for zarr.attrs.Attributes: a dict-like view over a JSON
    document stored under a single store key ('attrs' here)."""

    def init_attributes(self, store, read_only=False, cache=True):
        # Helper: build an Attributes object bound to `store` under key 'attrs'.
        return Attributes(store, key='attrs', read_only=read_only, cache=cache)

    @pytest.mark.parametrize('store_from_dict', [False, True])
    def test_storage(self, store_from_dict):
        # A plain dict store must be wrapped into a KVStore automatically.
        if store_from_dict:
            store = dict()
        else:
            store = KVStore(dict())
        a = Attributes(store=store, key='attrs')
        assert isinstance(a.store, KVStore)
        assert 'foo' not in a
        assert 'bar' not in a
        assert dict() == a.asdict()
        a['foo'] = 'bar'
        a['baz'] = 42
        # Attributes are persisted as ASCII-encoded JSON under the key.
        assert 'attrs' in store
        assert isinstance(store['attrs'], bytes)
        d = json.loads(str(store['attrs'], 'ascii'))
        assert dict(foo='bar', baz=42) == d

    def test_get_set_del_contains(self):
        a = self.init_attributes(dict())
        assert 'foo' not in a
        a['foo'] = 'bar'
        a['baz'] = 42
        assert 'foo' in a
        assert 'baz' in a
        assert 'bar' == a['foo']
        assert 42 == a['baz']
        del a['foo']
        assert 'foo' not in a
        with pytest.raises(KeyError):
            # noinspection PyStatementEffect
            a['foo']

    def test_update_put(self):
        a = self.init_attributes(dict())
        assert 'foo' not in a
        assert 'bar' not in a
        assert 'baz' not in a

        # update() merges into the existing attributes.
        a.update(foo='spam', bar=42, baz=4.2)
        assert a['foo'] == 'spam'
        assert a['bar'] == 42
        assert a['baz'] == 4.2

        # put() replaces the whole mapping, dropping keys not supplied.
        a.put(dict(foo='eggs', bar=84))
        assert a['foo'] == 'eggs'
        assert a['bar'] == 84
        assert 'baz' not in a

    def test_iterators(self):
        a = self.init_attributes(dict())
        assert 0 == len(a)
        assert set() == set(a)
        assert set() == set(a.keys())
        assert set() == set(a.values())
        assert set() == set(a.items())
        a['foo'] = 'bar'
        a['baz'] = 42
        assert 2 == len(a)
        assert {'foo', 'baz'} == set(a)
        assert {'foo', 'baz'} == set(a.keys())
        assert {'bar', 42} == set(a.values())
        assert {('foo', 'bar'), ('baz', 42)} == set(a.items())

    def test_read_only(self):
        store = dict()
        a = self.init_attributes(store, read_only=True)
        store['attrs'] = json.dumps(dict(foo='bar', baz=42)).encode('ascii')
        # Reading works on a read-only view...
        assert a['foo'] == 'bar'
        assert a['baz'] == 42
        # ...but every mutating operation must be rejected.
        with pytest.raises(PermissionError):
            a['foo'] = 'quux'
        with pytest.raises(PermissionError):
            del a['foo']
        with pytest.raises(PermissionError):
            a.update(foo='quux')

    def test_key_completions(self):
        # _ipython_key_completions_ powers IPython's a["<TAB>" completion.
        a = self.init_attributes(dict())
        d = a._ipython_key_completions_()
        assert 'foo' not in d
        assert '123' not in d
        assert 'baz' not in d
        assert 'asdf;' not in d
        a['foo'] = 42
        a['123'] = 4.2
        a['asdf;'] = 'ghjkl;'
        d = a._ipython_key_completions_()
        assert 'foo' in d
        assert '123' in d
        assert 'asdf;' in d
        assert 'baz' not in d

    def test_caching_on(self):
        # caching is turned on by default
        # CountingDict counts store accesses per (method, key), letting the
        # assertions below verify exactly when the store is hit.

        # setup store
        store = CountingDict()
        assert 0 == store.counter['__getitem__', 'attrs']
        assert 0 == store.counter['__setitem__', 'attrs']
        store['attrs'] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii')
        assert 0 == store.counter['__getitem__', 'attrs']
        assert 1 == store.counter['__setitem__', 'attrs']

        # setup attributes
        a = self.init_attributes(store)

        # test __getitem__ causes all attributes to be cached
        assert a['foo'] == 'xxx'
        assert 1 == store.counter['__getitem__', 'attrs']
        assert a['bar'] == 42
        assert 1 == store.counter['__getitem__', 'attrs']
        assert a['foo'] == 'xxx'
        assert 1 == store.counter['__getitem__', 'attrs']

        # test __setitem__ updates the cache
        a['foo'] = 'yyy'
        assert 2 == store.counter['__getitem__', 'attrs']
        assert 2 == store.counter['__setitem__', 'attrs']
        assert a['foo'] == 'yyy'
        assert 2 == store.counter['__getitem__', 'attrs']
        assert 2 == store.counter['__setitem__', 'attrs']

        # test update() updates the cache
        a.update(foo='zzz', bar=84)
        assert 3 == store.counter['__getitem__', 'attrs']
        assert 3 == store.counter['__setitem__', 'attrs']
        assert a['foo'] == 'zzz'
        assert a['bar'] == 84
        assert 3 == store.counter['__getitem__', 'attrs']
        assert 3 == store.counter['__setitem__', 'attrs']

        # test __contains__ uses the cache
        assert 'foo' in a
        assert 3 == store.counter['__getitem__', 'attrs']
        assert 3 == store.counter['__setitem__', 'attrs']
        assert 'spam' not in a
        assert 3 == store.counter['__getitem__', 'attrs']
        assert 3 == store.counter['__setitem__', 'attrs']

        # test __delitem__ updates the cache
        del a['bar']
        assert 4 == store.counter['__getitem__', 'attrs']
        assert 4 == store.counter['__setitem__', 'attrs']
        assert 'bar' not in a
        assert 4 == store.counter['__getitem__', 'attrs']
        assert 4 == store.counter['__setitem__', 'attrs']

        # test refresh()
        store['attrs'] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii')
        assert 4 == store.counter['__getitem__', 'attrs']
        a.refresh()
        assert 5 == store.counter['__getitem__', 'attrs']
        assert a['foo'] == 'xxx'
        assert 5 == store.counter['__getitem__', 'attrs']
        assert a['bar'] == 42
        assert 5 == store.counter['__getitem__', 'attrs']

    def test_caching_off(self):
        # With cache=False every read goes straight to the store.

        # setup store
        store = CountingDict()
        assert 0 == store.counter['__getitem__', 'attrs']
        assert 0 == store.counter['__setitem__', 'attrs']
        store['attrs'] = json.dumps(dict(foo='xxx', bar=42)).encode('ascii')
        assert 0 == store.counter['__getitem__', 'attrs']
        assert 1 == store.counter['__setitem__', 'attrs']

        # setup attributes
        a = self.init_attributes(store, cache=False)

        # test __getitem__
        assert a['foo'] == 'xxx'
        assert 1 == store.counter['__getitem__', 'attrs']
        assert a['bar'] == 42
        assert 2 == store.counter['__getitem__', 'attrs']
        assert a['foo'] == 'xxx'
        assert 3 == store.counter['__getitem__', 'attrs']

        # test __setitem__
        a['foo'] = 'yyy'
        assert 4 == store.counter['__getitem__', 'attrs']
        assert 2 == store.counter['__setitem__', 'attrs']
        assert a['foo'] == 'yyy'
        assert 5 == store.counter['__getitem__', 'attrs']
        assert 2 == store.counter['__setitem__', 'attrs']

        # test update()
        a.update(foo='zzz', bar=84)
        assert 6 == store.counter['__getitem__', 'attrs']
        assert 3 == store.counter['__setitem__', 'attrs']
        assert a['foo'] == 'zzz'
        assert a['bar'] == 84
        assert 8 == store.counter['__getitem__', 'attrs']
        assert 3 == store.counter['__setitem__', 'attrs']

        # test __contains__
        assert 'foo' in a
        assert 9 == store.counter['__getitem__', 'attrs']
        assert 3 == store.counter['__setitem__', 'attrs']
        assert 'spam' not in a
        assert 10 == store.counter['__getitem__', 'attrs']
        assert 3 == store.counter['__setitem__', 'attrs']
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

__metaclass__ = type

ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}

DOCUMENTATION = '''
---
module: fortios_router_access_list6
short_description: Configure IPv6 access lists in Fortinet's FortiOS and FortiGate.
description:
    - This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
      user to set and modify router feature and access_list6 category.
      Examples include all parameters and values need to be adjusted to datasources before usage.
      Tested with FOS v6.0.5
version_added: "2.9"
author:
    - Miguel Angel Munoz (@mamunozgonzalez)
    - Nicolas Thomas (@thomnico)
notes:
    - Requires fortiosapi library developed by Fortinet
    - Run as a local_action in your playbook
requirements:
    - fortiosapi>=0.9.8
options:
    host:
        description:
            - FortiOS or FortiGate IP address.
        type: str
        required: false
    username:
        description:
            - FortiOS or FortiGate username.
        type: str
        required: false
    password:
        description:
            - FortiOS or FortiGate password.
        type: str
        default: ""
    vdom:
        description:
            - Virtual domain, among those defined previously. A vdom is a
              virtual instance of the FortiGate that can be configured and
              used as a different unit.
        type: str
        default: root
    https:
        description:
            - Indicates if the requests towards FortiGate must use HTTPS protocol.
        type: bool
        default: true
    ssl_verify:
        description:
            - Ensures FortiGate certificate must be verified by a proper CA.
        type: bool
        default: true
    state:
        description:
            - Indicates whether to create or remove the object.
        type: str
        required: true
        choices:
            - present
            - absent
    router_access_list6:
        description:
            - Configure IPv6 access lists.
        default: null
        type: dict
        suboptions:
            comments:
                description:
                    - Comment.
                type: str
            name:
                description:
                    - Name.
                required: true
                type: str
            rule:
                description:
                    - Rule.
                type: list
                suboptions:
                    action:
                        description:
                            - Permit or deny this IP address and netmask prefix.
                        type: str
                        choices:
                            - permit
                            - deny
                    exact_match:
                        description:
                            - Enable/disable exact prefix match.
                        type: str
                        choices:
                            - enable
                            - disable
                    flags:
                        description:
                            - Flags.
                        type: int
                    id:
                        description:
                            - Rule ID.
                        required: true
                        type: int
                    prefix6:
                        description:
                            - IPv6 prefix to define regular filter criteria, such as "any" or subnets.
                        type: str
'''

EXAMPLES = '''
- hosts: localhost
  vars:
   host: "192.168.122.40"
   username: "admin"
   password: ""
   vdom: "root"
   ssl_verify: "False"
  tasks:
  - name: Configure IPv6 access lists.
    fortios_router_access_list6:
      host:  "{{ host }}"
      username: "{{ username }}"
      password: "{{ password }}"
      vdom:  "{{ vdom }}"
      https: "False"
      state: "present"
      router_access_list6:
        comments: "<your_own_value>"
        name: "default_name_4"
        rule:
         -
            action: "permit"
            exact_match: "enable"
            flags: "8"
            id:  "9"
            prefix6: "<your_own_value>"
'''

RETURN = '''
build:
  description: Build number of the fortigate image
  returned: always
  type: str
  sample: '1547'
http_method:
  description: Last method used to provision the content into FortiGate
  returned: always
  type: str
  sample: 'PUT'
http_status:
  description: Last result given by FortiGate on last operation applied
  returned: always
  type: str
  sample: "200"
mkey:
  description: Master key (id) used in the last call to FortiGate
  returned: success
  type: str
  sample: "id"
name:
  description: Name of the table used to fulfill the request
  returned: always
  type: str
  sample: "urlfilter"
path:
  description: Path of the table used to fulfill the request
  returned: always
  type: str
  sample: "webfilter"
revision:
  description: Internal revision number
  returned: always
  type: str
  sample: "17.0.2.10658"
serial:
  description: Serial number of the unit
  returned: always
  type: str
  sample: "FGVMEVYYQT3AB5352"
status:
  description: Indication of the operation's result
  returned: always
  type: str
  sample: "success"
vdom:
  description: Virtual domain used
  returned: always
  type: str
  sample: "root"
version:
  description: Version of the FortiGate
  returned: always
  type: str
  sample: "v5.6.3"
'''

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG


def login(data, fos):
    """Authenticate against the FortiGate (legacy fortiosapi mode).

    Reads host/username/password/ssl_verify from the module parameters and
    configures HTTPS use before logging in.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']

    fos.debug('on')
    if 'https' in data and not data['https']:
        fos.https('off')
    else:
        fos.https('on')

    fos.login(host, username, password, verify=ssl_verify)


def filter_router_access_list6_data(json):
    """Return a dict containing only the keys the access-list6 API accepts.

    Keys whose value is None are dropped so they are not sent to the device.
    """
    option_list = ['comments', 'name', 'rule']
    dictionary = {}

    for attribute in option_list:
        if attribute in json and json[attribute] is not None:
            dictionary[attribute] = json[attribute]

    return dictionary


def underscore_to_hyphen(data):
    """Recursively convert underscore dict keys to hyphens (FOS API style).

    Returns the converted structure. Dicts are rebuilt with renamed keys;
    list elements are converted and written back in place.
    """
    if isinstance(data, list):
        # BUG FIX: the original did `elem = underscore_to_hyphen(elem)`,
        # which only rebound the loop variable — converted dicts returned by
        # the recursive call were discarded, so list payloads (e.g. the
        # dicts under `rule` with `exact_match`) kept their underscore keys.
        # Assign back by index so the list actually gets converted.
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
    elif isinstance(data, dict):
        new_data = {}
        for k, v in data.items():
            new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
        data = new_data

    return data


def router_access_list6(data, fos):
    """Create/update (state=present) or delete (state=absent) the list.

    Returns the raw FOS API response dict.
    """
    vdom = data['vdom']
    state = data['state']
    router_access_list6_data = data['router_access_list6']
    filtered_data = underscore_to_hyphen(
        filter_router_access_list6_data(router_access_list6_data))

    if state == "present":
        return fos.set('router',
                       'access-list6',
                       data=filtered_data,
                       vdom=vdom)

    elif state == "absent":
        return fos.delete('router',
                          'access-list6',
                          mkey=filtered_data['name'],
                          vdom=vdom)


def is_successful_status(status):
    """Treat DELETE-on-missing (HTTP 404) as success, like upstream does."""
    return status['status'] == "success" or \
        status['http_method'] == "DELETE" and status['http_status'] == 404


def fortios_router(data, fos):
    """Dispatch to the router endpoint; returns (is_error, changed, resp)."""
    if data['router_access_list6']:
        resp = router_access_list6(data, fos)

    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp


def main():
    """Module entry point: build the argument spec and run the request."""
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": True, "type": "str",
                  "choices": ["present", "absent"]},
        "router_access_list6": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "comments": {"required": False, "type": "str"},
                "name": {"required": True, "type": "str"},
                "rule": {"required": False, "type": "list",
                         "options": {
                             "action": {"required": False, "type": "str",
                                        "choices": ["permit", "deny"]},
                             "exact_match": {"required": False, "type": "str",
                                             "choices": ["enable", "disable"]},
                             "flags": {"required": False, "type": "int"},
                             "id": {"required": True, "type": "int"},
                             "prefix6": {"required": False, "type": "str"}
                         }}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None

    if not legacy_mode:
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_router(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_router(module.params, fos)
        fos.logout()

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
"""Populate measurement info """ # Author: Eric Larson <larson.eric.d<gmail.com> # # License: BSD (3-clause) from time import strptime from calendar import timegm import numpy as np from ...utils import logger, warn from ...transforms import (apply_trans, _coord_frame_name, invert_transform, combine_transforms) from ..meas_info import _empty_info from ..write import get_new_file_id from ..ctf_comp import _add_kind, _calibrate_comp from ..constants import FIFF from .constants import CTF def _pick_isotrak_and_hpi_coils(res4, coils, t): """Pick the HPI coil locations given in device coordinates""" if coils is None: return list(), list() dig = list() hpi_result = dict(dig_points=list()) n_coil_dev = 0 n_coil_head = 0 for p in coils: if p['valid']: if p['coord_frame'] == FIFF.FIFFV_MNE_COORD_CTF_DEVICE: if t is None or t['t_ctf_dev_dev'] is None: raise RuntimeError('No coordinate transformation ' 'available for HPI coil locations') d = dict(kind=FIFF.FIFFV_POINT_HPI, ident=p['kind'], r=apply_trans(t['t_ctf_dev_dev'], p['r']), coord_frame=FIFF.FIFFV_COORD_UNKNOWN) hpi_result['dig_points'].append(d) n_coil_dev += 1 elif p['coord_frame'] == FIFF.FIFFV_MNE_COORD_CTF_HEAD: if t is None or t['t_ctf_head_head'] is None: raise RuntimeError('No coordinate transformation ' 'available for (virtual) Polhemus data') d = dict(kind=FIFF.FIFFV_POINT_HPI, ident=p['kind'], r=apply_trans(t['t_ctf_head_head'], p['r']), coord_frame=FIFF.FIFFV_COORD_HEAD) dig.append(d) n_coil_head += 1 if n_coil_head > 0: logger.info(' Polhemus data for %d HPI coils added' % n_coil_head) if n_coil_dev > 0: logger.info(' Device coordinate locations for %d HPI coils added' % n_coil_dev) return dig, [hpi_result] def _convert_time(date_str, time_str): """Convert date and time strings to float time""" for fmt in ("%d/%m/%Y", "%d-%b-%Y", "%a, %b %d, %Y"): try: date = strptime(date_str, fmt) except ValueError: pass else: break else: raise RuntimeError( 'Illegal date: %s.\nIf the language of the date does not ' 
'correspond to your local machine\'s language try to set the ' 'locale to the language of the date string:\n' 'locale.setlocale(locale.LC_ALL, "en_US")' % date_str) for fmt in ('%H:%M:%S', '%H:%M'): try: time = strptime(time_str, fmt) except ValueError: pass else: break else: raise RuntimeError('Illegal time: %s' % time_str) # MNE-C uses mktime which uses local time, but here we instead decouple # conversion location from the process, and instead assume that the # acquisiton was in GMT. This will be wrong for most sites, but at least # the value we obtain here won't depend on the geographical location # that the file was converted. res = timegm((date.tm_year, date.tm_mon, date.tm_mday, time.tm_hour, time.tm_min, time.tm_sec, date.tm_wday, date.tm_yday, date.tm_isdst)) return res def _get_plane_vectors(ez): """Get two orthogonal vectors orthogonal to ez (ez will be modified)""" assert ez.shape == (3,) ez_len = np.sqrt(np.sum(ez * ez)) if ez_len == 0: raise RuntimeError('Zero length normal. Cannot proceed.') if np.abs(ez_len - np.abs(ez[2])) < 1e-5: # ez already in z-direction ex = np.array([1., 0., 0.]) else: ex = np.zeros(3) if ez[1] < ez[2]: ex[0 if ez[0] < ez[1] else 1] = 1. else: ex[0 if ez[0] < ez[2] else 2] = 1. 
ez /= ez_len ex -= np.dot(ez, ex) * ez ex /= np.sqrt(np.sum(ex * ex)) ey = np.cross(ez, ex) return ex, ey def _at_origin(x): """Determine if a vector is at the origin""" return (np.sum(x * x) < 1e-8) def _check_comp_ch(cch, kind, desired=None): if 'reference' in kind.lower(): if cch['grad_order_no'] != 0: raise RuntimeError('%s channel with non-zero compensation grade %s' % (kind, cch['grad_order_no'])) else: if desired is None: desired = cch['grad_order_no'] if cch['grad_order_no'] != desired: raise RuntimeError('%s channel with inconsistent compensation ' 'grade %s, should be %s' % (kind, cch['grad_order_no'], desired)) return desired def _convert_channel_info(res4, t, use_eeg_pos): """Convert CTF channel information to fif format""" nmeg = neeg = nstim = nmisc = nref = 0 chs = list() this_comp = None for k, cch in enumerate(res4['chs']): cal = float(1. / (cch['proper_gain'] * cch['qgain'])) ch = dict(scanno=k + 1, range=1., cal=cal, loc=np.zeros(12), unit_mul=FIFF.FIFF_UNITM_NONE, ch_name=cch['ch_name'][:15], coil_type=FIFF.FIFFV_COIL_NONE) del k chs.append(ch) # Create the channel position information pos = dict(r0=ch['loc'][:3], ex=ch['loc'][3:6], ey=ch['loc'][6:9], ez=ch['loc'][9:12]) if cch['sensor_type_index'] in (CTF.CTFV_REF_MAG_CH, CTF.CTFV_REF_GRAD_CH, CTF.CTFV_MEG_CH): # Extra check for a valid MEG channel if np.sum(cch['coil']['pos'][0] ** 2) < 1e-6 or \ np.sum(cch['coil']['norm'][0] ** 2) < 1e-6: nmisc += 1 ch.update(logno=nmisc, coord_frame=FIFF.FIFFV_COORD_UNKNOWN, kind=FIFF.FIFFV_MISC_CH, unit=FIFF.FIFF_UNIT_V) text = 'MEG' if cch['sensor_type_index'] != CTF.CTFV_MEG_CH: text += ' ref' warn('%s channel %s did not have position assigned, so ' 'it was changed to a MISC channel' % (text, ch['ch_name'])) continue ch['unit'] = FIFF.FIFF_UNIT_T # Set up the local coordinate frame pos['r0'][:] = cch['coil']['pos'][0] pos['ez'][:] = cch['coil']['norm'][0] # It turns out that positive proper_gain requires swapping # of the normal direction if 
cch['proper_gain'] > 0.0: pos['ez'] *= -1 # Check how the other vectors should be defined off_diag = False # Default: ex and ey are arbitrary in the plane normal to ez if cch['sensor_type_index'] == CTF.CTFV_REF_GRAD_CH: # The off-diagonal gradiometers are an exception: # # We use the same convention for ex as for Neuromag planar # gradiometers: ex pointing in the positive gradient direction diff = cch['coil']['pos'][0] - cch['coil']['pos'][1] size = np.sqrt(np.sum(diff * diff)) if size > 0.: diff /= size # Is ez normal to the line joining the coils? if np.abs(np.dot(diff, pos['ez'])) < 1e-3: off_diag = True # Handle the off-diagonal gradiometer coordinate system pos['r0'] -= size * diff / 2.0 pos['ex'][:] = diff pos['ey'][:] = np.cross(pos['ez'], pos['ex']) else: pos['ex'][:], pos['ey'][:] = _get_plane_vectors(pos['ez']) else: pos['ex'][:], pos['ey'][:] = _get_plane_vectors(pos['ez']) # Transform into a Neuromag-like device coordinate system pos['r0'][:] = apply_trans(t['t_ctf_dev_dev'], pos['r0']) for key in ('ex', 'ey', 'ez'): pos[key][:] = apply_trans(t['t_ctf_dev_dev'], pos[key], move=False) # Set the coil type if cch['sensor_type_index'] == CTF.CTFV_REF_MAG_CH: ch['kind'] = FIFF.FIFFV_REF_MEG_CH _check_comp_ch(cch, 'Reference magnetometer') ch['coil_type'] = FIFF.FIFFV_COIL_CTF_REF_MAG nref += 1 ch['logno'] = nref elif cch['sensor_type_index'] == CTF.CTFV_REF_GRAD_CH: ch['kind'] = FIFF.FIFFV_REF_MEG_CH if off_diag: _check_comp_ch(cch, 'Reference off-diagonal gradiometer') ch['coil_type'] = FIFF.FIFFV_COIL_CTF_OFFDIAG_REF_GRAD else: _check_comp_ch(cch, 'Reference gradiometer') ch['coil_type'] = FIFF.FIFFV_COIL_CTF_REF_GRAD nref += 1 ch['logno'] = nref else: this_comp = _check_comp_ch(cch, 'Gradiometer', this_comp) ch['kind'] = FIFF.FIFFV_MEG_CH ch['coil_type'] = FIFF.FIFFV_COIL_CTF_GRAD nmeg += 1 ch['logno'] = nmeg # Encode the software gradiometer order ch['coil_type'] = ch['coil_type'] | (cch['grad_order_no'] << 16) ch['coord_frame'] = 
FIFF.FIFFV_COORD_DEVICE elif cch['sensor_type_index'] == CTF.CTFV_EEG_CH: coord_frame = FIFF.FIFFV_COORD_HEAD if use_eeg_pos: # EEG electrode coordinates may be present but in the # CTF head frame pos['r0'][:] = cch['coil']['pos'][0] if not _at_origin(pos['r0']): if t['t_ctf_head_head'] is None: warn('EEG electrode (%s) location omitted because of ' 'missing HPI information' % ch['ch_name']) pos['r0'][:] = np.zeros(3) coord_frame = FIFF.FIFFV_COORD_CTF_HEAD else: pos['r0'][:] = apply_trans(t['t_ctf_head_head'], pos['r0']) neeg += 1 ch.update(logno=neeg, kind=FIFF.FIFFV_EEG_CH, unit=FIFF.FIFF_UNIT_V, coord_frame=coord_frame) elif cch['sensor_type_index'] == CTF.CTFV_STIM_CH: nstim += 1 ch.update(logno=nstim, coord_frame=FIFF.FIFFV_COORD_UNKNOWN, kind=FIFF.FIFFV_STIM_CH, unit=FIFF.FIFF_UNIT_V) else: nmisc += 1 ch.update(logno=nmisc, coord_frame=FIFF.FIFFV_COORD_UNKNOWN, kind=FIFF.FIFFV_MISC_CH, unit=FIFF.FIFF_UNIT_V) return chs def _comp_sort_keys(c): """This is for sorting the compensation data""" return (int(c['coeff_type']), int(c['scanno'])) def _check_comp(comp): """Check that conversion to named matrices is, indeed possible""" ref_sens = None kind = -1 for k, c_k in enumerate(comp): if c_k['coeff_type'] != kind: c_ref = c_k ref_sens = c_ref['sensors'] kind = c_k['coeff_type'] elif not c_k['sensors'] == ref_sens: raise RuntimeError('Cannot use an uneven compensation matrix') def _conv_comp(comp, first, last, chs): """Add a new converted compensation data item""" ccomp = dict(ctfkind=np.array([comp[first]['coeff_type']]), save_calibrated=False) _add_kind(ccomp) n_col = comp[first]['ncoeff'] n_row = last - first + 1 col_names = comp[first]['sensors'][:n_col] row_names = [comp[p]['sensor_name'] for p in range(first, last + 1)] data = np.empty((n_row, n_col)) for ii, coeffs in enumerate(comp[first:last + 1]): # Pick the elements to the matrix data[ii, :] = coeffs['coeffs'][:] ccomp['data'] = dict(row_names=row_names, col_names=col_names, data=data, 
nrow=len(row_names), ncol=len(col_names)) mk = ('proper_gain', 'qgain') _calibrate_comp(ccomp, chs, row_names, col_names, mult_keys=mk, flip=True) return ccomp def _convert_comp_data(res4): """Convert the compensation data into named matrices""" if res4['ncomp'] == 0: return # Sort the coefficients in our favorite order res4['comp'] = sorted(res4['comp'], key=_comp_sort_keys) # Check that all items for a given compensation type have the correct # number of channels _check_comp(res4['comp']) # Create named matrices first = 0 kind = -1 comps = list() for k in range(len(res4['comp'])): if res4['comp'][k]['coeff_type'] != kind: if k > 0: comps.append(_conv_comp(res4['comp'], first, k - 1, res4['chs'])) kind = res4['comp'][k]['coeff_type'] first = k comps.append(_conv_comp(res4['comp'], first, k, res4['chs'])) return comps def _pick_eeg_pos(c): """Pick EEG positions""" eeg = dict(coord_frame=FIFF.FIFFV_COORD_HEAD, assign_to_chs=False, labels=list(), ids=list(), rr=list(), kinds=list(), np=0) for ch in c['chs']: if ch['kind'] == FIFF.FIFFV_EEG_CH and not _at_origin(ch['loc'][:3]): eeg['labels'].append(ch['ch_name']) eeg['ids'].append(ch['logno']) eeg['rr'].append(ch['loc'][:3]) eeg['kinds'].append(FIFF.FIFFV_POINT_EEG) eeg['np'] += 1 if eeg['np'] == 0: return None logger.info('Picked positions of %d EEG channels from channel info' % eeg['np']) return eeg def _add_eeg_pos(eeg, t, c): """Pick the (virtual) EEG position data""" if eeg is None: return if t is None or t['t_ctf_head_head'] is None: raise RuntimeError('No coordinate transformation available for EEG ' 'position data') eeg_assigned = 0 if eeg['assign_to_chs']: for k in range(eeg['np']): # Look for a channel name match for ch in c['chs']: if ch['ch_name'].lower() == eeg['labels'][k].lower(): r0 = ch['loc'][:3] r0[:] = eeg['rr'][k] if eeg['coord_frame'] == FIFF.FIFFV_MNE_COORD_CTF_HEAD: r0[:] = apply_trans(t['t_ctf_head_head'], r0) elif eeg['coord_frame'] != FIFF.FIFFV_COORD_HEAD: raise RuntimeError( 'Illegal 
coordinate frame for EEG electrode ' 'positions : %s' % _coord_frame_name(eeg['coord_frame'])) # Use the logical channel number as an identifier eeg['ids'][k] = ch['logno'] eeg['kinds'][k] = FIFF.FIFFV_POINT_EEG eeg_assigned += 1 break # Add these to the Polhemus data fid_count = eeg_count = extra_count = 0 for k in range(eeg['np']): d = dict(r=eeg['rr'][k].copy(), kind=eeg['kinds'][k], ident=eeg['ids'][k], coord_frame=FIFF.FIFFV_COORD_HEAD) c['dig'].append(d) if eeg['coord_frame'] == FIFF.FIFFV_MNE_COORD_CTF_HEAD: d['r'] = apply_trans(t['t_ctf_head_head'], d['r']) elif eeg['coord_frame'] != FIFF.FIFFV_COORD_HEAD: raise RuntimeError('Illegal coordinate frame for EEG electrode ' 'positions: %s' % _coord_frame_name(eeg['coord_frame'])) if eeg['kinds'][k] == FIFF.FIFFV_POINT_CARDINAL: fid_count += 1 elif eeg['kinds'][k] == FIFF.FIFFV_POINT_EEG: eeg_count += 1 else: extra_count += 1 if eeg_assigned > 0: logger.info(' %d EEG electrode locations assigned to channel info.' % eeg_assigned) for count, kind in zip((fid_count, eeg_count, extra_count), ('fiducials', 'EEG locations', 'extra points')): if count > 0: logger.info(' %d %s added to Polhemus data.' 
% (count, kind)) _filt_map = {CTF.CTFV_FILTER_LOWPASS: 'lowpass', CTF.CTFV_FILTER_HIGHPASS: 'highpass'} def _compose_meas_info(res4, coils, trans, eeg): """Create meas info from CTF data""" info = _empty_info(res4['sfreq']) # Collect all the necessary data from the structures read info['meas_id'] = get_new_file_id() info['meas_id']['usecs'] = 0 info['meas_id']['secs'] = _convert_time(res4['data_date'], res4['data_time']) info['experimenter'] = res4['nf_operator'] info['subject_info'] = dict(his_id=res4['nf_subject_id']) for filt in res4['filters']: if filt['type'] in _filt_map: info[_filt_map[filt['type']]] = filt['freq'] info['dig'], info['hpi_results'] = _pick_isotrak_and_hpi_coils( res4, coils, trans) if trans is not None: if len(info['hpi_results']) > 0: info['hpi_results'][0]['coord_trans'] = trans['t_ctf_head_head'] if trans['t_dev_head'] is not None: info['dev_head_t'] = trans['t_dev_head'] info['dev_ctf_t'] = combine_transforms( trans['t_dev_head'], invert_transform(trans['t_ctf_head_head']), FIFF.FIFFV_COORD_DEVICE, FIFF.FIFFV_MNE_COORD_CTF_HEAD) if trans['t_ctf_head_head'] is not None: info['ctf_head_t'] = trans['t_ctf_head_head'] info['chs'] = _convert_channel_info(res4, trans, eeg is None) info['comps'] = _convert_comp_data(res4) if eeg is None: # Pick EEG locations from chan info if not read from a separate file eeg = _pick_eeg_pos(info) _add_eeg_pos(eeg, trans, info) logger.info(' Measurement info composed.') info._update_redundant() return info
import logging
import logging.config
import traceback

import urwid

from gamegirl.memory import Rom

# This code is meant to work, not meant to be pretty.
# I'll maybe make it better someday.

# Shared divider widgets reused throughout the sidebar layout.
line_divider = urwid.Divider(div_char=u'\u2500')
blank_divider = urwid.Divider()


def block_text(text, align='left', style=None, right=0, left=0):
    """Wrap text (or a Text widget) in full-width padding, optionally styled."""
    if not isinstance(text, urwid.Text):
        text = urwid.Text(text, align=align)
    widget = urwid.Padding(text, width=('relative', 100), right=right, left=left)
    if style:
        widget = urwid.AttrMap(widget, style)
    return widget


def sidebar_title(text):
    """Build a styled sidebar section title widget."""
    return urwid.Padding(urwid.Text(('sidebar_title', ' {0} '.format(text))), left=2)


def sidebar_value(text):
    """Build a styled sidebar value widget, indented under its title."""
    return urwid.Padding(urwid.Text(('sidebar_value', ' {0} '.format(text))),
                         width=('relative', 100), right=2, left=4)


class DebuggerLogHandler(logging.Handler):
    """Logging handler that routes log records into the debugger's log pane."""

    def __init__(self, *args, **kwargs):
        self.debugger = kwargs.pop('debugger')
        super(DebuggerLogHandler, self).__init__(*args, **kwargs)

    def emit(self, record):
        # Level name is shown in the gutter column where the instruction
        # view would show an address.
        self.debugger.log(record.getMessage(), lineno=record.levelname,
                          walker=self.debugger.log_walker)


class DebuggerInterface(object):
    """urwid-based TUI debugger for the GameGirl CPU.

    Three modes: 'instruction' (step/run), 'memory' (hex dump) and 'log'.
    NOTE: this module uses ``unicode`` and so targets Python 2.
    """

    def __init__(self, cpu):
        self.cpu = cpu
        self.mode = None
        cpu.debug = True
        rom = cpu.memory.rom
        self.stopped = False

        self.palette = [
            ('titlebar', 'black', 'light gray'),
            ('helpbar', 'black', 'dark cyan'),
            ('background', 'yellow', 'dark blue'),
            ('gutter', 'black', 'dark cyan'),
            ('register_name', 'white', 'dark cyan'),
            ('register_value', 'yellow', 'dark blue'),
            ('sidebar', 'black', 'light gray'),
            ('sidebar_title', 'white', 'dark cyan'),
            ('sidebar_value', 'yellow', 'dark blue'),
        ]

        self.titlebar = urwid.AttrMap(urwid.Text(('titlebar', 'GameGirl'),
                                                 align='center'), 'titlebar')
        self.help_text = urwid.Text(('helpbar', ''), align='left')
        self.helpbar = urwid.AttrMap(self.help_text, 'helpbar')

        # Instruction log
        self.instruction_walker = urwid.SimpleFocusListWalker([])
        self.instruction_list = urwid.ListBox(self.instruction_walker)

        # Debug log
        self.log_walker = urwid.SimpleFocusListWalker([])
        self.log_list = urwid.ListBox(self.log_walker)

        # Memory view
        self.memory_walker = urwid.SimpleFocusListWalker([])
        self.memory_list = urwid.ListBox(self.memory_walker)

        # Sidebar: one name/value widget pair per register, stored as
        # self.register_<NAME> so update_sidebar can refresh them.
        register_grid = []
        for register in ('A', 'B', 'C', 'D', 'E', 'F', 'H', 'L', 'SP', 'PC'):
            text_widget = urwid.Text('_', align='center')
            setattr(self, 'register_' + register, text_widget)
            register_grid.append(block_text(register, align='center',
                                            style='register_name'))
            register_grid.append(block_text(text_widget, style='register_value'))

        # Same pattern for the CPU flags (self.flag_<NAME>).
        flag_grid = []
        for flag in ('Z', 'N', 'H', 'C'):
            text_widget = urwid.Text('_', align='center')
            setattr(self, 'flag_' + flag, text_widget)
            flag_grid.append(block_text(flag, align='center',
                                        style='register_name'))
            flag_grid.append(block_text(text_widget, style='register_value'))

        if rom.gbc_compatible == Rom.GBC_INCOMPATIBLE:
            gbc_status = 'Incompatible'
        elif rom.gbc_compatible == Rom.GBC_COMPATIBLE:
            gbc_status = 'Compatible'
        elif rom.gbc_compatible == Rom.GBC_EXCLUSIVE:
            gbc_status = 'Exclusive'
        else:
            gbc_status = 'Unknown'

        self.sidebar = urwid.ListBox(urwid.SimpleListWalker([
            urwid.Text('Registers', align='center'),
            line_divider,
            urwid.GridFlow(register_grid, cell_width=7, h_sep=1, v_sep=1,
                           align='center'),
            blank_divider,
            urwid.Text('Flags', align='center'),
            line_divider,
            urwid.GridFlow(flag_grid, cell_width=7, h_sep=1, v_sep=1,
                           align='center'),
            blank_divider,
            urwid.Text('ROM', align='center'),
            line_divider,
            sidebar_title('Game'),
            sidebar_value(rom.title),
            blank_divider,
            sidebar_title('Game Code'),
            sidebar_value(rom.game_code),
            blank_divider,
            sidebar_title('Start address'),
            sidebar_value('${0:04x}'.format(rom.start_address)),
            blank_divider,
            sidebar_title('Gameboy Color'),
            sidebar_value(gbc_status),
            blank_divider,
            sidebar_title('Maker code'),
            sidebar_value(rom.maker_code),
            blank_divider,
            sidebar_title('Super Gameboy'),
            sidebar_value('Yes' if rom.super_gameboy else 'No'),
            blank_divider,
            sidebar_title('ROM Size'),
            sidebar_value(rom.rom_size[1]),
            blank_divider,
            sidebar_title('Destination'),
            sidebar_value('Other' if rom.destination == Rom.DESTINATION_OTHER
                          else 'Japan'),
            blank_divider,
            sidebar_title('Mask ROM Version'),
            sidebar_value(rom.mask_rom_version),
            blank_divider,
            sidebar_title('Complement check'),
            sidebar_value('Passed' if rom.passed_complement_check else 'Failed'),
            blank_divider,
            sidebar_title('Checksum'),
            sidebar_value('${0:04x}'.format(rom.checksum)),
            blank_divider,
        ]))

        # Main layout and loop
        self.top_columns = urwid.Columns([
            ('weight', 2, self.instruction_list),
            (35, urwid.AttrMap(self.sidebar, 'sidebar')),
        ])
        self.top_frame = urwid.Frame(self.top_columns, header=self.titlebar,
                                     footer=self.helpbar)
        self.top = urwid.AttrMap(self.top_frame, 'background')
        self.loop = urwid.MainLoop(self.top, self.palette,
                                   unhandled_input=self.unhandled_input)

        self.enter_instruction_mode()
        self.update_sidebar()

        # Route all logging output into the debugger's log pane.
        logging.config.dictConfig({
            'version': 1,
            'handlers': {
                'debugger': {
                    'class': 'gamegirl.debugger.DebuggerLogHandler',
                    'level': 'DEBUG',
                    'debugger': self
                }
            },
            'root': {
                'level': 'DEBUG',
                'handlers': ['debugger']
            }
        })

    def start(self):
        """Run the urwid main loop (blocks until exit)."""
        self.loop.run()

    def enter_instruction_mode(self):
        """Show the instruction list and step/run key bindings."""
        self.set_main(self.instruction_list)
        self.set_help('(N)ext instruction', '(C)ontinue', '(W)atch',
                      '(M)emory mode', '(L)og mode', '(Q)uit')
        self.mode = 'instruction'

    def enter_memory_mode(self):
        """Refresh and show the memory hex dump."""
        self.update_memory_view()
        self.set_main(self.memory_list)
        self.set_help('(I)nstruction mode', '(L)og mode', '(Q)uit')
        self.mode = 'memory'

    def enter_log_mode(self):
        """Show the debug log, scrolled to the newest entry."""
        self.set_main(self.log_list)
        self.set_help('(I)nstruction mode', '(M)emory mode', '(Q)uit')
        self.log_focus_bottom(walker=self.log_walker)
        self.mode = 'log'

    def set_main(self, widget):
        """Swap the main (left) pane widget, keeping the sidebar column."""
        self.top_columns.contents[0] = (widget,
                                        self.top_columns.options('weight', 2))

    def set_help(self, *items):
        """Render the footer help bar from the given key descriptions."""
        self.help_text.set_text(' ' + ', '.join(items))

    def update_sidebar(self):
        """Refresh register and flag values from the CPU."""
        for register in ('A', 'B', 'C', 'D', 'E', 'F', 'H', 'L', 'SP', 'PC'):
            # Single-letter registers are 8-bit, SP/PC are 16-bit.
            if len(register) == 1:
                format_string = '${0:02x}'
            else:
                format_string = '${0:04x}'
            widget = getattr(self, 'register_' + register)
            widget.set_text(format_string.format(getattr(self.cpu, register)))

        for flag in ('Z', 'N', 'H', 'C'):
            widget = getattr(self, 'flag_' + flag)
            widget.set_text(unicode(getattr(self.cpu, 'flag_' + flag)))

    def log(self, text, lineno='', bytes=None, walker=None, limit=None):
        """Append a gutter+text row to a walker (default: instruction list).

        `bytes`, if given, is rendered as a hex column; `limit` caps the
        walker length by dropping the oldest row.
        """
        gutter_length = max(9, len(lineno))
        gutter = block_text(lineno, style='gutter', right=1, align='right')
        columns = [(gutter_length, gutter), urwid.Text(text)]

        if bytes:
            byte_gutter_length = max(6, len(bytes) * 2) + 3
            byte_string = '$' + ''.join(['{0:02x}'.format(b) for b in bytes])
            byte_gutter = block_text(byte_string, style='gutter', left=1,
                                     align='left')
            columns.append((byte_gutter_length, byte_gutter))

        walker = walker if walker is not None else self.instruction_walker
        walker.append(urwid.Columns(columns, dividechars=1))
        if limit and len(walker) > limit:
            walker.pop(0)

    def log_divider(self):
        """Append a horizontal divider to the instruction list."""
        self.instruction_walker.append(urwid.Divider(div_char='-'))

    def log_focus_bottom(self, walker=None):
        """Scroll a walker (default: instruction list) to its last row."""
        walker = walker if walker is not None else self.instruction_walker
        walker.set_focus(len(walker) - 1)

    def execute(self):
        """Execute one CPU instruction and log it; stop on any exception."""
        if self.stopped:
            self.log('Execution has stopped, cannot continue.')
        else:
            try:
                lineno = '${0:04x}'.format(self.cpu.PC)
                result, debug_bytes = self.cpu.read_and_execute()
                self.log(result, lineno=lineno, bytes=debug_bytes, limit=1000)
                self.log_focus_bottom()
            except Exception:
                self.log(traceback.format_exc())
                self.stopped = True

    def unhandled_input(self, key):
        """Global key handler; per-mode keys are gated on self.mode."""
        if key in ('q', 'Q'):
            raise urwid.ExitMainLoop()

        if self.mode == 'instruction':
            if key in ('n', 'N'):
                self.execute()
                self.update_sidebar()

            if key in ('c', 'C', 'w', 'W'):
                screen = self.loop.screen
                user_stop = False
                watch = key in ('w', 'W')

                self.set_help('Running instructions, hit S to (S)top')
                self.loop.draw_screen()

                self.execute()
                while not self.stopped and not user_stop:
                    self.execute()

                    if watch:
                        self.update_sidebar()
                        self.loop.draw_screen()

                    # Since we're not running the main loop during this
                    # command we need to manually handle input.
                    keys, raw = screen.parse_input(
                        None, None, screen.get_available_raw_input())
                    # NOTE(review): this inner loop rebinds `key`, shadowing
                    # the outer handler argument — any mode-switch checks
                    # below will see the *last polled* key. Verify intended.
                    for key in keys:
                        if key in ('s', 'S'):
                            user_stop = True

                self.update_sidebar()
                self.enter_instruction_mode()

        if key in ('m', 'M'):
            self.enter_memory_mode()

        if key in ('i', 'I'):
            self.enter_instruction_mode()

        if key in ('l', 'L'):
            self.enter_log_mode()

        if key in ('d', 'D'):
            import pudb
            pudb.set_trace()

    def update_memory_view(self):
        """Rebuild the hex dump: 16 bytes per row, '--' for unreadable cells."""
        del self.memory_walker[:]
        memory_string = ''
        for addr in range(0x10000):
            try:
                memory_string += '{0:02x} '.format(
                    self.cpu.memory.read_byte(addr))
            except ValueError:
                memory_string += '-- '

            if addr % 16 == 15:
                self.log(memory_string, walker=self.memory_walker,
                         lineno='${0:04x}'.format(addr - 15))
                memory_string = ''
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util


def _rw_property(attr, doc):
	"""Build a read/write property backed by the private attribute ``_<attr>``.

	All writable nshttpprofile fields share the same trivial get/set shape;
	generating them here replaces ~1000 lines of duplicated accessor
	boilerplate with one line per field, with identical runtime behavior.
	"""
	priv = "_" + attr

	def _get(self):
		return getattr(self, priv)

	def _set(self, value):
		setattr(self, priv, value)

	return property(_get, _set, doc=doc)


class nshttpprofile(base_resource):
	"""Configuration for HTTP profile resource."""

	# (attribute, initial value) pairs, in the same order the generated SDK
	# initialized them; this order is preserved for instance-__dict__
	# insertion order and is also the set of fields copied for add/update.
	_WRITABLE = (
		("name", ""), ("dropinvalreqs", ""), ("markhttp09inval", ""),
		("markconnreqinval", ""), ("cmponpush", ""), ("conmultiplex", ""),
		("maxreusepool", 0), ("dropextracrlf", ""), ("incomphdrdelay", 0),
		("websocket", ""), ("rtsptunnel", ""), ("reqtimeout", 0),
		("adpttimeout", ""), ("reqtimeoutaction", ""), ("dropextradata", ""),
		("weblog", ""), ("clientiphdrexpr", ""), ("maxreq", 0),
		("persistentetag", ""), ("spdy", ""), ("reusepooltimeout", 0),
		("maxheaderlen", 0),
	)

	def __init__(self):
		# Seed every writable field with its generated-SDK default, then the
		# read-only bookkeeping fields.  ``self.___count`` is name-mangled to
		# ``_nshttpprofile___count`` exactly as in the original class body.
		for attr, initial in self._WRITABLE:
			setattr(self, "_" + attr, initial)
		self._refcnt = 0
		self._builtin = []
		self.___count = 0

	# --- writable fields (get/set pass through to the private attribute) ---
	name = _rw_property("name", "Name for an HTTP profile; cannot be changed after creation. Min length 1, max length 127.")
	dropinvalreqs = _rw_property("dropinvalreqs", "Drop invalid HTTP requests or responses. Default DISABLED. Possible values ENABLED, DISABLED.")
	markhttp09inval = _rw_property("markhttp09inval", "Mark HTTP/0.9 requests as invalid. Default DISABLED. Possible values ENABLED, DISABLED.")
	markconnreqinval = _rw_property("markconnreqinval", "Mark CONNECT requests as invalid. Default DISABLED. Possible values ENABLED, DISABLED.")
	cmponpush = _rw_property("cmponpush", "Start data compression on receiving a TCP packet with PUSH flag set. Default DISABLED.")
	conmultiplex = _rw_property("conmultiplex", "Reuse server connections for requests from more than one client connection. Default ENABLED.")
	maxreusepool = _rw_property("maxreusepool", "Maximum connections to a particular server kept in the reuse pool. Maximum 360000.")
	dropextracrlf = _rw_property("dropextracrlf", "Drop extra CR/LF characters present after the header. Default ENABLED.")
	incomphdrdelay = _rw_property("incomphdrdelay", "Maximum wait in milliseconds between incomplete header packets. Default 7000, maximum 360000.")
	websocket = _rw_property("websocket", "Allow the HTTP connection to be upgraded to a web socket. Default DISABLED.")
	rtsptunnel = _rw_property("rtsptunnel", "Allow RTSP tunnel in HTTP (application/x-rtsp-tunnelled). Default DISABLED.")
	reqtimeout = _rw_property("reqtimeout", "Seconds within which the HTTP request must complete. Maximum 86400.")
	adpttimeout = _rw_property("adpttimeout", "Adapt the configured request timeout based on flow conditions. Default DISABLED.")
	reqtimeoutaction = _rw_property("reqtimeoutaction", "Action on request timeout: RESET, DROP, or the name of a custom responder action.")
	dropextradata = _rw_property("dropextradata", "Drop extra data when the server sends more than content-length. Default DISABLED.")
	weblog = _rw_property("weblog", "Enable or disable web logging. Default ENABLED. Possible values ENABLED, DISABLED.")
	clientiphdrexpr = _rw_property("clientiphdrexpr", "Name of the header that contains the real client IP address.")
	maxreq = _rw_property("maxreq", "Maximum requests allowed on a single connection. Default 0, maximum 65534.")
	persistentetag = _rw_property("persistentetag", "Generate the persistent NetScaler-specific ETag for responses with an ETag header. Default DISABLED.")
	spdy = _rw_property("spdy", "Enable SPDYv2/SPDYv3 over SSL vserver. Possible values DISABLED, ENABLED, V2, V3. Default DISABLED.")
	reusepooltimeout = _rw_property("reusepooltimeout", "Idle timeout in seconds for reuse-pool server connections. Default 0, maximum 31536000.")
	maxheaderlen = _rw_property("maxheaderlen", "Bytes queued while waiting for a complete header before the request is marked invalid. Default 24820.")

	# --- read-only fields ---
	@property
	def refcnt(self):
		"""Number of entities using this profile (read-only)."""
		return self._refcnt

	@property
	def builtin(self):
		"""Built-in flags: MODIFIABLE, DELETABLE, IMMUTABLE, PARTITION_ALL (read-only)."""
		return self._builtin

	def _get_nitro_response(self, service, response):
		"""Convert a nitro response string into the resource array (GET path)."""
		try:
			result = service.payload_formatter.string_to_resource(nshttpprofile_response, response, self.__class__.__name__)
			if result.errorcode != 0:
				if result.errorcode == 444:
					service.clear_session(self)
				if result.severity:
					if result.severity == "ERROR":
						raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
				else:
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			return result.nshttpprofile
		except Exception as e:
			raise e

	def _get_object_name(self):
		"""Return the value of the object identifier argument (or None)."""
		try:
			if self.name:
				return str(self.name)
			return None
		except Exception as e:
			raise e

	@classmethod
	def _to_payload(cls, src):
		"""Copy every writable attribute of *src* into a fresh nshttpprofile.

		Shared by add() and update(), which in the generated SDK duplicated
		this 22-line attribute copy four times.
		"""
		dst = cls()
		for attr, _ in cls._WRITABLE:
			setattr(dst, attr, getattr(src, attr))
		return dst

	@classmethod
	def add(cls, client, resource):
		"""Use this API to add nshttpprofile."""
		try:
			if type(resource) is not list:
				return cls._to_payload(resource).add_resource(client)
			# NOTE: like the generated SDK, an empty list reaches the bulk
			# call with the payload list unbound and raises accordingly.
			if resource and len(resource) > 0:
				addresources = [cls._to_payload(item) for item in resource]
			result = cls.add_bulk_request(client, addresources)
			return result
		except Exception as e:
			raise e

	@classmethod
	def delete(cls, client, resource):
		"""Use this API to delete nshttpprofile.

		*resource* may be a name, a resource object, or a list of either.
		"""
		try:
			if type(resource) is not list:
				target = nshttpprofile()
				if type(resource) != type(target):
					target.name = resource
				else:
					target.name = resource.name
				return target.delete_resource(client)
			if type(resource[0]) != cls:
				if resource and len(resource) > 0:
					deleteresources = [nshttpprofile() for _ in resource]
					for slot, raw_name in zip(deleteresources, resource):
						slot.name = raw_name
			else:
				if resource and len(resource) > 0:
					deleteresources = [nshttpprofile() for _ in resource]
					for slot, item in zip(deleteresources, resource):
						slot.name = item.name
			result = cls.delete_bulk_request(client, deleteresources)
			return result
		except Exception as e:
			raise e

	@classmethod
	def update(cls, client, resource):
		"""Use this API to update nshttpprofile."""
		try:
			if type(resource) is not list:
				return cls._to_payload(resource).update_resource(client)
			if resource and len(resource) > 0:
				updateresources = [cls._to_payload(item) for item in resource]
			result = cls.update_bulk_request(client, updateresources)
			return result
		except Exception as e:
			raise e

	@classmethod
	def unset(cls, client, resource, args):
		"""Use this API to unset properties of nshttpprofile.

		Properties that need to be unset are specified in the *args* array.
		"""
		try:
			if type(resource) is not list:
				target = nshttpprofile()
				if type(resource) != type(target):
					target.name = resource
				else:
					target.name = resource.name
				return target.unset_resource(client, args)
			if type(resource[0]) != cls:
				if resource and len(resource) > 0:
					unsetresources = [nshttpprofile() for _ in resource]
					for slot, raw_name in zip(unsetresources, resource):
						slot.name = raw_name
			else:
				if resource and len(resource) > 0:
					unsetresources = [nshttpprofile() for _ in resource]
					for slot, item in zip(unsetresources, resource):
						slot.name = item.name
			result = cls.unset_bulk_request(client, unsetresources, args)
			return result
		except Exception as e:
			raise e

	@classmethod
	def get(cls, client, name="", option_=""):
		"""Fetch all nshttpprofile resources configured on NetScaler.

		*name* may be empty (fetch all), a single profile name, or a list of
		names.
		"""
		try:
			if not name:
				response = nshttpprofile().get_resources(client, option_)
			elif type(name) != cls:
				if type(name) is not list:
					fetcher = nshttpprofile()
					fetcher.name = name
					response = fetcher.get_resource(client, option_)
				elif name and len(name) > 0:
					response = []
					for one_name in name:
						fetcher = nshttpprofile()
						fetcher.name = one_name
						response.append(fetcher.get_resource(client, option_))
			return response
		except Exception as e:
			raise e

	@classmethod
	def get_filtered(cls, client, filter_):
		"""Fetch a filtered set of nshttpprofile resources.

		The filter string is JSON-ish, e.g. "port:80,servicetype:HTTP".
		"""
		try:
			option_ = options()
			option_.filter = filter_
			return nshttpprofile().getfiltered(client, option_)
		except Exception as e:
			raise e

	@classmethod
	def count(cls, client):
		"""Count the nshttpprofile resources configured on NetScaler."""
		try:
			option_ = options()
			option_.count = True
			response = nshttpprofile().get_resources(client, option_)
			if response:
				# '___count' is set verbatim by the payload formatter, so it
				# must be read through __dict__ (attribute access would be
				# name-mangled inside this class).
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e

	@classmethod
	def count_filtered(cls, client, filter_):
		"""Count the filtered set of nshttpprofile resources."""
		try:
			option_ = options()
			option_.count = True
			option_.filter = filter_
			response = nshttpprofile().getfiltered(client, option_)
			if response:
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e

	# --- enumerations for the ENABLED/DISABLED-style fields ---
	class Conmultiplex:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Markhttp09inval:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Adpttimeout:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Builtin:
		MODIFIABLE = "MODIFIABLE"
		DELETABLE = "DELETABLE"
		IMMUTABLE = "IMMUTABLE"
		PARTITION_ALL = "PARTITION_ALL"

	class Markconnreqinval:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Spdy:
		DISABLED = "DISABLED"
		ENABLED = "ENABLED"
		V2 = "V2"
		V3 = "V3"

	class Persistentetag:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Rtsptunnel:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Dropinvalreqs:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Dropextracrlf:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Dropextradata:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Websocket:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Cmponpush:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"

	class Weblog:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"


class nshttpprofile_response(base_response):
	"""Response envelope for nshttpprofile API calls."""

	def __init__(self, length=1):
		self.nshttpprofile = []
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		self.nshttpprofile = [nshttpprofile() for _ in range(length)]
from astropy.io import fits
from astropy.wcs import WCS
from astropy.table import Table
from astropy.coordinates import SkyCoord
from numpy import __dict__ as npdict
from numpy import int as ndint
from numpy import max as ndmax
from numpy import append as ndappend
from numpy import concatenate as ndconc
from numpy import array, dot, inf, delete, sort, zeros, where, arange, indices, argsort
from numpy import unravel_index, argmax, isnan, ones, fromfile, array_equal, identity
from numpy import loadtxt
from matplotlib.pyplot import show, figure, Circle


class fitsHandler(object):
    """Shared grouping/rebinning helpers for FITS-backed spectral objects.

    Subclasses are expected to provide ``reset()`` and a ``grouping``
    attribute (see ``group``).
    """

    def group(self, binfactor=None, reset=True):
        """Record a new channel-grouping factor; optionally reset first.

        NOTE(review): a falsy binfactor (None or 0) leaves the previous
        grouping in place.
        """
        if reset:
            self.reset()
        if binfactor:
            self.grouping = binfactor

    @staticmethod
    def rebin(model, binfactor, scale=lambda: 1):
        """Scale *model* by 1/scale(), zero out NaNs, then rebin by *binfactor*."""
        res = model/scale()
        res[isnan(res)] = 0
        res = fitsHandler.ndrebin(res, binfactor)
        return res

    @staticmethod
    def ndrebin(arr, rebin, function='mean'):
        """Rebin *arr* along axis 0 in chunks of *rebin* using a numpy reduction.

        *function* is the name of a numpy function ('mean', 'sum', 'min',
        'max', ...) looked up in the numpy namespace.  A partial trailing
        chunk (when len(arr) is not a multiple of *rebin*) is reduced
        separately and appended, so no data is dropped.
        """
        if rebin <= 1:
            return arr
        # Full chunks, reshaped so axis 1 has length `rebin` and is reduced.
        start = arr[:(arr.shape[0]//rebin)*rebin].reshape(arr.shape[0]//rebin, -1, *arr.shape[1:])
        # Leftover rows that do not fill a whole chunk.
        final = arr[(arr.shape[0]//rebin)*rebin:]
        arr = npdict[function](start, axis=1)
        if final.size:
            arr = ndappend(arr, npdict[function](final, axis=0))
        return arr

    @staticmethod
    def padgrouped(vector, binfactor):
        """Expand *vector* by repeating each element *binfactor* times."""
        res = []
        for line in vector:
            count = 1
            res.append(line)
            while count < binfactor:
                res.append(line)
                count += 1
        return res


class Response(fitsHandler):
    """Instrument response matrix loaded from a FITS file.

    Reads the EBOUNDS and MATRIX extensions and expands the compressed
    response rows into a dense channel x energy matrix (``omatrix``).
    The column layout (ENERG_LO, ENERG_HI, N_GRP, F_CHAN, N_CHAN, MATRIX)
    matches the OGIP RMF convention — presumably the input is a standard
    RMF file; confirm against the producing pipeline.
    """

    # keV <-> Angstrom conversion factor: E[keV] = keVAfac / lambda[A].
    keVAfac = 12.39842

    def __init__(self, response):
        """Load the response matrix from FITS file *response*."""
        fitsio = fits.open(response)
        # EBOUNDS columns 1 and 2: per-channel lower/upper energy bounds.
        self.ominebounds = array(list(fitsio['EBOUNDS'].data))[:, 1]
        self.omaxebounds = array(list(fitsio['EBOUNDS'].data))[:, 2]
        self.ebins = []
        self.ebinAvg = []
        self.grouping = 1
        # MATRIX record column indices (OGIP RMF layout).
        elow = 0
        ehigh = 1
        ngrp = 2
        fch = 3
        nch = 4
        row = 5
        energies = []
        nchannels = fitsio['MATRIX'].header['DETCHANS']
        data = list(fitsio['MATRIX'].data)
        for record in data:
            self.ebins.append(record[ehigh]-record[elow])
            self.ebinAvg.append((record[ehigh]+record[elow])/2.0)
            channel = 1
            ind = 0
            grps = record[ngrp]
            # Speed things up!!! Also - while kills speed for some reason.
            matrow = list(record[row])
            energies.append([])
            # F_CHAN/N_CHAN may be arrays (one entry per group) or scalars
            # (single group); the IndexError fallback handles the scalar case.
            try:
                channels = zip(record[fch][:grps], record[nch][:grps])
            except IndexError:
                if grps > 1:
                    raise
                channels = (record[fch], record[nch]),
            for start_channel, nchannel in channels:
                # Zero-fill the gap before this group of stored elements.
                for _ in range(channel, start_channel):
                    energies[-1].append(0)
                    channel += 1
                # Copy the stored matrix elements for this group.
                for _ in range(start_channel, start_channel+nchannel):
                    energies[-1].append(matrow[ind])
                    ind += 1
                    channel += 1
            # Zero-fill the remaining channels up to DETCHANS.
            for _ in range(channel, nchannels+1):
                channel += 1
                energies[-1].append(0)
        # Transpose so rows index channels and columns index energies.
        self.omatrix = array(energies).transpose()
        self.calcEff()
        self.reset()
        self.ebins = array(self.ebins)
        self.ebinAvg = array(self.ebinAvg)

    def calcEff(self):
        """Cache per-energy (roeff) and per-channel (oeff) response sums."""
        self.roeff = self.omatrix.sum(axis=0)
        self.oeff = self.omatrix.sum(axis=1)

    def notice(self, channels):
        """Re-include 1-based *channels* previously removed with ignore()."""
        # NOTE(review): bounds-checked against the full omatrix here, while
        # ignore() checks against the (grouped/filtered) matrix — verify the
        # asymmetry is intentional.
        self.deleted -= set([c-1 for c in channels if c > 0 and c <= len(self.omatrix)])
        self.ignore([])

    def ignore(self, channels):
        """Remove 1-based *channels* from the working matrix and efficiencies."""
        self.group(self.grouping, reset=False)
        self.deleted.update([c-1 for c in channels if c > 0 and c <= len(self.matrix)])
        self.matrix = delete(self.matrix, list(self.deleted), axis=0)
        self.eff = delete(self.eff, list(self.deleted))

    def reset(self):
        """Restore working copies (matrix, eff, bounds) from the originals."""
        self.deleted = set()
        self.matrix = array(self.omatrix, copy=True)
        self.eff = array(self.oeff, copy=True)
        self.reff = array(self.roeff, copy=True)
        self.minebounds = self.ominebounds.copy()
        self.maxebounds = self..copy() if False else self.ominebounds.copy()

    def group(self, binfactor=None, reset=True):
        """Apply channel grouping to the working matrix, eff, and bounds."""
        fitsHandler.group(self, binfactor, reset)
        self.matrix = fitsHandler.ndrebin(self.omatrix, self.grouping, 'sum')
        self.eff = fitsHandler.ndrebin(self.oeff, self.grouping, 'mean')
        self.minebounds = fitsHandler.ndrebin(self.ominebounds, self.grouping, 'min')
        self.maxebounds = fitsHandler.ndrebin(self.omaxebounds, self.grouping, 'max')

    def loadancr(self, ancr):
        """Fold an ancillary response (ARF) into the matrix, if *ancr* given.

        Raises ValueError when the ARF energy grid does not match this
        response's energy-bin centers.
        """
        if not ancr:
            return
        ancr = array([row for row in list(fits.open(ancr)['SPECRESP'].data)])
        if not array_equal(ancr[:, :2].sum(axis=1)/2.0, self.ebinAvg):
            raise ValueError("-E- Energy bins differ between ancr and response file!")
        # Column 2 is the effective area per energy bin.
        self.omatrix *= ancr[:, 2]
        self.calcEff()
        self.reset()

    def convolve_channels(self, vector):
        """Fold a per-energy *vector* through the response into channel space."""
        return dot(self.matrix, vector*self.ebins)

    def energy_to_channel(self, minX, maxX):
        """Return 1-based channels whose energy bounds overlap [minX, maxX]."""
        minE = self.minebounds
        maxE = self.maxebounds
        if minX == maxX:
            # Point query: the single channel containing minX.
            res = where((minX > minE) & (minX <= maxE))[0]
        else:
            res = where((minX <= maxE) & (maxX > minE))[0]
        return res+1

    def wl_to_channel(self, minX, maxX):
        """Like energy_to_channel but with wavelength (Angstrom) inputs."""
        # Zero wavelength maps to infinite energy.
        newMax = self.keVAfac/minX if minX else inf
        return self.energy_to_channel(self.keVAfac/maxX, newMax)

    def wl(self, table, xonly=False):
        """Convert an energy()-style table to wavelength units (in place).

        Scalar input returns a scalar wavelength; array input has column 0
        converted to Angstroms and width/count columns rescaled by
        dlambda/dE.
        """
        wave = self.energy(table, xonly)
        try:
            return self.keVAfac/float(wave)
        except TypeError:
            pass
        E = wave[:, 0]
        # |dlambda/dE| evaluated at each bin center.
        dltodE = self.keVAfac/E**2
        wave[:, 0] = self.keVAfac/E
        if xonly:
            if len(wave[0]) == 4:
                wave[:, 1] *= dltodE
            return wave
        if len(wave[0]) == 4:
            wave[:, 1] *= dltodE  # dl
            wave[:, 2:] /= dltodE.reshape(-1, 1)
            return wave
        wave[:, 1] /= dltodE
        return wave

    def energy(self, table=None, xonly=False):
        """Map a channel table to energy space.

        Scalar *table*: return the center energy of that 1-based channel
        (clamped to the last channel on overflow).  Array *table*: column 0
        holds channels; returns energy centers, widths, and count columns
        normalized per keV unless *xonly*.
        """
        try:
            return ((self.maxebounds+self.minebounds)*0.5)[int(table)-1]
        except IndexError:
            # Channel past the end: fall back to the last bin center.
            return ((self.maxebounds+self.minebounds)*0.5)[-1]
        except TypeError:
            pass
        channels = table[:, 0].astype('int16')-1
        energy = ((self.maxebounds+self.minebounds)*0.5)[channels].reshape(-1, 1)
        eerror = (self.maxebounds-self.minebounds)[channels].reshape(-1, 1)
        if xonly:
            if len(table[0]) > 2:
                return ndconc((energy, eerror, table[:, 1:]), axis=1)
            return ndappend(energy, table[:, 1:2], axis=1)
        # Normalize counts (and errors) by bin width.
        cts = table[:, 1:2]/eerror
        if len(table[0]) > 2:
            error = (table[:, 2:3]/eerror)
            return ndconc((energy, eerror, cts, error), axis=1)
        return ndappend(energy, cts, axis=1)


class FakeResponse(Response):
    """Identity response for data with no instrument matrix."""

    def __init__(self, axis, minE, maxE):
        self.ebinAvg = array(axis)
        self.ominebounds = minE
        self.omaxebounds = maxE
        # Identity matrix: channels map 1:1 to energies.
        self.omatrix = identity(len(axis))
        self.oeff = ones(len(axis))
        self.roeff = array([])
        self.reset()

    def energy(self, table=None, xonly=False):
        """Prefix synthetic 1-based channel numbers and delegate to Response."""
        channels = array(range(len(table)))+1
        return super().energy(ndconc([channels[:, None], table[:, -2:]], axis=1), xonly)

    def ignore(self, channels):
        pass

    # NOTE(review): the remaining FakeResponse methods (notice,
    # convolve_channels) continue on the next, garbled source line; the
    # dangling 'def' below is the start of that truncated definition and is
    # preserved verbatim from the original extraction.
    def
notice(self, channels): pass def convolve_channels(self, vector): return vector class Data(fitsHandler): class lengthMismatch(Exception): pass class MultipleDevices(Exception): pass def __init__(self, data, background=None, text=None, device=None): self.ochannels = [] self.ocounts = [] self.oscales = [] self.obscales = [] self.grouping = 1 self.resp = None self.ancr = None self.back = None self.background = None if text is None: self.loadFits(data, device) else: self.loadText(data) self.ochannels = array(self.ochannels) self.ocounts = array(self.ocounts) self.oscales = array(self.oscales) self.obscales = array(self.obscales) if self.ochannels[0] == 0: self.ochannels += 1 self.reset() if background is not None: self.loadback(background, text) def loadFits(self, data, device=None): fitsio = fits.open(data) data = fitsio[1].data h = fitsio[1].header self.exposure = h['EXPOSURE'] self.asciiflag = False for key in ('RESPFILE', 'BACKFILE', 'ANCRFILE'): try: if h[key] != 'none': self.__dict__[key[:4].lower()] = h[key] except KeyError: pass CHANNEL = "CHANNEL" COUNTS = "COUNTS" QUALITY = "QUALITY" AREASCAL = "AREASCAL" BACKSCAL = "BACKSCAL" if len(data['CHANNEL'].shape) > 1: if device is None or device < 0 or device >= data['CHANNEL'].shape[0]: raise Data.MultipleDevices('Found multiple devices in data file.' ' Please provide device index ({}-{}).'. 
format(0, data['CHANNEL'].shape[0]-1)) data = data[device] self.ochannels = data['CHANNEL'].copy() self.ocounts = data['COUNTS'].copy() self.oscales = ones(self.ocounts.shape) self.obscales = ones(self.ocounts.shape) return for record in data: counts = record[COUNTS] self.ochannels.append(record[CHANNEL]) try: q = record[QUALITY] > 0 except KeyError: q = 0 if q > 0: counts = 0 self.ocounts.append(0) self.oscales.append(0) self.obscales.append(0) else: bscale = scale = 1.0 try: scale = record[AREASCAL] bscale = record[BACKSCAL] except KeyError: pass self.ocounts.append(counts) self.oscales.append(scale) self.obscales.append(bscale) def loadText(self, fname): self.asciiflag = True self.exposure = 1 self.errorarray = [] try: data = loadtxt(fname) except ValueError: data = loadtxt(fname, skiprows=1) data = data[argsort(data[:, 0])] if data.shape[1] == 4: self.minE, self.maxE, self.ocounts, self.errorarray = data.T self.ochannels = (self.minE + self.maxE)/2 else: self.ochannels, self.ocounts, self.errorarray = data.T bounds = ndconc([[1.5*self.ochannels[0] - 0.5*self.ochannels[1]], (self.ochannels[:-1] + self.ochannels[1:])/2, [0.5*self.ochannels[-1] + self.ochannels[-1]/2]]) self.minE, self.maxE = bounds[:-1], bounds[1:] self.oscales = ones(data.shape[0]).astype('float64') self.obscales = ones(data.shape[0]).astype('float64') self.errorarray = self.errorarray self.errors = (lambda rebin=1, _=1, row=False, x=self.errorarray: Data.ndrebin(x, rebin) if row else Data.ndrebin(x, rebin).reshape(-1, 1)) def getPlot(self, rebin=1, eff=1): return ndconc(( Data.ndrebin(self.channels, rebin).reshape(-1, 1), self.cts(rebin, eff), self.errors(rebin, eff)), axis=1) def cts(self, rebin=1, eff=1, row=False): if self.background is None: cts = self.counts/self.scale(eff) else: back = self.background cts = (self.counts/self.scale(eff) - back.counts/self.bscale(eff)) cts[isnan(cts)] = 0 cts = Data.ndrebin(cts, rebin) return cts if row else cts.reshape(-1, 1) def errors(self, rebin=1, 
eff=1, row=False): counts = Data.ndrebin(self.counts, rebin, 'sum') if self.background is None: error = counts**0.5/Data.ndrebin(self.scale(eff), rebin, 'sum') else: back = self.background bcounts = Data.ndrebin(back.counts, rebin, 'sum') error = (counts/Data.ndrebin(self. scale(eff), rebin, 'sum')**2 + bcounts/Data.ndrebin(self.bscale(eff), rebin, 'sum')**2)**0.5 error[isnan(error)] = inf error[error == 0] = inf return error if row else error.reshape(-1, 1) def scale(self, eff=1): try: return self.scales*self.exposure*eff*self.transmission except AttributeError: return self.scales*self.exposure*eff def bscale(self, eff=1): back = self.background try: return (back.bscales/self.bscales)*back.scales*back.exposure*eff*self.transmission except AttributeError: return (back.bscales/self.bscales)*back.scales*back.exposure*eff def loadback(self, background=None, text=None): if background is None: background = self.back back = Data(background, text=text) if self.deleted: back.ignore([c+1 for c in self.deleted]) if len(back) != len(self): raise Data.lengthMismatch("Got "+str(background)+" with length "+str(len(back))+".") self.background = back def __len__(self): return len(self.channels) # Assume same amount of channels def __div__(self, other): try: other.reset() other.ignore((c+1 for c in self.delete)) you = other.cts(rebin=self.grouping) eyou = other.errors(rebin=self.grouping) except AttributeError: try: dother = Data(other) dother.ignore((c+1 for c in self.deleted)) you = dother.cts(rebin=self.grouping) eyou = dother.errors(rebin=self.grouping) except IOError: you = Data.ndrebin(fromfile(other, sep=" "), self.grouping) eyou = zeros(you.size) me = self.cts(rebin=self.grouping) eme = self.errors(rebin=self.grouping) if len(you) != len(me): raise Data.lengthMismatch("Dividing to data with different amount of channels!") result = me/you eresult = ((eme/you)**2+(me*eyou/you**2)**2)**0.5 return list(zip(Data.ndrebin(self.channels, self.grouping), result, eresult)) def 
reset(self): self.deleted = set() self.channels = array(self.ochannels, copy=True) self.counts = array(self.ocounts, copy=True) self.scales = array(self.oscales, copy=True) self.bscales = array(self.obscales, copy=True) try: self.transmission = array(self.otransmission, copy=True) except AttributeError: pass if self.background is not None: self.background.reset() def group(self, binfactor=None, reset=True): fitsHandler.group(self, binfactor, reset) self.channels = arange(len(self.ochannels)//self.grouping + (len(self.ochannels) % self.grouping > 0))+1 self.counts = fitsHandler.ndrebin(self.ocounts, self.grouping, 'sum') self.scales = fitsHandler.ndrebin(self.oscales, self.grouping, 'mean') self.bscales = fitsHandler.ndrebin(self.obscales, self.grouping, 'mean') try: self.transmission = fitsHandler.ndrebin(self.otransmission, self.grouping, 'mean') except AttributeError: pass if self.background is not None: self.background.group(self.grouping, reset) def notice(self, channels): self.deleted -= set([c-1 for c in channels if c > 0 and c <= self.ochannels[-1]]) self.ignore([]) def ignore(self, channels): self.deleted.update((c-1 for c in channels if c >= self.channels[0] and c <= self.channels[-1])) self.group(self.grouping, reset=False) self.channels = delete(self.channels, list(self.deleted), axis=0) self.counts = delete(self.counts, list(self.deleted), axis=0) self.scales = delete(self.scales, list(self.deleted), axis=0) self.bscales = delete(self.bscales, list(self.deleted), axis=0) try: self.transmission = delete(self.otransmission, list(self.deleted), axis=0) except AttributeError: pass if self.background is not None: self.background.ignore(channels) def untransmit(self): try: del(self.otransmission) del(self.transmission) except AttributeError: pass def transmit(self, table): name = str(table) try: transmission = array([float(x) for x in open(table)]) except IOError: transmission = array(list(table)) if len(transmission) != len(self.counts): raise 
Data.lengthMismatch("Got "+name+" with length "+str(len(transmission))) self.otransmission = transmission self.transmission = delete(self.otransmission, list(self.deleted), axis=0) class Events(fitsHandler): def __init__(self, event_file): try: fitsio = Table.read(event_file, hdu=1) self.events = sort(fitsio['X', 'Y']) xmax = self.events[-1][0] ymax = max((a[1] for a in self.events)) self.map = zeros((ymax, xmax), dtype=ndint) X = 0 Y = 1 for event in self.events: self.map[event[Y]-1][event[X]-1] += 1 except ValueError: fitsio = fits.open(event_file, hdu=1)[0] self.events = fitsio.data xmax = len(self.events[0])+1 ymax = len(self.events)+1 self.map = self.events self.xl, self.xr, self.yb, self.yt = (1, len(self.map[0]), 1, len(self.map)) self.wcs = WCS(fitsio) self.pixels = indices(self.map.shape) self.pixel_per_arc_second = 1/(SkyCoord(*self.wcs.wcs_pix2world(0, 1, 1), frame='fk5', unit='deg' ).separation(SkyCoord(*self.wcs.wcs_pix2world(0, 0, 1), frame='fk5', unit='deg')).dms.s) def outOfBound(self, x, y): return x < self.xl or x > self.xr or y < self.yb or y > self.yt def _plotval(self, x, y): x = int(x+1) y = int(y+1) try: if self.outOfBound(x, y): return 'Out' return str(self.map[x, y]) except IndexError: return 'Out' def plot(self, obj=None, coord='pixel'): fig = figure() axes = fig.add_subplot(111) def vals(x, y): return 'x=%d, y=%d, z=%s' % (x+1, y+1, self._plotval(x, y)) img = axes.imshow(self.map, origin='lower', cmap='gray', interpolation=None) a = axes.get_xticks() a[1] = 1 axes.set_xticks(a) a = axes.get_yticks() a[1] = 1 axes.set_yticks(a) img.set_extent((0.5, len(self.map[0]+0.5), 0.5, len(self.map)+0.5)) axes.format_coord = vals if obj is not None: x, y, R = self.coord_transform(obj[:2], obj[2], coord) axes.add_artist(Circle((x, y), R, color='green', fill=False)) if len(obj) > 3: x, y, R, RM = self.coord_transform(obj[:2], obj[3], coord) axes.add_artist(Circle((x, y), RM, color='green', fill=False)) show(block=False) return axes def centroid(self, 
ignore=()): return tuple((x+1 for x in unravel_index(argmax(self.map), self.map.shape)[::-1])) def _is_effective_radius(self, center, R, thresh=None): if thresh is None: thresh = 1 X, Y = center count = valid = 0 for pixels in (self.map[X-R, Y-R:Y+R+1], self.map[X+R, Y-R:Y+R+1], self.map[Y-R, X-R:X+R+1], self.map[Y+R, X-R:X+R+1]): count += pixels.shape[0] valid += pixels[pixels > thresh].shape[0] return valid/float(count) >= 0.1 def coord_transform(self, coord, R=None, what='pixel'): if what != 'pixel': if what == 'fk5': coord = array(self.wcs.wcs_world2pix(coord[0], coord[1], 1)) if R is not None: coord = ndappend(coord, self.pixel_per_arc_second*R) else: print("-E- Currently only coordinates in pixel or fk5 are supported!") return elif R is not None: return ndappend(array(coord), R).round().astype('int64') return array(coord).round().astype('int64') def object(self, center, what='pixel', constant=None): R = 0 center = self.coord_transform(center, what=what) if constant is not None: if constant == 0: return center return ndappend(center, constant) while self._is_effective_radius(center, R): R += 1 return ndappend(center, R) def max(self): return ndmax(self.map) def counts_around(self, x, y, R, what='pixel', returnMax=False, debug=False): x, y, R = self.coord_transform((x, y), R, what) pixels = (self.pixels[0]-y)**2+(self.pixels[1]-x)**2 if debug: return pixels dists = self.map[pixels <= R**2] if returnMax: try: return dists.sum(), dists.max() except ValueError: return 0, 0 return dists.sum() def background(self, center, outer_coefficient=1.25, constant=None, coord='pixel'): x, y, Rmin = self.object(center, coord=coord) if constant is not None: return x, y, constant[0], constant[1] return x, y, Rmin, Rmin*outer_coefficient def __iter__(self): self.iter = 0 return self def __next__(self): if self.iter == len(self.events): raise StopIteration() self.iter += 1 return self.events[self.iter - 1]
"""Unit tests for the copy module.""" import copy import copy_reg import weakref import unittest from test import test_support class TestCopy(unittest.TestCase): # Attempt full line coverage of copy.py from top to bottom def test_exceptions(self): self.assertTrue(copy.Error is copy.error) self.assertTrue(issubclass(copy.Error, Exception)) # The copy() method def test_copy_basic(self): x = 42 y = copy.copy(x) self.assertEqual(x, y) def test_copy_copy(self): class C(object): def __init__(self, foo): self.foo = foo def __copy__(self): return C(self.foo) x = C(42) y = copy.copy(x) self.assertEqual(y.__class__, x.__class__) self.assertEqual(y.foo, x.foo) def test_copy_registry(self): class C(object): def __new__(cls, foo): obj = object.__new__(cls) obj.foo = foo return obj def pickle_C(obj): return (C, (obj.foo,)) x = C(42) self.assertRaises(TypeError, copy.copy, x) copy_reg.pickle(C, pickle_C, C) y = copy.copy(x) def test_copy_reduce_ex(self): class C(object): def __reduce_ex__(self, proto): return "" def __reduce__(self): raise test_support.TestFailed, "shouldn't call this" x = C() y = copy.copy(x) self.assertTrue(y is x) def test_copy_reduce(self): class C(object): def __reduce__(self): return "" x = C() y = copy.copy(x) self.assertTrue(y is x) def test_copy_cant(self): class C(object): def __getattribute__(self, name): if name.startswith("__reduce"): raise AttributeError, name return object.__getattribute__(self, name) x = C() self.assertRaises(copy.Error, copy.copy, x) # Type-specific _copy_xxx() methods def test_copy_atomic(self): class Classic: pass class NewStyle(object): pass def f(): pass tests = [None, 42, 2L**100, 3.14, True, False, 1j, "hello", u"hello\u1234", f.func_code, NewStyle, xrange(10), Classic, max] for x in tests: self.assertTrue(copy.copy(x) is x, repr(x)) def test_copy_list(self): x = [1, 2, 3] self.assertEqual(copy.copy(x), x) def test_copy_tuple(self): x = (1, 2, 3) self.assertEqual(copy.copy(x), x) def test_copy_dict(self): x = {"foo": 1, 
"bar": 2} self.assertEqual(copy.copy(x), x) def test_copy_inst_vanilla(self): class C: def __init__(self, foo): self.foo = foo def __cmp__(self, other): return cmp(self.foo, other.foo) x = C(42) self.assertEqual(copy.copy(x), x) def test_copy_inst_copy(self): class C: def __init__(self, foo): self.foo = foo def __copy__(self): return C(self.foo) def __cmp__(self, other): return cmp(self.foo, other.foo) x = C(42) self.assertEqual(copy.copy(x), x) def test_copy_inst_getinitargs(self): class C: def __init__(self, foo): self.foo = foo def __getinitargs__(self): return (self.foo,) def __cmp__(self, other): return cmp(self.foo, other.foo) x = C(42) self.assertEqual(copy.copy(x), x) def test_copy_inst_getstate(self): class C: def __init__(self, foo): self.foo = foo def __getstate__(self): return {"foo": self.foo} def __cmp__(self, other): return cmp(self.foo, other.foo) x = C(42) self.assertEqual(copy.copy(x), x) def test_copy_inst_setstate(self): class C: def __init__(self, foo): self.foo = foo def __setstate__(self, state): self.foo = state["foo"] def __cmp__(self, other): return cmp(self.foo, other.foo) x = C(42) self.assertEqual(copy.copy(x), x) def test_copy_inst_getstate_setstate(self): class C: def __init__(self, foo): self.foo = foo def __getstate__(self): return self.foo def __setstate__(self, state): self.foo = state def __cmp__(self, other): return cmp(self.foo, other.foo) x = C(42) self.assertEqual(copy.copy(x), x) # The deepcopy() method def test_deepcopy_basic(self): x = 42 y = copy.deepcopy(x) self.assertEqual(y, x) def test_deepcopy_memo(self): # Tests of reflexive objects are under type-specific sections below. # This tests only repetitions of objects. 
x = [] x = [x, x] y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(y is not x) self.assertTrue(y[0] is not x[0]) self.assertTrue(y[0] is y[1]) def test_deepcopy_issubclass(self): # XXX Note: there's no way to test the TypeError coming out of # issubclass() -- this can only happen when an extension # module defines a "type" that doesn't formally inherit from # type. class Meta(type): pass class C: __metaclass__ = Meta self.assertEqual(copy.deepcopy(C), C) def test_deepcopy_deepcopy(self): class C(object): def __init__(self, foo): self.foo = foo def __deepcopy__(self, memo=None): return C(self.foo) x = C(42) y = copy.deepcopy(x) self.assertEqual(y.__class__, x.__class__) self.assertEqual(y.foo, x.foo) def test_deepcopy_registry(self): class C(object): def __new__(cls, foo): obj = object.__new__(cls) obj.foo = foo return obj def pickle_C(obj): return (C, (obj.foo,)) x = C(42) self.assertRaises(TypeError, copy.deepcopy, x) copy_reg.pickle(C, pickle_C, C) y = copy.deepcopy(x) def test_deepcopy_reduce_ex(self): class C(object): def __reduce_ex__(self, proto): return "" def __reduce__(self): raise test_support.TestFailed, "shouldn't call this" x = C() y = copy.deepcopy(x) self.assertTrue(y is x) def test_deepcopy_reduce(self): class C(object): def __reduce__(self): return "" x = C() y = copy.deepcopy(x) self.assertTrue(y is x) def test_deepcopy_cant(self): class C(object): def __getattribute__(self, name): if name.startswith("__reduce"): raise AttributeError, name return object.__getattribute__(self, name) x = C() self.assertRaises(copy.Error, copy.deepcopy, x) # Type-specific _deepcopy_xxx() methods def test_deepcopy_atomic(self): class Classic: pass class NewStyle(object): pass def f(): pass tests = [None, 42, 2L**100, 3.14, True, False, 1j, "hello", u"hello\u1234", f.func_code, NewStyle, xrange(10), Classic, max] for x in tests: self.assertTrue(copy.deepcopy(x) is x, repr(x)) def test_deepcopy_list(self): x = [[1, 2], 3] y = copy.deepcopy(x) 
self.assertEqual(y, x) self.assertTrue(x is not y) self.assertTrue(x[0] is not y[0]) def test_deepcopy_reflexive_list(self): x = [] x.append(x) y = copy.deepcopy(x) self.assertRaises(RuntimeError, cmp, y, x) self.assertTrue(y is not x) self.assertTrue(y[0] is y) self.assertEqual(len(y), 1) def test_deepcopy_tuple(self): x = ([1, 2], 3) y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(x is not y) self.assertTrue(x[0] is not y[0]) def test_deepcopy_reflexive_tuple(self): x = ([],) x[0].append(x) y = copy.deepcopy(x) self.assertRaises(RuntimeError, cmp, y, x) self.assertTrue(y is not x) self.assertTrue(y[0] is not x[0]) self.assertTrue(y[0][0] is y) def test_deepcopy_dict(self): x = {"foo": [1, 2], "bar": 3} y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(x is not y) self.assertTrue(x["foo"] is not y["foo"]) def test_deepcopy_reflexive_dict(self): x = {} x['foo'] = x y = copy.deepcopy(x) self.assertRaises(RuntimeError, cmp, y, x) self.assertTrue(y is not x) self.assertTrue(y['foo'] is y) self.assertEqual(len(y), 1) def test_deepcopy_keepalive(self): memo = {} x = 42 y = copy.deepcopy(x, memo) self.assertTrue(memo[id(x)] is x) def test_deepcopy_inst_vanilla(self): class C: def __init__(self, foo): self.foo = foo def __cmp__(self, other): return cmp(self.foo, other.foo) x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(y.foo is not x.foo) def test_deepcopy_inst_deepcopy(self): class C: def __init__(self, foo): self.foo = foo def __deepcopy__(self, memo): return C(copy.deepcopy(self.foo, memo)) def __cmp__(self, other): return cmp(self.foo, other.foo) x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(y is not x) self.assertTrue(y.foo is not x.foo) def test_deepcopy_inst_getinitargs(self): class C: def __init__(self, foo): self.foo = foo def __getinitargs__(self): return (self.foo,) def __cmp__(self, other): return cmp(self.foo, other.foo) x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) 
self.assertTrue(y is not x) self.assertTrue(y.foo is not x.foo) def test_deepcopy_inst_getstate(self): class C: def __init__(self, foo): self.foo = foo def __getstate__(self): return {"foo": self.foo} def __cmp__(self, other): return cmp(self.foo, other.foo) x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(y is not x) self.assertTrue(y.foo is not x.foo) def test_deepcopy_inst_setstate(self): class C: def __init__(self, foo): self.foo = foo def __setstate__(self, state): self.foo = state["foo"] def __cmp__(self, other): return cmp(self.foo, other.foo) x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(y is not x) self.assertTrue(y.foo is not x.foo) def test_deepcopy_inst_getstate_setstate(self): class C: def __init__(self, foo): self.foo = foo def __getstate__(self): return self.foo def __setstate__(self, state): self.foo = state def __cmp__(self, other): return cmp(self.foo, other.foo) x = C([42]) y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(y is not x) self.assertTrue(y.foo is not x.foo) def test_deepcopy_reflexive_inst(self): class C: pass x = C() x.foo = x y = copy.deepcopy(x) self.assertTrue(y is not x) self.assertTrue(y.foo is y) # _reconstruct() def test_reconstruct_string(self): class C(object): def __reduce__(self): return "" x = C() y = copy.copy(x) self.assertTrue(y is x) y = copy.deepcopy(x) self.assertTrue(y is x) def test_reconstruct_nostate(self): class C(object): def __reduce__(self): return (C, ()) x = C() x.foo = 42 y = copy.copy(x) self.assertTrue(y.__class__ is x.__class__) y = copy.deepcopy(x) self.assertTrue(y.__class__ is x.__class__) def test_reconstruct_state(self): class C(object): def __reduce__(self): return (C, (), self.__dict__) def __cmp__(self, other): return cmp(self.__dict__, other.__dict__) __hash__ = None # Silence Py3k warning x = C() x.foo = [42] y = copy.copy(x) self.assertEqual(y, x) y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(y.foo is not x.foo) def 
test_reconstruct_state_setstate(self): class C(object): def __reduce__(self): return (C, (), self.__dict__) def __setstate__(self, state): self.__dict__.update(state) def __cmp__(self, other): return cmp(self.__dict__, other.__dict__) __hash__ = None # Silence Py3k warning x = C() x.foo = [42] y = copy.copy(x) self.assertEqual(y, x) y = copy.deepcopy(x) self.assertEqual(y, x) self.assertTrue(y.foo is not x.foo) def test_reconstruct_reflexive(self): class C(object): pass x = C() x.foo = x y = copy.deepcopy(x) self.assertTrue(y is not x) self.assertTrue(y.foo is y) # Additions for Python 2.3 and pickle protocol 2 def test_reduce_4tuple(self): class C(list): def __reduce__(self): return (C, (), self.__dict__, iter(self)) def __cmp__(self, other): return (cmp(list(self), list(other)) or cmp(self.__dict__, other.__dict__)) __hash__ = None # Silence Py3k warning x = C([[1, 2], 3]) y = copy.copy(x) self.assertEqual(x, y) self.assertTrue(x is not y) self.assertTrue(x[0] is y[0]) y = copy.deepcopy(x) self.assertEqual(x, y) self.assertTrue(x is not y) self.assertTrue(x[0] is not y[0]) def test_reduce_5tuple(self): class C(dict): def __reduce__(self): return (C, (), self.__dict__, None, self.iteritems()) def __cmp__(self, other): return (cmp(dict(self), list(dict)) or cmp(self.__dict__, other.__dict__)) __hash__ = None # Silence Py3k warning x = C([("foo", [1, 2]), ("bar", 3)]) y = copy.copy(x) self.assertEqual(x, y) self.assertTrue(x is not y) self.assertTrue(x["foo"] is y["foo"]) y = copy.deepcopy(x) self.assertEqual(x, y) self.assertTrue(x is not y) self.assertTrue(x["foo"] is not y["foo"]) def test_copy_slots(self): class C(object): __slots__ = ["foo"] x = C() x.foo = [42] y = copy.copy(x) self.assertTrue(x.foo is y.foo) def test_deepcopy_slots(self): class C(object): __slots__ = ["foo"] x = C() x.foo = [42] y = copy.deepcopy(x) self.assertEqual(x.foo, y.foo) self.assertTrue(x.foo is not y.foo) def test_copy_list_subclass(self): class C(list): pass x = C([[1, 2], 3]) 
x.foo = [4, 5] y = copy.copy(x) self.assertEqual(list(x), list(y)) self.assertEqual(x.foo, y.foo) self.assertTrue(x[0] is y[0]) self.assertTrue(x.foo is y.foo) def test_deepcopy_list_subclass(self): class C(list): pass x = C([[1, 2], 3]) x.foo = [4, 5] y = copy.deepcopy(x) self.assertEqual(list(x), list(y)) self.assertEqual(x.foo, y.foo) self.assertTrue(x[0] is not y[0]) self.assertTrue(x.foo is not y.foo) def test_copy_tuple_subclass(self): class C(tuple): pass x = C([1, 2, 3]) self.assertEqual(tuple(x), (1, 2, 3)) y = copy.copy(x) self.assertEqual(tuple(y), (1, 2, 3)) def test_deepcopy_tuple_subclass(self): class C(tuple): pass x = C([[1, 2], 3]) self.assertEqual(tuple(x), ([1, 2], 3)) y = copy.deepcopy(x) self.assertEqual(tuple(y), ([1, 2], 3)) self.assertTrue(x is not y) self.assertTrue(x[0] is not y[0]) def test_getstate_exc(self): class EvilState(object): def __getstate__(self): raise ValueError, "ain't got no stickin' state" self.assertRaises(ValueError, copy.copy, EvilState()) def test_copy_function(self): self.assertEqual(copy.copy(global_foo), global_foo) def foo(x, y): return x+y self.assertEqual(copy.copy(foo), foo) bar = lambda: None self.assertEqual(copy.copy(bar), bar) def test_deepcopy_function(self): self.assertEqual(copy.deepcopy(global_foo), global_foo) def foo(x, y): return x+y self.assertEqual(copy.deepcopy(foo), foo) bar = lambda: None self.assertEqual(copy.deepcopy(bar), bar) def _check_weakref(self, _copy): class C(object): pass obj = C() x = weakref.ref(obj) y = _copy(x) self.assertTrue(y is x) del obj y = _copy(x) self.assertTrue(y is x) def test_copy_weakref(self): self._check_weakref(copy.copy) def test_deepcopy_weakref(self): self._check_weakref(copy.deepcopy) def _check_copy_weakdict(self, _dicttype): class C(object): pass a, b, c, d = [C() for i in xrange(4)] u = _dicttype() u[a] = b u[c] = d v = copy.copy(u) self.assertFalse(v is u) self.assertEqual(v, u) self.assertEqual(v[a], b) self.assertEqual(v[c], d) self.assertEqual(len(v), 2) 
del c, d self.assertEqual(len(v), 1) x, y = C(), C() # The underlying containers are decoupled v[x] = y self.assertNotIn(x, u) def test_copy_weakkeydict(self): self._check_copy_weakdict(weakref.WeakKeyDictionary) def test_copy_weakvaluedict(self): self._check_copy_weakdict(weakref.WeakValueDictionary) def test_deepcopy_weakkeydict(self): class C(object): def __init__(self, i): self.i = i a, b, c, d = [C(i) for i in xrange(4)] u = weakref.WeakKeyDictionary() u[a] = b u[c] = d # Keys aren't copied, values are v = copy.deepcopy(u) self.assertNotEqual(v, u) self.assertEqual(len(v), 2) self.assertFalse(v[a] is b) self.assertFalse(v[c] is d) self.assertEqual(v[a].i, b.i) self.assertEqual(v[c].i, d.i) del c self.assertEqual(len(v), 1) def test_deepcopy_weakvaluedict(self): class C(object): def __init__(self, i): self.i = i a, b, c, d = [C(i) for i in xrange(4)] u = weakref.WeakValueDictionary() u[a] = b u[c] = d # Keys are copied, values aren't v = copy.deepcopy(u) self.assertNotEqual(v, u) self.assertEqual(len(v), 2) (x, y), (z, t) = sorted(v.items(), key=lambda pair: pair[0].i) self.assertFalse(x is a) self.assertEqual(x.i, a.i) self.assertTrue(y is b) self.assertFalse(z is c) self.assertEqual(z.i, c.i) self.assertTrue(t is d) del x, y, z, t del d self.assertEqual(len(v), 1) def test_deepcopy_bound_method(self): class Foo(object): def m(self): pass f = Foo() f.b = f.m g = copy.deepcopy(f) self.assertEqual(g.m, g.b) self.assertTrue(g.b.im_self is g) g.b() def global_foo(x, y): return x+y def test_main(): test_support.run_unittest(TestCopy) if __name__ == "__main__": test_main()
# ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================

from __future__ import print_function

__author__ = 'John Sirois'

from . import Command

import json
import os
import pkgutil
import shutil
import subprocess
import traceback

import pkg_resources

from copy import copy
from datetime import datetime

from twitter.common.collections import OrderedSet
from twitter.common.config import Properties
from twitter.pants import is_exported, is_jvm, is_doc
from twitter.pants.base import Address, Target
from twitter.pants.base.generator import Generator
from twitter.pants.targets import JavaLibrary
from twitter.pants.pants_doc import DocBuilder

# Packaged static assets (css/js/html templates) copied into the doc output.
_ASSETS_DIR = 'doc/assets'

class Doc(Command):
  """Generates documentation for a set of targets."""

  __command__ = 'doc'

  @staticmethod
  def _is_documentable(target):
    # Only JavaLibrary and doc targets are supported today.
    # TODO(John Sirois): support java_thrift_library, java_protobuf_library, pydoc and scaladoc
    return isinstance(target, JavaLibrary) or is_doc(target)

  def setup_parser(self, parser, args):
    """Registers the doc command's options on the shared option parser."""
    parser.set_usage("%prog doc ([spec]...)")
    parser.add_option("--provides", action="store_true", dest = "only_provides", default = False,
                      help = "Specifies docs should only be generated for build targets with "
                             "a provides artifact")
    parser.add_option("--ignore-failure", action="store_true", dest = "ignore_failure",
                      default = False,
                      help = "Specifies that javadoc failures should not be reflected in this "
                             "command's exit code.")
    parser.add_option("--link-changelog", action="store_true", dest = "link_changelog",
                      default = False,
                      help = "Causes a link to the artifact's changelog to be shown in the "
                             "artifact header")
    parser.add_option("--title", dest = "title", default = "API Javadoc",
                      help = "Specifies a custom title for the generated javadoc site.")
    parser.epilog = """Generates documentation for the specified targets or else all documentable
    targets if none are specified.  If any of the specified targets are not documentable, aborts
    with a non-zero exit code"""

  def __init__(self, root_dir, parser, argv, target_path = None):
    """Resolves the targets to document (explicit specs or a full scan) and
    computes the output/tool paths used by the javadoc/ivy invocations."""
    Command.__init__(self, root_dir, parser, argv)

    self.only_provides = self.options.only_provides
    self.link_changelog = self.options.link_changelog

    if self.args:
      self.targets = self._parse_targets(root_dir)
    else:
      # No specs given: document every documentable target in the repo.
      def get_targets():
        for address in Command.scan_addresses(root_dir):
          target = Target.get(address)
          if Doc._is_documentable(target):
            yield target
      self.targets = list(get_targets())

    if target_path:
      self.target_path = target_path
    else:
      self.target_path = os.path.join(root_dir, 'target', 'pants.doc')

    self.java_src_prefix = os.path.join(root_dir, 'src', 'java')
    self.ivy_jar = os.path.join(root_dir, 'build-support', 'ivy', 'lib', 'ivy-2.2.0.jar')
    self.ivy_settings = os.path.join(root_dir, 'build-support', 'ivy', 'ivysettings.xml')

  def _artifact_data_path(self, base):
    """Path of the generated artifacts.js metadata file under `base`."""
    return os.path.abspath(os.path.join(base, 'artifacts.js'))

  def _parse_targets(self, root_dir):
    """Resolves command-line specs to documentable targets; errors out on
    unparseable specs or non-documentable targets."""
    targets = OrderedSet()
    for spec in self.args:
      try:
        address = Address.parse(root_dir, spec)
      except:
        self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

      try:
        target = Target.get(address)
      except:
        self.error("Problem parsing target %s: %s" % (address, traceback.format_exc()))

      if not Doc._is_documentable(target):
        self.error("Target: %s is not documentable" % address)
      targets.add(target)
    return targets

  @staticmethod
  def _walk_assets(directory):
    """os.walk-style generator over the packaged assets directory, yielding
    (dir, subdirs, files) via pkg_resources (assets live inside the package,
    not on disk)."""
    assets = pkg_resources.resource_listdir(__name__, directory)
    files, dirs = [], []
    for f in assets:
      if pkg_resources.resource_isdir(__name__, os.path.join(directory, f)):
        dirs.append(f)
      else:
        files.append(f)
    yield directory, dirs, files
    for dir in dirs:
      for r, d, f in Doc._walk_assets(os.path.join(directory, dir)):
        yield r, d, f

  def execute(self):
    """Entry point: wipes the output dir, then runs javadoc and pantsdoc
    generation. Returns a non-zero exit code on failure."""
    if os.path.exists(self.target_path):
      shutil.rmtree(self.target_path)
    # we pass self.targets and self.target_path as initial values here.
    # during recursive execution (in the case of dependencies on doc targets)
    # these are called with different targets/target/path values
    if self.execute_javadoc(self.targets, self.target_path):
      print("JavaDoc execution failed")
      return 1
    if self.execute_pantsdoc(self.targets, self.target_path):
      print("PantsDoc execution failed")
      return 1
    return 0

  def execute_javadoc(self, targets, target_path):
    """Runs the javadoc tool over the java sources of `targets`, writing the
    site into `target_path`. Returns 0 on success (or when failures are
    ignored), else javadoc's exit code."""
    if not os.path.exists(target_path):
      os.makedirs(target_path)

    all_sources, all_deps = self._extract_java_sources_and_deps(targets)
    if all_sources == []:
      # Nothing to document (e.g. pure doc targets).
      return 0
    doc_target = self._create_doc_target(targets, all_sources, all_deps)

    # Resolve the classpath via ivy into an argfile consumed by javadoc.
    classpath_result, classpath_file = self._create_classpath_file(doc_target, target_path)
    if classpath_result != 0:
      print("Failed to generate javadoc classpath.")
      return classpath_result

    self._create_artifact_data(targets, target_path)
    sources_file = self._create_sources_file(doc_target, target_path)

    command = [
      'javadoc',
      '-encoding', 'UTF-8',
      '-notimestamp',
      '-doctitle', self.options.title,
      '-use',
      '-linksource',
      # Hook the generated pages into the artifact-header javascript.
      '-top', '<script type="text/javascript">top.updateArtifact(window.location);</script>',
      '-classpath', '@%s' % classpath_file,
      '-d', target_path,
      '@%s' % sources_file
    ]

    # Propagate JVM flags via ANT_OPTS
    if 'ANT_OPTS' in os.environ:
      for ant_opt in os.environ['ANT_OPTS'].split():
        command += [ '-J%s' % ant_opt.strip() ]

    # Always provide external linking for java API
    offlinelinks = set([ 'http://download.oracle.com/javase/6/docs/api/' ])
    def link(target):
      for jar in target.jar_dependencies:
        if jar.apidocs:
          offlinelinks.add(jar.apidocs)
    doc_target.walk(link, is_jvm)
    for link in offlinelinks:
      command.extend(['-linkoffline', link, link])

    javadoc_result = subprocess.call(command)
    if self.options.ignore_failure or javadoc_result == 0:
      # Overlay the packaged assets (skipping the javadoc*.html templates,
      # which are handled specially below).
      for root, _, files in self._walk_assets(_ASSETS_DIR):
        newdir = os.path.join(target_path, os.path.relpath(root, _ASSETS_DIR))
        if not os.path.exists(newdir):
          os.makedirs(newdir)
        for filename in files:
          if not filename.startswith("javadoc"):
            with open(os.path.join(newdir, filename), 'w') as output:
              output.write(pkg_resources.resource_string(__name__, os.path.join(root, filename)))

      # figure out if we docd multiple packages
      docd_packages = OrderedSet()
      for source in doc_target.sources:
        docd_packages.add(os.path.dirname(source))

      # write javadoc.html, special casing for single package doc targets
      new_file = os.path.join(target_path, "javadoc.html")
      if len(docd_packages) > 1:
        source_file = "javadoc.html"
      else:
        source_file = "javadoc-single-package.html"
      with open(new_file, 'w') as output:
        output.write(pkg_resources.resource_string(__name__,
                                                   os.path.join(_ASSETS_DIR, source_file)))
      return 0
    return javadoc_result

  def _extract_java_sources_and_deps(self, targets):
    """Collects the existing java source paths and the intransitive jar
    dependencies of the documentable (non-doc) targets."""
    all_sources = []
    all_deps = OrderedSet()
    for target in targets:
      if (not self.only_provides or is_exported(target)) and (not is_doc(target)):
        for source in target.sources:
          source_path = os.path.join(self.java_src_prefix, source)
          if os.path.exists(source_path):
            all_sources.append(source_path)
          else:
            print("skipping %s" % source_path)

        for jar_dep in target.jar_dependencies:
          if jar_dep.rev:
            # Copy so the intransitive flag doesn't leak back to the target.
            all_deps.add(copy(jar_dep).intransitive())
    return all_sources, all_deps

  def _create_doc_target(self, targets, all_sources, all_deps):
    """Builds one synthetic JavaLibrary aggregating all sources/deps so a
    single javadoc/ivy run covers everything."""
    def create_meta_target():
      return JavaLibrary('pants.doc.deps',
                         all_sources,
                         dependencies = all_deps,
                         is_meta = True)

    # TODO(John Sirois): Find a better way to do_in_context when we don't care about the context
    return list(targets)[0].do_in_context(create_meta_target)

  def _create_classpath_file(self, target, target_path):
    """Invokes ivy to resolve `target`'s deps into a classpath argfile;
    returns (ivy exit code, argfile path)."""
    classpath_file = os.path.abspath(os.path.join(target_path, 'classpath.txt'))
    classpath_result = subprocess.call([
      'java',
      '-jar', self.ivy_jar,
      '-warn',
      '-settings', self.ivy_settings,
      '-ivy', self._create_ivy_file(target, target_path),
      '-cachepath', classpath_file,
    ])
    return classpath_result, classpath_file

  def _create_ivy_file(self, target, target_path):
    """Renders the ivy.xml for `target` from the packaged mustache template;
    returns its path."""
    ivy_file = os.path.abspath(os.path.join(target_path, 'ivy.xml'))
    template_data = target._create_template_data()
    template_path = os.path.join('ivy_resolve', 'ivy.mustache')
    generator = Generator(pkgutil.get_data('twitter.pants.tasks', template_path),
                          root_dir=self.root_dir,
                          lib=template_data)
    with open(ivy_file, 'w') as outfile:
      generator.write(outfile)
    return ivy_file

  def _create_artifact_data(self, targets, target_path):
    """Writes artifacts.js mapping each exported source file to its published
    artifact coordinates (org/name/rev), read from the push db."""
    props_by_repo = {}
    def get_publish_properties(target):
      # Push dbs are loaded lazily, once per repo.
      if target.provides.repo not in props_by_repo:
        with open(target.provides.repo.push_db) as props:
          props_by_repo[target.provides.repo] = Properties.load(props)
      return props_by_repo.get(target.provides.repo)

    data = {}
    for target in targets:
      if is_exported(target):
        props = get_publish_properties(target)
        for source in target.sources:
          source_path = os.path.join(self.java_src_prefix, source)
          key = '%s%%%s' % (target.provides.org, target.provides.name)
          if os.path.exists(source_path):
            if 'revision.major.%s' % key in props:
              major = props['revision.major.%s' % key]
              minor = props['revision.minor.%s' % key]
              patch = props['revision.patch.%s' % key]
              revision = '%s.%s.%s' % (major, minor, patch)
            else:
              revision = 'NOT-PUBLISHED-YET'
            data[source] = dict(
              org = target.provides.org,
              name = target.provides.name,
              rev = revision,
              artifactBaseUrl = target.provides.repo.url
            )

    with open(self._artifact_data_path(target_path), mode = 'w') as data_file:
      print("var artifacts = %s;" % json.dumps(data, sort_keys = True, indent = 2),
            file=data_file)
      print("artifacts.title = '%s';" % self.options.title, file=data_file)
      print("artifacts.publishDate = '%s';" % (
        datetime.now().strftime('%m/%d/%Y %I:%M %p')
      ), file=data_file)
      print("artifacts.hasChangelog = %s;" % (
        'true' if self.link_changelog else 'false'
      ), file=data_file)

  def _create_sources_file(self, target, target_path):
    """Writes javadoc's @-argfile listing one source path per line."""
    sources_file = os.path.abspath(os.path.join(target_path, 'sources.txt'))
    with open(sources_file, 'w') as argfile:
      argfile.writelines(['%s\n' % s for s in target.sources])
    return sources_file

  def execute_pantsdoc(self, targets, target_path):
    """Builds pants doc targets, recursively generating javadoc/pantsdoc for
    their dependencies into nested output directories."""
    if not os.path.exists(target_path):
      os.makedirs(target_path)
    try:
      doc_targets = filter(is_doc, targets)
      for doc_target in doc_targets:
        def exec_doc(dependency):
          for dep_target in dependency.resolve():
            # TODO: document how doc projects get nested
            # in pants_doc output
            if is_jvm(dep_target):
              self.execute_javadoc([dep_target],
                                   os.path.join(target_path,
                                                doc_target.id,
                                                dep_target.id))
            if is_doc(dep_target) and not dep_target in targets:
              self.execute_pantsdoc([dep_target],
                                    os.path.join(target_path,
                                                 doc_target.id,
                                                 dep_target.id))
        doc_target.walk(exec_doc)
      executor = DocBuilder(target_path)
      return executor.build(doc_targets, self.args)
    except:
      self.error("Problem executing PantsDocBuilder for targets %s: %s" % (
        doc_targets, traceback.format_exc()))
import os import subprocess from buildutil.config import ( DEFAULT_LOCATE_ORDER, LOCATE_ARTIFACT_ORDER, LOCATE_SOURCE_ORDER, ) from buildutil.target_patterns import TargetPatterns from buildutil.util import validate_pkg_path, validate_target_name class TargetRule(object): def __init__( self, config, pkg_path, name, sources=(), dependencies=(), artifacts=(), visibility_set=None): """ sources: list of files (in relative path form) in the current directory (may also be in sub directories). dependencies: list of target path. artifacts: list of "output" files (in relative path form). visibility_set: list of visibility targets (None means use package default) """ assert validate_pkg_path(pkg_path), 'Invalid package path: %s' % pkg_path assert validate_target_name(name), ( 'Invalid target name: %s (pkg: %s)' % (name, pkg_path)) self._pkg_path = pkg_path self.name = name self.config = config self._sources = sources self.dependency_patterns = TargetPatterns(pkg_path) self.dependency_patterns.set_patterns(dependencies) self._artifacts = artifacts if visibility_set is not None: self.visibility_patterns = TargetPatterns(pkg_path) self.visibility_patterns.set_patterns(visibility_set) else: # bind when the target is added to a package during registration. self.visibility_patterns = None # The following are initialized in later analysis passes. self.deps_binded = False self.dependencies = {} # None for not checked, False for no cycle, True for cycle. self.in_cycle = None # NOTE: If this was implemented as a server (like blaze), then we should # check source content hashes/size instead of sources/artifacts mtime self.soruces_max_mtime = None self.artifacts_max_mtime = None # None for not checked, True if has build, False if build was unnecessary. 
self.has_modified = None def target_path(self): """DO NOT OVERRIDE""" return self._pkg_path + ':' + self.name def pkg_path(self, name=''): if not name: return self._pkg_path return os.path.join(self._pkg_path, name) def src_abs_path(self, name=''): """DO NOT OVERRIDE""" return self.config.pkg_path_to_src_abs_path(self.pkg_path(name=name)) def genfile_abs_path(self, name=''): """DO NOT OVERRIDE""" return self.config.pkg_path_to_genfile_abs_path(self.pkg_path(name=name)) def build_abs_path(self, name=''): """DO NOT OVERRIDE""" return self.config.pkg_path_to_build_abs_path(self.pkg_path(name=name)) def is_visible_to(self, target): """DO NOT OVERRIDE""" if self.pkg_path() == target.pkg_path(): return True return self.visibility_patterns.matches(target) def _get_max_mtime( self, files, locate_order, verify_existence=False): """DO NOT OVERRIDE""" max_mtime = None for f in files: abs_path = self.locate_file(f, locate_order=locate_order) if abs_path is None: assert not verify_existence, ( 'Failed to locate: %s (target: %s)' % (f, self.target_path())) return None mtime = os.lstat(abs_path).st_mtime # Don't follow links if max_mtime is None or max_mtime < mtime: max_mtime = mtime return max_mtime def update_sources_max_mtime(self): """DO NOT OVERRIDE""" self.sources_max_mtime = self._get_max_mtime( self.sources(), self.locate_source_order(), verify_existence=True) def update_artifacts_max_mtime(self, verify_existence=True): """DO NOT OVERRIDE""" artifacts = self.artifacts() assert artifacts, 'Target must have at least one artifact: %s (pkg: %s)' % ( name, pkg.pkg_path) self.artifacts_max_mtime = self._get_max_mtime( artifacts, self.locate_artifact_order(), verify_existence=verify_existence) def execute_cmd(self, cmd_str, additional_env=None): """DO NOT OVERRIDE. 
Use this for shelling out commands for building / testing.""" env = { 'PROJECT_ROOT_DIR' : self.config.project_dir_abs_path, 'SRC_DIR' : self.config.src_dir_abs_path, 'GENFILE_DIR' : self.config.genfile_dir_abs_path, 'BUILD_DIR': self.config.build_dir_abs_path, 'PACKAGE': self.pkg_path()[2:], 'TARGET': self.name, } if additional_env: env.update(additional_env) print 'Executing:', cmd_str p = subprocess.Popen( cmd_str, shell=True, cwd=self.config.project_dir_abs_path, env=env) r = p.wait() assert r == 0, 'Failed to execute: %s' % cmd_str def locate_file( self, file_name, locate_order=DEFAULT_LOCATE_ORDER): """DO NOT OVERRIDE""" return self.config.locate_file( self.pkg_path(name=file_name), locate_order=locate_order) def locate_source_order(self): return LOCATE_SOURCE_ORDER def locate_artifact_order(self): return LOCATE_ARTIFACT_ORDER @classmethod def is_unique_target(cls): """Return false if the target can be register multiple times into the same package (The first entry is used; the rest are ignored).""" return True @classmethod def generate_targets( cls, targets_accumulator, config, current_pkg_path, **kwargs): """Override to customize target registration (see PyBinaryTargetRule for example).""" targets_accumulator.append( cls(config=config, pkg_path=current_pkg_path, **kwargs)) @classmethod def rule_name(cls): """The function name used in BUILD file, e.g., cc_library""" raise NotImplemented @classmethod def include_in_all_targets(cls): """When true, return the target as part of pkg.get_all_targets(), which is used for :all or ... target expansion""" return True def is_test_rule(self): """When building libraries / binaries, test targets are ignored. When true, must implement test""" return False def sources(self): """Override if the source list cannot be computed statically during initialization. 
Can assume dependencies are binded and built when overriding""" return self._sources def artifacts(self): """Override if the artifact list cannot be computed statically during initialization. Can assume dependencies are binded and built when overriding.""" return self._artifacts def list_dependencies_artifacts(self): result = set() for d in self.dependencies.values(): result = result.union(d.list_artifacts()) return result @classmethod def include_dependencies_artifacts(cls): """Controls list_artifacts default behavior. Useful for stopping artifacts from propagating beyond a certain target.""" return True def list_artifacts(self): result = set() for name in self.artifacts(): result.add(self.pkg_path(name=name)) if self.include_dependencies_artifacts(): result = result.union(self.list_dependencies_artifacts()) return result def should_build(self): # Artifacts are created without source and dependencies. if not self.sources() and not self.dependencies: return True # First time building the artifacts. if self.artifacts_max_mtime is None: return True if self.sources(): assert self.sources_max_mtime # Sources are newer than the artifacts. if self.artifacts_max_mtime < self.sources_max_mtime: return True for dep in self.dependencies.values(): assert dep.has_modified is not None # A dependency changed within the same session (i.e., when multiple # targets are specified in the same build command). This is more # accurate than the mtime check. if dep.has_modified: return True assert dep.artifacts_max_mtime is not None # The dependency changed from a previous session. if self.artifacts_max_mtime < dep.artifacts_max_mtime: return True return False def build(self): """How the target should be build. Returns true if build succeeded, false otherwise.""" print 'BUILD', self.name #raise NotImplemented return True def test(self): """How the target should be tested. 
Returns true if test succeeded, false otherwise.""" try: self.execute_cmd(self.build_abs_path(self.name)) except AssertionError: return False return True
""" Test BSDF plugin. """ import numpy as np from pytest import raises from imageio import core import imageio from imageio.plugins import _bsdf as bsdf import pytest import sys xfail_big_endian = pytest.mark.xfail( sys.byteorder == "big", reason="expected failure on big-endian" ) def test_select(test_images): F = imageio.formats["BSDF"] assert F.name == "BSDF" fname1 = test_images / "chelsea.bsdf" assert F.can_read(core.Request(fname1, "rI")) assert F.can_write(core.Request(fname1, "wI")) assert F.can_read(core.Request(fname1, "ri")) assert F.can_read(core.Request(fname1, "rv")) assert type(imageio.formats[".bsdf"]) is type(F) assert type( imageio.formats.search_write_format(core.Request(fname1, "wi")) ) is type(F) assert type(imageio.formats.search_read_format(core.Request(fname1, "ri"))) is type( F ) assert type( imageio.formats.search_write_format(core.Request(fname1, "wI")) ) is type(F) assert type(imageio.formats.search_read_format(core.Request(fname1, "rI"))) is type( F ) def test_not_an_image(tmp_path): fname = str(tmp_path / "notanimage.bsdf") # Not an image not a list bsdf.save(fname, 1) with raises(RuntimeError): imageio.imread(fname) # A list with non-images bsdf.save(fname, [1]) with raises(RuntimeError): imageio.imread(fname) # An empty list could work though bsdf.save(fname, []) with raises(IndexError): imageio.imread(fname) assert imageio.mimread(fname) == [] @xfail_big_endian def test_singleton(test_images, tmp_path): im1 = imageio.imread(test_images / "chelsea.png") fname = str(tmp_path / "chelsea.bsdf") imageio.imsave(fname, im1) # Does it look alright if we open it in bsdf without extensions? 
raw = bsdf.load(fname, []) assert isinstance(raw, dict) assert set(raw.keys()) == set(["meta", "array"]) assert isinstance(raw["meta"], dict) assert isinstance(raw["array"], dict) assert raw["array"]["shape"] == list(im1.shape) assert isinstance(raw["array"]["data"], bytes) # Read singleton image as singleton im2 = imageio.imread(fname) assert np.all(im1 == im2) # Read singleton image as series ims = imageio.mimread(fname) assert len(ims) == 1 and np.all(im1 == ims[0]) # Read + write back without image extensions bsdf.save(fname, bsdf.load(fname)) im3 = imageio.mimread(fname) assert np.all(im1 == im3) @xfail_big_endian def test_series(test_images, tmp_path): im1 = imageio.imread(test_images / "chelsea.png") ims1 = [im1, im1 * 0.8, im1 * 0.5] fname = str(tmp_path / "chelseam.bsdf") imageio.mimsave(fname, ims1) # Does it look alright if we open it in bsdf without extensions? raw = bsdf.load(fname, []) assert isinstance(raw, list) and len(raw) == 3 for r in raw: assert set(r.keys()) == set(["meta", "array"]) assert isinstance(r["meta"], dict) assert isinstance(r["array"], dict) assert r["array"]["shape"] == list(im1.shape) assert isinstance(r["array"]["data"], bytes) # Read multi-image as singleton im2 = imageio.imread(fname) assert np.all(im1 == im2) # Read multi-image as series ims2 = imageio.mimread(fname) assert len(ims2) == 3 and all(np.all(ims1[i] == ims2[i]) for i in range(3)) # Read + write back without image extensions bsdf.save(fname, bsdf.load(fname)) ims3 = imageio.mimread(fname) assert len(ims3) == 3 and all(np.all(ims1[i] == ims3[i]) for i in range(3)) @xfail_big_endian def test_series_unclosed(test_images, tmp_path): im1 = imageio.imread(test_images / "chelsea.png") ims1 = [im1, im1 * 0.8, im1 * 0.5] fname = tmp_path / "chelseam.bsdf" w = imageio.get_writer(fname) for im in ims1: w.append_data(im) w._close = lambda: None # nope, leave stream open w.close() # read non-streaming, reads all frames on opening (but skips over blobs r = 
imageio.get_reader(fname) assert r.get_length() == 3 # not np.inf because not streaming # read streaming and get all r = imageio.get_reader(fname, random_access=False) assert r.get_length() == np.inf # ims2 = [im for im in r] assert len(ims2) == 3 and all(np.all(ims1[i] == ims2[i]) for i in range(3)) # read streaming and read one r = imageio.get_reader(fname, random_access=False) assert r.get_length() == np.inf # assert np.all(ims1[2] == r.get_data(2)) @xfail_big_endian def test_random_access(test_images, tmp_path): im1 = imageio.imread(test_images / "chelsea.png") ims1 = [im1, im1 * 0.8, im1 * 0.5] fname = tmp_path / "chelseam.bsdf" imageio.mimsave(fname, ims1) r = imageio.get_reader(fname) for i in (1, 0, 2, 0, 1, 2): assert np.all(ims1[i] == r.get_data(i)) # Note that if we would not get the data of one image in the series, # these bytes are never read. @xfail_big_endian def test_volume(test_images, tmp_path): fname1 = test_images / "stent.npz" vol1 = imageio.imread(fname1) assert vol1.shape == (256, 128, 128) fname = tmp_path / "stent.bsdf" imageio.volsave(fname, vol1) vol2 = imageio.volread(fname) assert vol1.shape == vol2.shape assert np.all(vol1 == vol2) @pytest.mark.needs_internet def test_from_url(test_images): im = imageio.imread( "https://raw.githubusercontent.com/imageio/" + "imageio-binaries/master/images/chelsea.bsdf" ) assert im.shape == (300, 451, 3) r = imageio.get_reader( "https://raw.githubusercontent.com/imageio/" + "imageio-binaries/master/images/newtonscradle.bsdf" ) # Can read, as long as its forward r.get_data(0) r.get_data(10) # No rewinding, because we read in streaming mode with raises(IndexError): r.get_data(9)
from django.conf import settings
from django.db import transaction

from dimagi.utils.logging import notify_exception

from corehq.apps.users.dbaccessors import get_user_id_by_username
from corehq.apps.users.util import cached_user_id_to_username, format_username

from .models import (
    DeviceReportEntry,
    ForceCloseEntry,
    UserEntry,
    UserErrorEntry,
)
from .tasks import send_device_log_to_sumologic


def device_users_by_xform(xform_id):
    """Return the distinct usernames recorded in one submission's user log."""
    return list(
        UserEntry.objects.filter(xform_id__exact=xform_id)
        .distinct('username').values_list('username', flat=True)
    )


def _force_list(obj_or_list):
    # Device logs may serialize a single entry as a bare dict instead of a
    # one-element list; normalize to a list.
    return obj_or_list if isinstance(obj_or_list, list) else [obj_or_list]


def _get_log_entries(report, report_slug):
    # `report` is a list of subreports; yield every entry under `report_slug`.
    for subreport in report:
        # subreport should be {"log": <one or more entry models>}
        if isinstance(subreport, dict) and report_slug in subreport:
            entry_or_entries = subreport.get(report_slug)
            if isinstance(entry_or_entries, list):
                for entry in entry_or_entries:
                    yield entry
            else:
                yield entry_or_entries


def _get_logs(form, report_name, report_slug):
    """
    Returns a list of log entries matching report_name.report_slug
    These entries are 1-to-1 with the phonelog models (DeviceReportEntry,
    UserErrorEntry, UserEntry).
    """
    # The report may be absent, None, a single dict, or a list of subreports.
    report = form.get(report_name, {}) or {}
    if isinstance(report, list):
        return list(_get_log_entries(report, report_slug))
    return _force_list(report.get(report_slug, []))


@transaction.atomic
def process_device_log(domain, xform, force_logs):
    """Persist all device-log subreports of one form submission atomically.

    When `force_logs` is set, the matching DeviceLogRequest (if any) is
    cleared afterwards, still within the same transaction.
    """
    _process_user_subreport(xform)
    _process_log_subreport(domain, xform)
    _process_user_error_subreport(domain, xform)
    _process_force_close_subreport(domain, xform)
    if force_logs:
        clear_device_log_request(domain, xform)


def _process_user_subreport(xform):
    """Bulk-create UserEntry rows for the submission (idempotent)."""
    # Guard against reprocessing the same form submission.
    if UserEntry.objects.filter(xform_id=xform.form_id).exists():
        return
    userlogs = _get_logs(xform.form_data, 'user_subreport', 'user')
    to_save = []
    for i, log in enumerate(userlogs):
        to_save.append(UserEntry(
            xform_id=xform.form_id,
            i=i,
            user_id=log["user_id"],
            username=log["username"],
            sync_token=log["sync_token"],
            server_date=xform.received_on
        ))
    UserEntry.objects.bulk_create(to_save)


def _process_log_subreport(domain, xform):
    """Bulk-create DeviceReportEntry rows for the submission (idempotent)."""
    if DeviceReportEntry.objects.filter(xform_id=xform.form_id).exists():
        return
    form_data = xform.form_data
    logs = _get_logs(form_data, 'log_subreport', 'log')
    to_save = []
    for i, log in enumerate(logs):
        if not log:
            # Empty entries can occur in malformed subreports; skip them.
            continue
        logged_in_username, logged_in_user_id = _get_user_info_from_log(domain, log)
        to_save.append(DeviceReportEntry(
            xform_id=xform.form_id,
            i=i,
            domain=domain,
            type=log["type"],
            msg=log["msg"],
            # must accept either date or datetime string
            date=log["@date"],
            server_date=xform.received_on,
            app_version=form_data.get('app_version'),
            device_id=form_data.get('device_id'),
            username=logged_in_username,
            user_id=logged_in_user_id,
        ))
    DeviceReportEntry.objects.bulk_create(to_save)


def _get_user_info_from_log(domain, log):
    """Extract (username, user_id) from a login-type log entry.

    Returns (None, None) for entries that are not login events.
    """
    logged_in_username = None
    logged_in_user_id = None
    if log["type"] == 'login':
        # j2me log = user_id_prefix-username
        logged_in_username = log["msg"].split('-')[1]
        cc_username = format_username(logged_in_username, domain)
        logged_in_user_id = get_user_id_by_username(cc_username)
    elif log["type"] == 'user' and log["msg"][:5] == 'login':
        # android log = login|username|user_id
        msg_split = log["msg"].split('|')
        logged_in_username = msg_split[1]
        logged_in_user_id = msg_split[2]

    return logged_in_username, logged_in_user_id


def _process_user_error_subreport(domain, xform):
    """Bulk-create UserErrorEntry rows for the submission (idempotent)."""
    if UserErrorEntry.objects.filter(xform_id=xform.form_id).exists():
        return
    errors = _get_logs(xform.form_data, 'user_error_subreport', 'user_error')
    to_save = []
    for i, error in enumerate(errors):
        # beta versions have 'version', but the name should now be 'app_build'.
        # Probably fine to remove after June 2016.
        version = error['app_build'] if 'app_build' in error else error['version']
        entry = UserErrorEntry(
            domain=domain,
            xform_id=xform.form_id,
            i=i,
            app_id=error['app_id'],
            version_number=int(version),
            date=error["@date"],
            server_date=xform.received_on,
            user_id=error['user_id'],
            expr=error['expr'],
            msg=error['msg'],
            session=error['session'],
            type=error['type'],
            context_node=error.get('context_node', ''),
        )
        to_save.append(entry)
    UserErrorEntry.objects.bulk_create(to_save)


def _process_force_close_subreport(domain, xform):
    """Bulk-create ForceCloseEntry rows for the submission (idempotent)."""
    if ForceCloseEntry.objects.filter(xform_id=xform.form_id).exists():
        return
    force_closures = _get_logs(xform.form_data, 'force_close_subreport',
                               'force_close')
    to_save = []
    for force_closure in force_closures:
        # There are some testing versions going around with an outdated schema
        # This never made it into an official release, but:
        #   app_id and user_id might be missing
        #   early versions have 'build_number' - the name should now be
        #   'app_build'
        # All of this is probably fine to remove after, say June 2016.
        version = (force_closure['app_build'] if 'app_build' in force_closure
                   else force_closure['build_number'])
        entry = ForceCloseEntry(
            domain=domain,
            xform_id=xform.form_id,
            app_id=force_closure.get('app_id'),
            version_number=int(version),
            date=force_closure["@date"],
            server_date=xform.received_on,
            user_id=force_closure.get('user_id'),
            type=force_closure['type'],
            msg=force_closure['msg'],
            android_version=force_closure['android_version'],
            device_model=force_closure['device_model'],
            session_readable=force_closure['session_readable'],
            session_serialized=force_closure['session_serialized'],
        )
        to_save.append(entry)
    ForceCloseEntry.objects.bulk_create(to_save)


class SumoLogicLog(object):
    """Compiles devicelog data to be sent to sumologic

    More info here:
    https://docs.google.com/document/d/18sSwv2GRGepOIHthC6lxQAh_aUYgDcTou6w9jL2976o/edit
    """

    def __init__(self, domain, xform):
        self.domain = domain
        self.xform = xform

    def send_data(self, url):
        # Each subreport is shipped as a separate async task with its own
        # category header.
        send_device_log_to_sumologic.delay(url, self.log_subreport(),
                                           self._get_header('log'))
        send_device_log_to_sumologic.delay(url, self.user_error_subreport(),
                                           self._get_header('user_error'))
        send_device_log_to_sumologic.delay(url, self.force_close_subreport(),
                                           self._get_header('force_close'))

    def _get_header(self, fmt):
        """Build the X-Sumo-Category header: <env>/<domain>/<fmt>.

        https://docs.google.com/document/d/18sSwv2GRGepOIHthC6lxQAh_aUYgDcTou6w9jL2976o/edit#bookmark=id.ao4j7x5tjvt7
        """
        environment = 'test-env'
        if settings.SERVER_ENVIRONMENT in settings.ICDS_ENVS:
            environment = 'cas'
        if settings.SERVER_ENVIRONMENT == 'india':
            environment = 'india'
        if settings.SERVER_ENVIRONMENT == 'production':
            environment = 'prod'

        return {b"X-Sumo-Category": "{env}/{domain}/{fmt}".format(
            env=environment,
            domain=self.domain,
            fmt=fmt,
        ).encode('utf-8')}

    def _fill_base_template(self, log):
        """Render the key=value fields shared by every sumologic line."""
        from corehq.apps.receiverwrapper.util import (
            get_version_from_appversion_text,
            get_commcare_version_from_appversion_text,
        )
        template = (
            "[log_date={log_date}] "
            "[log_submission_date={log_submission_date}] "
            "[log_type={log_type}] "
            "[domain={domain}] "
            "[username={username}] "
            "[device_id={device_id}] "
            "[app_version={app_version}] "
            "[cc_version={cc_version}] "
            "[msg={msg}]"
        )
        appversion_text = self.xform.form_data.get('app_version')
        return template.format(
            log_date=log.get("@date"),
            log_submission_date=self.xform.received_on if self.xform.received_on else None,
            log_type=log.get("type"),
            domain=self.domain,
            username=self._get_user_info(log)[0],
            device_id=self.xform.form_data.get('device_id'),
            app_version=get_version_from_appversion_text(appversion_text),
            cc_version=get_commcare_version_from_appversion_text(appversion_text),
            msg=log["msg"],
        )

    def _get_user_info(self, log):
        """Return (username, user_id), falling back to the user subreport."""
        user_subreport = _get_logs(self.xform.form_data, 'user_subreport', 'user')
        username, user_id = _get_user_info_from_log(self.domain, log)
        if not user_subreport:
            return username or '', user_id or ''
        else:
            # If it's available, use the first user subreport to infer
            # username and user id
            if username is None:
                username = user_subreport[0].get('username')
            if user_id is None:
                user_id = user_subreport[0].get('user_id')
            return username, user_id

    def log_subreport(self):
        # Force-close events are shipped separately by force_close_subreport.
        logs = _get_logs(self.xform.form_data, 'log_subreport', 'log')
        return ("\n"
                .join([self._fill_base_template(log) for log in logs
                       if log.get('type') != 'forceclose'])
                .encode('utf-8'))

    def user_error_subreport(self):
        logs = _get_logs(self.xform.form_data, 'user_error_subreport',
                         'user_error')
        log_additions_template = (
            " [app_id={app_id}] [user_id={user_id}] "
            "[session={session}] [expr={expr}]"
        )
        return ("\n".join(
            self._fill_base_template(log) + log_additions_template.format(
                app_id=log.get('app_id'),
                user_id=log.get('user_id'),
                session=log.get('session'),
                expr=log.get('expr'),
            ) for log in logs
        ).encode('utf-8'))

    def force_close_subreport(self):
        logs = _get_logs(self.xform.form_data, 'force_close_subreport',
                         'force_close')
        log_additions_template = (
            " [app_id={app_id}] [user_id={user_id}] [session={session}] "
            "[device_model={device_model}]"
        )
        return ("\n".join(
            self._fill_base_template(log) + log_additions_template.format(
                app_id=log.get('app_id'),
                user_id=log.get('user_id'),
                session=log.get('session_readable'),
                device_model=log.get('device_model'),
            ) for log in logs
        ).encode('utf-8'))


def clear_device_log_request(domain, xform):
    """Delete the DeviceLogRequest satisfied by this forced log submission.

    If no matching request exists (or no username can be determined), a
    notification is logged instead of raising.
    """
    from corehq.apps.ota.models import DeviceLogRequest
    user_subreport = _get_logs(xform.form_data, 'user_subreport', 'user')
    username = (user_subreport[0].get('username') if user_subreport
                else cached_user_id_to_username(xform.user_id))
    try:
        if not username:
            # Treat "no username" the same as "no matching request".
            raise DeviceLogRequest.DoesNotExist()
        log_request = DeviceLogRequest.objects.get(
            domain=domain,
            username=username,
        )
    except DeviceLogRequest.DoesNotExist:
        msg = "Forced log submission, but no corresponding request found."
        notify_exception(None, msg, details={
            'domain': domain,
            'username': username
        })
    else:
        log_request.delete()