gt stringclasses 1 value | context stringlengths 2.49k 119k |
|---|---|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
iLO Management Interface
"""
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LI
from ironic.common.i18n import _LW
from ironic.conductor import task_manager
from ironic.drivers import base
from ironic.drivers.modules.ilo import common as ilo_common
from ironic.drivers.modules import ipmitool
# Module-level logger for this driver.
LOG = logging.getLogger(__name__)

# proliantutils is an optional dependency: try_import returns the module
# or None when it is not installed, so callers must guard against None.
ilo_error = importutils.try_import('proliantutils.exception')

# Mapping from the generic ironic boot-device names to the iLO-specific
# device names understood by proliantutils.
BOOT_DEVICE_MAPPING_TO_ILO = {boot_devices.PXE: 'NETWORK',
                              boot_devices.DISK: 'HDD',
                              boot_devices.CDROM: 'CDROM'
                              }
# Inverse mapping: iLO device name -> generic ironic boot-device name.
BOOT_DEVICE_ILO_TO_GENERIC = {v: k
                              for k, v in BOOT_DEVICE_MAPPING_TO_ILO.items()}

# Properties advertised by this management interface: the required iLO
# connection settings plus the optional cleaning-related ones.
MANAGEMENT_PROPERTIES = ilo_common.REQUIRED_PROPERTIES.copy()
MANAGEMENT_PROPERTIES.update(ilo_common.CLEAN_PROPERTIES)
# Configuration options controlling the priority of each iLO clean step.
# A priority of 0 disables the step by default.
clean_step_opts = [
    cfg.IntOpt('clean_priority_reset_ilo',
               default=1,
               help='Priority for reset_ilo clean step.'),
    cfg.IntOpt('clean_priority_reset_bios_to_default',
               default=10,
               help='Priority for reset_bios_to_default clean step.'),
    cfg.IntOpt('clean_priority_reset_secure_boot_keys_to_default',
               default=20,
               help='Priority for reset_secure_boot_keys clean step. This '
                    'step will reset the secure boot keys to manufacturing '
                    'defaults.'),
    cfg.IntOpt('clean_priority_clear_secure_boot_keys',
               default=0,
               help='Priority for clear_secure_boot_keys clean step. This '
                    'step is not enabled by default. It can be enabled to '
                    'clear all secure boot keys enrolled with iLO.'),
    cfg.IntOpt('clean_priority_reset_ilo_credential',
               default=30,
               help='Priority for reset_ilo_credential clean step. This step '
                    'requires "ilo_change_password" parameter to be updated '
                    'in node\'s driver_info with the new password.'),
]

CONF = cfg.CONF
CONF.register_opts(clean_step_opts, group='ilo')
def _execute_ilo_clean_step(node, step, *args, **kwargs):
    """Executes a particular clean step.

    :param node: an Ironic node object.
    :param step: a clean step to be executed.
    :param args: The args to be passed to the clean step.
    :param kwargs: The kwargs to be passed to the clean step.
    :raises: NodeCleaningFailure, on failure to execute step.
    """
    ilo_object = ilo_common.get_ilo_object(node)

    try:
        clean_step = getattr(ilo_object, step)
    except AttributeError:
        # The specified clean step is not present in the proliantutils
        # package. Raise exception to update the proliantutils package
        # to newer version.
        raise exception.NodeCleaningFailure(_("Clean step '%s' not "
            "found. 'proliantutils' package needs to be updated.") % step)

    try:
        clean_step(*args, **kwargs)
    except ilo_error.IloCommandNotSupportedError:
        # This clean step is not supported on Gen8 and below servers.
        # Log the failure and continue with cleaning.
        # NOTE: LOG.warn is a deprecated alias of LOG.warning.
        LOG.warning(_LW("'%(step)s' clean step is not supported on node "
                        "%(uuid)s. Skipping the clean step."),
                    {'step': step, 'uuid': node.uuid})
    except ilo_error.IloError as ilo_exception:
        raise exception.NodeCleaningFailure(_("Clean step %(step)s failed "
            "on node %(node)s with error: %(err)s") %
            {'node': node.uuid, 'step': step, 'err': ilo_exception})
class IloManagement(base.ManagementInterface):
    """Management interface for iLO-managed HP ProLiant servers.

    Provides boot-device control through the iLO, sensor data through
    IPMI, and the iLO-specific clean steps.
    """

    def get_properties(self):
        """Return the properties supported by this interface.

        :returns: dictionary of <property name>:<property description>
            entries (required iLO settings plus cleaning options).
        """
        return MANAGEMENT_PROPERTIES

    def validate(self, task):
        """Check that 'driver_info' contains required ILO credentials.

        Validates whether the 'driver_info' property of the supplied
        task's node contains the required credentials information.

        :param task: a task from TaskManager.
        :raises: InvalidParameterValue if required iLO parameters
            are not valid.
        :raises: MissingParameterValue if a required parameter is missing.
        """
        ilo_common.parse_driver_info(task.node)

    def get_supported_boot_devices(self):
        """Get a list of the supported boot devices.

        :returns: A list with the supported boot devices defined
            in :mod:`ironic.common.boot_devices`.
        """
        return list(BOOT_DEVICE_MAPPING_TO_ILO.keys())

    def get_boot_device(self, task):
        """Get the current boot device for a node.

        Returns the current boot device of the node.

        :param task: a task from TaskManager.
        :raises: MissingParameterValue if a required iLO parameter is missing.
        :raises: IloOperationError on an error from IloClient library.
        :returns: a dictionary containing:

            :boot_device:
                the boot device, one of the supported devices listed in
                :mod:`ironic.common.boot_devices` or None if it is unknown.
            :persistent:
                Whether the boot device will persist to all future boots or
                not, None if it is unknown.
        """
        ilo_object = ilo_common.get_ilo_object(task.node)
        persistent = False
        try:
            # Return one time boot device if set, else return
            # the persistent boot device
            next_boot = ilo_object.get_one_time_boot()
            if next_boot == 'Normal':
                # One time boot is not set. Check for persistent boot.
                persistent = True
                next_boot = ilo_object.get_persistent_boot_device()
        except ilo_error.IloError as ilo_exception:
            operation = _("Get boot device")
            raise exception.IloOperationError(operation=operation,
                                              error=ilo_exception)
        # Unknown iLO device names map to None for both fields.
        boot_device = BOOT_DEVICE_ILO_TO_GENERIC.get(next_boot, None)
        if boot_device is None:
            persistent = None
        return {'boot_device': boot_device, 'persistent': persistent}

    @task_manager.require_exclusive_lock
    def set_boot_device(self, task, device, persistent=False):
        """Set the boot device for a node.

        Set the boot device to use on next reboot of the node.

        :param task: a task from TaskManager.
        :param device: the boot device, one of the supported devices
            listed in :mod:`ironic.common.boot_devices`.
        :param persistent: Boolean value. True if the boot device will
            persist to all future boots, False if not.
            Default: False.
        :raises: InvalidParameterValue if an invalid boot device is
            specified.
        :raises: MissingParameterValue if a required parameter is missing.
        :raises: IloOperationError on an error from IloClient library.
        """
        try:
            boot_device = BOOT_DEVICE_MAPPING_TO_ILO[device]
        except KeyError:
            raise exception.InvalidParameterValue(_(
                "Invalid boot device %s specified.") % device)
        try:
            ilo_object = ilo_common.get_ilo_object(task.node)

            if not persistent:
                ilo_object.set_one_time_boot(boot_device)
            else:
                ilo_object.update_persistent_boot([boot_device])
        except ilo_error.IloError as ilo_exception:
            operation = _("Setting %s as boot device") % device
            raise exception.IloOperationError(operation=operation,
                                              error=ilo_exception)

        LOG.debug("Node %(uuid)s set to boot from %(device)s.",
                  {'uuid': task.node.uuid, 'device': device})

    def get_sensors_data(self, task):
        """Get sensors data.

        Delegates to the IPMI management interface after copying the iLO
        credentials into the node's IPMI properties.

        :param task: a TaskManager instance.
        :raises: FailedToGetSensorData when getting the sensor data fails.
        :raises: FailedToParseSensorData when parsing sensor data fails.
        :raises: InvalidParameterValue if required ipmi parameters
            are missing.
        :raises: MissingParameterValue if a required parameter is missing.
        :returns: returns a dict of sensor data grouped by sensor type.
        """
        ilo_common.update_ipmi_properties(task)
        ipmi_management = ipmitool.IPMIManagement()
        return ipmi_management.get_sensors_data(task)

    @base.clean_step(priority=CONF.ilo.clean_priority_reset_ilo)
    def reset_ilo(self, task):
        """Resets the iLO.

        :param task: a task from TaskManager.
        :raises: NodeCleaningFailure, on failure to execute step.
        """
        return _execute_ilo_clean_step(task.node, 'reset_ilo')

    @base.clean_step(priority=CONF.ilo.clean_priority_reset_ilo_credential)
    def reset_ilo_credential(self, task):
        """Resets the iLO password.

        Reads the new password from the 'ilo_change_password' field of the
        node's driver_info; skips the step (with a log message) when that
        field is absent.

        :param task: a task from TaskManager.
        :raises: NodeCleaningFailure, on failure to execute step.
        """
        info = task.node.driver_info
        password = info.pop('ilo_change_password', None)

        if not password:
            LOG.info(_LI("Missing 'ilo_change_password' parameter in "
                         "driver_info. Clean step 'reset_ilo_credential' is "
                         "not performed on node %s."), task.node.uuid)
            return

        _execute_ilo_clean_step(task.node, 'reset_ilo_credential', password)

        # Persist the new password as the active credential and drop the
        # one-shot 'ilo_change_password' entry from driver_info.
        info['ilo_password'] = password
        task.node.driver_info = info
        task.node.save()

    @base.clean_step(priority=CONF.ilo.clean_priority_reset_bios_to_default)
    def reset_bios_to_default(self, task):
        """Resets the BIOS settings to default values.

        Resets BIOS to default settings. This operation is currently supported
        only on HP Proliant Gen9 and above servers.

        :param task: a task from TaskManager.
        :raises: NodeCleaningFailure, on failure to execute step.
        """
        return _execute_ilo_clean_step(task.node, 'reset_bios_to_default')

    @base.clean_step(priority=CONF.ilo.
                     clean_priority_reset_secure_boot_keys_to_default)
    def reset_secure_boot_keys_to_default(self, task):
        """Reset secure boot keys to manufacturing defaults.

        Resets the secure boot keys to manufacturing defaults. This
        operation is supported only on HP Proliant Gen9 and above servers.

        :param task: a task from TaskManager.
        :raises: NodeCleaningFailure, on failure to execute step.
        """
        return _execute_ilo_clean_step(task.node, 'reset_secure_boot_keys')

    @base.clean_step(priority=CONF.ilo.clean_priority_clear_secure_boot_keys)
    def clear_secure_boot_keys(self, task):
        """Clear all secure boot keys.

        Clears all the secure boot keys. This operation is supported only
        on HP Proliant Gen9 and above servers.

        :param task: a task from TaskManager.
        :raises: NodeCleaningFailure, on failure to execute step.
        """
        return _execute_ilo_clean_step(task.node, 'clear_secure_boot_keys')
| |
import os
import logging
import io
import pandas as pd
import numpy as np
import boto3
import datetime
import sys
import json
import re
import argparse
from dataactcore.config import CONFIG_BROKER
from dataactcore.interfaces.db import GlobalDB
from dataactcore.interfaces.function_bag import update_external_data_load_date
from dataactcore.logging import configure_logging
from dataactcore.models.domainModels import ProgramActivity, ExternalDataLoadDate
from dataactcore.models.lookups import EXTERNAL_DATA_TYPE_DICT
from dataactvalidator.health_check import create_app
from dataactvalidator.scripts.loader_utils import clean_data, insert_dataframe
from dataactcore.utils.failure_threshold_exception import FailureThresholdExceededException
logger = logging.getLogger(__name__)

# S3 location (bucket / prefix / file name) of the source program activity
# file; also used as the local file name in non-AWS mode.
PA_BUCKET = CONFIG_BROKER['data_sources_bucket']
PA_SUB_KEY = 'OMB_Data/'
PA_FILE_NAME = 'DATA Act Program Activity List for Treas.csv'
# Headers (compared upper-cased) that must be present in the incoming CSV.
VALID_HEADERS = {'AGENCY_CODE', 'ALLOCATION_ID', 'ACCOUNT_CODE', 'PA_CODE', 'PA_TITLE', 'FYQ'}
def get_program_activity_file(base_path):
    """ Retrieves the program activity file to load

        Args:
            base_path: directory of domain config files

        Returns:
            the file path for the pa file either on S3 (as a BytesIO of the
            object's contents) or locally (as a path string)
    """
    if CONFIG_BROKER['use_aws']:
        s3 = boto3.resource('s3', region_name=CONFIG_BROKER['aws_region'])
        s3_object = s3.Object(PA_BUCKET, PA_SUB_KEY + PA_FILE_NAME)
        # The bucket and key are bound to the resource; Object.get() must not
        # be passed Key again (it is not a valid parameter of the action).
        response = s3_object.get()
        pa_file = io.BytesIO(response['Body'].read())
    else:
        pa_file = os.path.join(base_path, PA_FILE_NAME)

    return pa_file
def get_date_of_current_pa_upload(base_path):
    """ Gets the last time the file was uploaded to S3, or alternatively the last time the local file was modified.

        Args:
            base_path: directory of domain config files

        Returns:
            DateTime object
    """
    if not CONFIG_BROKER['use_aws']:
        # Local mode: use the file's modification time.
        local_path = get_program_activity_file(base_path)
        return datetime.datetime.utcfromtimestamp(os.path.getmtime(local_path))
    s3_client = boto3.client('s3', region_name=CONFIG_BROKER['aws_region'])
    head = s3_client.head_object(Bucket=PA_BUCKET, Key=PA_SUB_KEY + PA_FILE_NAME)
    # LastModified is coming back to us in UTC already; just drop the TZ.
    return head['LastModified'].replace(tzinfo=None)
def get_stored_pa_last_upload():
    """ Gets last recorded timestamp from last time file was processed.

        Returns:
            Upload date of most recent file we have recorded (Datetime object)
    """
    sess = GlobalDB.db().session
    stored = sess.query(ExternalDataLoadDate).filter_by(
        external_data_type_id=EXTERNAL_DATA_TYPE_DICT['program_activity_upload']).one_or_none()
    if stored is not None:
        return stored.last_load_date_start
    # No record yet: return the epoch timestamp to make sure we load the data
    # the first time through, and ideally any time the data might have been
    # wiped.
    return datetime.datetime.utcfromtimestamp(0)
def export_public_pa(raw_data):
    """ Exports a public copy of the raw file (modified columns)

        Args:
            raw_data: the raw csv data analyzed from the latest program activity file
    """
    # Mapping from the internal column name to the public header.
    updated_cols = {
        'fyq': 'REPORTING_PERIOD',
        'agency': 'AGENCY_IDENTIFIER_NAME',
        'allocation_id': 'ALLOCATION_TRANSFER_AGENCY_IDENTIFIER_CODE',
        'agency_code': 'AGENCY_IDENTIFIER_CODE',
        'account_code': 'MAIN_ACCOUNT_CODE',
        'pa_title': 'PROGRAM_ACTIVITY_NAME',
        'pa_code': 'PROGRAM_ACTIVITY_CODE',
        'omb_bureau_title_optnl': 'OMB_BUREAU_TITLE_OPTNL',
        'omb_account_title_optnl': 'OMB_ACCOUNT_TITLE_OPTNL'
    }
    # Keep only the public columns, in the mapping's order.
    raw_data = raw_data[list(updated_cols.keys())]
    # Assign the renamed headers directly. Wrapping the list in another list
    # (the previous code) creates a one-level MultiIndex instead of plain
    # column names, which changes the exported header row.
    raw_data.columns = list(updated_cols.values())

    export_name = 'program_activity.csv'
    logger.info('Exporting loaded PA file to {}'.format(export_name))
    raw_data.to_csv(export_name, index=False)
def load_program_activity_data(base_path, force_reload=False, export=False):
    """ Load program activity lookup table.

        Args:
            base_path: directory of domain config files
            force_reload: whether or not to force a reload
            export: whether or not to export a public copy of the file
    """
    now = datetime.datetime.now()
    metrics_json = {
        'script_name': 'load_program_activity.py',
        'start_time': str(now),
        'records_received': 0,
        'duplicates_dropped': 0,
        'invalid_records_dropped': 0,
        'records_deleted': 0,
        'records_inserted': 0
    }
    dropped_count = 0

    logger.info('Checking PA upload dates to see if we can skip.')
    last_upload = get_date_of_current_pa_upload(base_path)
    if not (last_upload > get_stored_pa_last_upload()) and not force_reload:
        logger.info('Skipping load as it\'s already been done')
    else:
        # NOTE: fixed typo in this log message ('progrma' -> 'program').
        logger.info('Getting the program activity file')
        program_activity_file = get_program_activity_file(base_path)

        logger.info('Loading program activity: {}'.format(PA_FILE_NAME))

        with create_app().app_context():
            sess = GlobalDB.db().session
            try:
                raw_data = pd.read_csv(program_activity_file, dtype=str)
            except pd.io.common.EmptyDataError:
                log_blank_file()
                exit_if_nonlocal(4)  # exit code chosen arbitrarily, to indicate distinct failure states
                return
            headers = set([header.upper() for header in list(raw_data)])

            if not VALID_HEADERS.issubset(headers):
                logger.error('Missing required headers. Required headers include: %s' % str(VALID_HEADERS))
                exit_if_nonlocal(4)
                return

            try:
                # Normalize, rename, zero-pad, and validate the raw columns.
                dropped_count, data = clean_data(
                    raw_data,
                    ProgramActivity,
                    {'fyq': 'fiscal_year_period', 'agency_code': 'agency_id', 'allocation_id': 'allocation_transfer_id',
                     'account_code': 'account_number', 'pa_code': 'program_activity_code',
                     'pa_title': 'program_activity_name'},
                    {'program_activity_code': {'pad_to_length': 4}, 'agency_id': {'pad_to_length': 3},
                     'allocation_transfer_id': {'pad_to_length': 3, 'keep_null': True},
                     'account_number': {'pad_to_length': 4}},
                    ['agency_id', 'program_activity_code', 'account_number', 'program_activity_name'],
                    True
                )
            except FailureThresholdExceededException as e:
                if e.count == 0:
                    log_blank_file()
                    exit_if_nonlocal(4)
                    return
                else:
                    logger.error('Loading of program activity file failed due to exceeded failure threshold. '
                                 'Application tried to drop {} rows'.format(e.count))
                    exit_if_nonlocal(5)
                    return

            # Full refresh: wipe the table before inserting the new data.
            metrics_json['records_deleted'] = sess.query(ProgramActivity).delete()
            metrics_json['invalid_records_dropped'] = dropped_count

            # Lowercase Program Activity Name
            data['program_activity_name'] = data['program_activity_name'].apply(lambda x: lowercase_or_notify(x))
            # Convert FYQ to FYP
            data['fiscal_year_period'] = data['fiscal_year_period'].apply(lambda x: convert_fyq_to_fyp(x))

            # because we're only loading a subset of program activity info, there will be duplicate records in the
            # dataframe. this is ok, but need to de-duped before the db load. We also need to log them.
            base_count = len(data.index)
            metrics_json['records_received'] = base_count
            data.drop_duplicates(inplace=True)

            dupe_count = base_count - len(data.index)
            logger.info('Dropped {} duplicate rows.'.format(dupe_count))
            metrics_json['duplicates_dropped'] = dupe_count

            # insert to db
            table_name = ProgramActivity.__table__.name
            num = insert_dataframe(data, table_name, sess.connection())
            sess.commit()

            if export:
                export_public_pa(raw_data)

        end_time = datetime.datetime.now()
        # Record both the processing window and the source file's upload time.
        update_external_data_load_date(now, end_time, 'program_activity')
        update_external_data_load_date(last_upload, end_time, 'program_activity_upload')
        logger.info('{} records inserted to {}'.format(num, table_name))
        metrics_json['records_inserted'] = num
        metrics_json['duration'] = str(end_time - now)

    # Metrics are written whether the load ran or was skipped.
    with open('load_program_activity_metrics.json', 'w+') as metrics_file:
        json.dump(metrics_json, metrics_file)

    if dropped_count > 0:
        exit_if_nonlocal(3)
    return
def lowercase_or_notify(x):
    """ Lowercases the input if it is valid, otherwise logs the error and sets a default value

        Args:
            x: String to lowercase

        Returns:
            Lowercased string if possible, else unmodified string or default value.
    """
    try:
        return x.lower()
    except Exception:
        pass
    # .lower() failed: decide between keeping the value and the default.
    if not x or np.isnan(x):
        logger.info('Null value found for program activity name. Entered default value.')  # should not happen
        return '(not provided)'
    logger.info('Program activity of {} was unable to be lowercased. Entered as-is.'.format(x))
    return x
def convert_fyq_to_fyp(fyq):
    """ Converts the fyq provided to fyp if it is in fyq format. Do nothing if it is already in fyp format

        Args:
            fyq: string fiscal year/quarter value to convert (e.g. 'FY21Q3')

        Returns:
            FYQ converted to FYP (e.g. 'FY21P09') or the input left unchanged
    """
    # If it's in quarter format, convert to period. Use a raw string for the
    # pattern: '\d' in a plain string literal is an invalid escape sequence
    # (DeprecationWarning, and a SyntaxWarning on modern Pythons).
    if re.match(r'^FY\d{2}Q\d$', str(fyq).upper().strip()):
        # Make sure it's all uppercase and replace the Q with a P
        fyq = fyq.upper().strip().replace('Q', 'P')
        # take the last character in the string (the quarter), multiply by 3,
        # and zero-pad to two digits to get the period
        quarter = fyq[-1]
        period = str(int(quarter) * 3).zfill(2)
        return fyq[:-1] + period
    return fyq
def log_blank_file():
    """ Helper function for specific reused log message (empty input file). """
    logger.error('File was blank! Not loaded, routine aborted.')
def exit_if_nonlocal(exit_code):
    """ Exits the process with the given code, unless running in local mode.

        Args:
            exit_code: integer exit code to pass to sys.exit
    """
    if not CONFIG_BROKER['local']:
        sys.exit(exit_code)
if __name__ == '__main__':
    configure_logging()
    # NOTE: fixed typo in the CLI description ('Activit' -> 'Activity').
    parser = argparse.ArgumentParser(description='Loads in Program Activity data')
    parser.add_argument('-e', '--export', help='If provided, exports a public version of the file locally',
                        action='store_true')
    parser.add_argument('-f', '--force', help='If provided, forces a reload',
                        action='store_true')
    args = parser.parse_args()

    config_path = os.path.join(CONFIG_BROKER["path"], "dataactvalidator", "config")
    load_program_activity_data(config_path, force_reload=args.force, export=args.export)
| |
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import base64
import hashlib
import pkgutil
import os
import zipfile
from .command import Command
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.by import By
from selenium.webdriver.common.utils import keys_to_typing
# Python 2/3 compatibility shims.
try:
    # On Python 2, make `str` checks cover both str and unicode.
    str = basestring
except NameError:
    # Python 3: basestring does not exist; builtin str is fine.
    pass

try:
    from StringIO import StringIO as IOStream
except ImportError:  # 3+
    from io import BytesIO as IOStream

# Browser-side JavaScript "atoms" bundled with this package; executed via
# execute_script on W3C-compliant endpoints.
getAttribute_js = pkgutil.get_data(__package__, 'getAttribute.js').decode('utf8')
isDisplayed_js = pkgutil.get_data(__package__, 'isDisplayed.js').decode('utf8')
class WebElement(object):
"""Represents a DOM element.
Generally, all interesting operations that interact with a document will be
performed through this interface.
All method calls will do a freshness check to ensure that the element
reference is still valid. This essentially determines whether or not the
element is still attached to the DOM. If this test fails, then an
``StaleElementReferenceException`` is thrown, and all future calls to this
instance will fail."""
def __init__(self, parent, id_, w3c=False):
    """Create a wrapper around a remote DOM element.

    :Args:
     - parent - WebDriver instance this element was found from.
     - id_ - server-assigned identifier of the element.
     - w3c - True when the remote end speaks the W3C protocol.
    """
    self._parent = parent
    self._id = id_
    self._w3c = w3c
def __repr__(self):
    """Debug representation including session and element IDs."""
    return '<{0.__module__}.{0.__name__} (session="{1}", element="{2}")>'.format(
        type(self), self._parent.session_id, self._id)
@property
def tag_name(self):
    """This element's ``tagName`` property."""
    return self._execute(Command.GET_ELEMENT_TAG_NAME)['value']

@property
def text(self):
    """The text of the element."""
    return self._execute(Command.GET_ELEMENT_TEXT)['value']

def click(self):
    """Clicks the element."""
    self._execute(Command.CLICK_ELEMENT)

def submit(self):
    """Submits a form.

    Under W3C the legacy submit command does not exist, so the enclosing
    form is located and a synthetic 'submit' event is dispatched to it.
    """
    if self._w3c:
        form = self.find_element(By.XPATH, "./ancestor-or-self::form")
        self._parent.execute_script(
            "var e = arguments[0].ownerDocument.createEvent('Event');"
            "e.initEvent('submit', true, true);"
            "if (arguments[0].dispatchEvent(e)) { arguments[0].submit() }", form)
    else:
        self._execute(Command.SUBMIT_ELEMENT)

def clear(self):
    """Clears the text if it's a text entry element."""
    self._execute(Command.CLEAR_ELEMENT)
def get_property(self, name):
    """
    Gets the given property of the element.

    :Args:
        - name - Name of the property to retrieve.

    Example::

        # Get the "text_length" property of an element.
        text_length = target_element.get_property("text_length")
    """
    try:
        return self._execute(Command.GET_ELEMENT_PROPERTY, {"name": name})["value"]
    except WebDriverException:
        # if we hit an end point that doesnt understand getElementProperty lets fake it
        return self.parent.execute_script('return arguments[0][arguments[1]]', self, name)
def get_attribute(self, name):
    """Gets the given attribute or property of the element.

    This method will first try to return the value of a property with the
    given name. If a property with that name doesn't exist, it returns the
    value of the attribute with the same name. If there's no attribute with
    that name, ``None`` is returned.

    Values equal to "true" or "false" (case-insensitively) are normalized
    to the lowercase strings ``"true"``/``"false"``, except when ``name``
    is ``"value"``. All other non-``None`` values are returned as strings.
    For attributes or properties which do not exist, ``None`` is returned.

    :Args:
        - name - Name of the attribute/property to retrieve.

    Example::

        # Check if the "active" CSS class is applied to an element.
        is_active = "active" in target_element.get_attribute("class")
    """
    attributeValue = ''
    if self._w3c:
        # W3C endpoints have no combined attribute/property command; run the
        # bundled getAttribute atom in the browser instead.
        attributeValue = self.parent.execute_script(
            "return (%s).apply(null, arguments);" % getAttribute_js,
            self, name)
    else:
        resp = self._execute(Command.GET_ELEMENT_ATTRIBUTE, {'name': name})
        attributeValue = resp.get('value')
        if attributeValue is not None:
            if name != 'value' and attributeValue.lower() in ('true', 'false'):
                attributeValue = attributeValue.lower()
    return attributeValue
def is_selected(self):
    """Returns whether the element is selected.

    Can be used to check if a checkbox or radio button is selected.
    """
    return self._execute(Command.IS_ELEMENT_SELECTED)['value']

def is_enabled(self):
    """Returns whether the element is enabled."""
    return self._execute(Command.IS_ELEMENT_ENABLED)['value']
# Convenience finders: each delegates to find_element/find_elements with the
# matching By locator strategy.

def find_element_by_id(self, id_):
    """Finds element within this element's children by ID.

    :Args:
     - id\_ - ID of child element to locate.
    """
    return self.find_element(by=By.ID, value=id_)

def find_elements_by_id(self, id_):
    """Finds a list of elements within this element's children by ID.

    :Args:
     - id\_ - Id of child element to find.
    """
    return self.find_elements(by=By.ID, value=id_)

def find_element_by_name(self, name):
    """Finds element within this element's children by name.

    :Args:
     - name - name property of the element to find.
    """
    return self.find_element(by=By.NAME, value=name)

def find_elements_by_name(self, name):
    """Finds a list of elements within this element's children by name.

    :Args:
     - name - name property to search for.
    """
    return self.find_elements(by=By.NAME, value=name)

def find_element_by_link_text(self, link_text):
    """Finds element within this element's children by visible link text.

    :Args:
     - link_text - Link text string to search for.
    """
    return self.find_element(by=By.LINK_TEXT, value=link_text)

def find_elements_by_link_text(self, link_text):
    """Finds a list of elements within this element's children by visible link text.

    :Args:
     - link_text - Link text string to search for.
    """
    return self.find_elements(by=By.LINK_TEXT, value=link_text)

def find_element_by_partial_link_text(self, link_text):
    """Finds element within this element's children by partially visible link text.

    :Args:
     - link_text - Link text string to search for.
    """
    return self.find_element(by=By.PARTIAL_LINK_TEXT, value=link_text)

def find_elements_by_partial_link_text(self, link_text):
    """Finds a list of elements within this element's children by link text.

    :Args:
     - link_text - Link text string to search for.
    """
    return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text)

def find_element_by_tag_name(self, name):
    """Finds element within this element's children by tag name.

    :Args:
     - name - name of html tag (eg: h1, a, span)
    """
    return self.find_element(by=By.TAG_NAME, value=name)

def find_elements_by_tag_name(self, name):
    """Finds a list of elements within this element's children by tag name.

    :Args:
     - name - name of html tag (eg: h1, a, span)
    """
    return self.find_elements(by=By.TAG_NAME, value=name)

def find_element_by_xpath(self, xpath):
    """Finds element by xpath.

    :Args:
        xpath - xpath of element to locate. "//input[@class='myelement']"

    Note: The base path will be relative to this element's location.

    This will select the first link under this element.

    ::

        myelement.find_elements_by_xpath(".//a")

    However, this will select the first link on the page.

    ::

        myelement.find_elements_by_xpath("//a")
    """
    return self.find_element(by=By.XPATH, value=xpath)

def find_elements_by_xpath(self, xpath):
    """Finds elements within the element by xpath.

    :Args:
     - xpath - xpath locator string.

    Note: The base path will be relative to this element's location.

    This will select all links under this element.

    ::

        myelement.find_elements_by_xpath(".//a")

    However, this will select all links in the page itself.

    ::

        myelement.find_elements_by_xpath("//a")
    """
    return self.find_elements(by=By.XPATH, value=xpath)

def find_element_by_class_name(self, name):
    """Finds element within this element's children by class name.

    :Args:
     - name - class name to search for.
    """
    return self.find_element(by=By.CLASS_NAME, value=name)

def find_elements_by_class_name(self, name):
    """Finds a list of elements within this element's children by class name.

    :Args:
     - name - class name to search for.
    """
    return self.find_elements(by=By.CLASS_NAME, value=name)

def find_element_by_css_selector(self, css_selector):
    """Finds element within this element's children by CSS selector.

    :Args:
     - css_selector - CSS selector string, ex: 'a.nav#home'
    """
    return self.find_element(by=By.CSS_SELECTOR, value=css_selector)

def find_elements_by_css_selector(self, css_selector):
    """Finds a list of elements within this element's children by CSS selector.

    :Args:
     - css_selector - CSS selector string, ex: 'a.nav#home'
    """
    return self.find_elements(by=By.CSS_SELECTOR, value=css_selector)
def send_keys(self, *value):
    """Simulates typing into the element.

    :Args:
        - value - A string for typing, or setting form fields. For setting
          file inputs, this could be a local file path.

    Use this to send simple key events or to fill out form fields::

        form_textfield = driver.find_element_by_name('username')
        form_textfield.send_keys("admin")

    This can also be used to set file inputs.

    ::

        file_input = driver.find_element_by_name('profilePic')
        file_input.send_keys("path/to/profilepic.gif")
        # Generally it's better to wrap the file path in one of the methods
        # in os.path to return the actual path to support cross OS testing.
        # file_input.send_keys(os.path.abspath("path/to/profilepic.gif"))
    """
    # transfer file to another machine only if remote driver is used
    # the same behaviour as for java binding
    if self.parent._is_remote:
        local_file = self.parent.file_detector.is_local_file(*value)
        if local_file is not None:
            # Replace the local path with the remote path of the uploaded file.
            value = self._upload(local_file)

    # Send both 'text' (W3C) and 'value' (legacy) forms of the payload.
    self._execute(Command.SEND_KEYS_TO_ELEMENT,
                  {'text': "".join(keys_to_typing(value)),
                   'value': keys_to_typing(value)})
# RenderedWebElement Items
def is_displayed(self):
    """Whether the element is visible to a user."""
    # Only go into this conditional for browsers that don't use the atom
    # themselves; run the bundled isDisplayed atom in the page.
    if self._w3c and self.parent.capabilities['browserName'] == 'safari':
        return self.parent.execute_script(
            "return (%s).apply(null, arguments);" % isDisplayed_js,
            self)
    else:
        return self._execute(Command.IS_ELEMENT_DISPLAYED)['value']
@property
def location_once_scrolled_into_view(self):
    """THIS PROPERTY MAY CHANGE WITHOUT WARNING. Use this to discover
    where on the screen an element is so that we can click it. This method
    should cause the element to be scrolled into view.

    Returns the top lefthand corner location on the screen, or ``None`` if
    the element is not visible.
    """
    if self._w3c:
        # W3C has no dedicated command: scroll via script and read the
        # bounding rect, rounding coordinates to whole pixels.
        old_loc = self._execute(Command.W3C_EXECUTE_SCRIPT, {
            'script': "arguments[0].scrollIntoView(true); return arguments[0].getBoundingClientRect()",
            'args': [self]})['value']
        return {"x": round(old_loc['x']),
                "y": round(old_loc['y'])}
    else:
        return self._execute(Command.GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW)['value']
@property
def size(self):
    """The size of the element."""
    # W3C and legacy endpoints use different commands; both responses
    # carry 'height' and 'width' keys.
    command = Command.GET_ELEMENT_RECT if self._w3c else Command.GET_ELEMENT_SIZE
    raw = self._execute(command)['value']
    # Normalize the result to exactly the two size keys.
    return {"height": raw["height"],
            "width": raw["width"]}
def value_of_css_property(self, property_name):
    """The value of a CSS property."""
    return self._execute(Command.GET_ELEMENT_VALUE_OF_CSS_PROPERTY, {
        'propertyName': property_name})['value']

@property
def location(self):
    """The location of the element in the renderable canvas."""
    # W3C returns location as part of the element rect.
    if self._w3c:
        old_loc = self._execute(Command.GET_ELEMENT_RECT)['value']
    else:
        old_loc = self._execute(Command.GET_ELEMENT_LOCATION)['value']
    # Round coordinates to whole pixels.
    new_loc = {"x": round(old_loc['x']),
               "y": round(old_loc['y'])}
    return new_loc

@property
def rect(self):
    """A dictionary with the size and location of the element."""
    return self._execute(Command.GET_ELEMENT_RECT)['value']
@property
def screenshot_as_base64(self):
"""
Gets the screenshot of the current element as a base64 encoded string.
:Usage:
img_b64 = element.screenshot_as_base64
"""
return self._execute(Command.ELEMENT_SCREENSHOT)['value']
@property
def screenshot_as_png(self):
"""
Gets the screenshot of the current element as a binary data.
:Usage:
element_png = element.screenshot_as_png
"""
return base64.b64decode(self.screenshot_as_base64.encode('ascii'))
def screenshot(self, filename):
    """Save a screenshot of the current element to *filename*.

    Returns ``False`` on any IOError, ``True`` otherwise. Use full paths
    in your filename.

    :Args:
        - filename: The full path you wish to save your screenshot to.

    :Usage:
        element.screenshot('/Screenshots/foo.png')
    """
    png_data = self.screenshot_as_png
    try:
        with open(filename, 'wb') as outfile:
            outfile.write(png_data)
        return True
    except IOError:
        return False
    finally:
        # Drop the (potentially large) image buffer promptly.
        del png_data
@property
def parent(self):
    """Internal reference to the WebDriver instance this element was found from."""
    # Exposed read-only; set once at construction time.
    return self._parent
@property
def id(self):
    """Internal ID used by selenium.

    This is mainly for internal use. Simple use cases such as checking if 2
    webelements refer to the same element, can be done using ``==``::

        if element1 == element2:
            print("These 2 are equal")
    """
    # This is the opaque element reference assigned by the remote end.
    return self._id
def __eq__(self, element):
    """Two elements are equal when their remote-end IDs match."""
    if not hasattr(element, 'id'):
        return False
    return self._id == element.id
def __ne__(self, element):
    """Inverse of ``__eq__``."""
    equal = self.__eq__(element)
    return not equal
# Private Methods
def _execute(self, command, params=None):
    """Executes a command against the underlying HTML element.

    Args:
        command: The name of the command to _execute as a string.
        params: A dictionary of named parameters to send with the command.

    Returns:
        The command's JSON response loaded into a dictionary object.
    """
    # Always stamp the element id onto the parameters so the remote end
    # knows which element the command targets.
    params = params or {}
    params['id'] = self._id
    return self._parent.execute(command, params)
def find_element(self, by=By.ID, value=None):
    """Find the first descendant element matching the given locator."""
    if self._w3c:
        # The W3C spec only supports a subset of locator strategies, so the
        # remainder are rewritten as equivalent CSS selectors.
        if by == By.ID:
            by, value = By.CSS_SELECTOR, '[id="%s"]' % value
        elif by == By.CLASS_NAME:
            by, value = By.CSS_SELECTOR, ".%s" % value
        elif by == By.NAME:
            by, value = By.CSS_SELECTOR, '[name="%s"]' % value
        elif by == By.TAG_NAME:
            by = By.CSS_SELECTOR  # a bare tag name is already valid CSS
    return self._execute(Command.FIND_CHILD_ELEMENT,
                         {"using": by, "value": value})['value']
def find_elements(self, by=By.ID, value=None):
    """Find all descendant elements matching the given locator."""
    if self._w3c:
        # Mirror find_element: translate non-W3C strategies to CSS.
        if by == By.ID:
            by, value = By.CSS_SELECTOR, '[id="%s"]' % value
        elif by == By.CLASS_NAME:
            by, value = By.CSS_SELECTOR, ".%s" % value
        elif by == By.NAME:
            by, value = By.CSS_SELECTOR, '[name="%s"]' % value
        elif by == By.TAG_NAME:
            by = By.CSS_SELECTOR  # a bare tag name is already valid CSS
    return self._execute(Command.FIND_CHILD_ELEMENTS,
                         {"using": by, "value": value})['value']
def __hash__(self):
    """Hash derived from the remote element id, consistent with ``__eq__``."""
    digest = hashlib.md5(self._id.encode('utf-8')).hexdigest()
    return int(digest, 16)
def _upload(self, filename):
    """Zip *filename* and push it to the remote end's file staging area.

    Used when a local file path is sent to a file input on a remote
    browser. Returns the remote path on success; if the remote end does
    not support the UPLOAD_FILE command, the original local *filename* is
    returned unchanged so it can be used as-is.
    """
    fp = IOStream()
    # Context manager guarantees the archive is finalized/closed even if
    # writing the member fails (the original leaked the ZipFile on error).
    with zipfile.ZipFile(fp, 'w', zipfile.ZIP_DEFLATED) as zipped:
        zipped.write(filename, os.path.split(filename)[1])
    # BUG FIX: base64.encodestring was deprecated since Python 3.1 and
    # removed in 3.9; encodebytes is the drop-in replacement (same
    # newline-wrapped output).
    content = base64.encodebytes(fp.getvalue())
    if not isinstance(content, str):
        content = content.decode('utf-8')
    try:
        return self._execute(Command.UPLOAD_FILE, {'file': content})['value']
    except WebDriverException as e:
        message = str(e)
        # Older/limited remote ends don't implement UPLOAD_FILE; fall back
        # to handing the driver the local path.
        if ("Unrecognized command: POST" in message
                or "Command not found: POST " in message
                or '{"status":405,"value":["GET","HEAD","DELETE"]}' in message):
            return filename
        raise
| |
#!/usr/bin/env python
#
# Copyright 2007 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A module that handles deferred execution of callables via the task queue.
Tasks consist of a callable and arguments to pass to it. The callable and its
arguments are serialized and put on the task queue, which deserializes and
executes them. The following callables can be used as tasks:
1) Functions defined in the top level of a module
2) Classes defined in the top level of a module
3) Instances of classes in (2) that implement __call__
4) Instance methods of objects of classes in (2)
5) Class methods of classes in (2)
6) Built-in functions
7) Built-in methods
The following callables can NOT be used as tasks:
1) Nested functions or closures
2) Nested classes or objects of them
3) Lambda functions
4) Static methods
The arguments to the callable, and the object (in the case of method or object
calls) must all be pickleable.
If you want your tasks to execute reliably, don't use mutable global variables;
they are not serialized with the task and may not be the same when your task
executes as they were when it was enqueued (in fact, they will almost certainly
be different).
If your app relies on manipulating the import path, make sure that the function
you are deferring is defined in a module that can be found without import path
manipulation. Alternately, you can include deferred.TaskHandler in your own
webapp application instead of using the easy-install method detailed below.
When you create a deferred task using deferred.defer, the task is serialized,
and an attempt is made to add it directly to the task queue. If the task is too
big (larger than about 10 kilobytes when serialized), a datastore entry will be
created for the task, and a new task will be enqueued, which will fetch the
original task from the datastore and execute it. This is much less efficient
than the direct execution model, so it's a good idea to minimize the size of
your tasks when possible.
By default, the deferred module uses the URL '/_ah/queue/deferred',
and the default queue.
To enable the Deferred API, set 'use_deferred=True' in the call
to 'wrap_wsgi_app()'.
Example for a Flask app:
app = Flask(__name__)
app.wsgi_app = wrap_wsgi_app(app.wsgi_app, use_deferred=True)
Deferring a task in Flask:
```
from flask import Flask, request
from google.appengine.api import wrap_wsgi_app
from google.appengine.ext import ndb
from google.appengine.ext import deferred
class MyModel(ndb.Model):
total = ndb.IntegerProperty(indexed=True)
my_key = "defaultKey"
def do_something_later(key, amount):
entity = MyModel.get_or_insert(key, total=0)
entity.total += amount
entity.put()
@app.route("/home")
def hello_world():
# Use default URL and queue name, no task name, execute ASAP.
deferred.defer(do_something_later, my_key, 20)
# Execute after 60s
deferred.defer(do_something_later, my_key, 20, _countdown=60)
# Using a non-default queue (a TaskQueue 'foo' should already exist)
deferred.defer(do_something_later, my_key, 20, _queue="foo", _countdown=60)
app = Flask(__name__)
app.wsgi_app = wrap_wsgi_app(app.wsgi_app, use_deferred=True)
```
"""
import http
import logging
import os
import pickle
import types
from google.appengine.api import taskqueue
from google.appengine.ext import db
# Log level used by Handler.run_from_request when echoing task headers;
# adjustable at runtime via set_log_level().
_DEFAULT_LOG_LEVEL = logging.INFO
# Headers attached to every deferred task enqueued by defer().
_TASKQUEUE_HEADERS = {"Content-Type": "application/octet-stream"}
# Response header pair returned by the deferred request handler.
_TASKQUEUE_RESPONSE_HEADERS = ("Content-Type", "text/plain")
# Default endpoint the TaskQueue service POSTs deferred payloads to.
_DEFAULT_URL = "/_ah/queue/deferred"
# Name of the queue used when defer() is not given an explicit _queue.
_DEFAULT_QUEUE = "default"
class Error(Exception):
    """Base class for exceptions in this module."""


class PermanentTaskFailure(Error):
    """Indicates that a task failed, and will never succeed."""
    # The request handler answers HTTP 200 for this so TaskQueue does not
    # re-enqueue the task.


class SingularTaskFailure(Error):
    """Indicates that a task failed once."""
    # The request handler answers HTTP 408 for this, forcing one retry.
def set_log_level(log_level):
    """Sets the log level deferred will log to in normal circumstances.

    Args:
      log_level: one of logging log levels, e.g. logging.DEBUG, logging.INFO, etc.
    """
    # Module-level so Handler.run_from_request picks the new level up on the
    # next request.
    global _DEFAULT_LOG_LEVEL
    _DEFAULT_LOG_LEVEL = log_level
def run(data):
    """Unpickle a (callable, args, kwargs) tuple and invoke the callable.

    Args:
      data: A pickled tuple of (function, args, kwargs) to execute.

    Returns:
      The return value of the function invocation.

    Raises:
      PermanentTaskFailure: if unpickling (or unpacking) the task fails —
        a corrupt payload will never succeed, so no retry is requested.
    """
    try:
        func, args, kwds = pickle.loads(data)
    except Exception as e:
        raise PermanentTaskFailure(e)
    return func(*args, **kwds)
class _DeferredTaskEntity(db.Model):
    """Datastore representation of a deferred task.

    This is used in cases when the deferred task is too big to be included as
    payload with the task queue entry.
    """

    # Pickled (callable, args, kwargs) payload produced by serialize().
    data = db.BlobProperty(required=True)
def run_from_datastore(key):
    """Retrieves a task from the datastore and executes it.

    Args:
      key: The datastore key of a _DeferredTaskEntity storing the task.

    Returns:
      The return value of the function invocation.

    Raises:
      PermanentTaskFailure: Raised if the task entity is missing.
    """
    entity = _DeferredTaskEntity.get(key)
    if not entity:
        # Entity vanished (e.g. already consumed); retrying cannot help.
        raise PermanentTaskFailure()
    try:
        ret = run(entity.data)
        entity.delete()
        # BUG FIX: the result was previously computed into `ret` but never
        # returned, contradicting the documented contract above.
        return ret
    except PermanentTaskFailure:
        # Clean up the stored payload even on permanent failure, then let
        # the failure propagate to the request handler.
        entity.delete()
        raise
def invoke_member(obj, membername, *args, **kwargs):
    """Retrieves a member of an object, then calls it with the provided arguments.

    Args:
      obj: The object to operate on.
      membername: The name of the member to retrieve from obj.
      *args: Positional arguments to pass to the method.
      **kwargs: Keyword arguments to pass to the method.

    Returns:
      The return value of the method invocation.
    """
    member = getattr(obj, membername)
    return member(*args, **kwargs)
def _curry_callable(obj, *args, **kwargs):
"""Takes a callable and arguments and returns a task queue tuple.
The returned tuple consists of (callable, args, kwargs), and can be pickled
and unpickled safely.
Args:
obj: The callable to curry. See the module docstring for restrictions.
*args: Positional arguments to call the callable with.
**kwargs: Keyword arguments to call the callable with.
Returns:
A tuple consisting of (callable, args, kwargs) that can be evaluated by
run() with equivalent effect of executing the function directly.
Raises:
ValueError: If the passed in object is not of a valid callable type.
"""
if isinstance(obj, types.MethodType):
return (invoke_member, (obj.__self__, obj.__func__.__name__) + args, kwargs)
elif isinstance(obj, types.BuiltinMethodType):
if not obj.__self__:
return (obj, args, kwargs)
else:
if isinstance(obj.__self__, types.ModuleType):
return (obj, args, kwargs)
else:
return (invoke_member, (obj.__self__, obj.__name__) + args, kwargs)
elif isinstance(obj, object) and hasattr(obj, "__call__"):
return (obj, args, kwargs)
elif isinstance(obj, (types.FunctionType, types.BuiltinFunctionType, type,
types.UnboundMethodType)):
return (obj, args, kwargs)
else:
raise ValueError("obj must be callable")
def serialize(obj, *args, **kwargs):
    """Serializes a callable into a format recognized by the deferred executor.

    Args:
      obj: The callable to serialize. See module docstring for restrictions.
      *args: Positional arguments to call the callable with.
      **kwargs: Keyword arguments to call the callable with.

    Returns:
      A serialized representation of the callable.
    """
    curried = _curry_callable(obj, *args, **kwargs)
    # Protocol 0 keeps payloads readable across Python versions when the
    # escape-hatch environment variable is set; otherwise use the fastest.
    use_compat = os.environ.get("DEFERRED_USE_CROSS_COMPATIBLE_PICKLE_PROTOCOL", False)
    protocol = 0 if use_compat else pickle.HIGHEST_PROTOCOL
    return pickle.dumps(curried, protocol)
def defer(obj, *args, **kwargs):
    """Defers a callable for execution later.

    The default deferred URL of /_ah/queue/deferred will be used unless an
    alternate URL is explicitly specified. If you want to use the default URL
    for a queue, specify _url=None. If you specify a different URL, you will
    need to install the handler on that URL (see the module docstring).

    Args:
      obj: The callable to execute. See module docstring for restrictions.
      _countdown, _eta, _headers, _name, _target, _transactional, _url,
        _retry_options, _queue: Passed through to the task queue - see the
        task queue documentation for details.
      *args: Positional arguments to call the callable with.
      **kwargs: Any other keyword arguments are passed through to the callable.

    Returns:
      A taskqueue.Task object which represents an enqueued callable.
    """
    # Peel the underscore-prefixed task options off kwargs; everything left
    # is forwarded to the deferred callable itself.
    taskargs = {
        option: kwargs.pop("_%s" % option, None)
        for option in ("countdown", "eta", "name", "target", "retry_options")
    }
    taskargs["url"] = kwargs.pop("_url", _DEFAULT_URL)
    transactional = kwargs.pop("_transactional", False)
    headers = dict(_TASKQUEUE_HEADERS)
    headers.update(kwargs.pop("_headers", {}))
    taskargs["headers"] = headers
    queue = kwargs.pop("_queue", _DEFAULT_QUEUE)
    pickled = serialize(obj, *args, **kwargs)
    try:
        task = taskqueue.Task(payload=pickled, **taskargs)
        return task.add(queue, transactional=transactional)
    except taskqueue.TaskTooLargeError:
        # Payload exceeds the queue limit: park it in the datastore and
        # enqueue a small bootstrap task that fetches and runs it.
        key = _DeferredTaskEntity(data=pickled).put()
        pickled = serialize(run_from_datastore, str(key))
        task = taskqueue.Task(payload=pickled, **taskargs)
        return task.add(queue)
class Handler():
    """A handler class for processing deferred invocations.

    Behaves as a WSGI application: TaskQueue POSTs the pickled payload to the
    deferred endpoint, and __call__ verifies, unpickles and executes it.
    """

    def run_from_request(self, environ):
        """Executes deferred tasks after verifying the caller.

        This function assumes that the WSGI environ dict originated from a POST
        request by the GAE TaskQueue service. It checks the caller IP and
        request headers to verify the caller.

        Args:
          environ: a WSGI dict describing the HTTP request (See PEP 333).

        Returns:
          response: a string containing body of the response
          status: HTTP status code of enum type http.HTTPStatus
          headers: a list containing response header tuples

        Raises:
          PermanentTaskFailure if an error occurred during unpickling the task.
        """
        # Only the TaskQueue service sets this header; browsers cannot forge
        # X-AppEngine-* headers, so its absence indicates an XSRF attempt.
        if "HTTP_X_APPENGINE_TASKNAME" not in environ:
            error_message = ("Detected an attempted XSRF attack. "
                            "The header 'X-AppEngine-Taskname' was not set.")
            logging.error(error_message)
            return error_message, http.HTTPStatus.FORBIDDEN, [
                _TASKQUEUE_RESPONSE_HEADERS
            ]

        # In production, TaskQueue requests always originate from 0.1.0.2.
        # ROBUSTNESS FIX: default SERVER_SOFTWARE to "" — .get() could return
        # None, and None.startswith(...) raised AttributeError.
        in_prod = (
            not environ.get("SERVER_SOFTWARE", "").startswith("Devel"))
        if in_prod and environ.get("REMOTE_ADDR") != "0.1.0.2":
            error_message = ("Detected an attempted XSRF attack. "
                            "This request did not originate from Task Queue.")
            logging.error(error_message)
            return error_message, http.HTTPStatus.FORBIDDEN, [
                _TASKQUEUE_RESPONSE_HEADERS
            ]

        # Echo the TaskQueue bookkeeping headers (queue name, retry count, …)
        # for debugging.
        headers = [
            "%s:%s" % (k[5:], v)
            for k, v in environ.items()
            if k.upper().startswith("HTTP_X_APPENGINE_")
        ]
        logging.log(_DEFAULT_LOG_LEVEL, ", ".join(headers))

        try:
            request_body_size = int(environ.get("CONTENT_LENGTH", 0))
        except ValueError:
            request_body_size = 0
        request_body = environ["wsgi.input"].read(request_body_size)
        run(request_body)
        return "Success", http.HTTPStatus.OK, [_TASKQUEUE_RESPONSE_HEADERS]

    def post(self, environ):
        """Default behavior for POST requests to the deferred endpoint.

        If the Deferred API has been enabled, this function is automatically
        called after 'deferred.defer()' is used to defer a task. If
        deferred.defer() is passed a custom '_url' parameter, the custom
        endpoint handler can call this function with the WSGI 'environ' dict
        of the POST request to replicate the default behavior.

        Args:
          environ: a WSGI dict describing the HTTP request (See PEP 333)

        Returns:
          response: a string containing body of the response
          status: HTTP status code of enum type http.HTTPStatus
          headers: a list containing response header tuples
        """
        try:
            response, status, headers = self.run_from_request(environ)
        except SingularTaskFailure:
            # 408 makes TaskQueue retry the task exactly as designed.
            response, status, headers = ("SingularTaskFailure",
                                         http.HTTPStatus.REQUEST_TIMEOUT,
                                         [_TASKQUEUE_RESPONSE_HEADERS])
            logging.debug("Failure executing task, task retry forced")
        except PermanentTaskFailure:
            # 200 tells TaskQueue the task is done (i.e. do NOT retry).
            response, status, headers = ("PermanentTaskFailure",
                                         http.HTTPStatus.OK,
                                         [_TASKQUEUE_RESPONSE_HEADERS])
            logging.exception("Permanent failure attempting to execute task")
        return response, status, headers

    def __call__(self, environ, start_response):
        """WSGI app callable to handle POST requests to the deferred endpoint.

        Args:
          environ: a WSGI dict describing the HTTP request (See PEP 333)
          start_response: callable (See PEP 3333)

        Returns:
          list of bytes response
        """
        if environ["REQUEST_METHOD"] != "POST":
            # BUG FIX: previously returned a bare (body, status, headers)
            # tuple without calling start_response, which violates the WSGI
            # contract and crashes compliant servers. Emit a proper 405.
            status = http.HTTPStatus.METHOD_NOT_ALLOWED
            start_response(f"{status.value} {status.phrase}", [("Allow", "POST")])
            return [b""]
        response, status, headers = self.post(environ)
        start_response(f"{status.value} {status.phrase}", headers)
        return [response.encode("utf-8")]


application = Handler()
| |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1NodeStatus(object):
    """Swagger model for the Kubernetes v1 NodeStatus object.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self, addresses=None, allocatable=None, capacity=None, conditions=None, daemon_endpoints=None, images=None, node_info=None, phase=None, volumes_attached=None, volumes_in_use=None):
        """V1NodeStatus - a model defined in Swagger.

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        # Attribute name -> swagger type; drives recursive to_dict().
        self.swagger_types = {
            'addresses': 'list[V1NodeAddress]',
            'allocatable': 'dict(str, str)',
            'capacity': 'dict(str, str)',
            'conditions': 'list[V1NodeCondition]',
            'daemon_endpoints': 'V1NodeDaemonEndpoints',
            'images': 'list[V1ContainerImage]',
            'node_info': 'V1NodeSystemInfo',
            'phase': 'str',
            'volumes_attached': 'list[V1AttachedVolume]',
            'volumes_in_use': 'list[str]'
        }
        # Python attribute name -> JSON key in the OpenAPI definition.
        self.attribute_map = {
            'addresses': 'addresses',
            'allocatable': 'allocatable',
            'capacity': 'capacity',
            'conditions': 'conditions',
            'daemon_endpoints': 'daemonEndpoints',
            'images': 'images',
            'node_info': 'nodeInfo',
            'phase': 'phase',
            'volumes_attached': 'volumesAttached',
            'volumes_in_use': 'volumesInUse'
        }
        self._addresses = addresses
        self._allocatable = allocatable
        self._capacity = capacity
        self._conditions = conditions
        self._daemon_endpoints = daemon_endpoints
        self._images = images
        self._node_info = node_info
        self._phase = phase
        self._volumes_attached = volumes_attached
        self._volumes_in_use = volumes_in_use

    @property
    def addresses(self):
        """Gets the addresses of this V1NodeStatus.

        List of addresses reachable to the node. Queried from cloud provider,
        if available.

        :rtype: list[V1NodeAddress]
        """
        return self._addresses

    @addresses.setter
    def addresses(self, addresses):
        """Sets the addresses of this V1NodeStatus.

        :type: list[V1NodeAddress]
        """
        self._addresses = addresses

    @property
    def allocatable(self):
        """Gets the allocatable of this V1NodeStatus.

        Allocatable represents the resources of a node that are available for
        scheduling. Defaults to Capacity.

        :rtype: dict(str, str)
        """
        return self._allocatable

    @allocatable.setter
    def allocatable(self, allocatable):
        """Sets the allocatable of this V1NodeStatus.

        :type: dict(str, str)
        """
        self._allocatable = allocatable

    @property
    def capacity(self):
        """Gets the capacity of this V1NodeStatus.

        Capacity represents the total resources of a node.

        :rtype: dict(str, str)
        """
        return self._capacity

    @capacity.setter
    def capacity(self, capacity):
        """Sets the capacity of this V1NodeStatus.

        :type: dict(str, str)
        """
        self._capacity = capacity

    @property
    def conditions(self):
        """Gets the conditions of this V1NodeStatus.

        Conditions is an array of current observed node conditions.

        :rtype: list[V1NodeCondition]
        """
        return self._conditions

    @conditions.setter
    def conditions(self, conditions):
        """Sets the conditions of this V1NodeStatus.

        :type: list[V1NodeCondition]
        """
        self._conditions = conditions

    @property
    def daemon_endpoints(self):
        """Gets the daemon_endpoints of this V1NodeStatus.

        Endpoints of daemons running on the Node.

        :rtype: V1NodeDaemonEndpoints
        """
        return self._daemon_endpoints

    @daemon_endpoints.setter
    def daemon_endpoints(self, daemon_endpoints):
        """Sets the daemon_endpoints of this V1NodeStatus.

        :type: V1NodeDaemonEndpoints
        """
        self._daemon_endpoints = daemon_endpoints

    @property
    def images(self):
        """Gets the images of this V1NodeStatus.

        List of container images on this node.

        :rtype: list[V1ContainerImage]
        """
        return self._images

    @images.setter
    def images(self, images):
        """Sets the images of this V1NodeStatus.

        :type: list[V1ContainerImage]
        """
        self._images = images

    @property
    def node_info(self):
        """Gets the node_info of this V1NodeStatus.

        Set of ids/uuids to uniquely identify the node.

        :rtype: V1NodeSystemInfo
        """
        return self._node_info

    @node_info.setter
    def node_info(self, node_info):
        """Sets the node_info of this V1NodeStatus.

        :type: V1NodeSystemInfo
        """
        self._node_info = node_info

    @property
    def phase(self):
        """Gets the phase of this V1NodeStatus.

        NodePhase is the recently observed lifecycle phase of the node. The
        field is never populated, and now is deprecated.

        :rtype: str
        """
        return self._phase

    @phase.setter
    def phase(self, phase):
        """Sets the phase of this V1NodeStatus.

        :type: str
        """
        self._phase = phase

    @property
    def volumes_attached(self):
        """Gets the volumes_attached of this V1NodeStatus.

        List of volumes that are attached to the node.

        :rtype: list[V1AttachedVolume]
        """
        return self._volumes_attached

    @volumes_attached.setter
    def volumes_attached(self, volumes_attached):
        """Sets the volumes_attached of this V1NodeStatus.

        :type: list[V1AttachedVolume]
        """
        self._volumes_attached = volumes_attached

    @property
    def volumes_in_use(self):
        """Gets the volumes_in_use of this V1NodeStatus.

        List of attachable volumes in use (mounted) by the node.

        :rtype: list[str]
        """
        return self._volumes_in_use

    @volumes_in_use.setter
    def volumes_in_use(self, volumes_in_use):
        """Sets the volumes_in_use of this V1NodeStatus.

        :type: list[str]
        """
        self._volumes_in_use = volumes_in_use

    def to_dict(self):
        """Returns the model properties as a dict, recursing into nested models."""
        result = {}
        # MODERNIZATION: six.iteritems was a Python 2 compatibility shim;
        # plain dict iteration is the idiomatic Python 3 equivalent.
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, V1NodeStatus):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| |
# -*- coding: utf-8 -*-
import subprocess
import struct
import os
import json
import uuid
import StringIO
import django
from django.test import TestCase
try:
from django.test.utils import override_settings
except ImportError:
from override_settings import override_settings
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.http import HttpResponseNotAllowed
from django.core import management
try:
from django.utils.timezone import now as dt_now
except ImportError:
import datetime
dt_now = datetime.datetime.now
from .models import APNService, Device, Notification, NotificationPayloadSizeExceeded
from .http import JSONResponse
from .utils import generate_cert_and_pkey
from .forms import APNServiceForm
from .settings import get_setting
# 64-hex-character APNs device token reused across the tests below.
TOKEN = '0fd12510cfe6b0a4a89dc7369c96df956f991e66131dab63398734e8000d0029'
# Self-signed certificate served by the mock SSL server.
TEST_PEM = os.path.abspath(os.path.join(os.path.dirname(__file__), 'test.pem'))
# openssl s_server invocation that emulates the APNs SSL endpoint on port 2195.
SSL_SERVER_COMMAND = ('openssl', 's_server', '-accept', '2195', '-cert', TEST_PEM)
class UseMockSSLServerMixin(object):
    """Test mixin that runs a local openssl s_server for the TestCase's lifetime.

    The server stands in for the APNs push gateway so APNService can open a
    real SSL connection to 127.0.0.1:2195 during tests.
    """

    @classmethod
    def setUpClass(cls):
        super(UseMockSSLServerMixin, cls).setUpClass()
        # stdout=PIPE keeps openssl's chatter out of the test output.
        cls.test_server_proc = subprocess.Popen(SSL_SERVER_COMMAND, stdout=subprocess.PIPE)

    @classmethod
    def tearDownClass(cls):
        # kill() (not terminate()) — openssl may ignore SIGTERM while serving.
        cls.test_server_proc.kill()
        super(UseMockSSLServerMixin, cls).tearDownClass()
class APNServiceTest(UseMockSSLServerMixin, TestCase):
    """Tests APNService packing and pushing against the local mock SSL server.

    NOTE(review): this module is Python 2 code (xrange, uuid1().get_hex()).
    """

    def setUp(self):
        # Fresh self-signed credentials per test; the mock server accepts any
        # client certificate.
        cert, key = generate_cert_and_pkey()
        self.service = APNService.objects.create(name='test-service', hostname='127.0.0.1',
                                                 certificate=cert, private_key=key)
        self.device = Device.objects.create(token=TOKEN, service=self.service)
        self.notification = Notification.objects.create(message='Test message', service=self.service)

    def test_invalid_payload_size(self):
        # 250 '.' chars plus the JSON envelope exceeds the APNs payload limit
        # — presumably 256 bytes; verify against Notification/APNService code.
        n = Notification(message='.' * 250)
        self.assertRaises(NotificationPayloadSizeExceeded, self.service.pack_message, n.payload, self.device)

    def test_payload_packed_correctly(self):
        # Round-trip the binary frame through struct and check the payload
        # comes back intact as the final unpacked field.
        fmt = self.service.fmt
        payload = self.notification.payload
        msg = self.service.pack_message(payload, self.device)
        unpacked = struct.unpack(fmt % len(payload), msg)
        self.assertEqual(unpacked[-1], payload)

    def test_pack_message_with_invalid_device(self):
        # A None device is rejected before any packing is attempted.
        self.assertRaises(TypeError, self.service.pack_message, None)

    def test_can_connect_and_push_notification(self):
        self.assertIsNone(self.notification.last_sent_at)
        self.assertIsNone(self.device.last_notified_at)
        self.service.push_notification_to_devices(self.notification, [self.device])
        self.assertIsNotNone(self.notification.last_sent_at)
        self.device = Device.objects.get(pk=self.device.pk)  # Refresh the object with values from db
        self.assertIsNotNone(self.device.last_notified_at)

    def test_create_with_passphrase(self):
        cert, key = generate_cert_and_pkey(as_string=True, passphrase='pass')
        form = APNServiceForm({'name': 'test', 'hostname': 'localhost', 'certificate': cert, 'private_key': key, 'passphrase': 'pass'})
        self.assertTrue(form.is_valid())

    def test_create_with_invalid_passphrase(self):
        # Wrong passphrase must surface as a field error, not a crash.
        cert, key = generate_cert_and_pkey(as_string=True, passphrase='correct')
        form = APNServiceForm({'name': 'test', 'hostname': 'localhost', 'certificate': cert, 'private_key': key, 'passphrase': 'incorrect'})
        self.assertFalse(form.is_valid())
        self.assertTrue('passphrase' in form.errors)

    def test_pushing_notification_in_chunks(self):
        # Ten devices pushed with chunk_size=2 — every device must still end
        # up marked as notified after the chunked send.
        devices = []
        for i in xrange(10):
            # uuid1 hex is 32 chars; doubled to form a 64-char token.
            token = uuid.uuid1().get_hex() * 2
            device = Device.objects.create(token=token, service=self.service)
            devices.append(device)
        started_at = dt_now()
        self.service.push_notification_to_devices(self.notification, devices, chunk_size=2)
        device_count = len(devices)
        self.assertEquals(device_count,
                          Device.objects.filter(last_notified_at__gte=started_at).count())
@override_settings(IOS_NOTIFICATIONS_AUTHENTICATION='AuthNone')
class APITest(UseMockSSLServerMixin, TestCase):
    """Tests the device REST API with authentication disabled."""

    # Route requests through the app's own URLconf for these tests.
    urls = 'ios_notifications.urls'

    def setUp(self):
        cert, key = generate_cert_and_pkey()
        self.service = APNService.objects.create(name='test-service', hostname='127.0.0.1',
                                                 certificate=cert, private_key=key)
        self.device_token = TOKEN
        self.user = User.objects.create(username='testuser', email='test@example.com')
        # Note: deliberately a different token than TOKEN so create/update
        # tests don't collide with this pre-existing device.
        self.device = Device.objects.create(service=self.service, token='0fd12510cfe6b0a4a89dc7369d96df956f991e66131dab63398734e8000d0029')

    def test_register_device_invalid_params(self):
        """
        Test that sending a POST request to the device API
        without POST parameters `token` and `service` results
        in a 400 bad request response.
        """
        resp = self.client.post(reverse('ios-notifications-device-create'))
        self.assertEqual(resp.status_code, 400)
        self.assertTrue(isinstance(resp, JSONResponse))
        # Both missing fields must be reported in the error body.
        content = json.loads(resp.content)
        keys = content.keys()
        self.assertTrue('token' in keys and 'service' in keys)

    def test_register_device(self):
        """
        Test a device is created when calling the API with the correct
        POST parameters.
        """
        resp = self.client.post(reverse('ios-notifications-device-create'),
                                {'token': self.device_token,
                                 'service': self.service.id})
        self.assertEqual(resp.status_code, 201)
        self.assertTrue(isinstance(resp, JSONResponse))
        content = resp.content
        device_json = json.loads(content)
        self.assertEqual(device_json.get('model'), 'ios_notifications.device')

    def test_disallowed_method(self):
        # DELETE is not supported on the create endpoint.
        resp = self.client.delete(reverse('ios-notifications-device-create'))
        self.assertEqual(resp.status_code, 405)
        self.assertTrue(isinstance(resp, HttpResponseNotAllowed))

    def test_update_device(self):
        kwargs = {'token': self.device.token, 'service__id': self.device.service.id}
        url = reverse('ios-notifications-device', kwargs=kwargs)
        # PUT body is form-encoded by hand since the test client only
        # auto-encodes POST data.
        resp = self.client.put(url, 'users=%d&platform=iPhone' % self.user.id,
                               content_type='application/x-www-form-urlencode')
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(isinstance(resp, JSONResponse))
        device_json = json.loads(resp.content)
        self.assertEqual(device_json.get('pk'), self.device.id)
        self.assertTrue(self.user in self.device.users.all())

    def test_get_device_details(self):
        kwargs = {'token': self.device.token, 'service__id': self.device.service.id}
        url = reverse('ios-notifications-device', kwargs=kwargs)
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        content = resp.content
        device_json = json.loads(content)
        self.assertEqual(device_json.get('model'), 'ios_notifications.device')
class AuthenticationDecoratorTestAuthBasic(UseMockSSLServerMixin, TestCase):
    """Tests for the HTTP Basic auth modes of the API authentication decorator,
    driven by the IOS_NOTIFICATIONS_AUTHENTICATION setting."""
    urls = 'ios_notifications.urls'

    def setUp(self):
        # Self-signed cert/key pair so the APNService model validates.
        cert, key = generate_cert_and_pkey()
        self.service = APNService.objects.create(name='test-service', hostname='127.0.0.1',
                                                 certificate=cert, private_key=key)
        self.device_token = TOKEN
        self.user_password = 'abc123'
        # Staff user so the AuthBasicIsStaff tests pass by default.
        self.user = User.objects.create(username='testuser', email='test@example.com')
        self.user.set_password(self.user_password)
        self.user.is_staff = True
        self.user.save()
        self.device = Device.objects.create(service=self.service, token='0fd12510cfe6b0a4a89dc7369d96df956f991e66131dab63398734e8000d0029')

    @override_settings(IOS_NOTIFICATIONS_AUTHENTICATION='AuthBasic')
    def test_basic_authorization_request(self):
        # Valid Basic credentials => request is authorized (200).
        kwargs = {'token': self.device.token, 'service__id': self.device.service.id}
        url = reverse('ios-notifications-device', kwargs=kwargs)
        user_pass = '%s:%s' % (self.user.username, self.user_password)
        # Python 2 only: str.encode('base64') (includes a trailing newline;
        # presumably the decorator tolerates it -- confirm there).
        auth_header = 'Basic %s' % user_pass.encode('base64')
        resp = self.client.get(url, {}, HTTP_AUTHORIZATION=auth_header)
        self.assertEquals(resp.status_code, 200)

    @override_settings(IOS_NOTIFICATIONS_AUTHENTICATION='AuthBasic')
    def test_basic_authorization_request_invalid_credentials(self):
        # Wrong password => 401 with an error message in the body.
        user_pass = '%s:%s' % (self.user.username, 'invalidpassword')
        auth_header = 'Basic %s' % user_pass.encode('base64')
        url = reverse('ios-notifications-device-create')
        resp = self.client.get(url, HTTP_AUTHORIZATION=auth_header)
        self.assertEquals(resp.status_code, 401)
        self.assertTrue('authentication error' in resp.content)

    @override_settings(IOS_NOTIFICATIONS_AUTHENTICATION='AuthBasic')
    def test_basic_authorization_missing_header(self):
        # No Authorization header at all => 401 with a distinct message.
        url = reverse('ios-notifications-device-create')
        resp = self.client.get(url)
        self.assertEquals(resp.status_code, 401)
        self.assertTrue('Authorization header not set' in resp.content)

    @override_settings(IOS_NOTIFICATIONS_AUTHENTICATION='AuthDoesNotExist')
    def test_invalid_authentication_type(self):
        # An unknown auth mode name raises rather than failing the request.
        from ios_notifications.decorators import InvalidAuthenticationType
        url = reverse('ios-notifications-device-create')
        self.assertRaises(InvalidAuthenticationType, self.client.get, url)

    @override_settings(IOS_NOTIFICATIONS_AUTHENTICATION=None)
    def test_no_authentication_type(self):
        # A None auth mode is treated the same as an unknown one.
        from ios_notifications.decorators import InvalidAuthenticationType
        url = reverse('ios-notifications-device-create')
        self.assertRaises(InvalidAuthenticationType, self.client.get, url)

    @override_settings(IOS_NOTIFICATIONS_AUTHENTICATION='AuthBasicIsStaff')
    def test_basic_authorization_is_staff(self):
        kwargs = {'token': self.device.token, 'service__id': self.device.service.id}
        url = reverse('ios-notifications-device', kwargs=kwargs)
        user_pass = '%s:%s' % (self.user.username, self.user_password)
        auth_header = 'Basic %s' % user_pass.encode('base64')
        # NOTE(review): this assignment is never save()d; it is redundant since
        # setUp already saved the user with is_staff=True.
        self.user.is_staff = True
        resp = self.client.get(url, HTTP_AUTHORIZATION=auth_header)
        self.assertEquals(resp.status_code, 200)

    @override_settings(IOS_NOTIFICATIONS_AUTHENTICATION='AuthBasicIsStaff')
    def test_basic_authorization_is_staff_with_non_staff_user(self):
        # Valid credentials but non-staff user => 401 under AuthBasicIsStaff.
        kwargs = {'token': self.device.token, 'service__id': self.device.service.id}
        url = reverse('ios-notifications-device', kwargs=kwargs)
        user_pass = '%s:%s' % (self.user.username, self.user_password)
        auth_header = 'Basic %s' % user_pass.encode('base64')
        self.user.is_staff = False
        self.user.save()
        resp = self.client.get(url, HTTP_AUTHORIZATION=auth_header)
        self.assertEquals(resp.status_code, 401)
        self.assertTrue('authentication error' in resp.content)
class NotificationTest(UseMockSSLServerMixin, TestCase):
    """Tests for Notification payload construction, length validation and
    pushing to devices (against the mock SSL APNs server)."""

    def setUp(self):
        cert, key = generate_cert_and_pkey()
        self.service = APNService.objects.create(name='service', hostname='127.0.0.1',
                                                 private_key=key, certificate=cert)
        self.service.PORT = 2195 # For ease of use simply change port to default port in test_server
        self.custom_payload = json.dumps({"." * 10: "." * 50})
        self.notification = Notification.objects.create(service=self.service, message='Test message', custom_payload=self.custom_payload)

    def test_valid_length(self):
        # A short alert message fits within the APNs payload limit.
        self.notification.message = 'test message'
        self.assertTrue(self.notification.is_valid_length())

    def test_invalid_length(self):
        # A 250-character message alone exceeds the limit (presumably the
        # legacy 256-byte APNs maximum -- confirm in is_valid_length).
        self.notification.message = '.' * 250
        self.assertFalse(self.notification.is_valid_length())

    def test_invalid_length_with_custom_payload(self):
        # Message and custom payload combined push the total over the limit.
        self.notification.message = '.' * 100
        self.notification.custom_payload = '{"%s":"%s"}' % ("." * 20, "." * 120)
        self.assertFalse(self.notification.is_valid_length())

    def test_extra_property_with_custom_payload(self):
        # `extra` round-trips a dict through custom_payload as JSON.
        custom_payload = {"." * 10: "." * 50, "nested": {"+" * 10: "+" * 50}}
        self.notification.extra = custom_payload
        self.assertEqual(self.notification.custom_payload, json.dumps(custom_payload))
        self.assertEqual(self.notification.extra, custom_payload)
        self.assertTrue(self.notification.is_valid_length())

    def test_loc_data_payload(self):
        # Localization args are stringified and embedded as the aps alert dict.
        self.notification.set_loc_data('TEST_1', [1, 'ab', 1.2, 'CD'])
        self.notification.message = 'test message'
        loc_data = {'loc-key': 'TEST_1', 'loc-args': ['1', 'ab', '1.2', 'CD']}
        self.assertEqual(self.notification.loc_data, loc_data)
        self.assertTrue(self.notification.is_valid_length())
        p = self.notification.payload
        self.assertEqual(json.loads(p)['aps']['alert'], loc_data)

    def test_extra_property_not_dict(self):
        # Assigning a non-dict to `extra` must raise TypeError.
        with self.assertRaises(TypeError):
            self.notification.extra = 111

    def test_extra_property_none(self):
        # Assigning None clears the custom payload.
        self.notification.extra = None
        self.assertEqual(self.notification.extra, None)
        self.assertEqual(self.notification.custom_payload, '')
        self.assertTrue(self.notification.is_valid_length())

    def test_push_to_all_devices_persist_existing(self):
        # last_sent_at is stamped on the in-memory object even with
        # persist=False when the notification was already saved.
        self.assertIsNone(self.notification.last_sent_at)
        self.notification.persist = False
        self.notification.push_to_all_devices()
        self.assertIsNotNone(self.notification.last_sent_at)

    def test_push_to_all_devices_persist_new(self):
        # persist=True on an unsaved notification saves it (pk assigned).
        notification = Notification(service=self.service, message='Test message (new)')
        notification.persist = True
        notification.push_to_all_devices()
        self.assertIsNotNone(notification.last_sent_at)
        self.assertIsNotNone(notification.pk)

    def test_push_to_all_devices_no_persist(self):
        # persist=False on an unsaved notification leaves it unsaved and unstamped.
        notification = Notification(service=self.service, message='Test message (new)')
        notification.persist = False
        notification.push_to_all_devices()
        self.assertIsNone(notification.last_sent_at)
        self.assertIsNone(notification.pk)
class ManagementCommandPushNotificationTest(UseMockSSLServerMixin, TestCase):
    """Tests for the push_ios_notification management command, covering the
    explicit persist flag and its settings-based default."""

    def setUp(self):
        # Timestamp taken before each test so *_gt filters only match rows
        # touched by that test.
        self.started_at = dt_now()
        cert, key = generate_cert_and_pkey()
        self.service = APNService.objects.create(name='service', hostname='127.0.0.1',
                                                 private_key=key, certificate=cert)
        self.service.PORT = 2195
        self.device = Device.objects.create(token=TOKEN, service=self.service)

    def test_call_push_ios_notification_command_explicit_persist(self):
        # persist=True: a Notification row is created/stamped, device notified.
        msg = 'some message'
        management.call_command('push_ios_notification', **{'message': msg, 'service': self.service.id, 'verbosity': 0, 'persist': True})
        self.assertTrue(Notification.objects.filter(message=msg, last_sent_at__gt=self.started_at).exists())
        self.assertTrue(self.device in Device.objects.filter(last_notified_at__gt=self.started_at))

    def test_call_push_ios_notification_command_explicit_no_persist(self):
        # persist=False: no Notification row, but the device is still notified.
        msg = 'some message'
        management.call_command('push_ios_notification', **{'message': msg, 'service': self.service.id, 'verbosity': 0, 'persist': False})
        self.assertFalse(Notification.objects.filter(message=msg, last_sent_at__gt=self.started_at).exists())
        self.assertTrue(self.device in Device.objects.filter(last_notified_at__gt=self.started_at))

    @override_settings(IOS_NOTIFICATIONS_PERSIST_NOTIFICATIONS=True)
    def test_call_push_ios_notification_command_default_persist(self):
        # No persist option: the setting (True) decides, so a row is created.
        msg = 'some message'
        management.call_command('push_ios_notification', **{'message': msg, 'service': self.service.id, 'verbosity': 0})
        self.assertTrue(Notification.objects.filter(message=msg, last_sent_at__gt=self.started_at).exists())
        self.assertTrue(self.device in Device.objects.filter(last_notified_at__gt=self.started_at))

    @override_settings(IOS_NOTIFICATIONS_PERSIST_NOTIFICATIONS=False)
    def test_call_push_ios_notification_command_default_no_persist(self):
        # No persist option: the setting (False) decides, so no row is created.
        msg = 'some message'
        management.call_command('push_ios_notification', **{'message': msg, 'service': self.service.id, 'verbosity': 0})
        self.assertFalse(Notification.objects.filter(message=msg, last_sent_at__gt=self.started_at).exists())
        self.assertTrue(self.device in Device.objects.filter(last_notified_at__gt=self.started_at))

    @override_settings()
    def test_call_push_ios_notification_command_default_persist_not_specified(self):
        # With neither the option nor the setting present, persisting is the
        # default behavior.
        try:
            # making sure that IOS_NOTIFICATIONS_PERSIST_NOTIFICATIONS is not specified in app settings, otherwise this test means nothing
            del settings.IOS_NOTIFICATIONS_PERSIST_NOTIFICATIONS
        except AttributeError:
            pass
        msg = 'some message'
        management.call_command('push_ios_notification', **{'message': msg, 'service': self.service.id, 'verbosity': 0})
        self.assertTrue(Notification.objects.filter(message=msg, last_sent_at__gt=self.started_at).exists())
        self.assertTrue(self.device in Device.objects.filter(last_notified_at__gt=self.started_at))

    def test_either_message_or_extra_option_required(self):
        # Calling the command with neither --message nor --extra must error out.
        # In Django < 1.5 django.core.management.base.BaseCommand.execute
        # catches CommandError and raises SystemExit instead.
        exception = SystemExit if django.VERSION < (1, 5) else management.base.CommandError
        with self.assertRaises(exception):
            management.call_command('push_ios_notification', service=self.service.pk,
                                    verbosity=0, stderr=StringIO.StringIO())
class ManagementCommandCallFeedbackService(TestCase):
    # TODO: no tests implemented yet for the call_feedback_service command.
    pass
class DefaultSettings(TestCase):
    """Verify the fallback values get_setting() returns for app settings."""

    def test_persist_notifications_setting(self):
        # Notifications are persisted by default.
        self.assertEqual(get_setting('IOS_NOTIFICATIONS_PERSIST_NOTIFICATIONS'), True)

    def test_authentication_setting(self):
        # No API authentication is configured by default.
        self.assertEqual(get_setting('IOS_NOTIFICATIONS_AUTHENTICATION'), None)

    def test_auth_user_model(self):
        # Defaults to Django's stock user model.
        self.assertEqual(get_setting('AUTH_USER_MODEL'), 'auth.User')

    def test_invalid_setting(self):
        # Unknown setting names raise KeyError rather than returning a default.
        bogus_name = '_THIS_SETTING_SHOULD_NOT_EXIST__________'
        with self.assertRaises(KeyError):
            get_setting(bogus_name)
| |
from twisted.trial import unittest
import random, time
from twisted.web2 import http_headers
from twisted.web2.http_headers import Cookie, HeaderHandler
from twisted.python import util
class parsedvalue:
    """Marker class wrapping a raw header value for identity in assertions.

    Name kept lowercase (non-PEP8) for compatibility with existing tests.
    """
    def __init__(self, raw):
        self.raw = raw

    def __eq__(self, other):
        return isinstance(other, parsedvalue) and other.raw == self.raw

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; without this, `!=`
        # silently fell back to identity comparison.
        return not self.__eq__(other)

    def __repr__(self):
        # Readable output in assertEquals failure messages.
        return 'parsedvalue(%r)' % (self.raw,)
class HeadersAPITest(unittest.TestCase):
    """Make sure the public API exists and works."""

    def testRaw(self):
        # Raw (unparsed) header access with no parsers/generators registered.
        rawvalue = ("value1", "value2")
        h = http_headers.Headers(handler=HeaderHandler(parsers={}, generators={}))
        h.setRawHeaders("test", rawvalue)
        self.assertEquals(h.hasHeader("test"), True)
        self.assertEquals(h.getRawHeaders("test"), rawvalue)
        # Header names are canonicalized ('test' -> 'Test') when enumerated.
        self.assertEquals(list(h.getAllRawHeaders()), [('Test', rawvalue)])
        self.assertEquals(h.getRawHeaders("foobar"), None)
        h.removeHeader("test")
        self.assertEquals(h.getRawHeaders("test"), None)

    def testParsed(self):
        # Parsed header access: values are stored and returned unchanged.
        parsed = parsedvalue(("value1", "value2"))
        h = http_headers.Headers(handler=HeaderHandler(parsers={}, generators={}))
        h.setHeader("test", parsed)
        self.assertEquals(h.hasHeader("test"), True)
        self.assertEquals(h.getHeader("test"), parsed)
        self.assertEquals(h.getHeader("foobar"), None)
        h.removeHeader("test")
        self.assertEquals(h.getHeader("test"), None)

    def testParsedAndRaw(self):
        # Conversion between raw and parsed forms via a registered
        # parser/generator pair for the 'test' header.
        def parse(raw):
            return parsedvalue(raw)

        def generate(parsed):
            return parsed.raw

        rawvalue = ("value1", "value2")
        rawvalue2 = ("value3", "value4")
        handler = HeaderHandler(parsers={'test':(parse,)},
                                generators={'test':(generate,)})
        h = http_headers.Headers(handler=handler)
        h.setRawHeaders("test", rawvalue)
        self.assertEquals(h.getHeader("test"), parsedvalue(rawvalue))
        h.setHeader("test", parsedvalue(rawvalue2))
        self.assertEquals(h.getRawHeaders("test"), rawvalue2)

        # Check the initializers
        h = http_headers.Headers(rawHeaders={"test": rawvalue},
                                 handler=handler)
        self.assertEquals(h.getHeader("test"), parsedvalue(rawvalue))
        h = http_headers.Headers({"test": parsedvalue(rawvalue2)},
                                 handler=handler)
        self.assertEquals(h.getRawHeaders("test"), rawvalue2)

    def testImmutable(self):
        # After makeImmutable(), every mutation entry point must raise.
        h = http_headers.Headers(handler=HeaderHandler(parsers={}, generators={}))
        h.makeImmutable()
        self.assertRaises(AttributeError, h.setRawHeaders, "test", [1])
        self.assertRaises(AttributeError, h.setHeader, "test", 1)
        self.assertRaises(AttributeError, h.removeHeader, "test")
class TokenizerTest(unittest.TestCase):
    """Test header list parsing functions."""

    def testParse(self):
        # Tokenize a single raw header string into value strings and
        # single-character separator Tokens.
        parser = lambda val: list(http_headers.tokenize([val,]))
        Token = http_headers.Token
        tests = (('foo,bar', ['foo', Token(','), 'bar']),
                 # Unquoted tokens are lowercased; quoted strings keep case.
                 ('FOO,BAR', ['foo', Token(','), 'bar']),
                 (' \t foo \t bar \t , \t baz ', ['foo', Token(' '), 'bar', Token(','), 'baz']),
                 ('()<>@,;:\\/[]?={}', [Token('('), Token(')'), Token('<'), Token('>'), Token('@'), Token(','), Token(';'), Token(':'), Token('\\'), Token('/'), Token('['), Token(']'), Token('?'), Token('='), Token('{'), Token('}')]),
                 (' "foo" ', ['foo']),
                 ('"FOO(),\\"BAR,"', ['FOO(),"BAR,']))
        # Malformed inputs that must raise ValueError. Note "\x127" is the
        # control char \x12 followed by a literal '7'.
        raiseTests = ('"open quote', '"ending \\', "control character: \x127", "\x00", "\x1f")

        for test,result in tests:
            self.assertEquals(parser(test), result)
        for test in raiseTests:
            self.assertRaises(ValueError, parser, test)

    def testGenerate(self):
        # TODO: not implemented.
        pass

    def testRoundtrip(self):
        # TODO: not implemented.
        pass
def atSpecifiedTime(when, func):
    """Return a wrapper for ``func`` that runs it with time.time() pinned.

    The wrapper monkey-patches time.time to return ``when`` for the duration
    of the call and always restores the original afterwards.
    """
    def inner(*args, **kwargs):
        saved_time = time.time
        time.time = lambda: when
        try:
            return func(*args, **kwargs)
        finally:
            time.time = saved_time
    return util.mergeFunctionMetadata(func, inner)
def parseHeader(name, val):
    """Parse the raw value list ``val`` for header ``name`` and return the
    parsed form, using the default HTTP header handler."""
    headers = http_headers.Headers(handler=http_headers.DefaultHTTPHandler)
    headers.setRawHeaders(name, val)
    return headers.getHeader(name)

# Freeze time.time() at Sun, 09 Sep 2001 01:46:30 GMT so date-relative
# headers parse deterministically.
parseHeader = atSpecifiedTime(999999990, parseHeader)
def generateHeader(name, val):
    """Set parsed value ``val`` for header ``name`` and return the generated
    raw value list, using the default HTTP header handler."""
    headers = http_headers.Headers(handler=http_headers.DefaultHTTPHandler)
    headers.setHeader(name, val)
    return headers.getRawHeaders(name)

# Freeze time.time() at Sun, 09 Sep 2001 01:46:30 GMT so date-relative
# headers generate deterministically.
generateHeader = atSpecifiedTime(999999990, generateHeader)
class HeaderParsingTestBase(unittest.TestCase):
    def runRoundtripTest(self, headername, table):
        """
        Perform some assertions about the behavior of parsing and
        generating HTTP headers.  Specifically: parse an HTTP header
        value, assert that the parsed form contains all the available
        information with the correct structure; generate the HTTP
        header value from the parsed form, assert that it contains
        certain literal strings; finally, re-parse the generated HTTP
        header value and assert that the resulting structured data is
        the same as the first-pass parsed form.

        @type headername: C{str}
        @param headername: The name of the HTTP header L{table} contains values for.

        @type table: A sequence of tuples describing inputs to and
        outputs from header parsing and generation.  The tuples may be
        either 2 or 3 elements long.  In either case: the first
        element is a string representing an HTTP-format header value;
        the second element is a dictionary mapping names of parameters
        to values of those parameters (the parsed form of the header).
        If there is a third element, it is a list of strings which
        must occur exactly in the HTTP header value
        string which is re-generated from the parsed form.
        """
        for row in table:
            if len(row) == 2:
                rawHeaderInput, parsedHeaderData = row
                requiredGeneratedElements = []
            elif len(row) == 3:
                rawHeaderInput, parsedHeaderData, requiredGeneratedElements = row
                assert isinstance(requiredGeneratedElements, list)
            # NOTE(review): a row of any other length leaves these names unbound
            # and would fail below with UnboundLocalError.

            # parser
            parsed = parseHeader(headername, [rawHeaderInput,])
            self.assertEquals(parsed, parsedHeaderData)

            regeneratedHeaderValue = generateHeader(headername, parsed)
            if requiredGeneratedElements:
                # generator
                for regeneratedElement in regeneratedHeaderValue:
                    # NOTE(review): list.index matches the first equal element,
                    # so duplicate generated values would pair with the wrong
                    # required element.
                    reqEle = requiredGeneratedElements[regeneratedHeaderValue.index(regeneratedElement)]
                    elementIndex = regeneratedElement.find(reqEle)
                    self.assertNotEqual(
                        elementIndex, -1,
                        "%r did not appear in generated HTTP header %r: %r" % (reqEle,
                                                                               headername,
                                                                               regeneratedElement))

            # parser/generator
            reparsed = parseHeader(headername, regeneratedHeaderValue)
            self.assertEquals(parsed, reparsed)

    def invalidParseTest(self, headername, values):
        # Each raw value must fail to parse, i.e. yield None.
        for val in values:
            parsed = parseHeader(headername, val)
            self.assertEquals(parsed, None)
class GeneralHeaderParsingTests(HeaderParsingTestBase):
    """Round-trip tests for general HTTP headers (apply to both requests and
    responses)."""

    def testCacheControl(self):
        table = (
            ("no-cache",
             {'no-cache':None}),
            ("no-cache, no-store, max-age=5, max-stale=3, min-fresh=5, no-transform, only-if-cached, blahblah-extension-thingy",
             {'no-cache': None,
              'no-store': None,
              'max-age':5,
              'max-stale':3,
              'min-fresh':5,
              'no-transform':None,
              'only-if-cached':None,
              'blahblah-extension-thingy':None}),
            ("max-stale",
             {'max-stale':None}),
            ("public, private, no-cache, no-store, no-transform, must-revalidate, proxy-revalidate, max-age=5, s-maxage=10, blahblah-extension-thingy",
             {'public':None,
              'private':None,
              'no-cache':None,
              'no-store':None,
              'no-transform':None,
              'must-revalidate':None,
              'proxy-revalidate':None,
              'max-age':5,
              's-maxage':10,
              'blahblah-extension-thingy':None}),
            # Field-name arguments are lowercased when parsed but re-generated
            # in canonical Header-Case.
            ('private="Set-Cookie, Set-Cookie2", no-cache="PROXY-AUTHENTICATE"',
             {'private': ['set-cookie', 'set-cookie2'],
              'no-cache': ['proxy-authenticate']},
             ['private="Set-Cookie, Set-Cookie2"', 'no-cache="Proxy-Authenticate"']),
        )
        self.runRoundtripTest("Cache-Control", table)

    def testConnection(self):
        table = (
            ("close", ['close',]),
            ("close, foo-bar", ['close', 'foo-bar'])
        )
        self.runRoundtripTest("Connection", table)

    def testDate(self):
        # Don't need major tests since the datetime parser has its own tests
        self.runRoundtripTest("Date", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))

    # def testPragma(self):
    #     fail

    # def testTrailer(self):
    #     fail

    def testTransferEncoding(self):
        table = (
            ('chunked', ['chunked']),
            ('gzip, chunked', ['gzip', 'chunked'])
        )
        self.runRoundtripTest("Transfer-Encoding", table)

    # def testUpgrade(self):
    #     fail

    # def testVia(self):
    #     fail

    # def testWarning(self):
    #     fail
class RequestHeaderParsingTests(HeaderParsingTestBase):
    """Round-trip tests for HTTP request headers.

    Fix: testIfMatch previously built its table but never called
    runRoundtripTest, so the test silently passed without checking anything.
    """
    #FIXME test ordering too.

    def testAccept(self):
        table = (
            ("audio/*;q=0.2, audio/basic",
             {http_headers.MimeType('audio', '*'): 0.2,
              http_headers.MimeType('audio', 'basic'): 1.0}),
            ("text/plain;q=0.5, text/html, text/x-dvi;q=0.8, text/x-c",
             {http_headers.MimeType('text', 'plain'): 0.5,
              http_headers.MimeType('text', 'html'): 1.0,
              http_headers.MimeType('text', 'x-dvi'): 0.8,
              http_headers.MimeType('text', 'x-c'): 1.0}),
            ("text/*, text/html, text/html;level=1, */*",
             {http_headers.MimeType('text', '*'): 1.0,
              http_headers.MimeType('text', 'html'): 1.0,
              http_headers.MimeType('text', 'html', (('level', '1'),)): 1.0,
              http_headers.MimeType('*', '*'): 1.0}),
            ("text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5",
             {http_headers.MimeType('text', '*'): 0.3,
              http_headers.MimeType('text', 'html'): 0.7,
              http_headers.MimeType('text', 'html', (('level', '1'),)): 1.0,
              http_headers.MimeType('text', 'html', (('level', '2'),)): 0.4,
              http_headers.MimeType('*', '*'): 0.5}),
        )
        self.runRoundtripTest("Accept", table)

    def testAcceptCharset(self):
        table = (
            ("iso-8859-5, unicode-1-1;q=0.8",
             {'iso-8859-5': 1.0, 'iso-8859-1': 1.0, 'unicode-1-1': 0.8},
             ["iso-8859-5", "unicode-1-1;q=0.8", "iso-8859-1"]),
            ("iso-8859-1;q=0.7",
             {'iso-8859-1': 0.7}),
            ("*;q=.7",
             {'*': 0.7},
             ["*;q=0.7"]),
            ("",
             {'iso-8859-1': 1.0},
             ["iso-8859-1"]), # Yes this is an actual change -- we'll say that's okay. :)
        )
        self.runRoundtripTest("Accept-Charset", table)

    def testAcceptEncoding(self):
        table = (
            ("compress, gzip",
             {'compress': 1.0, 'gzip': 1.0, 'identity': 0.0001}),
            ("",
             {'identity': 0.0001}),
            ("*",
             {'*': 1}),
            ("compress;q=0.5, gzip;q=1.0",
             {'compress': 0.5, 'gzip': 1.0, 'identity': 0.0001},
             ["compress;q=0.5", "gzip"]),
            ("gzip;q=1.0, identity;q=0.5, *;q=0",
             {'gzip': 1.0, 'identity': 0.5, '*':0},
             ["gzip", "identity;q=0.5", "*;q=0"]),
        )
        self.runRoundtripTest("Accept-Encoding", table)

    def testAcceptLanguage(self):
        table = (
            ("da, en-gb;q=0.8, en;q=0.7",
             {'da': 1.0, 'en-gb': 0.8, 'en': 0.7}),
            ("*",
             {'*': 1}),
        )
        self.runRoundtripTest("Accept-Language", table)

    def testAuthorization(self):
        table = (
            ("Basic dXNlcm5hbWU6cGFzc3dvcmQ=",
             ("basic", "dXNlcm5hbWU6cGFzc3dvcmQ="),
             ["basic dXNlcm5hbWU6cGFzc3dvcmQ="]),
            ('Digest nonce="bar", realm="foo", username="baz", response="bax"',
             ('digest', 'nonce="bar", realm="foo", username="baz", response="bax"'),
             ['digest', 'nonce="bar"', 'realm="foo"', 'username="baz"', 'response="bax"'])
        )
        self.runRoundtripTest("Authorization", table)

    def testCookie(self):
        # Old-style (Netscape) cookies.
        table = (
            ('name=value', [Cookie('name', 'value')]),
            ('"name"="value"', [Cookie('"name"', '"value"')]),
            ('name,"blah=value,"', [Cookie('name,"blah', 'value,"')]),
            ('name,"blah = value," ', [Cookie('name,"blah', 'value,"')], ['name,"blah=value,"']),
            ("`~!@#$%^&*()-_+[{]}\\|:'\",<.>/?=`~!@#$%^&*()-_+[{]}\\|:'\",<.>/?", [Cookie("`~!@#$%^&*()-_+[{]}\\|:'\",<.>/?", "`~!@#$%^&*()-_+[{]}\\|:'\",<.>/?")]),
            ('name,"blah = value," ; name2=val2',
             [Cookie('name,"blah', 'value,"'), Cookie('name2', 'val2')],
             ['name,"blah=value,"', 'name2=val2']),
        )
        self.runRoundtripTest("Cookie", table)

        #newstyle RFC2965 Cookie
        table2 = (
            ('$Version="1";'
             'name="value";$Path="/foo";$Domain="www.local";$Port="80,8000";'
             'name2="value"',
             [Cookie('name', 'value', path='/foo', domain='www.local', ports=(80,8000), version=1), Cookie('name2', 'value', version=1)]),
            ('$Version="1";'
             'name="value";$Port',
             [Cookie('name', 'value', ports=(), version=1)]),
            ('$Version = 1, NAME = "qq\\"qq",Frob=boo',
             [Cookie('name', 'qq"qq', version=1), Cookie('frob', 'boo', version=1)],
             ['$Version="1";name="qq\\"qq";frob="boo"']),
        )
        self.runRoundtripTest("Cookie", table2)

        # Generate only!
        # make headers by combining oldstyle and newstyle cookies
        table3 = (
            ([Cookie('name', 'value'), Cookie('name2', 'value2', version=1)],
             '$Version="1";name=value;name2="value2"'),
            ([Cookie('name', 'value', path="/foo"), Cookie('name2', 'value2', domain="bar.baz", version=1)],
             '$Version="1";name=value;$Path="/foo";name2="value2";$Domain="bar.baz"'),
            ([Cookie('invalid,"name', 'value'), Cookie('name2', 'value2', version=1)],
             '$Version="1";name2="value2"'),
            ([Cookie('name', 'qq"qq'), Cookie('name2', 'value2', version=1)],
             '$Version="1";name="qq\\"qq";name2="value2"'),
        )
        for row in table3:
            self.assertEquals(generateHeader("Cookie", row[0]), [row[1],])

    def testSetCookie(self):
        table = (
            ('name,"blah=value,; expires=Sun, 09 Sep 2001 01:46:40 GMT; path=/foo; domain=bar.baz; secure',
             [Cookie('name,"blah', 'value,', expires=1000000000, path="/foo", domain="bar.baz", secure=True)]),
            ('name,"blah = value, ; expires="Sun, 09 Sep 2001 01:46:40 GMT"',
             [Cookie('name,"blah', 'value,', expires=1000000000)],
             ['name,"blah=value,', 'expires=Sun, 09 Sep 2001 01:46:40 GMT']),
        )
        self.runRoundtripTest("Set-Cookie", table)

    def testSetCookie2(self):
        table = (
            ('name="value"; Comment="YadaYada"; CommentURL="http://frobnotz/"; Discard; Domain="blah.blah"; Max-Age=10; Path="/foo"; Port="80,8080"; Secure; Version="1"',
             [Cookie("name", "value", comment="YadaYada", commenturl="http://frobnotz/", discard=True, domain="blah.blah", expires=1000000000, path="/foo", ports=(80,8080), secure=True, version=1)]),
        )
        self.runRoundtripTest("Set-Cookie2", table)

    def testExpect(self):
        table = (
            ("100-continue",
             {"100-continue":(None,)}),
            ('foobar=twiddle',
             {'foobar':('twiddle',)}),
            ("foo=bar;a=b;c",
             {'foo':('bar',('a', 'b'), ('c', None))})
        )
        self.runRoundtripTest("Expect", table)

    def testFrom(self):
        self.runRoundtripTest("From", (("webmaster@w3.org", "webmaster@w3.org"),))

    def testHost(self):
        self.runRoundtripTest("Host", (("www.w3.org", "www.w3.org"),))

    def testIfMatch(self):
        table = (
            ('"xyzzy"', [http_headers.ETag('xyzzy')]),
            ('"xyzzy", "r2d2xxxx", "c3piozzzz"', [http_headers.ETag('xyzzy'),
                                                  http_headers.ETag('r2d2xxxx'),
                                                  http_headers.ETag('c3piozzzz')]),
            ('*', ['*']),
        )
        # Fix: the table was built but never exercised before.
        self.runRoundtripTest("If-Match", table)

    def testIfModifiedSince(self):
        # Don't need major tests since the datetime parser has its own test
        # Just test stupid ; length= brokenness.
        table = (
            ("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),
            ("Sun, 09 Sep 2001 01:46:40 GMT; length=500", 1000000000, ["Sun, 09 Sep 2001 01:46:40 GMT"]),
        )
        self.runRoundtripTest("If-Modified-Since", table)

    def testIfNoneMatch(self):
        table = (
            ('"xyzzy"', [http_headers.ETag('xyzzy')]),
            ('W/"xyzzy", "r2d2xxxx", "c3piozzzz"', [http_headers.ETag('xyzzy', weak=True),
                                                    http_headers.ETag('r2d2xxxx'),
                                                    http_headers.ETag('c3piozzzz')]),
            ('W/"xyzzy", W/"r2d2xxxx", W/"c3piozzzz"', [http_headers.ETag('xyzzy', weak=True),
                                                        http_headers.ETag('r2d2xxxx', weak=True),
                                                        http_headers.ETag('c3piozzzz', weak=True)]),
            ('*', ['*']),
        )
        self.runRoundtripTest("If-None-Match", table)

    def testIfRange(self):
        table = (
            ('"xyzzy"', http_headers.ETag('xyzzy')),
            ('W/"xyzzy"', http_headers.ETag('xyzzy', weak=True)),
            ('W/"xyzzy"', http_headers.ETag('xyzzy', weak=True)),
            ("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),
        )
        self.runRoundtripTest("If-Range", table)

    def testIfUnmodifiedSince(self):
        self.runRoundtripTest("If-Unmodified-Since", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))

    def testMaxForwards(self):
        self.runRoundtripTest("Max-Forwards", (("15", 15),))

    # def testProxyAuthorize(self):
    #     fail

    def testRange(self):
        table = (
            ("bytes=0-499", ('bytes', [(0,499),])),
            ("bytes=500-999", ('bytes', [(500,999),])),
            ("bytes=-500",('bytes', [(None,500),])),
            ("bytes=9500-",('bytes', [(9500, None),])),
            ("bytes=0-0,-1", ('bytes', [(0,0),(None,1)])),
        )
        self.runRoundtripTest("Range", table)

    def testReferer(self):
        self.runRoundtripTest("Referer", (("http://www.w3.org/hypertext/DataSources/Overview.html",
                                           "http://www.w3.org/hypertext/DataSources/Overview.html"),))

    def testTE(self):
        table = (
            ("deflate", {'deflate':1}),
            ("", {}),
            ("trailers, deflate;q=0.5", {'trailers':1, 'deflate':0.5}),
        )
        self.runRoundtripTest("TE", table)

    def testUserAgent(self):
        self.runRoundtripTest("User-Agent", (("CERN-LineMode/2.15 libwww/2.17b3", "CERN-LineMode/2.15 libwww/2.17b3"),))
class ResponseHeaderParsingTests(HeaderParsingTestBase):
    """Round-trip tests for HTTP response headers."""

    def testAcceptRanges(self):
        self.runRoundtripTest("Accept-Ranges", (("bytes", ["bytes"]), ("none", ["none"])))

    def testAge(self):
        self.runRoundtripTest("Age", (("15", 15),))

    def testETag(self):
        table = (
            ('"xyzzy"', http_headers.ETag('xyzzy')),
            ('W/"xyzzy"', http_headers.ETag('xyzzy', weak=True)),
            ('""', http_headers.ETag('')),
        )
        self.runRoundtripTest("ETag", table)

    def testLocation(self):
        self.runRoundtripTest("Location", (("http://www.w3.org/pub/WWW/People.htm",
                                            "http://www.w3.org/pub/WWW/People.htm"),))

    # def testProxyAuthenticate(self):
    #     fail

    def testRetryAfter(self):
        # time() is always 999999990 when being tested.
        table = (
            ("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000, ["10"]),
            ("120", 999999990+120),
        )
        self.runRoundtripTest("Retry-After", table)

    def testServer(self):
        self.runRoundtripTest("Server", (("CERN/3.0 libwww/2.17", "CERN/3.0 libwww/2.17"),))

    def testVary(self):
        table = (
            ("*", ["*"]),
            ("Accept, Accept-Encoding", ["accept", "accept-encoding"], ["accept", "accept-encoding"])
        )
        self.runRoundtripTest("Vary", table)

    def testWWWAuthenticate(self):
        # Each fixture: (raw header value, parsed challenge list,
        # per-generated-header required substrings).
        digest = ('Digest realm="digest realm", nonce="bAr", qop="auth"',
                  [('Digest', {'realm': 'digest realm', 'nonce': 'bAr',
                               'qop': 'auth'})],
                  ['Digest', 'realm="digest realm"',
                   'nonce="bAr"', 'qop="auth"'])

        basic = ('Basic realm="foo"',
                 [('Basic', {'realm': 'foo'})], ['Basic', 'realm="foo"'])

        ntlm = ('NTLM',
                [('NTLM', {})], ['NTLM', ''])

        negotiate = ('Negotiate SomeGssAPIData',
                     [('Negotiate', 'SomeGssAPIData')],
                     ['Negotiate', 'SomeGssAPIData'])

        table = (digest,
                 basic,
                 (digest[0]+', '+basic[0],
                  digest[1] + basic[1],
                  [digest[2], basic[2]]),
                 ntlm,
                 negotiate,
                 (ntlm[0]+', '+basic[0],
                  ntlm[1] + basic[1],
                  [ntlm[2], basic[2]]),
                 (digest[0]+', '+negotiate[0],
                  digest[1] + negotiate[1],
                  [digest[2], negotiate[2]]),
                 # NOTE(review): this row concatenates the two requirement lists
                 # into one element ([x + x]) instead of pairing them ([x, x])
                 # like the surrounding rows -- looks inconsistent; confirm
                 # whether it is intentional.
                 (negotiate[0]+', '+negotiate[0],
                  negotiate[1] + negotiate[1],
                  [negotiate[2] + negotiate[2]]),
                 (ntlm[0]+', '+ntlm[0],
                  ntlm[1] + ntlm[1],
                  [ntlm[2], ntlm[2]]),
                 (basic[0]+', '+ntlm[0],
                  basic[1] + ntlm[1],
                  [basic[2], ntlm[2]]),
                 )

        # runRoundtripTest doesn't work because we don't generate a single
        # header
        headername = 'WWW-Authenticate'
        for row in table:
            rawHeaderInput, parsedHeaderData, requiredGeneratedElements = row
            parsed = parseHeader(headername, [rawHeaderInput,])
            self.assertEquals(parsed, parsedHeaderData)

            regeneratedHeaderValue = generateHeader(headername, parsed)
            for regeneratedElement in regeneratedHeaderValue:
                # Pair each generated header with its required substrings by
                # position (same list.index caveat as runRoundtripTest).
                requiredElements = requiredGeneratedElements[
                    regeneratedHeaderValue.index(
                        regeneratedElement)]
                for reqEle in requiredElements:
                    elementIndex = regeneratedElement.find(reqEle)
                    self.assertNotEqual(
                        elementIndex, -1,
                        "%r did not appear in generated HTTP header %r: %r" % (reqEle,
                                                                               headername,
                                                                               regeneratedElement))

            # parser/generator
            reparsed = parseHeader(headername, regeneratedHeaderValue)
            self.assertEquals(parsed, reparsed)
class EntityHeaderParsingTests(HeaderParsingTestBase):
    """Round-trip tests for HTTP entity headers."""

    def testAllow(self):
        # Allow is a silly case-sensitive header unlike all the rest
        table = (
            ("GET", ['GET', ]),
            ("GET, HEAD, PUT", ['GET', 'HEAD', 'PUT']),
        )
        self.runRoundtripTest("Allow", table)

    def testContentEncoding(self):
        table = (
            ("gzip", ['gzip',]),
        )
        self.runRoundtripTest("Content-Encoding", table)

    def testContentLanguage(self):
        table = (
            ("da", ['da',]),
            ("mi, en", ['mi', 'en']),
        )
        self.runRoundtripTest("Content-Language", table)

    def testContentLength(self):
        self.runRoundtripTest("Content-Length", (("15", 15),))
        # Non-numeric lengths must fail to parse (yield None).
        self.invalidParseTest("Content-Length", ("asdf",))

    def testContentLocation(self):
        self.runRoundtripTest("Content-Location",
                              (("http://www.w3.org/pub/WWW/People.htm",
                                "http://www.w3.org/pub/WWW/People.htm"),))

    def testContentMD5(self):
        # The raw value is base64; the parsed form is the decoded bytes.
        self.runRoundtripTest("Content-MD5", (("Q2hlY2sgSW50ZWdyaXR5IQ==", "Check Integrity!"),))
        # Invalid base64 must fail to parse.
        self.invalidParseTest("Content-MD5", ("sdlaksjdfhlkaj",))

    def testContentRange(self):
        # Parsed form: (units, start, end, total); None means '*' (unknown).
        table = (
            ("bytes 0-499/1234", ("bytes", 0, 499, 1234)),
            ("bytes 500-999/1234", ("bytes", 500, 999, 1234)),
            ("bytes 500-1233/1234", ("bytes", 500, 1233, 1234)),
            ("bytes 734-1233/1234", ("bytes", 734, 1233, 1234)),
            ("bytes 734-1233/*", ("bytes", 734, 1233, None)),
            ("bytes */1234", ("bytes", None, None, 1234)),
            ("bytes */*", ("bytes", None, None, None))
        )
        self.runRoundtripTest("Content-Range", table)

    def testContentType(self):
        table = (
            ("text/html;charset=iso-8859-4", http_headers.MimeType('text', 'html', (('charset','iso-8859-4'),))),
            ("text/html", http_headers.MimeType('text', 'html')),
        )
        self.runRoundtripTest("Content-Type", table)

    def testExpires(self):
        self.runRoundtripTest("Expires", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
        # Invalid expires MUST return date in the past.
        self.assertEquals(parseHeader("Expires", ["0"]), 0)
        self.assertEquals(parseHeader("Expires", ["wejthnaljn"]), 0)

    def testLastModified(self):
        # Don't need major tests since the datetime parser has its own test
        self.runRoundtripTest("Last-Modified", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
class DateTimeTest(unittest.TestCase):
    """Test date parsing functions."""

    def testParse(self):
        # 784111777 == Sun, 06 Nov 1994 08:49:37 GMT; every format below must
        # parse to the same timestamp.
        timeNum = 784111777
        timeStrs = ('Sun, 06 Nov 1994 08:49:37 GMT',
                    'Sunday, 06-Nov-94 08:49:37 GMT',
                    'Sun Nov 6 08:49:37 1994',

                    # Also some non-RFC formats, for good measure.
                    'Somefakeday 6 Nov 1994 8:49:37',
                    '6 Nov 1994 8:49:37',
                    'Sun, 6 Nov 1994 8:49:37',
                    '6 Nov 1994 8:49:37 GMT',
                    '06-Nov-94 08:49:37',
                    'Sunday, 06-Nov-94 08:49:37',
                    '06-Nov-94 08:49:37 GMT',
                    'Nov 6 08:49:37 1994',
                    )
        for timeStr in timeStrs:
            self.assertEquals(http_headers.parseDateTime(timeStr), timeNum)

        # Test 2 Digit date wraparound yuckiness.
        self.assertEquals(http_headers.parseDateTime(
            'Monday, 11-Oct-04 14:56:50 GMT'), 1097506610)
        self.assertEquals(http_headers.parseDateTime(
            'Monday, 11-Oct-2004 14:56:50 GMT'), 1097506610)

    def testGenerate(self):
        self.assertEquals(http_headers.generateDateTime(784111777), 'Sun, 06 Nov 1994 08:49:37 GMT')

    def testRoundtrip(self):
        # Property-style check: generate/parse must invert each other for
        # 2000 random timestamps.
        for i in range(2000):
            # NOTE(review): local `time` shadows the imported time module
            # within this method.
            time = random.randint(0, 2000000000)
            timestr = http_headers.generateDateTime(time)
            time2 = http_headers.parseDateTime(timestr)
            self.assertEquals(time, time2)
class TestMimeType(unittest.TestCase):
    def testEquality(self):
        """Test that various uses of the constructor are equal
        """
        # Same MIME parameters supplied as kwargs, a dict, a tuple of pairs,
        # and parsed from a string must all compare equal.
        kwargMime = http_headers.MimeType('text', 'plain',
                                          key='value',
                                          param=None)
        dictMime = http_headers.MimeType('text', 'plain',
                                         {'param': None,
                                          'key': 'value'})
        tupleMime = http_headers.MimeType('text', 'plain',
                                          (('param', None),
                                           ('key', 'value')))
        stringMime = http_headers.MimeType.fromString('text/plain;key=value;param')

        self.assertEquals(kwargMime, dictMime)
        self.assertEquals(dictMime, tupleMime)
        self.assertEquals(kwargMime, tupleMime)
        self.assertEquals(kwargMime, stringMime)
| |
## @package dataio
# Module caffe2.python.dataio
"""
Defines the base interface for reading and writing operations.
Readers/Writers are objects that produce operations that read/write sequences
of data. Each operation reads or writes a list of BlobReferences.
Readers and Writers must be implemented such that read and write operations
are atomic and thread safe.
Examples of possible Readers and Writers:
QueueReader, QueueWriter,
DatasetReader, DatasetWriter,
See `dataset.py` for an example of implementation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core
from caffe2.python.schema import Field, Struct, from_blob_list
import numpy as np
class Reader(object):
    """
    Reader is an abstract class to be implemented in order to provide
    operations capable of iterating through a dataset or stream of data.

    A Reader must implement at least one operation, `read`, which
    adds operations to a net that read the next batch of data. Readers can
    optionally support the `reset` operation, which is useful when multiple
    passes over the data are required.
    """
    # Fix: the text above previously sat mid-class as a stray no-op string
    # statement; it is now the actual class docstring.

    def __init__(self, schema=None):
        """Construct a reader.

        Args:
            schema: optional Field describing the records this reader yields.
        """
        if schema is not None:
            assert isinstance(schema, Field)
        self._schema = schema

    def schema(self):
        """
        Return the schema associated with the Reader.

        Raises:
            AssertionError: if no schema was provided at construction time.
        """
        assert self._schema is not None, 'Schema not provided for this reader.'
        return self._schema

    def _set_schema(self, schema):
        # Internal hook letting wrappers propagate a schema after the fact.
        self._schema = schema

    def setup_ex(self, init_net, finish_net):
        """Nets to be executed once at startup and finish.
        Experimental extension. Don't use yet"""
        pass

    def read_ex(self, local_init_net, local_finish_net):
        """Experimental extension to the interface. Don't use yet"""
        read_net = core.Net('reader_body')
        return ([read_net], ) + self.read(read_net)

    def read_record_ex(self, local_init_net, local_finish_net):
        """Experimental extension to the interface. Don't use yet"""
        nets, should_stop, fields = self.read_ex(
            local_init_net, local_finish_net)
        if self._schema:
            # Wrap the raw blob list back into the structured schema.
            fields = from_blob_list(self._schema, fields)
        return nets, should_stop, fields

    def read(self, read_net):
        """
        Add operations to read_net that will read the read batch of data
        and return a list of BlobReference representing the blobs that will
        contain the batches produced.

        Operations added to `read_net` must be thread safe and atomic, that is,
        it should be possible to clone `read_net` and run multiple instances of
        it in parallel.

        Args:
            read_net: the net that will be appended with read operations

        Returns:
            A tuple (should_stop, fields), with:
                should_stop: BlobReference pointing to a boolean scalar
                             blob that indicates whether the read operation
                             was successful or whether the end of data has
                             been reached.
                fields: A tuple of BlobReference containing the latest batch
                        of data that was read.
        """
        raise NotImplementedError('Readers must implement `read`.')

    def reset(self, net):
        """Append operations to `net` that will reset the reader.

        This can be used to read the data multiple times.
        Not all readers support this operation.
        """
        raise NotImplementedError('This reader cannot be resetted.')

    def read_record(self, read_net):
        """Like `read`, but returns fields wrapped in the schema when set."""
        should_stop, fields = self.read(read_net)
        if self._schema:
            fields = from_blob_list(self._schema, fields)
        return should_stop, fields

    def execution_step(self, reader_net_name=None, external_should_stop=None):
        """Create an execution step with a net containing read operators.

        The execution step will contain a `stop_blob` that knows how to stop
        the execution loop when end of data was reached.

        E.g.:
            read_step, fields = reader.execution_step()
            consume_net = core.Net('consume')
            consume_net.Print(fields[0], [])
            p = core.Plan('reader')
            p.AddStep(read_step.AddNet(consume_net))
            core.RunPlan(p)

        Args:
            reader_net_name: (optional) the name of the reader_net to be
                             created. The execution step will
                             be named accordingly.
            external_should_stop: (optional) blob OR'ed into the stop
                                  condition so callers can force a stop.

        Returns:
            A tuple (read_step, fields), with:
                read_step: A newly created execution step containing a net with
                           read operations. The step will have `stop_blob` set,
                           in order to stop the loop on end of data.
                fields: A tuple of BlobReference containing the latest batch
                        of data that was read.
        """
        # Bug fix: reuse the defaulted name so an omitted `reader_net_name`
        # yields a step named 'reader_step' rather than 'None_step'.
        net_name = reader_net_name or 'reader'
        reader_net = core.Net(net_name)
        should_stop, fields = self.read_record(reader_net)
        if external_should_stop is not None:
            should_stop = reader_net.Or([external_should_stop, should_stop])
        read_step = core.execution_step(
            '{}_step'.format(net_name),
            reader_net,
            should_stop_blob=should_stop)
        return (read_step, fields)
class Writer(object):
    """
    Abstract base for objects that feed a data stream or dataset.

    Concrete writers provide two operations: `write`, which appends
    operations emitting the next batch of data to a net, and `commit`,
    which appends operations signalling that no more data will arrive.
    """
    # Schema captured from the most recently written record, if any.
    _schema = None

    def schema(self):
        """Return the schema of the last record written (None if unknown)."""
        return self._schema

    def write(self, writer_net, fields):
        """Add operations to `writer_net` that write the next batch of data.

        Operations added to the net must be thread-safe and unique, that is:
        multiple writers must be able to write to the dataset in parallel.

        Args:
            fields: a tuple of BlobReference containing the batch of data to
                    write.
        """
        raise NotImplementedError('Writers must implement write.')

    def write_record(self, writer_net, fields):
        """Write `fields`, remembering their schema when given a Field."""
        if isinstance(fields, Field):
            self._schema = fields
            fields = fields.field_blobs()
        self.write(writer_net, fields)

    def setup_ex(self, init_net, finish_net):
        """Experimental, don't use yet"""
        self.commit(finish_net)

    def write_ex(self, fields, local_init_net, local_finish_net, stop_blob):
        """Experimental extension to the interface. Don't use yet"""
        net = core.Net('write_net')
        self.write(net, fields)
        return [net]

    def write_record_ex(
            self, fields, local_init_net, local_finish_net, stop_blob=None):
        """Experimental extension to the interface. Don't use yet."""
        if isinstance(fields, Field):
            self._schema = fields
            fields = fields.field_blobs()
        stop_blob = (stop_blob if stop_blob is not None
                     else local_init_net.NextName("dequeue_status"))
        nets = self.write_ex(
            fields, local_init_net, local_finish_net, stop_blob)
        return (nets, stop_blob)

    def commit(self, finish_net):
        """Add operations to `finish_net` that signal end of data.

        This must be implemented by all Writers, but may be no-op for some
        of them.
        """
        pass
class ReaderBuilder(object):
    """Allow usage of a reader in distributed fashion.

    Pure interface: concrete subclasses must override every method below.
    """

    def schema(self):
        raise NotImplementedError()

    def enqueue_splits(self, net, split_queue):
        raise NotImplementedError()

    def splits(self, net):
        raise NotImplementedError()

    def new_reader(self, split_reader=None):
        raise NotImplementedError()
class PipedReaderBuilder(ReaderBuilder):
    """
    Wrap another ReaderBuilder, applying a `piper` function to every reader
    it produces and returning the function's result instead. This allows a
    data-processing pipeline to be replicated for each created reader.

    E.g.:
        PipedReaderBuilder(
            ReaderBuilder(...),
            lambda reader: pipe(reader, processor=my_proc))
    """

    def __init__(self, builder, piper):
        self._builder = builder
        self._piper = piper

    def schema(self):
        # Delegate straight to the wrapped builder.
        return self._builder.schema()

    def enqueue_splits(self, net, split_queue):
        return self._builder.enqueue_splits(net, split_queue)

    def splits(self, net):
        return self._builder.splits(net)

    def new_reader(self, split_reader=None):
        piped = self._piper(self._builder.new_reader(split_reader))
        # The piper may return either a Reader or a Pipe-like object.
        return piped if isinstance(piped, Reader) else piped.reader()
class Pipe(object):
    """Base class pairing one-or-more writers with one-or-more readers.

    Tracks how many readers/writers were attached and optionally tags the
    init nets with `obj_key` so the pipe can be recovered from a net.
    """

    def __init__(self, schema=None, obj_key=None):
        self._num_writers = 0
        self._num_readers = 0
        self._schema = schema
        self._obj_key = obj_key

    def schema(self):
        """Schema of the records flowing through the pipe (may be None)."""
        return self._schema

    def setup(self, global_init_net):
        # Subclasses may add one-time setup operations here.
        pass

    def reader(self):
        raise NotImplementedError()

    def writer(self):
        raise NotImplementedError()

    def num_readers(self):
        return self._num_readers

    def num_writers(self):
        return self._num_writers

    def _new_writer(self, writer_schema, writer_init_net):
        # Adopt the writer's schema if we do not have one yet.
        if self._schema is None and writer_schema is not None:
            self._schema = writer_schema
        self._num_writers += 1
        if self._obj_key is not None:
            writer_init_net.add_attribute(self._obj_key, self)

    def _new_reader(self, reader_init_net):
        self._num_readers += 1
        if self._obj_key is not None:
            reader_init_net.add_attribute(self._obj_key, self)
class CounterReader(Reader):
    """ Reader that produces increasing integers. """

    def __init__(self):
        Reader.__init__(self, schema=Struct(('iter', np.int64)))
        self.counter = None
        self.should_stop = None

    def setup_ex(self, global_init_net, global_finish_net):
        # Lazily create state on first setup; subsequent calls are no-ops.
        if self.counter is not None:
            return
        self.counter = global_init_net.CreateCounter([], init_count=0)
        # Constant False: this reader never signals end of data.
        self.should_stop = global_init_net.ConstantFill(
            [], shape=[], dtype=core.DataType.BOOL, value=False)

    def read_ex(self, local_init_net, local_finish_net):
        net = core.Net('limited_reader_counter')
        current = net.CountUp([self.counter], 1)
        return [net], self.should_stop, [current]
class ReaderWithLimit(Reader):
    """
    Reader that stops after `num_iter` calls.
    If num_iter is None it becomes just a simple reader that exports a global
    flag for "out of data".
    """
    def __init__(self, reader, num_iter=1):
        # Wraps `reader`; `num_iter` is the read budget (None = unlimited).
        Reader.__init__(self, schema=reader._schema)
        self.reader = reader
        self.counter = None
        self.num_iter = num_iter
        net = core.Net('reader_with_limit')
        # Global flag blob, set once the wrapped reader reports end of data.
        self._data_finished = net.AddExternalInput(
            net.NextName('data_finished'))
        if self.num_iter is not None:
            # Placeholder blob for the countdown; created for real in setup_ex.
            self.counter = net.AddExternalInput(net.NextName('counter'))
    def setup_ex(self, global_init_net, global_finish_net):
        # Create the countdown counter (when limited) and initialize the
        # finished flag, then let the wrapped reader do its own setup.
        if self.counter:
            global_init_net.CreateCounter(
                [], [self.counter], init_count=int(self.num_iter))
        self.reader.setup_ex(global_init_net, global_finish_net)
        global_init_net.ConstantFill(
            [], [self._data_finished],
            shape=[], value=False, dtype=core.DataType.BOOL)
    def read_ex(self, local_init_net, local_finish_net):
        """ 1. check if we reached number of iterations and populate the same
        should_stop blob """
        count_net = core.Net('limited_reader_counter')
        if self.counter:
            # CountDown flips should_stop to True once the budget runs out.
            should_stop = count_net.CountDown([self.counter], 1)
        else:
            # Unlimited: should_stop stays a constant False scalar.
            should_stop = count_net.ConstantFill(
                [], 1,
                shape=[], value=False, dtype=core.DataType.BOOL)
        """ 2. call original reader """
        nets, local_data_finished, fields = self.reader.read_ex(
            local_init_net, local_finish_net)
        self._set_schema(self.reader._schema)
        """ 3. check if original reader is done. """
        check_done_net = core.Net('limited_reader_post')
        # copy to the same blob as the counter output to trigger reader
        # stopping
        check_done_net.Copy(local_data_finished, should_stop)
        # update global flag that underlying reader is done
        check_done_net.Or([self._data_finished, local_data_finished],
                          [self._data_finished])
        # this relies on `should_stop` being called after each net.
        return [count_net] + nets + [check_done_net], should_stop, fields
    def data_finished(self):
        """
        Return a blob that can be checked after the end of the reading task,
        which will contain a scalar float indicating whether the underlying
        reader has been exhausted (True) or whether we stopped because reached
        the limit of iterations (False).
        """
        return self._data_finished
def CountUntil(num_iter):
    """Return a reader emitting increasing integers that stops after
    `num_iter` reads."""
    return ReaderWithLimit(CounterReader(), num_iter)
| |
"""client.py contains the functionality to link the python steno3d
client with the steno3d website
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from functools import wraps
from os import mkdir
from os import path
from time import sleep
import requests
from six import string_types
from six.moves import input
from six.moves.urllib.parse import urlparse
from .user import User
# Client release version reported to the server for compatibility checks.
__version__ = '0.3.12'
# Default API endpoint; overridable via login(endpoint=...).
PRODUCTION_BASE_URL = 'https://steno3d.com/'
# Seconds slept by pause() when talking to a localhost endpoint.
SLEEP_TIME = .75
DEVKEY_PROMPT = "If you have a Steno3D developer key, please enter it here > "
# --- User-facing message templates (formatted with .format(...)) ---
WELCOME_MESSAGE = """
>> Welcome to the Python client library for Steno3D!
"""
FIRST_LOGIN = """
If you do not have a Steno3D developer key, you need to request
one from the Steno3D website in order to access the API. Please
log in to the application (if necessary) and request a new key.
{base_url}settings/developer
If you are not yet signed up, you can do that here:
{base_url}signup
When you are ready, please enter the key above, or reproduce this
prompt by calling steno3d.login().
"""
LOGIN_FAILED = """
>> Oh no! We could not log you in. The API developer key that you provided
could not be validated. If your current API key has been lost or
invalidated, please request a new one at:
{base_url}settings/developer
Then, try `steno3d.login('YOUR-NEW-DEVEL-KEY')`
If the problem persists:
1) Restart your Python kernel and try again
2) Update steno3d with `pip install --upgrade steno3d`
3) Ask for <support@steno3d.com>
4) Open an issue https://github.com/seequent/steno3dpy/issues
"""
NOT_CONNECTED = """
>> Oh no! We could not connect to the server. Please ensure that you are:
1) Connected to the Internet
2) Can connect to {base_url}
3) If you are getting an InsecurePlatformWarning while using pip try:
a) Upgrading to Python 2.7.9 or above
b) Or `pip install --upgrade requests[security]`
4) Ask for <support@steno3d.com>
5) Open an issue https://github.com/seequent/steno3dpy/issues
"""
BAD_API_KEY = """
>> Oh no! Your API developer key format is incorrect.
It should be your username followed by '//' then 36 characters.
You may also use only your username if you have access to local saved
credentials. If you have not requested an API key or if you have lost
your API key, please request a new one at:
{base_url}settings/developer
"""
INVALID_VERSION = """
Oh no! Your version of steno3d is out of date.
{your_version}
{current_version}
Please update steno3d with `pip install --upgrade steno3d`.
"""
BETA_TEST = """
It looks like you are using a beta version of steno3d. Thank you for
trying it out!
Please, if you run into any problems or have any feedback, open an
issue at https://github.com/seequent/steno3dpy/issues
If you would like to switch to the most recent stable version:
> pip uninstall steno3d
> pip install steno3d
"""
ALREADY_LOGGED_IN = """
You are already logged in as @{user}. To log in as a different user
please `steno3d.logout()`, then login specifying a different
username or API developer key.
"""
def needs_login(func):
    """Decorator ensuring the wrapped call only runs for a logged-in user.

    When nobody is logged in, prints a login hint (suppressed when the call
    passes ``verbose=False``) and returns None without invoking ``func``.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if not Comms.user.logged_in:
            if kwargs.get('verbose', True):
                print("Please login: 'steno3d.login()'")
            return None
        return func(*args, **kwargs)
    return wrapper
def pause():
    """Brief pause on localhost to simulate network delay"""
    if 'localhost' not in Comms.base_url:
        return
    sleep(SLEEP_TIME)
class _Comms(object):
    """Comms controls the interaction between the python client and the
    Steno3D website.

    One module-level instance (`Comms`) tracks the logged-in user, the
    target endpoint URL, and the session cookies used by all requests.
    """

    def __init__(self):
        self.user = User()
        self._base_url = PRODUCTION_BASE_URL
        self._hard_devel_key = None
        # Bug fix: start with an empty cookie jar so requests issued before
        # a successful login cannot fail with AttributeError on _cookies.
        self._cookies = {}

    @property
    def host(self):
        """hostname of url"""
        parseresult = urlparse(self.base_url)
        return parseresult.hostname

    @property
    def base_url(self):
        """base url endpoint for uploading"""
        return getattr(self, '_base_url', PRODUCTION_BASE_URL)

    @base_url.setter
    def base_url(self, value):
        assert isinstance(value, string_types), \
            'Endpoint path must be a string'
        # Patch '/' onto bare URL endpoints
        if not value[-1] == '/':
            value += '/'
        # Check for HTTPS. Robustness fix: parsed.hostname is None for URLs
        # without a network location; the bare `in` test raised TypeError.
        parsed = urlparse(value)
        if (parsed.hostname and '.com' in parsed.hostname and
                parsed.scheme != 'https'):
            raise ValueError('Live endpoints require HTTPS.')
        self._base_url = value

    def login(self, devel_key=None, credentials_file=None,
              skip_credentials=False, endpoint=None, verbose=True):
        """Login to steno3d.com to allow uploading resources. To obtain an
        API developer key, you need a Steno3D account:

        https://steno3d.com/signup

        Then, you can request a devel key:

        https://steno3d.com/settings/developer

        Unless you choose to 'skip_credentials', your API key will be
        saved locally and read next time you call `steno3d.login()`.
        You can always login using a different devel key (or username if
        the corresponding devel key is saved in the credentials file).

        Optional arguments:
            devel_key        - API key from steno3d.com. Prompt will appear if
                               this is not provided or saved in credential
                               file. This may also be a username corresponding
                               to a devel key saved in the credentials file
            credentials_file - Local file where devel keys are stored.
                               (Default: ~/.steno3d_client/credentials)
            skip_credentials - If False (default), devel key will be read
                               from and written to local credentials file.
                               If True, only uses the provided devel key or
                               prompts for a new key.
            endpoint         - Target site (Default: steno3d.com)
        """
        # Check user
        if self.user.logged_in:
            if verbose:
                print(ALREADY_LOGGED_IN.format(user=self.user.username))
            return
        # Set endpoint
        if endpoint is not None:
            self.base_url = endpoint
        # Check client version
        if not self._version_ok(verbose):
            if verbose:
                print('Login failed.')
            return
        if verbose:
            print(WELCOME_MESSAGE)
        # Assess credential file options.
        if skip_credentials:
            self._login_with(devel_key, verbose)
            return
        # Extract credential file
        if credentials_file is None:
            credentials_file = path.sep.join([path.expanduser('~'),
                                              '.steno3d_client',
                                              'credentials'])
            cred_dir = path.sep.join(credentials_file.split(path.sep)[:-1])
            if not path.isdir(cred_dir):
                mkdir(cred_dir)
        elif isinstance(credentials_file, string_types):
            credentials_file = path.realpath(path.expanduser(
                credentials_file
            ))
            cred_dir = path.sep.join(credentials_file.split(path.sep)[:-1])
        else:
            raise ValueError('credentials_file: must be the name of a file')
        if not path.isdir(cred_dir):
            raise ValueError(
                '{}: credentials file directory must exist'.format(cred_dir)
            )
        if path.exists(credentials_file) and not path.isfile(credentials_file):
            raise ValueError(
                '{}: credentials file must be a file'.format(credentials_file)
            )
        if path.isfile(credentials_file):
            if verbose:
                print('Credentials file found: {}'.format(credentials_file))
            with open(credentials_file, 'r') as cred:
                devel_keys = cred.readlines()
            # Keep only well-formed keys; usernames are the part before '//'.
            devel_keys = [dk.strip() for dk in devel_keys
                          if self.is_key(dk.strip())]
            usernames = [dk.split('//')[0] for dk in devel_keys]
        else:
            if verbose:
                print('Creating new credentials file: {}'.format(credentials_file))
            devel_keys = []
            usernames = []
        # Get key from credential file
        if devel_key in usernames:
            if verbose:
                print('Accessing API developer key for @{}'.format(devel_key))
            devel_key = devel_keys[usernames.index(devel_key)]
        if devel_key is None and len(devel_keys) > 0:
            if verbose:
                print('Accessing API developer key for @{}'.format(usernames[0]))
            devel_key = devel_keys[0]
        self._login_with(devel_key, verbose)
        # Update credential file: most recent key first, duplicates dropped.
        if self.user.logged_in:
            updated_devel_keys = [self.user.devel_key]
            for i, key in enumerate(devel_keys):
                if (key == self.user.devel_key or
                        usernames[i] == self.user.username):
                    continue
                updated_devel_keys += [key]
            with open(credentials_file, 'w') as cred:
                cred.writelines(['{}\n'.format(k) for k in updated_devel_keys])

    def _version_ok(self, verbose=True):
        """Check current Steno3D client version in the database.

        Returns False only when the server cannot be reached or explicitly
        requires a newer major/minor version; otherwise returns True
        (possibly after printing an upgrade suggestion).
        """
        try:
            resp = requests.post(
                self.base_url + 'api/client/steno3dpy',
                dict(version=__version__),
                timeout=120,
            )
        except requests.ConnectionError:
            if verbose:
                print(NOT_CONNECTED.format(base_url=self.base_url))
            return False
        if resp.status_code == 200:
            resp_json = resp.json()
            your_ver_str = resp_json['your_version']
            your_ver = [int(v) for v in your_ver_str.split('.')]
            curr_ver_str = resp_json['current_version']
            curr_ver = [int(v) for v in curr_ver_str.split('.')]
            if resp_json['valid'] or curr_ver_str == '0.0.0':
                return True
            elif your_ver[0] == curr_ver[0] and your_ver[1] == curr_ver[1]:
                # Same major/minor: suggest an upgrade but allow login.
                if verbose:
                    print(INVALID_VERSION.format(
                        your_version='Your version: ' + your_ver_str,
                        current_version='Current version: ' + curr_ver_str
                    ))
                return True
            else:
                if verbose:
                    print(INVALID_VERSION.format(
                        your_version='Your version: ' + your_ver_str,
                        current_version='Required version: ' + curr_ver_str
                    ))
                # Older major or minor version: refuse login.
                if your_ver[0] < curr_ver[0]:
                    return False
                if your_ver[0] == curr_ver[0] and your_ver[1] < curr_ver[1]:
                    return False
                return True
        elif resp.status_code == 400 and verbose:
            resp_json = resp.json()
            if 'b' in __version__.split('.')[2]:
                print(BETA_TEST)
            else:
                print(INVALID_VERSION.format(
                    your_version='Your version: ' + __version__,
                    current_version='Error: ' + resp_json['reason']
                ))
        return True

    def _login_with(self, devel_key, verbose=True):
        """Login with devel_key, prompting for one when None is given."""
        if devel_key is None:
            if verbose:
                print(FIRST_LOGIN.format(base_url=self.base_url))
            devel_key = input(DEVKEY_PROMPT)
        if not self.is_key(devel_key):
            if verbose:
                print(BAD_API_KEY.format(base_url=self.base_url))
            return
        try:
            resp = requests.get(
                self.base_url + 'api/me',
                headers={'sshKey': devel_key,
                         'client': 'steno3dpy:{}'.format(__version__)},
                timeout=120,
            )
        except requests.ConnectionError:
            if verbose:
                print(NOT_CONNECTED.format(base_url=self.base_url))
            return
        # Bug fix: was `is not 200`, an identity comparison with an int
        # literal that only worked because CPython caches small integers.
        if resp.status_code != 200:
            if verbose:
                print(LOGIN_FAILED.format(base_url=self.base_url))
            self.logout()
            return
        self.user.login_with_json(resp.json())
        self.user.set_key(devel_key)
        self._cookies = dict(resp.cookies)
        if verbose:
            print(
                'Welcome to Steno3D! You are logged in as @{name}'.format(
                    name=self.user.username
                )
            )

    @staticmethod
    def is_key(devel_key):
        """Checks if devel_key is a valid API key string"""
        if not isinstance(devel_key, string_types):
            return False
        split_key = devel_key.split('//')
        return len(split_key) == 2 and len(split_key[1]) == 36

    def logout(self, verbose=True):
        """Logout current user and reset the endpoint to production."""
        if self.user.logged_in:
            if verbose:
                print('Logging out of steno3d...')
            headers = {'sshKey': Comms.user.devel_key,
                       'client': 'steno3dpy:{}'.format(__version__)}
            # Best-effort server-side sign-out; local state is cleared below.
            requests.get(
                Comms.base_url + 'signout',
                headers=headers,
                cookies=Comms._cookies,
                timeout=120,
            )
            if verbose:
                print('Goodbye, @{}.'.format(self.user.username))
        self._base_url = PRODUCTION_BASE_URL
        self.user.logout()

    @staticmethod
    def post(url, data=None, files=None):
        """Post data and files to the steno3d online endpoint"""
        return _Comms._communicate(requests.post, url, data, files)

    @staticmethod
    def put(url, data=None, files=None):
        """Put data and files to the steno3d online endpoint"""
        return _Comms._communicate(requests.put, url, data, files)

    @staticmethod
    def get(url):
        """Make a get request from a steno3d online endpoint"""
        return _Comms._communicate(requests.get, url, None, None)

    @staticmethod
    def _communicate(request_fcn, url, data, files):
        """Shared request implementation for post/put/get.

        Returns a dict {'status_code': int, 'json': parsed-json-or-response}.
        """
        data = {} if data is None else data
        files = {} if files is None else files
        filedict = {}
        for filename in files:
            if hasattr(files[filename], 'dtype'):
                filedict[filename] = files[filename].file
                filedict[filename + 'Type'] = files[filename].dtype
            else:
                filedict[filename] = files[filename]
        headers = {'sshKey': Comms.user.devel_key,
                   'client': 'steno3dpy:{}'.format(__version__)}
        if getattr(Comms, 'extra_headers', None):
            headers.update(Comms.extra_headers)
        req = request_fcn(
            Comms.base_url + url,
            data=data,
            files=filedict,
            headers=headers,
            cookies=Comms._cookies,
            timeout=120,
        )
        if req.status_code < 210:
            Comms._cookies.update(req.cookies)
        # NOTE(review): assumes every entry in `files` exposes a `.file`
        # handle to close -- confirm against callers.
        for key in files:
            files[key].file.close()
        try:
            resp = req.json()
        except ValueError:
            resp = req
        return {"status_code": req.status_code, "json": resp}
# Module-level singleton shared by all public helpers (login, logout, plot).
Comms = _Comms()
def plot(url):
    """Return an IPython IFrame displaying `url` (full width, 500px tall)."""
    # Imported lazily so the client works without IPython installed.
    from IPython.display import IFrame
    return IFrame(url, height=500, width='100%')
| |
import numpy as np
import os.path as op
from numpy.testing import assert_array_almost_equal, assert_allclose
from scipy.signal import welch
import pytest
from mne import pick_types, Epochs, read_events
from mne.io import RawArray, read_raw_fif
from mne.utils import run_tests_if_main
from mne.time_frequency import psd_welch, psd_multitaper, psd_array_welch
# Location of the small sample recordings shipped with mne's io tests.
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(base_dir, 'test_raw.fif')
event_fname = op.join(base_dir, 'test-eve.fif')
def test_psd_nan():
    """Test handling of NaN in psd_array_welch."""
    n_samples, n_fft, n_overlap = 2048, 1024, 512
    x = np.random.RandomState(0).randn(1, n_samples)
    psds, freqs = psd_array_welch(x[:, :n_fft + n_overlap], float(n_fft),
                                  n_fft=n_fft, n_overlap=n_overlap)
    x[:, n_fft + n_overlap:] = np.nan  # what Raw.get_data() will give us
    psds_2, freqs_2 = psd_array_welch(x, float(n_fft), n_fft=n_fft,
                                      n_overlap=n_overlap)
    # NaNs beyond the analyzed span must not change the result.
    assert_allclose(freqs, freqs_2)
    assert_allclose(psds, psds_2)
    # 1-d
    psds_2, freqs_2 = psd_array_welch(
        x[0], float(n_fft), n_fft=n_fft, n_overlap=n_overlap)
    assert_allclose(freqs, freqs_2)
    assert_allclose(psds[0], psds_2)
def test_psd():
    """Tests the welch and multitaper PSD.

    Builds a synthetic Raw with two sinusoids, then checks that both PSD
    estimators find the power at the injected frequencies for Raw, Epochs
    and Evoked inputs, and that projections/padding behave as expected.
    """
    raw = read_raw_fif(raw_fname)
    picks_psd = [0, 1]
    # Populate raw with sinusoids
    rng = np.random.RandomState(40)
    data = 0.1 * rng.randn(len(raw.ch_names), raw.n_times)
    freqs_sig = [8., 50.]
    for ix, freq in zip(picks_psd, freqs_sig):
        data[ix, :] += 2 * np.sin(np.pi * 2. * freq * raw.times)
    first_samp = raw._first_samps[0]
    raw = RawArray(data, raw.info)
    tmin, tmax = 0, 20  # use a few seconds of data
    fmin, fmax = 2, 70  # look at frequencies between 2 and 70Hz
    n_fft = 128
    # -- Raw --
    kws_psd = dict(tmin=tmin, tmax=tmax, fmin=fmin, fmax=fmax,
                   picks=picks_psd)  # Common to all
    kws_welch = dict(n_fft=n_fft)
    kws_mt = dict(low_bias=True)
    funcs = [(psd_welch, kws_welch),
             (psd_multitaper, kws_mt)]
    for func, kws in funcs:
        kws = kws.copy()
        kws.update(kws_psd)
        psds, freqs = func(raw, proj=False, **kws)
        psds_proj, freqs_proj = func(raw, proj=True, **kws)
        assert psds.shape == (len(kws['picks']), len(freqs))
        assert np.sum(freqs < 0) == 0
        assert np.sum(psds < 0) == 0
        # Is power found where it should be
        ixs_max = np.argmax(psds, axis=1)
        for ixmax, ifreq in zip(ixs_max, freqs_sig):
            # Find nearest frequency to the "true" freq
            ixtrue = np.argmin(np.abs(ifreq - freqs))
            assert (np.abs(ixmax - ixtrue) < 2)
        # Make sure the projection doesn't change channels it shouldn't
        assert_array_almost_equal(psds, psds_proj)
        # Array input shouldn't work
        pytest.raises(ValueError, func, raw[:3, :20][0])
    # test n_per_seg in psd_welch (and padding)
    psds1, freqs1 = psd_welch(raw, proj=False, n_fft=128, n_per_seg=128,
                              **kws_psd)
    psds2, freqs2 = psd_welch(raw, proj=False, n_fft=256, n_per_seg=128,
                              **kws_psd)
    # Doubling n_fft (zero-padding) doubles the frequency resolution.
    assert (len(freqs1) == np.floor(len(freqs2) / 2.))
    assert (psds1.shape[-1] == np.floor(psds2.shape[-1] / 2.))
    kws_psd.update(dict(n_fft=tmax * 1.1 * raw.info['sfreq']))
    with pytest.raises(ValueError, match='n_fft is not allowed to be > n_tim'):
        psd_welch(raw, proj=False, n_per_seg=None,
                  **kws_psd)
    kws_psd.update(dict(n_fft=128, n_per_seg=64, n_overlap=90))
    with pytest.raises(ValueError, match='n_overlap cannot be greater'):
        psd_welch(raw, proj=False, **kws_psd)
    with pytest.raises(ValueError, match='No frequencies found'):
        psd_array_welch(np.zeros((1, 1000)), 1000., fmin=10, fmax=1)
    # -- Epochs/Evoked --
    events = read_events(event_fname)
    events[:, 0] -= first_samp
    tmin, tmax, event_id = -0.5, 0.5, 1
    epochs = Epochs(raw, events[:10], event_id, tmin, tmax, picks=picks_psd,
                    proj=False, preload=True, baseline=None)
    evoked = epochs.average()
    tmin_full, tmax_full = -1, 1
    epochs_full = Epochs(raw, events[:10], event_id, tmin_full, tmax_full,
                         picks=picks_psd, proj=False, preload=True,
                         baseline=None)
    kws_psd = dict(tmin=tmin, tmax=tmax, fmin=fmin, fmax=fmax,
                   picks=picks_psd)  # Common to all
    funcs = [(psd_welch, kws_welch),
             (psd_multitaper, kws_mt)]
    for func, kws in funcs:
        kws = kws.copy()
        kws.update(kws_psd)
        psds, freqs = func(
            epochs[:1], proj=False, **kws)
        psds_proj, freqs_proj = func(
            epochs[:1], proj=True, **kws)
        psds_f, freqs_f = func(
            epochs_full[:1], proj=False, **kws)
        # this one will fail if you add for example 0.1 to tmin
        assert_array_almost_equal(psds, psds_f, 27)
        # Make sure the projection doesn't change channels it shouldn't
        assert_array_almost_equal(psds, psds_proj, 27)
        # Is power found where it should be
        ixs_max = np.argmax(psds.mean(0), axis=1)
        for ixmax, ifreq in zip(ixs_max, freqs_sig):
            # Find nearest frequency to the "true" freq
            ixtrue = np.argmin(np.abs(ifreq - freqs))
            assert (np.abs(ixmax - ixtrue) < 2)
        assert (psds.shape == (1, len(kws['picks']), len(freqs)))
        assert (np.sum(freqs < 0) == 0)
        assert (np.sum(psds < 0) == 0)
        # Array input shouldn't work
        pytest.raises(ValueError, func, epochs.get_data())
        # Testing evoked (doesn't work w/ compute_epochs_psd)
        psds_ev, freqs_ev = func(
            evoked, proj=False, **kws)
        psds_ev_proj, freqs_ev_proj = func(
            evoked, proj=True, **kws)
        # Is power found where it should be
        ixs_max = np.argmax(psds_ev, axis=1)
        for ixmax, ifreq in zip(ixs_max, freqs_sig):
            # Find nearest frequency to the "true" freq
            ixtrue = np.argmin(np.abs(ifreq - freqs_ev))
            assert (np.abs(ixmax - ixtrue) < 2)
        # Make sure the projection doesn't change channels it shouldn't
        assert_array_almost_equal(psds_ev, psds_ev_proj, 27)
        assert (psds_ev.shape == (len(kws['picks']), len(freqs_ev)))
@pytest.mark.parametrize('kind', ('raw', 'epochs', 'evoked'))
def test_psd_welch_average_kwarg(kind):
    """Test `average` kwarg of psd_welch().

    Checks that mean/median/None aggregation of Welch segments are mutually
    consistent for Raw, Epochs and Evoked inputs.
    """
    raw = read_raw_fif(raw_fname)
    picks_psd = [0, 1]
    # Populate raw with sinusoids
    rng = np.random.RandomState(40)
    data = 0.1 * rng.randn(len(raw.ch_names), raw.n_times)
    freqs_sig = [8., 50.]
    for ix, freq in zip(picks_psd, freqs_sig):
        data[ix, :] += 2 * np.sin(np.pi * 2. * freq * raw.times)
    first_samp = raw._first_samps[0]
    raw = RawArray(data, raw.info)
    tmin, tmax = -0.5, 0.5
    fmin, fmax = 0, np.inf
    n_fft = 256
    n_per_seg = 128
    n_overlap = 0
    event_id = 2
    events = read_events(event_fname)
    events[:, 0] -= first_samp
    kws = dict(fmin=fmin, fmax=fmax, tmin=tmin, tmax=tmax, n_fft=n_fft,
               n_per_seg=n_per_seg, n_overlap=n_overlap, picks=picks_psd)
    if kind == 'raw':
        inst = raw
    elif kind == 'epochs':
        inst = Epochs(raw, events[:10], event_id, tmin, tmax, picks=picks_psd,
                      proj=False, preload=True, baseline=None)
    elif kind == 'evoked':
        inst = Epochs(raw, events[:10], event_id, tmin, tmax, picks=picks_psd,
                      proj=False, preload=True, baseline=None).average()
    else:
        raise ValueError('Unknown parametrization passed to test, check test '
                         'for typos.')
    psds_mean, freqs_mean = psd_welch(inst=inst, average='mean', **kws)
    psds_median, freqs_median = psd_welch(inst=inst, average='median', **kws)
    psds_unagg, freqs_unagg = psd_welch(inst=inst, average=None, **kws)
    # Frequencies should be equal across all "average" types, as we feed in
    # the exact same data.
    assert_allclose(freqs_mean, freqs_median)
    assert_allclose(freqs_mean, freqs_unagg)
    # For `average=None`, the last dimension contains the un-aggregated
    # segments.
    assert psds_mean.shape == psds_median.shape
    assert psds_mean.shape == psds_unagg.shape[:-1]
    assert_allclose(psds_mean, psds_unagg.mean(axis=-1))
    # Compare with manual median calculation
    assert_allclose(psds_median, np.median(psds_unagg, axis=-1))
@pytest.mark.slowtest
def test_compares_psd():
    """Test PSD estimation on raw for plt.psd and scipy.signal.welch."""
    raw = read_raw_fif(raw_fname)
    exclude = raw.info['bads'] + ['MEG 2443', 'EEG 053']  # bads + 2 more
    # picks MEG gradiometers
    picks = pick_types(raw.info, meg='grad', eeg=False, stim=False,
                       exclude=exclude)[:2]
    tmin, tmax = 0, 10  # use the first 10s of data
    fmin, fmax = 2, 70  # look at frequencies between 2 and 70Hz
    n_fft = 2048
    # Compute psds with the new implementation using Welch
    psds_welch, freqs_welch = psd_welch(raw, tmin=tmin, tmax=tmax, fmin=fmin,
                                        fmax=fmax, proj=False, picks=picks,
                                        n_fft=n_fft, n_jobs=1)
    # Compute psds directly with scipy.signal.welch for comparison
    start, stop = raw.time_as_index([tmin, tmax])
    data, times = raw[picks, start:(stop + 1)]
    out = [welch(d, fs=raw.info['sfreq'], nperseg=n_fft, noverlap=0)
           for d in data]
    freqs_mpl = out[0][0]
    psds_mpl = np.array([o[1] for o in out])
    # Restrict the reference spectrum to the same frequency band.
    mask = (freqs_mpl >= fmin) & (freqs_mpl <= fmax)
    freqs_mpl = freqs_mpl[mask]
    psds_mpl = psds_mpl[:, mask]
    assert_array_almost_equal(psds_welch, psds_mpl)
    assert_array_almost_equal(freqs_welch, freqs_mpl)
    assert (psds_welch.shape == (len(picks), len(freqs_welch)))
    assert (psds_mpl.shape == (len(picks), len(freqs_mpl)))
    assert (np.sum(freqs_welch < 0) == 0)
    assert (np.sum(freqs_mpl < 0) == 0)
    assert (np.sum(psds_welch < 0) == 0)
    assert (np.sum(psds_mpl < 0) == 0)
# Run the tests in this module when executed directly.
run_tests_if_main()
| |
#!/usr/bin/env python
"""
This runner is controlled by the evennia launcher and should normally
not be launched directly. It manages the two main Evennia processes
(Server and Portal) and most importantly runs a passive, threaded loop
that makes sure to restart Server whenever it shuts down.
Since twistd does not allow for returning an optional exit code we
need to handle the current reload state for server and portal with
flag-files instead. The files, one each for server and portal either
contains True or False indicating if the process should be restarted
upon returning, or not. A process returning != 0 will always stop, no
matter the value of this file.
"""
from __future__ import print_function
import os
import sys
from argparse import ArgumentParser
from subprocess import Popen
import Queue
import thread
import evennia
try:
# check if launched with pypy
import __pypy__ as is_pypy
except ImportError:
is_pypy = False
SERVER = None
PORTAL = None
EVENNIA_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
EVENNIA_BIN = os.path.join(EVENNIA_ROOT, "bin")
EVENNIA_LIB = os.path.dirname(evennia.__file__)
SERVER_PY_FILE = os.path.join(EVENNIA_LIB, 'server', 'server.py')
PORTAL_PY_FILE = os.path.join(EVENNIA_LIB, 'server', 'portal', 'portal.py')
GAMEDIR = None
SERVERDIR = "server"
SERVER_PIDFILE = None
PORTAL_PIDFILE = None
SERVER_RESTART = None
PORTAL_RESTART = None
SERVER_LOGFILE = None
PORTAL_LOGFILE = None
HTTP_LOGFILE = None
PPROFILER_LOGFILE = None
SPROFILER_LOGFILE = None
# messages
CMDLINE_HELP = \
"""
This program manages the running Evennia processes. It is called
by evennia and should not be started manually. Its main task is to
sit and watch the Server and restart it whenever the user reloads.
The runner depends on four files for its operation, two PID files
and two RESTART files for Server and Portal respectively; these
are stored in the game's server/ directory.
"""
PROCESS_ERROR = \
"""
{component} process error: {traceback}.
"""
PROCESS_IOERROR = \
"""
{component} IOError: {traceback}
One possible explanation is that 'twistd' was not found.
"""
PROCESS_RESTART = "{component} restarting ..."
PROCESS_DOEXIT = "Deferring to external runner."
# Functions
def set_restart_mode(restart_file, flag="reload"):
    """
    Write the restart-mode flag file.

    Args:
        restart_file (str): Path of the flag file to (over)write.
        flag: Value to store; converted with ``str`` before writing.
    """
    flag_text = str(flag)
    with open(restart_file, 'w') as flag_file:
        flag_file.write(flag_text)
def getenv():
    """
    Return a copy of the current environment with PYTHONPATH set.

    The game directory is prepended to ``sys.path`` so the spawned
    twistd processes inherit it through PYTHONPATH.

    Returns:
        dict: Environment mapping suitable for ``Popen(..., env=...)``.
    """
    sep = ";" if os.name == "nt" else ":"
    env = os.environ.copy()
    # Guard against duplicates: getenv() is invoked for every process
    # (re)start in start_services(), and an unconditional insert would
    # grow sys.path (and hence PYTHONPATH) with a copy of GAMEDIR on
    # every Server reload.
    if GAMEDIR not in sys.path:
        sys.path.insert(0, GAMEDIR)
    env['PYTHONPATH'] = sep.join(sys.path)
    return env
def get_restart_mode(restart_file):
    """
    Read the server/portal restart flag file.

    Returns:
        str: The file's contents, or "shutdown" when no flag file exists.
    """
    if not os.path.exists(restart_file):
        return "shutdown"
    with open(restart_file, 'r') as flag_file:
        return flag_file.read()
def get_pid(pidfile):
    """
    Return the process id stored in ``pidfile``.

    Returns:
        str or None: The raw file contents, or None when the PID file
        does not exist.
    """
    if not os.path.exists(pidfile):
        return None
    with open(pidfile, 'r') as handle:
        return handle.read()
def cycle_logfile(logfile):
    """
    Rotate ``logfile`` to ``<logfile>.old``, replacing any earlier backup.

    Does nothing when ``logfile`` does not exist.
    """
    backup = "%s.old" % logfile
    if not os.path.exists(logfile):
        return
    # Remove the previous backup first: e.g. Windows cannot rename
    # over an existing file.
    if os.path.exists(backup):
        os.remove(backup)
    os.rename(logfile, backup)
# Start program management
def start_services(server_argv, portal_argv, doexit=False):
    """
    This calls a threaded loop that launches the Portal and Server
    and then restarts them when they finish.

    Args:
        server_argv (list or None): twistd command line for the Server,
            or None to skip starting it.
        portal_argv (list or None): twistd command line for the Portal,
            or None to skip starting it.
        doexit (bool): When True, start the processes and return at
            once instead of entering the monitoring loop.
    """
    global SERVER, PORTAL
    # child threads report (message, returncode) tuples through this queue
    processes = Queue.Queue()
    def server_waiter(queue):
        # Runs in a worker thread: block until the Server process exits,
        # then hand its return code to the monitoring loop below.
        try:
            rc = Popen(server_argv, env=getenv()).wait()
        except Exception as e:
            print(PROCESS_ERROR.format(component="Server", traceback=e))
            return
        # this signals the controller that the program finished
        queue.put(("server_stopped", rc))
    def portal_waiter(queue):
        # Same as server_waiter, but for the Portal process.
        try:
            rc = Popen(portal_argv, env=getenv()).wait()
        except Exception as e:
            print(PROCESS_ERROR.format(component="Portal", traceback=e))
            return
        # this signals the controller that the program finished
        queue.put(("portal_stopped", rc))
    if portal_argv:
        try:
            # the restart file holds the literal string "True" when the
            # Portal was started interactively (see main())
            if not doexit and get_restart_mode(PORTAL_RESTART) == "True":
                # start portal as interactive, reloadable thread
                PORTAL = thread.start_new_thread(portal_waiter, (processes, ))
            else:
                # normal operation: start portal as a daemon;
                # we don't care to monitor it for restart
                PORTAL = Popen(portal_argv, env=getenv())
        except IOError as e:
            print(PROCESS_IOERROR.format(component="Portal", traceback=e))
            return
    try:
        if server_argv:
            if doexit:
                SERVER = Popen(server_argv, env=getenv())
            else:
                # start server as a reloadable thread
                SERVER = thread.start_new_thread(server_waiter, (processes, ))
    except IOError as e:
        print(PROCESS_IOERROR.format(component="Server", traceback=e))
        return
    if doexit:
        # Exit immediately
        return
    # Reload loop
    while True:
        # this blocks until something is actually returned.
        from twisted.internet.error import ReactorNotRunning
        try:
            try:
                message, rc = processes.get()
            except KeyboardInterrupt:
                # this only matters in interactive mode
                break
            # restart only if process stopped cleanly
            if (message == "server_stopped" and int(rc) == 0 and
                    get_restart_mode(SERVER_RESTART) in ("True", "reload", "reset")):
                print(PROCESS_RESTART.format(component="Server"))
                SERVER = thread.start_new_thread(server_waiter, (processes, ))
                continue
            # normally the portal is not reloaded since it's run as a daemon.
            if (message == "portal_stopped" and int(rc) == 0 and
                    get_restart_mode(PORTAL_RESTART) == "True"):
                print(PROCESS_RESTART.format(component="Portal"))
                PORTAL = thread.start_new_thread(portal_waiter, (processes, ))
                continue
            # any other outcome (non-zero rc or a "shutdown" flag) ends
            # the monitoring loop
            break
        except ReactorNotRunning:
            break
def main():
    """
    This handles the command line input of the runner, usually created by
    the evennia launcher.

    Parses the options, fills in the module-global path constants, builds
    the twistd command lines for Server and Portal and finally hands them
    to start_services().
    """
    parser = ArgumentParser(description=CMDLINE_HELP)
    parser.add_argument('--noserver', action='store_true', dest='noserver',
                        default=False, help='Do not start Server process')
    parser.add_argument('--noportal', action='store_true', dest='noportal',
                        default=False, help='Do not start Portal process')
    parser.add_argument('--logserver', action='store_true', dest='logserver',
                        default=False, help='Log Server output to logfile')
    # NOTE(review): 'iserver' is parsed but never read below — confirm
    # whether it is consumed elsewhere or is dead.
    parser.add_argument('--iserver', action='store_true', dest='iserver',
                        default=False, help='Server in interactive mode')
    parser.add_argument('--iportal', action='store_true', dest='iportal',
                        default=False, help='Portal in interactive mode')
    parser.add_argument('--pserver', action='store_true', dest='pserver',
                        default=False, help='Profile Server')
    parser.add_argument('--pportal', action='store_true', dest='pportal',
                        default=False, help='Profile Portal')
    # NOTE(review): double negative — passing --nologcycle stores False,
    # and the log-cycling calls below run under `if not args.nologcycle`,
    # i.e. logs are cycled exactly when the flag IS given, contradicting
    # its help text. Verify the launcher's expectation before changing.
    parser.add_argument('--nologcycle', action='store_false', dest='nologcycle',
                        default=True, help='Do not cycle log files')
    parser.add_argument('--doexit', action='store_true', dest='doexit',
                        default=False, help='Immediately exit after processes have started.')
    parser.add_argument('gamedir', help="path to game dir")
    parser.add_argument('twistdbinary', help="path to twistd binary")
    parser.add_argument('slogfile', help="path to server log file")
    parser.add_argument('plogfile', help="path to portal log file")
    parser.add_argument('hlogfile', help="path to http log file")
    args = parser.parse_args()
    global GAMEDIR
    global SERVER_LOGFILE, PORTAL_LOGFILE, HTTP_LOGFILE
    global SERVER_PIDFILE, PORTAL_PIDFILE
    global SERVER_RESTART, PORTAL_RESTART
    global SPROFILER_LOGFILE, PPROFILER_LOGFILE
    GAMEDIR = args.gamedir
    sys.path.insert(1, os.path.join(GAMEDIR, SERVERDIR))
    # All four control files live in the game's server/ subdirectory.
    SERVER_PIDFILE = os.path.join(GAMEDIR, SERVERDIR, "server.pid")
    PORTAL_PIDFILE = os.path.join(GAMEDIR, SERVERDIR, "portal.pid")
    SERVER_RESTART = os.path.join(GAMEDIR, SERVERDIR, "server.restart")
    PORTAL_RESTART = os.path.join(GAMEDIR, SERVERDIR, "portal.restart")
    SERVER_LOGFILE = args.slogfile
    PORTAL_LOGFILE = args.plogfile
    HTTP_LOGFILE = args.hlogfile
    TWISTED_BINARY = args.twistdbinary
    SPROFILER_LOGFILE = os.path.join(GAMEDIR, SERVERDIR, "logs", "server.prof")
    PPROFILER_LOGFILE = os.path.join(GAMEDIR, SERVERDIR, "logs", "portal.prof")
    # set up default project calls
    # (the code below edits these lists by position, so the order of the
    # twistd options matters: [binary, --nodaemon, --logfile, --pidfile,
    # --python] for the Server; no --nodaemon for the Portal.)
    server_argv = [TWISTED_BINARY,
                   '--nodaemon',
                   '--logfile=%s' % SERVER_LOGFILE,
                   '--pidfile=%s' % SERVER_PIDFILE,
                   '--python=%s' % SERVER_PY_FILE]
    portal_argv = [TWISTED_BINARY,
                   '--logfile=%s' % PORTAL_LOGFILE,
                   '--pidfile=%s' % PORTAL_PIDFILE,
                   '--python=%s' % PORTAL_PY_FILE]
    # Profiling settings (read file from python shell e.g with
    # p = pstats.Stats('server.prof')
    pserver_argv = ['--savestats',
                    '--profiler=cprofile',
                    '--profile=%s' % SPROFILER_LOGFILE]
    pportal_argv = ['--savestats',
                    '--profiler=cprofile',
                    '--profile=%s' % PPROFILER_LOGFILE]
    # Server
    pid = get_pid(SERVER_PIDFILE)
    if pid and not args.noserver:
        print("\nEvennia Server is already running as process %(pid)s. Not restarted." % {'pid': pid})
        args.noserver = True
    if args.noserver:
        server_argv = None
    else:
        set_restart_mode(SERVER_RESTART, "shutdown")
        if not args.logserver:
            # don't log to server logfile: drop the '--logfile=...' entry
            del server_argv[2]
            print("\nStarting Evennia Server (output to stdout).")
        else:
            if not args.nologcycle:
                cycle_logfile(SERVER_LOGFILE)
            print("\nStarting Evennia Server (output to server logfile).")
        if args.pserver:
            server_argv.extend(pserver_argv)
            print("\nRunning Evennia Server under cProfile.")
    # Portal
    pid = get_pid(PORTAL_PIDFILE)
    if pid and not args.noportal:
        print("\nEvennia Portal is already running as process %(pid)s. Not restarted." % {'pid': pid})
        args.noportal = True
    if args.noportal:
        portal_argv = None
    else:
        if args.iportal:
            # make portal interactive: replace '--logfile=...' (index 1)
            # with '--nodaemon' so output goes to stdout
            portal_argv[1] = '--nodaemon'
            # flag file content "True" tells start_services() to monitor
            # the Portal for restarts
            set_restart_mode(PORTAL_RESTART, True)
            print("\nStarting Evennia Portal in non-Daemon mode (output to stdout).")
        else:
            if not args.nologcycle:
                cycle_logfile(PORTAL_LOGFILE)
                cycle_logfile(HTTP_LOGFILE)
            set_restart_mode(PORTAL_RESTART, False)
            print("\nStarting Evennia Portal in Daemon mode (output to portal logfile).")
        if args.pportal:
            portal_argv.extend(pportal_argv)
            print("\nRunning Evennia Portal under cProfile.")
    if args.doexit:
        print(PROCESS_DOEXIT)
    # Windows fixes (Windows don't support pidfiles natively);
    # drop the '--pidfile=...' entry, which sits second-from-last in
    # both argv lists regardless of the edits above.
    if os.name == 'nt':
        if server_argv:
            del server_argv[-2]
        if portal_argv:
            del portal_argv[-2]
    # Start processes
    start_services(server_argv, portal_argv, doexit=args.doexit)
# Entry point: the evennia launcher runs this module as a script.
if __name__ == '__main__':
    main()
| |
import unittest
from functools import partial
import numpy
from tvtk.api import tvtk
from simphony.core.cuba import CUBA
from simphony.core.data_container import DataContainer
from simphony.core.keywords import KEYWORDS
from simphony.testing.utils import compare_data_containers
from simphony_mayavi.core.cuba_data import CubaData, AttributeSetType
class TestCubaData(unittest.TestCase):
    """Unit tests for CubaData backed by a tvtk attribute set.

    ``self.data`` wraps a tvtk.PointData holding three CUBA arrays
    (TEMPERATURE, RADIUS and the 3-component VELOCITY) of 3 items each;
    ``self.values`` keeps the matching plain-python source values.
    """
    def setUp(self):
        # compare DataContainer instances with simphony's deep-equality
        # helper instead of plain ==
        self.addTypeEqualityFunc(
            DataContainer, partial(compare_data_containers, testcase=self))
        self.values = {
            'TEMPERATURE': [1.0, 2.0, 3.0],
            'RADIUS': [4.0, 2.0, 1.0],
            'VELOCITY': [[4.0, 2.0, 1.0], [3.0, 2.0, 5.0], [4.0, 5.0, 1.0]]}
        self.point_data = point_data = tvtk.PointData()
        for key in self.values:
            index = point_data.add_array(self.values[key])
            point_data.get_array(index).name = key
        self.data = CubaData(attribute_data=point_data)
    def test_len(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data)
        # then
        self.assertEqual(len(data), 0)
        # when
        point_data.add_array([0, 0, 0, 3])
        # then
        self.assertEqual(len(data), 4)
    def test_len_with_initial_size(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=19)
        # then
        self.assertEqual(len(data), 19)
    def test_initialize_empty_points(self):
        # given
        data = CubaData.empty()
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
        self.assertIsInstance(data._data, tvtk.PointData)
    def test_initialize_empty_cells(self):
        # given
        data = CubaData.empty(AttributeSetType.CELLS)
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
        self.assertIsInstance(data._data, tvtk.CellData)
    def test_initialize_empty_and_size(self):
        # given
        data = CubaData.empty(size=12)
        # then
        self.assertEqual(len(data), 12)
        self.assertEqual(data.cubas, set([]))
        self.assertIsInstance(data._data, tvtk.PointData)
    def test_initialize_with_some_masked_point_data(self):
        # given: RADIUS has an explicit mask, TEMPERATURE does not —
        # CubaData is expected to fill in an all-set mask for the latter
        point_data = tvtk.PointData()
        index = point_data.add_array([1, 2, 3])
        point_data.get_array(index).name = CUBA.TEMPERATURE.name
        index = point_data.add_array([4, 2, 1])
        point_data.get_array(index).name = CUBA.RADIUS.name
        masks = tvtk.FieldData()
        index = masks.add_array(numpy.array([(1, 0), (0, 0), (1, 0)],
                                            dtype=numpy.int8))
        masks.get_array(index).name = CUBA.RADIUS.name
        # when
        data = CubaData(attribute_data=point_data, masks=masks)
        # then
        self.assertEqual(len(data), 3)
        self.assertEqual(data.cubas, {CUBA.TEMPERATURE, CUBA.RADIUS})
        self.assertSequenceEqual(
            point_data.get_array(CUBA.TEMPERATURE.name), [1, 2, 3])
        self.assertSequenceEqual(
            point_data.get_array(CUBA.RADIUS.name), [4, 2, 1])
        self.assertSequenceEqual(
            data.masks.get_array(CUBA.TEMPERATURE.name),
            [(1, 0), (1, 0), (1, 0)])
        self.assertSequenceEqual(
            data.masks.get_array(CUBA.RADIUS.name),
            [(1, 0), (0, 0), (1, 0)])
    def test_initialize_with_unmasked_point_data(self):
        # given
        point_data = tvtk.PointData()
        index = point_data.add_array([1, 2, 3])
        point_data.get_array(index).name = CUBA.TEMPERATURE.name
        index = point_data.add_array([4, 2, 1])
        point_data.get_array(index).name = CUBA.RADIUS.name
        # when
        data = CubaData(attribute_data=point_data)
        # then
        self.assertEqual(len(data), 3)
        self.assertEqual(data.cubas, {CUBA.TEMPERATURE, CUBA.RADIUS})
        self.assertSequenceEqual(
            point_data.get_array(CUBA.TEMPERATURE.name), [1, 2, 3])
        self.assertSequenceEqual(
            point_data.get_array(CUBA.RADIUS.name), [4, 2, 1])
        self.assertSequenceEqual(
            data.masks.get_array(CUBA.TEMPERATURE.name),
            [(1, 0), (1, 0), (1, 0)])
        self.assertSequenceEqual(
            data.masks.get_array(CUBA.RADIUS.name), [(1, 0), (1, 0), (1, 0)])
    def test_initialize_with_no_cuba_point_data(self):
        # given: array names that are not CUBA keys must be rejected
        point_data = tvtk.PointData()
        index = point_data.add_array([1, 2, 3])
        point_data.get_array(index).name = 'my name'
        index = point_data.add_array([4, 2, 1])
        point_data.get_array(index).name = 'my other name'
        # when/then
        with self.assertRaises(ValueError):
            CubaData(attribute_data=point_data)
    def test_initialize_with_point_data_and_size(self):
        # given: passing an explicit size together with non-empty
        # point data is invalid, whether matching or not
        point_data = tvtk.PointData()
        index = point_data.add_array([1, 2, 3])
        point_data.get_array(index).name = 'MASS'
        # when/then
        with self.assertRaises(ValueError):
            CubaData(attribute_data=point_data, size=3)
        # when/then
        with self.assertRaises(ValueError):
            CubaData(attribute_data=point_data, size=11)
    def test_initialize_with_variable_length_point_data(self):
        # given: arrays of differing lengths must be rejected
        point_data = tvtk.PointData()
        index = point_data.add_array([1, 2, 3])
        point_data.get_array(index).name = CUBA.TEMPERATURE.name
        index = point_data.add_array([4, 2, 1, 5])
        point_data.get_array(index).name = CUBA.RADIUS.name
        # when/then
        with self.assertRaises(ValueError):
            CubaData(attribute_data=point_data)
    def test_getitem(self):
        # given
        data = self.data
        values = self.values
        # when/then
        for index in range(3):
            result = data[index]
            expected = DataContainer(
                RADIUS=values['RADIUS'][index],
                TEMPERATURE=values['TEMPERATURE'][index],
                VELOCITY=values['VELOCITY'][index])
            self.assertEqual(result, expected)
        # when/then
        with self.assertRaises(IndexError):
            data[4]
    def test_getitem_with_initial_size(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=5)
        # when/then
        for index in range(5):
            self.assertEqual(data[index], DataContainer())
        # when/then
        with self.assertRaises(IndexError):
            data[6]
        # when/then
        with self.assertRaises(IndexError):
            data[-6]
    def test_getitem_on_empty_container(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data)
        # when/then
        with self.assertRaises(IndexError):
            data[4]
    def test_getitem_with_long_int_index(self):
        # NOTE: Python 2 ``long`` indices must behave exactly like ints
        # given
        data = self.data
        values = self.values
        # when/then
        for index in range(3):
            result = data[long(index)]
            expected = DataContainer(
                RADIUS=values['RADIUS'][index],
                TEMPERATURE=values['TEMPERATURE'][index],
                VELOCITY=values['VELOCITY'][index])
            self.assertEqual(result, expected)
        # when/then
        with self.assertRaises(IndexError):
            data[long(4)]
    def test_getitem_with_integer_values(self):
        # given
        data = self.data
        values = self.values
        point_data = self.point_data
        masks = self.data.masks
        # We need to get a CUBA key that has an int value.
        INTEGER_CUBA_KEY = next(
            key for key in KEYWORDS if KEYWORDS[key].dtype == numpy.int32)
        array_id = point_data.add_array([1, 0, 1])
        point_data.get_array(array_id).name = INTEGER_CUBA_KEY
        mask = tvtk.BitArray()
        mask.number_of_components = 2
        mask.name = INTEGER_CUBA_KEY
        mask.from_array(numpy.array([(1, 0), (1, 0), (1, 0)]))
        masks.add_array(mask)
        # when/then
        for index in range(3):
            result = data[long(index)]
            expected = DataContainer(
                RADIUS=values['RADIUS'][index],
                TEMPERATURE=values['TEMPERATURE'][index],
                VELOCITY=values['VELOCITY'][index])
            expected[CUBA[INTEGER_CUBA_KEY]] = [1, 0, 1][index]
            self.assertEqual(result, expected)
        # when/then
        with self.assertRaises(IndexError):
            data[long(4)]
    def test_setitem(self):
        # given
        data = self.data
        # when
        for index in range(3):
            data[index] = DataContainer(
                RADIUS=[34, 32, 31][index],
                TEMPERATURE=[-1, -2, -3][index],
                VELOCITY=[0.2, -0.1, -0.54])
        # then
        for index in range(3):
            self.assertEqual(
                data[index], DataContainer(
                    RADIUS=[34, 32, 31][index],
                    TEMPERATURE=[-1, -2, -3][index],
                    VELOCITY=[0.2, -0.1, -0.54]))
        self._assert_len(data, 3)
        # when/then
        with self.assertRaises(IndexError):
            data[4] = DataContainer(RADIUS=0.2, TEMPERATURE=-4.5)
    def test_none_set(self):
        # given
        data = self.data
        # when
        for index in range(3):
            data[index] = DataContainer(
                TEMPERATURE=[-1, None, -3][index],
            )
        # then
        for index in range(3):
            self.assertEqual(
                data[index], DataContainer(
                    TEMPERATURE=[-1, None, -3][index],
                ))
    def test_setitem_with_initial_size(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=5)
        # when
        for index in range(3):
            data[index] = DataContainer(
                RADIUS=[34, 32, 31][index],
                TEMPERATURE=[-1, -2, -3][index],
                VELOCITY=[0.2, -0.1, -0.54])
        # then
        self._assert_len(data, 5)
        for index in range(3):
            self.assertEqual(
                data[index], DataContainer(
                    RADIUS=[34, 32, 31][index],
                    TEMPERATURE=[-1, -2, -3][index],
                    VELOCITY=[0.2, -0.1, -0.54]))
        for index in range(3, 5):
            self.assertEqual(data[index], DataContainer())
        # when/then
        with self.assertRaises(IndexError):
            data[7] = DataContainer(RADIUS=0.2, TEMPERATURE=-4.5)
    def test_setitem_empty_with_initial_size(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=3)
        # when
        for index in range(3):
            data[index] = DataContainer()
        # then
        for index in range(3):
            self.assertEqual(data[index], DataContainer())
        self._assert_len(data, 0)
        # when/then
        with self.assertRaises(IndexError):
            data[4] = DataContainer()
    def test_setitem_with_unsupported_cuba(self):
        # given: NAME is a string-valued CUBA and is silently dropped
        data = self.data
        # when
        for index in range(3):
            data[index] = DataContainer(
                RADIUS=[34, 32, 31][index],
                TEMPERATURE=[-1, -2, -3][index],
                VELOCITY=[0.2, -0.1, -0.54],
                NAME=str(index))
        # then
        for index in range(3):
            self.assertEqual(
                data[index], DataContainer(
                    RADIUS=[34, 32, 31][index],
                    TEMPERATURE=[-1, -2, -3][index],
                    VELOCITY=[0.2, -0.1, -0.54]))
        self._assert_len(data, 3)
        # when/then
        with self.assertRaises(IndexError):
            data[4] = DataContainer(RADIUS=0.2, TEMPERATURE=-4.5)
    def test_setitem_with_long_int(self):
        # given
        data = self.data
        # when
        for index in range(3):
            data[long(index)] = DataContainer(
                RADIUS=[34, 32, 31][index],
                TEMPERATURE=[-1, -2, -3][index],
                VELOCITY=[0.2, -0.1, -0.54])
        # then
        for index in range(3):
            self.assertEqual(
                data[index], DataContainer(
                    RADIUS=[34, 32, 31][index],
                    TEMPERATURE=[-1, -2, -3][index],
                    VELOCITY=[0.2, -0.1, -0.54]))
        self._assert_len(data, 3)
        # when/then
        with self.assertRaises(IndexError):
            data[4] = DataContainer(RADIUS=0.2, TEMPERATURE=-4.5)
    def test_setitem_with_new_scalar_cubas(self):
        # given
        data = self.data
        # when
        for index in range(3):
            data[index] = DataContainer(
                RADIUS=[34, 32, 31][index], TEMPERATURE=[-1, -2, -3][index],
                MASS=[0.1, 0.4, 0.3][index], VELOCITY=[0.2, -0.1, -0.54])
        # then
        for index in range(3):
            self.assertEqual(
                data[index], DataContainer(
                    RADIUS=[34, 32, 31][index],
                    TEMPERATURE=[-1, -2, -3][index],
                    MASS=[0.1, 0.4, 0.3][index],
                    VELOCITY=[0.2, -0.1, -0.54]))
        self._assert_len(data, 3)
    def test_setitem_with_new_vector_cubas(self):
        # given
        data = self.data
        # when
        for index in range(3):
            data[index] = DataContainer(
                RADIUS=[34, 32, 31][index], TEMPERATURE=[-1, -2, -3][index],
                DIRECTION=[1, 4, 3], VELOCITY=[0.1, 0.4, 0.3])
        # then
        for index in range(3):
            self.assertEqual(
                data[index], DataContainer(
                    RADIUS=[34, 32, 31][index],
                    TEMPERATURE=[-1, -2, -3][index],
                    VELOCITY=[0.1, 0.4, 0.3],
                    DIRECTION=[1, 4, 3]))
    def test_setitem_with_missing_scalar_cubas(self):
        # given
        data = self.data
        # when
        for index in range(3):
            data[index] = DataContainer(
                TEMPERATURE=[-1, -2, -3][index], VELOCITY=[0.1, 0.4, 0.3])
        # then
        for index in range(3):
            self.assertEqual(
                data[index], DataContainer(
                    TEMPERATURE=[-1, -2, -3][index],
                    VELOCITY=[0.1, 0.4, 0.3]))
        self._assert_len(data, 3)
    def test_setitem_with_missing_vector_cubas(self):
        # given
        data = self.data
        # when
        for index in range(3):
            data[index] = DataContainer(
                TEMPERATURE=[-1, -2, -3][index],
                RADIUS=[0.12, -33, 11][index])
        # then
        for index in range(3):
            self.assertEqual(
                data[index], DataContainer(
                    RADIUS=[0.12, -33, 11][index],
                    TEMPERATURE=[-1, -2, -3][index]))
        self._assert_len(data, 3)
    def test_setitem_with_invalid_index(self):
        # given
        data = self.data
        # when
        with self.assertRaises(IndexError):
            data[5] = DataContainer(MASS=45)
        # then: a failed assignment must leave the container untouched
        self.assertEqual(len(data), 3)
        self.assertNotIn(CUBA.MASS, data[0])
        self._assert_len(data, 3)
    def test_delitem(self):
        # given
        data = self.data
        values = self.values
        # when
        del data[1]
        # then
        self.assertEqual(len(data), 2)
        for new_index, old_index in enumerate((0, 2)):
            result = data[new_index]
            self.assertEqual(
                result, DataContainer(
                    RADIUS=values['RADIUS'][old_index],
                    TEMPERATURE=values['TEMPERATURE'][old_index],
                    VELOCITY=values['VELOCITY'][old_index]))
        self._assert_len(data, 2)
    def test_delitem_invalid(self):
        # given
        data = self.data
        # then/when
        with self.assertRaises(IndexError):
            del data[145]
    def test_delitem_to_empty_container(self):
        # given
        data = self.data
        # when
        for index in reversed(range(len(data))):
            del data[index]
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
    def test_delitem_with_initial_size_to_empty_container(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=5)
        # when
        for index in reversed(range(len(data))):
            del data[index]
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
    def test_delitem_with_initial_size(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=5)
        # when
        del data[1]
        # then
        self.assertEqual(len(data), 4)
        for index in range(4):
            self.assertEqual(data[index], DataContainer())
        self._assert_len(data, 4)
    def test_append(self):
        # given
        data = self.data
        values = self.values
        # when
        data.append(DataContainer(VELOCITY=[0, 0, 0.34]))
        # then
        self.assertEqual(len(data), 4)
        for index in range(3):
            result = data[index]
            self.assertEqual(
                result, DataContainer(
                    RADIUS=values['RADIUS'][index],
                    TEMPERATURE=values['TEMPERATURE'][index],
                    VELOCITY=values['VELOCITY'][index]))
        self.assertEqual(data[3], DataContainer(VELOCITY=[0, 0, 0.34]))
        self._assert_len(data, 4)
    def test_append_on_empty(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data)
        # when
        data.append(DataContainer(VELOCITY=[0, 0, 0.34]))
        data.append(DataContainer(VELOCITY=[0, 0, 0.24]))
        # then
        self.assertEqual(len(data), 2)
        self.assertEqual(data[1], DataContainer(VELOCITY=[0, 0, 0.24]))
        self.assertEqual(data[0], DataContainer(VELOCITY=[0, 0, 0.34]))
    def test_append_with_initial_size(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=5)
        # when
        data.append(DataContainer(VELOCITY=[0, 0, 0.34]))
        # then
        self.assertEqual(len(data), 6)
        for index in range(5):
            self.assertEqual(data[index], DataContainer())
        self.assertEqual(data[5], DataContainer(VELOCITY=[0, 0, 0.34]))
        self._assert_len(data, 6)
    def test_append_empty_with_initial_size(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=5)
        # when
        data.append(DataContainer())
        # then
        self.assertEqual(len(data), 6)
        for index in range(5):
            self.assertEqual(data[index], DataContainer())
        self.assertEqual(data[5], DataContainer())
        self._assert_len(data, 6)
    def test_append_empty_on_empty_container(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=None)
        # when
        data.append(DataContainer())
        # then
        self.assertEqual(len(data), 1)
        self.assertEqual(data[0], DataContainer())
    def test_append_with_new_cuba(self):
        # given
        data = self.data
        values = self.values
        # when
        data.append(DataContainer(MASS=34, VELOCITY=[0, 0, 0.34]))
        # then
        self.assertEqual(len(data), 4)
        for index in range(3):
            result = data[index]
            self.assertEqual(
                result, DataContainer(
                    RADIUS=values['RADIUS'][index],
                    TEMPERATURE=values['TEMPERATURE'][index],
                    VELOCITY=values['VELOCITY'][index]))
        self.assertEqual(
            data[3], DataContainer(MASS=34.0, VELOCITY=[0, 0, 0.34]))
    def test_append_with_unsupported_cuba(self):
        # given
        data = self.data
        values = self.values
        # when: the string-valued NAME entry is expected to be dropped
        data.append(DataContainer(VELOCITY=[0, 0, 0.34], NAME='my name'))
        # then
        self.assertEqual(len(data), 4)
        for index in range(3):
            result = data[index]
            self.assertEqual(
                result, DataContainer(
                    RADIUS=values['RADIUS'][index],
                    TEMPERATURE=values['TEMPERATURE'][index],
                    VELOCITY=values['VELOCITY'][index]))
        self.assertEqual(data[3], DataContainer(VELOCITY=[0, 0, 0.34]))
        self._assert_len(data, 4)
    def test_insert(self):
        # given
        data = self.data
        values = self.values
        # when
        data.insert(1, DataContainer(VELOCITY=[0, 0, 0.34]))
        self._assert_len(data, 4)
        # then
        self.assertEqual(len(data), 4)
        for old_index, index in enumerate((0, 2, 3)):
            result = data[index]
            self.assertEqual(
                result, DataContainer(
                    RADIUS=values['RADIUS'][old_index],
                    TEMPERATURE=values['TEMPERATURE'][old_index],
                    VELOCITY=values['VELOCITY'][old_index]))
        self.assertEqual(data[1], DataContainer(VELOCITY=[0, 0, 0.34]))
    def test_insert_on_empty(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data)
        # when
        data.insert(0, DataContainer(VELOCITY=[0, 0, 0.34]))
        data.insert(0, DataContainer(VELOCITY=[0, 0, 0.24]))
        # then
        self.assertEqual(len(data), 2)
        self.assertEqual(data[0], DataContainer(VELOCITY=[0, 0, 0.24]))
        self.assertEqual(data[1], DataContainer(VELOCITY=[0, 0, 0.34]))
    def test_insert_with_initial_size(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=5)
        # when
        data.insert(1, DataContainer(VELOCITY=[0, 0, 0.34]))
        # then
        self._assert_len(data, 6)
        self.assertEqual(len(data), 6)
        for old_index, index in enumerate((0, 2, 3, 4, 5)):
            self.assertEqual(data[index], DataContainer())
        self.assertEqual(data[1], DataContainer(VELOCITY=[0, 0, 0.34]))
    def test_insert_empty_with_initial_size(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=5)
        # when
        data.insert(1, DataContainer())
        # then
        self.assertEqual(len(data), 6)
        for index in range(6):
            self.assertEqual(data[index], DataContainer())
    def test_insert_empty_on_empty_container(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=None)
        # when: inserting past the end behaves like append
        data.insert(1, DataContainer())
        # then
        self.assertEqual(len(data), 1)
        self.assertEqual(data[0], DataContainer())
    def test_insert_with_new_cuba(self):
        # given
        data = self.data
        values = self.values
        # when
        data.insert(1, DataContainer(VELOCITY=[0, 0, 0.34], MASS=0.3))
        # then
        self._assert_len(data, 4)
        self.assertEqual(len(data), 4)
        for old_index, index in enumerate((0, 2, 3)):
            result = data[index]
            self.assertEqual(
                result, DataContainer(
                    RADIUS=values['RADIUS'][old_index],
                    TEMPERATURE=values['TEMPERATURE'][old_index],
                    VELOCITY=values['VELOCITY'][old_index]))
        self.assertEqual(
            data[1], DataContainer(VELOCITY=[0, 0, 0.34], MASS=0.3))
    def test_insert_with_unsupported_cuba(self):
        # given
        data = self.data
        values = self.values
        # when: the string-valued NAME entry is expected to be dropped
        data.insert(1, DataContainer(VELOCITY=[0, 0, 0.34], NAME='my name2'))
        self._assert_len(data, 4)
        # then
        self.assertEqual(len(data), 4)
        for old_index, index in enumerate((0, 2, 3)):
            result = data[index]
            self.assertEqual(
                result, DataContainer(
                    RADIUS=values['RADIUS'][old_index],
                    TEMPERATURE=values['TEMPERATURE'][old_index],
                    VELOCITY=values['VELOCITY'][old_index]))
        self.assertEqual(data[1], DataContainer(VELOCITY=[0, 0, 0.34]))
    def test_append_pop_cycle(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=3)
        # when: pop more items than were appended (3 pre-sized + 5 added)
        for index in range(5):
            data.append(DataContainer(MASS=index))
        for _ in range(8):
            data.pop(0)
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
        # when
        for index in range(5):
            data.append(DataContainer(MASS=index))
        for _ in range(5):
            data.pop(0)
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
    def test_append_delete_cycle(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=3)
        # when: delete more items than were appended (3 pre-sized + 5 added)
        for index in range(5):
            data.append(DataContainer(MASS=index))
        for index in reversed(range(8)):
            del data[index]
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
        # when
        for index in range(5):
            data.append(DataContainer(MASS=index))
        for index in reversed(range(5)):
            del data[index]
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
    def test_insert_delete_cycle(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data, size=3)
        # when
        for index in range(5):
            data.insert(0, DataContainer(MASS=index))
        for index in reversed(range(8)):
            del data[index]
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
        # when
        for index in range(5):
            data.insert(0, DataContainer(MASS=index))
        for index in reversed(range(5)):
            del data[index]
        # then
        self.assertEqual(len(data), 0)
        self.assertEqual(data.cubas, set([]))
    def test_swap_with_integer_cuba(self):
        # given
        point_data = tvtk.PointData()
        data = CubaData(attribute_data=point_data)
        for index in range(5):
            data.append(DataContainer(STATUS=index))
        # when
        data[0] = data[4]
        # then
        self.assertEqual(data[0], DataContainer(STATUS=4))
        for index in range(1, 5):
            self.assertEqual(data[index], DataContainer(STATUS=index))
    def _assert_len(self, data, length):
        """Assert that every VTK array held by *data* has *length* entries."""
        n = data._data.number_of_arrays
        for array_id in range(n):
            self.assertEqual(len(data._data.get_array(array_id)), length)
| |
#
# Copyright (C) 2010-2011, 2011 Canonical Ltd. All Rights Reserved
#
# This file was originally taken from txzookeeper and modified later.
#
# Authors:
# Kapil Thangavelu and the Kazoo team
#
# txzookeeper is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# txzookeeper is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with txzookeeper. If not, see <http://www.gnu.org/licenses/>.
import code
import os
import os.path
import shutil
import signal
import subprocess
import tempfile
import traceback
from itertools import chain
from collections import namedtuple
from glob import glob
def debug(sig, frame):
    """Interrupt running process, and provide a python prompt for
    interactive debugging.

    Installed as a signal handler (see listen() below). The console's
    namespace exposes the interrupted frame's globals and locals, plus
    the frame object itself as ``_frame``.
    """
    d = {'_frame': frame}         # Allow access to frame object.
    d.update(frame.f_globals)     # Unless shadowed by global
    d.update(frame.f_locals)
    i = code.InteractiveConsole(d)
    # fixed typo in the user-visible banner: "recieved" -> "received"
    message = "Signal received : entering python shell.\nTraceback:\n"
    message += ''.join(traceback.format_stack(frame))
    i.interact(message)
def listen():
    """Install ``debug`` as the SIGUSR1 handler (no-op on Windows)."""
    if os.name == 'nt':
        # SIGUSR1 is not supported on Windows
        return
    # Register handler
    signal.signal(signal.SIGUSR1, debug)
listen()
def to_java_compatible_path(path):
    """Return *path* with backslashes normalized to forward slashes on
    Windows; on other platforms the path is returned unchanged."""
    return path.replace('\\', '/') if os.name == 'nt' else path
# Immutable description of one ZooKeeper ensemble member: its numeric id
# and the four ports it uses (client, election, leader and admin).
ServerInfo = namedtuple(
    "ServerInfo",
    "server_id client_port election_port leader_port admin_port")
class ManagedZooKeeper(object):
    """Class to manage the running of a ZooKeeper instance for testing.

    Note: no attempt is made to probe the ZooKeeper instance is
    actually available, or that the selected port is free. In the
    future, we may want to do that, especially when run in a
    Hudson/Buildbot context, to ensure more test robustness."""

    def __init__(self, software_path, server_info, peers=(), classpath=None):
        """Define the ZooKeeper test instance.

        @param software_path: The path to the install for ZK
        @param server_info: ServerInfo tuple with this member's ports
        @param peers: ServerInfo tuples for the other ensemble members
        @param classpath: optional explicit java classpath override
        """
        self.install_path = software_path
        self._classpath = classpath
        self.server_info = server_info
        self.host = "127.0.0.1"
        self.peers = peers
        # All config, data and logs live under a fresh temp directory.
        self.working_path = tempfile.mkdtemp()
        self._running = False

    def run(self):
        """Run the ZooKeeper instance under a temporary directory.

        Writes ZK log messages to zookeeper.log in the current directory.
        """
        if self.running:
            return
        config_path = os.path.join(self.working_path, "zoo.cfg")
        log_path = os.path.join(self.working_path, "log")
        log4j_path = os.path.join(self.working_path, "log4j.properties")
        data_path = os.path.join(self.working_path, "data")

        # various setup steps
        if not os.path.exists(self.working_path):
            os.mkdir(self.working_path)
        if not os.path.exists(log_path):
            os.mkdir(log_path)
        if not os.path.exists(data_path):
            os.mkdir(data_path)

        # Base standalone configuration (data dir + client/admin ports).
        with open(config_path, "w") as config:
            config.write("""
tickTime=2000
dataDir=%s
clientPort=%s
maxClientCnxns=0
admin.serverPort=%s
""" % (to_java_compatible_path(data_path),
       self.server_info.client_port,
       self.server_info.admin_port))  # NOQA

        # setup a replicated setup if peers are specified
        if self.peers:
            servers_cfg = []
            for p in chain((self.server_info,), self.peers):
                servers_cfg.append("server.%s=localhost:%s:%s" % (
                    p.server_id, p.leader_port, p.election_port))

            with open(config_path, "a") as config:
                config.write("""
initLimit=4
syncLimit=2
%s
""" % ("\n".join(servers_cfg)))

        # Write server ids into datadir
        with open(os.path.join(data_path, "myid"), "w") as myid_file:
            myid_file.write(str(self.server_info.server_id))

        # Rolling-file log4j config so ZK output lands in zookeeper.log.
        with open(log4j_path, "w") as log4j:
            log4j.write("""
# DEFAULT: console appender only
log4j.rootLogger=INFO, ROLLINGFILE
log4j.appender.ROLLINGFILE.layout=org.apache.log4j.PatternLayout
log4j.appender.ROLLINGFILE.layout.ConversionPattern=%d{ISO8601} [myid:%X{myid}] - %-5p [%t:%C{1}@%L] - %m%n
log4j.appender.ROLLINGFILE=org.apache.log4j.RollingFileAppender
log4j.appender.ROLLINGFILE.Threshold=DEBUG
log4j.appender.ROLLINGFILE.File=""" + to_java_compatible_path(  # NOQA
                self.working_path + os.sep + "zookeeper.log\n"))

        # Launch the JVM; readonly mode enabled so a partitioned member
        # can still serve reads.
        self.process = subprocess.Popen(
            args=["java",
                  "-cp", self.classpath,
                  "-Dreadonlymode.enabled=true",
                  "-Dzookeeper.log.dir=%s" % log_path,
                  "-Dzookeeper.root.logger=INFO,CONSOLE",
                  "-Dlog4j.configuration=file:%s" % log4j_path,
                  # "-Dlog4j.debug",
                  "org.apache.zookeeper.server.quorum.QuorumPeerMain",
                  config_path])
        self._running = True

    @property
    def classpath(self):
        """Get the classpath necessary to run ZooKeeper."""
        if self._classpath:
            return self._classpath

        # Two possibilities, as seen in zkEnv.sh:
        # Check for a release - top-level zookeeper-*.jar?
        jars = glob((os.path.join(
            self.install_path, 'zookeeper-*.jar')))
        if jars:
            # Release build (`ant package`)
            jars.extend(glob(os.path.join(
                self.install_path,
                "lib/*.jar")))
            # support for different file locations on Debian/Ubuntu
            jars.extend(glob(os.path.join(
                self.install_path,
                "log4j-*.jar")))
            jars.extend(glob(os.path.join(
                self.install_path,
                "slf4j-api-*.jar")))
            jars.extend(glob(os.path.join(
                self.install_path,
                "slf4j-log4j-*.jar")))
        else:
            # Development build (plain `ant`)
            jars = glob((os.path.join(
                self.install_path, 'build/zookeeper-*.jar')))
            jars.extend(glob(os.path.join(
                self.install_path,
                "build/lib/*.jar")))
        return os.pathsep.join(jars)

    @property
    def address(self):
        """Get the address of the ZooKeeper instance."""
        return "%s:%s" % (self.host, self.client_port)

    @property
    def running(self):
        # True between a successful run() and the next stop().
        return self._running

    @property
    def client_port(self):
        # Convenience accessor for the port clients connect to.
        return self.server_info.client_port

    def reset(self):
        """Stop the zookeeper instance, cleaning out its on disk-data."""
        self.stop()
        shutil.rmtree(os.path.join(self.working_path, "data"))
        os.mkdir(os.path.join(self.working_path, "data"))
        # Recreate the myid file the next run() expects.
        with open(os.path.join(self.working_path, "data", "myid"), "w") as fh:
            fh.write(str(self.server_info.server_id))

    def stop(self):
        """Stop the Zookeeper instance, retaining on disk state."""
        if not self.running:
            return
        self.process.terminate()
        self.process.wait()
        self._running = False

    def destroy(self):
        """Stop the ZooKeeper instance and destroy its on disk-state"""
        # called by at exit handler, reimport to avoid cleanup race.
        import shutil
        self.stop()
        shutil.rmtree(self.working_path)
class ZookeeperCluster(object):
    """Manage an ensemble of ManagedZooKeeper servers for testing."""

    def __init__(self, install_path=None, classpath=None,
                 size=3, port_offset=20000):
        self._install_path = install_path
        self._classpath = classpath
        self._servers = []

        # Calculate ports and peer group.  Each member gets four
        # consecutive ports (client/election/leader/admin) out of a
        # stride of 10 starting at port_offset.
        port = port_offset
        peers = []
        for i in range(size):
            info = ServerInfo(i + 1, port, port + 1, port + 2, port + 3)
            peers.append(info)
            port += 10

        # Instantiate Managed ZK Servers; each server's peer list is
        # every other member of the ensemble.
        for i in range(size):
            server_peers = list(peers)
            server_info = server_peers.pop(i)
            self._servers.append(
                ManagedZooKeeper(
                    self._install_path, server_info, server_peers,
                    classpath=self._classpath))

    def __getitem__(self, k):
        return self._servers[k]

    def __iter__(self):
        return iter(self._servers)

    def start(self):
        # Zookeeper client expresses a preference for either lower ports or
        # lexicographical ordering of hosts, to ensure that all servers have a
        # chance to startup, start them in reverse order.
        for server in reversed(list(self)):
            server.run()
        # Giving the servers a moment to start, decreases the overall time
        # required for a client to successfully connect (2s vs. 4s without
        # the sleep).
        import time
        time.sleep(2)

    def stop(self):
        # NOTE(review): this clears the server list, so terminate()/reset()
        # after stop() become no-ops — confirm that is intended.
        for server in self:
            server.stop()
        self._servers = []

    def terminate(self):
        # Stop every server and remove its on-disk state.
        for server in self:
            server.destroy()

    def reset(self):
        # Stop every server and wipe data while keeping the working dirs.
        for server in self:
            server.reset()
| |
from __future__ import division, print_function
import subprocess
import numpy as np
import os, sys
from os.path import dirname, realpath, join
from PIL import Image
from numpy import uint8, float32
#import threading
#__hesaff_lock = threading.Lock()
def reload_module():
    """Reload this module in place (development convenience)."""
    import importlib
    import sys
    # Fix: the `imp` module is deprecated and was removed in Python 3.12;
    # importlib.reload is the supported equivalent.
    importlib.reload(sys.modules[__name__])
# Map the host platform to the external executable suffix.  Fix: the
# original indexed {'linux2': ...}[sys.platform], which raises KeyError on
# Python 3 where sys.platform == 'linux'; default to the Linux suffix.
EXE_EXT = {'win32': '.exe', 'darwin': '.mac'}.get(sys.platform, '.ln')
#__file__ = os.path.realpath('external_feature_interface.py')
EXE_PATH = realpath(dirname(__file__))
try:  # for debugging
    # When running under IPython, fall back to the in-tree binary location.
    __IPYTHON__
    EXE_PATH = realpath('tpl/extern_feat')
except Exception as ex:
    pass
HESAFF_EXE = join(EXE_PATH, 'hesaff' + EXE_EXT)
INRIA_EXE = join(EXE_PATH, 'compute_descriptors' + EXE_EXT)
# Create directory for temporary files (if needed)
TMP_DIR = os.path.join(EXE_PATH, '.tmp_external_features')
if not os.path.exists(TMP_DIR):
    print('Making directory: ' + TMP_DIR)
    os.mkdir(TMP_DIR)
#---------------------------------------
# Defined temp compute functions
# Create directory for temporary files (if needed)
#TMP_DIR = os.path.join(EXE_PATH, '.tmp_external_features')
#if not os.path.exists(TMP_DIR):
#print('Making directory: '+TMP_DIR)
#os.mkdir(TMP_DIR)
#def temp_compute(rchip, compute_fn):
#tmp_fpath = TMP_DIR + '/tmp.ppm'
#rchip_pil = Image.fromarray(rchip)
#rchip_pil.save(tmp_fpath, 'PPM')
#(kpts, desc) = compute_fn(tmp_fpath)
#return (kpts, desc)
#def compute_perdoch(rchip, dict_args):
#return temp_compute(rchip,compute_hesaff)
#def compute_inria(rchip, detect_type, extract_type):
#return temp_compute(rchip, compute_hesaff)
#---------------------------------------
# Define precompute functions
def __precompute(rchip_fpath, feat_fpath, compute_fn):
    """Run *compute_fn* on an image path and cache (kpts, desc) as .npz."""
    result = compute_fn(rchip_fpath)
    kpts, desc = result
    np.savez(feat_fpath, kpts, desc)
    return result
# TODO: Dynamically add descriptor types
valid_extractors = ['sift', 'gloh']
valid_detectors = ['mser', 'hessaff']


def precompute_harris(rchip_fpath, feat_fpath):
    # Detect Harris keypoints + SIFT, caching the result to feat_fpath.
    return __precompute(rchip_fpath, feat_fpath, __compute_harris)


def precompute_mser(rchip_fpath, feat_fpath):
    # Detect MSER keypoints + SIFT, caching the result to feat_fpath.
    return __precompute(rchip_fpath, feat_fpath, __compute_mser)


def precompute_hesaff(rchip_fpath, feat_fpath):
    # Detect Hessian-affine keypoints + SIFT, caching the result to feat_fpath.
    return __precompute(rchip_fpath, feat_fpath, __compute_hesaff)
#---------------------------------------
# Defined temp compute functions
def __temp_compute(rchip, compute_fn):
    # Write the in-memory image to a temporary PPM so the external
    # binaries (which only accept file paths) can process it.
    tmp_fpath = TMP_DIR + '/tmp.ppm'
    rchip_pil = Image.fromarray(rchip)
    rchip_pil.save(tmp_fpath, 'PPM')
    (kpts, desc) = compute_fn(tmp_fpath)
    return (kpts, desc)


def compute_hesaff(rchip):
    """Compute Hessian-affine keypoints/descriptors for an image array."""
    return __temp_compute(rchip, __compute_hesaff)


def compute_descriptors(rchip, detect_type, extract_type):
    # NOTE(review): detect_type/extract_type are ignored and hesaff is
    # always used — confirm whether that is intentional.
    return __temp_compute(rchip, __compute_hesaff)
#---------------------------------------
# Work functions which call the external feature detectors
def inria_cmd(rchip_fpath, detect_type, extract_type):
    """Build the shell command line for the INRIA compute_descriptors binary."""
    flags = ['-i "%s"' % rchip_fpath,  # input image (quoted for the shell)
             '-%s' % detect_type,      # keypoint detector flag
             '-%s' % extract_type,     # descriptor extractor flag
             '-noangle']               # rotation-variant descriptors
    return '%s %s' % (INRIA_EXE, ' '.join(flags))
def __compute_descriptors(rchip_fpath, detect_type, extract_type):
    """Run an external keypoint detector/extractor pair on an image file."""
    # The binary writes its output next to the input image with a
    # ".<detector>.<extractor>" suffix.
    outname = rchip_fpath + '.' + detect_type + '.' + extract_type
    cmd = inria_cmd(rchip_fpath, detect_type, extract_type)
    __execute_extern(cmd)
    kpts, desc = __read_text_feat_file(outname)
    return kpts, desc
def __compute_mser(rchip_fpath):
    """Detect MSER keypoints + SIFT descriptors; return (kpts, desc).

    Fix: the original dropped the result, so callers such as
    precompute_mser received None and crashed when unpacking it.
    """
    return __compute_descriptors(rchip_fpath, 'mser', 'sift')


def __compute_harris(rchip_fpath):
    """Detect Harris keypoints + SIFT descriptors; return (kpts, desc).

    Fix: same missing-return defect as __compute_mser.
    """
    return __compute_descriptors(rchip_fpath, 'harris', 'sift')
def __compute_hesaff(rchip_fpath):
    """Run the external hesaff keypoint detector on an image file."""
    # hesaff writes its output next to the image as "<image>.hesaff.sift".
    outname = rchip_fpath + '.hesaff.sift'
    args = '"' + rchip_fpath + '"'  # quote: path may contain spaces
    cmd = HESAFF_EXE + ' ' + args
    print(cmd)
    __execute_extern(cmd)
    kpts, desc = __read_text_feat_file(outname)
    return kpts, desc
#---------------------------------------
def rectify_up_is_up(abcd):
    """Lower-triangularize 2x2 shape matrices given as rows (a, b, c, d).

    Returns (acd, sqrt_det): acd stacks the (a11, a21, a22) entries of the
    equivalent lower-triangular matrices; sqrt_det is per-row sqrt(|det|).
    """
    a, b, c, d = abcd.T
    # Logic taken from Perdoch's code.
    sqrt_det = np.sqrt(np.abs(a * d - b * c))
    sqrt_b2a2 = np.sqrt(b * b + a * a)
    upper_left = sqrt_b2a2 / sqrt_det
    lower_left = (d * b + c * a) / (sqrt_b2a2 * sqrt_det)
    lower_right = sqrt_det / sqrt_b2a2
    acd = np.vstack([upper_left, lower_left, lower_right]).T
    return acd, sqrt_det
# Factor relating descriptor patch scale to the ellipse scale (Perdoch).
DESC_FACTOR = 3.0 * np.sqrt(3.0)
# NOTE(review): these mid-file imports shadow the builtin `abs` and bind
# `det` to numpy.linalg.det for the helpers below.
from numpy.linalg import svd, det, inv
from numpy import diag, sqrt, abs
import numpy.linalg as npla
def expand_invET(invET):
    """Expand rows (e11, e12, e22) into symmetric 2x2 inverse-ellipse mats."""
    e11 = invET[0]
    e12 = invET[1]
    e22 = invET[2]
    # The off-diagonal entries are equal because invE is symmetric.
    return np.array(((e11, e12), (e12, e22))).T
def expand_acd(acd):
    """Expand rows (a, c, d) into lower-triangular 2x2 matrices."""
    matrices = []
    for (a, c, d) in acd:
        matrices.append(np.array(((a, 0), (c, d))))
    return matrices
def convert_invE_to_abcd(invET):
    '''Transforms:
        [E_a, E_b]        [A_a,   0]
        [E_b, E_d]  --->  [A_c, A_d]

    Fix: the original computed the rectified matrices but never returned
    them (and ignored that rectify_up_is_up returns a tuple); it now
    returns the stacked (a, c, d) rows.
    '''
    invE_list = expand_invET(invET)
    # Decompose using singular value decomposition
    USV_list = [svd(invE) for invE in invE_list]
    U_list, S_list, V_list = zip(*USV_list)
    # Deintegrate the scale
    sc_list = [1.0 / (sqrt(sqrt(S[0] * S[1]))) for S in S_list]
    sigma_list = [sc / DESC_FACTOR for sc in sc_list]
    # Rebuild the ellipse -> circle matrix
    abcd_list = [(U.dot(diag(sqrt(S[::-1]) * sc)).dot(V)).flatten()
                 for (sc, (U, S, V)) in zip(sc_list, USV_list)]
    abcd = np.vstack(abcd_list)
    # Enforce a lower triangular matrix
    acd, sqrt_det = rectify_up_is_up(abcd)
    return acd
# Helper function to read external file formats
def __read_text_feat_file(outname):
    """Read output from external keypoint detectors like hesaff."""
    file = open(outname, 'r')
    # Read header
    ndims = int(file.readline())   # descriptor dimensionality
    nkpts = int(file.readline())   # number of keypoints
    lines = file.readlines()
    file.close()
    # Preallocate output
    kpts = np.zeros((nkpts, 5), dtype=float)
    desc = np.zeros((nkpts, ndims), dtype=uint8)
    for kx, line in enumerate(lines):
        data = line.split(' ')
        # First five fields: x, y and the inverse ellipse shape terms.
        kpts[kx, :] = np.array([float32(_) for _ in data[0:5]], dtype=float32)
        desc[kx, :] = np.array([uint8(_) for _ in data[5:]], dtype=uint8)
    # Hack to put things into acd format
    invET = kpts.T[2:5]
    #
    acd = kpts.T[2:5]
    det = acd[0] * acd[2]
    # Drop keypoints whose pseudo-determinant falls outside a sane range.
    is_valid = np.bitwise_and(det.T < 1E-3, det.T > 1E-7)
    kpts = kpts[is_valid.flatten()]
    desc = desc[is_valid.flatten()]
    return (kpts, desc)
# Helper function to call commands
def __execute_extern(cmd):
    """Run *cmd* in a shell; raise with its captured output on failure."""
    #print('tpl.execute_extern> '+cmd)
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    (out, err) = proc.communicate()
    if proc.returncode != 0:
        # Fix: on Python 3 communicate() yields bytes; decode so the join
        # below does not raise TypeError and mask the real failure.
        if isinstance(out, bytes):
            out = out.decode('utf-8', 'replace')
        # Fix: the original message claimed "returned 0" even though this
        # branch only runs on a non-zero exit status.
        raise Exception('\n'.join(
            ['* External detector returned %d' % proc.returncode,
             '* Failed calling: ' + cmd,
             '* Process output: ',
             '------------------',
             out,
             '------------------']))
def test_inria_feats():
    """Smoke-test every INRIA detector/extractor combination on lena.png."""
    # Detector names understood by the compute_descriptors binary.
    detect_type_list = [_.strip() for _ in '''
    harris, hessian, harmulti, hesmulti,
    harhesmulti, harlap, heslap, dog,
    mser, haraff, hesaff, dense 6 6
    '''.strip(' \n').split(',')]
    extract_type_list = ['sift', 'gloh']
    extract_type = 'sift'
    rchip_fpath = os.path.realpath('lena.png')
    for detect_type in detect_type_list:
        for extract_type in extract_type_list:
            cmd = inria_cmd(rchip_fpath, detect_type, extract_type)
            print('Execute: ' + cmd)
            # -DP additionally draws detected points into out.desc.png.
            __execute_extern(cmd + ' -DP')
def test_deintegrate_scale(invET):
    # NOTE(review): the body is only a docstring of interactive scratch
    # commands — this function performs no work and returns None.
    '''
    %run feature_compute2.py
    __file__ = 'tpl/extern_feat/extern_feat.py'
    exec(open('tpl/extern_feat/extern_feat.py'))
    outname = rchip_fpath + '.hesaff.sift'
    args = '"' + rchip_fpath + '"'
    cmd = HESAFF_EXE + ' ' + args
    print(cmd)
    __execute_extern(cmd)
    file = open(outname, 'r')
    # Read header
    ndims = int(file.readline())
    nkpts = int(file.readline())
    lines = file.readlines()
    file.close()
    # Preallocate output
    kpts = np.zeros((nkpts, 5), dtype=float)
    desc = np.zeros((nkpts, ndims), dtype=uint8)
    for kx, line in enumerate(lines):
        data = line.split(' ')
        kpts[kx,:] = np.array([float32(_) for _ in data[0:5]], dtype=float32)
        desc[kx,:] = np.array([uint8(_) for _ in data[5: ]], dtype=uint8)
    # Hack to put things into acd foramat
    invET = kpts.T[2:5]
    invE = array([[ 0.00226126, -0.00166135],
    [-0.00166135, 0.0224354 ]])
    '''
def A_from_E(E=None, invE=None):
    # NOTE(review): exploratory scratch code.  Several names used below
    # (invE4, A2, invA, isE, det) are not bound on the paths that use
    # them, so executing this raises NameError/TypeError.  Preserved
    # verbatim for reference; the indentation of the trailing nested
    # helper is a best-effort reconstruction — confirm against history.
    if E is None:
        E = inv(invE)
    # convert E to A
    det_invE = np.linalg.det(invE)
    det_E = np.linalg.det(E)
    #
    invX, invW, invYt = np.linalg.svd(invE)
    E = invX.dot(diag(1 / invW[::-1])).dot(invYt)
    X, W, Yt = np.linalg.svd(E)
    invE2 = X.dot(diag(1 / W[::-1])).dot(Yt)
    invX, invW, invYt = np.linalg.svd(invE2)
    E2 = invX.dot(diag(1 / invW[::-1])).dot(invYt)
    X, W, Yt = np.linalg.svd(E2)
    invE3 = X.dot(diag(1 / W[::-1])).dot(Yt)
    A = invX.dot(diag(1 / sqrt(invW[::-1])))
    U, S, Vt = np.linalg.svd(A)
    E3 = Vt.dot(diag(S ** 2)).dot(Vt.T)
    E3 = A.dot(A.T)
    print(E3)
    print(E2)
    #invE4 = A.T.dot(A)
    #invE4 = U.dot(diag(S)).dot(U.T)
    #invE4 = U.dot(diag(S[::-1])).dot(Vt.T)
    #invE4 = V.dot(diag(S[::-1])).dot(Vt.T)
    #invE4 = U.T.dot(diag(S[::-1])).dot(Vt.T)
    #print(invE)
    #print(invE4)
    (a, b, c, d) = A.flatten()
    det_2 = sqrt(abs(a * d - b * c))
    b2a2 = sqrt(b * b + a * a)
    a11 = b2a2 / det_2
    a12 = 0
    a21 = (d * b + c * a) / (b2a2 * det_2)
    a22 = det_2 / b2a2
    Aup = np.array(((a11, a12), (a21, a22)))
    print(Aup.dot(Aup.T))
    U, S, Vt = np.linalg.svd(Aup)
    E4 = U.dot(diag(det_2 ** 2 * S ** 2)).dot(U.T)
    print(E4)
    print(E3)
    print(E)
    #invA = U.dot(diag(1/S)).dot(Vt)
    #invS1 = 1/S[::-1]
    #invU,invS,invVt = np.linalg.svd(invA)
    #A2 = invV.T.dot(diag()).dot(invU.T)
    print(E)
    print(E2)
    print(E3)
    print('--')
    print(invE)
    print(invE2)
    print(invE3)
    print(invE4)
    print('---')
    print(A)
    print(A2)
    sigma = np.linalg.det(S)
    print('E')
    print(E)
    print(invE)
    print(A)
    print(invA)
    print(det_invE)
    print(det_E)
    U = X
    S = np.diag(np.sqrt(W))
    A = U.dot(S)
    det_A = np.linalg.det(A)
    print(det_A)
    U, S, Vt = np.linalg.svd(invA)
    #Breakpoint 1
    #isE1 = A.dot(A.T)
    #print('=====')
    #print('isE1? svd(A.dot(A.T))=')
    #print(isE1)
    #print(E)
    #print('----')
    #print('\n'.join(map(str, list(svd(isE)))))
    #print('----')
    A = A.dot(np.eye(2) / (det * det))
    det_ = np.linalg.det(A)

    def recify_up(Au):
        # Same "up is up" rectification as rectify_up_is_up, inlined.
        (a, b, c, d) = Au.flatten()
        det_2 = sqrt(abs(a * d - b * c))
        b2a2 = sqrt(b * b + a * a)
        a11 = b2a2 / det_2
        a12 = 0
        a21 = (d * b + c * a) / (b2a2 * det_2)
        a22 = det_2 / b2a2
        Aup = np.array(((a11, a12), (a21, a22)))
        A = Aup * sqrt(det_2)
        print(isE)
        print(E)
        assert all(abs(isE - E) < 1E18)
        return A, Au, det_2
# NOTE(review): module-level scratch of an interactive session.  It runs at
# import time and depends on names (invE, invET, the `helpers` module, cv2)
# that are not guaranteed to exist here — confirm whether this should live
# under a __main__ guard or be removed.
A, Au, det = A_from_E(invE=invE)
invET
import helpers
import textwrap
helpers.rrr()
invE_list = expand_invET(invET)
# Decompose using singular value decomposition
invE = invE_list[0]
hstr = helpers.horiz_string
np.set_printoptions(precision=8)
import cv2


def print_2x2_svd(M, name=''):
    """Print the SVD of a 2x2 matrix and the implied rotation angles."""
    #S, U, V = cv2.SVDecomp(M, flags=cv2.SVD_FULL_UV)
    #S = S.flatten()
    #print(hstr([U,S,V]))
    U, S, V = np.linalg.svd(M)
    #print(hstr([U,S,V]))
    # Try and conform to opencv
    Sm = diag(S)
    print('---- SVD of ' + name + ' ----')
    print(name + ' =\n%s' % M)
    print('= U * S * V =')
    print(hstr([U, ' * ', Sm, ' * ', V]))
    print('=')
    print(U.dot(Sm).dot(V))
    print('--')
    # SVD is not rotation, scale rotation...
    # That is only for a shear matrix
    asin = np.arcsin
    acos = np.arccos
    thetaU11 = acos(U[0, 0])
    thetaU12 = -asin(U[0, 1])
    thetaU21 = asin(U[1, 0])
    thetaU22 = acos(U[1, 1])
    print([thetaU11, thetaU12, thetaU21, thetaU22])
    thetaV11 = acos(V.T[0, 0])
    thetaV12 = -asin(V.T[0, 1])
    thetaV21 = asin(V.T[1, 0])
    thetaV22 = acos(V.T[1, 1])
    print([thetaV11, thetaV12, thetaV21, thetaV22])
    # radians -> degrees
    thetaU = thetaU11 * 360 / (2 * np.pi)
    thetaV = thetaV11 * 360 / (2 * np.pi)
    print('theta_U = %r' % thetaU)
    print('theta_V = %r' % thetaV)
    print('---------------------\n')
    return U, S, V, Sm


U, S, V, Sm = print_2x2_svd(invE, 'invE')


def print_extract_scale(S):
    """Show how the descriptor scale is derived from the singular values."""
    sc = (1.0 / (sqrt(sqrt(S[0] * S[1]))))
    sigma = sc / DESC_FACTOR
    print('---- Scale Extraction ---')
    print('sc = 1.0 / (sqrt(sqrt(S[0] * S[1])))')
    print('sc = 1.0 / (sqrt(sqrt(%f * %f])))' % (S[0], S[1]))
    print('sc = %.3f' % sc)
    print('sigma = %.3f / DESC_FACTOR' % sc)
    print('sigma = %.3f / %.3f' % (sc, DESC_FACTOR))
    print('sigma = %.3f' % (sigma))
    print('---------------------\n')
    return sc, sigma


sc, sigma = print_extract_scale(S)


def print_reconstruct_unit(U, S, V, sc, name=''):
    """Rebuild the scale-normalized (unit) matrix from its SVD parts."""
    flip_Sm = diag(S[::-1])
    Sm_unit = diag(sqrt(S[::-1]) * sc)
    M_unit = (U.dot(Sm_unit).dot(V)).flatten().reshape(2, 2)
    print('---- Reconstruct Unit A ---')
    print('sc = %.3f' % sc)
    print(name + '_unit = U * sqrt(S[::-1])*sc * V')
    print('=')
    print(hstr([U, ' * ', flip_Sm, '*', ('%.3f' % sc), ' * ', V]))
    print('=')
    print(hstr([U, ' * ', Sm_unit, ' * ', V]))
    print('=')
    print(M_unit)
    print('--')
    print('sqrt(det(' + name + '_unit)) = %.3f' % sqrt(det(M_unit)))
    print('---------------------\n')
    return M_unit


A_unit = print_reconstruct_unit(U, S, V, sc, name='A')


def print_rectify_up(M, name=''):
    """Show the "up is up" lower-triangularization of a 2x2 matrix."""
    print('---- Recify ' + name + ' up is up ---')
    (a, b, c, d) = M.flatten()
    print(name + ' =\n%s' % M)
    det_ = sqrt(abs(a * d - b * c))
    b2a2 = sqrt(b * b + a * a)
    a11 = b2a2 / det_
    a12 = 0
    a21 = (d * b + c * a) / (b2a2 * det_)
    a22 = det_ / b2a2
    M_up = np.array(((a11, a12), (a21, a22)))
    print('det = sqrt(abs(a*d - b*c))')
    print('det = sqrt(abs(%.3f*%.3f - %.3f*%.3f))' % (a, b, c, d))
    print('det = %.3f' % det_)
    print('--')
    print('b2a2 = sqrt(b*b + a*a)')
    print('b2a2 = sqrt(%.3f*%.3f + %.3f*%.3f)' % (b, b, a, a))
    print('b2a2 = %.3f' % b2a2)
    print('--')
    print('A =' + textwrap.dedent('''
[[ b2a2/det, 0 ],
[(d*b + c*a)/(b2a2*det), det/b2a2]]'''))
    print('= ' + name + '_up = ')
    print(M_up)
    print('--')
    print('det(' + name + '_up) = %.3f' % det(M_up))
    print('---------------------\n')
    return M_up


A_up = print_rectify_up(A_unit, 'A')
A = A_up


def print_integrate_scale(M, sc, name):
    """Show the scale being folded back into the matrix via its SVD."""
    U, S, V = svd(M)
    #S, U, V = cv2.SVDecomp(M)
    #S = S.flatten()
    scaled_Sm = diag(S) / sc
    print('---- Integrate scale into ' + name + ' ----')
    print('sc = %.3f' % sc)
    print(name + ' =\n%s' % M)
    print(name + '\' = U * (S/sc) * V =')
    print(hstr([U, ' * ', scaled_Sm, ' * ', V]))
    print('=')
    scaled_M = U.dot(scaled_Sm).dot(V)
    scaled_M = helpers.correct_zeros(scaled_M)
    print(scaled_M)
    print('---------------------\n')
    return scaled_M


scaled_M = print_integrate_scale(A, sc, 'A')
print(hstr(('A.T.dot(A) =', str(A.T.dot(A)))))
print(hstr(('A.dot(A.T) =', str(A.dot(A.T)))))
print_2x2_svd(A, 'A')
print('A =\n%s' % (A,))
print('Sm =\n%s' % (Sm,))
print(Sm.dot(A))
print(A.dot(Sm))
print('---- Check things ---')
#AScaleUnit = A_up.dot(
if __name__ == '__main__':
    #import cv2
    #test_inria_feats()
    rchip_fpath = 'tpl/extern_feat/lena.png'
    rchip_fpath = os.path.realpath(rchip_fpath)
    # NOTE(review): the block below is a bare triple-quoted string with no
    # runtime effect; it preserves the usage text of the INRIA
    # compute_descriptors binary.
    '''
    Interest points:
    -harris - harris detector
    -hessian - hessian detector
    -harmulti - multi-scale harris detector
    -hesmulti - multi-scale hessian detector
    -harhesmulti - multi-scale harris-hessian detector
    -harlap - harris-laplace detector
    -heslap - hessian-laplace detector
    -dog - DoG detector
    -mser - mser detector
    -haraff - harris-affine detector
    -hesaff - hessian-affine detector
    -harhes - harris-hessian-laplace detector
    -dense dx dy - dense sampling
    Interest points parameters:
    -density 100 - feature density per pixels (1 descriptor per 100pix)
    -harThres - harris threshold [100]
    -hesThres - hessian threshold [200]
    -edgeLThres - lower canny threshold [5]
    -edgeHThres - higher canny threshold [10]
    Descriptors:
    -sift - sift [D. Lowe]
    -gloh - gloh [KM]
    Descriptor paramenters:
    -color - color sift [KM]
    -dradius - patch radius for computing descriptors at scale 1
    -fface ..../facemodel.dat - frontal face detector
    Input/Output:
    -i image.png - input image pgm, ppm, png, jpg, tif
    -p1 image.pgm.points - input regions format 1
    -p2 image.pgm.points - input regions format 2
    -o1 out.desc - saves descriptors in out.desc output format 1
    -o2 out.desc - saves descriptors in out.desc output format 2
    -noangle - computes rotation variant descriptors (no rotation esimation)
    -DP - draws features as points in out.desc.png
    -DC - draws regions as circles in out.desc.png
    -DE - draws regions as ellipses in out.desc.png
    -c 255 - draws points in grayvalue [0,...,255]
    -lparams params.par - load parameter settings from file
    -sparams params.par - save parameter settings to file
    -pca input.basis - projects the descriptors with pca basis
    example: compute_descriptors.exe -sift -i image.png -p1 image.png.points -DR
    compute_descriptors.exe -harlap -sift -i image.png -DC -pca harhessift.basis
    compute_descriptors.exe -harhes -sift -color -i image.png -DC
    compute_descriptors.exe -params har.params -i image.png
    --------------------
    file format 2:
    #comments: x y cornerness scale=patch_size angle object_index point_type laplacian_value extremum_type mi11 mi12 mi21 mi
    22 ...sift descriptor
    m_nb_of_descriptors_in file
    k_number_of_parameters
    n_descriptor_dimension
    p1_1 ... p1_k d1_1 d1_2 d1_3 ... d1_n
    :
    pm_1 ... pm_k dm_1 dm_2 dm_3 ... dm_n
    --------------------
    file format 1:
    n_descriptor_dimension
    m_nb_of_descriptors_in file
    y1 a1 b1 c1 desc1_1 desc1_2 ......desc1_descriptor_dimension
    :
    ym am bm cm descm_1 descm_2 ......descm_descriptor_dimension
    --------------------
    where a(x-u)(x-u)+2b(x-u)(y-v)+c(y-v)(y-v)=1
    file format 2:
    vector_dimension
    nb_of_descriptors
    x y cornerness scale/patch_size angle object_index point_type laplacian_value extremum_type mi11 mi12 mi21 mi22 desc_1
    ...... desc_vector_dimension
    --------------------
    distance=(descA_1-descB_1)^2+...+(descA_vector_dimension-descB_vector_dimension)^2
    input.basis format:
    nb_of_dimensions
    mean_v1
    mean_v2
    .
    .
    mean_vnb_of_dimensions
    nb_of_dimensions*nb_of_pca_vectors
    pca_vector_v1
    pca_vector_v2
    .
    .
    --------------------
    '''
| |
"""Adapted from:
@longcw faster_rcnn_pytorch: https://github.com/longcw/faster_rcnn_pytorch
@rbgirshick py-faster-rcnn https://github.com/rbgirshick/py-faster-rcnn
Licensed under The MIT License [see LICENSE for details]
"""
from __future__ import print_function
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
from torch.autograd import Variable
from data import VOC_ROOT, VOCAnnotationTransform, VOCDetection, BaseTransform
from data import VOC_CLASSES as labelmap
import torch.utils.data as data
from ssd import build_ssd
import sys
import os
import time
import argparse
import numpy as np
import pickle
import cv2
if sys.version_info[0] == 2:
import xml.etree.cElementTree as ET
else:
import xml.etree.ElementTree as ET
def str2bool(v):
    """Interpret common affirmative strings ('yes', 'true', 't', '1') as True."""
    truthy = ("yes", "true", "t", "1")
    return v.lower() in truthy
# Command-line configuration for the evaluation run.
parser = argparse.ArgumentParser(
    description='Single Shot MultiBox Detector Evaluation')
parser.add_argument('--trained_model',
                    default='weights/ssd300_mAP_77.43_v2.pth', type=str,
                    help='Trained state_dict file path to open')
parser.add_argument('--save_folder', default='eval/', type=str,
                    help='File path to save results')
parser.add_argument('--confidence_threshold', default=0.01, type=float,
                    help='Detection confidence threshold')
parser.add_argument('--top_k', default=5, type=int,
                    help='Further restrict the number of predictions to parse')
parser.add_argument('--cuda', default=True, type=str2bool,
                    help='Use cuda to train model')
parser.add_argument('--voc_root', default=VOC_ROOT,
                    help='Location of VOC root directory')
parser.add_argument('--cleanup', default=True, type=str2bool,
                    help='Cleanup and remove results files following eval')

args = parser.parse_args()

if not os.path.exists(args.save_folder):
    os.mkdir(args.save_folder)

# Choose the default tensor type up front so later allocations land on
# the requested device.
if torch.cuda.is_available():
    if args.cuda:
        torch.set_default_tensor_type('torch.cuda.FloatTensor')
    if not args.cuda:
        print("WARNING: It looks like you have a CUDA device, but aren't using \
CUDA. Run with --cuda for optimal eval speed.")
        torch.set_default_tensor_type('torch.FloatTensor')
else:
    torch.set_default_tensor_type('torch.FloatTensor')

# Path templates into the VOCdevkit VOC2007 layout.
annopath = os.path.join(args.voc_root, 'VOC2007', 'Annotations', '%s.xml')
imgpath = os.path.join(args.voc_root, 'VOC2007', 'JPEGImages', '%s.jpg')
imgsetpath = os.path.join(args.voc_root, 'VOC2007', 'ImageSets',
                          'Main', '{:s}.txt')
YEAR = '2007'
devkit_path = args.voc_root + 'VOC' + YEAR
dataset_mean = (104, 117, 123)  # per-channel means subtracted from images
set_type = 'test'
class Timer(object):
    """A simple wall-clock timer that tracks a running average."""

    def __init__(self):
        self.total_time = self.start_time = self.diff = self.average_time = 0.
        self.calls = 0

    def tic(self):
        # time.time (wall clock) instead of time.clock, which does not
        # normalize for multithreading.
        self.start_time = time.time()

    def toc(self, average=True):
        """Stop the timer; return the running average (or last interval)."""
        self.diff = time.time() - self.start_time
        self.total_time += self.diff
        self.calls += 1
        self.average_time = self.total_time / self.calls
        return self.average_time if average else self.diff
def parse_rec(filename):
    """Parse a PASCAL VOC xml annotation file into a list of object dicts."""
    objects = []
    for obj in ET.parse(filename).findall('object'):
        bbox = obj.find('bndbox')
        objects.append({
            'name': obj.find('name').text,
            'pose': obj.find('pose').text,
            'truncated': int(obj.find('truncated').text),
            'difficult': int(obj.find('difficult').text),
            # VOC boxes are 1-based; convert to 0-based pixel coordinates.
            'bbox': [int(bbox.find('xmin').text) - 1,
                     int(bbox.find('ymin').text) - 1,
                     int(bbox.find('xmax').text) - 1,
                     int(bbox.find('ymax').text) - 1],
        })
    return objects
def get_output_dir(name, phase):
    """Return the directory where experimental artifacts are placed.

    The canonical path is ``<name>/<phase>``; it is created on demand.
    """
    out_dir = os.path.join(name, phase)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    return out_dir
def get_voc_results_file_template(image_set, cls):
    """Return the per-class VOC result file path, creating its directory.

    e.g. VOCdevkit/VOC2007/results/det_test_aeroplane.txt
    """
    results_dir = os.path.join(devkit_path, 'results')
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    return os.path.join(results_dir, 'det_%s_%s.txt' % (image_set, cls))
def write_voc_results_file(all_boxes, dataset):
    """Write one VOCdevkit-format detection file per class.

    Each line is "<image_id> <score> <x1> <y1> <x2> <y2>" with 1-based
    coordinates, as the official VOC evaluation expects.
    """
    for cls_ind, cls in enumerate(labelmap):
        print('Writing {:s} VOC results file'.format(cls))
        filename = get_voc_results_file_template(set_type, cls)
        with open(filename, 'wt') as f:
            for im_ind, index in enumerate(dataset.ids):
                dets = all_boxes[cls_ind + 1][im_ind]
                # Fix: `dets == []` is unreliable once dets is a numpy
                # array (elementwise comparison); test emptiness by length.
                if len(dets) == 0:
                    continue
                # the VOCdevkit expects 1-based indices
                for k in range(dets.shape[0]):
                    f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'.
                            format(index[1], dets[k, -1],
                                   dets[k, 0] + 1, dets[k, 1] + 1,
                                   dets[k, 2] + 1, dets[k, 3] + 1))
def do_python_eval(output_dir='output', use_07=True):
    """Score every class's result file; print per-class AP and the mAP.

    PR curves are pickled into *output_dir* for later inspection.
    """
    cachedir = os.path.join(devkit_path, 'annotations_cache')
    aps = []
    # The PASCAL VOC metric changed in 2010
    use_07_metric = use_07
    print('VOC07 metric? ' + ('Yes' if use_07_metric else 'No'))
    if not os.path.isdir(output_dir):
        os.mkdir(output_dir)
    for i, cls in enumerate(labelmap):
        filename = get_voc_results_file_template(set_type, cls)
        rec, prec, ap = voc_eval(
            filename, annopath, imgsetpath.format(set_type), cls, cachedir,
            ovthresh=0.5, use_07_metric=use_07_metric)
        aps += [ap]
        print('AP for {} = {:.4f}'.format(cls, ap))
        # Persist the PR curve for this class.
        with open(os.path.join(output_dir, cls + '_pr.pkl'), 'wb') as f:
            pickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f)
    print('Mean AP = {:.4f}'.format(np.mean(aps)))
    print('~~~~~~~~')
    print('Results:')
    for ap in aps:
        print('{:.3f}'.format(ap))
    print('{:.3f}'.format(np.mean(aps)))
    print('~~~~~~~~')
    print('')
    print('--------------------------------------------------------------')
    print('Results computed with the **unofficial** Python eval code.')
    print('Results should be very close to the official MATLAB eval code.')
    print('--------------------------------------------------------------')
def voc_ap(rec, prec, use_07_metric=True):
    """Compute VOC average precision from recall/precision arrays.

    With *use_07_metric* the VOC07 11-point interpolation is used
    (default: True); otherwise AP is the exact area under the
    interpolated precision-recall curve.
    """
    if use_07_metric:
        # 11-point metric: mean of the max precision at recall >= t.
        ap = 0.
        for threshold in np.arange(0., 1.1, 0.1):
            candidates = prec[rec >= threshold]
            p = np.max(candidates) if candidates.size > 0 else 0
            ap += p / 11.
        return ap
    # Exact area: pad with sentinels, take the precision envelope,
    # then sum rectangle areas where the recall value steps.
    mrec = np.concatenate(([0.], rec, [1.]))
    mpre = np.concatenate(([0.], prec, [0.]))
    for i in range(mpre.size - 1, 0, -1):
        mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
    steps = np.where(mrec[1:] != mrec[:-1])[0]
    return np.sum((mrec[steps + 1] - mrec[steps]) * mpre[steps + 1])
def voc_eval(detpath,
             annopath,
             imagesetfile,
             classname,
             cachedir,
             ovthresh=0.5,
             use_07_metric=True):
    """rec, prec, ap = voc_eval(detpath,
                                annopath,
                                imagesetfile,
                                classname,
                                [ovthresh],
                                [use_07_metric])

    Top level function that does the PASCAL VOC evaluation.

    detpath: Path to detections;
        detpath.format(classname) should produce the detection results file.
    annopath: Path to annotations;
        annopath % imagename should be the xml annotations file.
    imagesetfile: Text file containing the list of images, one per line.
    classname: Category name (duh)
    cachedir: Directory for caching the parsed annotations
    [ovthresh]: Overlap threshold (default = 0.5)
    [use_07_metric]: Whether to use VOC07's 11 point AP computation
        (default True)

    Returns (-1., -1., -1.) when the detection file is empty.
    """
    # first load gt; parsed annotations are cached in a pickle file
    if not os.path.isdir(cachedir):
        os.mkdir(cachedir)
    cachefile = os.path.join(cachedir, 'annots.pkl')
    # read list of images
    with open(imagesetfile, 'r') as f:
        lines = f.readlines()
    imagenames = [x.strip() for x in lines]
    if not os.path.isfile(cachefile):
        # load annots
        recs = {}
        for i, imagename in enumerate(imagenames):
            recs[imagename] = parse_rec(annopath % (imagename))
            if i % 100 == 0:
                print('Reading annotation for {:d}/{:d}'.format(
                    i + 1, len(imagenames)))
        # save
        print('Saving cached annotations to {:s}'.format(cachefile))
        with open(cachefile, 'wb') as f:
            pickle.dump(recs, f)
    else:
        # load
        with open(cachefile, 'rb') as f:
            recs = pickle.load(f)

    # extract gt objects for this class
    class_recs = {}
    npos = 0
    for imagename in imagenames:
        R = [obj for obj in recs[imagename] if obj['name'] == classname]
        bbox = np.array([x['bbox'] for x in R])
        # Fix: np.bool was removed in NumPy 1.24; use the builtin bool.
        difficult = np.array([x['difficult'] for x in R]).astype(bool)
        det = [False] * len(R)
        # "difficult" objects do not count towards the positive total.
        npos = npos + sum(~difficult)
        class_recs[imagename] = {'bbox': bbox,
                                 'difficult': difficult,
                                 'det': det}

    # read dets
    detfile = detpath.format(classname)
    with open(detfile, 'r') as f:
        lines = f.readlines()
    if lines:
        splitlines = [x.strip().split(' ') for x in lines]
        image_ids = [x[0] for x in splitlines]
        confidence = np.array([float(x[1]) for x in splitlines])
        BB = np.array([[float(z) for z in x[2:]] for x in splitlines])

        # sort detections by descending confidence
        sorted_ind = np.argsort(-confidence)
        BB = BB[sorted_ind, :]
        image_ids = [image_ids[x] for x in sorted_ind]

        # go down dets and mark TPs and FPs
        nd = len(image_ids)
        tp = np.zeros(nd)
        fp = np.zeros(nd)
        for d in range(nd):
            R = class_recs[image_ids[d]]
            bb = BB[d, :].astype(float)
            ovmax = -np.inf
            BBGT = R['bbox'].astype(float)
            if BBGT.size > 0:
                # compute overlaps
                # intersection
                ixmin = np.maximum(BBGT[:, 0], bb[0])
                iymin = np.maximum(BBGT[:, 1], bb[1])
                ixmax = np.minimum(BBGT[:, 2], bb[2])
                iymax = np.minimum(BBGT[:, 3], bb[3])
                iw = np.maximum(ixmax - ixmin, 0.)
                ih = np.maximum(iymax - iymin, 0.)
                inters = iw * ih
                # union = area(det) + area(gt) - intersection
                uni = ((bb[2] - bb[0]) * (bb[3] - bb[1]) +
                       (BBGT[:, 2] - BBGT[:, 0]) *
                       (BBGT[:, 3] - BBGT[:, 1]) - inters)
                overlaps = inters / uni
                ovmax = np.max(overlaps)
                jmax = np.argmax(overlaps)

            if ovmax > ovthresh:
                if not R['difficult'][jmax]:
                    if not R['det'][jmax]:
                        tp[d] = 1.      # first match of this GT box
                        R['det'][jmax] = 1
                    else:
                        fp[d] = 1.      # duplicate detection
            else:
                fp[d] = 1.

        # compute precision recall
        fp = np.cumsum(fp)
        tp = np.cumsum(tp)
        rec = tp / float(npos)
        # avoid divide by zero in case the first detection matches a difficult
        # ground truth
        prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
        ap = voc_ap(rec, prec, use_07_metric)
    else:
        rec = -1.
        prec = -1.
        ap = -1.

    return rec, prec, ap
def test_net(save_folder, net, cuda, dataset, transform, top_k,
             im_size=300, thresh=0.05):
    """Run the detector over every image in ``dataset`` and evaluate.

    Detections are accumulated per class/image, pickled to
    ``<output_dir>/detections.pkl`` and then scored via
    ``evaluate_detections``.

    Args:
        save_folder: Unused in this body; kept for interface compatibility.
        net: Detection network; ``net(x).data`` is indexed as
            (batch, class, detection, 5) with the score in column 0
            followed by relative box coordinates.
        cuda: If true, move the input batch to the GPU before the
            forward pass.
        dataset: Exposes ``__len__`` and ``pull_item(i)`` returning
            (image_tensor, ground_truth, height, width).
        transform, top_k, im_size, thresh: Unused in this body; kept for
            interface compatibility. Note detections are filtered with a
            fixed score > 0 test, not ``thresh``.
    """
    num_images = len(dataset)
    # all detections are collected into:
    #    all_boxes[cls][image] = N x 5 array of detections in
    #    (x1, y1, x2, y2, score)
    all_boxes = [[[] for _ in range(num_images)]
                 for _ in range(len(labelmap)+1)]
    # timers
    _t = {'im_detect': Timer(), 'misc': Timer()}
    output_dir = get_output_dir('ssd300_120000', set_type)
    det_file = os.path.join(output_dir, 'detections.pkl')
    for i in range(num_images):
        im, gt, h, w = dataset.pull_item(i)
        x = Variable(im.unsqueeze(0))
        # Fix: honor the ``cuda`` parameter instead of reaching for the
        # global ``args.cuda`` (the parameter was previously unused).
        if cuda:
            x = x.cuda()
        _t['im_detect'].tic()
        detections = net(x).data
        detect_time = _t['im_detect'].toc(average=False)
        # skip j = 0, because it's the background class
        for j in range(1, detections.size(1)):
            dets = detections[0, j, :]
            # keep only rows whose confidence (column 0) is positive
            mask = dets[:, 0].gt(0.).expand(5, dets.size(0)).t()
            dets = torch.masked_select(dets, mask).view(-1, 5)
            if dets.size(0) == 0:
                continue
            boxes = dets[:, 1:]
            # boxes are relative coordinates; scale back to pixels
            boxes[:, 0] *= w
            boxes[:, 2] *= w
            boxes[:, 1] *= h
            boxes[:, 3] *= h
            scores = dets[:, 0].cpu().numpy()
            cls_dets = np.hstack((boxes.cpu().numpy(),
                                  scores[:, np.newaxis])).astype(np.float32,
                                                                 copy=False)
            all_boxes[j][i] = cls_dets
        print('im_detect: {:d}/{:d} {:.3f}s'.format(i + 1,
                                                    num_images, detect_time))
    with open(det_file, 'wb') as f:
        pickle.dump(all_boxes, f, pickle.HIGHEST_PROTOCOL)
    print('Evaluating detections')
    evaluate_detections(all_boxes, output_dir, dataset)
def evaluate_detections(box_list, output_dir, dataset):
    """Write VOC-format result files for the detections, then score them.

    Args:
        box_list: the ``all_boxes[class][image]`` structure produced by
            ``test_net``.
        output_dir: directory where evaluation output is written.
        dataset: dataset the detections were computed on.
    """
    write_voc_results_file(box_list, dataset)
    do_python_eval(output_dir)
if __name__ == '__main__':
    # load net
    num_classes = len(labelmap) + 1 # +1 for background
    net = build_ssd('test', 300, num_classes) # initialize SSD
    # restore trained weights and switch to inference mode
    net.load_state_dict(torch.load(args.trained_model))
    net.eval()
    print('Finished loading model!')
    # load data
    # VOC2007 images of the split named by ``set_type``, resized to 300x300
    dataset = VOCDetection(args.voc_root, [('2007', set_type)],
                           BaseTransform(300, dataset_mean),
                           VOCAnnotationTransform())
    if args.cuda:
        net = net.cuda()
        # fixed input size, so let cudnn auto-tune the fastest kernels
        cudnn.benchmark = True
    # evaluation
    test_net(args.save_folder, net, args.cuda, dataset,
             BaseTransform(net.size, dataset_mean), args.top_k, 300,
             thresh=args.confidence_threshold)
| |
# -*- coding: utf-8 -*-
#
# pyspark documentation build configuration file, created by
# sphinx-quickstart on Thu Aug 28 15:17:47 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shutil
import errno
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
# Remove previously generated rst files. Ignore errors just in case it stops
# generating whole docs.
# Compute the generated-API directory once instead of repeating the
# expression for rmtree and mkdir.
_api_dir = "%s/reference/api" % os.path.dirname(os.path.abspath(__file__))
shutil.rmtree(_api_dir, ignore_errors=True)
# Recreate the directory; exist_ok replaces the old try/except-EEXIST
# dance and also tolerates a skipped/raced rmtree above.
os.makedirs(_api_dir, exist_ok=True)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.2'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.mathjax',
'sphinx.ext.autosummary',
'nbsphinx', # Converts Jupyter Notebook to reStructuredText files for Sphinx.
# For ipython directive in reStructuredText files. It is generated by the notebook.
'IPython.sphinxext.ipython_console_highlighting',
'numpydoc', # handle NumPy documentation formatted docstrings.
]
numpydoc_show_class_members = False
# Links used globally in the RST files.
# These are defined here to allow link substitutions dynamically.
# {0} is the git tag/branch (RELEASE_TAG), {1} the docs version
# (RELEASE_VERSION). Note: the |downloading| and |building_spark|
# targets were previously swapped; |downloading| must point at the
# "#downloading" anchor and |building_spark| at building-spark.html.
rst_epilog = """
.. |binder| replace:: Live Notebook
.. _binder: https://mybinder.org/v2/gh/apache/spark/{0}?filepath=python%2Fdocs%2Fsource%2Fgetting_started%2Fquickstart.ipynb
.. |examples| replace:: Examples
.. _examples: https://github.com/apache/spark/tree/{0}/examples/src/main/python
.. |downloading| replace:: Downloading
.. _downloading: https://spark.apache.org/docs/{1}/#downloading
.. |building_spark| replace:: Building Spark
.. _building_spark: https://spark.apache.org/docs/{1}/building-spark.html
""".format(os.environ.get("RELEASE_TAG", "master"), os.environ.get('RELEASE_VERSION', "latest"))
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'PySpark'
copyright = ''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'master'
# The full version, including alpha/beta/rc tags.
release = os.environ.get('RELEASE_VERSION', version)
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '.DS_Store', '**.ipynb_checkpoints']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for autodoc --------------------------------------------------
# Look at the first line of the docstring for function and method signatures.
autodoc_docstring_signature = True
autosummary_generate = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'pydata_sphinx_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "../../../docs/img/spark-logo-reverse.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_css_files = [
'css/pyspark.css',
]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pysparkdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'pyspark.tex', 'pyspark Documentation',
'Author', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyspark', 'pyspark Documentation',
['Author'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pyspark', 'pyspark Documentation',
'Author', 'pyspark', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'pyspark'
epub_author = 'Author'
epub_publisher = 'Author'
epub_copyright = '2014, Author'
# The basename for the epub file. It defaults to the project name.
#epub_basename = 'pyspark'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
def setup(app):
# The app.add_javascript() is deprecated.
getattr(app, "add_js_file", getattr(app, "add_javascript"))('copybutton.js')
# Skip sample endpoint link (not expected to resolve)
linkcheck_ignore = [r'https://kinesis.us-east-1.amazonaws.com']
| |
# -*- coding: utf-8 -*-
# Django settings for group project.
import os.path
import posixpath
import pinax
PINAX_ROOT = os.path.abspath(os.path.dirname(pinax.__file__))
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# tells Pinax to use the default theme
PINAX_THEME = 'default'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# tells Pinax to serve media through the staticfiles app.
SERVE_MEDIA = DEBUG
INTERNAL_IPS = (
'127.0.0.1',
)
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3", # Add "postgresql_psycopg2", "postgresql", "mysql", "sqlite3" or "oracle".
"NAME": "dev.db", # Or path to database file if using sqlite3.
"USER": "", # Not used with sqlite3.
"PASSWORD": "", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
# although not all variations may be possible on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'US/Eastern'
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'site_media', 'media')
# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com"
MEDIA_URL = '/site_media/media/'
# Absolute path to the directory that holds static files like app media.
# Example: "/home/media/media.lawrence.com/apps/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'site_media', 'static')
# URL that handles the static files like app media.
# Example: "http://media.lawrence.com"
STATIC_URL = '/site_media/static/'
# Additional directories which hold static files
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'media'),
os.path.join(PINAX_ROOT, 'media', PINAX_THEME),
)
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = posixpath.join(STATIC_URL, "admin/")
# Make this unique, and don't share it with anybody. It is empty in this
# sample settings file; set a real value (e.g. via local_settings.py)
# before deploying.
SECRET_KEY = ''
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django_openid.consumer.SessionConsumer',
'django.contrib.messages.middleware.MessageMiddleware',
'account.middleware.LocaleMiddleware',
'django.middleware.doc.XViewMiddleware',
'pagination.middleware.PaginationMiddleware',
'django_sorting.middleware.SortingMiddleware',
'pinax.middleware.security.HideSensistiveFieldsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
ROOT_URLCONF = 'sample_group_project.urls'
TEMPLATE_DIRS = (
os.path.join(PROJECT_ROOT, "templates"),
os.path.join(PINAX_ROOT, "templates", PINAX_THEME),
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.core.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages",
"pinax.core.context_processors.pinax_settings",
"notification.context_processors.notification",
"announcements.context_processors.site_wide_announcements",
"account.context_processors.openid",
"account.context_processors.account",
)
INSTALLED_APPS = (
# included
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.humanize',
'pinax.templatetags',
# external
'notification', # must be first
'django_openid',
'emailconfirmation',
'mailer',
'announcements',
'pagination',
'groups',
'timezones',
'ajax_validation',
'tagging',
'uni_form',
'wiki',
'avatar',
'threadedcomments',
'tribes',
'projects',
'gravatar',
'django_sorting',
'photologue',
'attachments',
'django_markup',
'django_filters',
'staticfiles',
'debug_toolbar',
# internal (for now)
'basic_profiles',
'account',
'signup_codes',
'tag_app',
'tagging_utils',
'threadedcomments_extras',
'topics',
'tasks',
'photos',
'basic_groups',
'about',
)
MESSAGE_STORAGE = "django.contrib.messages.storage.session.SessionStorage"
ABSOLUTE_URL_OVERRIDES = {
"auth.user": lambda o: "/profiles/profile/%s/" % o.username,
}
MARKUP_FILTER_FALLBACK = 'none'
MARKUP_CHOICES = (
('restructuredtext', u'reStructuredText'),
('textile', u'Textile'),
('markdown', u'Markdown'),
('creole', u'Creole'),
)
WIKI_MARKUP_CHOICES = MARKUP_CHOICES
AUTH_PROFILE_MODULE = 'basic_profiles.Profile'
NOTIFICATION_LANGUAGE_MODULE = 'account.Account'
ACCOUNT_OPEN_SIGNUP = True
ACCOUNT_REQUIRED_EMAIL = False
ACCOUNT_EMAIL_VERIFICATION = False
ACCOUNT_EMAIL_AUTHENTICATION = False
ACCOUNT_UNIQUE_EMAIL = EMAIL_CONFIRMATION_UNIQUE_EMAIL = False
if ACCOUNT_EMAIL_AUTHENTICATION:
AUTHENTICATION_BACKENDS = (
"account.auth_backends.EmailModelBackend",
)
else:
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
)
EMAIL_CONFIRMATION_DAYS = 2
EMAIL_DEBUG = DEBUG
CONTACT_EMAIL = "feedback@example.com"
SITE_NAME = "Pinax"
LOGIN_URL = "/account/login/"
LOGIN_REDIRECT_URLNAME = "what_next"
DEBUG_TOOLBAR_CONFIG = {
"INTERCEPT_REDIRECTS": False,
}
# local_settings.py can be used to override environment-specific settings
# like database and email that differ between development and production.
try:
from local_settings import *
except ImportError:
pass
| |
# wwwhisper - web access control.
# Copyright (C) 2012-2015 Jan Wrobel <jan@mixedbit.org>
"""Views that allow to manage access control list.
Expose REST interface for adding/removing locations and users and for
granting/revoking access to locations.
"""
from django.forms import ValidationError
from functools import wraps
from wwwhisper_auth import http
from wwwhisper_auth.models import LimitExceeded
import logging
logger = logging.getLogger(__name__)
def _full_url(request):
return request.site_url + request.path
def set_collection(view_method):
    """Decorator: bind ``self.collection`` before invoking the view method.

    Looks up the attribute named by ``self.collection_name`` on
    ``request.site`` and stores it on the view instance so the wrapped
    method can operate on it.
    """
    @wraps(view_method)
    def _bound(self, request, **kwargs):
        self.collection = getattr(request.site, self.collection_name)
        return view_method(self, request, **kwargs)
    return _bound
class CollectionView(http.RestView):
    """Generic view over a collection of resources.

    Supports listing the whole collection as json (GET) and adding a
    new resource to it (POST).

    Attributes:
        collection_name: Name of the collection that the view represents.
    """
    collection_name = None

    @set_collection
    def post(self, request, **kwargs):
        """Add a new resource to the collection.

        Args:
            **kwargs: collection dependent arguments used to create
                the resource.
        Returns json representation of the added resource.
        """
        try:
            item = self.collection.create_item(**kwargs)
        except ValidationError as ex:
            # ex.messages is a list of errors.
            return http.HttpResponseBadRequest(', '.join(ex.messages))
        except LimitExceeded as ex:
            return http.HttpResponseLimitExceeded(str(ex))
        attributes = item.attributes_dict(request.site_url)
        response = http.HttpResponseCreated(attributes)
        item_url = attributes['self']
        response['Location'] = item_url
        response['Content-Location'] = item_url
        return response

    @set_collection
    def get(self, request):
        """Return json representation of all resources in the collection."""
        items = [item.attributes_dict(request.site_url)
                 for item in self.collection.all()]
        body = {'self': _full_url(request)}
        body[self.collection_name] = items
        return http.HttpResponseOKJson(body)
class ItemView(http.RestView):
    """Generic view over a single resource stored in a collection.

    Supports retrieving the resource as json (GET) and removing it
    (DELETE).

    Attributes:
        collection_name: Name of the collection used to look up the
            resource.
    """
    collection_name = None

    @set_collection
    def get(self, request, uuid):
        """Return json representation of a resource with a given uuid."""
        item = self.collection.find_item(uuid)
        if item is not None:
            return http.HttpResponseOKJson(
                item.attributes_dict(request.site_url))
        return http.HttpResponseNotFound(
            '%s not found' % self.collection.item_name.capitalize())

    @set_collection
    def delete(self, request, uuid):
        """Delete a resource with a given uuid."""
        if self.collection.delete_item(uuid):
            return http.HttpResponseNoContent()
        return http.HttpResponseNotFound(
            '%s not found' % self.collection.item_name.capitalize())
class OpenAccessView(http.RestView):
    """Manages resources that define if a location is open.

    An open location can be accessed without authentication.
    """

    @staticmethod
    def _attributes_dict(request):
        """Attributes representing a resource to which a request is related."""
        return {'self': _full_url(request)}

    def put(self, request, location_uuid):
        """Create a resource that enables open access to a given location."""
        location = request.site.locations.find_item(location_uuid)
        if location is None:
            return http.HttpResponseNotFound('Location not found.')
        attributes = self._attributes_dict(request)
        if location.open_access_granted():
            # Already open -- idempotent success, nothing new created.
            return http.HttpResponseOKJson(attributes)
        location.grant_open_access()
        response = http.HttpResponseCreated(attributes)
        response['Location'] = _full_url(request)
        return response

    def get(self, request, location_uuid):
        """Check if a resource enabling open access to a location exists."""
        location = request.site.locations.find_item(location_uuid)
        if location is None:
            return http.HttpResponseNotFound('Location not found.')
        if location.open_access_granted():
            return http.HttpResponseOKJson(self._attributes_dict(request))
        return http.HttpResponseNotFound(
            'Open access to location disallowed.')

    def delete(self, request, location_uuid):
        """Delete the resource, i.e. disable open access to the location."""
        location = request.site.locations.find_item(location_uuid)
        if location is None:
            return http.HttpResponseNotFound('Location not found.')
        if location.open_access_granted():
            location.revoke_open_access()
            return http.HttpResponseNoContent()
        return http.HttpResponseNotFound(
            'Open access to location already disallowed.')
class AllowedUsersView(http.RestView):
    """Manages resources that define which users can access locations."""

    def put(self, request, location_uuid, user_uuid):
        """Creates a resource.

        Grants access to a given location by a given user.
        """
        location = request.site.locations.find_item(location_uuid)
        # find_item() signals a missing location with None; test identity
        # (consistent with get()) rather than truthiness.
        if location is None:
            return http.HttpResponseNotFound('Location not found.')
        try:
            (permission, created) = location.grant_access(user_uuid)
            attributes_dict = permission.attributes_dict(request.site_url)
            if created:
                response = http.HttpResponseCreated(attributes_dict)
                response['Location'] = attributes_dict['self']
            else:
                # Access was already granted; report success without
                # creating anything new.
                response = http.HttpResponseOKJson(attributes_dict)
            return response
        except LookupError as ex:
            # grant_access can raise LookupError; surface it as 404.
            return http.HttpResponseNotFound(str(ex))

    def get(self, request, location_uuid, user_uuid):
        """Checks if a resource that grants access exists.

        This is not equivalent of checking if the user can access the
        location. If the location is open, but the user is not
        explicitly granted access, not found failure is returned.
        """
        location = request.site.locations.find_item(location_uuid)
        if location is None:
            return http.HttpResponseNotFound('Location not found.')
        try:
            permission = location.get_permission(user_uuid)
            return http.HttpResponseOKJson(
                permission.attributes_dict(request.site_url))
        except LookupError as ex:
            return http.HttpResponseNotFound(str(ex))

    def delete(self, request, location_uuid, user_uuid):
        """Deletes a resource.

        Revokes access to a given location by a given user. If the
        location is open, the user will still be able to access the
        location after this call succeeds.
        """
        location = request.site.locations.find_item(location_uuid)
        # Same identity check as put()/get() for consistency.
        if location is None:
            return http.HttpResponseNotFound('Location not found.')
        try:
            location.revoke_access(user_uuid)
            return http.HttpResponseNoContent()
        except LookupError as ex:
            return http.HttpResponseNotFound(str(ex))
class SkinView(http.RestView):
    """Configures the login page."""
    def put(self, request, title, header, message, branding):
        """Updates the login page configuration.

        Returns the updated skin as json, or a bad request error with
        the joined validation messages when any field is rejected.
        """
        try:
            request.site.update_skin(title=title, header=header,
                                     message=message, branding=branding)
        except ValidationError as ex:
            return http.HttpResponseBadRequest(
                'Failed to update login page: ' + ', '.join(ex.messages))
        return http.HttpResponseOKJson(request.site.skin())
    def get(self, request):
        """Returns the current login page configuration as json."""
        return http.HttpResponseOKJson(request.site.skin())
| |
from app import db, bcrypt
import datetime
from flask import redirect, url_for
from flask_admin import expose, AdminIndexView
from flask_admin.contrib.sqla import ModelView
from flask_admin.form import SecureForm
from flask_login import current_user
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
from sqlalchemy.sql import select, func
class Event(db.Model):
    """An event record grouping indicators, related links and notes."""
    __tablename__ = "event"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), index=True, unique=True, nullable=False)
    details = db.Column(db.Text())
    created = db.Column(db.DateTime, nullable=False)
    # Confidence score for the event; defaults to 50 in __init__.
    confidence = db.Column(db.Integer, nullable=False)
    status_id = db.Column(db.Integer, db.ForeignKey('status.id'), nullable=False)
    source_id = db.Column(db.Integer, db.ForeignKey('source.id'), nullable=False)
    tlp_id = db.Column(db.Integer, db.ForeignKey('tlp.id'), nullable=False)
    impact_id = db.Column(db.Integer, db.ForeignKey('level.id'), nullable=False)
    likelihood_id = db.Column(db.Integer, db.ForeignKey('likelihood.id'), nullable=False)
    source = db.relationship('Source', foreign_keys=source_id)
    tlp = db.relationship('Tlp', foreign_keys=tlp_id)
    impact = db.relationship('Level', foreign_keys=impact_id)
    likelihood = db.relationship('Likelihood', foreign_keys=likelihood_id)
    status = db.relationship('Status', foreign_keys=status_id)
    indicators = db.relationship('Indicator', backref='event', lazy='dynamic')
    rel_events = db.relationship('Links', backref='event', lazy='dynamic')
    notes = db.relationship('Note', backref='event', lazy='dynamic')
    @hybrid_property
    def indicator_count(self):
        # Instance level: issues a COUNT query via the dynamic relationship.
        return self.indicators.count()
    @indicator_count.expression
    def indicator_count(cls):
        # Class level: correlated subquery so the count can be used in
        # SQL filters and ordering.
        return (select([func.count(Indicator.id)]).
                where(Indicator.event_id == cls.id).
                label("indicator_count")
                )
    def __init__(self, name, details, source, tlp, impact, likelihood, confidence=50):
        """Create an event; status defaults to the row with id 1 and
        created is stamped with the current UTC time."""
        self.name = name
        self.details = details
        self.confidence = confidence
        self.source = source
        self.tlp = tlp
        self.impact = impact
        self.likelihood = likelihood
        self.status = Status.query.get(1)
        self.created = datetime.datetime.utcnow()
    def as_dict(self):
        # NOTE(review): despite the name this returns a *string* rendering
        # of the column dict, not a dict -- confirm callers before changing.
        return '%s' % {c.name: getattr(self, c.name) for c in self.__table__.columns}
    def __repr__(self):
        return '<Event %r>' % (self.name)
class Indicator(db.Model):
    """An indicator (IOC) row belonging to an event."""
    __tablename__ = "indicator"
    id = db.Column(db.Integer, primary_key=True)
    # The indicator value itself (string form); its kind is given by itype.
    ioc = db.Column(db.String(64), index=True, nullable=False)
    comment = db.Column(db.String(255))
    # Short enrichment summary and the full enrichment payload.
    enrich = db.Column(db.String(255))
    enrich_full = db.Column(db.Text())
    first_seen = db.Column(db.DateTime, nullable=False)
    last_seen = db.Column(db.DateTime, index=True, nullable=False)
    pending = db.Column(db.Boolean, nullable=False)
    event_id = db.Column(db.Integer, db.ForeignKey('event.id'), nullable=False)
    control_id = db.Column(db.Integer, db.ForeignKey('control.id'), nullable=False)
    itype_id = db.Column(db.Integer, db.ForeignKey('itype.id'), nullable=False)
    control = db.relationship('Control', foreign_keys=control_id)
    itype = db.relationship('Itype', foreign_keys=itype_id)
    rel_indicators = db.relationship('Links', backref='indicator', lazy='dynamic')
    mitigations = db.relationship('Mitigation', backref='indicator', lazy='dynamic')
    # The same ioc may exist once per (event, itype, control) combination.
    __table_args__ = (db.UniqueConstraint("ioc", "event_id", "itype_id", "control_id"), )
    @hybrid_property
    def rel_list(self):
        # Comma-joined rel_event_id values of all linked rows. Instance
        # level only; no SQL expression is defined for this hybrid.
        return ','.join([str(i.rel_event_id) for i in self.rel_indicators])
    def __init__(self, event_id, ioc, comment, control, itype, pending=False, enrich=None, enrich_full=None):
        """Create an indicator; first_seen/last_seen are stamped with the
        current UTC time."""
        self.event_id = event_id
        self.ioc = ioc
        self.comment = comment
        self.control = control
        self.itype = itype
        self.pending = pending
        self.enrich = enrich
        self.enrich_full = enrich_full
        self.first_seen = datetime.datetime.utcnow()
        self.last_seen = datetime.datetime.utcnow()
    def as_dict(self):
        # NOTE(review): returns a *string* rendering of the column dict,
        # not a dict -- same caveat as Event.as_dict.
        return '%s' % {c.name: getattr(self, c.name) for c in self.__table__.columns}
    def __repr__(self):
        return '<Indicator %r>' % (self.ioc)
class Links(db.Model):
    """Join row relating one event/indicator pair to another pair.

    The rel_* columns are plain integers with no ForeignKey constraint,
    so referential integrity for the related side is not enforced by the
    database.
    """
    __tablename__ = "links"
    id = db.Column(db.Integer, primary_key=True)
    event_id = db.Column(db.Integer, db.ForeignKey('event.id'), nullable=False)
    indicator_id = db.Column(db.Integer, db.ForeignKey('indicator.id'), nullable=False)
    # The event/indicator on the other side of the link (no FK -- see
    # class docstring).
    rel_event_id = db.Column(db.Integer, nullable=False)
    rel_indicator_id = db.Column(db.Integer, nullable=False)
    def __init__(self, event_id, indicator_id, rel_event_id, rel_indicator_id):
        self.event_id = event_id
        self.indicator_id = indicator_id
        self.rel_event_id = rel_event_id
        self.rel_indicator_id = rel_indicator_id
    def __repr__(self):
        return '<Links %r:%r -> %r:%r>' % (self.event_id, self.indicator_id, self.rel_event_id, self.rel_indicator_id)
class Note(db.Model):
    """Free-form analyst note attached to an event."""
    __tablename__ = "note"
    id = db.Column(db.Integer, primary_key=True)
    created = db.Column(db.DateTime, nullable=False)
    details = db.Column(db.Text())
    event_id = db.Column(db.Integer, db.ForeignKey('event.id'), nullable=False)

    def __init__(self, event_id, details):
        """Create a note for *event_id*; ``created`` is set to UTC now."""
        self.details = details
        self.event_id = event_id
        self.created = datetime.datetime.utcnow()

    def __repr__(self):
        return '<Note %r>' % (self.details)
class Mitigation(db.Model):
    """Mitigation action pushed to a destination for an indicator.

    NOTE(review): __init__ never sets ``indicator_id`` although the column is
    NOT NULL - presumably it is populated via the Indicator.mitigations
    backref before commit; confirm against callers.
    """
    __tablename__ = "mitigation"
    id = db.Column(db.Integer, primary_key=True)
    description = db.Column(db.String(255), nullable=False)
    created = db.Column(db.DateTime, nullable=False)
    # Time-to-live for the mitigation (units defined by the destination).
    ttl = db.Column(db.Integer, nullable=False)
    destination_id = db.Column(db.Integer, db.ForeignKey('destination.id'), nullable=False)
    indicator_id = db.Column(db.Integer, db.ForeignKey('indicator.id'), nullable=False)
    pending = db.Column(db.Boolean, nullable=False)
    active = db.Column(db.Boolean, nullable=False)
    destination = db.relationship('Destination', foreign_keys=destination_id)

    def __init__(self, destination_id, ttl, description):
        """New mitigations start pending and active, created at UTC now."""
        self.created = datetime.datetime.utcnow()
        self.pending = True
        self.active = True
        self.destination_id = destination_id
        self.ttl = ttl
        self.description = description

    def __repr__(self):
        return '<Mitigation %r>' % (self.id)
class Destination(db.Model):
    """Mitigation destination (e.g. an enforcement system) with a named
    output formatter; destination names are unique."""
    __tablename__ = "destination"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), nullable=False)
    description = db.Column(db.String(255))
    formatter = db.Column(db.String(64), nullable=False)
    __table_args__ = (db.UniqueConstraint("name"),)

    def __repr__(self):
        return '<Destination %r>' % (self.name)
class Tlp(db.Model):
    """Lookup table of TLP (Traffic Light Protocol) classifications."""
    __tablename__ = "tlp"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))

    def __repr__(self):
        return '<Tlp %r>' % (self.name)
class Level (db.Model):
    """Lookup table of severity/impact levels."""
    __tablename__ = "level"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))

    def __repr__(self):
        return '<Level %r>' % (self.name)
class Likelihood (db.Model):
    """Lookup table of likelihood ratings."""
    __tablename__ = "likelihood"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))

    def __repr__(self):
        # Bug fix: the original said '<Level %r>' (copy-paste from Level).
        return '<Likelihood %r>' % (self.name)
class Source(db.Model):
    """Lookup table of intelligence sources."""
    __tablename__ = "source"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))

    def __repr__(self):
        return '<Source %r>' % (self.name)
class Itype(db.Model):
    """Indicator type (e.g. ip, domain) plus the regex used to match it."""
    __tablename__ = "itype"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    # Pattern used elsewhere to classify/validate iocs of this type.
    regex = db.Column(db.String(255))

    def __init__(self, name, regex):
        self.name = name
        self.regex = regex

    def __repr__(self):
        return '<Itype %r>' % (self.name)
class Control(db.Model):
    """Lookup table of controls an indicator can be associated with."""
    __tablename__ = "control"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))

    def __repr__(self):
        return '<Control %r>' % (self.name)
class Status(db.Model):
    """Lookup table of event statuses."""
    __tablename__ = "status"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))

    def __repr__(self):
        return '<Status %r>' % (self.name)
class Users(db.Model):
    """Application user with a bcrypt-hashed password and a simple role,
    implementing the attributes Flask-Login expects."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    email = db.Column(db.String, unique=True, nullable=False)
    # Bcrypt hash (60 bytes), exposed through the ``password`` hybrid property.
    # NOTE(review): db.Binary is deprecated in newer SQLAlchemy (LargeBinary).
    _password = db.Column(db.Binary(60), nullable=False)
    authenticated = db.Column(db.Boolean, default=False)
    role = db.Column(db.String, default='user')

    def __init__(self, email, plaintext_password, role):
        self.email = email
        # Assignment goes through the property setter, which hashes the value.
        self.password = plaintext_password
        self.authenticated = False
        self.role = role

    @hybrid_property
    def password(self):
        return self._password

    @password.setter
    def password(self, plaintext_password):
        # Fix: the setter must be bound to the property name ``password``.
        # The original bound it to ``set_password``, which leaves a
        # getter-only ``password`` attribute on SQLAlchemy versions where
        # hybrid_property.setter returns a copy instead of mutating in place.
        self._password = bcrypt.generate_password_hash(plaintext_password)

    @hybrid_method
    def is_correct_password(self, plaintext_password):
        """Check *plaintext_password* against the stored bcrypt hash."""
        return bcrypt.check_password_hash(self.password, plaintext_password)

    @property
    def is_authenticated(self):
        """Return True if the user is authenticated."""
        return self.authenticated

    @property
    def is_active(self):
        """Always True, as all users are active."""
        return True

    @property
    def is_anonymous(self):
        """Always False, as anonymous users aren't supported."""
        return False

    def get_id(self):
        """Return the user id as a string, per Flask-Login's requirements.

        Requires use of Python 3.
        """
        return str(self.id)

    def __repr__(self):
        # Bug fix: Users has no ``name`` attribute; use ``email``.
        return '<User {0}>'.format(self.email)
class HomeView(AdminIndexView):
    """Customised home view for flask-admin administration panel."""
    form_base_class = SecureForm  # csrf protection

    @expose('/')
    def index(self):
        """Exposes custom homepage to the user rather than the default one."""
        return self.render("admin/index.html")

    def inaccessible_callback(self, name, **kwargs):
        """Redirects to login page if user doesn't have access."""
        return redirect(url_for('index'))

    def is_accessible(self):
        """Defines admin panel access policy: authenticated admins only."""
        return current_user.is_authenticated and current_user.role == 'admin'
class UserView(ModelView):
    """Customised user view for flask-admin administration panel."""
    form_base_class = SecureForm  # csrf protection
    column_list = ['email', 'role']
    column_searchable_list = ['email', 'role']
    column_filters = ['email']
    # NOTE(review): inline editing is configured while can_edit/can_create
    # are False - inline (list-view) edits still work with can_edit=False;
    # confirm this combination is intentional.
    column_editable_list = ['email', 'role']
    page_size = 50
    can_create = False
    can_edit = False
    # Restrict the role field to the two known roles.
    form_choices = {
        'role': [
            ('admin', 'admin'),
            ('user', 'user')
        ]
    }

    def inaccessible_callback(self, name, **kwargs):
        """Redirects to login page if user doesn't have access."""
        return redirect(url_for('index'))

    def is_accessible(self):
        """Defines admin panel access policy: authenticated admins only."""
        return current_user.is_authenticated and current_user.role == 'admin'
| |
#!/usr/bin/env python
# 12.01.2007, c
import os.path as op
import shutil
from optparse import OptionParser
import sfepy
from sfepy.base.base import *
from sfepy.base.conf import ProblemConf, get_standard_keywords
from sfepy.fem import ProblemDefinition
from sfepy.fem.evaluate import assemble_by_blocks
from sfepy.homogenization.phono import transform_plot_data, plot_logs, \
plot_gaps, detect_band_gaps, compute_cat, compute_polarization_angles
from sfepy.homogenization.engine import HomogenizationEngine
from sfepy.applications import SimpleApp
from sfepy.solvers import Solver, eig
from sfepy.base.plotutils import plt
def make_save_hook(base_name, post_process_hook=None, file_per_var=None):
    """Build a corrector-saving callback.

    The returned function saves a problem state to ``base_name % (ir, ic)``
    plus a '.vtk' suffix, forwarding *post_process_hook* and *file_per_var*
    to ``problem.save_state()``.
    """
    def save_phono_correctors(state, problem, ir, ic):
        filename = (base_name % (ir, ic)) + '.vtk'
        problem.save_state(filename, state,
                           post_process_hook=post_process_hook,
                           file_per_var=file_per_var)
    return save_phono_correctors
def try_set_defaults(obj, attr, defaults):
    """Return ``obj.attr`` with missing keys filled in from *defaults*.

    Falls back to *defaults* itself when the attribute is missing or
    ``set_defaults()`` fails on its value.
    """
    try:
        values = getattr(obj, attr)
        set_defaults(values, defaults)
    except Exception:
        # Fix: the original bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt. Exception keeps the best-effort fallback
        # (missing attribute, non-dict value) without hiding interpreter
        # signals.
        values = defaults
    return values
def report_iw_cat(iw_dir, christoffel):
    """Log the incident wave direction and the Christoffel acoustic tensor."""
    for label, value in (('incident wave direction:', iw_dir),
                         ('Christoffel acoustic tensor:', christoffel)):
        output(label)
        output(value)
class AcousticBandGapsApp( SimpleApp ):
    """Application computing phononic band gaps and, optionally, dispersion
    properties / phase velocities of periodic media."""

    def process_options( options ):
        """Application options setup. Sets default values for missing
        non-compulsory options.

        Returns a Struct built from every local name defined below
        (including helper locals such as ``get`` and ``aux``).
        """
        get = options.get_default_attr

        clear_cache = get( 'clear_cache', {} )
        eigensolver = get( 'eigensolver', 'eig.sgscipy' )
        eig_problem = get( 'eig_problem', 'simple' )
        schur = get( 'schur', None )

        elasticity_contrast = get( 'elasticity_contrast', 1.0 )
        scale_epsilon = get( 'scale_epsilon', 1.0 )

        incident_wave_dir = get( 'incident_wave_dir', None )
        dispersion = get( 'dispersion', 'simple' )
        dispersion_conf = get( 'dispersion_conf', None )
        homogeneous = get( 'homogeneous', False )

        # (n_first, n_last) eigenvectors to save.
        save = get( 'save_eig_vectors', (0, 0) )

        eig_range = get( 'eig_range', None )
        freq_margins = get( 'freq_margins', (5, 5) )
        # Given in per cent.
        freq_margins = 0.01 * nm.array( freq_margins, dtype = nm.float64 )

        fixed_eig_range = get( 'fixed_eig_range', None )

        # Given in per cent.
        freq_step = 0.01 * get( 'freq_step', 5 )

        # Numerical tolerances for the band-gap detection.
        feps = get( 'feps', 1e-8 )
        zeps = get( 'zeps', 1e-8 )
        teps = get( 'teps', 1e-4 )
        teps_rel = get( 'teps_rel', True )

        eig_vector_transform = get( 'eig_vector_transform', None )
        plot_transform = get( 'plot_transform', None )
        plot_transform_wave = get( 'plot_transform_wave', None )
        plot_transform_angle = get( 'plot_transform_angle', None )

        plot_options = get( 'plot_options', {'show' : True,'legend' : False,} )

        fig_name = get( 'fig_name', None )
        fig_name_wave = get( 'fig_name_wave', None )
        fig_name_angle = get( 'fig_name_angle', None )

        # Default labels for the eigenvalue plot.
        aux = {
            'resonance' : 'eigenfrequencies',
            'masked' : 'masked eigenfrequencies',
            'eig_min' : 'min eig($M^*$)',
            'eig_mid' : 'mid eig($M^*$)',
            'eig_max' : 'max eig($M^*$)',
            'y_axis' : 'eigenvalues of mass matrix $M^*$',
        }
        plot_labels = try_set_defaults( options, 'plot_labels', aux )

        # Default labels for the polarization-angle plot.
        aux = {
            'resonance' : 'eigenfrequencies',
            'masked' : 'masked eigenfrequencies',
            'eig_min' : r'$\kappa$(min)',
            'eig_mid' : r'$\kappa$(mid)',
            'eig_max' : r'$\kappa$(max)',
            'y_axis' : 'polarization angles',
        }
        plot_labels_angle = try_set_defaults( options, 'plot_labels_angle', aux )

        # Default labels for the wave-number plot.
        aux = {
            'resonance' : 'eigenfrequencies',
            'masked' : 'masked eigenfrequencies',
            'eig_min' : r'wave number (min)',
            'eig_mid' : r'wave number (mid)',
            'eig_max' : r'wave number (max)',
            'y_axis' : 'wave numbers',
        }
        plot_labels_wave = try_set_defaults( options, 'plot_labels_wave', aux )

        # Matplotlib resources (line styles, gap fill colours, rcParams).
        plot_rsc = {
            'resonance' : {'linewidth' : 0.5, 'color' : 'r', 'linestyle' : '-' },
            'masked' : {'linewidth' : 0.5, 'color' : 'r', 'linestyle' : ':' },
            'x_axis' : {'linewidth' : 0.5, 'color' : 'k', 'linestyle' : '--' },
            'eig_min' : {'linewidth' : 0.5, 'color' : 'b', 'linestyle' : '--' },
            'eig_mid' : {'linewidth' : 0.5, 'color' : 'b', 'linestyle' : '-.' },
            'eig_max' : {'linewidth' : 0.5, 'color' : 'b', 'linestyle' : '-' },
            'strong_gap' : {'linewidth' : 0, 'facecolor' : (1, 1, 0.5) },
            'weak_gap' : {'linewidth' : 0, 'facecolor' : (1, 1, 1) },
            'propagation' : {'linewidth' : 0, 'facecolor' : (0.5, 1, 0.5) },
            'params' : {'axes.labelsize': 'large',
                        'text.fontsize': 'large',
                        'legend.fontsize': 'large',
                        'xtick.labelsize': 'large',
                        'ytick.labelsize': 'large',
                        'text.usetex': False},
        }
        plot_rsc = try_set_defaults( options, 'plot_rsc', plot_rsc )

        # Compulsory options: get() raises with the given message if missing.
        eigenmomentum = get( 'eigenmomentum', None,
                             'missing "eigenmomentum" in options!' )
        region_to_material = get( 'region_to_material', None,
                                  'missing "region_to_material" in options!' )
        tensor_names = get( 'tensor_names', None,
                            'missing "tensor_names" in options!' )
        volume = get( 'volume', None, 'missing "volume" in options!' )

        if eig_problem == 'simple_liquid':
            liquid_region = get('liquid_region', None,
                                'missing "liquid_region" in options!')
        else:
            liquid_region = None

        return Struct( **locals() )
    process_options = staticmethod( process_options )
    def process_options_pv( options ):
        """Application options setup for phase velocity computation. Sets
        default values for missing non-compulsory options.

        A reduced variant of process_options(): no band-gap detection or
        plotting options are needed. Returns a Struct of all locals.
        """
        get = options.get_default_attr

        clear_cache = get( 'clear_cache', {} )
        eigensolver = get( 'eigensolver', 'eig.sgscipy' )

        incident_wave_dir = get( 'incident_wave_dir', None )
        dispersion = get( 'dispersion', 'simple' )
        dispersion_conf = get( 'dispersion_conf', None )
        homogeneous = get( 'homogeneous', False )
        fig_suffix = get( 'fig_suffix', '.pdf' )

        # Compulsory options: get() raises with the given message if missing.
        region_to_material = get( 'region_to_material', None,
                                  'missing "region_to_material" in options!' )
        tensor_names = get( 'tensor_names', None,
                            'missing "tensor_names" in options!' )
        volume = get( 'volume', None, 'missing "volume" in options!' )

        return Struct( **locals() )
    process_options_pv = staticmethod( process_options_pv )
    def __init__( self, conf, options, output_prefix, **kwargs ):
        """Initialise the application, its per-instance caches, and copy the
        problem description file into the output directory."""
        SimpleApp.__init__( self, conf, options, output_prefix,
                            init_equations = False )

        self.setup_options()

        # Caches reused across parametric runs; cleared via 'clear_cache'.
        self.cached_coefs = None
        self.cached_iw_dir = None
        self.cached_christoffel = None
        self.cached_evp = None

        output_dir = self.problem.output_dir
        shutil.copyfile( conf._filename,
                         op.join( output_dir, op.basename( conf._filename ) ) )

    def setup_options( self ):
        """Extend the base options with the band-gap (or, for phase-velocity
        runs, the reduced phase-velocity) application options."""
        SimpleApp.setup_options( self )

        if self.options.phase_velocity:
            process_options = AcousticBandGapsApp.process_options_pv
        else:
            process_options = AcousticBandGapsApp.process_options
        self.app_options += process_options( self.conf.options )
    def call( self ):
        """Run the application: solve the eigenvalue problem, then detect
        band gaps and/or analyze dispersion, plotting when requested.

        In parametric runs, cached data (homogenized coefficients,
        Christoffel acoustic tensor and eigenvalue problem solution) are
        cleared according to 'clear_cache' application options.

        Example:

        clear_cache = {'cached_christoffel' : True, 'cached_evp' : True}
        """
        options = self.options

        # Drop requested caches. NOTE(review): iteritems() is Python 2 only.
        for key, val in self.app_options.clear_cache.iteritems():
            if val and key.startswith('cached_'):
                setattr(self, key, None)

        if options.phase_velocity:
            # No band gaps in this case.
            return self.compute_phase_velocity()

        evp = self.solve_eigen_problem()
        self.fix_eig_range( evp.eigs.shape[0] )

        if options.detect_band_gaps:
            bg = detect_band_gaps( self.problem, evp.kind,
                                   evp.eigs_rescaled, evp.eig_vectors,
                                   self.app_options, self.conf.funmod )

            if options.plot:
                plot_range, teigs = transform_plot_data( bg.logs.eigs,
                                                         bg.opts.plot_transform,
                                                         self.conf.funmod )
                plot_rsc = bg.opts.plot_rsc
                plot_opts = bg.opts.plot_options
                plot_labels = bg.opts.plot_labels

                plt.rcParams.update( plot_rsc['params'] )

                # Gap rectangles first, then the eigenvalue logs on top.
                fig = plot_gaps( 1, plot_rsc, bg.gaps, bg.kinds,
                                 bg.freq_range_margins, plot_range,
                                 clear = True )
                fig = plot_logs( 1, plot_rsc, plot_labels, bg.logs.freqs, teigs,
                                 bg.valid[bg.eig_range],
                                 bg.freq_range_initial,
                                 plot_range, False,
                                 show_legend = plot_opts['legend'],
                                 new_axes = True )

                fig_name = bg.opts.fig_name
                if fig_name is not None:
                    fig.savefig( fig_name )

                if plot_opts['show']:
                    plt.show()

        elif options.analyze_dispersion:
            christoffel, iw_dir = self.compute_cat(ret_iw_dir=True)

            bg = detect_band_gaps( self.problem, evp.kind,
                                   evp.eigs_rescaled, evp.eig_vectors,
                                   self.app_options, self.conf.funmod,
                                   christoffel = christoffel )

            output( 'computing polarization angles...' )
            pas = compute_polarization_angles( iw_dir, bg.logs.eig_vectors )
            output( '...done' )

            bg.polarization_angles = pas

            output( 'computing phase velocity...' )
            bg.phase_velocity = self.compute_phase_velocity()
            output( '...done' )

            if options.plot:
                plot_rsc = bg.opts.plot_rsc
                plot_opts = bg.opts.plot_options
                plt.rcParams.update( plot_rsc['params'] )

                # Figure 1: polarization angles.
                aux = transform_plot_data( pas,
                                           bg.opts.plot_transform_angle,
                                           self.conf.funmod )
                plot_range, pas = aux

                plot_labels = bg.opts.plot_labels_angle

                fig = plot_gaps( 1, plot_rsc, bg.gaps, bg.kinds,
                                 bg.freq_range_margins, plot_range,
                                 clear = True )
                fig = plot_logs( 1, plot_rsc, plot_labels, bg.logs.freqs, pas,
                                 bg.valid[bg.eig_range],
                                 bg.freq_range_initial,
                                 plot_range, False,
                                 show_legend = plot_opts['legend'],
                                 new_axes = True )

                fig_name = bg.opts.fig_name_angle
                if fig_name is not None:
                    fig.savefig( fig_name )

                # Figure 2: wave numbers.
                aux = transform_plot_data( bg.logs.eigs,
                                           bg.opts.plot_transform_wave,
                                           self.conf.funmod )
                plot_range, teigs = aux

                plot_labels = bg.opts.plot_labels_wave

                fig = plot_gaps( 2, plot_rsc, bg.gaps, bg.kinds,
                                 bg.freq_range_margins, plot_range,
                                 clear = True )
                fig = plot_logs( 2, plot_rsc, plot_labels, bg.logs.freqs, teigs,
                                 bg.valid[bg.eig_range],
                                 bg.freq_range_initial,
                                 plot_range, False,
                                 show_legend = plot_opts['legend'],
                                 new_axes = True )

                fig_name = bg.opts.fig_name_wave
                if fig_name is not None:
                    fig.savefig( fig_name )

                if plot_opts['show']:
                    plt.show()
        else:
            bg = None

        return evp, bg
    def fix_eig_range( self, n_eigs ):
        """Normalize app_options.eig_range against the actual number of
        computed eigenvalues; a negative upper bound counts from the end."""
        eig_range = get_default( self.app_options.eig_range, (0, n_eigs) )
        if eig_range[-1] < 0:
            # NOTE(review): mutates the user-supplied sequence in place, so
            # it assumes a list (a tuple here would raise) - confirm.
            eig_range[-1] += n_eigs + 1

        assert_( eig_range[0] < (eig_range[1] - 1) )
        assert_( eig_range[1] <= n_eigs )
        self.app_options.eig_range = eig_range
    def solve_eigen_problem( self, ofn_trunk = None, post_process_hook = None ):
        """Assemble and solve the generalized eigenvalue problem, save the
        requested eigenvectors to VTK and the eigenvalues to a text file,
        and return an evp Struct (cached on the instance for reuse)."""
        if self.cached_evp is not None:
            return self.cached_evp

        problem = self.problem
        ofn_trunk = get_default( ofn_trunk, problem.ofn_trunk,
                                 'output file name trunk missing!' )
        post_process_hook = get_default( post_process_hook,
                                         self.post_process_hook )
        conf = self.conf

        eig_problem = self.app_options.eig_problem
        if eig_problem in ['simple', 'simple_liquid']:
            # Stiffness (lhs) and mass (rhs) matrices assembled directly.
            problem.set_equations( conf.equations )
            problem.time_update()

            mtx_a = problem.evaluate(conf.equations['lhs'], mode='weak',
                                     auto_init=True, dw_mode='matrix')

            mtx_m = problem.evaluate(conf.equations['rhs'], mode='weak',
                                     dw_mode='matrix')

        elif eig_problem == 'schur':
            # A = K + B^T D^{-1} B.
            mtx = assemble_by_blocks( conf.equations, self.problem,
                                      ebcs = conf.ebcs,
                                      epbcs = conf.epbcs )
            problem.set_equations( conf.equations )
            problem.time_update()

            ls = Solver.any_from_conf( problem.ls_conf,
                                       presolve = True, mtx = mtx['D'] )

            mtx_b, mtx_m = mtx['B'], mtx['M']
            mtx_dib = nm.empty( mtx_b.shape, dtype = mtx_b.dtype )
            # Solve D x = B[:,ic] column-by-column -> D^{-1} B.
            # NOTE(review): xrange() is Python 2 only.
            for ic in xrange( mtx_b.shape[1] ):
                mtx_dib[:,ic] = ls( mtx_b[:,ic].toarray().squeeze() )
            mtx_a = mtx['K'] + mtx_b.T * mtx_dib

        else:
            raise NotImplementedError

##     from sfepy.base.plotutils import spy, plt
##     spy( mtx_b, eps = 1e-12 )
##     plt.show()
##     mtx_a.save( 'a.txt', format='%d %d %.12f\n' )
##     mtx_b.save( 'b.txt', format='%d %d %.12f\n' )
##     pause()

        output( 'computing resonance frequencies...' )
        tt = [0]

        # The dense eigensolvers used below require dense arrays.
        if isinstance( mtx_a, sc.sparse.spmatrix ):
            mtx_a = mtx_a.toarray()
        if isinstance( mtx_m, sc.sparse.spmatrix ):
            mtx_m = mtx_m.toarray()

        eigs, mtx_s_phi = eig(mtx_a, mtx_m, return_time=tt,
                              method=self.app_options.eigensolver)
        # Clip tiny negative eigenvalues caused by round-off.
        eigs[eigs<0.0] = 0.0
        output( '...done in %.2f s' % tt[0] )
        output( 'original eigenfrequencies:' )
        output( eigs )
        opts = self.app_options
        epsilon2 = opts.scale_epsilon * opts.scale_epsilon
        eigs_rescaled = (opts.elasticity_contrast / epsilon2) * eigs
        output( 'rescaled eigenfrequencies:' )
        output( eigs_rescaled )
        output( 'number of eigenfrequencies: %d' % eigs.shape[0] )

        try:
            assert_( nm.isfinite( eigs ).all() )
        except ValueError:
            debug()

        # B-orthogonality check.
##         print nm.dot( mtx_s_phi[:,5], nm.dot( mtx_m, mtx_s_phi[:,5] ) )
##         print nm.dot( mtx_s_phi[:,5], nm.dot( mtx_m, mtx_s_phi[:,0] ) )
##         debug()

        n_eigs = eigs.shape[0]

        variables = problem.get_variables()

        # Expand stripped eigenvectors to the full DOF vector.
        mtx_phi = nm.empty( (variables.di.ptr[-1], mtx_s_phi.shape[1]),
                            dtype = nm.float64 )

        make_full = variables.make_full_vec
        if eig_problem in ['simple', 'simple_liquid']:
            for ii in xrange( n_eigs ):
                mtx_phi[:,ii] = make_full( mtx_s_phi[:,ii] )
            eig_vectors = mtx_phi

        elif eig_problem == 'schur':
            # Update also eliminated variables.
            schur = self.app_options.schur
            primary_var = schur['primary_var']
            eliminated_var = schur['eliminated_var']

            mtx_s_phi_schur = - sc.dot( mtx_dib, mtx_s_phi )
            aux = nm.empty( (variables.adi.ptr[-1],),
                            dtype = nm.float64 )
            # NOTE(review): ``set`` shadows the builtin here.
            set = variables.set_state_part
            for ii in xrange( n_eigs ):
                set( aux, mtx_s_phi[:,ii], primary_var, stripped = True )
                set( aux, mtx_s_phi_schur[:,ii], eliminated_var,
                     stripped = True )

                mtx_phi[:,ii] = make_full( aux )

            indx = variables.get_indx( primary_var )
            eig_vectors = mtx_phi[indx,:]

        # Save only the first save[0] and last save[1] eigenvectors.
        save = self.app_options.save
        out = {}
        for ii in xrange( n_eigs ):
            if (ii >= save[0]) and (ii < (n_eigs - save[1])): continue
            aux = problem.state_to_output( mtx_phi[:,ii] )
            for name, val in aux.iteritems():
                out[name+'%03d' % ii] = val

        if post_process_hook is not None:
            out = post_process_hook( out, problem, mtx_phi )

        problem.domain.mesh.write( ofn_trunk + '.vtk', io = 'auto', out = out )

        fd = open( ofn_trunk + '_eigs.txt', 'w' )
        eigs.tofile( fd, ' ' )
        fd.close()

        evp = Struct( kind = eig_problem,
                      eigs = eigs, eigs_rescaled = eigs_rescaled,
                      eig_vectors = eig_vectors )
        self.cached_evp = evp

        return evp
    def eval_homogenized_coefs( self ):
        """Return homogenized material coefficients, computing them on the
        first call and caching the result on the instance."""
        if self.cached_coefs is not None:
            return self.cached_coefs

        opts = self.app_options

        if opts.homogeneous:
            # Homogeneous medium: read the tensors off the material directly.
            # NOTE(review): dict.keys()[0] is Python 2 only (keys() view is
            # not indexable on Python 3).
            rtm = opts.region_to_material
            mat_region = rtm.keys()[0]
            mat_name = rtm[mat_region]

            self.problem.update_materials()

            mat = self.problem.materials[mat_name]
            coefs = mat.get_data( mat_region, 0, opts.tensor_names )

        else:
            # Heterogeneous medium: run the homogenization engine on a
            # derived problem configuration.
            dc = opts.dispersion_conf
            dconf = ProblemConf.from_dict( dc['input'], dc['module'] )

            dconf.materials = self.conf.materials
            dconf.fe = self.conf.fe
            dconf.regions.update( self.conf.regions )
            dconf.options['output_dir'] = self.problem.output_dir

            volume = opts.volume(self.problem, 'Y')
            problem = ProblemDefinition.from_conf(dconf, init_equations=False)
            he = HomogenizationEngine( problem, self.options, volume = volume )
            coefs = he()

##         print coefs
##         pause()
        output.prefix = self.output_prefix

        self.cached_coefs = coefs

        return coefs
    def compute_cat( self, ret_iw_dir=False ):
        """Compute the Christoffel acoustic tensor, given the incident wave
        direction.

        The tensor is cached on the instance; with ``ret_iw_dir`` the
        normalized incident wave direction is returned as well.
        """
        opts = self.app_options
        iw_dir = nm.array( opts.incident_wave_dir, dtype = nm.float64 )

        dim = self.problem.get_dim()
        assert_( dim == iw_dir.shape[0] )

        iw_dir = iw_dir / nla.norm( iw_dir )

        if self.cached_christoffel is not None:
            # NOTE(review): the cache is not keyed on iw_dir (cached_iw_dir
            # is initialised in __init__ but never consulted), so a changed
            # wave direction within one run would reuse a stale tensor
            # unless 'cached_christoffel' is listed in clear_cache.
            christoffel = self.cached_christoffel

        else:
            coefs = self.eval_homogenized_coefs()
            christoffel = compute_cat( coefs, iw_dir,
                                       self.app_options.dispersion )
            report_iw_cat( iw_dir, christoffel )

            self.cached_christoffel = christoffel

        if ret_iw_dir:
            return christoffel, iw_dir

        else:
            return christoffel
    def compute_phase_velocity( self ):
        """Compute phase velocities from the Christoffel tensor and the
        average density: v = 1 / sqrt(eig), where eig are the generalized
        eigenvalues of (rho*I, Gamma)."""
        from sfepy.homogenization.phono import compute_density_volume_info
        opts = self.app_options
        dim = self.problem.domain.mesh.dim

        christoffel = self.compute_cat()

        self.problem.update_materials()
        dv_info = compute_density_volume_info( self.problem, opts.volume,
                                               opts.region_to_material )
        output( 'average density:', dv_info.average_density )

        eye = nm.eye( dim, dim, dtype = nm.float64 )
        mtx_mass = eye * dv_info.average_density

        meigs, mvecs = eig( mtx_mass, mtx_b = christoffel,
                            eigenvectors = True, method = opts.eigensolver )
        phase_velocity = 1.0 / nm.sqrt( meigs )

        return phase_velocity
usage = """%prog [options] filename_in"""

# Option help strings used by main(); the name intentionally matches the
# original script even though it shadows the ``help`` builtin at module level.
help = {
    'filename' :
    'basename of output file(s) [default: <basename of input file>]',
    'detect_band_gaps' :
    'detect frequency band gaps',
    'analyze_dispersion' :
    'analyze dispersion properties (low frequency domain)',
    'plot' :
    'plot frequency band gaps, assumes -b',
    # Typo fix: "independet" -> "independent".
    'phase_velocity' :
    'compute phase velocity (frequency-independent mass only)'
}
def main():
    """Command-line driver: parse options, load the problem configuration
    and run the AcousticBandGapsApp (optionally under a parametric hook)."""
    parser = OptionParser(usage = usage, version = "%prog " + sfepy.__version__)
    parser.add_option( "-o", "", metavar = 'filename',
                       action = "store", dest = "output_filename_trunk",
                       default = None, help = help['filename'] )
    parser.add_option( "-b", "--band-gaps",
                       action = "store_true", dest = "detect_band_gaps",
                       default = False, help = help['detect_band_gaps'] )
    parser.add_option( "-d", "--dispersion",
                       action = "store_true", dest = "analyze_dispersion",
                       default = False, help = help['analyze_dispersion'] )
    parser.add_option( "-p", "--plot",
                       action = "store_true", dest = "plot",
                       default = False, help = help['plot'] )
    parser.add_option( "--phase-velocity",
                       action = "store_true", dest = "phase_velocity",
                       default = False, help = help['phase_velocity'] )

    options, args = parser.parse_args()

    if options.plot:
        # -p needs matplotlib; it also implies -b unless -d was given.
        if plt is None:
            output( 'matplotlib.pyplot cannot be imported, ignoring option -p!' )
            options.plot = False
        elif options.analyze_dispersion == False:
            options.detect_band_gaps = True

    if (len( args ) == 1):
        filename_in = args[0];
    else:
        parser.print_help(),
        return

    required, other = get_standard_keywords()
    required.remove( 'solver_[0-9]+|solvers' )
    if options.phase_velocity:
        # Phase-velocity-only runs need no BCs and no equations.
        required.remove( 'ebc_[0-9]+|ebcs' )
        required.remove( 'equations' )
    conf = ProblemConf.from_file( filename_in, required, other )

    app = AcousticBandGapsApp( conf, options, 'eigen:' )
    opts = conf.options
    if hasattr( opts, 'parametric_hook' ): # Parametric study.
        parametric_hook = getattr( conf, opts.parametric_hook )
        app.parametrize( parametric_hook )
    app()
if __name__ == '__main__':
    # Commented-out ad-hoc matrix experiments kept from development:
##     mtx_k = io.read_sparse_matrix_hdf5( '1todo/K.h5', output_format = 'csr' )
##     print mtx_k.__repr__()
##     mtx_m = io.read_sparse_matrix_hdf5( '1todo/M.h5', output_format = 'csr' )
##     print mtx_m.__repr__()
##     mtx_k.save( 'k.txt', format='%d %d %.12f\n' )
##     mtx_m.save( 'm.txt', format='%d %d %.12f\n' )
##     eigs, mtx_s_phi = eig( mtx_k.toarray(), mtx_m.toarray(),
##                            print_time = True )
##     print eigs
##     eigs, aux = eig( mtx_m.toarray(),
##                      print_time = True )
##     print eigs
##     pause()
    main()
| |
# -*- coding: utf-8 -*-
# Copyright Yassine Lamgarchal <lamgarchal.yassine@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import pytest
import signal
import six
import zmq
from chillaxd.raft import message
from chillaxd.raft import peer
from chillaxd.raft import server
from chillaxd.raft import serverstate
class TestServer(object):
    """Unit tests for chillaxd.raft.server.RaftServer."""

    # Baseline constructor arguments: one public and one private endpoint
    # plus two remote peers.
    _DEFAULT_ARGUMENTS = {'public_endpoint': '127.0.0.1:27001',
                          'private_endpoint': '127.0.0.1:2406',
                          'remote_endpoints': ['127.0.0.1:2407',
                                               '127.0.0.1:2408'],
                          'leader_heartbeat_interval': 50,
                          'min_election_timeout': 200,
                          'max_election_timeout': 300}

    def setup_method(self, method):
        """Build a fresh RaftServer with mocked peers and state before each
        test."""
        self.server = server.RaftServer(**TestServer._DEFAULT_ARGUMENTS)
        self.server._remote_peers = mock.MagicMock()
        self.server._server_state = mock.Mock()
        self.server._server_state.term.return_value = 0
        self.server._apply_committed_log_entries_to_state_machine = \
            mock.Mock()
        # Any peer lookup returns the same Peer-spec mock.
        self.server._remote_peers.__getitem__.return_value = \
            mock.Mock(spec=peer.Peer)
    @mock.patch("chillaxd.raft.server.zmq.eventloop.ioloop.ZMQIOLoop",
                spec=zmq.eventloop.ioloop.ZMQIOLoop)
    @mock.patch("chillaxd.raft.server.zmq.Context", spec=zmq.Context)
    def test_setup(self, m_zmq_context, m_zmq_ioloop):
        """_setup() creates the peers, indexes, io-loop handlers and the two
        periodic tasks (election timeout + heartbeat)."""
        self.server._remote_peers = {}
        self.server._setup()
        m_zmq_context.assert_called_once_with()
        # One index initialisation per remote endpoint.
        assert self.server._server_state.init_indexes.call_count == 2
        assert len(self.server._remote_peers) == 2
        ioloop_instance = m_zmq_ioloop().instance
        ioloop_instance.assert_called_once_with()
        # Handlers for the command socket and the consensus socket.
        assert ioloop_instance().add_handler.call_count == 2
        assert isinstance(self.server.checking_leader_timeout,
                          zmq.eventloop.ioloop.PeriodicCallback)
        assert (self.server.checking_leader_timeout.callback ==
                self.server._election_timeout_task)
        assert (self.server.heartbeating.callback ==
                self.server.broadcast_append_entries)
    @mock.patch("chillaxd.raft.server.signal", spec=signal)
    @mock.patch(
        "chillaxd.raft.server.zmq.eventloop.ioloop.PeriodicCallback",
        spec=zmq.eventloop.ioloop.PeriodicCallback)
    @mock.patch("chillaxd.raft.server.zmq.eventloop.ioloop.ZMQIOLoop",
                spec=zmq.eventloop.ioloop.ZMQIOLoop)
    @mock.patch("chillaxd.raft.server.zmq.Context", spec=zmq.Context)
    def test_start(self, m_zmq_context, m_zmq_ioloop, m_zmq_periodic_callback,
                   m_signal):
        """start() binds both sockets, installs signal handlers, starts all
        peers and the io loop."""
        self.server.start()
        assert m_zmq_context().socket().bind.call_count == 2
        assert m_signal.signal.call_count == 2
        for remote_server in six.itervalues(self.server._remote_peers):
            assert remote_server._is_started is True
        assert self.server._is_started is True
        m_zmq_ioloop().instance().start.assert_called_once_with()
    @mock.patch("chillaxd.raft.server.signal", spec=signal)
    @mock.patch(
        "chillaxd.raft.server.zmq.eventloop.ioloop.PeriodicCallback",
        spec=zmq.eventloop.ioloop.PeriodicCallback)
    @mock.patch("chillaxd.raft.server.zmq.eventloop.ioloop.ZMQIOLoop",
                spec=zmq.eventloop.ioloop.ZMQIOLoop)
    @mock.patch("chillaxd.raft.server.zmq.Context", spec=zmq.Context)
    def test_stop(self, m_zmq_context, m_zmq_ioloop, m_zmq_periodic_callback,
                  m_signal):
        """stop() stops the periodic tasks, closes the sockets, stops the
        peers, destroys the zmq context and halts the io loop."""
        self.server.start()
        # Swap in mocks so stop()'s teardown calls can be asserted.
        self.server.checking_leader_timeout = mock.Mock()
        self.server.heartbeating = mock.Mock()
        self.server._socket_for_commands = mock.Mock()
        self.server._socket_for_consensus = mock.Mock()
        self.server.stop()
        (self.server.checking_leader_timeout.stop.
            assert_called_once_with())
        self.server.heartbeating.stop.assert_called_once_with()
        self.server._socket_for_commands.close.assert_called_once_with()
        self.server._socket_for_consensus.close.assert_called_once_with()
        for remote_server in six.itervalues(self.server._remote_peers):
            assert remote_server._is_started is False
        m_zmq_context().destroy.assert_called_once_with(linger=0)
        m_zmq_ioloop().instance().stop.assert_called_once_with()
        assert self.server._is_started is False
    @mock.patch(
        "chillaxd.raft.server.zmq.eventloop.ioloop.PeriodicCallback",
        spec=zmq.eventloop.ioloop.PeriodicCallback)
    @mock.patch("chillaxd.raft.server.zmq.eventloop.ioloop.ZMQIOLoop",
                spec=zmq.eventloop.ioloop.ZMQIOLoop)
    @mock.patch("chillaxd.raft.server.zmq.Context", spec=zmq.Context)
    def test_handle_signals(self, zmq_context, zmq_ioloop,
                            zmq_periodic_callback):
        """The signal handler stops a started server."""
        self.server.start()
        assert self.server._is_started is True
        self.server._handle_signals(None, None)
        assert self.server._is_started is False

    def test_is_standalone(self):
        """A server is standalone iff it has no remote endpoints."""
        assert self.server.is_standalone() is False
        self.server._remote_endpoints = {}
        assert self.server.is_standalone() is True
    def test_process_internal_raft_message(self):
        """_process_internal_message() dispatches each of the four raft
        message types to the matching handler with the decoded payload."""
        self.server._process_append_entry_request = mock.Mock()
        self.server._process_append_entry_response = mock.Mock()
        self.server._process_request_vote = mock.Mock()
        self.server._process_request_vote_response = mock.Mock()
        mock_socket = mock.Mock(spec=zmq.sugar.socket.Socket)

        # Append entry request.
        aereq = (1, 2, 3, 4, ())
        aereq_packed = message.build_append_entry_request(*aereq)
        mock_socket.recv_multipart.return_value = ("identifier", aereq_packed)
        self.server._process_internal_message(mock_socket, zmq.POLLIN)
        self.server._process_append_entry_request.assert_called_once_with(
            "identifier", *aereq)

        # Append entry response.
        aeresp = (1, True, 0, None)
        aeresp_packed = message.build_append_entry_response(*aeresp)
        mock_socket.recv_multipart.return_value = ("identifier", aeresp_packed)
        self.server._process_internal_message(mock_socket, zmq.POLLIN)
        self.server._process_append_entry_response.\
            assert_called_once_with("identifier", *aeresp)

        # Request vote.
        rv = (1, 2, 3)
        rv_packed = message.build_request_vote(*rv)
        mock_socket.recv_multipart.return_value = ("identifier", rv_packed)
        self.server._process_internal_message(mock_socket, zmq.POLLIN)
        self.server._process_request_vote.assert_called_once_with(
            "identifier", *rv)

        # Request vote response.
        rvresp = (0, False)
        rvresp_packed = message.build_request_vote_response(*rvresp)
        mock_socket.recv_multipart.return_value = ("identifier", rvresp_packed)
        self.server._process_internal_message(mock_socket, zmq.POLLIN)
        self.server._process_request_vote_response.\
            assert_called_once_with("identifier", *rvresp)
    def test_process_append_entry_request_stale_term(self):
        """A request with a term older than ours is rejected."""
        ae_req = (-1, 2, 3, 4, ())
        self.server._process_append_entry_request("peer_id", *ae_req)
        ae_response = message.build_append_entry_response(
            self.server._server_state.term(), False, None, None)
        self.server._remote_peers["peer_id"].send_message.\
            assert_called_once_with(ae_response)
        self.server._remote_peers["peer_id"].send_message.reset_mock()

    def test_process_append_entry_request_outdated_term(self):
        """A request with a newer term makes us follow the sender, but the
        entry itself is still rejected (log induction not satisfied)."""
        ae_req = (1, 2, 3, 4, ())
        self.server._process_append_entry_request("peer_id", *ae_req)
        self.server._server_state.switch_to_follower.\
            assert_called_once_with(1, "peer_id")
        self.server._server_state.switch_to_follower.reset_mock()
        ae_response = message.build_append_entry_response(
            self.server._server_state.term(), False, None, None)
        self.server._remote_peers["peer_id"].send_message.\
            assert_called_once_with(ae_response)
        self.server._remote_peers["peer_id"].send_message.reset_mock()
def test_process_append_entry_request_as_leader(self):
ae_req = (0, 0, 0, 4, ())
self.server._server_state.is_leader.return_value = True
self.server._process_append_entry_request("peer_id", *ae_req)
self.server._server_state.switch_to_follower.\
assert_called_once_with(0, None)
self.server._server_state.switch_to_follower.reset_mock()
    def test_process_append_entry_request_as_follower(self):
        """A follower accepts the request and records the sender as leader."""
        ae_req = (0, 0, 0, 4, ())
        self.server._server_state.is_leader.return_value = False
        self.server._server_state.commit_index.return_value = 4
        self.server._process_append_entry_request("peer_id", *ae_req)
        # The sender becomes our known leader.
        self.server._server_state.update_leader.assert_called_once_with(
            "peer_id")
        self.server._server_state.update_leader.reset_mock()
        # A success response (success=True) goes back to the leader.
        ae_response = message.build_append_entry_response(
            self.server._server_state.term(), True, 0, None)
        self.server._remote_peers["peer_id"].\
            send_message.assert_called_once_with(ae_response)
        self.server._remote_peers["peer_id"].\
            send_message.reset_mock()
    def test_process_append_entry_request_as_candidate(self):
        """A candidate yields to the sender: follows it and answers success."""
        ae_req = (0, 0, 0, 4, ())
        self.server._server_state.is_leader.return_value = False
        self.server._server_state.is_candidate.return_value = True
        self.server._server_state.commit_index.return_value = 4
        self.server._process_append_entry_request("peer_id", *ae_req)
        self.server._server_state.update_leader.assert_called_once_with(
            "peer_id")
        self.server._server_state.update_leader.reset_mock()
        # Candidacy is abandoned in favor of the sender.
        self.server._server_state.switch_to_follower.\
            assert_called_once_with(0, "peer_id")
        self.server._server_state.switch_to_follower.reset_mock()
        ae_response = message.build_append_entry_response(
            self.server._server_state.term(), True, 0, None)
        self.server._remote_peers["peer_id"].\
            send_message.assert_called_once_with(ae_response)
        self.server._remote_peers["peer_id"].send_message.reset_mock()
    def test_process_append_entry_request_induction_failed(self):
        """When log induction fails a denial response is returned."""
        # prev index/term of -1 cannot match the local log.
        ae_req = (0, -1, -1, 4, ())
        self.server._server_state.is_leader.return_value = False
        self.server._process_append_entry_request("peer_id", *ae_req)
        ae_response = message.build_append_entry_response(
            self.server._server_state.term(), False, None, 0)
        self.server._remote_peers["peer_id"].send_message.\
            assert_called_once_with(ae_response)
        self.server._remote_peers["peer_id"].send_message.reset_mock()
    def test_process_append_entry_request_induction_succeed(self):
        # TODO(yassine): cover the successful log-induction path.
        pass
    def test_process_append_entry_response(self):
        # TODO: not yet implemented.
        pass
    def test_process_request_vote_stale_term(self):
        """A RequestVote with a stale term is answered with a denial."""
        ae_req_message = (-1, 2, 3)
        self.server._process_request_vote("peer_id", *ae_req_message)
        rv_response_ko = message.build_request_vote_response(
            self.server._server_state.term(), False)
        self.server._remote_peers["peer_id"].send_message.\
            assert_called_once_with(rv_response_ko)
        # NOTE: resets the whole _remote_peers mock, not just send_message.
        self.server._remote_peers.reset_mock()
    def test_process_request_vote_outdated_term_grant_vote(self):
        """A newer-term RequestVote makes us step down and grant the vote."""
        ae_req_message = (2, 2, 3)
        self.server._server_state.switch_to_follower = mock.Mock()
        # Candidate's log is up to date and grant_vote accepts it.
        self.server._is_candidate_log_up_to_date = mock.Mock()
        self.server._is_candidate_log_up_to_date.return_value = True
        self.server._server_state.grant_vote = mock.Mock()
        self.server._server_state.grant_vote.return_value = True
        self.server._process_request_vote("peer_id", *ae_req_message)
        rv_response = message.build_request_vote_response(
            self.server._server_state.term(), True)
        # We followed the newer term (no leader known yet) ...
        self.server._server_state.switch_to_follower.\
            assert_called_once_with(2, None)
        # ... and answered with a granted vote.
        self.server._remote_peers["peer_id"].send_message.\
            assert_called_once_with(rv_response)
        self.server._remote_peers["peer_id"].reset_mock()
        self.server._server_state.switch_to_follower.reset_mock()
    def test_process_request_vote_deny_vote(self):
        """When grant_vote refuses, a False vote response is returned."""
        ae_req_message = (0, 1, 1)
        self.server._is_candidate_log_up_to_date = mock.Mock()
        self.server._is_candidate_log_up_to_date.return_value = True
        # The state machine denies the vote (e.g. already voted this term).
        self.server._server_state.grant_vote = mock.Mock()
        self.server._server_state.grant_vote.return_value = False
        self.server._process_request_vote("peer_id", *ae_req_message)
        rv_response_ko = message.build_request_vote_response(
            self.server._server_state.term(), False)
        self.server._remote_peers["peer_id"].send_message.\
            assert_called_once_with(rv_response_ko)
        self.server._remote_peers["peer_id"].reset_mock()
    def test_process_request_vote_response_stale_term(self):
        """A stale-term vote response is ignored (smoke test: must not raise)."""
        test_args = TestServer._DEFAULT_ARGUMENTS.copy()
        test_args["remote_endpoints"] = ["peer_id"]
        self.server = server.RaftServer(**test_args)
        self.server._remote_peers = mock.MagicMock()
        self.server._server_state.is_candidate = mock.Mock()
        self.server._server_state.is_candidate.return_value = True
        # term -1 is stale; no assertions -- the call simply has no effect.
        self.server._process_request_vote_response("peer_id", -1, True)
    def test_process_request_vote_response_outdated_term(self):
        """A vote response carrying a newer term forces a step-down."""
        # NOTE(review): is_candidate is stubbed with the _CANDIDATE constant
        # (truthy) rather than True -- presumably only truthiness matters;
        # confirm against ServerState.
        self.server._server_state.is_candidate.return_value = \
            serverstate.ServerState._CANDIDATE
        self.server._process_request_vote_response("peer_id", 1, True)
        self.server._server_state.switch_to_follower.\
            assert_called_once_with(1, None)
        self.server._server_state.switch_to_follower.reset_mock()
    def test_process_request_vote_response_granted(self):
        """Gaining a majority of granted votes promotes us to leader."""
        # One vote already held; a second vote out of two voters is a majority.
        self.server._server_state._voters = {"peer_id"}
        self.server._server_state.number_of_voters.return_value = 2
        self.server._process_request_vote_response("peer_id2", 0, True)
        self.server._server_state.add_voter.\
            assert_called_once_with("peer_id2")
        self.server._server_state.switch_to_leader.\
            assert_called_once_with()
        self.server._server_state.switch_to_leader.reset_mock()
def test_process_request_vote_response(self):
# equals term with request vote denied
self.server._process_request_vote_response("peer_id", 0, False)
assert self.server._server_state.switch_to_follower.call_count == 0
assert self.server._server_state.switch_to_follower.call_count == 0
assert self.server._server_state.switch_to_candidate.call_count == 0
    def test_broadcast_append_entries(self):
        """broadcast_append_entries is leader-only and heartbeats every peer."""
        # Not the leader: broadcasting must raise InvalidState.
        self.server._server_state.is_leader.return_value = False
        pytest.raises(serverstate.InvalidState,
                      self.server.broadcast_append_entries)
        # As leader: every remote peer receives the same heartbeat message.
        self.server._server_state.is_leader.return_value = True
        ae_heartbeat = message.build_append_entry_request(
            self.server._server_state.term(), 0, 0, 0, ())
        self.server._server_state._next_index = mock.MagicMock()
        self.server._server_state._next_index.__getitem__.return_value = 1
        self.server._remote_peers = {0: mock.Mock(), 1: mock.Mock(),
                                     2: mock.Mock()}
        self.server._server_state.commit_index.return_value = 0
        self.server._server_state.term.return_value = 0
        # Give each peer the same log position so the broadcast is a pure
        # heartbeat (no entries to replicate).
        for remote_server in six.iterkeys(self.server._remote_peers):
            ts = self.server
            ts._server_state._next_index[remote_server] = 1
            ts._log = mock.Mock()
            ts._log.prev_index_and_term_of_entry.return_value = (0, 0)
            ts._log.entries_from_index.return_value = ()
        self.server.broadcast_append_entries()
        for remote_server in range(3):
            m_remote_server = self.server._remote_peers[remote_server]
            m_remote_server.send_message.assert_called_once_with(ae_heartbeat)
    def test_election_timeout_task(self):
        """Election timeout: a lone node elects itself; with peers it
        becomes a candidate and forgets the previous leader."""
        test_args = TestServer._DEFAULT_ARGUMENTS.copy()
        test_args["remote_endpoints"] = []
        self.server = server.RaftServer(**test_args)
        self.server.checking_leader_timeout = mock.Mock()
        self.server._server_state.is_leader = mock.Mock()
        self.server._server_state.is_leader.return_value = True
        # A leader must not run the election timeout task at all.
        pytest.raises(serverstate.InvalidState,
                      self.server._election_timeout_task)
        self.server._server_state.is_leader.return_value = False
        # with one node
        self.server._state = serverstate.ServerState._FOLLOWER
        self.server._server_state.switch_to_leader = mock.Mock()
        self.server._server_state.switch_to_candidate = mock.Mock()
        self.server._election_timeout_task()
        # Alone in the cluster: win immediately, no candidacy needed.
        self.server._server_state.switch_to_leader.\
            assert_called_once_with()
        assert self.server._server_state.switch_to_candidate.call_count == 0
        # with several nodes
        test_args["remote_endpoints"] = ["127.0.0.1:2407", "127.0.0.1:2408"]
        self.server = server.RaftServer(**test_args)
        self.server._state = serverstate.ServerState._FOLLOWER
        self.server._server_state.switch_to_leader = mock.Mock()
        self.server._server_state.switch_to_candidate = mock.Mock()
        self.server._server_state.update_leader = mock.Mock()
        self.server.checking_leader_timeout = mock.Mock()
        self.server._election_timeout_task()
        # With peers an election starts: candidate, not leader, and the
        # previously-known leader is cleared.
        self.server._server_state.switch_to_candidate.\
            assert_called_once_with()
        self.server._server_state.switch_to_candidate.reset_mock()
        assert self.server._server_state.switch_to_leader.call_count == 0
        self.server._server_state.switch_to_leader.reset_mock()
        self.server._server_state.update_leader.\
            assert_called_once_with(None)
| |
#!/usr/bin/env python
#
# Copyright (c) Greenplum Inc 2008. All Rights Reserved.
#
'''
Greenplum hostcache file facilities.
This Module contains some helper functions for mapping network
interface names used in gp_configuration to a collapsed set
of hostnames.
example: sdw1-1, sdw1-2, sdw1-3, and sdw1-4 are all located
on sdw1.
The results of this collapsing will be stored in a file:
~/.gphostcache with entries of the form:
sdw1-1:sdw1
sdw1-2:sdw1
sdw1-3:sdw1
A big complication here is that we want to group all of the
segment databases for sdw1-1 thru sdw1-4 together but we can't
use the name returned by `hostname` as it's not guaranteed to
have a trusted ssh environment setup for it.
'''
import os
from gppylib import gparray
from gppylib.commands import base
from gppylib.commands import unix
from gppylib import gplog
from gppylib.utils import readAllLinesFromFile
# Location of the per-user host cache file: ~/.gphostcache
FILEDIR=os.path.expanduser("~")
FILENAME=".gphostcache"
CACHEFILE=FILEDIR + "/" + FILENAME

logger = gplog.get_default_logger()
class GpHost:
    """A physical host together with the segment databases located on it."""

    def __init__(self, hostname):
        self.hostname = hostname
        self.dbs = []

    def addDB(self, db):
        """Attach one segment database to this host."""
        self.dbs.append(db)

    def __str__(self):
        datadirs = ','.join(db.datadir for db in self.dbs)
        return "Host %s has Datadirs: [%s]" % (self.hostname, datadirs)
class GpInterfaceToHostNameCache:
    """Maps network interface names to their collapsed hostnames.

    Answers are cached in ~/.gphostcache so that repeated runs avoid
    remote `hostname` lookups.
    """

    def __init__(self, pool, interfacesToLookup, currentHostNameAnswersForInterfaces):
        """
        pool -- WorkerPool used to run remote hostname commands
        interfacesToLookup -- interface names to resolve
        currentHostNameAnswersForInterfaces -- tentative hostnames, parallel
            to interfacesToLookup; entries may be None when unknown
        """
        self.__hostCache = {}  # interface -> hostname

        # Read the .gphostcache file if it exists
        if os.path.isfile(CACHEFILE):
            try:
                for line in readAllLinesFromFile(CACHEFILE, stripLines=True, skipEmptyLines=True):
                    if line[0] == '#':  # okay check because empty lines are skipped
                        continue
                    arr = line.split(':')
                    if len(arr) == 2 and len(arr[0]) > 0 and len(arr[1]) > 0:
                        (interface, hostname) = arr
                        self.__hostCache[interface.strip()] = hostname.strip()
            # BUG FIX: "except Exception, e" is invalid on Python 3; the
            # "as" form below is accepted by Python 2.6+ as well.
            except Exception as e:
                logger.warn("Error reading file '%s': %s" % (CACHEFILE, str(e)))

        #
        # check to see which values are inconsistent with the cache and need lookup again
        #
        inconsistent = []
        for i in range(len(interfacesToLookup)):
            interface = interfacesToLookup[i]
            hostname = currentHostNameAnswersForInterfaces[i]

            # If we don't have this mapping yet set it, otherwise we simply
            # validate consistency.
            if interface not in self.__hostCache:
                self.__hostCache[interface] = hostname
            elif hostname is None:
                # external source did not have a tentative answer, the first
                # case above should have fired for the first entry on this
                # interface and will force us to lookup the hostname.
                # Additional hits on the interface can be ignored.
                pass
            elif self.__hostCache[interface] is None:
                self.__hostCache[interface] = hostname
            elif self.__hostCache[interface] != hostname:
                logger.warn("inconsistent hostname '%s' for interface '%s' and expected hostname '%s'" % \
                            (self.__hostCache[interface], interface, hostname))
                inconsistent.append(interface)

        # Clear out any inconsistent hostnames to force a recheck.
        for i in inconsistent:
            self.__hostCache[i] = None

        # Lookup any hostnames that we don't have answers for:
        pending_cmds = {}
        for interface in self.__hostCache:
            if self.__hostCache[interface] is None:
                logger.debug("hostname lookup for %s" % interface)
                cmd = unix.Hostname('host lookup', ctxt=base.REMOTE, remoteHost=interface)
                pool.addCommand(cmd)
                pending_cmds[interface] = cmd

        # Fetch the results out of the WorkerPool
        if len(pending_cmds) > 0:
            pool.join()
            for interface in pending_cmds:
                cmd = pending_cmds[interface]
                # Make sure the command completed successfully
                if cmd.get_results().rc != 0:
                    logger.warn("Failed to resolve hostname for %s" % interface)
                    continue
                self.__hostCache[interface] = cmd.get_hostname()
            pool.empty_completed_items()

        # Try to update the hostcache file if we executed any hostname commands
        if len(pending_cmds) > 0:
            try:
                # BUG FIX: use a context manager so the file is closed even
                # when a write raises (the original leaked the handle).
                with open(CACHEFILE, 'w') as fp:
                    for interface in sorted(self.__hostCache.keys()):
                        hostname = self.__hostCache[interface]
                        # skip any dangling references we still have
                        if not hostname:
                            continue
                        fp.write("%s:%s\n" % (interface, hostname))
            except Exception as e:
                logger.warn(str(e))
                logger.warn("Failed to write file '%s'" % CACHEFILE)

    #
    # returns the cached host name for the interface
    #
    # should only be called for interfaces that were passed to the constructor
    #
    # Will return None if lookup of the hostname was not possible
    #
    def getHostName(self, interface):
        return self.__hostCache[interface]
class GpHostCache:
    """Groups the segment databases of a Greenplum array by physical host.

    Uses GpInterfaceToHostNameCache to collapse per-interface addresses
    (sdw1-1, sdw1-2, ...) down to one hostname per machine.
    """

    def __init__(self, gparray, pool, skiplist=None, withMasters=False):
        """
        gparray -- the array describing the cluster (note: this parameter
                   shadows the gppylib.gparray module inside this method)
        pool -- WorkerPool used for remote hostname lookups
        skiplist -- segment dbs to exclude from the mapping
        withMasters -- also include master/standby dbs, not just segments
        """
        # BUG FIX: the original used a mutable default argument
        # (skiplist=[]); use the None sentinel instead.
        if skiplist is None:
            skiplist = []
        self.gparray = gparray
        self.gphost_map = {}  # hostname -> GpHost

        # these are any db's that should be skipped.
        skipmap = {}
        for db in skiplist:
            skipmap[db.getSegmentDbId()] = db

        # Go through the gparray and build list of interface and hostname that
        # will be used to build the GpInterfaceToHostNameCache
        #
        # As of Greenplum 4.0 we have both interface and hostname information
        # in the catalog, so the gparray should be able to supply all of the
        # information.
        #
        # However if we have initialized from an old catalog, or from a flatfile
        # then the interface->hostname mapping may not be available.  In this
        # case we still want to do the full pass first so that we don't lookup
        # a given interface more than once.
        interfaces = []
        hostnames = []

        # Get list of segment dbs, optionally including masters
        if withMasters:
            dblist = self.gparray.getDbList()
        else:
            dblist = self.gparray.getSegDbList()

        # build the interface->host mapping
        for db in dblist:
            if db.getSegmentDbId() not in skipmap:
                interfaces.append(db.getSegmentAddress())
                hostnames.append(db.getSegmentHostName())
        interfaceToHostMap = \
            GpInterfaceToHostNameCache(pool, interfaces, hostnames)

        # Build up the GpHosts using our interface->hostname lookup
        for db in dblist:
            # skip this dbid ?
            if db.getSegmentDbId() in skipmap:
                continue

            interface = db.getSegmentAddress()
            hostname = interfaceToHostMap.getHostName(interface)

            # If the db didn't have hostname already set, (it was loaded from
            # an old catalog?) set it based on the hostname from the interface
            # lookup.
            if db.getSegmentHostName() is None:
                db.setSegmentHostName(hostname)

            if hostname not in self.gphost_map:
                self.gphost_map[hostname] = GpHost(hostname)
            self.gphost_map[hostname].addDB(db)

    ######
    def log_contents(self):
        """Log the host --> datadirs mapping at debug level."""
        logger.debug("Construct host-->datadirs mapping:")
        entries = []
        for key in self.gphost_map.keys():
            gphost = self.gphost_map[key]
            entries.append(str(gphost))
        logger.debug('\n'.join(entries))

    ######
    def get_hostnames(self):
        """Return the collapsed hostnames (possibly rewritten by ping_hosts)."""
        hosts = []
        for key in self.gphost_map.keys():
            gphost = self.gphost_map[key]
            hosts.append(gphost.hostname)
        return hosts

    ######
    def get_hosts(self):
        """Return the GpHost objects."""
        return self.gphost_map.values()

    ######
    def get_host(self, hostname):
        """Return the GpHost for hostname; raise if it is unknown."""
        if hostname in self.gphost_map:
            return self.gphost_map[hostname]
        else:
            raise Exception("map does not contain host: %s" % hostname)

    #####
    def ping_hosts(self, pool):
        '''
        go through all of the gphosts and try and ping all of the hosts.
        If any fail then go back to using the interface names for those
        segments.

        throws an Exception if still can't ping on the interface names.
        '''
        failed_segs = []
        for key in self.gphost_map.keys():
            p = unix.Ping('ping', key)
            pool.addCommand(p)
        pool.join()

        cmds = pool.getCompletedItems()
        for cmd in cmds:
            # Look for commands that failed to ping.
            # BUG FIX: the original tested "not cmd.was_successful() != 0",
            # which reduces to this plain boolean check.
            if not cmd.was_successful():
                hostname = cmd.hostToPing
                logger.warning("Ping to host: '%s' FAILED" % hostname)
                logger.debug(" ping details: %s" % cmd)
                gphost = self.get_host(hostname)
                dblist = gphost.dbs

                # Fall back to pinging each segment's interface address.
                alternateHost = None
                for db in dblist:
                    dbid = db.getSegmentDbId()
                    address = db.getSegmentAddress()

                    # It would be nice to handle these through a pool,
                    # but it is both a little difficult and not the
                    # expected case.
                    pingCmd = unix.Ping("dbid: %d" % dbid, address)
                    pingCmd.run()
                    if pingCmd.get_results().rc == 0:
                        alternateHost = address
                        logger.warning("alternate host: '%s' => '%s'" %
                                       (hostname, address))
                        break
                    else:
                        logger.warning("Ping to host: '%s' FAILED" % hostname)
                        logger.debug(" ping details: %s" % pingCmd)

                if alternateHost:
                    gphost.hostname = alternateHost
                else:
                    # no interface to reach any of the segments, append all
                    # segments to the list of failed segments
                    failed_segs.extend(dblist)

                # Removing the failed host from the cache.
                #
                # This seems a bit draconian, but that is what all callers
                # of this function seem to want.
                del self.gphost_map[hostname]

        pool.empty_completed_items()
        return failed_segs
| |
import wx
from db.schema import Exp, ExpSmith, ExpVNA, ExpACCoilProp, ExpPatchInfo, ExpMaterialProp, ExpVisProp
from db.schema import engine
from sqlalchemy.orm import scoped_session, sessionmaker
from core.listobject import *
import datetime
from sqlalchemy import func
from sqlalchemy.sql import label
from sqlalchemy import desc
class ListBoxItem():
    """A simple (id, display-string) pair for use in list boxes."""

    def __init__(self, idx, id, string):
        # NOTE(review): `idx` is accepted but never stored -- kept for
        # signature compatibility; confirm callers before removing it.
        self.id = id
        self.string = string

    def __str__(self):
        return self.string
class SearchDialog(wx.Dialog):
    """Modal dialog for filtering experiments by AC coil type, VNA source
    power and DC current.

    The checked selections are collected in self.ac_coils, self.src_powers
    and self.dc_currents as checklist *indexes*; the parallel lists
    self.ac_coil_list, self.src_power_list and self.dc_current_list map an
    index back to the underlying value.
    """

    def CreateACCoilSizer(self):
        """Build the row listing AC coil types loaded from the database."""
        self.ac_coil_list = []
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        bmp = wx.ArtProvider.GetBitmap(wx.ART_TIP, wx.ART_OTHER, (16, 16))
        ico = wx.StaticBitmap(self.panel, wx.ID_ANY, bmp)
        self.acCoilLabel = wx.StaticText(self.panel, wx.ID_ANY, 'AC Coil Type', size = (90, -1), style=wx.ALIGN_RIGHT)
        self.acCoilBox = wx.CheckListBox(self.panel, -1, (50, 50))
        # Short-lived session just to populate the checklist.
        session = scoped_session(sessionmaker(
            autoflush=False,
            autocommit=False,
            bind=engine))
        rows = session.query(ExpACCoilProp).order_by(ExpACCoilProp.id).all()
        j = 0
        for row in rows:
            self.ac_coil_list.append(row.id)
            self.acCoilBox.Insert(item = ('#%d - %.2fx%.2fx%.2f (%s) (%dturn of %.2fmm wire)' % \
                (row.id, row.width, row.height, row.length, row.typeAsString, row.turn, row.wire_diameter)),
                pos = j)
            j += 1
        sizer.Add(ico, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
        sizer.Add(self.acCoilLabel, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
        sizer.Add(self.acCoilBox, 1, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_VERTICAL, 5)
        session.close()
        return sizer

    def CreateSourcePowerSizer(self):
        """Build the row listing VNA source powers (+15 dBm down to -15 dBm)."""
        self.src_power_list = []
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        bmp = wx.ArtProvider.GetBitmap(wx.ART_TIP, wx.ART_OTHER, (16, 16))
        ico = wx.StaticBitmap(self.panel, wx.ID_ANY, bmp)
        self.sourcePowerLabel = wx.StaticText(self.panel, wx.ID_ANY, 'Source Power', size = (90, -1), style=wx.ALIGN_RIGHT)
        self.sourcePowerBox = wx.CheckListBox(self.panel, -1, (50, 50))
        j = 0
        # 15, 14, ..., -15 dBm in descending order.
        for i in range(15,-16,-1):
            self.src_power_list.append(i)
            self.sourcePowerBox.Insert(item = ('%d dBm' % (i)), pos = j)
            j += 1
        sizer.Add(ico, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
        sizer.Add(self.sourcePowerLabel, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
        sizer.Add(self.sourcePowerBox, 1, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_VERTICAL, 5)
        return sizer

    def CreateDCCurrentSizer(self):
        """Build the row listing distinct DC currents, most frequent first."""
        self.dc_current_list = []
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        bmp = wx.ArtProvider.GetBitmap(wx.ART_TIP, wx.ART_OTHER, (16, 16))
        ico = wx.StaticBitmap(self.panel, wx.ID_ANY, bmp)
        self.dcCurrentLabel = wx.StaticText(self.panel, wx.ID_ANY, 'DC Current', size = (90, -1), style=wx.ALIGN_RIGHT)
        self.dcCurrentBox = wx.CheckListBox(self.panel, -1, (50, 50))
        session = scoped_session(sessionmaker(
            autoflush=False,
            autocommit=False,
            bind=engine))
        # Group experiments by DC current and order by how often each occurs.
        rows = session.query(Exp.id, Exp.dc_current, label('count', func.count(Exp.dc_current))).group_by(Exp.dc_current).order_by(desc('count')).all()
        j = 0
        for row in rows:
            self.dc_current_list.append(row.dc_current)
            self.dcCurrentBox.Insert(item = ('%.2f A (%d results)' % (row.dc_current, row.count)), pos = j)
            j += 1
        sizer.Add(ico, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
        sizer.Add(self.dcCurrentLabel, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5)
        sizer.Add(self.dcCurrentBox, 1, wx.ALL|wx.EXPAND|wx.ALIGN_CENTER_VERTICAL, 5)
        session.close()
        return sizer

    def __init__(self, parent):
        """Create the dialog, lay out the three filter rows and OK/Cancel."""
        #self.settings = TerminalSetup() #placeholder for the settings
        self.parent = parent
        wx.Dialog.__init__(self, None, wx.ID_ANY, title='Form')
        # And indicate we don't have a worker thread yet
        self.src_powers = []
        self.ac_coils = []
        self.dc_currents = []

        # Add a panel so it looks correct on all platforms
        self.panel = wx.Panel(self, wx.ID_ANY)
        #bmp = wx.ArtProvider.GetBitmap(wx.ART_INFORMATION, wx.ART_OTHER, (16, 16))
        #titleIco = wx.StaticBitmap(self.panel, wx.ID_ANY, bmp)
        #title = wx.StaticText(self.panel, wx.ID_ANY, 'My Title')

        okBtn = wx.Button(self.panel, wx.ID_OK, 'OK')
        cancelBtn = wx.Button(self.panel, wx.ID_CANCEL, 'Cancel')
        self.Bind(wx.EVT_BUTTON, self.OnStart, okBtn)
        self.Bind(wx.EVT_BUTTON, self.onCancel, cancelBtn)

        topSizer = wx.BoxSizer(wx.VERTICAL)
        #titleSizer = wx.BoxSizer(wx.HORIZONTAL)
        acCoilSizer = self.CreateACCoilSizer()
        sourcePowerSizer = self.CreateSourcePowerSizer()
        dcCurrentSizer = self.CreateDCCurrentSizer()
        btnSizer = wx.BoxSizer(wx.HORIZONTAL)

        #titleSizer.Add(titleIco, 0, wx.ALL, 5)
        #titleSizer.Add(title, 0, wx.ALL, 5)
        btnSizer.Add(okBtn, 0, wx.ALL, 5)
        btnSizer.Add(cancelBtn, 0, wx.ALL, 5)

        #topSizer.Add(titleSizer, 0, wx.CENTER)
        #topSizer.Add(wx.StaticLine(self.panel,), 0, wx.ALL|wx.EXPAND, 5)
        topSizer.Add(acCoilSizer, 0, wx.ALL|wx.EXPAND, 5)
        topSizer.Add(sourcePowerSizer, 0, wx.ALL|wx.EXPAND, 5)
        topSizer.Add(dcCurrentSizer, 0, wx.ALL|wx.EXPAND, 5)
        topSizer.Add(wx.StaticLine(self.panel), 0, wx.ALL|wx.EXPAND, 5)
        topSizer.Add(btnSizer, 0, wx.ALL|wx.CENTER, 5)

        self.panel.SetSizer(topSizer)
        topSizer.Fit(self)

        self.Bind(wx.EVT_CHECKLISTBOX, self.OnSourcePowerListBox, self.sourcePowerBox)
        self.Bind(wx.EVT_CHECKLISTBOX, self.OnACCoilBox, self.acCoilBox)
        self.Bind(wx.EVT_CHECKLISTBOX, self.OnDCCurrentBox, self.dcCurrentBox)
        #self.Bind(wx.EVT_BUTTON, self.OnStart, id=ID_START)
        #if not self.worker.isAlive():
        #    self.Close()

    def OnDCCurrentBox(self, event):
        """Track (un)checking of a DC-current entry.

        NOTE(review): the checklist *index* is stored, not the value from
        self.dc_current_list -- confirm consumers map it back via that list.
        """
        index = event.GetSelection()
        if self.dcCurrentBox.IsChecked(index):
            self.dc_currents.append(index)
        else:
            self.dc_currents.remove(index)
        self.dcCurrentBox.SetSelection(index)    # so that (un)checking also selects (moves the highlight)

    def OnSourcePowerListBox(self, event):
        """Track (un)checking of a source-power entry (index, see note above on indexes in OnDCCurrentBox)."""
        index = event.GetSelection()
        if self.sourcePowerBox.IsChecked(index):
            self.src_powers.append(index)
        else:
            self.src_powers.remove(index)
        self.sourcePowerBox.SetSelection(index)    # so that (un)checking also selects (moves the highlight)

    def OnACCoilBox(self, event):
        """Track (un)checking of an AC-coil entry (index)."""
        index = event.GetSelection()
        if self.acCoilBox.IsChecked(index):
            self.ac_coils.append(index)
        else:
            self.ac_coils.remove(index)
        self.acCoilBox.SetSelection(index)    # so that (un)checking also selects (moves the highlight)

    def OnStart(self, event):
        """Start Computation."""
        # Validation below is intentionally disabled (kept for reference).
        '''
        if len(self.src_powers) == 0:
            dlg = wx.MessageDialog(None, "Select one VNA Source Power at least", "Invalid Value.", wx.OK | wx.ICON_ERROR)
            dlg.ShowModal()
            dlg.Destroy()
            return
        if len(self.ac_coils) == 0:
            dlg = wx.MessageDialog(None, "Select AC Coil", "Invalid Value.", wx.OK | wx.ICON_ERROR)
            dlg.ShowModal()
            dlg.Destroy()
            return
        '''
        self.EndModal(wx.ID_OK)

    def onCancel(self, event):
        """Dismiss the dialog without applying any filters."""
        self.Destroy()
# Run the program
if __name__ == '__main__':
    engine.echo = True
    # wx.PySimpleApp was deprecated and removed in wxPython Phoenix;
    # wx.App(False) is the supported equivalent.
    app = wx.App(False)
    # BUG FIX: the original referenced VNAInputFrame, which is not defined
    # in this module (NameError at runtime); SearchDialog is the dialog
    # implemented above.
    dlg = SearchDialog(None)
    dlg.ShowModal()
    app.MainLoop()
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
# Signature of the optional ``cls`` callback accepted by each operation.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

# Module-wide serializer used for URL/query/header formatting; client-side
# validation is disabled because the service validates the values itself.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
    subscription_id: str,
    resource_group_name: str,
    resource_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists an IoT hub's private endpoint connections.

    NOTE: auto-generated (AutoRest); hand edits are lost on regeneration.
    """
    api_version = "2020-03-01"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_get_request(
    subscription_id: str,
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request for one named private endpoint connection.

    NOTE: auto-generated (AutoRest); hand edits are lost on regeneration.
    """
    api_version = "2020-03-01"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'),
        "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_update_request_initial(
    subscription_id: str,
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the initial PUT request of the long-running update operation.

    NOTE: auto-generated (AutoRest); hand edits are lost on regeneration.
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    api_version = "2020-03-01"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'),
        "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    # Content-Type is only sent when a body serialization format was chosen.
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="PUT",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        json=json,
        content=content,
        **kwargs
    )
def build_delete_request_initial(
    subscription_id: str,
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the initial DELETE request of the long-running delete operation.

    NOTE: auto-generated (AutoRest); hand edits are lost on regeneration.
    """
    api_version = "2020-03-01"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, 'str'),
        "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="DELETE",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
class PrivateEndpointConnectionsOperations(object):
"""PrivateEndpointConnectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.iothub.v2020_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        """Store the pipeline client, configuration and (de)serializers."""
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        resource_name: str,
        **kwargs: Any
    ) -> List["_models.PrivateEndpointConnection"]:
        """List private endpoint connections.

        List private endpoint connection properties.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: list of PrivateEndpointConnection, or the result of cls(response)
        :rtype: list[~azure.mgmt.iothub.v2020_03_01.models.PrivateEndpointConnection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[List["_models.PrivateEndpointConnection"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_list_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            template_url=self.list.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Any status other than 200 is raised as HttpResponseError with the
        # service's ErrorDetails payload attached when it can be deserialized.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('[PrivateEndpointConnection]', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections'}  # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
resource_name: str,
private_endpoint_connection_name: str,
**kwargs: Any
) -> "_models.PrivateEndpointConnection":
"""Get private endpoint connection.
Get private endpoint connection properties.
:param resource_group_name: The name of the resource group that contains the IoT hub.
:type resource_group_name: str
:param resource_name: The name of the IoT hub.
:type resource_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection, or the result of cls(response)
:rtype: ~azure.mgmt.iothub.v2020_03_01.models.PrivateEndpointConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
private_endpoint_connection_name=private_endpoint_connection_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
def _update_initial(
self,
resource_group_name: str,
resource_name: str,
private_endpoint_connection_name: str,
private_endpoint_connection: "_models.PrivateEndpointConnection",
**kwargs: Any
) -> "_models.PrivateEndpointConnection":
cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(private_endpoint_connection, 'PrivateEndpointConnection')
request = build_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
private_endpoint_connection_name=private_endpoint_connection_name,
content_type=content_type,
json=_json,
template_url=self._update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
    @distributed_trace
    def begin_update(
        self,
        resource_group_name: str,
        resource_name: str,
        private_endpoint_connection_name: str,
        private_endpoint_connection: "_models.PrivateEndpointConnection",
        **kwargs: Any
    ) -> LROPoller["_models.PrivateEndpointConnection"]:
        """Update private endpoint connection.

        Update the status of a private endpoint connection with the specified name.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection.
        :type private_endpoint_connection_name: str
        :param private_endpoint_connection: The private endpoint connection with updated properties.
        :type private_endpoint_connection:
         ~azure.mgmt.iothub.v2020_03_01.models.PrivateEndpointConnection
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
         of cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.iothub.v2020_03_01.models.PrivateEndpointConnection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Pop the LRO-control keywords first so the remaining kwargs can be
        # forwarded verbatim to the initial request and the polling method.
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # cls=identity makes _update_initial hand back the raw
            # PipelineResponse the poller needs, instead of a deserialized model.
            raw_result = self._update_initial(
                resource_group_name=resource_group_name,
                resource_name=resource_name,
                private_endpoint_connection_name=private_endpoint_connection_name,
                private_endpoint_connection=private_endpoint_connection,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map was consumed by the initial call; drop it so the polling
        # method does not receive an unexpected keyword.
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO, honouring a
            # caller-supplied cls hook captured from the enclosing scope.
            response = pipeline_response.http_response
            deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # polling may be True (default ARM polling), False (no polling), or a
        # caller-provided PollingMethod instance.
        if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
def _delete_initial(
self,
resource_group_name: str,
resource_name: str,
private_endpoint_connection_name: str,
**kwargs: Any
) -> Optional["_models.PrivateEndpointConnection"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.PrivateEndpointConnection"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
private_endpoint_connection_name=private_endpoint_connection_name,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
    @distributed_trace
    def begin_delete(
        self,
        resource_group_name: str,
        resource_name: str,
        private_endpoint_connection_name: str,
        **kwargs: Any
    ) -> LROPoller["_models.PrivateEndpointConnection"]:
        """Delete private endpoint connection.

        Delete private endpoint connection with the specified name.

        :param resource_group_name: The name of the resource group that contains the IoT hub.
        :type resource_group_name: str
        :param resource_name: The name of the IoT hub.
        :type resource_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection.
        :type private_endpoint_connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
         of cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.iothub.v2020_03_01.models.PrivateEndpointConnection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Pop LRO-control keywords before forwarding kwargs downstream.
        polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # cls=identity returns the raw PipelineResponse for the poller.
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                resource_name=resource_name,
                private_endpoint_connection_name=private_endpoint_connection_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map was consumed by the initial call; keep it away from polling.
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal LRO response; captures cls from above.
            response = pipeline_response.http_response
            deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # polling: True -> ARM polling, False -> no polling, else caller's method.
        if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/iotHubs/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
| |
"""OAuth 2.0 WSGI server middleware implements support for basic bearer
tokens and also X.509 certificates as access tokens
OAuth 2.0 Authorisation Server
"""
__author__ = "R B Wilkinson"
__date__ = "12/12/11"
__copyright__ = "(C) 2011 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "Philip.Kershaw@stfc.ac.uk"
__revision__ = "$Id$"
import json
import logging
import httplib
import urllib
from ndg.oauth.server.lib.access_token.make_access_token import \
make_access_token
from ndg.oauth.server.lib.oauth.access_token import AccessTokenRequest
from ndg.oauth.server.lib.oauth.authorize import (AuthorizeRequest,
AuthorizeResponse)
from ndg.oauth.server.lib.oauth.oauth_exception import OauthException
from ndg.oauth.server.lib.register.access_token import AccessTokenRegister
from ndg.oauth.server.lib.register.authorization_grant import \
AuthorizationGrantRegister
log = logging.getLogger(__name__)
class AuthorizationServer(object):
"""
Provides the core OAuth 2.0 server functions.
"""
AUTHZ_HDR_ENV_KEYNAME = 'HTTP_AUTHORIZATION'
BEARER_TOK_ID = 'Bearer'
MAC_TOK_ID = 'MAC'
TOKEN_TYPES = (BEARER_TOK_ID, MAC_TOK_ID)
def __init__(self, client_register, authorizer, client_authenticator,
resource_register, resource_authenticator,
access_token_generator, config):
self.client_register = client_register
self.authorizer = authorizer
self.client_authenticator = client_authenticator
self.resource_register = resource_register
self.resource_authenticator = resource_authenticator
self.access_token_generator = access_token_generator
self.access_token_register = AccessTokenRegister(config)
self.authorization_grant_register = AuthorizationGrantRegister(config)
def authorize(self, request, client_authorized):
"""Handle an authorization request.
It is assumed that the caller has checked whether the user is
authenticated and that the user has authorised the client and scope.
Request query parameters (from
http://tools.ietf.org/html/draft-ietf-oauth-v2-22):
response_type
REQUIRED. Value MUST be set to "code".
client_id
REQUIRED. The client identifier as described in Section 2.2.
redirect_uri
OPTIONAL, as described in Section 3.1.2.
scope
OPTIONAL. The scope of the access request as described by
Section 3.3.
state
RECOMMENDED. An opaque value used by the client to maintain
state between the request and callback. The authorization
server includes this value when redirecting the user-agent back
to the client. The parameter SHOULD be used for preventing
cross-site request forgery as described in Section 10.12.
Response:
application/x-www-form-urlencoded format:
code
REQUIRED. The authorization code generated by the
authorization server. The authorization code MUST expire
shortly after it is issued to mitigate the risk of leaks. A
maximum authorization code lifetime of 10 minutes is
RECOMMENDED. The client MUST NOT use the authorization code
more than once. If an authorization code is used more than
once, the authorization server MUST deny the request and SHOULD
attempt to revoke all tokens previously issued based on that
authorization code. The authorization code is bound to the
client identifier and redirection URI.
state
REQUIRED if the "state" parameter was present in the client
authorization request. The exact value received from the
client.
@type request: webob.Request
@param request: HTTP request object
@type client_authorized: bool
@param client_authorized: True if resource owner has authorized client
@rtype: tuple: (str, int, str)
@return: tuple (
redirect_uri
HTTP status if error
error description
)
"""
log.debug("Starting authorization request")
# Parameters should only be taken from the query string.
params = request.GET
auth_request = AuthorizeRequest(params.get('response_type', None),
params.get('client_id', None),
params.get('redirect_uri', None),
params.get('scope', None),
params.get('state', None))
try:
self.check_request(request, params, post_only=False)
# Check for required parameters.
required_parameters = ['response_type', 'client_id']
for param in required_parameters:
if param not in params:
log.error("Missing request parameter %s from params: %s",
param, params)
raise OauthException('invalid_request',
"Missing request parameter: %s" % param)
if not client_authorized:
raise OauthException('access_denied',
'User has declined authorization')
response_type = params.get('response_type', None)
if response_type != 'code':
raise OauthException('unsupported_response_type',
"Response type %s not supported" %
response_type)
client_error = self.client_register.is_valid_client(
auth_request.client_id,
auth_request.redirect_uri)
if client_error:
log.error("Invalid client: %s", client_error)
return (None, httplib.BAD_REQUEST, client_error)
# redirect_uri must be included in the request if the client has
# more than one registered.
client = self.client_register.register[auth_request.client_id]
if len(client.redirect_uris) != 1 and not auth_request.redirect_uri:
log.error("An authorization request has been made without a "
"return URI")
return (None,
httplib.BAD_REQUEST,
('An authorization request has been made without a '
'return URI.'))
# Preconditions satisfied - generate grant.
(grant, code) = self.authorizer.generate_authorization_grant(
auth_request,
request)
auth_response = AuthorizeResponse(code, auth_request.state)
if not self.authorization_grant_register.add_grant(grant):
log.error('Registering grant failed')
raise OauthException('server_error',
'Authorization grant could not be created')
except OauthException, exc:
log.error("Redirecting back after error: %s - %s",
exc.error, exc.error_description)
return self._redirect_after_authorize(auth_request, None, exc.error,
exc.error_description)
log.debug("Redirecting back after successful authorization.")
return self._redirect_after_authorize(auth_request, auth_response)
def _redirect_after_authorize(self,
auth_request,
auth_response=None,
error=None,
error_description=None):
"""Redirects to the redirect URI after the authorization process as
completed.
@type resp: ndg.oauth.server.lib.oauth.authorize.AuthorizeRequest
@param resp: OAuth authorize request
@type resp: ndg.oauth.server.lib.oauth.authorize.AuthorizeResponse
@param resp: OAuth authorize response
@type error: str
@param error: OAuth error
@type error_description: str
@param error_description: error description
"""
# Check for inconsistencies that should be reported directly to the user.
if not auth_response and not error:
error = 'server_error'
error_description = 'Internal server error'
# Get the redirect URI.
client = self.client_register.register[auth_request.client_id]
redirect_uri = (
auth_request.redirect_uri if auth_request.redirect_uri else \
client.redirect_uris[0]
)
if not redirect_uri:
return (
None,
httplib.BAD_REQUEST,
'An authorization request has been made without a return URI.')
# Redirect back to client with authorization code or error.
if error:
url_parameters = [('error', error),
('error_description', error_description)]
else:
url_parameters = [('code', auth_response.code)]
full_redirect_uri = self._make_combined_url(redirect_uri,
url_parameters,
auth_request.state)
log.debug("Redirecting to URI: %s", full_redirect_uri)
return(full_redirect_uri, None, None)
@staticmethod
def _make_combined_url(base_url, parameters, state):
"""Constructs a URL from a base URL and parameters to be included in a
query string.
@type base_url: str
@param base_url: base URL to which to add query parameters
@type parameters: dict
@param parameters: parameter names and values
@type state: str
@param state: OAuth state parameter value, which whould not be URL
encoded
@rtype: str
@return: full URL
"""
url = base_url.rstrip('?')
url_parts = [url]
sep_with_ampersand = ('?' in url)
if parameters:
query_string = urllib.urlencode(parameters)
url_parts.extend([('&' if (sep_with_ampersand) else '?'),
query_string])
sep_with_ampersand = True
if state:
url_parts.extend([('&' if (sep_with_ampersand) else '?'),
'state=',
state])
return ''.join(url_parts)
def access_token(self, request):
"""
Handles a request for an access token.
Request parameters in post data (from
http://tools.ietf.org/html/draft-ietf-oauth-v2-22):
The client makes a request to the token endpoint by adding the
following parameters using the "application/x-www-form-urlencoded"
format in the HTTP request entity-body:
grant_type
REQUIRED. Value MUST be set to "authorization_code".
code
REQUIRED. The authorization code received from the
authorization server.
redirect_uri
REQUIRED, if the "redirect_uri" parameter was included in the
authorization request as described in Section 4.1.1, and their
values MUST be identical.
Response:
application/json format:
access_token
access token
token_type
token type
expires_in
lifetime of token in seconds
refresh_token
@type request: webob.Request
@param request: HTTP request object
@rtype: tuple: (str, int, str)
@return: tuple (
OAuth JSON response
HTTP status if error
error description
)
"""
log.debug("Starting access token request")
try:
# Parameters should only be taken from the body, not the URL query
# string.
params = request.POST
self.check_request(request, params, post_only=True)
# Check that the client is authenticated as a registered client.
client_id = self.client_authenticator.authenticate(request)
if client_id is None:
log.warn('Client authentication not performed')
else:
log.debug("Client id: %s", client_id)
# redirect_uri is only required if it was included in the
# authorization request.
required_parameters = ['grant_type', 'code']
for param in required_parameters:
if param not in params:
log.error("Missing request parameter %s from inputs: %s",
param, params)
raise OauthException(
'invalid_request',
"Missing request parameter: %s" % param)
except OauthException, exc:
return (self._error_access_token_response(exc.error,
exc.error_description),
None, None)
token_request = AccessTokenRequest(params.get('grant_type', None),
params.get('code', None),
params.get('redirect_uri', None))
try:
response = make_access_token(
token_request, client_id, self.access_token_register,
self.access_token_generator, self.authorization_grant_register,
request)
except OauthException, exc:
return (self._error_access_token_response(exc.error,
exc.error_description),
None, None)
if response:
return self._access_token_response(response), None, None
else:
return (None, httplib.INTERNAL_SERVER_ERROR,
'Access token generation failed.')
def _access_token_response(self, resp):
"""Constructs the JSON response to an access token request.
@type resp: ndg.oauth.server.lib.oauth.access_token.AccessTokenResponse
@param resp: OAuth access token response
@rtype: str
@return JSON formatted response
"""
log.debug("Responding successfully with access token.")
content_dict = resp.get_as_dict()
content = json.dumps(content_dict)
return content
def _error_access_token_response(self, error, error_description):
"""Constructs an error JSON response to an access token request.
@type error: str
@param error: OAuth error
@type error_description: str
@param error_description: error description
@rtype: str
@return JSON formatted response
"""
log.error("Responding with error: %s - %s", error, error_description)
error_dict = {'error': error}
if error_description:
error_dict['error_description'] = error_description
error_content = json.dumps(error_dict)
return error_content
def check_request(self, request, params, post_only=False):
"""
Checks that the request is valid in the following respects:
o Must be over HTTPS.
o Optionally, must use the POST method.
o Parameters must not be repeated.
If the request is directly from the client, the user must be
authenticated - it is assumed that the caller has checked this.
Raises OauthException if any check fails.
@type request: webob.Request
@param request: HTTP request object
@type params: dict
@param params: request parameters
@type post_only: bool
@param post_only: True if the HTTP method must be POST, otherwise False
"""
if request.scheme != 'https':
raise OauthException('invalid_request',
'Transport layer security must be used for '
'this request.')
if post_only and (request.method != 'POST'):
raise OauthException('invalid_request',
'HTTP POST method must be used for this '
'request.')
# Check for duplicate parameters.
param_counts = {}
for key in params.iterkeys():
count = param_counts.get(key, 0)
param_counts[key] = count + 1
for key, count in param_counts.iteritems():
if count > 1:
raise OauthException('invalid_request',
'Parameter "%s" is repeated.' % key)
return
def check_token(self, request, scope=None):
"""
Simple service that could be used to validate bearer tokens. It would
be called from a resource service that trusts this authorization
service. This is not part of the OAuth specification.
Request parameters
access_token
REQUIRED. Bearer token
scope
OPTIONAL. Scope
Response:
application/json format:
status
HTTP status indicating the access control decision
error
error as described in
http://tools.ietf.org/html/draft-ietf-oauth-v2-22#section-5.2
@type request: webob.Request
@param request: HTTP request object
@type scope: str
@param scope: required scope
@rtype: tuple: (str, int, str)
@return: tuple (
OAuth JSON response
HTTP status
error description
)
"""
params = request.params
# Check that the client is authenticated as a registered client.
resource_id = self.resource_authenticator.authenticate(request)
if resource_id is None:
log.warn('Resource authentication not performed')
else:
log.debug("Resource id: %s", resource_id)
# Retrieve access token
if 'access_token' not in params:
error = 'invalid_request'
else:
access_token = params['access_token']
if scope:
required_scope = scope
else:
required_scope = params.get('scope', None)
token, error = self.access_token_register.get_token(access_token,
required_scope)
# Formulate response
status = {'invalid_request': httplib.BAD_REQUEST,
'invalid_token': httplib.FORBIDDEN,
None: httplib.OK}.get(error, httplib.BAD_REQUEST)
content_dict = {'status': status}
if error:
content_dict['error'] = error
else:
# TODO only get additional data when resource is allowed to
content_dict['user_name'] = token.grant.additional_data.get('user_identifier')
content = json.dumps(content_dict)
return (content, status, error)
def get_registered_token(self, request, scope=None):
"""
Checks that a token in the request is valid. It would
be called from a resource service that trusts this authorization
service.
Request parameters:
set in Authorization header (OAuth spec., Section 7.1 Access
Token Types
token type: Bearer or MAC
access token: access token to obtain access
Response:
application/json format:
status
HTTP status indicating the access control decision
error
error as described in
http://tools.ietf.org/html/draft-ietf-oauth-v2-22#section-5.2
@type request: webob.Request
@param request: HTTP request object
@type scope: str
@param scope: required scope
@rtype: tuple: (str, int, str)
@return: tuple (
access token
HTTP status
error description
)
"""
authorization_hdr = request.environ.get(
self.__class__.AUTHZ_HDR_ENV_KEYNAME)
if authorization_hdr is None:
log.error('No Authorization header present for request to %r',
request.path_url)
error = 'invalid_request'
token = None
else:
authorization_hdr_parts = authorization_hdr.split()
if len(authorization_hdr_parts) < 2:
log.error('Expecting at least two Authorization header '
'elements for request to %r; '
'header is: %r', request.path_url, authorization_hdr)
error = 'invalid_request'
token_type, access_token = authorization_hdr_parts[:2]
# Currently only supports bearer type tokens
if token_type != self.__class__.BEARER_TOK_ID:
log.error('Token type retrieved is %r, expecting "Bearer" '
'type for request to %r', token_type)
error = 'invalid_request'
else:
token, error = self.access_token_register.get_token(
access_token,
scope)
status = {'invalid_request': httplib.BAD_REQUEST,
'invalid_token': httplib.FORBIDDEN,
'insufficient_scope': httplib.FORBIDDEN,
None: httplib.OK}.get(error, httplib.BAD_REQUEST)
return token, status, error
def is_registered_client(self, request):
"""Determines whether the client ID in the request is registered.
@type request: WebOb.request
@param request: request
@rtype: tuple (basestring, basestring) or (NoneType, NoneType)
@return: (error, error description) or None if client ID is found and
registered
"""
client_id = request.params.get('client_id', None)
if not client_id:
return 'invalid_request', 'Missing request parameter: client_id'
else:
error_description = self.client_register.is_registered_client(
client_id)
if error_description:
return 'unauthorized_client', error_description
return None, None
| |
from __future__ import absolute_import, print_function
import hashlib
import logging
import threading
import weakref
from django.conf import settings
from django.db import router
from django.db.models import Manager, Model
from django.db.models.signals import (
post_save, post_delete, post_init, class_prepared)
from django.utils.encoding import smart_str
import six
from cobra.core.cache import cache
from .query import create_or_update
__all__ = ('BaseManager',)
logger = logging.getLogger('cobra.errors')
class ImmutableDict(dict):
    """A dict whose contents cannot be changed after construction: both item
    assignment and item deletion raise TypeError."""

    def _reject_mutation(self, *args):
        raise TypeError

    # Route both mutating slots through the single rejection handler.
    __setitem__ = _reject_mutation
    __delitem__ = _reject_mutation


# Shared sentinel marking an instance that has not been saved yet; immutable
# so accidental writes to the sentinel fail loudly.
UNSAVED = ImmutableDict()
def __prep_value(model, key, value):
    """Normalise *value* for use in a cache key: model instances collapse to
    their primary key; anything else becomes its text representation."""
    if isinstance(value, Model):
        return value.pk
    return six.text_type(value)
def __prep_key(model, key):
    """Translate the 'pk' shorthand into the model's real primary-key field
    name; other keys pass through unchanged."""
    return model._meta.pk.name if key == 'pk' else key
def make_key(model, prefix, kwargs):
    """Build a deterministic cache key for a lookup on *model*.

    The key has the form ``prefix:ModelName:md5(field=value:...)``; kwargs are
    sorted so the same lookup always yields the same key.

    @param model: model class the lookup targets
    @param prefix: namespace prefix for the key (e.g. 'modelcache')
    @param kwargs: lookup field/value pairs
    @return: cache key string
    """
    kwargs_bits = []
    # six.iteritems keeps this working on both Python 2 and 3; the original
    # called kwargs.iteritems() directly, which is Python-2-only even though
    # the module otherwise uses six for compatibility.
    for k, v in sorted(six.iteritems(kwargs)):
        k = __prep_key(model, k)
        v = smart_str(__prep_value(model, k, v))
        kwargs_bits.append('%s=%s' % (k, v))
    kwargs_bits = ':'.join(kwargs_bits)
    # hashlib.md5 requires bytes on Python 3; smart_str already yields bytes
    # on Python 2, so only encode when we hold text.
    if not isinstance(kwargs_bits, bytes):
        kwargs_bits = kwargs_bits.encode('utf-8')
    return '%s:%s:%s' % (prefix, model.__name__,
                         hashlib.md5(kwargs_bits).hexdigest())
class BaseManager(Manager):
    # Per-lookup normalisers applied to values before cache-key generation,
    # so that lookups which Django treats as equal share one cache entry
    # (e.g. 'iexact' values are uppercased).
    lookup_handlers = {
        'iexact': lambda x: x.upper(),
    }
    # Make related-object descriptors use this manager too, so cached
    # lookups also apply to relation traversal.
    use_for_related_fields = True
def __init__(self, *args, **kwargs):
self.cache_fields = kwargs.pop('cache_fields', [])
self.cache_ttl = kwargs.pop('cache_ttl', 60 * 5)
self.__local_cache = threading.local()
super(BaseManager, self).__init__(*args, **kwargs)
def _get_cache(self):
if not hasattr(self.__local_cache, 'value'):
self.__local_cache.value = weakref.WeakKeyDictionary()
return self.__local_cache.value
def _set_cache(self, value):
self.__local_cache.value = value
__cache = property(_get_cache, _set_cache)
    def __getstate__(self):
        """Return picklable state: drop the weakref-based caches, which
        cannot be serialized."""
        d = self.__dict__.copy()
        # we cant serialize weakrefs
        # Keys are spelled with the name-mangled prefix because the
        # attributes were assigned as self.__cache / self.__local_cache
        # inside this class.
        d.pop('_BaseManager__cache', None)
        d.pop('_BaseManager__local_cache', None)
        return d
def __setstate__(self, state):
self.__dict__.update(state)
self.__local_cache = weakref.WeakKeyDictionary()
    def __class_prepared(self, sender, **kwargs):
        """
        Given the cache is configured, connects the required signals for invalidation.
        """
        # NOTE(review): assumes self.post_save / self.post_delete hooks are
        # defined on this manager (not visible in this chunk) - confirm.
        post_save.connect(self.post_save, sender=sender, weak=False)
        post_delete.connect(self.post_delete, sender=sender, weak=False)

        if not self.cache_fields:
            return
        # Cache-maintenance hooks are only wired up when lookup caching is
        # actually enabled via cache_fields.
        post_init.connect(self.__post_init, sender=sender, weak=False)
        post_save.connect(self.__post_save, sender=sender, weak=False)
        post_delete.connect(self.__post_delete, sender=sender, weak=False)
def __cache_state(self, instance):
"""
Updates the tracked state of an instance.
"""
if instance.pk:
self.__cache[instance] = dict((f, getattr(instance, f)) for f in self.cache_fields)
else:
self.__cache[instance] = UNSAVED
    def __post_init(self, instance, **kwargs):
        """
        Stores the initial state of an instance.
        """
        # Snapshot the cache_fields values at load time so __post_save can
        # later detect which lookup keys changed.
        self.__cache_state(instance)
def __post_save(self, instance, **kwargs):
"""
Pushes changes to an instance into the cache, and removes invalid (changed)
lookup values.
"""
pk_name = instance._meta.pk.name
pk_names = ('pk', pk_name)
pk_val = instance.pk
for key in self.cache_fields:
if key in pk_names:
continue
# store pointers
cache.set(self.__get_lookup_cache_key(**{key: getattr(instance, key)}), pk_val, self.cache_ttl) # 1 hour
# Ensure we don't serialize the database into the cache
db = instance._state.db
instance._state.db = None
# store actual object
try:
cache.set(self.__get_lookup_cache_key(**{pk_name: pk_val}), instance, self.cache_ttl)
except Exception as e:
logger.error(e, exc_info=True)
instance._state.db = db
# Kill off any keys which are no longer valid
if instance in self.__cache:
for key in self.cache_fields:
if key not in self.__cache[instance]:
continue
value = self.__cache[instance][key]
if value != getattr(instance, key):
cache.delete(self.__get_lookup_cache_key(**{key: value}))
self.__cache_state(instance)
def __post_delete(self, instance, **kwargs):
"""
Drops instance from all cache storages.
"""
pk_name = instance._meta.pk.name
for key in self.cache_fields:
if key in ('pk', pk_name):
continue
# remove pointers
cache.delete(self.__get_lookup_cache_key(**{key: getattr(instance, key)}))
# remove actual object
cache.delete(self.__get_lookup_cache_key(**{pk_name: instance.pk}))
def __get_lookup_cache_key(self, **kwargs):
return make_key(self.model, 'modelcache', kwargs)
def contribute_to_class(self, model, name):
super(BaseManager, self).contribute_to_class(model, name)
class_prepared.connect(self.__class_prepared, sender=model)
def get_from_cache(self, **kwargs):
"""
Wrapper around QuerySet.get which supports caching of the
intermediate value. Callee is responsible for making sure
the cache key is cleared on save.
"""
if not self.cache_fields or len(kwargs) > 1:
return self.get(**kwargs)
key, value = kwargs.items()[0]
pk_name = self.model._meta.pk.name
if key == 'pk':
key = pk_name
# Kill __exact since it's the default behavior
if key.endswith('__exact'):
key = key.split('__exact', 1)[0]
if key in self.cache_fields or key == pk_name:
cache_key = self.__get_lookup_cache_key(**{key: value})
retval = cache.get(cache_key)
if retval is None:
result = self.get(**kwargs)
# Ensure we're pushing it into the cache
self.__post_save(instance=result)
return result
# If we didn't look up by pk we need to hit the reffed
# key
if key != pk_name:
return self.get_from_cache(**{pk_name: retval})
if type(retval) != self.model:
if settings.DEBUG:
raise ValueError('Unexpected value type returned from cache')
logger.error('Cache response returned invalid value %r', retval)
return self.get(**kwargs)
if key == pk_name and int(value) != retval.pk:
if settings.DEBUG:
raise ValueError('Unexpected value returned from cache')
logger.error('Cache response returned invalid value %r', retval)
return self.get(**kwargs)
retval._state.db = router.db_for_read(self.model, **kwargs)
return retval
else:
return self.get(**kwargs)
def create_or_update(self, **kwargs):
return create_or_update(self.model, **kwargs)
def bind_nodes(self, object_list, *node_names):
from cobra import singleton
object_node_list = []
for name in node_names:
object_node_list.extend((getattr(i, name) for i in object_list if getattr(i, name).id))
node_ids = [n.id for n in object_node_list]
if not node_ids:
return
node_results = singleton.nodestore.get_multi(node_ids)
for node in object_node_list:
node.bind_data(node_results.get(node.id) or {})
def uncache_object(self, instance_id):
pk_name = self.model._meta.pk.name
cache_key = self.__get_lookup_cache_key(**{pk_name: instance_id})
cache.delete(cache_key)
def post_save(self, instance, **kwargs):
"""
Triggered when a model bound to this manager is saved.
"""
def post_delete(self, instance, **kwargs):
"""
Triggered when a model bound to this manager is deleted.
"""
| |
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
###
### Codes from neutron wsgi
###
import logging
from xml.etree import ElementTree as etree
from xml.parsers import expat
from neutronclient.common import constants
from neutronclient.common import exceptions as exception
from neutronclient.openstack.common.gettextutils import _
from neutronclient.openstack.common import jsonutils
LOG = logging.getLogger(__name__)
class ActionDispatcher(object):
    """Dispatches a call to the method named by the 'action' keyword.

    Subclasses provide handler methods named after actions; ``default``
    is the fallback when no matching handler exists.
    """

    def dispatch(self, *args, **kwargs):
        """Look up the handler for the requested action and invoke it."""
        name = str(kwargs.pop('action', 'default'))
        handler = getattr(self, name, self.default)
        return handler(*args, **kwargs)

    def default(self, data):
        # Subclasses must supply the fallback behavior.
        raise NotImplementedError()
class DictSerializer(ActionDispatcher):
    """Base class for serializing a dictionary into a body string."""

    def serialize(self, data, action='default'):
        """Serialize *data* using the handler registered for *action*."""
        return self.dispatch(data, action=action)

    def default(self, data):
        # Unknown action: produce an empty body.
        return ""
class JSONDictSerializer(DictSerializer):
    """Serializes dictionaries to JSON request bodies."""

    def default(self, data):
        # Objects the JSON encoder cannot handle natively fall back to
        # their unicode representation.
        return jsonutils.dumps(data, default=lambda obj: unicode(obj))
class XMLDictSerializer(DictSerializer):
    """Serializes a dictionary to an XML document with type annotations."""

    def __init__(self, metadata=None, xmlns=None):
        """XMLDictSerializer constructor.

        :param metadata: information needed to deserialize xml into
                         a dictionary.
        :param xmlns: XML namespace to include with serialized xml
        """
        super(XMLDictSerializer, self).__init__()
        self.metadata = metadata or {}
        if not xmlns:
            xmlns = self.metadata.get('xmlns')
        if not xmlns:
            # Fall back to the default v2.0 namespace.
            xmlns = constants.XML_NS_V20
        self.xmlns = xmlns

    def default(self, data):
        """Default serializer of XMLDictSerializer.

        :param data: expect data to contain a single key as XML root, or
                     contain another '*_links' key as atom links. Other
                     case will use 'VIRTUAL_ROOT_KEY' as XML root.
        """
        try:
            links = None
            has_atom = False
            if data is None:
                root_key = constants.VIRTUAL_ROOT_KEY
                root_value = None
            else:
                # Pull any '*_links' entry out of the payload; it becomes
                # <atom:link> child nodes rather than regular data.
                link_keys = [k for k in data.iterkeys() or []
                             if k.endswith('_links')]
                if link_keys:
                    links = data.pop(link_keys[0], None)
                    has_atom = True
                root_key = (len(data) == 1 and
                            data.keys()[0] or constants.VIRTUAL_ROOT_KEY)
                root_value = data.get(root_key, data)
            doc = etree.Element("_temp_root")
            used_prefixes = []
            self._to_xml_node(doc, self.metadata, root_key,
                              root_value, used_prefixes)
            if links:
                self._create_link_nodes(list(doc)[0], links)
            # The real root is the single child built under _temp_root.
            return self.to_xml_string(list(doc)[0], used_prefixes, has_atom)
        except AttributeError as e:
            LOG.exception(str(e))
            return ''

    def __call__(self, data):
        # Provides a migration path to a cleaner WSGI layer, this
        # "default" stuff and extreme extensibility isn't being used
        # like originally intended
        return self.default(data)

    def to_xml_string(self, node, used_prefixes, has_atom=False):
        # Attach namespace declarations before rendering to bytes.
        self._add_xmlns(node, used_prefixes, has_atom)
        return etree.tostring(node, encoding='UTF-8')

    #NOTE (ameade): the has_atom should be removed after all of the
    # xml serializers and view builders have been updated to the current
    # spec that required all responses include the xmlns:atom, the has_atom
    # flag is to prevent current tests from breaking
    def _add_xmlns(self, node, used_prefixes, has_atom=False):
        node.set('xmlns', self.xmlns)
        node.set(constants.TYPE_XMLNS, self.xmlns)
        if has_atom:
            node.set(constants.ATOM_XMLNS, constants.ATOM_NAMESPACE)
        node.set(constants.XSI_NIL_ATTR, constants.XSI_NAMESPACE)
        # Declare only the extension namespaces actually used in the doc.
        ext_ns = self.metadata.get(constants.EXT_NS, {})
        for prefix in used_prefixes:
            if prefix in ext_ns:
                node.set('xmlns:' + prefix, ext_ns[prefix])

    def _to_xml_node(self, parent, metadata, nodename, data, used_prefixes):
        """Recursive method to convert data members to XML nodes."""
        result = etree.SubElement(parent, nodename)
        if ":" in nodename:
            used_prefixes.append(nodename.split(":", 1)[0])
        #TODO(bcwaldon): accomplish this without a type-check
        if isinstance(data, list):
            if not data:
                result.set(
                    constants.TYPE_ATTR,
                    constants.TYPE_LIST)
                return result
            # Each list item becomes a child node named by the singular
            # form of this node's name.
            singular = metadata.get('plurals', {}).get(nodename, None)
            if singular is None:
                if nodename.endswith('s'):
                    singular = nodename[:-1]
                else:
                    singular = 'item'
            for item in data:
                self._to_xml_node(result, metadata, singular, item,
                                  used_prefixes)
        #TODO(bcwaldon): accomplish this without a type-check
        elif isinstance(data, dict):
            if not data:
                result.set(
                    constants.TYPE_ATTR,
                    constants.TYPE_DICT)
                return result
            # Keys declared as attributes go on the node itself; the rest
            # recurse into child nodes (sorted for determinism).
            attrs = metadata.get('attributes', {}).get(nodename, {})
            for k, v in sorted(data.items()):
                if k in attrs:
                    result.set(k, str(v))
                else:
                    self._to_xml_node(result, metadata, k, v,
                                      used_prefixes)
        elif data is None:
            result.set(constants.XSI_ATTR, 'true')
        else:
            # Scalar leaf: record the python type so deserialization can
            # round-trip it.
            if isinstance(data, bool):
                result.set(
                    constants.TYPE_ATTR,
                    constants.TYPE_BOOL)
            elif isinstance(data, int):
                result.set(
                    constants.TYPE_ATTR,
                    constants.TYPE_INT)
            elif isinstance(data, long):
                result.set(
                    constants.TYPE_ATTR,
                    constants.TYPE_LONG)
            elif isinstance(data, float):
                result.set(
                    constants.TYPE_ATTR,
                    constants.TYPE_FLOAT)
            LOG.debug(_("Data %(data)s type is %(type)s"),
                      {'data': data,
                       'type': type(data)})
            if isinstance(data, str):
                result.text = unicode(data, 'utf-8')
            else:
                result.text = unicode(data)
        return result

    def _create_link_nodes(self, xml_doc, links):
        # Append an <atom:link rel=... href=...> child for each link.
        for link in links:
            link_node = etree.SubElement(xml_doc, 'atom:link')
            link_node.set('rel', link['rel'])
            link_node.set('href', link['href'])
class TextDeserializer(ActionDispatcher):
    """Base class for turning a body string into a dictionary."""

    def deserialize(self, datastring, action='default'):
        """Deserialize *datastring* using the handler for *action*."""
        return self.dispatch(datastring, action=action)

    def default(self, datastring):
        # Unknown action: produce an empty result.
        return {}
class JSONDeserializer(TextDeserializer):
    """Deserializes JSON request bodies into dictionaries."""

    def _from_json(self, datastring):
        """Parse *datastring* as JSON, mapping parse errors to API errors."""
        try:
            parsed = jsonutils.loads(datastring)
        except ValueError:
            raise exception.MalformedRequestBody(
                reason=_("Cannot understand JSON"))
        return parsed

    def default(self, datastring):
        return {'body': self._from_json(datastring)}
class XMLDeserializer(TextDeserializer):
    """Deserializes a type-annotated XML document back into a dictionary."""

    def __init__(self, metadata=None):
        """XMLDeserializer constructor.

        :param metadata: information needed to deserialize xml into
                         a dictionary.
        """
        super(XMLDeserializer, self).__init__()
        self.metadata = metadata or {}
        xmlns = self.metadata.get('xmlns')
        if not xmlns:
            xmlns = constants.XML_NS_V20
        self.xmlns = xmlns

    def _get_key(self, tag):
        # Strip the '{namespace}' portion of an ElementTree tag.  Tags in
        # the default namespace come back bare; tags in a known extension
        # namespace come back as 'prefix:name'.
        tags = tag.split("}", 1)
        if len(tags) == 2:
            ns = tags[0][1:]
            bare_tag = tags[1]
            ext_ns = self.metadata.get(constants.EXT_NS, {})
            if ns == self.xmlns:
                return bare_tag
            for prefix, _ns in ext_ns.items():
                if ns == _ns:
                    return prefix + ":" + bare_tag
            # NOTE(review): a tag in an unrecognized namespace falls
            # through and implicitly returns None — confirm callers
            # tolerate that.
        else:
            return tag

    def _get_links(self, root_tag, node):
        """Extract atom link children into a '<root>_links' mapping."""
        link_nodes = node.findall(constants.ATOM_LINK_NOTATION)
        root_tag = self._get_key(node.tag)
        link_key = "%s_links" % root_tag
        link_list = []
        for link in link_nodes:
            link_list.append({'rel': link.get('rel'),
                              'href': link.get('href')})
            # Remove link node in order to avoid link node being
            # processed as an item in _from_xml_node
            node.remove(link)
        return link_list and {link_key: link_list} or {}

    def _from_xml(self, datastring):
        """Parse an XML document, raising MalformedRequestBody on bad XML."""
        if datastring is None:
            return None
        plurals = set(self.metadata.get('plurals', {}))
        try:
            node = etree.fromstring(datastring)
            root_tag = self._get_key(node.tag)
            links = self._get_links(root_tag, node)
            result = self._from_xml_node(node, plurals)
            # There is no case where root_tag = constants.VIRTUAL_ROOT_KEY
            # and links is not None because of the way data are serialized
            if root_tag == constants.VIRTUAL_ROOT_KEY:
                return result
            return dict({root_tag: result}, **links)
        except Exception as e:
            # Distinguish XML parse failures (client error) from anything
            # else (programming error, re-raised).
            parseError = False
            # Python2.7
            if (hasattr(etree, 'ParseError') and
                isinstance(e, getattr(etree, 'ParseError'))):
                parseError = True
            # Python2.6
            elif isinstance(e, expat.ExpatError):
                parseError = True
            if parseError:
                msg = _("Cannot understand XML")
                raise exception.MalformedRequestBody(reason=msg)
            else:
                raise

    def _from_xml_node(self, node, listnames):
        """Convert a minidom node to a simple Python type.

        :param listnames: list of XML node names whose subnodes should
                          be considered list items.
        """
        attrNil = node.get(str(etree.QName(constants.XSI_NAMESPACE, "nil")))
        attrType = node.get(str(etree.QName(
            self.metadata.get('xmlns'), "type")))
        if (attrNil and attrNil.lower() == 'true'):
            # xsi:nil="true" maps back to None.
            return None
        elif not len(node) and not node.text:
            # Empty element: the recorded type attribute distinguishes
            # empty dicts/lists from the empty string.
            if (attrType and attrType == constants.TYPE_DICT):
                return {}
            elif (attrType and attrType == constants.TYPE_LIST):
                return []
            else:
                return ''
        elif (len(node) == 0 and node.text):
            # Scalar leaf: convert back via the recorded python type.
            converters = {constants.TYPE_BOOL:
                          lambda x: x.lower() == 'true',
                          constants.TYPE_INT:
                          lambda x: int(x),
                          constants.TYPE_LONG:
                          lambda x: long(x),
                          constants.TYPE_FLOAT:
                          lambda x: float(x)}
            if attrType and attrType in converters:
                return converters[attrType](node.text)
            else:
                return node.text
        elif self._get_key(node.tag) in listnames:
            return [self._from_xml_node(n, listnames) for n in node]
        else:
            result = dict()
            for attr in node.keys():
                # Skip namespace declarations and the bookkeeping
                # attributes added during serialization.
                if (attr == 'xmlns' or
                    attr.startswith('xmlns:') or
                    attr == constants.XSI_ATTR or
                    attr == constants.TYPE_ATTR):
                    continue
                result[self._get_key(attr)] = node.get(attr)
            children = list(node)
            for child in children:
                result[self._get_key(child.tag)] = self._from_xml_node(
                    child, listnames)
            return result

    def default(self, datastring):
        return {'body': self._from_xml(datastring)}

    def __call__(self, datastring):
        # Adding a migration path to allow us to remove unnecessary classes
        return self.default(datastring)
# NOTE(maru): this class is duplicated from neutron.wsgi
class Serializer(object):
    """Serializes and deserializes dictionaries to certain MIME types."""

    def __init__(self, metadata=None, default_xmlns=None):
        """Create a serializer based on the given WSGI environment.

        'metadata' is an optional dict mapping MIME types to information
        needed to serialize a dictionary to that type.
        """
        self.metadata = metadata or {}
        self.default_xmlns = default_xmlns

    def _get_serialize_handler(self, content_type):
        """Return the serializer for *content_type*.

        :raises InvalidContentType: if the MIME type is unsupported.
        """
        handlers = {
            'application/json': JSONDictSerializer(),
            'application/xml': XMLDictSerializer(self.metadata),
        }
        try:
            return handlers[content_type]
        except KeyError:
            # Only a missing key means "unsupported type"; narrowed from
            # 'except Exception' so unrelated errors are not masked.
            raise exception.InvalidContentType(content_type=content_type)

    def serialize(self, data, content_type):
        """Serialize a dictionary into the specified content type."""
        return self._get_serialize_handler(content_type).serialize(data)

    def deserialize(self, datastring, content_type):
        """Deserialize a string to a dictionary.

        The string must be in the format of a supported MIME type.
        """
        return self.get_deserialize_handler(content_type).deserialize(
            datastring)

    def get_deserialize_handler(self, content_type):
        """Return the deserializer for *content_type*.

        :raises InvalidContentType: if the MIME type is unsupported.
        """
        handlers = {
            'application/json': JSONDeserializer(),
            'application/xml': XMLDeserializer(self.metadata),
        }
        try:
            return handlers[content_type]
        except KeyError:
            raise exception.InvalidContentType(content_type=content_type)
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2013, Stephen Finucane
# Author: Stephen Finucane <stephenfinucane@hotmail.com>
""" handlers.py: Handlers for all URIs in application """
from collections import OrderedDict
import json
import urllib, urllib2
import webapp2
import datasources.common as common
import datasources.course as course
import datasources.geolocation as geolocation
import datasources.scheduling as scheduling
import datasources.staff as staff
from datetime import datetime
class BaseHandler(webapp2.RequestHandler):
    """Base handler providing shared validation and JSON response helpers."""

    def validate_parameters(self, additional_params):
        """
        Validates that the <q> and <key> parameters, along with any other
        required params, are included in the request.

        @param additional_params: A list of required params other than <q>
        @type additional_params: List of Strings
        @return True if validation successful, else False
        """
        #TODO: validate key
        missing_params = []
        for param in ['q'] + additional_params:
            if self.request.get(param):
                continue
            missing_params.append(OrderedDict([
                ('error', 'missingParam'),
                ('details', param),
            ]))
        if not missing_params:
            return True
        #Generate an error response
        self.generate_error_response('Missing parameters', missing_params)
        return False

    def generate_response(self, data):
        """
        Generates a suitable json response to present to the end user

        @param data: The actual data
        @type data: List
        """
        timestamp = datetime.utcnow().isoformat("T") + "Z"
        response = OrderedDict()
        response['response'] = OrderedDict([
            ('api_version', common.API_VERSION),
            ('data_created', timestamp),
            ('data', data),
        ])
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(response))

    def generate_error_response(self, error_message, errors=None):
        """
        Generates a suitable json error response to present to the end user

        @param error_message: The error message to show to the end user
        @type error_message: String
        @param errors: Optional list of individual error records
        """
        if not errors:
            fallback = OrderedDict()
            fallback['error'] = 'generalError'
            fallback['details'] = error_message
            errors = [fallback]
        body = OrderedDict()
        body['error'] = OrderedDict([
            ('message', error_message),
            ('errors', errors),
        ])
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(body))
class DocumentationHandler(webapp2.RequestHandler):
    """ Documentation Services """

    def get(self):
        # Placeholder endpoint until real documentation is served.
        self.response.write("Documentation")
'''
Handles the following services:
Scheduling:
calendar Academic Calendar
timetable Semester Timetables
exam_timetable Exam Timetables
'''
class SchedulingHandler(BaseHandler):
    """ Scheduling Services """

    def get(self):
        """Handle timetable/calendar lookups, dispatched on request path."""
        if not self.validate_parameters([]):
            return  #Quit, since parameters are invalid
        #Parse the parameters from the URL
        query = self.request.get('q')
        try:
            result = {
                '/api/v1/timetable' : scheduling.semester_timetable,
                '/api/v1/calendar' : scheduling.calendar,
            }.get(self.request.path)(query)
            if not result:
                # Bug fix: must return here — previously fell through and
                # wrote a second (success) body after the error response.
                self.generate_error_response('An Error Occurred')
                return
            self.generate_response(result)
        except urllib2.URLError:
            self.generate_error_response('An Error Occured Contacting the Site')
        except IOError:
            self.generate_error_response('An Error Occured Contacting the Site')
        except Exception:
            # Top-level handler boundary: report rather than crash.
            self.generate_error_response('An Unknown Error Occured')
'''
Handles the following services:
Geolocation:
building List all buildings on campus or parse a building code
room Parse a room code
'''
class GeolocationHandler(BaseHandler):
    """ Geolocation Services """

    def get(self):
        """Handle building/room lookups, dispatched on request path."""
        if not self.validate_parameters([]):
            return  #Quit, since parameters are invalid
        #Parse the parameters from the URL
        query = self.request.get('q')
        result = {
            '/api/v1/building' : geolocation.building,
            '/api/v1/room' : geolocation.room,
        }.get(self.request.path)(query)
        if not result:
            # Bug fix: must return here — previously fell through and
            # wrote a second (success) body after the error response.
            self.generate_error_response('An Error Occurred')
            return
        self.generate_response(result)
'''
Handles the following services:
Course:
course Provides a brief overview of a course
module Provides a brief overview of a module
'''
class CourseHandler(BaseHandler):
    """ Course Services """

    def get(self):
        """Handle course/module overview lookups, dispatched on request path."""
        if not self.validate_parameters([]):
            return  #Quit, since parameters are invalid
        #Parse the parameters from the URL
        query = self.request.get('q')
        result = {
            '/api/v1/course' : course.course,
            '/api/v1/module' : course.module,
        }.get(self.request.path)(query)
        if not result:
            # Bug fix: must return here — previously fell through and
            # wrote a second (success) body after the error response.
            self.generate_error_response('An Error Occurred')
            return
        self.generate_response(result)
'''
Handles the following services:
Staff:
staff Provides a brief overview of a staff member
'''
class StaffHandler(BaseHandler):
    """ Staff Services """

    def get(self):
        """Handle staff lookups; query is '<first>,<last>'."""
        if not self.validate_parameters([]):
            return  #Quit, since parameters are invalid
        #Parse the parameters from the URL
        query = self.request.get('q')
        #Since name (query) should have been received in format "<first>,<last>",
        #we need to split it into two strings
        names = query.split(',')
        if len(names) < 2:
            # Robustness fix: a query without a comma previously raised
            # IndexError (HTTP 500); report a normal API error instead.
            self.generate_error_response('An Error Occurred')
            return
        result = {
            '/api/v1/staff' : staff.staff,
        }.get(self.request.path)(names[0], names[1])
        if not result:
            # Bug fix: must return here — previously fell through and
            # wrote a second (success) body after the error response.
            self.generate_error_response('An Error Occurred')
            return
        self.generate_response(result)
| |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for scm.py."""
import logging
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from testing_support import fake_repos
from testing_support.super_mox import SuperMoxTestBase
import scm
import subprocess2
# Access to a protected member XXX of a client class
# pylint: disable=W0212
class BaseTestCase(SuperMoxTestBase):
    """Shared assertions for the scm.py test cases."""

    # Like unittest's assertRaises, but checks for Gclient.Error.
    def assertRaisesError(self, msg, fn, *args, **kwargs):
        """Assert fn(*args, **kwargs) raises gclient_utils.Error with msg."""
        try:
            fn(*args, **kwargs)
        except scm.gclient_utils.Error as e:
            # 'as e' (Py2.6+) replaces the old 'Error, e' comma syntax,
            # matching the 'except ... as' style used elsewhere in the file.
            self.assertEquals(e.args[0], msg)
        else:
            self.fail('%s not raised' % msg)
class BaseSCMTestCase(BaseTestCase):
    """Stubs out process-spawning helpers so no real git/svn binary runs."""

    def setUp(self):
        BaseTestCase.setUp(self)
        # Mock the call/filter helpers and subprocess2 entry points used
        # by the GIT and SVN wrappers under test.
        self.mox.StubOutWithMock(scm.gclient_utils, 'CheckCallAndFilter')
        self.mox.StubOutWithMock(scm.gclient_utils, 'CheckCallAndFilterAndHeader')
        self.mox.StubOutWithMock(subprocess2, 'Popen')
        self.mox.StubOutWithMock(subprocess2, 'communicate')
class RootTestCase(BaseSCMTestCase):
    """Guards the public surface of the scm module itself."""

    def testMembersChanged(self):
        self.mox.ReplayAll()
        # Expected top-level names of the scm module; keep sorted.
        members = [
            'cStringIO',
            'determine_scm',
            'ElementTree',
            'gclient_utils',
            'GenFakeDiff',
            'GetCasedPath',
            'GIT',
            'glob',
            'logging',
            'only_int',
            'os',
            're',
            'subprocess2',
            'SVN',
            'sys',
            'tempfile',
            'time',
            'ValidateEmail',
        ]
        # If this test fails, you should add the relevant test.
        self.compareMembers(scm, members)
class GitWrapperTestCase(BaseSCMTestCase):
    """Mocked tests for the scm.GIT wrapper (no real git invoked)."""

    def testMembersChanged(self):
        # Expected public members of scm.GIT; keep sorted.
        members = [
            'AssertVersion',
            'Capture',
            'CaptureStatus',
            'current_version',
            'FetchUpstreamTuple',
            'GenerateDiff',
            'GetBlessedSha1ForSvnRev',
            'GetBranch',
            'GetBranchRef',
            'GetCheckoutRoot',
            'GetDifferentFiles',
            'GetEmail',
            'GetGitSvnHeadRev',
            'GetPatchName',
            'GetSha1ForSvnRev',
            'GetSVNBranch',
            'GetUpstreamBranch',
            'IsGitSvn',
            'IsValidRevision',
            'MatchSvnGlob',
            'ParseGitSvnSha1',
            'ShortBranchName',
        ]
        # If this test fails, you should add the relevant test.
        self.compareMembers(scm.GIT, members)

    def testGetEmail(self):
        # GetEmail should simply return the configured user.email.
        self.mox.StubOutWithMock(scm.GIT, 'Capture')
        scm.GIT.Capture(['config', 'user.email'], cwd=self.root_dir
                        ).AndReturn('mini@me.com')
        self.mox.ReplayAll()
        self.assertEqual(scm.GIT.GetEmail(self.root_dir), 'mini@me.com')

    def testMatchSvnGlob(self):
        # Non-glob refspec: exact branch mapping.
        self.assertEquals(scm.GIT.MatchSvnGlob(
            'svn://svn.chromium.org/chrome/trunk/src',
            'svn://svn.chromium.org/chrome',
            'trunk/src:refs/remotes/origin/trunk',
            False), 'refs/remotes/origin/trunk')
        # Glob refspec: wildcard expansion of the branch name.
        self.assertEquals(scm.GIT.MatchSvnGlob(
            'https://v8.googlecode.com/svn/branches/bleeding_edge',
            'https://v8.googlecode.com/svn',
            'branches/*:refs/remotes/*',
            True), 'refs/remotes/bleeding_edge')
class RealGitTest(fake_repos.FakeReposTestBase):
    """Exercises scm.GIT against a real checkout from the fake git repo."""

    def setUp(self):
        super(RealGitTest, self).setUp()
        self.enabled = self.FAKE_REPOS.set_up_git()
        if self.enabled:
            self.clone_dir = scm.os.path.join(self.FAKE_REPOS.git_root, 'repo_1')

    def testIsValidRevision(self):
        if not self.enabled:
            return
        # Sha1's are [0-9a-z]{32}, so starting with a 'z' or 'r' should
        # always fail.
        for bogus in ('zebra', 'r123456'):
            self.assertFalse(
                scm.GIT.IsValidRevision(cwd=self.clone_dir, rev=bogus))
        # Valid cases
        for good in (self.githash('repo_1', 1), 'HEAD'):
            self.assertTrue(
                scm.GIT.IsValidRevision(cwd=self.clone_dir, rev=good))
class RealGitSvnTest(fake_repos.FakeReposTestBase):
    """Exercises the git-svn helpers against a real git-svn clone."""

    def setUp(self):
        super(RealGitSvnTest, self).setUp()
        # Needs both fake repos; tests become no-ops when either is missing.
        self.enabled = self.FAKE_REPOS.set_up_git() and self.FAKE_REPOS.set_up_svn()
        if self.enabled:
            self.tree_name = 'git-svn'
            self.svn_url = scm.os.path.join(self.FAKE_REPOS.svn_base, 'trunk')
            self.clone_dir = scm.os.path.join(self.FAKE_REPOS.git_root,
                                              self.tree_name)
            scm.os.makedirs(self.clone_dir)
            self._capture(['svn', 'clone', '-q', '-q', self.svn_url, self.clone_dir])
            # git rev-list gives revisions in reverse chronological order.
            hashes = reversed(self._capture(['rev-list', 'HEAD']).splitlines())
            # We insert a null value at 0 to do 1-based indexing, not 0-based, as SVN
            # revisions are 1-based (i.e. they start at r1, not r0).
            self.git_hashes = ([None] + list(hashes))

    def tearDown(self):
        scm.gclient_utils.rmtree(self.clone_dir)

    def _capture(self, cmd, **kwargs):
        # Run a git command in the clone unless a cwd is supplied.
        kwargs.setdefault('cwd', self.clone_dir)
        return scm.GIT.Capture(cmd, **kwargs)

    def testGetGitSvnHeadRev(self):
        if not self.enabled:
            return
        self.assertEquals(scm.GIT.GetGitSvnHeadRev(cwd=self.clone_dir), 2)
        # Rewinding one commit should drop the reported svn revision by one.
        self._capture(['reset', '--hard', 'HEAD^'])
        self.assertEquals(scm.GIT.GetGitSvnHeadRev(cwd=self.clone_dir), 1)

    def testParseGitSvnSha1(self):
        test_sha1 = 'a5c63ce8671922e5c59c0dea49ef4f9d4a3020c9'
        expected_output = test_sha1 + '\n'
        # Cygwin git-svn 1.7.9 prints extra escape sequences when run under
        # TERM=xterm
        cygwin_output = test_sha1 + '\n\033[?1034h'
        self.assertEquals(scm.GIT.ParseGitSvnSha1(expected_output), test_sha1)
        self.assertEquals(scm.GIT.ParseGitSvnSha1(cygwin_output), test_sha1)

    def testGetGetSha1ForSvnRev(self):
        if not self.enabled:
            return
        # svn revision N must map to the Nth git commit (1-based).
        self.assertEquals(scm.GIT.GetSha1ForSvnRev(cwd=self.clone_dir, rev=1),
                          self.git_hashes[1])
        self.assertEquals(scm.GIT.GetSha1ForSvnRev(cwd=self.clone_dir, rev=2),
                          self.git_hashes[2])
class SVNTestCase(BaseSCMTestCase):
    """Mocked tests for the scm.SVN wrapper (no real svn invoked)."""

    def setUp(self):
        BaseSCMTestCase.setUp(self)
        self.mox.StubOutWithMock(scm.SVN, 'Capture')
        self.url = self.SvnUrl()

    def testMembersChanged(self):
        self.mox.ReplayAll()
        # Expected public members of scm.SVN; keep sorted.
        members = [
            'AssertVersion',
            'Capture',
            'CaptureLocalInfo',
            'CaptureRemoteInfo',
            'CaptureRevision',
            'CaptureStatus',
            'current_version',
            'DiffItem',
            'GenerateDiff',
            'GetCheckoutRoot',
            'GetEmail',
            'GetFileProperty',
            'IsMoved',
            'IsMovedInfo',
            'IsValidRevision',
            'ReadSimpleAuth',
            'Revert',
            'RunAndGetFileList',
        ]
        # If this test fails, you should add the relevant test.
        self.compareMembers(scm.SVN, members)

    def testGetCheckoutRoot(self):
        # pylint: disable=E1103
        self.mox.StubOutWithMock(scm.SVN, '_CaptureInfo')
        self.mox.StubOutWithMock(scm, 'GetCasedPath')
        scm.os.path.abspath = lambda x: x
        scm.GetCasedPath = lambda x: x
        # Walk up from foo/bar: foo/bar and foo share the same repo
        # subtree, while root_dir points somewhere else entirely, so the
        # checkout root must be reported as root_dir + '/foo'.
        scm.SVN._CaptureInfo([], self.root_dir + '/foo/bar').AndReturn({
            'Repository Root': 'svn://svn.chromium.org/chrome',
            'URL': 'svn://svn.chromium.org/chrome/trunk/src',
        })
        scm.SVN._CaptureInfo([], self.root_dir + '/foo').AndReturn({
            'Repository Root': 'svn://svn.chromium.org/chrome',
            'URL': 'svn://svn.chromium.org/chrome/trunk',
        })
        scm.SVN._CaptureInfo([], self.root_dir).AndReturn({
            'Repository Root': 'svn://svn.chromium.org/chrome',
            'URL': 'svn://svn.chromium.org/chrome/trunk/tools/commit-queue/workdir',
        })
        self.mox.ReplayAll()
        self.assertEquals(scm.SVN.GetCheckoutRoot(self.root_dir + '/foo/bar'),
                          self.root_dir + '/foo')

    def testGetFileInfo(self):
        # 'svn info --xml' output for a file scheduled for add-with-history.
        xml_text = r"""<?xml version="1.0"?>
<info>
<entry kind="file" path="%s" revision="14628">
<url>http://src.chromium.org/svn/trunk/src/chrome/app/d</url>
<repository><root>http://src.chromium.org/svn</root></repository>
<wc-info>
<schedule>add</schedule>
<depth>infinity</depth>
<copy-from-url>http://src.chromium.org/svn/trunk/src/chrome/app/DEPS</copy-from-url>
<copy-from-rev>14628</copy-from-rev>
<checksum>369f59057ba0e6d9017e28f8bdfb1f43</checksum>
</wc-info>
</entry>
</info>
""" % self.url
        scm.SVN.Capture(['info', '--xml', self.url], None).AndReturn(xml_text)
        expected = {
            'URL': 'http://src.chromium.org/svn/trunk/src/chrome/app/d',
            'UUID': None,
            'Repository Root': 'http://src.chromium.org/svn',
            'Schedule': 'add',
            'Copied From URL':
                'http://src.chromium.org/svn/trunk/src/chrome/app/DEPS',
            'Copied From Rev': '14628',
            'Path': self.url,
            'Revision': 14628,
            'Node Kind': 'file',
        }
        self.mox.ReplayAll()
        file_info = scm.SVN._CaptureInfo([self.url], None)
        self.assertEquals(sorted(file_info.items()), sorted(expected.items()))

    def testCaptureInfo(self):
        # 'svn info --xml' output for a normal directory checkout.
        xml_text = """<?xml version="1.0"?>
<info>
<entry
   kind="dir"
   path="."
   revision="35">
<url>%s</url>
<repository>
<root>%s</root>
<uuid>7b9385f5-0452-0410-af26-ad4892b7a1fb</uuid>
</repository>
<wc-info>
<schedule>normal</schedule>
<depth>infinity</depth>
</wc-info>
<commit
   revision="35">
<author>maruel</author>
<date>2008-12-04T20:12:19.685120Z</date>
</commit>
</entry>
</info>
""" % (self.url, self.root_dir)
        scm.SVN.Capture(['info', '--xml', self.url], None).AndReturn(xml_text)
        self.mox.ReplayAll()
        file_info = scm.SVN._CaptureInfo([self.url], None)
        expected = {
            'URL': self.url,
            'UUID': '7b9385f5-0452-0410-af26-ad4892b7a1fb',
            'Revision': 35,
            'Repository Root': self.root_dir,
            'Schedule': 'normal',
            'Copied From URL': None,
            'Copied From Rev': None,
            'Path': '.',
            'Node Kind': 'directory',
        }
        self.assertEqual(file_info, expected)

    def testCaptureStatus(self):
        # 'svn status --xml' covering the interesting item states:
        # unversioned, modified, copied-add, prop+text modified, conflicted.
        text = r"""<?xml version="1.0"?>
<status>
<target path=".">
<entry path="unversionned_file.txt">
<wc-status props="none" item="unversioned"></wc-status>
</entry>
<entry path="build\internal\essential.vsprops">
<wc-status props="normal" item="modified" revision="14628">
<commit revision="13818">
<author>ajwong@chromium.org</author>
<date>2009-04-16T00:42:06.872358Z</date>
</commit>
</wc-status>
</entry>
<entry path="chrome\app\d">
<wc-status props="none" copied="true" tree-conflicted="true" item="added">
</wc-status>
</entry>
<entry path="chrome\app\DEPS">
<wc-status props="modified" item="modified" revision="14628">
<commit revision="1279">
<author>brettw@google.com</author>
<date>2008-08-23T17:16:42.090152Z</date>
</commit>
</wc-status>
</entry>
<entry path="scripts\master\factory\gclient_factory.py">
<wc-status props="normal" item="conflicted" revision="14725">
<commit revision="14633">
<author>nsylvain@chromium.org</author>
<date>2009-04-27T19:37:17.977400Z</date>
</commit>
</wc-status>
</entry>
</target>
</status>
"""
        scm.SVN.Capture(['status', '--xml'], '.').AndReturn(text)
        self.mox.ReplayAll()
        info = scm.SVN.CaptureStatus(None, '.')
        expected = [
            ('?      ', 'unversionned_file.txt'),
            ('M      ', 'build\\internal\\essential.vsprops'),
            ('A  +   ', 'chrome\\app\\d'),
            ('MM     ', 'chrome\\app\\DEPS'),
            ('C      ', 'scripts\\master\\factory\\gclient_factory.py'),
        ]
        self.assertEquals(sorted(info), sorted(expected))

    def testCaptureStatusEmpty(self):
        # A clean target yields no entries at all.
        text = r"""<?xml version="1.0"?>
<status>
<target
   path="perf">
</target>
</status>"""
        scm.SVN.Capture(['status', '--xml'], None).AndReturn(text)
        self.mox.ReplayAll()
        info = scm.SVN.CaptureStatus(None, None)
        self.assertEquals(info, [])
class RealSvnTest(fake_repos.FakeReposTestBase):
    # Tests that work with a checkout.

    def setUp(self):
        super(RealSvnTest, self).setUp()
        self.enabled = self.FAKE_REPOS.set_up_svn()
        if self.enabled:
            self.svn_root = scm.os.path.join(self.root_dir, 'base')
            scm.SVN.Capture(
                ['checkout', self.svn_base + 'trunk/third_party', 'base'],
                cwd=self.root_dir)
            self.tree = self.mangle_svn_tree(('trunk/third_party@-1', ''),)

    def _capture(self, cmd, **kwargs):
        # Run an svn command inside the checkout unless a cwd is supplied.
        kwargs.setdefault('cwd', self.svn_root)
        return scm.SVN.Capture(cmd, **kwargs)

    def testCheckout(self):
        if not self.enabled:
            return
        # Checkout and verify the tree.
        self.assertTree(self.tree, self.svn_root)

    def testIsValidRevision(self):
        if not self.enabled:
            return
        url_at_rev = self.svn_base + 'trunk/third_party@%s'
        # Invalid or non-existent.
        self.assertFalse(scm.SVN.IsValidRevision('url://totally_invalid/trunk/foo'))
        self.assertFalse(scm.SVN.IsValidRevision(url_at_rev % 0))
        self.assertFalse(scm.SVN.IsValidRevision(url_at_rev % 123))
        # Valid.
        self.assertTrue(scm.SVN.IsValidRevision(url_at_rev % 1))
        self.assertTrue(scm.SVN.IsValidRevision(url_at_rev % 2))
        self.assertTrue(scm.SVN.IsValidRevision(url_at_rev % 'HEAD'))

    def testRevert(self):
        if not self.enabled:
            return
        # Mess around and make sure revert works for all corner cases.
        # - svn add a file
        # - svn add a file and delete it
        # - Delete a file
        # - svn delete a file
        # - svn move a directory and svn rename files in it
        # - add a directory tree.
        def join(*args):
            return scm.os.path.join(self.svn_root, *args)
        self._capture(['move', 'foo', 'foo2'])
        self._capture(
            ['move',
             scm.os.path.join('foo2', 'origin'),
             scm.os.path.join('foo2', 'o')])
        scm.os.remove(join('origin'))
        self._capture(['propset', 'foo', 'bar', join('prout', 'origin')])
        fake_repos.gclient_utils.rmtree(join('prout'))
        with open(join('faa'), 'w') as f:
            f.write('eh')
        with open(join('faala'), 'w') as f:
            f.write('oh')
        self._capture(['add', join('faala')])
        added_and_removed = join('added_and_removed')
        with open(added_and_removed, 'w') as f:
            f.write('oh')
        self._capture(['add', added_and_removed])
        scm.os.remove(added_and_removed)
        # Make sure a tree of directories can be removed.
        scm.os.makedirs(join('new_dir', 'subdir'))
        with open(join('new_dir', 'subdir', 'newfile'), 'w') as f:
            f.write('ah!')
        self._capture(['add', join('new_dir')])
        self._capture(['add', join('new_dir', 'subdir')])
        self._capture(['add', join('new_dir', 'subdir', 'newfile')])
        # A random file in an added directory confuses svn.
        scm.os.makedirs(join('new_dir2', 'subdir'))
        with open(join('new_dir2', 'subdir', 'newfile'), 'w') as f:
            f.write('ah!')
        self._capture(['add', join('new_dir2')])
        self._capture(['add', join('new_dir2', 'subdir')])
        self._capture(['add', join('new_dir2', 'subdir', 'newfile')])
        with open(join('new_dir2', 'subdir', 'unversionedfile'), 'w') as f:
            f.write('unadded file!')
        # Revert must restore the pristine checkout regardless of the
        # mess created above.
        scm.SVN.Revert(self.svn_root)
        self._capture(['update', '--revision', 'base'])
        self.assertTree(self.tree, self.svn_root)
        # Asserting the tree is not sufficient, svn status must come out clear too.
        self.assertEquals('', self._capture(['status']))
if __name__ == '__main__':
  # Passing -v on the command line turns on debug logging for the test run.
  if '-v' in sys.argv:
    logging.basicConfig(level=logging.DEBUG)
  unittest.main()
# vim: ts=2:sw=2:tw=80:et:
| |
# flake8: noqa
# Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Initial base for Gnocchi 1.0.0
Revision ID: 1c98ac614015
Revises:
Create Date: 2015-04-27 16:05:13.530625
"""
# revision identifiers, used by Alembic.
revision = '1c98ac614015'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
import gnocchi.indexer.sqlalchemy_base
def upgrade():
    """Create the initial Gnocchi 1.0.0 schema.

    Layout: a polymorphic ``resource`` base table plus one satellite table
    per concrete resource type (sharing the resource id via an ON DELETE
    CASCADE foreign key), matching ``*_history`` tables keyed by revision,
    and the ``archive_policy``/``archive_policy_rule``/``metric`` tables.
    Every table is created with InnoDB/utf8 options for MySQL.
    """
    # Base table shared by all resource types.
    op.create_table('resource',
    sa.Column('type', sa.Enum('generic', 'instance', 'swift_account', 'volume', 'ceph_account', 'network', 'identity', 'ipmi', 'stack', 'image', name='resource_type_enum'), nullable=False),
    sa.Column('created_by_user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('created_by_project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('started_at', gnocchi.indexer.sqlalchemy_base.PreciseTimestamp(), nullable=False),
    sa.Column('revision_start', gnocchi.indexer.sqlalchemy_base.PreciseTimestamp(), nullable=False),
    sa.Column('ended_at', gnocchi.indexer.sqlalchemy_base.PreciseTimestamp(), nullable=True),
    sa.Column('user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_resource_id', 'resource', ['id'], unique=False)
    # Archive policies and the rules that map metric names onto them.
    op.create_table('archive_policy',
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('back_window', sa.Integer(), nullable=False),
    sa.Column('definition', gnocchi.indexer.sqlalchemy_base.ArchivePolicyDefinitionType(), nullable=False),
    sa.Column('aggregation_methods', gnocchi.indexer.sqlalchemy_base.SetType(), nullable=False),
    sa.PrimaryKeyConstraint('name'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_archive_policy_name', 'archive_policy', ['name'], unique=False)
    # Concrete resource-type tables; each row extends a 'resource' row.
    op.create_table('volume',
    sa.Column('display_name', sa.String(length=255), nullable=False),
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_volume_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_volume_id', 'volume', ['id'], unique=False)
    op.create_table('instance',
    sa.Column('flavor_id', sa.Integer(), nullable=False),
    sa.Column('image_ref', sa.String(length=255), nullable=False),
    sa.Column('host', sa.String(length=255), nullable=False),
    sa.Column('display_name', sa.String(length=255), nullable=False),
    sa.Column('server_group', sa.String(length=255), nullable=True),
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_instance_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_instance_id', 'instance', ['id'], unique=False)
    op.create_table('stack',
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_stack_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_stack_id', 'stack', ['id'], unique=False)
    op.create_table('archive_policy_rule',
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('archive_policy_name', sa.String(length=255), nullable=False),
    sa.Column('metric_pattern', sa.String(length=255), nullable=False),
    sa.ForeignKeyConstraint(['archive_policy_name'], ['archive_policy.name'], name="fk_archive_policy_rule_archive_policy_name_archive_policy_name", ondelete='RESTRICT'),
    sa.PrimaryKeyConstraint('name'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_archive_policy_rule_name', 'archive_policy_rule', ['name'], unique=False)
    op.create_table('swift_account',
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_swift_account_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_swift_account_id', 'swift_account', ['id'], unique=False)
    op.create_table('ceph_account',
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_ceph_account_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_ceph_account_id', 'ceph_account', ['id'], unique=False)
    op.create_table('ipmi',
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_ipmi_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_ipmi_id', 'ipmi', ['id'], unique=False)
    op.create_table('image',
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('container_format', sa.String(length=255), nullable=False),
    sa.Column('disk_format', sa.String(length=255), nullable=False),
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_image_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_image_id', 'image', ['id'], unique=False)
    # History of past revisions of a resource, keyed by an integer revision.
    op.create_table('resource_history',
    sa.Column('type', sa.Enum('generic', 'instance', 'swift_account', 'volume', 'ceph_account', 'network', 'identity', 'ipmi', 'stack', 'image', name='resource_type_enum'), nullable=False),
    sa.Column('created_by_user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('created_by_project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('started_at', gnocchi.indexer.sqlalchemy_base.PreciseTimestamp(), nullable=False),
    sa.Column('revision_start', gnocchi.indexer.sqlalchemy_base.PreciseTimestamp(), nullable=False),
    sa.Column('ended_at', gnocchi.indexer.sqlalchemy_base.PreciseTimestamp(), nullable=True),
    sa.Column('user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.Column('revision_end', gnocchi.indexer.sqlalchemy_base.PreciseTimestamp(), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_resource_history_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_resource_history_id', 'resource_history', ['id'], unique=False)
    op.create_table('identity',
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_identity_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_identity_id', 'identity', ['id'], unique=False)
    op.create_table('network',
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_network_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_network_id', 'network', ['id'], unique=False)
    # Metrics reference an archive policy (RESTRICT) and optionally a
    # resource (CASCADE); (resource_id, name) pairs are unique.
    op.create_table('metric',
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
    sa.Column('archive_policy_name', sa.String(length=255), nullable=False),
    sa.Column('created_by_user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('created_by_project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('resource_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
    sa.Column('name', sa.String(length=255), nullable=True),
    sa.ForeignKeyConstraint(['archive_policy_name'], ['archive_policy.name'], name="fk_metric_archive_policy_name_archive_policy_name", ondelete='RESTRICT'),
    sa.ForeignKeyConstraint(['resource_id'], ['resource.id'], name="fk_metric_resource_id_resource_id", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('resource_id', 'name', name='uniq_metric0resource_id0name'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_metric_id', 'metric', ['id'], unique=False)
    # Per-type history tables extend 'resource_history' rows by revision.
    op.create_table('identity_history',
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_identity_history_resource_history_revision", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_identity_history_revision', 'identity_history', ['revision'], unique=False)
    op.create_table('instance_history',
    sa.Column('flavor_id', sa.Integer(), nullable=False),
    sa.Column('image_ref', sa.String(length=255), nullable=False),
    sa.Column('host', sa.String(length=255), nullable=False),
    sa.Column('display_name', sa.String(length=255), nullable=False),
    sa.Column('server_group', sa.String(length=255), nullable=True),
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_instance_history_resource_history_revision", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_instance_history_revision', 'instance_history', ['revision'], unique=False)
    op.create_table('network_history',
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_network_history_resource_history_revision", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_network_history_revision', 'network_history', ['revision'], unique=False)
    op.create_table('swift_account_history',
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_swift_account_history_resource_history_revision", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_swift_account_history_revision', 'swift_account_history', ['revision'], unique=False)
    op.create_table('ceph_account_history',
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_ceph_account_history_resource_history_revision", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_ceph_account_history_revision', 'ceph_account_history', ['revision'], unique=False)
    op.create_table('ipmi_history',
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_ipmi_history_resource_history_revision", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_ipmi_history_revision', 'ipmi_history', ['revision'], unique=False)
    op.create_table('image_history',
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('container_format', sa.String(length=255), nullable=False),
    sa.Column('disk_format', sa.String(length=255), nullable=False),
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_image_history_resource_history_revision", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_image_history_revision', 'image_history', ['revision'], unique=False)
    op.create_table('stack_history',
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_stack_history_resource_history_revision", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_stack_history_revision', 'stack_history', ['revision'], unique=False)
    op.create_table('volume_history',
    sa.Column('display_name', sa.String(length=255), nullable=False),
    sa.Column('revision', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_volume_history_resource_history_revision", ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('revision'),
    mysql_charset='utf8',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_volume_history_revision', 'volume_history', ['revision'], unique=False)
| |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility classes for serialization"""
import struct
# Format of a header for a struct, array or union.
HEADER_STRUCT = struct.Struct("<II")
# Format for a pointer.
POINTER_STRUCT = struct.Struct("<Q")
def Flatten(value):
  """Flattens nested lists/tuples into a single flat list.

  A non-list/tuple value is wrapped in a one-item list. For example,
  (1, 2, [3, 4, ('56', '7')]) becomes [1, 2, 3, 4, '56', '7'] and
  1 becomes [1].
  """
  if not isinstance(value, (list, tuple)):
    return [value]
  flattened = []
  for element in value:
    flattened.extend(Flatten(element))
  return flattened
class SerializationException(Exception):
  """Error when trying to serialize a struct."""
  pass
class DeserializationException(Exception):
  """Error when trying to deserialize a struct."""
  pass
class DeserializationContext(object):
  """Abstract interface tracking position while decoding a serialized message.

  Concrete implementations account for claimed handles and memory regions
  and hand out offset-shifted sub-contexts for nested objects.
  """
  def ClaimHandle(self, handle):
    # Returns the handle at the given index; implementations may enforce
    # in-order access.
    raise NotImplementedError()
  def ClaimMemory(self, start, size):
    # Marks [start, start+size) of the buffer as consumed.
    raise NotImplementedError()
  def GetSubContext(self, offset):
    # Returns a context viewing the buffer starting at |offset|.
    raise NotImplementedError()
  def IsInitialContext(self):
    # True only for the root (whole-message) context.
    raise NotImplementedError()
class RootDeserializationContext(DeserializationContext):
  """Context over an entire incoming message.

  Handles and memory regions must be claimed in strictly increasing order;
  going backward indicates a malformed message and raises
  DeserializationException.
  """
  def __init__(self, data, handles):
    # Normalize to a read-only buffer view over the raw bytes.
    self.data = data if isinstance(data, buffer) else buffer(data)
    self._handles = handles
    self._next_handle = 0
    self._next_memory = 0
  def ClaimHandle(self, handle):
    if handle >= self._next_handle:
      self._next_handle = handle + 1
      return self._handles[handle]
    raise DeserializationException('Accessing handles out of order.')
  def ClaimMemory(self, start, size):
    if start < self._next_memory:
      raise DeserializationException('Accessing buffer out of order.')
    self._next_memory = start + size
  def GetSubContext(self, offset):
    return _ChildDeserializationContext(self, offset)
  def IsInitialContext(self):
    return True
class _ChildDeserializationContext(DeserializationContext):
  """Context over a sub-region of a parent context's buffer.

  Every claim is forwarded to the parent, shifted by this context's offset.
  """
  def __init__(self, parent, offset):
    self._parent = parent
    self._offset = offset
    self.data = buffer(parent.data, offset)
  def ClaimHandle(self, handle):
    return self._parent.ClaimHandle(handle)
  def ClaimMemory(self, start, size):
    return self._parent.ClaimMemory(start + self._offset, size)
  def GetSubContext(self, offset):
    return self._parent.GetSubContext(offset + self._offset)
  def IsInitialContext(self):
    return False
class Serialization(object):
  """
  Helper class to serialize/deserialize a struct.

  Caches the struct.Struct format and filtered field groups per wire
  version so older/newer messages can be decoded.
  """
  def __init__(self, groups):
    self.version = _GetVersion(groups)
    self._groups = groups
    main_struct = _GetStruct(groups)
    # Total serialized size: header (size + version) plus the packed fields.
    self.size = HEADER_STRUCT.size + main_struct.size
    self._struct_per_version = {
        self.version: main_struct,
    }
    self._groups_per_version = {
        self.version: groups,
    }
  def _GetMainStruct(self):
    # Struct for the newest known version.
    return self._GetStruct(self.version)
  def _GetGroups(self, version):
    # If asking for a version greater than the last known.
    version = min(version, self.version)
    if version not in self._groups_per_version:
      self._groups_per_version[version] = _FilterGroups(self._groups, version)
    return self._groups_per_version[version]
  def _GetStruct(self, version):
    # If asking for a version greater than the last known.
    version = min(version, self.version)
    if version not in self._struct_per_version:
      self._struct_per_version[version] = _GetStruct(self._GetGroups(version))
    return self._struct_per_version[version]
  def Serialize(self, obj, handle_offset):
    """
    Serialize the given obj. handle_offset is the first value to use when
    encoding handles. Returns (data, handles).
    """
    handles = []
    data = bytearray(self.size)
    HEADER_STRUCT.pack_into(data, 0, self.size, self.version)
    position = HEADER_STRUCT.size
    to_pack = []
    for group in self._groups:
      # Pad up to the group's alignment before packing it.
      position = position + NeededPaddingForAlignment(position,
                                                      group.GetAlignment())
      (entry, new_handles) = group.Serialize(
          obj,
          len(data) - position,
          data,
          handle_offset + len(handles))
      to_pack.extend(Flatten(entry))
      handles.extend(new_handles)
      position = position + group.GetByteSize()
    self._GetMainStruct().pack_into(data, HEADER_STRUCT.size, *to_pack)
    return (data, handles)
  def Deserialize(self, fields, context):
    """Decodes context.data into the |fields| dict (updated in place)."""
    if len(context.data) < HEADER_STRUCT.size:
      raise DeserializationException(
          'Available data too short to contain header.')
    (size, version) = HEADER_STRUCT.unpack_from(context.data)
    if len(context.data) < size or size < HEADER_STRUCT.size:
      raise DeserializationException('Header size is incorrect.')
    if context.IsInitialContext():
      context.ClaimMemory(0, size)
    version_struct = self._GetStruct(version)
    entities = version_struct.unpack_from(context.data, HEADER_STRUCT.size)
    filtered_groups = self._GetGroups(version)
    # A known version must match its struct size exactly; an unknown (newer)
    # version may be larger, but never smaller.
    if ((version <= self.version and
         size != version_struct.size + HEADER_STRUCT.size) or
        size < version_struct.size + HEADER_STRUCT.size):
      raise DeserializationException('Struct size in incorrect.')
    position = HEADER_STRUCT.size
    enties_index = 0
    for group in filtered_groups:
      position = position + NeededPaddingForAlignment(position,
                                                      group.GetAlignment())
      # A group may span several struct entities (multi-char type code).
      enties_count = len(group.GetTypeCode())
      if enties_count == 1:
        value = entities[enties_index]
      else:
        value = tuple(entities[enties_index:enties_index+enties_count])
      fields.update(group.Deserialize(value, context.GetSubContext(position)))
      position += group.GetByteSize()
      enties_index += enties_count
def NeededPaddingForAlignment(value, alignment=8):
  """Returns the padding necessary to align value with the given alignment."""
  remainder = value % alignment
  if remainder:
    return alignment - remainder
  return 0
def _GetVersion(groups):
if not len(groups):
return 0
return max([x.GetMaxVersion() for x in groups])
def _FilterGroups(groups, version):
return [group.Filter(version) for
group in groups if group.GetMinVersion() <= version]
def _GetStruct(groups):
  """Builds a little-endian struct.Struct covering every group.

  Pad bytes ('x') are inserted so each group starts on its required
  alignment, and the total size is padded out to the default (8-byte)
  alignment.
  """
  format_parts = ['<']
  offset = 0
  for group in groups:
    pad = NeededPaddingForAlignment(offset, group.GetAlignment())
    if pad:
      format_parts.append('x' * pad)
      offset += pad
    format_parts.append(group.GetTypeCode())
    offset += group.GetByteSize()
  tail_pad = NeededPaddingForAlignment(offset)
  if tail_pad:
    format_parts.append('x' * tail_pad)
  return struct.Struct(''.join(format_parts))
class UnionSerializer(object):
  """
  Helper class to serialize/deserialize a union.

  Wire format: an 8-byte header (size, tag) followed by 8 bytes holding
  either the value itself or a 64-bit pointer to a nested union.
  """
  def __init__(self, fields):
    # Map field index (the union tag) to its field descriptor.
    self._fields = {field.index: field for field in fields}
  def SerializeInline(self, union, handle_offset):
    """Serializes |union| for embedding; returns ((16, tag, entry, data), handles)."""
    data = bytearray()
    field = self._fields[union.tag]
    # If the union value is a simple type or a nested union, it is returned as
    # entry.
    # Otherwise, the serialized value is appended to data and the value of entry
    # is -1. The caller will need to set entry to the location where the
    # caller will append data.
    (entry, handles) = field.field_type.Serialize(
        union.data, -1, data, handle_offset)
    # If the value contained in the union is itself a union, we append its
    # serialized value to data and set entry to -1. The caller will need to set
    # entry to the location where the caller will append data.
    if field.field_type.IsUnion():
      nested_union = bytearray(16)
      HEADER_STRUCT.pack_into(nested_union, 0, entry[0], entry[1])
      POINTER_STRUCT.pack_into(nested_union, 8, entry[2])
      data = nested_union + data
      # Since we do not know where the caller will append the nested union,
      # we set entry to an invalid value and let the caller figure out the right
      # value.
      entry = -1
    return (16, union.tag, entry, data), handles
  def Serialize(self, union, handle_offset):
    """Serializes |union| as a standalone 16-byte block plus any extra data."""
    (size, tag, entry, extra_data), handles = self.SerializeInline(
        union, handle_offset)
    data = bytearray(16)
    if extra_data:
      # Out-of-line payload starts right after the 16-byte union block.
      entry = 8
      data.extend(extra_data)
    field = self._fields[union.tag]
    HEADER_STRUCT.pack_into(data, 0, size, tag)
    typecode = field.GetTypeCode()
    # If the value is a nested union, we store a 64 bits pointer to it.
    if field.field_type.IsUnion():
      typecode = 'Q'
    struct.pack_into('<%s' % typecode, data, 8, entry)
    return data, handles
  def Deserialize(self, context, union_class):
    """Decodes a union of |union_class| from context; None when size == 0."""
    if len(context.data) < HEADER_STRUCT.size:
      raise DeserializationException(
          'Available data too short to contain header.')
    (size, tag) = HEADER_STRUCT.unpack_from(context.data)
    if size == 0:
      # A zero size encodes a null union.
      return None
    if size != 16:
      raise DeserializationException('Invalid union size %s' % size)
    # Bypass __init__; internals are set directly below.
    union = union_class.__new__(union_class)
    if tag not in self._fields:
      # Unknown tag (e.g. from a newer sender): leave the union unset.
      union.SetInternals(None, None)
      return union
    field = self._fields[tag]
    if field.field_type.IsUnion():
      ptr = POINTER_STRUCT.unpack_from(context.data, 8)[0]
      value = field.field_type.Deserialize(ptr, context.GetSubContext(ptr+8))
    else:
      raw_value = struct.unpack_from(
          field.GetTypeCode(), context.data, 8)[0]
      value = field.field_type.Deserialize(raw_value, context.GetSubContext(8))
    union.SetInternals(field, value)
    return union
| |
import os, re, sys
import commands
import SiteMover
from futil import *
from PilotErrors import PilotErrors
from pUtil import tolog, readpar, getDirectAccessDic
from timed_command import timed_command
from time import time
class ChirpSiteMover(SiteMover.SiteMover):
""" SiteMover for CHIRP copy commands etc """
copyCommand = "chirp"
checksum_command = "adler32"
__warningStr = '!!WARNING!!2995!! %s'
__chirp = 'chirp -t 300 %s %s < %s' # options,server, command file
__timeout = 300 # seconds
__error = PilotErrors()
__pilotErrorDiag = ''
__MAX_FILE_SIZE = 200*1024**2
def get_timeout(self):
return self.__timeout
def get_data(self, gpfn, lfn, path, fsize=0, fchecksum=0, guid=0, **pdict):
""" copy input file from SE to local dir """
# try to get the direct reading control variable (False for direct reading mode; file should not be copied)
useCT = pdict.get('usect', True)
prodDBlockToken = pdict.get('access', '')
# get the DQ2 tracing report
try:
report = pdict['report']
except:
report = {}
else:
# set the proper protocol
report['protocol'] = 'local'
# mark the relative start
report['relativeStart'] = time()
# the current file
report['filename'] = lfn
# guid
report['guid'] = guid.replace('-','')
if not path:
tolog('path is empty, using current directory')
path = os.getcwd()
# build setup string
envsetup = self.getEnvsetup(get=True)
# should the root file be copied or read directly by athena?
directIn = False
dInfo = getDirectAccessDic(readpar('copysetupin'))
# if copysetupin did not contain direct access info, try the copysetup instead
if not dInfo:
dInfo = getDirectAccessDic(readpar('copysetup'))
tolog("dInfo: %s" % str(dInfo))
# check if we should use the copytool
if dInfo:
directIn = dInfo['directIn']
if directIn:
if useCT:
directIn = False
tolog("Direct access mode is switched off (file will be transferred with the copy tool)")
else:
# determine if the file is a root file according to its name
rootFile = self.isRootFileName(lfn)
if prodDBlockToken == 'local' or not rootFile:
directIn = False
tolog("Direct access mode has been switched off for this file (will be transferred with the copy tool)")
elif rootFile:
tolog("Found root file according to file name: %s (will not be transferred in direct reading mode)" % (lfn))
report['relativeStart'] = None
report['transferStart'] = None
self.__sendReport('FOUND_ROOT', report)
return 0, self.__pilotErrorDiag
else:
tolog("Normal file transfer")
else:
tolog("not directIn")
# build the get command
_params = ""
if fchecksum and fchecksum != 'None' and fchecksum != 0 and fchecksum != "0" and not self.isDummyChecksum(fchecksum):
csumtype = self.getChecksumType(fchecksum)
# special case for md5sum (command only understands 'md5' and 'adler32', and not 'ad' and 'md5sum')
if csumtype == 'md5sum':
csumtype = 'md5'
execStr = self.__localget % (envsetup, _params, gpfn, os.path.join(path, lfn))
tolog("Executing command: %s" % (execStr))
report['transferStart'] = time()
try:
status, telapsed, cout, cerr = timed_command(execStr, self.__timeout)
except Exception, e:
self.__pilotErrorDiag = 'timed_command() threw an exception: %s' % str(e)
tolog(self.__warningStr % self.__pilotErrorDiag)
status = 1
output = str(e)
telapsed = self.__timeout
else:
# improve output parsing, keep stderr and stdout separate
output = cout + cerr
tolog("Elapsed time: %d" % (telapsed))
tolog("Command output:\n%s" % (output))
report['validateStart'] = time()
if status:
# did the copy command time out?
if is_timeout(status):
self.__pilotErrorDiag = "lsm-get failed: time out after %d seconds" % (telapsed)
tolog(self.__warningStr % self.__pilotErrorDiag)
self.__sendReport('GET_TIMEOUT', report)
return self.__error.ERR_GETTIMEOUT, self.__pilotErrorDiag
status = os.WEXITSTATUS(status)
self.__pilotErrorDiag = 'lsm-get failed (%s): %s' % (status, output)
tolog(self.__warningStr % self.__pilotErrorDiag)
self.__sendReport('COPY_FAIL', report)
return self.__error.ERR_STAGEINFAILED, self.__pilotErrorDiag
# the lsm-get command will compare the file size and checksum with the catalog values
self.__sendReport('DONE', report)
return 0, self.__pilotErrorDiag
def put_data(self, source, destination, fsize=0, fchecksum=0, **pdict):
""" copy output file from local dir to SE and register into dataset and catalogues """
# Get input parameters from pdict
lfn = pdict.get('lfn', '')
guid = pdict.get('guid', '')
token = pdict.get('token', '')
dsname = pdict.get('dsname', '')
analJob = pdict.get('analJob', False)
sitename = pdict.get('sitename', '')
extradirs = pdict.get('extradirs', '')
prodSourceLabel = pdict.get('prodSourceLabel', '')
dispatchDBlockTokenForOut = pdict.get('dispatchDBlockTokenForOut', '')
if sitename == "CERNVM" and dispatchDBlockTokenForOut == "":
dispatchDBlockTokenForOut = "chirp^cvmappi50.cern.ch^/panda_test^-d chirp"
# get the DQ2 tracing report
try:
report = pdict['report']
except:
report = {}
else:
# set the proper protocol
report['protocol'] = 'local'
# mark the relative start
report['relativeStart'] = time()
# the current file
report['filename'] = lfn
report['guid'] = guid.replace('-','')
# report['dataset'] = dsname
filename = os.path.basename(source)
# get the local file size and checksum
csumtype = self.checksum_command
if fsize == 0 or fchecksum == 0:
ec, self.__pilotErrorDiag, fsize, fchecksum = self.getLocalFileInfo(source, csumtype=csumtype)
if ec != 0:
self.__sendReport('LOCAL_FILE_INFO_FAIL', report)
return self.put_data_retfail(ec, self.__pilotErrorDiag)
# do not transfer files larger than 50 MB except for CERNVM
if sitename != "CERNVM" and int(fsize) > self.__MAX_FILE_SIZE:
self.__pilotErrorDiag = "File sizes larger than %d B can currently not be tranferred with this site mover: size=%s" % (self.__MAX_FILE_SIZE, fsize)
tolog("!!WARNING!!2997!! %s" % (self.__pilotErrorDiag))
return self.put_data_retfail(self.__error.ERR_OUTPUTFILETOOLARGE, self.__pilotErrorDiag)
# now that the file size is known, add it to the tracing report
report['filesize'] = fsize
# build the command
_params = ""
if fchecksum != 0:
# special case for md5sum (command only understands 'md5' and 'adler32', and not 'ad' and 'md5sum')
if csumtype == 'md5sum':
_csumtype = 'md5'
else:
_csumtype = csumtype
# This contains the user configuration for chirp server, path, debug
# Format should be like
# 'chirp^etpgrid01.garching.physik.uni-muenchen.de^/tanyasandoval^-d chirp'
dispatchDBlockTokenForOut = pdict.get('dispatchDBlockTokenForOut', '')
csplit = dispatchDBlockTokenForOut.split('^')
if len(csplit) != 4:
tolog("Wrong number of fields in chirp string: %s" %
(dispatchDBlockTokenForOut))
self.__pilotErrorDiag = "Wrong number of fields in chirp string: %s" % (dispatchDBlockTokenForOut)
return self.put_data_retfail(self.__error.ERR_STAGEOUTFAILED, self.__pilotErrorDiag)
# Remove _sub part from dataset name
re_sub=re.compile('(.*)_sub\d+')
resub =re_sub.search(dsname)
if resub:
dsname_strip=resub.group(1)
else:
dsname_strip=dsname
chirp_server = csplit[1]
chirp_base = csplit[2]
chirp_options = csplit[3]
chirp_path = chirp_base+'/'+dsname_strip+'/'+filename
tolog("Chirp path: %s" % (chirp_path))
# Make compound command file to run in chirp
chirpcom=open('chirp.com','w')
# Create directories.
dirs=chirp_path.split('/')
dir_path=''
for i in range(1,len(dirs)-1):
dir_path=dir_path+'/'+dirs[i]
chirpcom.write('mkdir '+dir_path+'\n')
# and the cop command too
chirpcom.write('put %s %s\n'%(source,chirp_path))
chirpcom.close()
execStr = self.__chirp % (chirp_options, chirp_server, 'chirp.com')
tolog("Executing command: %s" % (execStr))
try:
status, telapsed, cout, cerr = timed_command(execStr, self.__timeout)
except Exception, e:
self.__pilotErrorDiag = 'timed_command() threw an exception: %s' % str(e)
tolog(self.__warningStr % self.__pilotErrorDiag)
status = 1
output = str(e)
telapsed = self.__timeout
else:
output = cout + cerr
tolog("Elapsed time: %d" % (telapsed))
tolog("Command output:\n%s" % (output))
# validate
if status:
# did the copy command time out?
if is_timeout(status):
self.__pilotErrorDiag = "chirp_put failed: time out after %d seconds" % (telapsed)
tolog(self.__warningStr % self.__pilotErrorDiag)
self.__sendReport('PUT_TIMEOUT', report)
return self.put_data_retfail(self.__error.ERR_PUTTIMEOUT, self.__pilotErrorDiag)
status = os.WEXITSTATUS(status)
self.__pilotErrorDiag = 'chirp_put failed (%s): %s' % (status, output)
tolog(self.__warningStr % self.__pilotErrorDiag)
self.__sendReport('COPY_FAIL', report)
return self.put_data_retfail(self.__error.ERR_STAGEOUTFAILED, self.__pilotErrorDiag)
self.__sendReport('DONE', report)
return 0, self.__pilotErrorDiag, chirp_path, fsize, fchecksum, self.arch_type
def __sendReport(self, state, report):
    """
    Send DQ2 tracing report.

    Stamps the end time on the report if instrumentation was started,
    records the final client state, and hands the report to sendTrace().

    :param state: final client state string (e.g. 'DONE', 'COPY_FAIL')
    :param report: tracing report dictionary, updated in place
    """
    # Fix: dict.has_key() is deprecated (removed in Python 3); the 'in'
    # operator is equivalent and works on both Python 2 and 3.
    if 'timeStart' in report:
        # finish instrumentation
        report['timeEnd'] = time()
    report['clientState'] = state
    # send report
    tolog("Updated tracing report: %s" % str(report))
    self.sendTrace(report)
if __name__ == '__main__':
    # Stand-alone smoke test: stage out a local README file through the
    # chirp mover using a dummy dataset name and dispatch token.
    sitemover = ChirpSiteMover()

    # Keyword arguments collected in one place instead of a long run of
    # throw-away local variables.
    put_kwargs = dict(
        dsname='user.tanyasandoval.0630133553.583378.lib._000181_sub012345',
        sitename='',
        analJob=True,
        testLevel=0,
        pinitdir='',
        dest='',
        proxycheck='',
        token='',
        timeout=10,
        lfn='',
        guid='',
        spsetup='',
        userid='',
        report={},
        prodSourceLabel='',
        outputDir='',
        DN='',
        dispatchDBlockTokenForOut='chirp^2^3^4',
    )
    s, pilotErrorDiag, r_gpfn, r_fsize, r_fchecksum, r_farch = \
        sitemover.put_data('README', '', **put_kwargs)
    tolog("Site mover put function returned: s=%s, r_gpfn=%s, r_fsize=%s, r_fchecksum=%s, r_farch=%s, pilotErrorDiag=%s" %
          (s, r_gpfn, r_fsize, r_fchecksum, r_farch, pilotErrorDiag))
| |
from transition import Transition
from side import Side
from condition import ConditionState, Condition
from data.vars import *
from typing import Dict, List
# Cipher whose round structure the transition systems below describe.
cipher_name = 'skipjack'  # type: str
# Transition systems keyed by round count; each systems[k] defines exactly
# k transitions.
systems = dict()  # type: Dict[int, System]
def __generate_condition_func(zero_conds: List[Condition], var: Variable, var_with_lo: Variable, var_output: Variable):
    """Derive the possible conditions on ``var_output`` from the known
    zero-conditions of ``var`` and ``var_with_lo``.

    If both or neither of the two inputs are forced to zero, the output can
    be zero (and, when neither is known zero, may also be non-zero); if
    exactly one input is zero, the output is necessarily non-zero.
    """
    first_is_zero = False
    second_is_zero = False
    # A single condition is attributed to at most one of the two variables
    # (elif preserves the original first-match precedence).
    for cond in zero_conds:
        if cond.check_contains_var(var, ConditionState.IS_ZERO):
            first_is_zero = True
        elif cond.check_contains_var(var_with_lo, ConditionState.IS_ZERO):
            second_is_zero = True

    if first_is_zero != second_is_zero:
        # Exactly one of the inputs is zero -> output cannot be zero.
        return [Condition.create_non_zero_condition(Side(var_output.clone()))]
    if first_is_zero:
        # Both inputs are zero -> output must be zero.
        return [Condition.create_zero_condition(Side(var_output.clone()))]
    # Neither input is known zero -> both outcomes are possible.
    return [Condition.create_zero_condition(Side(var_output.clone())),
            Condition.create_non_zero_condition(Side(var_output.clone()))]
########################################################################################################################
# Transition-system table for 3..14 rounds. Conventions:
#   * inputs a1..a4 and outputs c1..c4 are module-level variables from
#     data.vars; b1..b10 are intermediate (feedback) variables.
#   * The trailing comment after each System's closing parenthesis states
#     the feedback relation used by the NEXT system in the table.
#   * condition_func ties the hidden variable c5 to the observable outputs
#     c1 and c4 via __generate_condition_func.
########################################################################################################################
# For this system valid next equal c1 = a4 + lambda(c4)
systems[3] = System(
    inputs=[a1, a2, a3],
    outputs=[c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(c2), F),
        Transition(Side(a2, cp_with_lo(c2, lmbda)), Side(c3), G),
        Transition(Side(a3, cp_with_lo(c3, mu)), Side(c4), F)
    ]
)
########################################################################################################################
# Cases for c5:
# 1) c1 = 0, c4 = 0 => c5 = 0
# 2) c1 = 0, c4 != 0 => c5 != 0
# 3) c1 != 0, c4 = 0 => c5 != 0
# 4) c1 != 0, c4 != 0 => c5 = 0
# 5) c1 != 0, c4 != 0 => c5 != 0
########################################################################################################################
systems[4] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(c5), F),
        Transition(Side(a2, cp_with_lo(c5, lmbda)), Side(c2), G),
        Transition(Side(a3, cp_with_lo(c2, mu)), Side(c3), F),
        Transition(Side(a4, cp_with_lo(c3, lmbda)), Side(c4), G)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b1 = c1 + mu(c4) => c5 = c1 + mu(c4)

systems[5] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(c5), G),
        Transition(Side(a3, cp_with_lo(c5, mu)), Side(c2), F),
        Transition(Side(a4, cp_with_lo(c2, lmbda)), Side(c3), G),
        Transition(Side(b1, cp_with_lo(c3, mu)), Side(c4), F)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b2 = c1 + lambda(c4) => c5 = c1 + lambda(c4)

systems[6] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(b2), G),
        Transition(Side(a3, cp_with_lo(b2, mu)), Side(c5), F),
        Transition(Side(a4, cp_with_lo(c5, lmbda)), Side(c2), G),
        Transition(Side(b1, cp_with_lo(c2, mu)), Side(c3), F),
        Transition(Side(b2, cp_with_lo(c3, lmbda)), Side(c4), G)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b3 = c1 + mu(c4) => c5 = c1 + mu(c4)

systems[7] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(b2), G),
        Transition(Side(a3, cp_with_lo(b2, mu)), Side(b3), F),
        Transition(Side(a4, cp_with_lo(b3, lmbda)), Side(c5), G),
        Transition(Side(b1, cp_with_lo(c5, mu)), Side(c2), F),
        Transition(Side(b2, cp_with_lo(c2, lmbda)), Side(c3), G),
        Transition(Side(b3, cp_with_lo(c3, mu)), Side(c4), F)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b4 = c1 + lambda(c4) => c5 = c1 + lambda(c4)

systems[8] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(b2), G),
        Transition(Side(a3, cp_with_lo(b2, mu)), Side(b3), F),
        Transition(Side(a4, cp_with_lo(b3, lmbda)), Side(b4), G),
        Transition(Side(b1, cp_with_lo(b4, mu)), Side(c5), F),
        Transition(Side(b2, cp_with_lo(c5, lmbda)), Side(c2), G),
        Transition(Side(b3, cp_with_lo(c2, mu)), Side(c3), F),
        Transition(Side(b4, cp_with_lo(c3, lmbda)), Side(c4), G)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b5 = c1 + mu(c4) => c5 = c1 + mu(c4)

systems[9] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(b2), G),
        Transition(Side(a3, cp_with_lo(b2, mu)), Side(b3), F),
        Transition(Side(a4, cp_with_lo(b3, lmbda)), Side(b4), G),
        Transition(Side(b1, cp_with_lo(b4, mu)), Side(b5), F),
        Transition(Side(b2, cp_with_lo(b5, lmbda)), Side(c5), G),
        Transition(Side(b3, cp_with_lo(c5, mu)), Side(c2), F),
        Transition(Side(b4, cp_with_lo(c2, lmbda)), Side(c3), G),
        Transition(Side(b5, cp_with_lo(c3, mu)), Side(c4), F)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b6 = c1 + lambda(c4) => c5 = c1 + lambda(c4)

systems[10] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(b2), G),
        Transition(Side(a3, cp_with_lo(b2, mu)), Side(b3), F),
        Transition(Side(a4, cp_with_lo(b3, lmbda)), Side(b4), G),
        Transition(Side(b1, cp_with_lo(b4, mu)), Side(b5), F),
        Transition(Side(b2, cp_with_lo(b5, lmbda)), Side(b6), G),
        Transition(Side(b3, cp_with_lo(b6, mu)), Side(c5), F),
        Transition(Side(b4, cp_with_lo(c5, lmbda)), Side(c2), G),
        Transition(Side(b5, cp_with_lo(c2, mu)), Side(c3), F),
        Transition(Side(b6, cp_with_lo(c3, lmbda)), Side(c4), G)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b7 = c1 + mu(c4) => c5 = c1 + mu(c4)

systems[11] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(b2), G),
        Transition(Side(a3, cp_with_lo(b2, mu)), Side(b3), F),
        Transition(Side(a4, cp_with_lo(b3, lmbda)), Side(b4), G),
        Transition(Side(b1, cp_with_lo(b4, mu)), Side(b5), F),
        Transition(Side(b2, cp_with_lo(b5, lmbda)), Side(b6), G),
        Transition(Side(b3, cp_with_lo(b6, mu)), Side(b7), F),
        Transition(Side(b4, cp_with_lo(b7, lmbda)), Side(c5), G),
        Transition(Side(b5, cp_with_lo(c5, mu)), Side(c2), F),
        Transition(Side(b6, cp_with_lo(c2, lmbda)), Side(c3), G),
        Transition(Side(b7, cp_with_lo(c3, mu)), Side(c4), F)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b8 = c1 + lambda(c4) => c5 = c1 + lambda(c4)

systems[12] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(b2), G),
        Transition(Side(a3, cp_with_lo(b2, mu)), Side(b3), F),
        Transition(Side(a4, cp_with_lo(b3, lmbda)), Side(b4), G),
        Transition(Side(b1, cp_with_lo(b4, mu)), Side(b5), F),
        Transition(Side(b2, cp_with_lo(b5, lmbda)), Side(b6), G),
        Transition(Side(b3, cp_with_lo(b6, mu)), Side(b7), F),
        Transition(Side(b4, cp_with_lo(b7, lmbda)), Side(b8), G),
        Transition(Side(b5, cp_with_lo(b8, mu)), Side(c5), F),
        Transition(Side(b6, cp_with_lo(c5, lmbda)), Side(c2), G),
        Transition(Side(b7, cp_with_lo(c2, mu)), Side(c3), F),
        Transition(Side(b8, cp_with_lo(c3, lmbda)), Side(c4), G)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b9 = c1 + mu(c4) => c5 = c1 + mu(c4)

systems[13] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(b2), G),
        Transition(Side(a3, cp_with_lo(b2, mu)), Side(b3), F),
        Transition(Side(a4, cp_with_lo(b3, lmbda)), Side(b4), G),
        Transition(Side(b1, cp_with_lo(b4, mu)), Side(b5), F),
        Transition(Side(b2, cp_with_lo(b5, lmbda)), Side(b6), G),
        Transition(Side(b3, cp_with_lo(b6, mu)), Side(b7), F),
        Transition(Side(b4, cp_with_lo(b7, lmbda)), Side(b8), G),
        Transition(Side(b5, cp_with_lo(b8, mu)), Side(b9), F),
        Transition(Side(b6, cp_with_lo(b9, lmbda)), Side(c5), G),
        Transition(Side(b7, cp_with_lo(c5, mu)), Side(c2), F),
        Transition(Side(b8, cp_with_lo(c2, lmbda)), Side(c3), G),
        Transition(Side(b9, cp_with_lo(c3, mu)), Side(c4), F)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b10 = c1 + lambda(c4) => c5 = c1 + lambda(c4)

systems[14] = System(
    inputs=[a1, a2, a3, a4],
    outputs=[c1, c2, c3, c4],
    transitions=[
        Transition(Side(a1), Side(b1), F),
        Transition(Side(a2, cp_with_lo(b1, lmbda)), Side(b2), G),
        Transition(Side(a3, cp_with_lo(b2, mu)), Side(b3), F),
        Transition(Side(a4, cp_with_lo(b3, lmbda)), Side(b4), G),
        Transition(Side(b1, cp_with_lo(b4, mu)), Side(b5), F),
        Transition(Side(b2, cp_with_lo(b5, lmbda)), Side(b6), G),
        Transition(Side(b3, cp_with_lo(b6, mu)), Side(b7), F),
        Transition(Side(b4, cp_with_lo(b7, lmbda)), Side(b8), G),
        Transition(Side(b5, cp_with_lo(b8, mu)), Side(b9), F),
        Transition(Side(b6, cp_with_lo(b9, lmbda)), Side(b10), G),
        Transition(Side(b7, cp_with_lo(b10, mu)), Side(c5), F),
        Transition(Side(b8, cp_with_lo(c5, lmbda)), Side(c2), G),
        Transition(Side(b9, cp_with_lo(c2, mu)), Side(c3), F),
        Transition(Side(b10, cp_with_lo(c3, lmbda)), Side(c4), G)
    ],
    condition_func=lambda output_zero_conds: __generate_condition_func(output_zero_conds, c1, c4, c5)
)  # b11 = c1 + mu(c4) => c5 = c1 + mu(c4)
| |
#!/usr/bin/env python3
#
# Tests the basic methods of the adaptive covariance base class.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
import numpy as np
import unittest
import pints
import pints.toy as toy
class TestAdaptiveCovarianceMC(unittest.TestCase):
    """
    Tests the basic methods of the adaptive covariance MCMC routine and
    the MCMCSummary results object built from its chains.
    """

    @classmethod
    def setUpClass(cls):
        """ Set up problem for tests. """
        # Create toy model
        cls.model = toy.LogisticModel()
        cls.real_parameters = [0.015, 500]
        cls.times = np.linspace(0, 1000, 1000)
        cls.values = cls.model.simulate(cls.real_parameters, cls.times)

        # Add noise
        cls.noise = 10
        cls.values += np.random.normal(0, cls.noise, cls.values.shape)
        cls.real_parameters.append(cls.noise)
        cls.real_parameters = np.array(cls.real_parameters)

        # Create an object with links to the model and time series
        cls.problem = pints.SingleOutputProblem(
            cls.model, cls.times, cls.values)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        cls.log_prior = pints.UniformLogPrior(
            [0.01, 400, cls.noise * 0.1],
            [0.02, 600, cls.noise * 100]
        )

        # Create a log likelihood
        cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        cls.log_posterior = pints.LogPosterior(
            cls.log_likelihood, cls.log_prior)

        # Run MCMC sampler with 3 chains
        xs = [cls.real_parameters * 1.1,
              cls.real_parameters * 0.9,
              cls.real_parameters * 1.15,
              ]
        mcmc = pints.MCMCController(cls.log_posterior, 3, xs,
                                    method=pints.HaarioBardenetACMC)
        mcmc.set_max_iterations(200)
        mcmc.set_initial_phase_iterations(50)
        mcmc.set_log_to_screen(False)
        cls.chains = mcmc.run()

    def test_errors(self):
        # Test errors occur when incorrectly calling MCMCSummary
        self.assertRaises(ValueError, pints.MCMCSummary, self.chains, -3)
        self.assertRaises(ValueError, pints.MCMCSummary, self.chains, 0)
        self.assertRaises(ValueError, pints.MCMCSummary, self.chains, 1.5,
                          ["param 1"])

    def test_running(self):
        # Tests that the summary object works as expected
        results = pints.MCMCSummary(self.chains)
        self.assertEqual(results.time(), None)
        self.assertEqual(results.ess_per_second(), None)
        # Bug fix: assertTrue(x, 3) treats the 3 as a failure message and
        # passes for any truthy x; assertEqual on the length is intended
        # (one entry per parameter).
        self.assertEqual(len(results.ess()), 3)
        self.assertEqual(len(results.mean()), 3)
        self.assertEqual(len(results.rhat()), 3)
        self.assertEqual(len(results.std()), 3)

        # Check positive quantities are so
        for i in range(3):
            self.assertTrue(results.ess()[i] > 0)
            self.assertTrue(results.ess()[i] < 1000)
            self.assertTrue(results.rhat()[i] > 0)
            self.assertTrue(results.std()[i] > 0)
            self.assertTrue(results.mean()[i] > 0)

        # Check means are vaguely near true values
        self.assertTrue(np.abs(results.mean()[0] - 0.015) < 0.1)
        self.assertTrue(np.abs(results.mean()[1] - 500) < 100)
        self.assertTrue(np.abs(results.mean()[2] - 10) < 20)

        # Check quantiles object: 5 quantiles x 3 parameters
        quantiles = results.quantiles()
        self.assertEqual(quantiles.shape[0], 5)
        self.assertEqual(quantiles.shape[1], 3)
        for i in range(5):
            for j in range(3):
                self.assertTrue(quantiles[i, j] > 0)

    def test_summary(self):
        # Tests summary functions when time not given
        results = pints.MCMCSummary(self.chains)
        summary = np.array(results.summary())
        self.assertEqual(summary.shape[0], 3)
        self.assertEqual(summary.shape[1], 10)
        text = str(results)
        names = [
            'param',
            'mean',
            'std.',
            '2.5%',
            '25%',
            '50%',
            '75%',
            '97.5%',
            'rhat',
            'ess',
        ]
        for name in names:
            self.assertIn(name, text)

        # Tests summary functions when time is given: one extra column
        results = pints.MCMCSummary(self.chains, 20)
        summary = np.array(results.summary())
        self.assertEqual(summary.shape[0], 3)
        self.assertEqual(summary.shape[1], 11)
        text = str(results)
        names.append('ess per sec.')
        for name in names:
            self.assertIn(name, text)

    def test_ess_per_second(self):
        # Tests that ess per second is calculated when time is supplied
        t = 10
        results = pints.MCMCSummary(self.chains, t)
        self.assertEqual(results.time(), t)
        ess_per_second = results.ess_per_second()
        ess = results.ess()
        # Bug fix: was assertTrue(len(...), 3), which always passes.
        self.assertEqual(len(ess_per_second), 3)
        for i in range(3):
            self.assertEqual(ess_per_second[i], ess[i] / t)

    def test_named_parameters(self):
        # Tests that parameter names are used when values supplied
        parameters = ['rrrr', 'kkkk', 'ssss']
        results = pints.MCMCSummary(
            self.chains, parameter_names=parameters)
        text = str(results)
        for p in parameters:
            self.assertIn(p, text)

        # With time supplied
        results = pints.MCMCSummary(
            self.chains, time=20, parameter_names=parameters)
        text = str(results)
        for p in parameters:
            self.assertIn(p, text)

        # Number of parameter names must equal number of parameters
        self.assertRaises(
            ValueError,
            pints.MCMCSummary, self.chains, parameter_names=['a', 'b'])

    def test_single_chain(self):
        # Tests that a single chain is broken up into two bits
        xs = [self.real_parameters * 0.9]
        mcmc = pints.MCMCController(
            self.log_posterior, 1, xs, method=pints.HaarioBardenetACMC)
        mcmc.set_max_iterations(200)
        mcmc.set_initial_phase_iterations(50)
        mcmc.set_log_to_screen(False)
        chains = mcmc.run()
        results = pints.MCMCSummary(chains)
        chains1 = results.chains()
        self.assertEqual(chains[0].shape[0], chains1[0].shape[0])
        self.assertEqual(chains[0].shape[1], chains1[0].shape[1])
        # Bug fix: the original compared chains[0][10, 1] with itself,
        # which is vacuously true; compare against the stored chains.
        self.assertEqual(chains[0][10, 1], chains1[0][10, 1])
        self.assertEqual(results.time(), None)
        self.assertEqual(results.ess_per_second(), None)
        # Bug fix: assertTrue(len(...), 3) always passed; assertEqual is
        # the intended check (one entry per parameter).
        self.assertEqual(len(results.ess()), 3)
        self.assertEqual(len(results.mean()), 3)
        self.assertEqual(len(results.rhat()), 3)
        self.assertEqual(len(results.std()), 3)

        # Check positive quantities are so
        for i in range(3):
            self.assertTrue(results.ess()[i] > 0)
            self.assertTrue(results.ess()[i] < 1000)
            self.assertTrue(results.rhat()[i] > 0)
            self.assertTrue(results.std()[i] > 0)
            self.assertTrue(results.mean()[i] > 0)

        # Check means are vaguely near true values
        self.assertTrue(np.abs(results.mean()[0] - 0.015) < 0.5)
        self.assertTrue(np.abs(results.mean()[1] - 500) < 200)
        self.assertTrue(np.abs(results.mean()[2] - 10) < 30)

        # Check quantiles object
        quantiles = results.quantiles()
        self.assertEqual(quantiles.shape[0], 5)
        self.assertEqual(quantiles.shape[1], 3)
        for i in range(5):
            for j in range(3):
                self.assertTrue(quantiles[i, j] > 0)

        # Test with odd number of iterations (summary construction only)
        mcmc = pints.MCMCController(
            self.log_posterior, 1, xs, method=pints.HaarioBardenetACMC)
        mcmc.set_max_iterations(99)
        mcmc.set_initial_phase_iterations(40)
        mcmc.set_log_to_screen(False)
        chains = mcmc.run()
        results = pints.MCMCSummary(chains)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| |
# -*- coding: utf-8 -*-
"""
shipment.py
"""
import simplejson as json
from decimal import Decimal
from trytond.model import fields, ModelView
from trytond.pool import PoolMeta, Pool
from trytond.wizard import Wizard, StateView, Button, StateTransition
from trytond.pyson import Eval, Bool
from trytond.transaction import Transaction
from .mixin import ShipmentCarrierMixin
# Register every class defined in this module with the Tryton pool.
__metaclass__ = PoolMeta
# Public API of this module.
__all__ = [
    'ShipmentOut', 'GenerateShippingLabelMessage',
    'GenerateShippingLabel', 'ShippingCarrierSelector',
    'SelectShippingRate'
]
# Customer shipment extended with carrier/shipping-label support.
# (No class docstring added: Tryton uses class docstrings as model
# descriptions, so a comment is used instead.)
class ShipmentOut(ShipmentCarrierMixin):
    __metaclass__ = PoolMeta
    __name__ = 'stock.shipment.out'

    @property
    def carrier_cost_moves(self):
        # Carrier cost is computed over the moves leaving the warehouse.
        return self.outgoing_moves

    def on_change_inventory_moves(self):
        # Skip carrier cost computation while inventory moves are edited;
        # presumably the mixin reads this context flag -- confirm in mixin.
        with Transaction().set_context(ignore_carrier_computation=True):
            super(ShipmentOut, self).on_change_inventory_moves()

    @classmethod
    def pack(cls, shipments):
        """Pack the shipments, ensuring each one ends up with packages.

        A shipment without packages gets a single default package holding
        all outgoing moves; otherwise a user error is raised when some
        outgoing move is not assigned to any package.
        """
        Package = Pool().get('stock.package')

        super(ShipmentOut, cls).pack(shipments)

        for shipment in shipments:
            if not shipment.packages:
                # No package, create a default package
                package = Package()
                package.shipment = shipment
                package.moves = shipment.outgoing_moves
                package.save()
            else:
                # Every outgoing move must be covered by some package.
                if (len(shipment.outgoing_moves) !=
                        sum(len(p.moves) for p in shipment.packages)):
                    cls.raise_user_error(
                        "Not all the items are packaged for shipment #%s", (
                            shipment.number, )
                    )
class ShippingCarrierSelector(ModelView):
    'View To Select Carrier'
    __name__ = 'shipping.label.start'

    # Carrier used to generate the label.
    carrier = fields.Many2One("carrier", "Carrier", required=True)
    # Optional total weight overriding the computed package weights.
    override_weight = fields.Float("Override Weight", digits=(16, 2))
    no_of_packages = fields.Integer('Number of packages', readonly=True)
    # Box type, restricted to those offered by the selected carrier.
    box_type = fields.Many2One(
        "carrier.box_type", "Box Type", required=True, domain=[
            ('id', 'in', Eval("available_box_types"))
        ], depends=["available_box_types"]
    )
    shipping_instructions = fields.Text('Shipping Instructions', readonly=True)
    # Service, restricted to those offered by the selected carrier.
    carrier_service = fields.Many2One(
        "carrier.service", "Carrier Service", domain=[
            ('id', 'in', Eval("available_carrier_services"))
        ], depends=["available_carrier_services"]
    )
    available_box_types = fields.Function(
        fields.One2Many("carrier.box_type", None, 'Available Box Types'),
        getter="on_change_with_available_box_types"
    )
    available_carrier_services = fields.Function(
        fields.One2Many("carrier.service", None, 'Available Carrier Services'),
        getter="on_change_with_available_carrier_services"
    )

    @fields.depends('carrier', 'carrier_service', 'box_type')
    def on_change_carrier(self):
        # Changing the carrier invalidates the dependent selections.
        self.carrier_service = None
        self.box_type = None

    @fields.depends("carrier")
    def on_change_with_available_box_types(self, name=None):
        # Python 2 map() returns the list of ids the client expects.
        if self.carrier:
            return map(int, self.carrier.box_types)
        return []

    @fields.depends("carrier")
    def on_change_with_available_carrier_services(self, name=None):
        if self.carrier:
            return map(int, self.carrier.services)
        return []

    @classmethod
    def view_attributes(cls):
        # Hide the package-count label whenever a non-zero count is set.
        return super(ShippingCarrierSelector, cls).view_attributes() + [
            ('//label[@name="no_of_packages"]', 'states', {
                'invisible': Bool(Eval('no_of_packages')),
            })
        ]
class SelectShippingRate(ModelView):
    'Select Shipping Rate'
    __name__ = 'shipping.label.select_rate'

    # Selection values are filled dynamically by the wizard (see
    # GenerateShippingLabel.transition_next): each option is a JSON-encoded
    # rate dictionary.
    rate = fields.Selection([], 'Rate')
class GenerateShippingLabelMessage(ModelView):
    'Generate Labels Message'
    __name__ = 'shipping.label.end'

    # Tracking number created during label generation.
    tracking_number = fields.Many2One(
        "shipment.tracking", "Tracking number", readonly=True
    )
    message = fields.Text("Message", readonly=True)
    # Generated label documents stored as attachments.
    attachments = fields.One2Many(
        'ir.attachment', None, 'Attachments', readonly=True
    )
    cost = fields.Numeric("Cost", digits=(16, 2), readonly=True)
    cost_currency = fields.Many2One(
        'currency.currency', 'Cost Currency', readonly=True
    )
class GenerateShippingLabel(Wizard):
    'Generate Labels'
    __name__ = 'shipping.label'

    #: This is the first state of wizard to generate shipping label.
    #: It asks for carrier, carrier_service, box_type and override weight,
    #: once entered, it move to `next` transition where it saves all the
    #: values on shipment.
    start = StateView(
        'shipping.label.start',
        'shipping.select_carrier_view_form',
        [
            Button('Cancel', 'end', 'tryton-cancel'),
            Button('Continue', 'next', 'tryton-go-next'),
        ]
    )

    #: Transition saves values from `start` state to the shipment.
    next = StateTransition()

    #: Select shipping rates
    select_rate = StateView(
        'shipping.label.select_rate',
        'shipping.select_rate_view_form',
        [
            Button('Cancel', 'end', 'tryton-cancel'),
            Button('Continue', 'generate_labels', 'tryton-go-next', default=True),  # noqa
        ]
    )

    #: Transition generates shipping labels.
    generate_labels = StateTransition()

    #: State shows the generated label, tracking number, cost and cost
    #: currency.
    generate = StateView(
        'shipping.label.end',
        'shipping.generate_shipping_label_message_view_form',
        [
            Button('Ok', 'end', 'tryton-ok'),
        ]
    )

    @property
    def shipment(self):
        "Gives the active shipment."
        Shipment = Pool().get(Transaction().context.get('active_model'))
        return Shipment(Transaction().context.get('active_id'))

    @classmethod
    def __setup__(cls):
        super(GenerateShippingLabel, cls).__setup__()
        cls._error_messages.update({
            'tracking_number_already_present':
                'Tracking Number is already present for this shipment.',
            'invalid_state': (
                'Labels can only be generated when the shipment is in Packed or'
                ' Done states only'
            ),
            'no_packages': 'Shipment %s has no packages',
        })

    def default_start(self, data):
        """Fill the default values for `start` state.

        Pre-fills the carrier and carrier service from the shipment, and
        derives the default override weight as the sum of the per-package
        override weights converted to the shipment's weight UOM.
        """
        UOM = Pool().get('product.uom')

        if self.shipment.allow_label_generation():
            values = {
                'no_of_packages': len(self.shipment.packages),
                'shipping_instructions': self.shipment.shipping_instructions,
            }

        if self.shipment.carrier:
            values.update({
                'carrier': self.shipment.carrier.id,
            })

        if self.shipment.packages:
            package_weights = []
            for package in self.shipment.packages:
                if not package.override_weight:
                    continue
                # Normalise each package weight to the shipment UOM.
                package_weights.append(
                    UOM.compute_qty(
                        package.override_weight_uom,
                        package.override_weight,
                        self.shipment.weight_uom
                    )
                )
            values['override_weight'] = sum(package_weights)

        if self.shipment.carrier_service:
            values['carrier_service'] = self.shipment.carrier_service.id

        return values

    def transition_next(self):
        """Save the values entered in `start` onto the shipment, then fetch
        shipping rates and populate the `select_rate` selection with
        JSON-serialised rate dictionaries.
        """
        Company = Pool().get('company.company')

        shipment = self.shipment
        company = Company(Transaction().context['company'])

        shipment.carrier = self.start.carrier
        shipment.cost_currency = company.currency
        shipment.carrier_service = self.start.carrier_service
        shipment.save()

        if not shipment.packages:
            shipment._create_default_package(self.start.box_type)

        default_values = self.default_start({})
        per_package_weight = None
        if self.start.override_weight and \
                default_values['override_weight'] != self.start.override_weight:
            # Distribute weight equally
            per_package_weight = (
                self.start.override_weight / len(shipment.packages)
            )
        for package in shipment.packages:
            if per_package_weight:
                package.override_weight = per_package_weight
                package.override_weight_uom = shipment.weight_uom
            if self.start.box_type != package.box_type:
                package.box_type = self.start.box_type
            package.save()

        # Fetch rates, and fill selection field with result list
        rates = self.shipment.get_shipping_rate(
            self.start.carrier, self.start.carrier_service
        )
        result = []
        for rate in rates:
            # Replace active records by their ids so the rate is
            # JSON-serialisable; it is reconstructed in
            # transition_generate_labels.
            json_safe_rate = rate.copy()
            json_safe_rate.update({
                'carrier': json_safe_rate['carrier'].id,
                'carrier_service': json_safe_rate['carrier_service'] and
                    json_safe_rate['carrier_service'].id,
                'cost_currency': json_safe_rate['cost_currency'].id,
            })
            # Update when delivery date is not None
            if json_safe_rate.get('delivery_date'):
                json_safe_rate.update({
                    'delivery_date': json_safe_rate[
                        'delivery_date'].isoformat()
                })
            # Update when delivery time is not None
            if json_safe_rate.get('delivery_time'):
                json_safe_rate.update({
                    'delivery_time': json_safe_rate[
                        'delivery_time'].isoformat()
                })
            result.append((
                json.dumps(json_safe_rate), '%s %s %s' % (
                    rate['display_name'],
                    rate['cost'],
                    rate['cost_currency'].code,
                )
            ))
        self.select_rate.__class__.rate.selection = result
        return 'select_rate'

    def transition_generate_labels(self):
        "Generates shipping labels from data provided by earlier states"
        Carrier = Pool().get('carrier')
        CarrierService = Pool().get('carrier.service')
        Currency = Pool().get('currency.currency')

        if self.select_rate.rate:
            # Rebuild active records from the ids serialised in
            # transition_next.
            rate = json.loads(self.select_rate.rate)
            rate.update({
                'carrier': Carrier(rate['carrier']),
                'carrier_service': rate['carrier_service'] and
                    CarrierService(rate['carrier_service']),
                'cost': Decimal(rate['cost']),
                'cost_currency': Currency(rate['cost_currency'])
            })

            self.shipment.apply_shipping_rate(rate)

        self.shipment.generate_shipping_labels()

        return "generate"

    def get_attachments(self):  # pragma: no cover
        """
        Returns list of attachments corresponding to shipment.
        """
        Attachment = Pool().get('ir.attachment')

        # NOTE(review): this domain tuple has five elements, which does not
        # match the standard Tryton (field, operator, value) form -- it
        # looks like a mangled/broken domain. Verify against the intended
        # search (attachments whose resource is one of the shipment's
        # packages or their tracking numbers) before relying on it.
        return map(
            int,
            Attachment.search([
                ('resource.origin.id', 'in', map(int, self.shipment.packages),
                 'shipment.tracking', 'stock.package')
            ])
        )

    def get_message(self):
        """
        Returns message to be displayed on wizard
        """
        message = 'Shipment labels have been generated via %s and saved as ' \
            'attachments for the tracking number' % (
                self.shipment.carrier.carrier_cost_method.upper()
            )
        return message

    def default_generate(self, data):
        # Fill the final wizard screen from the updated shipment.
        return {
            'tracking_number': self.shipment.tracking_number and
                self.shipment.tracking_number.id,
            'message': self.get_message(),
            'attachments': self.get_attachments(),
            'cost': self.shipment.cost,
            'cost_currency': self.shipment.cost_currency.id,
        }
| |
"""
Component to offer a way to select a date and / or a time.
For more details about this component, please refer to the documentation
at https://home-assistant.io/components/input_datetime/
"""
import asyncio
import logging
import datetime
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID, CONF_ICON, CONF_NAME, STATE_UNKNOWN)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import async_get_last_state
from homeassistant.util import dt as dt_util
_LOGGER = logging.getLogger(__name__)

DOMAIN = 'input_datetime'
ENTITY_ID_FORMAT = DOMAIN + '.{}'

# Configuration keys.
CONF_HAS_DATE = 'has_date'
CONF_HAS_TIME = 'has_time'
CONF_INITIAL = 'initial'

# Service-call attributes.
ATTR_DATE = 'date'
ATTR_TIME = 'time'

SERVICE_SET_DATETIME = 'set_datetime'

# Schema of the set_datetime service: date and/or time may be supplied.
SERVICE_SET_DATETIME_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Optional(ATTR_DATE): cv.date,
    vol.Optional(ATTR_TIME): cv.time,
})

# Component configuration: each entity must enable at least one of
# has_date / has_time.
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        cv.slug: vol.All({
            vol.Optional(CONF_NAME): cv.string,
            vol.Required(CONF_HAS_DATE): cv.boolean,
            vol.Required(CONF_HAS_TIME): cv.boolean,
            vol.Optional(CONF_ICON): cv.icon,
            vol.Optional(CONF_INITIAL): cv.string,
        }, cv.has_at_least_one_key_value((CONF_HAS_DATE, True),
                                         (CONF_HAS_TIME, True)))})
}, extra=vol.ALLOW_EXTRA)
@asyncio.coroutine
def async_set_datetime(hass, entity_id, dt_value):
    """Set date and / or time of input_datetime."""
    service_data = {
        ATTR_ENTITY_ID: entity_id,
        ATTR_DATE: dt_value.date(),
        ATTR_TIME: dt_value.time(),
    }
    yield from hass.services.async_call(
        DOMAIN, SERVICE_SET_DATETIME, service_data)
@asyncio.coroutine
def async_setup(hass, config):
    """Set up an input datetime."""
    component = EntityComponent(_LOGGER, DOMAIN, hass)

    # One entity per configured slug.
    entities = [
        InputDatetime(object_id,
                      cfg.get(CONF_NAME),
                      cfg.get(CONF_HAS_DATE),
                      cfg.get(CONF_HAS_TIME),
                      cfg.get(CONF_ICON),
                      cfg.get(CONF_INITIAL))
        for object_id, cfg in config[DOMAIN].items()
    ]
    if not entities:
        return False

    @asyncio.coroutine
    def async_set_datetime_service(call):
        """Handle a call to the input datetime 'set datetime' service."""
        tasks = []
        for input_datetime in component.async_extract_from_service(call):
            time = call.data.get(ATTR_TIME)
            date = call.data.get(ATTR_DATE)
            # Reject calls missing a part the entity requires.
            missing_date = input_datetime.has_date() and not date
            missing_time = input_datetime.has_time() and not time
            if missing_date or missing_time:
                _LOGGER.error("Invalid service data for "
                              "input_datetime.set_datetime: %s",
                              str(call.data))
                continue
            tasks.append(input_datetime.async_set_datetime(date, time))

        if tasks:
            yield from asyncio.wait(tasks, loop=hass.loop)

    hass.services.async_register(
        DOMAIN, SERVICE_SET_DATETIME, async_set_datetime_service,
        schema=SERVICE_SET_DATETIME_SCHEMA)

    yield from component.async_add_entities(entities)

    return True
class InputDatetime(Entity):
    """Representation of a datetime input."""

    def __init__(self, object_id, name, has_date, has_time, icon, initial):
        """Initialize a select input.

        :param object_id: slug used to build the entity id
        :param name: friendly name, or None
        :param has_date: whether this input carries a date part
        :param has_time: whether this input carries a time part
        :param icon: icon to display, or None
        :param initial: initial state string, or None
        """
        self.entity_id = ENTITY_ID_FORMAT.format(object_id)
        self._name = name
        self._has_date = has_date
        self._has_time = has_time
        self._icon = icon
        self._initial = initial
        self._current_datetime = None

    @asyncio.coroutine
    def async_added_to_hass(self):
        """Run when entity about to be added."""
        restore_val = None

        # Priority 1: Initial State
        if self._initial is not None:
            restore_val = self._initial

        # Priority 2: Old state
        if restore_val is None:
            old_state = yield from async_get_last_state(self.hass,
                                                        self.entity_id)
            if old_state is not None:
                restore_val = old_state.state

        if restore_val is not None:
            # Parse according to the configured capabilities; the dt_util
            # parsers return None on malformed input.
            if not self._has_date:
                self._current_datetime = dt_util.parse_time(restore_val)
            elif not self._has_time:
                self._current_datetime = dt_util.parse_date(restore_val)
            else:
                self._current_datetime = dt_util.parse_datetime(restore_val)

    def has_date(self):
        """Return whether the input datetime carries a date."""
        return self._has_date

    def has_time(self):
        """Return whether the input datetime carries a time."""
        return self._has_time

    @property
    def should_poll(self):
        """If entity should be polled."""
        return False

    @property
    def name(self):
        """Return the name of the select input."""
        return self._name

    @property
    def icon(self):
        """Return the icon to be used for this entity."""
        return self._icon

    @property
    def state(self):
        """Return the state of the component."""
        if self._current_datetime is None:
            return STATE_UNKNOWN
        return self._current_datetime

    @property
    def state_attributes(self):
        """Return the state attributes."""
        attrs = {
            'has_date': self._has_date,
            'has_time': self._has_time,
        }

        if self._current_datetime is None:
            # No value yet: expose only the capability flags.
            return attrs

        # Fix: past this point _current_datetime is known to be set, so the
        # original repeated `is not None` re-checks were dead and removed.
        if self._has_date:
            attrs['year'] = self._current_datetime.year
            attrs['month'] = self._current_datetime.month
            attrs['day'] = self._current_datetime.day

        if self._has_time:
            attrs['hour'] = self._current_datetime.hour
            attrs['minute'] = self._current_datetime.minute
            attrs['second'] = self._current_datetime.second

        if not self._has_date:
            # Time-only value: seconds since midnight.
            attrs['timestamp'] = self._current_datetime.hour * 3600 + \
                self._current_datetime.minute * 60 + \
                self._current_datetime.second
        elif not self._has_time:
            # Date-only value: extend to midnight for a POSIX timestamp.
            extended = datetime.datetime.combine(self._current_datetime,
                                                 datetime.time(0, 0))
            attrs['timestamp'] = extended.timestamp()
        else:
            attrs['timestamp'] = self._current_datetime.timestamp()

        return attrs

    @asyncio.coroutine
    def async_set_datetime(self, date_val, time_val):
        """Set a new date / time."""
        if self._has_date and self._has_time and date_val and time_val:
            self._current_datetime = datetime.datetime.combine(date_val,
                                                               time_val)
        elif self._has_date and not self._has_time and date_val:
            self._current_datetime = date_val
        # Consistency fix: this branch was a bare `if`; the conditions are
        # mutually exclusive so `elif` is behaviorally identical but makes
        # the chain explicit.
        elif self._has_time and not self._has_date and time_val:
            self._current_datetime = time_val
        yield from self.async_update_ha_state()
| |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import tensorflow as tf
import math as m
import numpy as np
from renderer import mesh_renderer
from scipy.io import loadmat
# Reconstruct 3D face based on output coefficients and facemodel
#-----------------------------------------------------------------------------------------
# BFM 3D face model
class BFM():
    """Basel Face Model (BFM) wrapper.

    Loads the morphable-model arrays from a MATLAB .mat file and exposes
    them as TensorFlow constants.  All vertex/face index arrays follow
    the MATLAB convention and start from 1.
    """

    def __init__(self,model_path = 'renderer/BFM face model/BFM_model_front_gan.mat'):
        mat = loadmat(model_path)
        # mean face shape [3*N,1], identity basis [3*N,80] and
        # expression basis [3*N,64]
        self.meanshape = tf.constant(mat['meanshape'])
        self.idBase = tf.constant(mat['idBase'])
        self.exBase = tf.constant(mat['exBase'].astype(np.float32))
        # mean face texture [3*N,1] (0-255) and texture basis [3*N,80]
        self.meantex = tf.constant(mat['meantex'])
        self.texBase = tf.constant(mat['texBase'])
        # per-vertex adjacent-face indices [N,8] and triangle list [F,3]
        self.point_buf = tf.constant(mat['point_buf'])
        self.face_buf = tf.constant(mat['tri'])
        # small face region used for rendering: vertex indices and its
        # own triangle list [f,3]
        self.front_mask_render = tf.squeeze(tf.constant(mat['gan_mask']))
        self.mask_face_buf = tf.constant(mat['gan_tl'])
        # vertex indices for the 68 facial landmarks [68,1]
        self.keypoints = tf.squeeze(tf.constant(mat['keypoints']))
# Analytic 3D face
class Face3D():
    """Analytic 3D face decoder.

    Reconstructs textured 3D face meshes, their 2D landmark projections
    and rendered images from 257-dim coefficient vectors predicted by
    R-Net, using the Basel Face Model loaded by :class:`BFM`.

    NOTE(review): this code uses TensorFlow 1.x graph-mode APIs
    (tf.cross, l2_normalize(dim=...), Dimension.value) and will not run
    unmodified under TF 2.x.
    """
    def __init__(self):
        # Load the BFM morphable model once and reuse it for all blocks.
        facemodel = BFM()
        self.facemodel = facemodel
    # analytic 3D face reconstructions with coefficients from R-Net
    def Reconstruction_Block(self,coeff,res,batchsize,progressive=True):
        """Full pipeline: coefficients -> rendered images, mask, landmarks.

        coeff: [batchsize,257] reconstruction coefficients.
        res: target rendering resolution (scalar tensor when progressive).
        batchsize: number of samples per batch (used when not progressive).
        progressive: when True, select render resolution/batch size via a
            tf.cond chain on ``res`` (progressive-GAN style training).
        Returns (render_imgs, render_mask, landmark_p, face_shape_t).
        """
        #coeff: [batchsize,257] reconstruction coefficients
        id_coeff,ex_coeff,tex_coeff,angles,translation,gamma = self.Split_coeff(coeff)
        # [batchsize,N,3] canonical face shape in BFM space
        face_shape = self.Shape_formation_block(id_coeff,ex_coeff,self.facemodel)
        # [batchsize,N,3] vertex texture (in RGB order)
        face_texture = self.Texture_formation_block(tex_coeff,self.facemodel)
        # [batchsize,3,3] rotation matrix for face shape
        rotation = self.Compute_rotation_matrix(angles)
        # [batchsize,N,3] vertex normal
        face_norm = self.Compute_norm(face_shape,self.facemodel)
        # rotate normals along with the shape
        norm_r = tf.matmul(face_norm,rotation)
        # do rigid transformation for face shape using predicted rotation and translation
        face_shape_t = self.Rigid_transform_block(face_shape,rotation,translation)
        # compute 2d landmark projections
        # landmark_p: [batchsize,68,2]
        face_landmark_t = self.Compute_landmark(face_shape_t,self.facemodel)
        landmark_p = self.Projection_block(face_landmark_t) # 256*256 image
        # [batchsize,N,3] vertex color (in RGB order)
        face_color = self.Illumination_block(face_texture, norm_r, gamma)
        # reconstruction images and region masks
        if progressive:
            # Pick (resolution, batch size) pairs for progressive training;
            # higher resolutions use smaller batches.
            render_imgs,img_mask = tf.cond(res<=8, lambda:self.Render_block(face_shape_t,norm_r,face_color,self.facemodel,8,64),
                lambda:
                tf.cond(res<=16, lambda:self.Render_block(face_shape_t,norm_r,face_color,self.facemodel,16,32),
                lambda:
                tf.cond(res<=32, lambda:self.Render_block(face_shape_t,norm_r,face_color,self.facemodel,32,16),
                lambda:
                tf.cond(res<=64, lambda:self.Render_block(face_shape_t,norm_r,face_color,self.facemodel,64,8),
                lambda:
                tf.cond(res<=128, lambda:self.Render_block(face_shape_t,norm_r,face_color,self.facemodel,128,4),
                lambda:
                self.Render_block(face_shape_t,norm_r,face_color,self.facemodel,256,4)
                )))))
        else:
            render_imgs,img_mask = self.Render_block(face_shape_t,norm_r,face_color,self.facemodel,res,batchsize)
        # clamp to valid pixel range and cast outputs to float32
        render_imgs = tf.clip_by_value(render_imgs,0,255)
        render_imgs = tf.cast(render_imgs,tf.float32)
        render_mask = tf.cast(img_mask,tf.float32)
        return render_imgs,render_mask,landmark_p,face_shape_t
    def Get_landmark(self,coeff):
        """Return [batchsize,68,2] landmark projections for a 224*224 image."""
        face_shape_t = self.Get_face_shape(coeff)
        # compute 2d landmark projections
        # landmark_p: [batchsize,68,2]
        face_landmark_t = self.Compute_landmark(face_shape_t,self.facemodel)
        landmark_p = self.Projection_block(face_landmark_t,focal=1015.,half_image_width=112.) # 224*224 image
        return landmark_p
    def Get_face_shape(self,coeff):
        """Return the rigidly transformed face shape for <coeff>."""
        #coeff: [batchsize,257] reconstruction coefficients
        id_coeff,ex_coeff,tex_coeff,angles,translation,gamma = self.Split_coeff(coeff)
        # [batchsize,N,3] canonical face shape in BFM space
        face_shape = self.Shape_formation_block(id_coeff,ex_coeff,self.facemodel)
        # [batchsize,3,3] rotation matrix for face shape
        rotation = self.Compute_rotation_matrix(angles)
        # do rigid transformation for face shape using predicted rotation and translation
        face_shape_t = self.Rigid_transform_block(face_shape,rotation,translation)
        return face_shape_t
    def Split_coeff(self,coeff):
        """Slice a [batchsize,257] coefficient tensor into its components.

        Layout: identity (80), texture (80), expression (64), rotation
        angles (3), SH lighting gamma (27), translation (3).
        """
        id_coeff = coeff[:,:80]
        tex_coeff = coeff[:,80:160]
        ex_coeff = coeff[:,160:224]
        angles = coeff[:,224:227]
        gamma = coeff[:,227:254]
        translation = coeff[:,254:257]
        return id_coeff,ex_coeff,tex_coeff,angles,translation,gamma
    def Shape_formation_block(self,id_coeff,ex_coeff,facemodel):
        """Linear-combination face shape: mean + id basis + expr basis.

        Returns [batchsize,N,3] vertices re-centered on the mean shape.
        """
        face_shape = tf.einsum('ij,aj->ai',facemodel.idBase,id_coeff) + \
                    tf.einsum('ij,aj->ai',facemodel.exBase,ex_coeff) + facemodel.meanshape
        # reshape face shape to [batchsize,N,3]
        face_shape = tf.reshape(face_shape,[tf.shape(face_shape)[0],-1,3])
        # re-centering the face shape with mean shape
        face_shape = face_shape - tf.reshape(tf.reduce_mean(tf.reshape(facemodel.meanshape,[-1,3]),0),[1,1,3])
        return face_shape
    def Compute_norm(self,face_shape,facemodel):
        """Per-vertex normals from one-ring face normals.

        Returns [batchsize,N,3] unit normals.
        """
        shape = face_shape
        face_id = facemodel.face_buf
        point_id = facemodel.point_buf
        # face_id and point_id index starts from 1
        face_id = tf.cast(face_id - 1,tf.int32)
        point_id = tf.cast(point_id - 1,tf.int32)
        #compute normal for each face
        v1 = tf.gather(shape,face_id[:,0], axis = 1)
        v2 = tf.gather(shape,face_id[:,1], axis = 1)
        v3 = tf.gather(shape,face_id[:,2], axis = 1)
        e1 = v1 - v2
        e2 = v2 - v3
        face_norm = tf.cross(e1,e2)
        face_norm = tf.nn.l2_normalize(face_norm, dim = 2) # normalized face_norm first
        # pad a zero normal so out-of-range neighbor slots contribute nothing
        face_norm = tf.concat([face_norm,tf.zeros([tf.shape(face_shape)[0],1,3])], axis = 1)
        #compute normal for each vertex using one-ring neighborhood
        v_norm = tf.reduce_sum(tf.gather(face_norm, point_id, axis = 1), axis = 2)
        v_norm = tf.nn.l2_normalize(v_norm, dim = 2)
        return v_norm
    def Texture_formation_block(self,tex_coeff,facemodel):
        """Linear-combination texture: mean + texture basis.

        Returns [batchsize,N,3] per-vertex RGB values (0-255 range).
        """
        face_texture = tf.einsum('ij,aj->ai',facemodel.texBase,tex_coeff) + facemodel.meantex
        # reshape face texture to [batchsize,N,3], note that texture is in RGB order
        face_texture = tf.reshape(face_texture,[tf.shape(face_texture)[0],-1,3])
        return face_texture
    def Compute_rotation_matrix(self,angles):
        """Euler angles [batchsize,3] -> transposed rotation matrices.

        Returns [batchsize,3,3]; transposed so row-vector vertices can be
        rotated as face_shape @ R.
        """
        n_data = tf.shape(angles)[0]
        # compute rotation matrix for X-axis, Y-axis, Z-axis respectively
        rotation_X = tf.concat([tf.ones([n_data,1]),
            tf.zeros([n_data,3]),
            tf.reshape(tf.cos(angles[:,0]),[n_data,1]),
            -tf.reshape(tf.sin(angles[:,0]),[n_data,1]),
            tf.zeros([n_data,1]),
            tf.reshape(tf.sin(angles[:,0]),[n_data,1]),
            tf.reshape(tf.cos(angles[:,0]),[n_data,1])],
            axis = 1
            )
        rotation_Y = tf.concat([tf.reshape(tf.cos(angles[:,1]),[n_data,1]),
            tf.zeros([n_data,1]),
            tf.reshape(tf.sin(angles[:,1]),[n_data,1]),
            tf.zeros([n_data,1]),
            tf.ones([n_data,1]),
            tf.zeros([n_data,1]),
            -tf.reshape(tf.sin(angles[:,1]),[n_data,1]),
            tf.zeros([n_data,1]),
            tf.reshape(tf.cos(angles[:,1]),[n_data,1])],
            axis = 1
            )
        rotation_Z = tf.concat([tf.reshape(tf.cos(angles[:,2]),[n_data,1]),
            -tf.reshape(tf.sin(angles[:,2]),[n_data,1]),
            tf.zeros([n_data,1]),
            tf.reshape(tf.sin(angles[:,2]),[n_data,1]),
            tf.reshape(tf.cos(angles[:,2]),[n_data,1]),
            tf.zeros([n_data,3]),
            tf.ones([n_data,1])],
            axis = 1
            )
        rotation_X = tf.reshape(rotation_X,[n_data,3,3])
        rotation_Y = tf.reshape(rotation_Y,[n_data,3,3])
        rotation_Z = tf.reshape(rotation_Z,[n_data,3,3])
        # R = RzRyRx
        rotation = tf.matmul(tf.matmul(rotation_Z,rotation_Y),rotation_X)
        # because our face shape is N*3, so compute the transpose of R, so that rotation shapes can be calculated as face_shape*R
        rotation = tf.transpose(rotation, perm = [0,2,1])
        return rotation
    def Projection_block(self,face_shape,focal=1015.0*1.22,half_image_width=128.):
        """Perspective-project 3D points to 2D image coordinates.

        face_shape: [batchsize,N,3] points in camera-facing canonical space.
        focal: camera focal length in pixels (default targets 256*256).
        half_image_width: principal point, i.e. half the image size.
        Returns [batchsize,N,2] pixel coordinates.
        """
        # pre-defined camera focal for pespective projection
        focal = tf.constant(focal)
        # focal = tf.constant(400.0)
        focal = tf.reshape(focal,[-1,1])
        batchsize = tf.shape(face_shape)[0]
        # center = tf.constant(112.0)
        # define camera position
        # camera_pos = tf.reshape(tf.constant([0.0,0.0,10.0]),[1,1,3])
        camera_pos = tf.reshape(tf.constant([0.0,0.0,10.0]),[1,1,3])
        # camera_pos = tf.reshape(tf.constant([0.0,0.0,4.0]),[1,1,3])
        # flip the z axis so depth increases away from the camera
        reverse_z = tf.tile(tf.reshape(tf.constant([1.0,0,0,0,1,0,0,0,-1.0]),[1,3,3]),[tf.shape(face_shape)[0],1,1])
        # compute projection matrix
        # p_matrix = tf.concat([[focal],[0.0],[center],[0.0],[focal],[center],[0.0],[0.0],[1.0]],axis = 0)
        p_matrix = tf.concat([focal*tf.ones([batchsize,1]),tf.zeros([batchsize,1]),half_image_width*tf.ones([batchsize,1]),tf.zeros([batchsize,1]),\
            focal*tf.ones([batchsize,1]),half_image_width*tf.ones([batchsize,1]),tf.zeros([batchsize,2]),tf.ones([batchsize,1])],axis = 1)
        # p_matrix = tf.tile(tf.reshape(p_matrix,[1,3,3]),[tf.shape(face_shape)[0],1,1])
        p_matrix = tf.reshape(p_matrix,[-1,3,3])
        # convert z in canonical space to the distance to camera
        face_shape = tf.matmul(face_shape,reverse_z) + camera_pos
        aug_projection = tf.matmul(face_shape,tf.transpose(p_matrix,[0,2,1]))
        # [batchsize, N,2] 2d face projection (divide by depth)
        face_projection = aug_projection[:,:,0:2]/tf.reshape(aug_projection[:,:,2],[tf.shape(face_shape)[0],tf.shape(aug_projection)[1],1])
        return face_projection
    def Compute_landmark(self,face_shape,facemodel):
        """Gather the 68 landmark vertices from the full face shape."""
        # compute 3D landmark postitions with pre-computed 3D face shape
        keypoints_idx = facemodel.keypoints
        # keypoint indices start from 1 (MATLAB convention)
        keypoints_idx = tf.cast(keypoints_idx - 1,tf.int32)
        face_landmark = tf.gather(face_shape,keypoints_idx,axis = 1)
        return face_landmark
    def Illumination_block(self,face_texture,norm_r,gamma):
        """Shade vertex texture with 2nd-order spherical harmonics lighting.

        face_texture: [batchsize,N,3] albedo; norm_r: [batchsize,N,3]
        rotated normals; gamma: [batchsize,27] SH coefficients (9 per
        RGB channel).  Returns [batchsize,N,3] shaded vertex colors.
        """
        n_data = tf.shape(gamma)[0]
        n_point = tf.shape(norm_r)[1]
        gamma = tf.reshape(gamma,[n_data,3,9])
        # set initial lighting with an ambient lighting
        init_lit = tf.constant([0.8,0,0,0,0,0,0,0,0])
        gamma = gamma + tf.reshape(init_lit,[1,1,9])
        # compute vertex color using SH function approximation
        a0 = m.pi
        a1 = 2*m.pi/tf.sqrt(3.0)
        a2 = 2*m.pi/tf.sqrt(8.0)
        c0 = 1/tf.sqrt(4*m.pi)
        c1 = tf.sqrt(3.0)/tf.sqrt(4*m.pi)
        c2 = 3*tf.sqrt(5.0)/tf.sqrt(12*m.pi)
        # [batchsize,N,9] SH basis evaluated at each vertex normal
        Y = tf.concat([tf.tile(tf.reshape(a0*c0,[1,1,1]),[n_data,n_point,1]),
            tf.expand_dims(-a1*c1*norm_r[:,:,1],2),
            tf.expand_dims(a1*c1*norm_r[:,:,2],2),
            tf.expand_dims(-a1*c1*norm_r[:,:,0],2),
            tf.expand_dims(a2*c2*norm_r[:,:,0]*norm_r[:,:,1],2),
            tf.expand_dims(-a2*c2*norm_r[:,:,1]*norm_r[:,:,2],2),
            tf.expand_dims(a2*c2*0.5/tf.sqrt(3.0)*(3*tf.square(norm_r[:,:,2])-1),2),
            tf.expand_dims(-a2*c2*norm_r[:,:,0]*norm_r[:,:,2],2),
            tf.expand_dims(a2*c2*0.5*(tf.square(norm_r[:,:,0])-tf.square(norm_r[:,:,1])),2)],axis = 2)
        color_r = tf.squeeze(tf.matmul(Y,tf.expand_dims(gamma[:,0,:],2)),axis = 2)
        color_g = tf.squeeze(tf.matmul(Y,tf.expand_dims(gamma[:,1,:],2)),axis = 2)
        color_b = tf.squeeze(tf.matmul(Y,tf.expand_dims(gamma[:,2,:],2)),axis = 2)
        #[batchsize,N,3] vertex color in RGB order
        face_color = tf.stack([color_r*face_texture[:,:,0],color_g*face_texture[:,:,1],color_b*face_texture[:,:,2]],axis = 2)
        return face_color
    def Rigid_transform_block(self,face_shape,rotation,translation):
        """Apply rotation then translation to the face shape."""
        # do rigid transformation for 3D face shape
        face_shape_r = tf.matmul(face_shape,rotation)
        face_shape_t = face_shape_r + tf.reshape(translation,[tf.shape(face_shape)[0],1,3])
        return face_shape_t
    def Render_block(self,face_shape,face_norm,face_color,facemodel,res,batchsize):
        """Rasterize the cropped face region with tf_mesh_renderer.

        Returns (img, mask): img [batchsize,res,res,3] RGB (0-255),
        mask [batchsize,res,res,1] transparency ({0,1} values).
        """
        # render reconstruction images
        n_vex = int(facemodel.idBase.shape[0].value/3)
        # field of view matching the projection focal (1015*1.22, 256px)
        fov_y = 2*tf.atan(128/(1015.*1.22))*180./m.pi
        # full face region
        face_shape = tf.reshape(face_shape,[batchsize,n_vex,3])
        face_norm = tf.reshape(face_norm,[batchsize,n_vex,3])
        face_color = tf.reshape(face_color,[batchsize,n_vex,3])
        # pre-defined cropped face region (indices start from 1)
        mask_face_shape = tf.gather(face_shape,tf.cast(facemodel.front_mask_render-1,tf.int32),axis = 1)
        mask_face_norm = tf.gather(face_norm,tf.cast(facemodel.front_mask_render-1,tf.int32),axis = 1)
        mask_face_color = tf.gather(face_color,tf.cast(facemodel.front_mask_render-1,tf.int32),axis = 1)
        # setting cammera settings
        camera_position = tf.constant([[0,0,10.0]]) + tf.zeros([batchsize,3])
        camera_lookat = tf.constant([[0,0,0.0]]) + tf.zeros([batchsize,3])
        camera_up = tf.constant([[0,1.0,0]]) + tf.zeros([batchsize,3])
        # setting light source position(intensities are set to 0 because we have computed the vertex color)
        light_positions = tf.reshape(tf.constant([0,0,1e5]),[1,1,3]) + tf.zeros([batchsize,1,3])
        light_intensities = tf.reshape(tf.constant([0.0,0.0,0.0]),[1,1,3])+tf.zeros([batchsize,1,3])
        ambient_color = tf.reshape(tf.constant([1.0,1,1]),[1,3])+ tf.zeros([batchsize,3])
        near_clip = 0.01
        far_clip = 50.
        # using tf_mesh_renderer for rasterization,
        # https://github.com/google/tf_mesh_renderer
        # img: [batchsize,224,224,3] images in RGB order (0-255)
        # mask:[batchsize,224,224,1] transparency for img ({0,1} value)
        # NOTE(review): pinned to CPU, presumably because the rasterizer
        # op has no GPU kernel — confirm before changing.
        with tf.device('/cpu:0'):
            rgba_img = mesh_renderer.mesh_renderer(mask_face_shape,
                tf.cast(facemodel.mask_face_buf-1,tf.int32),
                mask_face_norm,
                mask_face_color,
                camera_position = camera_position,
                camera_lookat = camera_lookat,
                camera_up = camera_up,
                light_positions = light_positions,
                light_intensities = light_intensities,
                image_width = res,
                image_height = res,
                # fov_y = 12.5936,
                fov_y = fov_y,
                ambient_color = ambient_color,
                near_clip = near_clip,
                far_clip = far_clip)
        img = rgba_img[:,:,:,:3]
        mask = rgba_img[:,:,:,3:]
        return img,mask
| |
# Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
import math
import netaddr
import threading
from keystoneclient.auth.identity import v3
from keystoneclient import exceptions as k_exceptions
from keystoneclient import session
from keystoneclient.v2_0 import client as k_client
from keystoneclient.v3 import client
from neutron.common import utils
from neutron_lib import exceptions as n_exc
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
from sqlalchemy import func
from sqlalchemy.orm import exc
from sqlalchemy.orm import joinedload
from sqlalchemy.sql import expression as expr
from networking_cisco._i18n import _
from networking_cisco import backwards_compatibility as bc
from networking_cisco.plugins.cisco.common import (cisco_constants as
c_constants)
from networking_cisco.plugins.cisco.db.device_manager import hd_models
from networking_cisco.plugins.cisco.db.device_manager import hosting_devices_db
from networking_cisco.plugins.cisco.device_manager import config
from networking_cisco.plugins.cisco.device_manager import service_vm_lib
from networking_cisco.plugins.cisco.extensions import ciscohostingdevicemanager
LOG = logging.getLogger(__name__)
# Configuration options for the hosting device manager; registered
# below under the [general] section of the neutron server configuration.
HOSTING_DEVICE_MANAGER_OPTS = [
    cfg.StrOpt('l3_admin_tenant', default='L3AdminTenant',
               help=_("Name of the L3 admin tenant")),
    cfg.StrOpt('management_network', default='osn_mgmt_nw',
               help=_("Name of management network for service VM "
                      "configuration. Default value is osn_mgmt_nw")),
    cfg.StrOpt('default_security_group', default='mgmt_sec_grp',
               help=_("Default security group applied on management port. "
                      "Default value is mgmt_sec_grp")),
    cfg.BoolOpt('ensure_nova_running', default=True,
                help=_("Ensure that Nova is running before attempting to "
                       "create any service VM.")),
    cfg.StrOpt('domain_name_server_1', default='8.8.8.8',
               help=_("IP address of primary domain name server for hosting "
                      "devices")),
    cfg.StrOpt('domain_name_server_2', default='8.8.4.4',
               help=_("IP address of secondary domain name server for hosting "
                      "devices"))
]
cfg.CONF.register_opts(HOSTING_DEVICE_MANAGER_OPTS, "general")
# Shorthand for the VM host category defined by the extension.
VM_CATEGORY = ciscohostingdevicemanager.VM_CATEGORY
class HostingDeviceManagerMixin(hosting_devices_db.HostingDeviceDBMixin):
    """A class implementing a resource manager for hosting devices.
    The caller should make sure that HostingDeviceManagerMixin is a singleton.
    """
    # NOTE: the attributes below are class-level caches shared by all
    # instances (consistent with the singleton requirement above).
    # The all-mighty tenant owning all hosting devices
    _l3_tenant_uuid = None
    # Keystone session corresponding to admin user and l3_admin_tenant
    _keystone_session = None
    # The management network for hosting devices
    _mgmt_nw_uuid = None
    # First subnet of the management network (cached by mgmt_nw_id())
    _mgmt_subnet_uuid = None
    # Security group applied on management ports
    _mgmt_sec_grp_id = None
    # Dictionary with credentials keyed on credential UUID
    _credentials = {}
    # Dictionaries with loaded driver modules for different host types
    _plugging_drivers = {}
    _hosting_device_drivers = {}
    # Dictionary with locks for hosting device pool maintenance
    _hosting_device_locks = {}
    # Service VM manager object that interacts with Nova
    _svc_vm_mgr_obj = None
    # Flag indicating whether the needed Nova services are reported as up.
    _nova_running = False
@classmethod
def _keystone_auth_session(cls):
if cls._keystone_session:
return cls._keystone_session
else:
auth_url = cfg.CONF.keystone_authtoken.auth_url
if auth_url.rsplit('/', 1)[-1] != 'v3':
auth_url += '/v3'
# user = cfg.CONF.keystone_authtoken.admin_user
# pw = cfg.CONF.keystone_authtoken.admin_password
# project_name = cfg.CONF.keystone_authtoken.admin_tenant_name
# project_name = cfg.CONF.keystone_authtoken.project_name
user = cfg.CONF.keystone_authtoken.username
pw = cfg.CONF.keystone_authtoken.password
project_name = cfg.CONF.general.l3_admin_tenant
user_domain_id = (cfg.CONF.keystone_authtoken.user_domain_id or
'default')
project_domain_id = (cfg.CONF.keystone_authtoken.project_domain_id
or 'default')
auth = v3.Password(auth_url=auth_url,
username=user,
password=pw,
project_name=project_name,
user_domain_id=user_domain_id,
project_domain_id=project_domain_id)
cls._keystone_session = session.Session(auth=auth)
return cls._keystone_session
@property
def svc_vm_mgr(self):
if self._svc_vm_mgr_obj is None:
if hasattr(cfg.CONF.keystone_authtoken, 'project_domain_id'):
self._svc_vm_mgr_obj = service_vm_lib.ServiceVMManager(
is_auth_v3=True,
keystone_session=self._keystone_auth_session())
else:
auth_url = cfg.CONF.keystone_authtoken.identity_uri + "/v2.0"
u_name = cfg.CONF.keystone_authtoken.admin_user
pw = cfg.CONF.keystone_authtoken.admin_password
tenant = cfg.CONF.general.l3_admin_tenant
self._svc_vm_mgr_obj = service_vm_lib.ServiceVMManager(
is_auth_v3=False,
user=u_name, passwd=pw, l3_admin_tenant=tenant,
auth_url=auth_url)
return self._svc_vm_mgr_obj
@classmethod
def _get_tenant_id_using_keystone_v2(cls):
auth_url = cfg.CONF.keystone_authtoken.identity_uri + "/v2.0"
user = cfg.CONF.keystone_authtoken.admin_user
pw = cfg.CONF.keystone_authtoken.admin_password
tenant = cfg.CONF.keystone_authtoken.admin_tenant_name
keystone = k_client.Client(username=user, password=pw,
tenant_name=tenant,
auth_url=auth_url)
try:
tenant = keystone.tenants.find(
name=cfg.CONF.general.l3_admin_tenant)
except k_exceptions.NotFound:
LOG.error('No tenant with a name or ID of %s exists.',
cfg.CONF.general.l3_admin_tenant)
except k_exceptions.NoUniqueMatch:
LOG.error('Multiple tenants matches found for %s',
cfg.CONF.general.l3_admin_tenant)
return tenant.id
@classmethod
def _get_tenant_id_using_keystone_v3(cls):
keystone = client.Client(session=cls._keystone_auth_session())
try:
tenant = keystone.projects.find(
name=cfg.CONF.general.l3_admin_tenant)
except k_exceptions.NotFound:
LOG.error('No tenant with a name or ID of %s exists.',
cfg.CONF.general.l3_admin_tenant)
except k_exceptions.NoUniqueMatch:
LOG.error('Multiple tenants matches found for %s',
cfg.CONF.general.l3_admin_tenant)
return tenant.id
@classmethod
def l3_tenant_id(cls):
"""Returns id of tenant owning hosting device resources."""
if cls._l3_tenant_uuid is None:
if hasattr(cfg.CONF.keystone_authtoken, 'project_domain_id'):
# TODO(sridar): hack for now to determing if keystone v3
# API is to be used.
cls._l3_tenant_uuid = cls._get_tenant_id_using_keystone_v3()
else:
cls._l3_tenant_uuid = cls._get_tenant_id_using_keystone_v2()
return cls._l3_tenant_uuid
@classmethod
def mgmt_nw_id(cls):
"""Returns id of the management network."""
if cls._mgmt_nw_uuid is None:
tenant_id = cls.l3_tenant_id()
if not tenant_id:
return
net = bc.get_plugin().get_networks(
bc.context.get_admin_context(),
{'tenant_id': [tenant_id],
'name': [cfg.CONF.general.management_network]},
['id', 'subnets'])
if len(net) == 1:
num_subnets = len(net[0]['subnets'])
if num_subnets == 0:
LOG.error('The management network has no subnet. '
'Please assign one.')
return
elif num_subnets > 1:
LOG.info('The management network has %d subnets. The '
'first one will be used.', num_subnets)
cls._mgmt_nw_uuid = net[0].get('id')
cls._mgmt_subnet_uuid = net[0]['subnets'][0]
elif len(net) > 1:
# Management network must have a unique name.
LOG.error('The management network for does not have '
'unique name. Please ensure that it is.')
else:
# Management network has not been created.
LOG.error('There is no virtual management network. Please '
'create one.')
return cls._mgmt_nw_uuid
    @classmethod
    def mgmt_subnet_id(cls):
        """Returns id of the management network's subnet.

        The value is populated (and cached) as a side effect of
        mgmt_nw_id(); may be None when no management network or subnet
        exists.
        """
        if cls._mgmt_subnet_uuid is None:
            cls.mgmt_nw_id()
        return cls._mgmt_subnet_uuid
@classmethod
def mgmt_sec_grp_id(cls):
"""Returns id of security group used by the management network."""
if not utils.is_extension_supported(bc.get_plugin(), "security-group"):
return
if cls._mgmt_sec_grp_id is None:
# Get the id for the _mgmt_security_group_id
tenant_id = cls.l3_tenant_id()
res = bc.get_plugin().get_security_groups(
bc.context.get_admin_context(),
{'tenant_id': [tenant_id],
'name': [cfg.CONF.general.default_security_group]},
['id'])
if len(res) == 1:
sec_grp_id = res[0].get('id', None)
cls._mgmt_sec_grp_id = sec_grp_id
elif len(res) > 1:
# the mgmt sec group must be unique.
LOG.error('The security group for the management network '
'does not have unique name. Please ensure that '
'it is.')
else:
# Service VM Mgmt security group is not present.
LOG.error('There is no security group for the management '
'network. Please create one.')
return cls._mgmt_sec_grp_id
def get_hosting_device_config(self, context, id):
# ask config agent for the running config of the hosting device
cfg_notifier = self.agent_notifiers.get(c_constants.AGENT_TYPE_CFG)
if cfg_notifier:
return cfg_notifier.get_hosting_device_configuration(context, id)
def get_hosting_device_driver(self, context, id):
"""Returns device driver for hosting device template with <id>."""
if id is None:
return
try:
return self._hosting_device_drivers[id]
except KeyError:
try:
template = self._get_hosting_device_template(context, id)
self._hosting_device_drivers[id] = importutils.import_object(
template['device_driver'])
except (ImportError, TypeError, n_exc.NeutronException):
LOG.exception("Error loading hosting device driver for "
"hosting device template %s", id)
return self._hosting_device_drivers.get(id)
def get_hosting_device_plugging_driver(self, context, id):
"""Returns plugging driver for hosting device template with <id>."""
if id is None:
return
try:
return self._plugging_drivers[id]
except KeyError:
try:
template = self._get_hosting_device_template(context, id)
self._plugging_drivers[id] = importutils.import_object(
template['plugging_driver'])
except (ImportError, TypeError, n_exc.NeutronException):
LOG.exception("Error loading plugging driver for hosting "
"device template %s", id)
return self._plugging_drivers.get(id)
    def report_hosting_device_shortage(self, context, template, requested=0):
        """Used to report shortage of hosting devices based on <template>.

        Triggers a pool maintenance job for <template> so the pool can be
        grown.  NOTE(review): the ``context`` and ``requested`` arguments
        are currently unused by this implementation.
        """
        self._dispatch_pool_maintenance_job(template)
    def acquire_hosting_device_slots(self, context, hosting_device, resource,
                                     resource_type, resource_service, num,
                                     exclusive=False):
        """Assign <num> slots in <hosting_device> to logical <resource>.
        If exclusive is True the hosting device is bound to the resource's
        tenant. Otherwise it is not bound to any tenant.
        Returns True if allocation was granted, False otherwise.

        Allocation is rejected when the device is bound to a different
        tenant, exclusivity cannot be guaranteed, or the device's slot
        capacity would be exceeded; rejections may trigger a pool
        maintenance job so additional devices can be created.
        """
        bound = hosting_device['tenant_bound']
        # reject if the device is bound to another tenant, or exclusive
        # use was requested but cannot be guaranteed
        if ((bound is not None and bound != resource['tenant_id']) or
            (exclusive and not self._exclusively_used(context, hosting_device,
                                                     resource['tenant_id']))):
            LOG.debug(
                'Rejecting allocation of %(num)d slots in tenant %(bound)s '
                'hosting device %(device)s to logical resource %(r_id)s due '
                'to exclusive use conflict.',
                {'num': num,
                 'bound': 'unbound' if bound is None else bound + ' bound',
                 'device': hosting_device['id'], 'r_id': resource['id']})
            return False
        with context.session.begin(subtransactions=True):
            res_info = {'resource': resource, 'type': resource_type,
                        'service': resource_service}
            slot_info, query = self._get_or_create_slot_allocation(
                context, hosting_device, res_info)
            if slot_info is None:
                LOG.debug('Rejecting allocation of %(num)d slots in hosting '
                          'device %(device)s to logical resource %(r_id)s',
                          {'num': num, 'device': hosting_device['id'],
                           'r_id': resource['id']})
                return False
            new_allocation = num + slot_info.num_allocated
            # reject allocations that would exceed the device's capacity
            if hosting_device['template']['slot_capacity'] < new_allocation:
                LOG.debug('Rejecting allocation of %(num)d slots in '
                          'hosting device %(device)s to logical resource '
                          '%(r_id)s due to insufficent slot availability.',
                          {'num': num, 'device': hosting_device['id'],
                           'r_id': resource['id']})
                self._dispatch_pool_maintenance_job(hosting_device['template'])
                return False
            # handle any changes to exclusive usage by tenant
            if exclusive and bound is None:
                self._update_hosting_device_exclusivity(
                    context, hosting_device, resource['tenant_id'])
                bound = resource['tenant_id']
            elif not exclusive and bound is not None:
                self._update_hosting_device_exclusivity(context,
                                                        hosting_device, None)
                bound = None
            slot_info.num_allocated = new_allocation
            context.session.add(slot_info)
        self._dispatch_pool_maintenance_job(hosting_device['template'])
        # report success
        LOG.info('Allocated %(num)d additional slots in tenant %(bound)s'
                 'bound hosting device %(hd_id)s. In total %(total)d '
                 'slots are now allocated in that hosting device for '
                 'logical resource %(r_id)s.',
                 {'num': num, 'bound': 'un-' if bound is None else bound + ' ',
                  'total': new_allocation, 'hd_id': hosting_device['id'],
                  'r_id': resource['id']})
        return True
    def release_hosting_device_slots(self, context, hosting_device, resource,
                                     num):
        """Free <num> slots in <hosting_device> from logical resource <id>.
        Returns True if deallocation was successful. False otherwise.

        A negative <num> releases *all* slots the resource holds in the
        hosting device.  When the last allocation is removed, the
        allocation row is deleted and a tenant-bound hosting device with
        no remaining allocations is made tenant unbound again.
        """
        with context.session.begin(subtransactions=True):
            num_str = str(num) if num >= 0 else "all"
            res_info = {'resource': resource}
            slot_info, query = self._get_or_create_slot_allocation(
                context, hosting_device, res_info, create=False)
            if slot_info is None:
                # the resource holds no allocation in this hosting device
                LOG.debug('Rejecting de-allocation of %(num)s slots in '
                          'hosting device %(device)s for logical resource '
                          '%(id)s', {'num': num_str,
                                     'device': hosting_device['id'],
                                     'id': resource['id']})
                return False
            if num >= 0:
                new_allocation = slot_info.num_allocated - num
            else:
                # if a negative num is specified all slot allocations for
                # the logical resource in the hosting device is removed
                new_allocation = 0
            if new_allocation < 0:
                # caller asked to free more slots than are allocated
                LOG.debug('Rejecting de-allocation of %(num)s slots in '
                          'hosting device %(device)s for logical resource '
                          '%(id)s since only %(alloc)d slots are allocated.',
                          {'num': num_str, 'device': hosting_device['id'],
                           'id': resource['id'],
                           'alloc': slot_info.num_allocated})
                self._dispatch_pool_maintenance_job(hosting_device['template'])
                return False
            elif new_allocation == 0:
                # last slots released: delete the allocation row entirely
                result = query.delete()
                LOG.info('De-allocated %(num)s slots from hosting device '
                         '%(hd_id)s. %(total)d slots are now allocated in '
                         'that hosting device.',
                         {'num': num_str, 'total': new_allocation,
                          'hd_id': hosting_device['id']})
                if (hosting_device['tenant_bound'] is not None and
                    context.session.query(hd_models.SlotAllocation).filter_by(
                        hosting_device_id=hosting_device['id']).first() is
                        None):
                    # make hosting device tenant unbound if no logical
                    # resource use it anymore
                    hosting_device['tenant_bound'] = None
                    context.session.add(hosting_device)
                    LOG.info('Making hosting device %(hd_id)s with no '
                             'allocated slots tenant unbound.',
                             {'hd_id': hosting_device['id']})
                self._dispatch_pool_maintenance_job(hosting_device['template'])
                # query.delete() returns the number of deleted rows
                return result == 1
            LOG.info('De-allocated %(num)s slots from hosting device '
                     '%(hd_id)s. %(total)d slots are now allocated in '
                     'that hosting device.',
                     {'num': num_str, 'total': new_allocation,
                      'hd_id': hosting_device['id']})
            slot_info.num_allocated = new_allocation
            context.session.add(slot_info)
            self._dispatch_pool_maintenance_job(hosting_device['template'])
            # report success
            return True
    def _get_or_create_slot_allocation(self, context, hosting_device,
                                       resource_info, create=True):
        """Fetch the SlotAllocation row binding <resource_info>'s resource
        to <hosting_device>, optionally building a fresh one.

        Returns (slot_info, query): slot_info is the existing row, a new
        row with num_allocated=0 (when create is True and none exists), or
        None (no row and create is False, or duplicate rows found).
        NOTE(review): a newly built row is not added to the session here —
        the caller persists it (see acquire_hosting_device_slots).
        """
        resource = resource_info['resource']
        slot_info = None
        query = context.session.query(hd_models.SlotAllocation).filter_by(
            logical_resource_id=resource['id'],
            hosting_device_id=hosting_device['id'])
        with context.session.begin(subtransactions=True):
            try:
                slot_info = query.one()
            except exc.MultipleResultsFound:
                # this should not happen
                LOG.debug('DB inconsistency: Multiple slot allocation entries '
                          'for logical resource %(r_id)s in hosting device '
                          '%(device)s.', {'r_id': resource['id'],
                                          'device': hosting_device['id']})
            except exc.NoResultFound:
                LOG.debug('Logical resource %(res)s does not have allocated '
                          'any slots in hosting device %(dev)s.',
                          {'res': resource['id'], 'dev': hosting_device['id']})
                if create is True:
                    LOG.debug('Creating new slot allocation DB entry for '
                              'logical resource %(res)s in hosting device '
                              '%(dev)s.', {'res': resource['id'],
                                           'dev': hosting_device['id']})
                    slot_info = hd_models.SlotAllocation(
                        template_id=hosting_device['template_id'],
                        hosting_device_id=hosting_device['id'],
                        logical_resource_type=resource_info['type'],
                        logical_resource_service=resource_info['service'],
                        logical_resource_id=resource['id'],
                        logical_resource_owner=resource['tenant_id'],
                        num_allocated=0,
                        tenant_bound=None)
        return slot_info, query
def get_slot_allocation(self, context, template_id=None,
hosting_device_id=None, resource_id=None):
query = context.session.query(func.sum(
hd_models.SlotAllocation.num_allocated))
if template_id is not None:
query = query.filter_by(template_id=template_id)
if hosting_device_id is not None:
query = query.filter_by(hosting_device_id=hosting_device_id)
if resource_id is not None:
query = query.filter_by(logical_resource_id=resource_id)
return query.scalar() or 0
def get_hosting_devices_qry(self, context, hosting_device_ids,
load_agent=True):
"""Returns hosting devices with <hosting_device_ids>."""
query = context.session.query(hd_models.HostingDevice)
if load_agent:
query = query.options(joinedload('cfg_agent'))
if len(hosting_device_ids) > 1:
query = query.filter(hd_models.HostingDevice.id.in_(
hosting_device_ids))
else:
query = query.filter(hd_models.HostingDevice.id ==
hosting_device_ids[0])
return query
def delete_all_hosting_devices(self, context, force_delete=False):
"""Deletes all hosting devices."""
for item in self._get_collection_query(
context, hd_models.HostingDeviceTemplate):
self.delete_all_hosting_devices_by_template(
context, template=item, force_delete=force_delete)
    def delete_all_hosting_devices_by_template(self, context, template,
                                               force_delete=False):
        """Deletes all hosting devices based on <template>.

        :param context: request context
        :param template: db object for the hosting device template
        :param force_delete: if True, also delete devices whose life cycle
            is not managed by the device manager (auto_delete is False)
        """
        # Both drivers are required for a clean teardown; abort otherwise.
        plugging_drv = self.get_hosting_device_plugging_driver(
            context, template['id'])
        hosting_device_drv = self.get_hosting_device_driver(context,
                                                            template['id'])
        if plugging_drv is None or hosting_device_drv is None:
            return
        is_vm = template['host_category'] == VM_CATEGORY
        query = context.session.query(hd_models.HostingDevice)
        query = query.filter(hd_models.HostingDevice.template_id ==
                             template['id'])
        for hd in query:
            if not (hd.auto_delete or force_delete):
                # device manager is not responsible for life cycle
                # management of this hosting device.
                continue
            # collect the device's resources before the VM disappears
            res = plugging_drv.get_hosting_device_resources(
                context, hd.id, hd.complementary_id, self.l3_tenant_id(),
                self.mgmt_nw_id())
            if is_vm:
                self.svc_vm_mgr.delete_service_vm(context, hd.id)
            plugging_drv.delete_hosting_device_resources(
                context, self.l3_tenant_id(), **res)
            with context.session.begin(subtransactions=True):
                # remove all allocations in this hosting device
                context.session.query(hd_models.SlotAllocation).filter_by(
                    hosting_device_id=hd['id']).delete()
                context.session.delete(hd)
    def handle_non_responding_hosting_devices(self, context, cfg_agent,
                                              hosting_device_ids):
        """Process hosting devices that <cfg_agent> reported as dead.

        :param context: request context
        :param cfg_agent: db object of the reporting config agent
        :param hosting_device_ids: list of ids of non-responding devices
        """
        e_context = context.elevated()
        hosting_devices = self.get_hosting_devices_qry(
            e_context, hosting_device_ids).all()
        # 'hosting_info' is dictionary with ids of removed hosting
        # devices and the affected logical resources for each
        # removed hosting device:
        #    {'hd_id1': {'routers': [id1, id2, ...],
        #                'fw': [id1, ...],
        #                ...},
        #     'hd_id2': {'routers': [id3, id4, ...],
        #                'fw': [id1, ...],
        #                ...},
        #     ...}
        hosting_info = dict((id, {}) for id in hosting_device_ids)
        #TODO(bobmel): Modify so service plugins register themselves
        try:
            # let the L3 plugin record its affected resources in
            # hosting_info (mutated in place)
            l3plugin = bc.get_plugin(bc.constants.L3)
            l3plugin.handle_non_responding_hosting_devices(
                context, hosting_devices, hosting_info)
        except AttributeError:
            pass
        notifier = self.agent_notifiers.get(c_constants.AGENT_TYPE_CFG)
        for hd in hosting_devices:
            if (self._process_non_responsive_hosting_device(e_context, hd) and
                    notifier):
                # NOTE(review): the complete hosting_info dict is sent once
                # per deleted device -- confirm this is intended.
                notifier.hosting_devices_removed(context, hosting_info, False,
                                                 cfg_agent)
def get_device_info_for_agent(self, context, hosting_device_db):
"""Returns information about <hosting_device> needed by config agent.
Convenience function that service plugins can use to populate
their resources with information about the device hosting their
logical resource.
"""
template = hosting_device_db.template
mgmt_port = hosting_device_db.management_port
mgmt_ip = (mgmt_port['fixed_ips'][0]['ip_address']
if mgmt_port else hosting_device_db.management_ip_address)
return {'id': hosting_device_db.id,
'name': template.name,
'template_id': template.id,
'credentials': self._get_credentials(hosting_device_db),
'host_category': template.host_category,
'admin_state_up': hosting_device_db.admin_state_up,
'service_types': template.service_types,
'management_ip_address': mgmt_ip,
'protocol_port': hosting_device_db.protocol_port,
'timeout': None,
'created_at': str(hosting_device_db.created_at),
'status': hosting_device_db.status,
'booting_time': template.booting_time}
def _process_non_responsive_hosting_device(self, context, hosting_device):
"""Host type specific processing of non responsive hosting devices.
:param hosting_device: db object for hosting device
:return: True if hosting_device has been deleted, otherwise False
"""
if (hosting_device['template']['host_category'] == VM_CATEGORY and
hosting_device['auto_delete']):
self._delete_dead_service_vm_hosting_device(context,
hosting_device)
return True
return False
    def _setup_device_manager(self):
        """One-time initialization of the device manager.

        Order matters here: credentials must be known before templates are
        created from config, and templates before devices based on them.
        """
        self._obtain_hosting_device_credentials_from_config()
        self._create_hosting_device_templates_from_config()
        self._create_hosting_devices_from_config()
        # green thread pool used to run pool maintenance asynchronously
        self._gt_pool = eventlet.GreenPool()
        # initialize hosting device pools
        adm_ctx = bc.context.get_admin_context()
        for template in adm_ctx.session.query(hd_models.HostingDeviceTemplate):
            self._dispatch_pool_maintenance_job(template)
    def _dispatch_pool_maintenance_job(self, template):
        """Schedule pool maintenance for <template> on the green thread pool.

        Maintenance only applies to VM-based templates and is skipped until
        Nova is confirmed running (when that check is enabled in config).
        """
        # Note(bobmel): Nova does not handle VM dispatching well before all
        # its services have started. This creates problems for the Neutron
        # devstack script that creates a Neutron router, which in turn
        # triggers service VM dispatching.
        # Only perform pool maintenance if needed Nova services have started
        # For now the pool size is only elastic for service VMs.
        if template['host_category'] != VM_CATEGORY:
            return
        if cfg.CONF.general.ensure_nova_running and not self._nova_running:
            if self.svc_vm_mgr.nova_services_up():
                self._nova_running = True
            else:
                LOG.info('Not all Nova services are up and running. '
                         'Skipping this service vm pool management '
                         'request.')
                return
        adm_context = bc.context.get_admin_context()
        adm_context.tenant_id = self.l3_tenant_id()
        # run asynchronously so the caller is not blocked by VM booting
        self._gt_pool.spawn_n(self._maintain_hosting_device_pool, adm_context,
                              template)
    def _maintain_hosting_device_pool(self, context, template):
        """Maintains the pool of hosting devices that are based on <template>.

        Ensures that the number of standby hosting devices (essentially
        service VMs) is kept at a suitable level so that resource creation is
        not slowed down by booting of the hosting device.

        :param context: context for this operation
        :param template: db object for hosting device template
        """
        #TODO(bobmel): Support HA/load-balanced Neutron servers:
        #TODO(bobmel): Locking across multiple running Neutron server instances
        lock = self._get_template_pool_lock(template['id'])
        # non-blocking acquire: if maintenance is already running for this
        # template we simply skip this round
        acquired = lock.acquire(False)
        if not acquired:
            # pool maintenance for this template already ongoing, so abort
            return
        try:
            # Maintain a pool of approximately 'desired_slots_free' available
            # for allocation. Approximately means that
            # abs(desired_slots_free-capacity) <= available_slots <=
            # desired_slots_free+capacity
            capacity = template['slot_capacity']
            if capacity == 0:
                # template cannot host anything; nothing to maintain
                return
            desired = template['desired_slots_free']
            available = self._get_total_available_slots(
                context, template['id'], capacity)
            # NOTE(review): grow threshold is abs(desired - capacity);
            # presumably desired >= capacity in practice -- confirm.
            grow_threshold = abs(desired - capacity)
            if available <= grow_threshold:
                # too few free slots: create devices, rounding up so the
                # deficit is fully covered
                num_req = int(math.ceil(grow_threshold / (1.0 * capacity)))
                num_created = len(self._create_svc_vm_hosting_devices(
                    context, num_req, template))
                if num_created < num_req:
                    LOG.warning('Requested %(requested)d instances based '
                                'on hosting device template %(template)s '
                                'but could only create %(created)d '
                                'instances',
                                {'requested': num_req,
                                 'template': template['id'],
                                 'created': num_created})
            elif available >= desired + capacity:
                # surplus of free slots: delete idle devices, rounding down
                # so we never undershoot the desired level
                num_req = int(
                    math.floor((available - desired) / (1.0 * capacity)))
                num_deleted = self._delete_idle_service_vm_hosting_devices(
                    context, num_req, template)
                if num_deleted < num_req:
                    LOG.warning('Tried to delete %(requested)d instances '
                                'based on hosting device template '
                                '%(template)s but could only delete '
                                '%(deleted)d instances',
                                {'requested': num_req, 'template': template['id'],
                                 'deleted': num_deleted})
        finally:
            lock.release()
def _create_svc_vm_hosting_devices(self, context, num, template):
"""Creates <num> or less service VM instances based on <template>.
These hosting devices can be bound to a certain tenant or for shared
use. A list with the created hosting device VMs is returned.
"""
hosting_devices = []
template_id = template['id']
credentials_id = template['default_credentials_id']
plugging_drv = self.get_hosting_device_plugging_driver(context,
template_id)
hosting_device_drv = self.get_hosting_device_driver(context,
template_id)
if plugging_drv is None or hosting_device_drv is None or num <= 0:
return hosting_devices
#TODO(bobmel): Determine value for max_hosted properly
max_hosted = 1 # template['slot_capacity']
dev_data, mgmt_context = self._get_resources_properties_for_hd(
template, credentials_id)
credentials_info = self._credentials.get(credentials_id)
if credentials_info is None:
LOG.error('Could not find credentials for hosting device'
'template %s. Aborting VM hosting device creation.',
template_id)
return hosting_devices
connectivity_info = self._get_mgmt_connectivity_info(
context, self.mgmt_subnet_id())
for i in range(num):
complementary_id = uuidutils.generate_uuid()
res = plugging_drv.create_hosting_device_resources(
context, complementary_id, self.l3_tenant_id(), mgmt_context,
max_hosted)
if res.get('mgmt_port') is None:
# Required ports could not be created
return hosting_devices
connectivity_info['mgmt_port'] = res['mgmt_port']
vm_instance = self.svc_vm_mgr.dispatch_service_vm(
context, template['name'] + '_nrouter', template['image'],
template['flavor'], hosting_device_drv, credentials_info,
connectivity_info, res.get('ports'))
if vm_instance is not None:
dev_data.update(
{'id': vm_instance['id'],
'complementary_id': complementary_id,
'management_ip_address': res['mgmt_port'][
'fixed_ips'][0]['ip_address'],
'management_port_id': res['mgmt_port']['id']})
self.create_hosting_device(context,
{'hosting_device': dev_data})
hosting_devices.append(vm_instance)
else:
# Fundamental error like could not contact Nova
# Cleanup anything we created
plugging_drv.delete_hosting_device_resources(
context, self.l3_tenant_id(), **res)
break
LOG.info('Created %(num)d hosting device VMs based on template '
'%(t_id)s', {'num': len(hosting_devices),
't_id': template_id})
return hosting_devices
def _get_mgmt_connectivity_info(self, context, mgmt_subnet_id):
subnet_data = self._core_plugin.get_subnet(
context, mgmt_subnet_id,
['cidr', 'gateway_ip', 'dns_nameservers'])
num = len(subnet_data['dns_nameservers'])
name_server_1 = cfg.CONF.general.domain_name_server_1
name_server_2 = cfg.CONF.general.domain_name_server_2
if num == 1:
name_server_1 = subnet_data['dns_nameservers'][0]['address']
name_server_2 = cfg.CONF.general.domain_name_server_2
elif num >= 2:
name_server_1 = subnet_data['dns_nameservers'][0]['address']
name_server_2 = subnet_data['dns_nameservers'][1]['address']
return {'gateway_ip': subnet_data['gateway_ip'],
'netmask': str(netaddr.IPNetwork(subnet_data['cidr']).netmask),
'name_server_1': name_server_1,
'name_server_2': name_server_2}
def _get_resources_properties_for_hd(self, template, credentials_id):
# These resources are owned by the L3AdminTenant
dev_data = {'template_id': template['id'],
'tenant_id': template['tenant_id'],
'credentials_id': credentials_id,
'admin_state_up': True,
'protocol_port': template['protocol_port'],
'created_at': timeutils.utcnow(),
'tenant_bound': template['tenant_bound'] or None,
'auto_delete': True}
mgmt_context = {
'mgmt_ip_address': None,
'mgmt_nw_id': self.mgmt_nw_id(),
'mgmt_sec_grp_id': self.mgmt_sec_grp_id()}
return dev_data, mgmt_context
    def _delete_idle_service_vm_hosting_devices(self, context, num, template):
        """Deletes <num> or less unused <template>-based service VM instances.

        The number of deleted service vm instances is returned.

        :param context: request context
        :param num: maximum number of devices to delete
        :param template: db object for the hosting device template
        """
        # Delete the "youngest" hosting devices since they are more likely
        # not to have finished booting
        num_deleted = 0
        plugging_drv = self.get_hosting_device_plugging_driver(context,
                                                               template['id'])
        hosting_device_drv = self.get_hosting_device_driver(context,
                                                            template['id'])
        if plugging_drv is None or hosting_device_drv is None or num <= 0:
            return num_deleted
        # Candidates: enabled, tenant-unbound, auto-deletable devices of
        # this template that have no slot allocations at all (the HAVING
        # count == 0 only matches devices with no allocation rows).
        query = context.session.query(hd_models.HostingDevice)
        query = query.outerjoin(
            hd_models.SlotAllocation,
            hd_models.HostingDevice.id ==
            hd_models.SlotAllocation.hosting_device_id)
        query = query.filter(hd_models.HostingDevice.template_id ==
                             template['id'],
                             hd_models.HostingDevice.admin_state_up ==
                             expr.true(),
                             hd_models.HostingDevice.tenant_bound ==
                             expr.null(),
                             hd_models.HostingDevice.auto_delete ==
                             expr.true())
        query = query.group_by(hd_models.HostingDevice.id).having(
            func.count(hd_models.SlotAllocation.logical_resource_id) == 0)
        # newest devices first so partially booted VMs go before warm ones
        query = query.order_by(
            hd_models.HostingDevice.created_at.desc(),
            func.count(hd_models.SlotAllocation.logical_resource_id))
        hd_candidates = query.all()
        num_possible_to_delete = min(len(hd_candidates), num)
        for i in range(num_possible_to_delete):
            # fetch resources before the VM is destroyed so they can be
            # cleaned up afterwards
            res = plugging_drv.get_hosting_device_resources(
                context, hd_candidates[i]['id'],
                hd_candidates[i]['complementary_id'], self.l3_tenant_id(),
                self.mgmt_nw_id())
            if self.svc_vm_mgr.delete_service_vm(context,
                                                 hd_candidates[i]['id']):
                with context.session.begin(subtransactions=True):
                    context.session.delete(hd_candidates[i])
                plugging_drv.delete_hosting_device_resources(
                    context, self.l3_tenant_id(), **res)
                num_deleted += 1
        LOG.info('Deleted %(num)d hosting devices based on template '
                 '%(t_id)s', {'num': num_deleted, 't_id': template['id']})
        return num_deleted
    def _delete_dead_service_vm_hosting_device(self, context, hosting_device):
        """Deletes a presumably dead <hosting_device> service VM.

        This will indirectly make all of its hosted resources unscheduled.

        :param context: request context
        :param hosting_device: db object for the hosting device, or None
        """
        if hosting_device is None:
            return
        plugging_drv = self.get_hosting_device_plugging_driver(
            context, hosting_device['template_id'])
        hosting_device_drv = self.get_hosting_device_driver(
            context, hosting_device['template_id'])
        if plugging_drv is None or hosting_device_drv is None:
            return
        # collect resources before the VM is (attempted to be) destroyed
        res = plugging_drv.get_hosting_device_resources(
            context, hosting_device['id'], hosting_device['complementary_id'],
            self.l3_tenant_id(), self.mgmt_nw_id())
        if not self.svc_vm_mgr.delete_service_vm(context,
                                                 hosting_device['id']):
            # best effort: keep going so the DB record is removed even if
            # Nova failed to delete the (dead) VM
            LOG.error('Failed to delete hosting device %s service VM. '
                      'Will un-register it anyway.',
                      hosting_device['id'])
        plugging_drv.delete_hosting_device_resources(
            context, self.l3_tenant_id(), **res)
        with context.session.begin(subtransactions=True):
            # remove all allocations in this hosting device
            context.session.query(hd_models.SlotAllocation).filter_by(
                hosting_device_id=hosting_device['id']).delete()
            context.session.delete(hosting_device)
    def _get_total_available_slots(self, context, template_id, capacity):
        """Returns available slots in idle devices based on <template_id>.

        Only slots in tenant unbound hosting devices are counted to ensure
        there is always hosting device slots available regardless of tenant.

        :param context: request context
        :param template_id: id of the hosting device template
        :param capacity: slot capacity of one device of this template
        :returns: number of fully idle devices times <capacity>
        """
        query = context.session.query(hd_models.HostingDevice.id)
        query = query.outerjoin(
            hd_models.SlotAllocation,
            hd_models.HostingDevice.id == hd_models.SlotAllocation
            .hosting_device_id)
        query = query.filter(
            hd_models.HostingDevice.template_id == template_id,
            hd_models.HostingDevice.admin_state_up == expr.true(),
            hd_models.HostingDevice.tenant_bound == expr.null())
        query = query.group_by(hd_models.HostingDevice.id)
        # NOTE: SQLAlchemy renders "== expr.null()" as "IS NULL", so this
        # HAVING clause matches devices with no slot allocation rows at all
        # (SUM over the outer-joined NULLs is NULL) -- i.e. fully idle ones.
        query = query.having(
            func.sum(hd_models.SlotAllocation.num_allocated) == expr.null())
        num_hosting_devices = query.count()
        return num_hosting_devices * capacity
def _exclusively_used(self, context, hosting_device, tenant_id):
"""Checks if only <tenant_id>'s resources use <hosting_device>."""
return (context.session.query(hd_models.SlotAllocation).filter(
hd_models.SlotAllocation.hosting_device_id == hosting_device['id'],
hd_models.SlotAllocation.logical_resource_owner != tenant_id).
first() is None)
def _update_hosting_device_exclusivity(self, context, hosting_device,
tenant_id):
"""Make <hosting device> bound or unbound to <tenant_id>.
If <tenant_id> is None the device is unbound, otherwise it gets bound
to that <tenant_id>
"""
with context.session.begin(subtransactions=True):
hosting_device['tenant_bound'] = tenant_id
context.session.add(hosting_device)
for item in (context.session.query(hd_models.SlotAllocation).
filter_by(hosting_device_id=hosting_device['id'])):
item['tenant_bound'] = tenant_id
context.session.add(item)
def _get_template_pool_lock(self, id):
"""Returns lock object for hosting device template with <id>."""
try:
return self._hosting_device_locks[id]
except KeyError:
self._hosting_device_locks[id] = threading.Lock()
return self._hosting_device_locks.get(id)
    def _obtain_hosting_device_credentials_from_config(self):
        """Obtains credentials from config file and stores them in memory.

        To be called before hosting device templates defined in the config
        file are created, since templates refer to these credentials by id.
        Populates self._credentials: {credential uuid: attribute dict}.
        """
        cred_dict = config.get_specific_config(
            'cisco_hosting_device_credential')
        # attribute schema used to validate each credential entry
        attr_info = {
            'name': {'allow_post': True, 'allow_put': True,
                     'validate': {'type:string': None}, 'is_visible': True,
                     'default': ''},
            'description': {'allow_post': True, 'allow_put': True,
                            'validate': {'type:string': None},
                            'is_visible': True, 'default': ''},
            'user_name': {'allow_post': True, 'allow_put': True,
                          'validate': {'type:string': None},
                          'is_visible': True, 'default': ''},
            'password': {'allow_post': True, 'allow_put': True,
                         'validate': {'type:string': None},
                         'is_visible': True, 'default': ''},
            'type': {'allow_post': True, 'allow_put': True,
                     'validate': {'type:string': None}, 'is_visible': True,
                     'default': ''}}
        self._credentials = {}
        for cred_uuid, kv_dict in cred_dict.items():
            # ensure cred_uuid is properly formatted
            cred_uuid = config.uuidify(cred_uuid)
            config.verify_resource_dict(kv_dict, True, attr_info)
            self._credentials[cred_uuid] = kv_dict
def _get_credentials(self, hosting_device):
creds = self._credentials.get(
hosting_device.credentials_id,
self._credentials.get(
hosting_device.template.default_credentials_id))
return {'user_name': creds['user_name'],
'password': creds['password']} if creds else None
    def _create_hosting_device_templates_from_config(self):
        """To be called late during plugin initialization so that any hosting
        device templates defined in the config file is properly inserted in
        the DB.

        Existing templates (matched by uuid) are updated; missing ones are
        created. An invalid definition aborts initialization.
        """
        hdt_dict = config.get_specific_config('cisco_hosting_device_template')
        attr_info = ciscohostingdevicemanager.RESOURCE_ATTRIBUTE_MAP[
            ciscohostingdevicemanager.DEVICE_TEMPLATES]
        adm_context = bc.context.get_admin_context()
        for hdt_uuid, kv_dict in hdt_dict.items():
            # ensure hdt_uuid is properly formatted
            hdt_uuid = config.uuidify(hdt_uuid)
            try:
                # probe for an existing template to decide create vs update
                self.get_hosting_device_template(adm_context, hdt_uuid)
                is_create = False
            except ciscohostingdevicemanager.HostingDeviceTemplateNotFound:
                is_create = True
            kv_dict['id'] = hdt_uuid
            kv_dict['tenant_id'] = self.l3_tenant_id()
            config.verify_resource_dict(kv_dict, True, attr_info)
            hdt = {ciscohostingdevicemanager.DEVICE_TEMPLATE: kv_dict}
            try:
                if is_create:
                    self.create_hosting_device_template(adm_context, hdt)
                else:
                    self.update_hosting_device_template(adm_context,
                                                        kv_dict['id'], hdt)
            except n_exc.NeutronException:
                with excutils.save_and_reraise_exception():
                    LOG.error('Invalid hosting device template definition '
                              'in configuration file for template = %s',
                              hdt_uuid)
    def _create_hosting_devices_from_config(self):
        """To be called late during plugin initialization so that any hosting
        device specified in the config file is properly inserted in the DB.

        Existing devices (matched by uuid) are updated; missing ones are
        created. An invalid specification aborts initialization.
        """
        hd_dict = config.get_specific_config('cisco_hosting_device')
        attr_info = ciscohostingdevicemanager.RESOURCE_ATTRIBUTE_MAP[
            ciscohostingdevicemanager.DEVICES]
        adm_context = bc.context.get_admin_context()
        for hd_uuid, kv_dict in hd_dict.items():
            # ensure hd_uuid is properly formatted
            hd_uuid = config.uuidify(hd_uuid)
            try:
                # probe for an existing device to decide create vs update
                old_hd = self.get_hosting_device(adm_context, hd_uuid)
                is_create = False
            except ciscohostingdevicemanager.HostingDeviceNotFound:
                old_hd = {}
                is_create = True
            kv_dict['id'] = hd_uuid
            kv_dict['tenant_id'] = self.l3_tenant_id()
            # make sure we keep using same config agent if it has been assigned
            kv_dict['cfg_agent_id'] = old_hd.get('cfg_agent_id')
            # make sure we keep using management port if it exists
            kv_dict['management_port_id'] = old_hd.get('management_port_id')
            config.verify_resource_dict(kv_dict, True, attr_info)
            hd = {ciscohostingdevicemanager.DEVICE: kv_dict}
            try:
                if is_create:
                    self.create_hosting_device(adm_context, hd)
                else:
                    self.update_hosting_device(adm_context, kv_dict['id'], hd)
            except n_exc.NeutronException:
                with excutils.save_and_reraise_exception():
                    LOG.error('Invalid hosting device specification in '
                              'configuration file for device = %s',
                              hd_uuid)
| |
#!/usr/bin/env python
# Copyright (c) 2011-2018, wradlib developers.
# Distributed under the MIT License. See LICENSE.txt for more info.
import unittest
import wradlib.adjust as adjust
import numpy as np
# Arguments to be used throughout all test classes
# 4x4 grid of radar ("raw") pixel coordinates, flattened to shape (16, 2)
raw_x, raw_y = np.meshgrid(np.arange(4).astype("f4"),
                           np.arange(4).astype("f4"))
raw_coords = np.vstack((raw_x.ravel(), raw_y.ravel())).T
# four rain gauge ("obs") locations inside that grid
obs_coords = np.array([[1., 1.], [2., 1.], [1., 3.5], [3.5, 3.]])
# radar field: two identical columns (time steps), shape (16, 2)
raw = np.array([[1., 2., 1., 0., 1., 2., 1.,
                 2., 1., 0., 0., 3., 4., 0., 4., 0.],
                [1., 2., 1., 0., 1., 2., 1.,
                 2., 1., 0., 0., 3., 4., 0., 4., 0.]
                ]).T
# gauge observations: one value per gauge, two identical columns
obs = np.array([[2., 3, 0., 4.], [2., 3, 0., 4.]]).T
# number of nearest raw neighbours used per gauge
nnear_raws = 2
# minimum number of valid gauges required for adjustment
mingages = 3
class AdjustBaseTest(unittest.TestCase):
    """Placeholder tests for the AdjustBase interface."""

    def setUp(self):
        # expose the shared module-level fixtures as instance attributes
        (self.raw_coords, self.obs_coords, self.raw, self.obs,
         self.nnear_raws, self.mingages) = (raw_coords, obs_coords, raw, obs,
                                            nnear_raws, mingages)

    def test___init__(self):
        pass

    def test__checkip(self):
        pass

    def test__check_shape(self):
        pass

    def test___call__(self):
        pass

    def test__get_valid_pairs(self):
        pass

    def test_xvalidate(self):
        pass
class AdjustAddTest(unittest.TestCase):
    """Tests for the additive error model (adjust.AdjustAdd)."""

    def setUp(self):
        # expose the shared module-level fixtures as instance attributes
        (self.raw_coords, self.obs_coords, self.raw, self.obs,
         self.nnear_raws, self.mingages) = (raw_coords, obs_coords, raw, obs,
                                            nnear_raws, mingages)

    def test_AdjustAdd_1(self):
        """Adjusted field matches reference values for 1- and 2-column input."""
        adjuster = adjust.AdjustAdd(self.obs_coords, self.raw_coords,
                                    nnear_raws=self.nnear_raws,
                                    mingages=self.mingages)
        result = adjuster(self.obs, self.raw)
        # reference values for one column; both input columns are identical
        expected_col = np.array([1.62818784, 2.75926679, 2.09428144,
                                 1.1466651, 1.51948941, 2.5, 2.5,
                                 3.27498305, 1.11382822, 0.33900645,
                                 0.89999998, 4.52409637, 3.08139533,
                                 0., 3.99180328, 2.16913891])
        expected = np.column_stack((expected_col, expected_col))
        self.assertTrue(np.allclose(result, expected))
        # test in case only one (1-d) dataset is passed
        result = adjuster(self.obs[:, 0], self.raw[:, 0])
        self.assertTrue(np.allclose(result, expected[:, 0]))
class AdjustMultiplyTest(unittest.TestCase):
    """Tests for the multiplicative error model (adjust.AdjustMultiply)."""

    def setUp(self):
        # expose the shared module-level fixtures as instance attributes
        (self.raw_coords, self.obs_coords, self.raw, self.obs,
         self.nnear_raws, self.mingages) = (raw_coords, obs_coords, raw, obs,
                                            nnear_raws, mingages)

    def test_AdjustMultiply_1(self):
        """Adjusted field matches reference values for 1- and 2-column input."""
        adjuster = adjust.AdjustMultiply(self.obs_coords, self.raw_coords,
                                         nnear_raws=self.nnear_raws,
                                         mingages=self.mingages)
        result = adjuster(self.obs, self.raw)
        # reference values for one column; both input columns are identical
        expected_col = np.array([1.44937706, 3.04539442, 1.74463618, 0.,
                                 1.37804615, 2.66666675, 2., 3.74106812,
                                 1.17057478, 0., 0., 6.14457822,
                                 2.43439031, 0., 4.60765028, 0.])
        expected = np.column_stack((expected_col, expected_col))
        self.assertTrue(np.allclose(result, expected))
        # test in case only one (1-d) dataset is passed
        result = adjuster(self.obs[:, 0], self.raw[:, 0])
        self.assertTrue(np.allclose(result, expected[:, 0]))
class AdjustMixedTest(unittest.TestCase):
    """Tests for the mixed additive/multiplicative model (adjust.AdjustMixed)."""

    def setUp(self):
        # expose the shared module-level fixtures as instance attributes
        (self.raw_coords, self.obs_coords, self.raw, self.obs,
         self.nnear_raws, self.mingages) = (raw_coords, obs_coords, raw, obs,
                                            nnear_raws, mingages)

    def test_AdjustMixed_1(self):
        """Adjusted field matches reference values for 1- and 2-column input."""
        adjuster = adjust.AdjustMixed(self.obs_coords, self.raw_coords,
                                      nnear_raws=self.nnear_raws,
                                      mingages=self.mingages)
        result = adjuster(self.obs, self.raw)
        # reference values for one column; both input columns are identical
        expected_col = np.array([1.51427719, 2.95735525, 1.85710269,
                                 0.36806121, 1.43181512, 2.61538471,
                                 2.15384617, 3.59765723, 1.18370627,
                                 0.15027952, 0.30825174, 5.63558862,
                                 2.49066845, -0.29200733, 4.31646909,
                                 0.67854041])
        expected = np.column_stack((expected_col, expected_col))
        self.assertTrue(np.allclose(result, expected))
        # test in case only one (1-d) dataset is passed
        result = adjuster(self.obs[:, 0], self.raw[:, 0])
        self.assertTrue(np.allclose(result, expected[:, 0]))
class AdjustMFBTest(unittest.TestCase):
    """Tests for mean-field-bias adjustment (adjust.AdjustMFB)."""

    def setUp(self):
        self.raw_coords = np.array([[0., 0.], [1., 1.]])
        self.obs_coords = np.array([[0.5, 0.5], [1.5, 1.5]])
        self.raw = np.array([2., 2.])
        self.obs = np.array([4., 4.])
        self.nnear_raws = nnear_raws
        self.mingages = 0
        self.mfb_args = dict(method="mean")

    def test_AdjustMFB_1(self):
        """Mean bias of 2 scales raw to obs; other methods are smoke-tested."""
        adj = adjust.AdjustMFB(self.obs_coords, self.raw_coords,
                               nnear_raws=self.nnear_raws,
                               mingages=self.mingages, mfb_args=self.mfb_args)
        res = adj(self.obs, self.raw)
        shouldbe = np.array([4., 4.])
        self.assertTrue(np.allclose(res, shouldbe))
        adj = adjust.AdjustMFB(self.obs_coords, self.raw_coords,
                               nnear_raws=self.nnear_raws,
                               mingages=self.mingages,
                               mfb_args=dict(method="median"))
        adj(self.obs, self.raw)
        # BUG FIX: minr is a numeric correlation threshold; it was passed as
        # the string '0.7', which makes the float comparison inside the
        # linregr method fail (TypeError) on Python 3.
        adj = adjust.AdjustMFB(self.obs_coords, self.raw_coords,
                               nnear_raws=self.nnear_raws,
                               mingages=self.mingages,
                               mfb_args=dict(method="linregr", minslope=1.0,
                                             minr=0.7, maxp=0.5))
        adj(self.obs, self.raw)
class AdjustNoneTest(unittest.TestCase):
    """AdjustNone must return the raw values untouched."""

    def setUp(self):
        self.raw_coords = np.array([[0., 0.], [1., 1.]])
        self.obs_coords = np.array([[0.5, 0.5], [1.5, 1.5]])
        self.raw = np.array([2., 2.])
        self.obs = np.array([4., 4.])
        self.nnear_raws = nnear_raws
        self.mingages = 0
        self.mfb_args = dict(method="mean")

    def test_AdjustNone_1(self):
        """The "no adjustment" strategy is the identity on the raw field."""
        adjuster = adjust.AdjustNone(self.obs_coords, self.raw_coords,
                                     nnear_raws=self.nnear_raws,
                                     mingages=self.mingages)
        adjusted = adjuster(self.obs, self.raw)
        self.assertTrue(np.allclose(adjusted, np.array([2., 2.])))
class GageOnlyTest(unittest.TestCase):
    """GageOnly must interpolate the gauge values themselves."""

    def setUp(self):
        self.raw_coords = np.array([[0., 0.], [1., 1.]])
        self.obs_coords = np.array([[0.5, 0.5], [1.5, 1.5]])
        self.raw = np.array([2., 2.])
        self.obs = np.array([4., 4.])
        self.nnear_raws = nnear_raws
        self.mingages = 0
        self.mfb_args = dict(method="mean")

    def test_GageOnly_1(self):
        """Result equals the (constant) gauge field, ignoring raw values."""
        adjuster = adjust.GageOnly(self.obs_coords, self.raw_coords,
                                   nnear_raws=self.nnear_raws,
                                   mingages=self.mingages)
        adjusted = adjuster(self.obs, self.raw)
        self.assertTrue(np.allclose(adjusted, np.array([4., 4.])))
class AdjustHelperTest(unittest.TestCase):
    """Tests for module-level helper functions in wradlib.adjust."""

    def test__get_neighbours_ix(self):
        pass

    def test__get_statfunc(self):
        """Known statistic names resolve; unknown names raise NameError."""
        adjust._get_statfunc('median')
        adjust._get_statfunc('best')
        with self.assertRaises(NameError):
            adjust._get_statfunc('wradlib')

    def test_best(self):
        """best() picks the candidate value closest to the target."""
        target = 7.5
        candidates = np.array([0., 1., 0., 1., 0., 7.7, 8., 8., 8., 8.])
        self.assertEqual(adjust.best(target, candidates), 7.7)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| |
'''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
Created on May 14, 2012
@author: Jeffrey Portman
@contact: chromox@gmail.com
@summary: Data access drivers (SQLite and MySQL) that read price and
          fundamental data into pandas DataFrames.
'''
import os
import time
import pandas
import pickle
import sqlite3
import numpy
import datetime
import MySQLdb
import qsdateutil as du
from operator import itemgetter
from dateutil.relativedelta import relativedelta
# Module-level flag; its consumers are not visible in this part of the file --
# presumably selects the newer data-access code path. TODO confirm.
B_NEW = True
class DriverInterface(object):
    '''
    DriverInterface is the data access interface that all driver classes
    must adhere to. Concrete drivers (e.g. _SQLite, _MySQL) override every
    method; the base implementations only raise NotImplementedError.
    '''

    def get_data(self, ts_list, symbol_list, data_item, verbose=False,
                 include_delisted=False):
        """Return a list of DataFrames, one per entry in data_item."""
        # NOTE: the messages below previously read "implmented" (typo).
        raise NotImplementedError("Selected Driver has not implemented "
                                  "get_data.")

    def get_all_symbols(self):
        """Return all symbols known to the data source."""
        raise NotImplementedError("Selected Driver has not implemented "
                                  "get_all_symbols.")

    def get_list(self, list_name):
        """Return the symbols belonging to the symbol list <list_name>."""
        raise NotImplementedError("Selected Driver has not implemented "
                                  "get_list.")

    def get_all_lists(self):
        """Return the names of all symbol lists in the data source."""
        raise NotImplementedError("Selected Driver has not implemented "
                                  "get_all_lists.")
class _SQLite(DriverInterface):
    """
    Driver class for SQLite
    """

    def __init__(self):
        """Resolve the database file from the QSDB environment variable.

        @raise RuntimeError: if QSDB is not set.
        """
        try:
            self.sqldbfile = os.environ['QSDB']
        except KeyError:
            raise RuntimeError("Database environment variable not set.")
        self._connect()

    def _connect(self):
        """Open the SQLite connection and create a reusable cursor.

        PARSE_DECLTYPES makes sqlite3 convert declared column types
        (e.g. timestamps) back into Python objects.
        """
        self.connection = sqlite3.connect(self.sqldbfile,
                                          detect_types=sqlite3.PARSE_DECLTYPES)
        self.cursor = self.connection.cursor()

    def get_data(self, ts_list, symbol_list, data_item,
                 verbose=False, include_delisted=False):
        """Read data, going through the scratch cache when available."""
        # NOTE(review): a bound method object is always truthy, so the cache
        # branch is always taken unless try_cache is explicitly reassigned.
        if _ScratchCache.try_cache:
            return _ScratchCache.try_cache(ts_list, symbol_list, data_item,
                verbose, include_delisted, self.get_data_hard_read, "SQLite")
        else:
            return self.get_data_hard_read(ts_list, symbol_list, data_item,
                verbose, include_delisted)

    def get_data_hard_read(self, ts_list, symbol_list, data_item,
                           verbose=False, include_delisted=False):
        """
        Read data into a DataFrame directly from SQLite.

        @param ts_list: List of timestamps for which the data values are
            needed. Timestamps must be sorted.
        @param symbol_list: The list of symbols for which the data values
            are needed.
        @param data_item: List of data items needed, e.g. open, close,
            volume. One DataFrame is returned per item.
        @param verbose: unused; kept for interface compatibility.
        @param include_delisted: unused here; kept for interface
            compatibility.
        @return: list of DataFrames (index: timestamps, columns: symbols).
        @note: If a symbol is not found all the values in the column for
            that stock will be NaN. Execution then continues as usual. No
            errors are raised at the moment.
        """
        columns = []
        # Check input data
        assert isinstance(ts_list, list)
        assert isinstance(symbol_list, list)
        assert isinstance(data_item, list)
        # Combine symbols into a quoted, comma-separated list for the query.
        # NOTE(review): symbols are spliced into the SQL text; callers must
        # not pass untrusted strings here.
        symbol_query_list = ",".join(["'" + sym + "'" for sym in symbol_list])
        # Qualify each requested field with the price-table alias.
        # (List comprehension instead of map() so len() keeps working on
        # Python 3, where map() returns an iterator.)
        data_item = ["B." + item for item in data_item]
        query_select_items = ",".join(data_item)
        self.cursor.execute("""
        select A.code as symbol, B.date,""" + query_select_items +
        """ from price B, asset A where A.assetid = B.assetid and
        B.date >= (?) and B.date <= (?)
        and A.code in (%s);""" % symbol_query_list, (ts_list[0], ts_list[-1],))
        results = self.cursor.fetchall()
        # Keep only rows whose timestamp was actually requested; the SQL
        # range query may also return intermediate dates. (The previous
        # implementation deleted from the list while enumerating it, which
        # skips the element following every deletion.)
        requested_ts = set(ts_list)
        results = [row for row in results if row[1] in requested_ts]
        if len(results) == 0:
            # nothing matched: return one empty frame per requested item
            for _unused in range(len(data_item)):
                columns.append(pandas.DataFrame(columns=symbol_list))
            return columns
        # Create pandas DataFrames in the expected format, one per item.
        current_dict = {}
        symbol_ranges = self._find_ranges_of_symbols(results)
        for current_column in range(len(data_item)):
            for symbol, ranges in symbol_ranges.items():
                current_symbol_data = results[ranges[0]:ranges[1] + 1]
                current_dict[symbol] = pandas.Series(
                    [row[current_column + 2] for row in current_symbol_data],
                    index=[row[1] for row in current_symbol_data])
            # Make DataFrame
            columns.append(pandas.DataFrame(current_dict,
                                            columns=symbol_list))
            current_dict = {}
        return columns

    def get_list(self, list_name):
        """Return all symbols belonging to the symbol list <list_name>."""
        self.cursor.execute("""SELECT A.Symbol
            FROM tblEquity A JOIN tblListDetail C ON A.ID = C.tblEquity_ID
            JOIN tblListHeader B ON B.ID = C.tblListHeader_ID
            WHERE B.ListName = ?
            """, (list_name,))
        return self.cursor.fetchall()

    def get_all_symbols(self):
        """Return every distinct symbol in the equity table."""
        self.cursor.execute("SELECT DISTINCT symbol FROM tblEquity")
        return self.cursor.fetchall()

    def get_all_lists(self):
        """Return the names of all symbol lists."""
        self.cursor.execute("SELECT ListName FROM tblListHeader")
        return self.cursor.fetchall()

    def _find_ranges_of_symbols(self, results):
        '''Finds the inclusive (start, end) index range of each symbol.

        Assumes <results> is non-empty and grouped by symbol, i.e. all rows
        for one symbol are contiguous.
        '''
        symbol_dict = {}
        current_symbol = results[0][0]
        start = 0
        for i, row in enumerate(results):
            if row[0] != current_symbol:
                symbol_dict[current_symbol] = (start, i - 1)
                start = i
                current_symbol = row[0]
        # handle last symbol
        symbol_dict[current_symbol] = (start, i)
        return symbol_dict
class _MySQL(DriverInterface):
"""
Driver class for SQLite
"""
    def __init__(self):
        # Connection is now handled on a per-call basis
        # (each query connects in _connect and closes in _disconnect),
        # so there is nothing to initialize here.
        #self._connect()
        pass
    def __del__(self):
        # No per-instance resources to release; connections are per call.
        pass
def _connect(self):
s_filepath = os.path.dirname(os.path.abspath(__file__))
# Read password from a file (does not support whitespace)
s_pass = open(os.path.join(s_filepath,'pass.txt')).read().rstrip()
try:
self.db = MySQLdb.connect("cordoba.lucenaresearch.com", "finance", s_pass, "premiumdata")
except:
s_filepath = os.path.dirname(os.path.abspath(__file__))
# Read password from a file (does not support whitespace)
s_pass = open(os.path.join(s_filepath,'pass2.txt')).read().rstrip()
self.db = MySQLdb.connect("cordoba.lucenaresearch.com", "finance", s_pass, "premiumdata")
self.cursor = self.db.cursor()
    def _disconnect(self):
        """Close the cursor and the connection opened by _connect."""
        self.cursor.close()
        self.db.close()
    def get_data(self, ts_list, symbol_list, data_item,
                 verbose=False, include_delisted=False):
        """Read data, going through the scratch cache when available.

        Arguments are forwarded to get_data_hard_read, which the cache
        invokes on a miss.
        """
        # NOTE(review): a bound method object is always truthy, so the cache
        # branch is always taken unless try_cache is explicitly reassigned.
        if _ScratchCache.try_cache:
            return _ScratchCache.try_cache(ts_list, symbol_list, data_item,
                verbose, include_delisted, self.get_data_hard_read, "MySQL")
        else:
            return self.get_data_hard_read(ts_list, symbol_list, data_item,
                verbose, include_delisted)
    def get_data_hard_read(self, ts_list, symbol_list, data_item, verbose=False, include_delisted=False):
        """
        Read data into a DataFrame directly from the MySQL premiumdata schema
        (no cache), one DataFrame per requested item.
        @param ts_list: List of timestamps for which the data values are needed. Timestamps must be sorted.
        @param symbol_list: The list of symbols for which the data values are needed
        @param data_item: The data_item needed. Like open, close, volume etc.  May be a list, in which case a list of DataFrame is returned.
        @param include_delisted: If true, delisted securities will be included.
        @note: If a symbol is not found all the values in the column for that stock will be NaN. Execution then
        continues as usual. No errors are raised at the moment.
        """
        # Validate timestamps and symbol list
        assert len(symbol_list) == len(set(symbol_list)), "Duplicate symbols"
        assert len(ts_list) == len(set(ts_list)), "Duplicate timestamps"
        self._connect()
        try:
            columns_tech = []
            columns_fund = []
            results_tech = []
            results_fund = []
            columns_asset = []
            results_asset = []
            columns_dividend = []
            results_dividend = []
            columns_dilution = []
            results_dilution = []
            columns_insider = []
            # Check input data
            assert isinstance(ts_list, list)
            assert isinstance(symbol_list, list)
            assert isinstance(data_item, list)
            # Map to new database schema to preserve legacy code
            ds_map = {'open':'tropen',
                      'high':'trhigh',
                      'low':'trlow',
                      'close':'trclose',
                      'actual_close':'close',
                      'adjusted_close':'adjclose'}
            #keys for fundamental indicators
            ls_fund_keys = ['sharesout',
                            'latestavailableannual',
                            'latestavailableinterim',
                            'projfiscalyearend',
                            'peproj',
                            'pe',
                            'eps',
                            # 'dividend',
                            # 'yield',
                            'pegproj',
                            'p2b',
                            'p2s',
                            'totd2eq',
                            'ebitda',
                            'grossmargin'
                            ]
            #Keys to indicator from asset table
            ls_asset_keys = ['icbcode', 'gicscode']
            #Keys to indicator from dividend table
            ls_dividend_keys = ['divamt']
            #Keys to indicator from dilution table
            ls_dilution_keys = ['dilfact']
            #Keys to indicator from insider table
            ls_insider_keys = ['rating']
            # Copy so the caller's list is not mutated by the remapping below.
            data_item = data_item[:]
            data_fund = []
            li_fund_index = []
            data_tech = []
            li_tech_index = []
            data_asset = []
            li_asset_index = []
            data_dividend = []
            li_dividend_index = []
            data_dilution = []
            li_dilution_index = []
            data_insider = []
            li_insider_index = []
            # Partition the requested items by source table, remembering each
            # item's position so the output list can be reassembled in order.
            for i, item in enumerate(data_item):
                if item in ls_fund_keys:
                    data_fund.append(item)
                    li_fund_index.append(i)
                elif item in ls_asset_keys:
                    data_asset.append(item)
                    li_asset_index.append(i)
                elif item in ls_dividend_keys:
                    data_dividend.append(item)
                    li_dividend_index.append(i)
                elif item in ls_dilution_keys:
                    data_dilution.append(item)
                    li_dilution_index.append(i)
                elif item in ls_insider_keys:
                    data_insider.append(item)
                    li_insider_index.append(i)
                else:
                    data_tech.append(item)
                    li_tech_index.append(i)
            # Translate legacy technical item names to the new column names.
            for i, item in enumerate(data_tech):
                if item in ds_map.keys():
                    data_tech[i] = ds_map[item]
            # Combine Symbols List for Query
            # NOTE(review): symbols and ids are spliced into the SQL text by
            # string concatenation (no escaping) — safe only for trusted input.
            symbol_query_list = ",".join(map(lambda x: "'" + x + "'", symbol_list))
            # Combine Data Fields for Query
            query_select_tech_items = ",".join(data_tech)
            query_select_fund_items = ",".join(data_fund)
            query_select_asset_items = ",".join(data_asset)
            query_select_dividend_items = ",".join(data_dividend)
            query_select_dilution_items = ",".join(data_dilution)
            query_select_insider_items = ",".join(data_insider)
            # Now convert to ID's
            self.cursor.execute('''select assetid, code from asset
            where code in( ''' + symbol_query_list + ''') AND recordstatus=1''')
            # Dictionary linking id's:symbols (assetid -> symbol code)
            d_id_sym = dict(self.cursor.fetchall())
            # NOTE(review): .keys() is a list under Python 2 (this file uses
            # Python 2 print statements); under Python 3 this would be a view.
            ls_ids = d_id_sym.keys()
            s_idlist = ",".join([str(x) for x in ls_ids])
            s_query_tech = 'SELECT assetid, date, ' + query_select_tech_items + \
                           ' FROM priceadjusted WHERE assetid in (' + s_idlist + ')' + \
                           ' AND date >= %s AND date <= %s '
            s_query_fund = 'SELECT assetid, date, ' + query_select_fund_items + \
                           ' FROM fundamentals WHERE assetid in (' +s_idlist +')' + \
                           ' AND date >= %s AND date <= %s '
            s_query_asset = 'SELECT assetid, ' + query_select_asset_items + \
                            ' FROM asset WHERE assetid in (' +s_idlist +')'
            s_query_dividend = 'SELECT assetid, exdate, recordstatus, ' + query_select_dividend_items + \
                               ' FROM dividend WHERE assetid in (' +s_idlist +')' + \
                               ' AND exdate >= %s AND exdate <= %s AND recordstatus=1'
            s_query_dilution = 'SELECT assetid, exdate, recordstatus, diltypeid, ' + query_select_dilution_items + \
                               ' FROM dilution WHERE assetid in (' +s_idlist +')' + \
                               ' AND exdate >= %s AND exdate <= %s AND recordstatus=1 AND diltypeid in (1,3)'
            s_query_insider = 'SELECT assetid, date, ' + query_select_insider_items + \
                              ' FROM insider_activity WHERE assetid in (' +s_idlist +')' + \
                              ' AND date >= %s AND date <= %s '
            if len(query_select_tech_items) !=0:
                try:
                    self.cursor.execute(s_query_tech, (ts_list[0].replace(hour=0), ts_list[-1]))
                except:
                    print 'Data error1, probably using an non-existent symbol'
                # Retrieve Results
                results_tech = self.cursor.fetchall()
                # Create Data frames
                for i in range(len(data_tech)):
                    columns_tech.append(pandas.DataFrame(index=ts_list, columns=symbol_list))
                # Loop through rows; prices are stamped at 16:00 (market close)
                dt_time = datetime.time(hour=16)
                for row in results_tech:
                    #format of row is (sym, date, item1, item2, ...)
                    dt_date = datetime.datetime.combine(row[1], dt_time)
                    # NOTE(review): 'i' here is the leaked index from the
                    # frame-creation loop above (always the last frame); all
                    # frames share the same index, so the membership test is
                    # equivalent — but this is fragile. Confirm intent.
                    if dt_date not in columns_tech[i].index:
                        continue
                    # Add all columns to respective data-frames
                    for i in range(len(data_tech)):
                        columns_tech[i][d_id_sym[row[0]]][dt_date] = row[i+2]
            if len(query_select_asset_items)!=0:
                try:
                    self.cursor.execute(s_query_asset)
                except:
                    print 'Data error2, probably using an non-existent symbol'
                # Retrieve Results
                results_asset = self.cursor.fetchall()
                # print results_asset
                # Create Data frames
                for i in range(len(data_asset)):
                    columns_asset.append(pandas.DataFrame(index=ts_list, columns=symbol_list))
                # Loop through rows; asset attributes are constant over time,
                # so the whole column is filled with a single value.
                for row in results_asset:
                    #format of row is (sym, item1, item2, ...)
                    # Add all columns to respective data-frames
                    for i in range(len(data_asset)):
                        if row[i+1] == None or row[i+1] == '':
                            columns_asset[i][d_id_sym[row[0]]] = float('nan')
                        else:
                            columns_asset[i][d_id_sym[row[0]]] = float(row[i+1])
            if len(query_select_fund_items)!=0:
                try:
                    self.cursor.execute(s_query_fund, (ts_list[0].replace(hour=0), ts_list[-1]))
                except:
                    print 'Data error3, probably using an non-existent symbol'
                # Retrieve Results
                results_fund = self.cursor.fetchall()
                #print results_fund[2]
                # Create Data frames
                for i in range(len(data_fund)):
                    columns_fund.append(pandas.DataFrame(index=ts_list, columns=symbol_list))
                # Loop through rows
                dt_time = datetime.time(hour=16)
                for row in results_fund:
                    #format of row is (sym, date, item1, item2, ...)
                    dt_date = datetime.datetime.combine(row[1], dt_time)
                    # NOTE(review): same leaked-index pattern as above.
                    if dt_date not in columns_fund[i].index:
                        continue
                    # Add all columns to respective data-frames
                    for i in range(len(data_fund)):
                        #print type(row[i+2])
                        #print (type(row[i+2]) == datetime.date)
                        if (type(row[i+2])!=datetime.date):
                            columns_fund[i][d_id_sym[row[0]]][dt_date] = row[i+2]
                        else:
                            # Date-valued fundamentals (e.g. fiscal-year end)
                            # are stored as seconds since the Unix epoch.
                            columns_fund[i][d_id_sym[row[0]]][dt_date] = int((row[
                                i+2]-datetime.date(1970,1,1)).total_seconds())
            if len(query_select_dividend_items)!=0:
                try:
                    self.cursor.execute(s_query_dividend, (ts_list[0].replace(hour=0), ts_list[-1]))
                except:
                    print 'Data error4, probably using an non-existent symbol'
                # Retrieve Results
                results_dividend = self.cursor.fetchall()
                # print results_dividend
                # Create Data frames
                for i in range(len(data_dividend)):
                    columns_dividend.append(pandas.DataFrame(index=ts_list, columns=symbol_list))
                # Loop through rows
                dt_time = datetime.time(hour=16)
                for row in results_dividend:
                    #format of row is (sym, date, recordstatus, item1, ...),
                    # hence the i+3 offsets below.
                    dt_date = datetime.datetime.combine(row[1], dt_time)
                    if dt_date not in columns_dividend[i].index:
                        continue
                    # Add all columns to respective data-frames; multiple
                    # distinct dividends on the same ex-date are summed.
                    for i in range(len(data_dividend)):
                        if numpy.isnan(columns_dividend[i][d_id_sym[row[0]]][dt_date]):
                            columns_dividend[i][d_id_sym[row[0]]][dt_date] = row[i+3]
                        elif columns_dividend[i][d_id_sym[row[0]]][dt_date] != row[i+3]:
                            columns_dividend[i][d_id_sym[row[0]]][dt_date] += row[i+3]
            if len(query_select_dilution_items)!=0:
                try:
                    self.cursor.execute(s_query_dilution, (ts_list[0].replace(hour=0), ts_list[-1]))
                except:
                    print 'Data error5, probably using an non-existent symbol'
                # Retrieve Results
                results_dilution = self.cursor.fetchall()
                # print results_dilution
                # Create Data frames
                for i in range(len(data_dilution)):
                    columns_dilution.append(pandas.DataFrame(index=ts_list, columns=symbol_list))
                # Loop through rows
                dt_time = datetime.time(hour=16)
                for row in results_dilution:
                    #format of row is (sym, date, recordstatus, diltypeid,
                    # item1, ...), hence the i+4 offsets below.
                    dt_date = datetime.datetime.combine(row[1], dt_time)
                    if dt_date not in columns_dilution[i].index:
                        continue
                    # Add all columns to respective data-frames; multiple
                    # distinct dilution factors on one date are multiplied.
                    for i in range(len(data_dilution)):
                        if numpy.isnan(columns_dilution[i][d_id_sym[row[0]]][dt_date]):
                            columns_dilution[i][d_id_sym[row[0]]][dt_date] = row[i+4]
                        elif columns_dilution[i][d_id_sym[row[0]]][dt_date] != row[i+4]:
                            columns_dilution[i][d_id_sym[row[0]]][dt_date] *= row[i+4]
            if len(query_select_insider_items) != 0:
                try:
                    self.cursor.execute(s_query_insider, (ts_list[0].replace(hour=0), ts_list[-1]))
                except:
                    print 'Data error6, probably using an non-existent symbol'
                # Retrieve Results
                results_insider = self.cursor.fetchall()
                # Create Data frames (zero-filled: no activity means 0, not NaN)
                for i in range(len(data_insider)):
                    columns_insider.append(pandas.DataFrame(index=ts_list,
                                                            columns=symbol_list).fillna(0))
                # Loop through rows
                dt_time = datetime.time(hour=16)
                for row in results_insider:
                    #format of row is (sym, date, item1, item2, ...)
                    dt_date = datetime.datetime.combine(row[1], dt_time)
                    if dt_date not in columns_insider[i].index:
                        # Snap an off-calendar date back to the closest
                        # preceding timestamp in the index; drop rows that
                        # fall before the first requested timestamp.
                        i_index = columns_insider[i].index.searchsorted(dt_date)
                        if i_index == 0:
                            continue
                        i_index -= 1
                        dt_new = columns_insider[i].index[i_index]
                        dt_date = dt_new
                    # Add all columns to respective data-frames
                    for i in range(len(data_insider)):
                        columns_insider[i][d_id_sym[row[0]]][dt_date] = row[i+2]
            # Reassemble the per-table frame lists into the caller's original
            # item order (positions recorded during partitioning above).
            columns = [numpy.NaN] * len(data_item)
            for i, item in enumerate(li_tech_index):
                columns[item] = columns_tech[i]
            for i, item in enumerate(li_fund_index):
                columns[item] = columns_fund[i]
            for i, item in enumerate(li_asset_index):
                columns[item] = columns_asset[i]
            for i, item in enumerate(li_dividend_index):
                columns[item] = columns_dividend[i]
            for i, item in enumerate(li_dilution_index):
                columns[item] = columns_dilution[i]
            for i, item in enumerate(li_insider_index):
                columns[item] = columns_insider[i]
            return columns
        finally:
            self._disconnect()
    def get_dividends(self, ts_list, symbol_list):
        """
        Read dividend data into a DataFrame from MySQL.
        @param ts_list: List of timestamps for which the data values are needed. Timestamps must be sorted.
        @param symbol_list: The list of symbols for which the data values are needed
        @return: DataFrame reindexed to ts_list, one column per symbol,
            holding the dividend amount going ex at 16:00 on that day
            (NaN where there is none).
        """
        self._connect()
        try:
            # Combine Symbols List for Query
            # NOTE(review): symbols are spliced into the SQL by string
            # concatenation (no escaping) — trusted input only.
            symbol_query_list = ",".join(map(lambda x: "'" + x + "'", symbol_list))
            self.cursor.execute("""
            select code, exdate, divamt
            from dividend B, asset A where A.assetid = B.assetid and
            B.exdate >= %s and B.exdate <= %s and A.code in (
            """ + symbol_query_list + """)""", (ts_list[0].replace(hour=0),
                                                ts_list[-1],))
            # Retrieve Results
            results = self.cursor.fetchall()
            # Remove all rows that were not asked for
            results = list(results)
            if len(results) == 0:
                return pandas.DataFrame(columns=symbol_list)
            # Create Pandas DataFrame in Expected Format: one Series per
            # symbol, indexed by ex-date shifted to the 16:00 close stamp.
            current_dict = {}
            symbol_ranges = self._find_ranges_of_symbols(results)
            for symbol, ranges in symbol_ranges.items():
                current_symbol_data = results[ranges[0]:ranges[1] + 1]
                current_dict[symbol] = pandas.Series(map(itemgetter(2),
                                                         current_symbol_data),
                    index=map(lambda x: itemgetter(1)(x) + relativedelta(hours=16),
                              current_symbol_data))
            # Make DataFrame
            ret = pandas.DataFrame(current_dict, columns=symbol_list)
        finally:
            self._disconnect()
        return ret.reindex(ts_list)
    def get_list(self, list_name):
        """Return the sorted symbols of a named list or of an index.

        @param list_name: either a (unicode) string naming an entry in
            premiumdata.lists, or a numeric value treated as the assetid
            of an index whose constituents are returned.
        """
        self._connect()
        try:
            if type(list_name) == type('str') or \
                    type(list_name) == type(u'unicode'):
                # NOTE(review): args here is a bare string, not a 1-tuple;
                # MySQLdb accepts a single scalar, but a tuple would be the
                # conventional, safer spelling — confirm before changing.
                self.cursor.execute("""select symbol from premiumdata.lists
                    where name=%s;""", (list_name))
            else:
                # Numeric list_name: look up current index constituents
                # (recordstatus=1 and statuscodeid < 100 filter to live rows).
                self.cursor.execute("""select myself.code as symbol from
                    indexconstituent consititue1_, asset myself
                    where myself.assetid = consititue1_.assetid and myself.recordstatus=1 and myself.statuscodeid < 100 and
                    consititue1_.indexassetid = %s;""", (str(int(list_name))))
            lt_ret = self.cursor.fetchall()
        finally:
            self._disconnect()
        return sorted([x[0] for x in lt_ret])
    def get_all_symbols(self, b_dead=False):
        ''' Returns all symbols, sorted.

        @param b_dead: when True, include dead/delisted symbols
            (statuscodeid <= 100 instead of the strict < 100).
        '''
        self._connect()
        try:
            if b_dead:
                self.cursor.execute('''select distinct code from asset a where
                    a.statuscodeid<=100 and a.recordstatus=1''')
            else:
                self.cursor.execute('''select distinct code from asset a where
                    a.statuscodeid<100 and a.recordstatus=1''')
            lt_ret = self.cursor.fetchall()
        finally:
            self._disconnect()
        return sorted([x[0] for x in lt_ret])
    def get_all_lists(self):
        """Return the issuer names of all assets that act as an index
        (i.e. appear as indexassetid in indexconstituent), sorted by name.
        """
        self._connect()
        try:
            self.cursor.execute("""select asset0_.assetid as id, asset0_.issuername as name
                from asset asset0_ where exists
                (select consititue1_.assetid from indexconstituent consititue1_
                where asset0_.assetid=consititue1_.indexassetid)
                order by asset0_.issuername;""")
            lt_ret = self.cursor.fetchall()
        finally:
            self._disconnect()
        # x[1] is the issuer name (x[0] is the assetid).
        return sorted([x[1] for x in lt_ret])
    def get_last_date(self):
        ''' Returns last day of valid data, stamped at 16:00.

        SPY's most recent price row is used as a proxy for the latest
        date with data for the whole market.
        '''
        self._connect()
        try:
            self.cursor.execute( ''' select ts from premiumdata.price
                p,premiumdata.asset a, (select assetid as id,max(date)
                as ts from premiumdata.price group by assetid) s
                where p.assetid = a.assetid and s.id = p.assetid and
                p.date = s.ts and a.code='SPY';''')
            dt_ret = datetime.datetime.combine(self.cursor.fetchall()[0][0],
                                               datetime.time(16))
        finally:
            self._disconnect()
        return dt_ret
    def get_shares(self, symbol_list):
        ''' Returns dict mapping symbol -> shares outstanding.

        @param symbol_list: symbols to look up (trusted input: the list is
            spliced into the SQL by string concatenation, no escaping).
        '''
        self._connect()
        try:
            symbol_query_list = ",".join(map(lambda x: "'" + x + "'", symbol_list))
            self.cursor.execute( ''' SELECT code, sharesoutstanding FROM asset a
                where code in (''' + symbol_query_list + ');' )
            lt_ret = self.cursor.fetchall()
        finally:
            self._disconnect()
        # Rows are (code, sharesoutstanding) pairs -> dict.
        return dict(lt_ret)
def _find_ranges_of_symbols(self, results):
''' Finds range of current symbols in results list '''
symbol_dict = {}
current_symbol = results[0][0]
start = 0
for i, row in enumerate(results):
if row[0] != current_symbol:
symbol_dict[current_symbol] = (start, i - 1)
start = i
current_symbol = row[0]
#handle last symbol
symbol_dict[current_symbol] = (start, i)
return symbol_dict
class _ScratchCache(object):
    """Disk-backed pickle cache for expensive get_data reads.

    Cache files live under $QSSCRATCH and are keyed by a hash of the
    symbols, timestamps, data items and source driver; entries older
    than $CACHESTALLTIME hours (default 12) are considered stale.
    """
    @staticmethod
    def try_cache(ts_list, symbol_list, data_item, verbose=False,
                  include_delisted=False, cache_miss_function=None, source=None):
        '''
        Read data into a DataFrame, but check to see if it is in a cache first.
        @param ts_list: List of timestamps for which the data values are needed. Timestamps must be sorted.
        @param symbol_list: The list of symbols for which the data values are needed
        @param data_item: The data_item needed. Like open, close, volume etc.  May be a list, in which case a list of DataFrame is returned.
        @param include_delisted: If true, delisted securities will be included.
        @param cache_miss_function: callable invoked to fetch the data when
            there is no fresh cache entry.
        @param source: short driver tag (e.g. "MySQL"); part of the cache key.
        @note: If a symbol is not found then a message is printed. All the values in the column for that stock will be NaN. Execution then
        continues as usual. No errors are raised at the moment.
        '''
        # Construct hash -- filename where data may be already
        #
        # The idea here is to create a filename from the arguments provided.
        # We then check to see if the filename exists already, meaning that
        # the data has already been created and we can just read that file.
        # Create the hash for the symbols
        hashsyms = 0
        for i in symbol_list:
            hashsyms = (hashsyms + hash(i)) % 10000000
        # Create the hash for the timestamps
        hashts = 0
        # print "test point 1: " + str(len(ts_list))
        for i in ts_list:
            hashts = (hashts + hash(i)) % 10000000
        hashstr = 'qstk-' + str(source) + '-' + str(abs(hashsyms)) + '-' + str(abs(hashts)) \
            + '-' + str(hash(str(data_item)))
        # get the directory for scratch files from environment
        try:
            scratchdir = os.environ['QSSCRATCH']
        except KeyError:
            #self.rootdir = "/hzr71/research/QSData"
            raise KeyError("Please be sure to set the value for QSSCRATCH in config.sh or local.sh")
        # final complete filename
        cachefilename = scratchdir + '/' + hashstr + '.pkl'
        if verbose:
            print "cachefilename is:" + cachefilename
        # now either read the pkl file, or do a hardread
        readfile = False  # indicate that we have not yet read the file
        #check if the cachestall variable is defined.
        # catchstall=os.environ['CACHESTALLTIME']
        try:
            catchstall = datetime.timedelta(hours=int(os.environ['CACHESTALLTIME']))
        except:
            catchstall = datetime.timedelta(hours=12)
        # Check if the file is older than the cachestalltime
        if os.path.exists(cachefilename):
            if((datetime.datetime.now() - datetime.datetime.fromtimestamp(os.path.getmtime(cachefilename))) < catchstall):
                if verbose:
                    print "cache hit"
                try:
                    cachefile = open(cachefilename, "rb")
                    start = time.time()  # start timer
                    retval = pickle.load(cachefile)
                    elapsed = time.time() - start  # end timer
                    readfile = True  # remember success
                    cachefile.close()
                except IOError:
                    if verbose:
                        print "error reading cache: " + cachefilename
                        print "recovering..."
                except EOFError:
                    if verbose:
                        print "error reading cache: " + cachefilename
                        print "recovering..."
        if (readfile != True):
            if verbose:
                print "cache miss"
                print "beginning hardread"
                start = time.time()  # start timer
                print "data_item(s): " + str(data_item)
                print "symbols to read: " + str(symbol_list)
            retval = cache_miss_function(ts_list,
                symbol_list, data_item, verbose, include_delisted)
            if verbose:
                elapsed = time.time() - start  # end timer
                print "end hardread"
                print "saving to cache"
            try:
                cachefile = open(cachefilename, "wb")
                pickle.dump(retval, cachefile, -1)
                # NOTE(review): 0666 is Python-2 octal literal syntax
                # (world-readable/writable cache file); 0o666 in Python 3.
                os.chmod(cachefilename, 0666)
            except IOError:
                print "error writing cache: " + cachefilename
            if verbose:
                print "end saving to cache"
                print "reading took " + str(elapsed) + " seconds"
        return retval
class DataAccess(object):
    """
    Factory class that returns the requested data source driver.

    Usage: DataAccess('sqlite') or DataAccess('mysql') returns a fresh
    driver instance; any other name raises NotImplementedError.
    """
    drivers = {'sqlite': _SQLite, 'mysql': _MySQL}

    def __new__(self, driver):
        # Bugfix: use a membership test instead of indexing. The original
        # indexed the dict first, so an unknown driver name raised KeyError
        # before the intended NotImplementedError could ever be raised.
        if driver not in DataAccess.drivers:
            raise NotImplementedError("DataAccess Driver: " + driver +
                " not available or implmented.")
        return DataAccess.drivers[driver]()
if __name__ == "__main__":
    # Manual smoke test: needs network access to the MySQL server plus the
    # module-level imports (datetime, du) — not a unit test.
    db = DataAccess('mysql')
    date1 = datetime.datetime(2012, 11, 1, 16)
    date2 = datetime.datetime(2012, 11, 2, 16)  # unused here; kept for reference
    date3 = datetime.datetime(2012, 11, 23, 16)
    # NYSE trading days between date1 and date3, stamped at the 16:00 close.
    ts_list = du.getNYSEdays(date1,date3, datetime.timedelta(hours=16))
    #print db.get_shares(['GOOG', 'AAPL'])
    #print db.get_all_lists()
    #print db.get_all_symbols()
    #print db.get_list('Dow Jones Transportation')
    #print db.get_dividends([date1 + datetime.timedelta(days=x) for x in range(100)],
    #                       ["MSFT", "PGF", "GOOG", "A"])
    # Mixed request: two technical items, two fundamentals, one insider item.
    ldf_data = db.get_data_hard_read(ts_list, ["MSFT", "AAPL"],
        ["close","open","latestavailableannual","pe", "rating"])
    print ldf_data[0]
    print ldf_data[-2]
    print ldf_data[-1]
| |
# -*- coding: utf-8 -*-
# $URL$
# $Date$
# $Revision$
# :Author: a Pygments author|contributor; Felix Wiemann; Guenter Milde
# :Date: $Date$
# :Copyright: This module has been placed in the public domain.
#
# This is a merge of `Using Pygments in ReST documents`_ from the pygments_
# documentation, and a `proof of concept`_ by Felix Wiemann.
#
# ========== ===========================================================
# 2007-06-01 Removed redundancy from class values.
# 2007-06-04 Merge of successive tokens of same type
# (code taken from pygments.formatters.others).
# 2007-06-05 Separate docutils formatter script
# Use pygments' CSS class names (like the html formatter)
# allowing the use of pygments-produced style sheets.
# 2007-06-07 Merge in the formatting of the parsed tokens
# (misnamed as docutils_formatter) as class DocutilsInterface
# 2007-06-08 Failsafe implementation (fallback to a standard literal block
# if pygments not found)
# ========== ===========================================================
#
# ::
"""
Define and register a code-block directive using pygments
"""
# Requirements
# ------------
# ::
import codecs
from docutils import nodes
from docutils.parsers.rst import directives
try:
import pygments
from pygments.lexers import get_lexer_by_name
from pygments.formatters.html import _get_ttype_class
except ImportError:
pass
from .log import log
# Customisation
# -------------
#
# Do not insert inline nodes for the following tokens.
# (You could add e.g. Token.Punctuation like ``['', 'p']``.) ::
unstyled_tokens = ['']  # token classes emitted as plain Text nodes (no <inline>)
# DocutilsInterface
# -----------------
#
# This interface class combines code from
# pygments.formatters.html and pygments.formatters.others.
#
# It does not require anything of docutils and could also become a part of
# pygments::
class DocutilsInterface(object):
    """
    Parse `code` string and yield "classified" tokens.

    Arguments

      code        -- string of source code to parse
      language    -- formal language the code is written in.
      custom_args -- optional dict of extra keyword arguments forwarded to
                     the pygments lexer.

    Merge subsequent tokens of the same token-type.

    Yields the tokens as ``(ttype_class, value)`` tuples,
    where ttype_class is taken from pygments.token.STANDARD_TYPES and
    corresponds to the class argument used in pygments html output.
    """

    def __init__(self, code, language, custom_args=None):
        self.code = code
        self.language = language
        self.custom_args = custom_args or {}

    def lex(self):
        """Return the pygments token stream for self.code."""
        # Get lexer for language (use text as fallback)
        try:
            if self.language and str(self.language).lower() != 'none':
                lexer = get_lexer_by_name(self.language.lower(),
                                          **self.custom_args)
            else:
                lexer = get_lexer_by_name('text', **self.custom_args)
        except ValueError:
            log.info("no pygments lexer for %s, using 'text'" % self.language)
            lexer = get_lexer_by_name('text')
        return pygments.lex(self.code, lexer)

    def join(self, tokens):
        """
        Join subsequent tokens of same token-type
        """
        tokens = iter(tokens)
        (lasttype, lastval) = next(tokens)
        for ttype, value in tokens:
            if ttype is lasttype:
                lastval += value
            else:
                yield(lasttype, lastval)
                (lasttype, lastval) = (ttype, value)
        yield(lasttype, lastval)

    def __iter__(self):
        """
        Parse code string and yield "classified" tokens
        """
        try:
            tokens = self.lex()
        except (IOError, NameError):
            # Bugfix: when pygments failed to import (see the guarded import
            # at the top of this module), lex() raises NameError, which the
            # original IOError-only handler never caught — so the documented
            # "failsafe" fallback never ran. Now the whole code block is
            # emitted as a single unstyled token instead.
            log.info("Pygments lexer not found, using fallback")
            yield ('', self.code)
            return

        for ttype, value in self.join(tokens):
            yield (_get_ttype_class(ttype), value)
# code_block_directive
# --------------------
# ::
def code_block_directive(name, arguments, options, content, lineno,
                         content_offset, block_text, state, state_machine):
    """Parse and classify content of a code_block.

    Legacy (function-style) docutils directive. ``arguments[0]`` is the
    language; ``content`` holds the literal code unless the ``include``
    option names a file to read instead. Returns a single literal_block
    node containing pygments-classified inline children (plus optional
    line-number inlines when ``linenos`` is set).
    """
    if 'include' in options:
        try:
            if 'encoding' in options:
                encoding = options['encoding']
            else:
                encoding = 'utf-8'
            content = codecs.open(options['include'], 'r', encoding).read().rstrip()
        except (IOError, UnicodeError): # no file or problem finding it or reading it
            log.error('Error reading file: "%s" L %s' % (options['include'], lineno))
            content = ''
        line_offset = 0
        if content:
            # here we define the start-at and end-at options
            # so that limit is included in extraction
            # this is different than the start-after directive of docutils
            # (docutils/parsers/rst/directives/misc.py L73+)
            # which excludes the beginning
            # the reason is we want to be able to define a start-at like
            # def mymethod(self)
            # and have such a definition included
            after_text = options.get('start-at', None)
            if after_text:
                # skip content in include_text before *and NOT incl.* a matching text
                after_index = content.find(after_text)
                if after_index < 0:
                    raise state_machine.reporter.severe('Problem with "start-at" option of "%s" '
                        'code-block directive:\nText not found.' % options['start-at'])
                # patch mmueller start
                # Move the after_index to the beginning of the line with the
                # match.
                for char in content[after_index:0:-1]:
                    # codecs always opens binary. This works with '\n', '\r' and
                    # '\r\n'. We are going backwards, so '\n' is found first
                    # in '\r\n'.
                    # Going with .splitlines() seems more appropriate
                    # but needs a few more changes.
                    if char == '\n' or char == '\r':
                        break
                    after_index -= 1
                # patch mmueller end
                # Bugfix: compute the line offset from the ORIGINAL content
                # *before* truncating it. The previous code truncated first,
                # so content[:after_index] measured the wrong text and the
                # reported line numbers were wrong (compare the correct
                # ordering in the start-after branch below).
                line_offset = len(content[:after_index].splitlines())
                content = content[after_index:]
            after_text = options.get('start-after', None)
            if after_text:
                # skip content in include_text before *and incl.* a matching text
                after_index = content.find(after_text)
                if after_index < 0:
                    raise state_machine.reporter.severe('Problem with "start-after" option of "%s" '
                        'code-block directive:\nText not found.' % options['start-after'])
                line_offset = len(content[:after_index + len(after_text)].splitlines())
                content = content[after_index + len(after_text):]
            # same changes here for the same reason
            before_text = options.get('end-at', None)
            if before_text:
                # skip content in include_text after *and incl.* a matching text
                before_index = content.find(before_text)
                if before_index < 0:
                    raise state_machine.reporter.severe('Problem with "end-at" option of "%s" '
                        'code-block directive:\nText not found.' % options['end-at'])
                content = content[:before_index + len(before_text)]
            before_text = options.get('end-before', None)
            if before_text:
                # skip content in include_text after *and NOT incl.* a matching text
                before_index = content.find(before_text)
                if before_index < 0:
                    raise state_machine.reporter.severe('Problem with "end-before" option of "%s" '
                        'code-block directive:\nText not found.' % options['end-before'])
                content = content[:before_index]
    else:
        # Inline content: directive body arrives as a list of lines.
        line_offset = options.get('linenos_offset')
        content = '\n'.join(content)

    # Expand tabs ('tabsize' is the pygments-style option, 'tab-width'
    # the docutils-style one; default 8).
    if 'tabsize' in options:
        tabw = options['tabsize']
    else:
        tabw = int(options.get('tab-width', 8))

    content = content.replace('\t', ' ' * tabw)

    withln = "linenos" in options
    if not "linenos_offset" in options:
        line_offset = 0

    language = arguments[0]
    # create a literal block element and set class argument
    code_block = nodes.literal_block(classes=["code", language])

    if withln:
        # Emit the first line number; lnwidth keeps the gutter aligned.
        lineno = 1 + line_offset
        total_lines = content.count('\n') + 1 + line_offset
        lnwidth = len(str(total_lines))
        fstr = "\n%%%dd " % lnwidth
        code_block += nodes.inline(fstr[1:] % lineno, fstr[1:] % lineno, classes=['linenumber'])

    # parse content with pygments and add to code_block element
    for cls, value in DocutilsInterface(content, language, options):
        if withln and "\n" in value:
            # Split on the "\n"s
            values = value.split("\n")
            # The first piece, pass as-is
            code_block += nodes.Text(values[0], values[0])
            # On the second and later pieces, insert \n and linenos
            linenos = list(range(lineno, lineno + len(values)))
            for chunk, ln in list(zip(values, linenos))[1:]:
                if ln <= total_lines:
                    code_block += nodes.inline(fstr % ln, fstr % ln, classes=['linenumber'])
                    code_block += nodes.Text(chunk, chunk)
            lineno += len(values) - 1
        elif cls in unstyled_tokens:
            # insert as Text to decrease the verbosity of the output.
            code_block += nodes.Text(value, value)
        else:
            code_block += nodes.inline(value, value, classes=["pygments-" + cls])

    return [code_block]
# Custom argument validators
# --------------------------
# ::
#
# Move to separated module??
def zero_or_positive_int(argument):
    """
    Convert a string into a non-negative python integer.

    ``None`` is a special case; it is regarded as zero, as is the literal
    string ``'0'`` (which ``directives.positive_int`` would reject).
    Everything else is delegated to ``directives.positive_int``.
    """
    if argument is None or argument == '0':
        return 0
    return directives.positive_int(argument)
def string_list(argument):
    """
    Convert a comma-separated (or, failing that, whitespace-separated)
    list of values into a python list of strings.
    (Directive option conversion function)

    Based in positive_int_list of docutils.parsers.rst.directives
    """
    use_comma = ',' in argument
    return argument.split(',') if use_comma else argument.split()
def string_bool(argument):
    """
    Convert the strings 'true'/'false' (any capitalisation) into python
    boolean values; anything else raises ValueError.
    """
    if argument is None:
        msg = 'argument required but none supplied; choose from "True" or "False"'
        raise ValueError(msg)
    lowered = argument.lower()
    if lowered == 'true':
        return True
    if lowered == 'false':
        return False
    raise ValueError('"%s" unknown; choose from "True" or "False"' %
                     argument)
def csharp_unicodelevel(argument):
    """Validate the C# lexer's ``unicodelevel`` directive option."""
    return directives.choice(argument, ('none', 'basic', 'full'))
def lhs_litstyle(argument):
    """Validate the literate-Haskell lexer's ``litstyle`` directive option."""
    return directives.choice(argument, ('bird', 'latex'))
def raw_compress(argument):
    """Validate the raw lexer's ``compress`` directive option."""
    return directives.choice(argument, ('gz', 'bz2'))
# Register Directive
# ------------------
# ::
# Directive metadata for docutils' legacy (function-style) directive API:
# one required argument (the language), no optional arguments, final
# argument may contain whitespace; body content is allowed.
code_block_directive.arguments = (1, 0, 1)
code_block_directive.content = 1
# Recognized options; most lexer-specific ones are forwarded verbatim to
# the pygments lexer via DocutilsInterface's custom_args.
code_block_directive.options = {
    'include': directives.unchanged_required,
    'start-at': directives.unchanged_required,
    'end-at': directives.unchanged_required,
    'start-after': directives.unchanged_required,
    'end-before': directives.unchanged_required,
    'linenos': directives.unchanged,
    'linenos_offset': zero_or_positive_int,
    'tab-width': directives.unchanged,
    # generic
    'stripnl' : string_bool,
    'stripall': string_bool,
    'ensurenl': string_bool,
    'tabsize' : directives.positive_int,
    'encoding': directives.encoding,
    # Lua
    # NOTE(review): key is misspelled ('hightlighting'); the pygments Lua
    # lexer option is 'func_name_highlighting'. Left unchanged because the
    # key is the user-facing directive option name — confirm before fixing.
    'func_name_hightlighting':string_bool,
    'disabled_modules': string_list,
    # Python Console
    'python3': string_bool,
    # Delphi
    'turbopascal':string_bool,
    'delphi' :string_bool,
    'freepascal': string_bool,
    'units': string_list,
    # Modula2
    'pim'   : string_bool,
    'iso'   : string_bool,
    'objm2' : string_bool,
    'gm2ext': string_bool,
    # CSharp
    'unicodelevel' : csharp_unicodelevel,
    # Literate haskell
    'litstyle' : lhs_litstyle,
    # Raw
    'compress': raw_compress,
    # Rst
    'handlecodeblocks': string_bool,
    # Php
    'startinline': string_bool,
    'funcnamehighlighting': string_bool,
    'disabledmodules': string_list,
    }
# .. _docutils: http://docutils.sf.net/
# .. _pygments: http://pygments.org/
# .. _Using Pygments in ReST documents: http://pygments.org/docs/rstdirective/
# .. _proof of concept:
# http://article.gmane.org/gmane.text.docutils.user/3689
#
# Test output
# -----------
#
# If called from the command line, call the docutils publisher to render the
# input::
if __name__ == '__main__':
    # Manual test: register the directive, then run the docutils
    # command-line publisher on the given input and emit HTML.
    from docutils.core import publish_cmdline, default_description
    from docutils.parsers.rst import directives
    directives.register_directive('code-block', code_block_directive)
    description = "code-block directive test output" + default_description
    try:
        # Best effort: honour the user's locale for docutils messages.
        import locale
        locale.setlocale(locale.LC_ALL, '')
    except Exception:
        pass
    publish_cmdline(writer_name='html', description=description)
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Mixer multirotor test and prototyping script.
Author: Mathieu Bresciani <brescianimathieu@gmail.com>, Beat Kueng <beat-kueng@gmx.net>
Description: This script can be used to prototype new mixer algorithms and test
it against the C++ implementation.
"""
from __future__ import print_function
from argparse import ArgumentParser
import numpy as np
import numpy.matlib
import subprocess
# --------------------------------------------------
# mixing algorithms
# --------------------------------------------------
def compute_desaturation_gain(u, u_min, u_max, desaturation_vector):
    """
    Return the gain k by which desaturation_vector must be multiplied in
    order to unsaturate the output that is saturated the most.

    For each actuator, two candidate gains are recorded: one to lift it up
    to u_min if it is below, one to pull it down to u_max if it is above.
    The returned gain (min + max of all candidates) reduces the worst
    saturation on both ends as much as possible.
    """
    headroom_up = u_max - u      # positive = room below the upper limit
    headroom_down = u_min - u    # positive = actuator is below the lower limit
    gains = np.zeros(u.size * 2)
    for idx in range(u.size):
        direction = desaturation_vector[idx]
        # Skip near-zero entries to avoid division by zero.
        if abs(direction) < 0.000001:
            continue
        if headroom_down[idx] > 0.0:
            gains[2 * idx] = headroom_down[idx] / direction
        if headroom_up[idx] < 0.0:
            gains[2 * idx + 1] = headroom_up[idx] / direction
    # Combining the extreme candidates reduces saturation as much as possible.
    return min(gains) + max(gains)
def minimize_sat(u, u_min, u_max, desaturation_vector):
    """
    Minimize actuator saturation by adding/subtracting a fraction of
    desaturation_vector to the output vector u.

    desaturation_vector is the direction along which u may slide (e.g. the
    vertical-thrust column of the mixer matrix): moving along it changes
    thrust or angular acceleration on one specific axis only, so e.g.
    roll/pitch/yaw accelerations stay untouched while thrust is shifted.
    """
    gain = compute_desaturation_gain(u, u_min, u_max, desaturation_vector)
    unsaturated = u + gain * desaturation_vector  # first unsaturation attempt
    # Recompute on the updated outputs. Usually zero; non-zero when the
    # output spread exceeds the allowed range (max(u)-min(u) >
    # u_max-u_min), in which case adding half the gain balances the
    # saturation between both ends.
    gain += 0.5 * compute_desaturation_gain(unsaturated, u_min, u_max,
                                            desaturation_vector)
    return u + gain * desaturation_vector
def mix_yaw(m_sp, u, P, u_min, u_max):
    """
    Add the yaw component of m_sp onto an existing output vector u.

    Desaturation behavior: thrust is allowed to be decreased by up to 15% so
    that some yaw authority remains near full throttle; on the lower end
    thrust is never increased and yaw is given up as much as required.
    """
    yaw_sp = np.matlib.zeros(m_sp.size).T
    yaw_sp[2, 0] = m_sp[2, 0]
    u_with_yaw = u + P * yaw_sp
    # Unsaturate along the yaw axis only (roll/pitch untouched); the +0.15
    # margin on the upper limit leaves room for yaw response at max thrust
    yaw_axis = P[:, 2]
    u_desat = minimize_sat(u_with_yaw, u_min, u_max + 0.15, yaw_axis)
    thrust_axis = P[:, 3]
    u_thrust_desat = minimize_sat(u_desat, 0, u_max, thrust_axis)
    # Accept the thrust shift only if it reduced the outputs (reduce thrust only)
    if (u_thrust_desat > (u_desat)).any():
        u_thrust_desat = u_desat
    return u_thrust_desat
def airmode_rp(m_sp, P, u_min, u_max):
    """
    Mix roll, pitch, yaw and thrust with airmode on roll/pitch only.

    Thrust is increased/decreased as much as required to meet the demanded
    roll/pitch. Yaw is not allowed to increase the thrust; see mix_yaw()
    for the exact yaw behavior.

    Returns the tuple (raw mix without desaturation, final outputs).
    """
    # Mix roll/pitch/thrust first; yaw is handled separately below
    sp_without_yaw = m_sp.copy()
    sp_without_yaw[2, 0] = 0.0
    u = P * sp_without_yaw
    # Use thrust to unsaturate the outputs if needed
    thrust_axis = P[:, 3]
    u_desat = minimize_sat(u, u_min, u_max, thrust_axis)
    # Mix the yaw axis independently
    u_final = mix_yaw(m_sp, u_desat, P, u_min, u_max)
    return (u, u_final)
def airmode_rpy(m_sp, P, u_min, u_max):
    """
    Mix roll, pitch, yaw and thrust with full airmode.

    Thrust is increased/decreased as much as required to meet the demanded
    roll/pitch/yaw accelerations.

    Returns the tuple (raw mix without desaturation, final outputs).
    """
    u = P * m_sp  # mix all axes, including yaw
    # Use thrust to unsaturate the outputs if needed
    thrust_axis = P[:, 3]
    return (u, minimize_sat(u, u_min, u_max, thrust_axis))
def normal_mode(m_sp, P, u_min, u_max):
    """
    Mix roll, pitch, yaw and thrust without airmode.

    Thrust is NEVER increased to meet the demanded roll/pitch/yaw; instead,
    roll/pitch/yaw is reduced as much as needed. Thrust can still be reduced
    to unsaturate the upper side. See mix_yaw() for the exact yaw behavior.

    Returns the tuple (raw mix without desaturation, final outputs).
    """
    # Mix roll/pitch/thrust first; yaw is handled separately below
    sp_without_yaw = m_sp.copy()
    sp_without_yaw[2, 0] = 0.0
    u = P * sp_without_yaw
    # Unsaturate along the thrust axis, but accept the shift only if it
    # reduced the outputs: thrust must never be increased here
    thrust_axis = P[:, 3]
    u_desat = minimize_sat(u, u_min, u_max, thrust_axis)
    if (u_desat > (u)).any():
        u_desat = u
    # Then give up roll and pitch acceleration as needed to unsaturate
    roll_axis = P[:, 0]
    u_desat_r = minimize_sat(u_desat, u_min, u_max, roll_axis)
    pitch_axis = P[:, 1]
    u_desat_rp = minimize_sat(u_desat_r, u_min, u_max, pitch_axis)
    # Mix the yaw axis independently on top
    u_final = mix_yaw(m_sp, u_desat_rp, P, u_min, u_max)
    return (u, u_final)
# --------------------------------------------------
# test cases
# --------------------------------------------------
# normalized control allocation test matrices (B_px from px_generate_mixers.py)

# quad_x
P1 = np.matrix([
    [-0.71,  0.71,  1.,  1. ],
    [ 0.71, -0.71,  1.,  1. ],
    [ 0.71,  0.71, -1.,  1. ],
    [-0.71, -0.71, -1.,  1. ]])
# quad_wide
P2 = np.matrix([
    [-0.5,   0.71,  0.77, 1. ],
    [ 0.5,  -0.71,  1.,   1. ],
    [ 0.5,   0.71, -0.77, 1. ],
    [-0.5,  -0.71, -1.,   1. ]])
# hex_x
P3 = np.matrix([
    [-1.,    0.,   -1.,  1. ],
    [ 1.,   -0.,    1.,  1. ],
    [ 0.5,   0.87, -1.,  1. ],
    [-0.5,  -0.87,  1.,  1. ],
    [-0.5,   0.87,  1.,  1. ],
    [ 0.5,  -0.87, -1.,  1. ]])
# hex_cox
P4 = np.matrix([
    [-0.87,  0.5,  -1.,  1. ],
    [-0.87,  0.5,   1.,  1. ],
    [ 0.,   -1.,   -1.,  1. ],
    [ 0.,   -1.,    1.,  1. ],
    [ 0.87,  0.5,  -1.,  1. ],
    [ 0.87,  0.5,   1.,  1. ]])
# octa_plus
P5 = np.matrix([
    [-0.,    1.,   -1.,  1. ],
    [ 0.,   -1.,   -1.,  1. ],
    [-0.71,  0.71,  1.,  1. ],
    [-0.71, -0.71,  1.,  1. ],
    [ 0.71,  0.71,  1.,  1. ],
    [ 0.71, -0.71,  1.,  1. ],
    [ 1.,    0.,   -1.,  1. ],
    [-1.,   -0.,   -1.,  1. ]])

P_tests = [ P1, P2, P3, P4, P5 ]

test_cases_input = np.matrix([
    # desired accelerations (must be within [-1, 1]):
    #roll   pitch   yaw    thrust
    [ 0.0,   0.0,   0.0,   0.0],
    [-0.05,  0.0,   0.0,   0.0],
    [ 0.05, -0.05,  0.0,   0.0],
    [ 0.05,  0.05, -0.025, 0.0],
    [ 0.0,   0.2,  -0.025, 0.0],
    [ 0.2,   0.05,  0.09,  0.0],
    [-0.125, 0.02,  0.04,  0.0],
    # extreme cases
    [ 1.0,   0.0,   0.0,   0.0],
    [ 0.0,  -1.0,   0.0,   0.0],
    [ 0.0,   0.0,   1.0,   0.0],
    [ 1.0,   1.0,  -1.0,   0.0],
    [-1.0,   0.9,  -0.9,   0.0],
    [-1.0,   0.9,   0.0,   0.0],
    ])
# use the following thrust values for all test cases (must be within [0, 1])
thrust_values = [0, 0.1, 0.45, 0.9, 1.0]
# Expand: every input row is repeated once per thrust value, with the thrust
# column overwritten by that value.
test_cases = np.zeros((test_cases_input.shape[0] * len(thrust_values), 4))
for i in range(test_cases_input.shape[0]):
    for k in range(len(thrust_values)):
        # copy the row: np.matrix row indexing returns a view, so writing the
        # thrust column on it directly would mutate test_cases_input
        test_case = test_cases_input[i].copy()
        test_case[0, 3] = thrust_values[k]
        test_cases[i * len(thrust_values) + k, :] = test_case
def run_tests(mixer_cb, P, test_mixer_binary, test_index=None):
    """
    Run all (or a specific) tests for a certain mixer method and control
    allocation matrix P.

    The reference C++ implementation test_mixer_binary is fed, over stdin:
    the airmode index, the motor count, the matrix P, and then for each test
    case the input accelerations followed by the expected (saturated)
    outputs computed by mixer_cb. A non-zero exit code from the binary
    indicates a mismatch.

    NOTE(review): reads the module-level global ``mode_idx`` (set by the
    test loop at the bottom of this script) to select the airmode; it is
    not passed as a parameter.
    """
    B = np.linalg.pinv(P)  # only used for diagnostics when a test fails
    proc = subprocess.Popen(
        test_mixer_binary,
        #'cat > /tmp/test_'+str(mode_idx), shell=True, # just to test the output
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE)
    # NOTE(review): writes str objects to the pipe; under Python 3 these
    # would need bytes (or universal_newlines=True) -- presumably this
    # script targets Python 2.
    proc.stdin.write("{:}\n".format(mode_idx)) # airmode
    motor_count = P.shape[0]
    proc.stdin.write("{:}\n".format(motor_count)) # motor count
    # control allocation matrix
    for row in P.getA():
        for col in row:
            proc.stdin.write("{:.8f} ".format(col))
        proc.stdin.write("\n")
    proc.stdin.write("\n")
    failed = False
    try:
        if test_index is None:
            # go through all test cases
            test_indices = range(test_cases.shape[0])
        else:
            test_indices = [test_index]
        for i in test_indices:
            # column vector [roll, pitch, yaw, thrust]
            actuator_controls = test_cases[[i], :].T
            proc.stdin.write("{:.8f} {:.8f} {:.8f} {:.8f}\n"
                             .format(actuator_controls[0, 0], actuator_controls[1, 0],
                                     actuator_controls[2, 0], actuator_controls[3, 0]))
            (u, u_new) = mixer_cb(actuator_controls, P, 0.0, 1.0)
            # Saturate the outputs between 0 and 1
            u_new_sat = np.maximum(u_new, np.matlib.zeros(u.size).T)
            u_new_sat = np.minimum(u_new_sat, np.matlib.ones(u.size).T)
            # write expected outputs
            for j in range(motor_count):
                proc.stdin.write("{:.8f} ".format(u_new_sat[j, 0]))
            proc.stdin.write("\n")
        proc.stdin.close()
    except IOError as e:
        # broken pipe: the binary exited early; treated as a failure below
        failed = True
    result = proc.stdout.read()
    proc.wait()
    if proc.returncode != 0: failed = True
    if failed:
        print("Error: test failed")
        print("B:\n{}".format(B))
        print("P:\n{}".format(P))
        print(result)
        raise Exception('Test failed')
# Command-line interface: --test runs the comparison against the C++ binary,
# otherwise execution falls through to the prototyping playground below.
parser = ArgumentParser(description=__doc__)
parser.add_argument('--test', action='store_true', default=False, help='Run tests')
parser.add_argument("--mixer-multirotor-binary",
                    help="select test_mixer_multirotor binary file name",
                    default='./test_mixer_multirotor')
parser.add_argument("--mode", "-m", dest="mode",
                    help="mixer mode: none, rp, rpy", default=None)
parser.add_argument("-i", dest="index", type=int,
                    help="Select a single test to run (starting at 1)", default=None)
args = parser.parse_args()
mixer_mode = args.mode
if args.test:
    mixer_binary = args.mixer_multirotor_binary
    test_index = args.index
    # the CLI is 1-based, internal indexing is 0-based
    if test_index is not None: test_index -= 1
    # NOTE: the loop variable mode_idx is read as a module-level global by
    # run_tests() above -- do not rename it.
    for mode_idx, (airmode, mixer_cb) in enumerate([
            ('none', normal_mode),
            ('rp', airmode_rp),
            ('rpy', airmode_rpy)]):
        # --mode restricts the run to a single airmode
        if mixer_mode is not None and mixer_mode != airmode:
            continue
        print('Testing mode: '+airmode)
        for P in P_tests:
            run_tests(mixer_cb, P, mixer_binary, test_index)
    exit(0)
# --------------------------------------------------
# Prototyping and corner case testing playground
# --------------------------------------------------
# Compute the control allocation matrix
# u = P * m
P = P1 # normal quad
#P = P2 # wide quad
# Normalized actuator effectiveness matrix using the pseudo inverse of P
# m = B * u
B = np.linalg.pinv(P)
# Desired accelerations (actuator controls, in [-1, 1])
p_dot_sp = 0.0 # roll acceleration (p is the roll rate)
q_dot_sp = 0.1 # pitch acceleration
r_dot_sp = 0.1 # yaw acceleration
T_sp = 0.0 # vertical thrust
m_sp = np.matrix([p_dot_sp, q_dot_sp, r_dot_sp, T_sp]).T # Vector of desired "accelerations"
# Airmode type (none/rp/rpy), taken from the --mode CLI argument
airmode = mixer_mode
if airmode is None: airmode = "none"
# Actuators output saturations
u_max = 1.0
u_min = 0.0
if airmode == "none":
    (u, u_new) = normal_mode(m_sp, P, u_min, u_max)
elif airmode == "rp":
    (u, u_new) = airmode_rp(m_sp, P, u_min, u_max)
elif airmode == "rpy":
    (u, u_new) = airmode_rpy(m_sp, P, u_min, u_max)
else:
    # NOTE(review): for an unrecognized --mode these float fallbacks would
    # break the saturation step below (floats have no .size attribute)
    u = 0.0
    u_new = 0.0
# Saturate the outputs between 0 and 1
u_new_sat = np.maximum(u_new, np.matlib.zeros(u.size).T)
u_new_sat = np.minimum(u_new_sat, np.matlib.ones(u.size).T)
np.set_printoptions(suppress=True)
# Display some results
print("u = \n{}\n".format(u))
print("u_new = \n{}\n".format(u_new))
print("u_new_sat = \n{}\n".format(u_new_sat))
print("Desired accelerations = \n{}\n".format(m_sp))
# Compute back the allocated accelerations
m_new = B * u_new_sat
print("Allocated accelerations = \n{}\n".format(m_new))
| |
import cv2
from distutils.version import LooseVersion
import itertools
from numpy import array, zeros, vstack, hstack, math, nan, argsort, median, \
argmax, isnan, append
import scipy.cluster
import scipy.spatial
import time
import numpy as np
import util
class CMT(object):
    """CMT (Consensus-based Matching and Tracking) keypoint object tracker.

    The object is selected by a bounding box in the first frame; keypoints
    inside the box are tracked and re-matched frame to frame, and the object
    pose (center, scale, rotation) is estimated from the consensus of
    keypoint votes.
    """

    # Feature detection / description / matching configuration
    DETECTOR = 'BRISK'
    DESCRIPTOR = 'BRISK'
    DESC_LENGTH = 512              # descriptor length, used to normalize match distances
    MATCHER = 'BruteForce-Hamming'
    THR_OUTLIER = 20               # max distance of a vote from the consensus cluster
    THR_CONF = 0.75                # minimum match confidence
    THR_RATIO = 0.8                # Lowe's ratio-test threshold

    estimate_scale = True
    estimate_rotation = True

    def initialise(self, im_gray0, tl, br):
        """Initialise the tracker on grayscale frame im_gray0 with the object
        bounded by top-left corner tl and bottom-right corner br.

        Raises Exception when no keypoints are found inside the selection.
        """
        # OpenCV 2.x and 3.x+ expose the feature API differently
        if cv2.__version__ < LooseVersion('3.0'):
            self.detector = cv2.FeatureDetector_create(self.DETECTOR)
            self.descriptor = cv2.DescriptorExtractor_create(self.DESCRIPTOR)
            self.matcher = cv2.DescriptorMatcher_create(self.MATCHER)
        else:
            self.detector = cv2.BRISK_create()
            self.descriptor = self.detector
            self.matcher = cv2.BFMatcher(cv2.NORM_HAMMING)

        # Get initial keypoints in whole image
        keypoints_cv = self.detector.detect(im_gray0)

        # Remember keypoints that are in the rectangle as selected keypoints
        ind = util.in_rect(keypoints_cv, tl, br)
        selected_keypoints_cv = list(itertools.compress(keypoints_cv, ind))
        selected_keypoints_cv, self.selected_features = self.descriptor.compute(im_gray0, selected_keypoints_cv)
        selected_keypoints = util.keypoints_cv_to_np(selected_keypoints_cv)
        num_selected_keypoints = len(selected_keypoints_cv)
        if num_selected_keypoints == 0:
            raise Exception('No keypoints found in selection')

        # Remember keypoints that are not in the rectangle as background keypoints
        background_keypoints_cv = list(itertools.compress(keypoints_cv, ~ind))
        background_keypoints_cv, background_features = self.descriptor.compute(im_gray0, background_keypoints_cv)
        _ = util.keypoints_cv_to_np(background_keypoints_cv)

        # Assign each keypoint a class starting from 1, background is 0
        self.selected_classes = array(range(num_selected_keypoints)) + 1
        background_classes = zeros(len(background_keypoints_cv))

        # Stack background features and selected features into database
        self.features_database = vstack((background_features, self.selected_features))

        # Same for classes
        self.database_classes = hstack((background_classes, self.selected_classes))

        # Get all distances between selected keypoints in squareform
        pdist = scipy.spatial.distance.pdist(selected_keypoints)
        self.squareform = scipy.spatial.distance.squareform(pdist)

        # Get all pairwise angles between selected keypoints
        angles = np.empty((num_selected_keypoints, num_selected_keypoints))
        for k1, i1 in zip(selected_keypoints, range(num_selected_keypoints)):
            for k2, i2 in zip(selected_keypoints, range(num_selected_keypoints)):
                # Compute vector from k1 to k2
                v = k2 - k1
                # Compute angle of this vector with respect to x axis
                angle = math.atan2(v[1], v[0])
                # Store angle
                angles[i1, i2] = angle
        self.angles = angles

        # Find the center of selected keypoints
        center = np.mean(selected_keypoints, axis=0)

        # Remember the rectangle coordinates relative to the center
        self.center_to_tl = np.array(tl) - center
        self.center_to_tr = np.array([br[0], tl[1]]) - center
        self.center_to_br = np.array(br) - center
        self.center_to_bl = np.array([tl[0], br[1]]) - center

        # Springs: each keypoint's offset from the object center
        self.springs = selected_keypoints - center

        # Set start image for tracking
        self.im_prev = im_gray0

        # Make keypoints 'active' keypoints, with class info attached as a
        # third column: rows are [x, y, class]
        self.active_keypoints = np.copy(selected_keypoints)
        self.active_keypoints = hstack((selected_keypoints, self.selected_classes[:, None]))

        # Remember number of initial keypoints
        self.num_initial_keypoints = len(selected_keypoints_cv)

    def estimate(self, keypoints):
        """Estimate the object center, scale and rotation from keypoints.

        keypoints is an (N, 3) array of [x, y, class] rows. Returns the
        tuple (center, scale_estimate, med_rot, inlier_keypoints); center is
        (nan, nan) when no estimate is possible.
        """
        center = array((nan, nan))
        scale_estimate = nan
        med_rot = nan

        # At least 2 keypoints are needed for scale
        if keypoints.size > 1:

            # Extract the keypoint classes
            # (builtin int: the np.int alias was removed in NumPy 1.24)
            keypoint_classes = keypoints[:, 2].squeeze().astype(int)

            # Retain singular dimension
            if keypoint_classes.size == 1:
                keypoint_classes = keypoint_classes[None]

            # Sort
            ind_sort = argsort(keypoint_classes)
            keypoints = keypoints[ind_sort]
            keypoint_classes = keypoint_classes[ind_sort]

            # Get all combinations of keypoints
            all_combs = array([val for val in itertools.product(range(keypoints.shape[0]), repeat=2)])

            # But exclude comparison with itself
            all_combs = all_combs[all_combs[:, 0] != all_combs[:, 1], :]

            # Measure distance between allcombs[0] and allcombs[1]
            ind1 = all_combs[:, 0]
            ind2 = all_combs[:, 1]

            class_ind1 = keypoint_classes[ind1] - 1
            class_ind2 = keypoint_classes[ind2] - 1

            duplicate_classes = class_ind1 == class_ind2

            if not all(duplicate_classes):
                ind1 = ind1[~duplicate_classes]
                ind2 = ind2[~duplicate_classes]

                class_ind1 = class_ind1[~duplicate_classes]
                class_ind2 = class_ind2[~duplicate_classes]

                pts_allcombs0 = keypoints[ind1, :2]
                pts_allcombs1 = keypoints[ind2, :2]

                # This distance might be 0 for some combinations,
                # as it can happen that there is more than one keypoint at a single location
                dists = util.L2norm(pts_allcombs0 - pts_allcombs1)

                original_dists = self.squareform[class_ind1, class_ind2]

                # Scale: ratio of current to initial pairwise distances
                scalechange = dists / original_dists

                # Compute angles
                angles = np.empty((pts_allcombs0.shape[0]))

                v = pts_allcombs1 - pts_allcombs0
                angles = np.arctan2(v[:, 1], v[:, 0])

                original_angles = self.angles[class_ind1, class_ind2]

                angle_diffs = angles - original_angles

                # Fix long way angles (wrap into [-pi, pi])
                long_way_angles = np.abs(angle_diffs) > math.pi

                angle_diffs[long_way_angles] = angle_diffs[long_way_angles] - np.sign(angle_diffs[long_way_angles]) * 2 * math.pi

                # Medians are robust against outlier pairs
                scale_estimate = median(scalechange)
                if not self.estimate_scale:
                    scale_estimate = 1

                med_rot = median(angle_diffs)
                if not self.estimate_rotation:
                    med_rot = 0

                # Each keypoint votes for the object center via its spring
                keypoint_class = keypoints[:, 2].astype(int)
                votes = keypoints[:, :2] - scale_estimate * (util.rotate(self.springs[keypoint_class - 1], med_rot))

                # Remember all votes including outliers
                self.votes = votes

                # Compute pairwise distance between votes
                pdist = scipy.spatial.distance.pdist(votes)

                # Compute linkage between pairwise distances
                linkage = scipy.cluster.hierarchy.linkage(pdist)

                # Perform hierarchical distance-based clustering
                T = scipy.cluster.hierarchy.fcluster(linkage, self.THR_OUTLIER, criterion='distance')

                # Count votes for each cluster
                cnt = np.bincount(T)  # Dummy 0 label remains

                # Get largest class
                Cmax = argmax(cnt)

                # Identify inliers (=members of largest class)
                inliers = T == Cmax
                # inliers = med_dists < THR_OUTLIER

                # Remember outliers
                self.outliers = keypoints[~inliers, :]

                # Stop tracking outliers
                keypoints = keypoints[inliers, :]

                # Remove outlier votes
                votes = votes[inliers, :]

                # Compute object center
                center = np.mean(votes, axis=0)

        return (center, scale_estimate, med_rot, keypoints)

    def process_frame(self, im_gray):
        """Process the next grayscale frame: track and re-match keypoints,
        then update the pose state (self.center, self.scale_estimate,
        self.rotation_estimate, corner attributes, self.bb, self.has_result).
        """
        tracked_keypoints, _ = util.track(self.im_prev, im_gray, self.active_keypoints)
        (center, scale_estimate, rotation_estimate, tracked_keypoints) = self.estimate(tracked_keypoints)

        # Detect keypoints, compute descriptors
        keypoints_cv = self.detector.detect(im_gray)
        keypoints_cv, features = self.descriptor.compute(im_gray, keypoints_cv)

        # Create list of active keypoints
        active_keypoints = zeros((0, 3))

        # Get the best two matches for each feature
        matches_all = self.matcher.knnMatch(features, self.features_database, 2)

        # Get all matches for selected features
        if not any(isnan(center)):
            selected_matches_all = self.matcher.knnMatch(features, self.selected_features, len(self.selected_features))

        # For each keypoint and its descriptor
        if len(keypoints_cv) > 0:
            transformed_springs = scale_estimate * util.rotate(self.springs, -rotation_estimate)
            for i in range(len(keypoints_cv)):

                # Retrieve keypoint location
                location = np.array(keypoints_cv[i].pt)

                # First: Match over whole image
                # Compute distances to all descriptors
                matches = matches_all[i]
                distances = np.array([m.distance for m in matches])

                # Convert distances to confidences, do not weight
                combined = 1 - distances / self.DESC_LENGTH

                classes = self.database_classes

                # Get best and second best index
                bestInd = matches[0].trainIdx
                secondBestInd = matches[1].trainIdx

                # Compute distance ratio according to Lowe
                ratio = (1 - combined[0]) / (1 - combined[1])

                # Extract class of best match
                keypoint_class = classes[bestInd]

                # If distance ratio is ok and absolute distance is ok and keypoint class is not background
                if ratio < self.THR_RATIO and combined[0] > self.THR_CONF and keypoint_class != 0:

                    # Add keypoint to active keypoints
                    new_kpt = append(location, keypoint_class)
                    active_keypoints = append(active_keypoints, array([new_kpt]), axis=0)

                # In a second step, try to match difficult keypoints
                # If structural constraints are applicable
                if not any(isnan(center)):

                    # Compute distances to initial descriptors
                    matches = selected_matches_all[i]
                    distances = np.array([m.distance for m in matches])

                    # Re-order the distances based on indexing
                    idxs = np.argsort(np.array([m.trainIdx for m in matches]))
                    distances = distances[idxs]

                    # Convert distances to confidences
                    confidences = 1 - distances / self.DESC_LENGTH

                    # Compute the keypoint location relative to the object center
                    relative_location = location - center

                    # Compute the distances to all springs
                    displacements = util.L2norm(transformed_springs - relative_location)

                    # For each spring, calculate weight
                    weight = displacements < self.THR_OUTLIER  # Could be smooth function

                    combined = weight * confidences

                    classes = self.selected_classes

                    # Sort in descending order
                    sorted_conf = argsort(combined)[::-1]  # reverse

                    # Get best and second best index
                    bestInd = sorted_conf[0]
                    secondBestInd = sorted_conf[1]

                    # Compute distance ratio according to Lowe
                    ratio = (1 - combined[bestInd]) / (1 - combined[secondBestInd])

                    # Extract class of best match
                    keypoint_class = classes[bestInd]

                    # If distance ratio is ok and absolute distance is ok and keypoint class is not background
                    if ratio < self.THR_RATIO and combined[bestInd] > self.THR_CONF and keypoint_class != 0:

                        # Add keypoint to active keypoints
                        new_kpt = append(location, keypoint_class)

                        # Check whether same class already exists
                        if active_keypoints.size > 0:
                            same_class = np.nonzero(active_keypoints[:, 2] == keypoint_class)
                            active_keypoints = np.delete(active_keypoints, same_class, axis=0)

                        active_keypoints = append(active_keypoints, array([new_kpt]), axis=0)

        # If some keypoints have been tracked
        if tracked_keypoints.size > 0:

            # Extract the keypoint classes
            tracked_classes = tracked_keypoints[:, 2]

            # If there already are some active keypoints
            if active_keypoints.size > 0:

                # Add all tracked keypoints that have not been matched
                associated_classes = active_keypoints[:, 2]
                missing = ~np.in1d(tracked_classes, associated_classes)
                active_keypoints = append(active_keypoints, tracked_keypoints[missing, :], axis=0)

            # Else use all tracked keypoints
            else:
                active_keypoints = tracked_keypoints

        # Update object state estimate
        _ = active_keypoints
        self.center = center
        self.scale_estimate = scale_estimate
        self.rotation_estimate = rotation_estimate
        self.tracked_keypoints = tracked_keypoints
        self.active_keypoints = active_keypoints
        self.im_prev = im_gray
        self.keypoints_cv = keypoints_cv
        _ = time.time()

        # Default: no result this frame
        self.tl = (nan, nan)
        self.tr = (nan, nan)
        self.br = (nan, nan)
        self.bl = (nan, nan)

        self.bb = array([nan, nan, nan, nan])

        self.has_result = False
        # Require a valid center and at least 10% of the initial keypoints
        if not any(isnan(self.center)) and self.active_keypoints.shape[0] > self.num_initial_keypoints / 10:
            self.has_result = True

            # Transform the stored corner offsets by the estimated pose
            tl = util.array_to_int_tuple(center + scale_estimate * util.rotate(self.center_to_tl[None, :], rotation_estimate).squeeze())
            tr = util.array_to_int_tuple(center + scale_estimate * util.rotate(self.center_to_tr[None, :], rotation_estimate).squeeze())
            br = util.array_to_int_tuple(center + scale_estimate * util.rotate(self.center_to_br[None, :], rotation_estimate).squeeze())
            bl = util.array_to_int_tuple(center + scale_estimate * util.rotate(self.center_to_bl[None, :], rotation_estimate).squeeze())

            min_x = min((tl[0], tr[0], br[0], bl[0]))
            min_y = min((tl[1], tr[1], br[1], bl[1]))
            max_x = max((tl[0], tr[0], br[0], bl[0]))
            max_y = max((tl[1], tr[1], br[1], bl[1]))

            self.tl = tl
            self.tr = tr
            self.bl = bl
            self.br = br

            # Axis-aligned bounding box [x, y, width, height]
            self.bb = np.array([min_x, min_y, max_x - min_x, max_y - min_y])
| |
# Written by Bram Cohen
# see LICENSE.txt for license information
from random import randrange, shuffle
from BitTornado.clock import clock
# Compatibility shim for very old interpreters (Python < 2.3), where the
# boolean constants True/False are not builtins: define them as ints.
try:
    True
except:
    True = 1
    False = 0
class PiecePicker:
    """Selects the next piece to download, preferring rare pieces.

    Data structures:
    - ``interests``: a list of buckets ("levels") of piece indices. A
      piece's level is its availability count (``numhaves``) offset by
      ``priority_step`` per priority level, so scanning the buckets front
      to back yields rare / high-priority pieces first.
    - ``pos_in_interests[piece]``: the piece's index inside its bucket,
      making bucket moves O(1).
    - ``crosscount`` / ``crosscount2``: histograms of how many pieces exist
      at each availability count (``crosscount2`` counts a piece we already
      have as one extra copy).
    Super-seed mode reuses the same bucket machinery to decide which HAVE
    message to reveal to each peer (see ``next_have``).
    """
    def __init__(self, numpieces,
                 rarest_first_cutoff = 1, rarest_first_priority_cutoff = 3,
                 priority_step = 20):
        # rarest_first_cutoff: until this many pieces are complete, prefer
        # finishing already-started pieces over strict rarest-first
        self.rarest_first_cutoff = rarest_first_cutoff
        self.rarest_first_priority_cutoff = rarest_first_priority_cutoff + priority_step
        self.priority_step = priority_step
        # cutoff shrinks as seeds connect (see got_seed/lost_seed)
        self.cutoff = rarest_first_priority_cutoff
        self.numpieces = numpieces
        self.started = []                   # pieces with outstanding requests
        self.totalcount = 0                 # sum of numhaves over all pieces
        self.numhaves = [0] * numpieces     # per-piece availability among peers
        self.priority = [1] * numpieces     # per-piece priority; -1 = blocked
        self.removed_partials = {}          # partially-downloaded pieces later set to -1
        self.crosscount = [numpieces]
        self.crosscount2 = [numpieces]
        self.has = [0] * numpieces          # which pieces we already have
        self.numgot = 0
        self.done = False
        self.seed_connections = {}          # connection -> last piece revealed (super-seed)
        self.past_ips = {}                  # ip -> last revealed piece, for reconnects
        self.seed_time = None
        self.superseed = False
        self.seeds_connected = 0
        self._init_interests()
    def _init_interests(self):
        """Build the initial buckets: all pieces at the base level
        (``priority_step``), in random order."""
        self.interests = [[] for x in xrange(self.priority_step)]
        self.level_in_interests = [self.priority_step] * self.numpieces
        interests = range(self.numpieces)
        shuffle(interests)
        self.pos_in_interests = [0] * self.numpieces
        for i in xrange(self.numpieces):
            self.pos_in_interests[interests[i]] = i
        self.interests.append(interests)
    def got_have(self, piece):
        """A peer announced having ``piece``: update the availability
        histograms and move the piece one level up."""
        self.totalcount+=1
        numint = self.numhaves[piece]
        self.numhaves[piece] += 1
        self.crosscount[numint] -= 1
        if numint+1==len(self.crosscount):
            self.crosscount.append(0)
        self.crosscount[numint+1] += 1
        if not self.done:
            numintplus = numint+self.has[piece]
            self.crosscount2[numintplus] -= 1
            if numintplus+1 == len(self.crosscount2):
                self.crosscount2.append(0)
            self.crosscount2[numintplus+1] += 1
            numint = self.level_in_interests[piece]
            self.level_in_interests[piece] += 1
        if self.superseed:
            # track how far each revealed piece has spread
            self.seed_got_haves[piece] += 1
            numint = self.level_in_interests[piece]
            self.level_in_interests[piece] += 1
        elif self.has[piece] or self.priority[piece] == -1:
            # pieces we have (or blocked ones) are not kept in the buckets
            return
        if numint == len(self.interests) - 1:
            self.interests.append([])
        self._shift_over(piece, self.interests[numint], self.interests[numint + 1])
    def lost_have(self, piece):
        """A peer that had ``piece`` disconnected: reverse of got_have."""
        self.totalcount-=1
        numint = self.numhaves[piece]
        self.numhaves[piece] -= 1
        self.crosscount[numint] -= 1
        self.crosscount[numint-1] += 1
        if not self.done:
            numintplus = numint+self.has[piece]
            self.crosscount2[numintplus] -= 1
            self.crosscount2[numintplus-1] += 1
            numint = self.level_in_interests[piece]
            self.level_in_interests[piece] -= 1
        if self.superseed:
            numint = self.level_in_interests[piece]
            self.level_in_interests[piece] -= 1
        elif self.has[piece] or self.priority[piece] == -1:
            return
        self._shift_over(piece, self.interests[numint], self.interests[numint - 1])
    def _shift_over(self, piece, l1, l2):
        """Move ``piece`` from bucket l1 to a random position in bucket l2,
        keeping pos_in_interests consistent (O(1) swap-with-last removal)."""
        assert self.superseed or (not self.has[piece] and self.priority[piece] >= 0)
        parray = self.pos_in_interests
        p = parray[piece]
        assert l1[p] == piece
        # remove from l1 by swapping the last element into its slot
        q = l1[-1]
        l1[p] = q
        parray[q] = p
        del l1[-1]
        # insert into l2 at a random position to keep bucket order random
        newp = randrange(len(l2)+1)
        if newp == len(l2):
            parray[piece] = len(l2)
            l2.append(piece)
        else:
            old = l2[newp]
            parray[old] = len(l2)
            l2.append(old)
            l2[newp] = piece
            parray[piece] = newp
    def got_seed(self):
        """A seed connected: relax the rarest-first priority cutoff."""
        self.seeds_connected += 1
        self.cutoff = max(self.rarest_first_priority_cutoff-self.seeds_connected,0)
    def became_seed(self):
        """A connected peer became a seed: its haves no longer add
        information, so subtract one copy of everything."""
        self.got_seed()
        self.totalcount -= self.numpieces
        self.numhaves = [i-1 for i in self.numhaves]
        if self.superseed or not self.done:
            self.level_in_interests = [i-1 for i in self.level_in_interests]
            if self.interests:
                del self.interests[0]
        del self.crosscount[0]
        if not self.done:
            del self.crosscount2[0]
    def lost_seed(self):
        """A seed disconnected: tighten the cutoff again."""
        self.seeds_connected -= 1
        self.cutoff = max(self.rarest_first_priority_cutoff-self.seeds_connected,0)
    def requested(self, piece):
        """Record that a request for ``piece`` is outstanding."""
        if piece not in self.started:
            self.started.append(piece)
    def _remove_from_interests(self, piece, keep_partial = False):
        # Drop the piece from its bucket (swap-with-last) and from the
        # started list; keep_partial remembers partially-downloaded pieces
        # so set_priority can restart them later.
        l = self.interests[self.level_in_interests[piece]]
        p = self.pos_in_interests[piece]
        assert l[p] == piece
        q = l[-1]
        l[p] = q
        self.pos_in_interests[q] = p
        del l[-1]
        try:
            self.started.remove(piece)
            if keep_partial:
                self.removed_partials[piece] = 1
        except ValueError:
            pass
    def complete(self, piece):
        """Mark ``piece`` as fully downloaded and stop considering it."""
        assert not self.has[piece]
        self.has[piece] = 1
        self.numgot += 1
        if self.numgot == self.numpieces:
            self.done = True
            # once complete, the two histograms coincide
            self.crosscount2 = self.crosscount
        else:
            numhaves = self.numhaves[piece]
            self.crosscount2[numhaves] -= 1
            if numhaves+1 == len(self.crosscount2):
                self.crosscount2.append(0)
            self.crosscount2[numhaves+1] += 1
        self._remove_from_interests(piece)
    def next(self, haves, wantfunc, complete_first = False):
        """Pick the next piece to request from a peer.

        ``haves`` is the peer's bitfield, ``wantfunc(piece)`` says whether
        we still want a piece. Prefers finishing a started piece, then
        scans the rarity buckets; returns a piece index or None.
        """
        cutoff = self.numgot < self.rarest_first_cutoff
        complete_first = (complete_first or cutoff) and not haves.complete()
        # find the rarest already-started piece this peer can supply
        best = None
        bestnum = 2 ** 30
        for i in self.started:
            if haves[i] and wantfunc(i):
                if self.level_in_interests[i] < bestnum:
                    best = i
                    bestnum = self.level_in_interests[i]
        if best is not None:
            if complete_first or (cutoff and len(self.interests) > self.cutoff):
                return best
        # choose which level ranges to scan; against a seed rarity is
        # irrelevant so only levels below the started best are searched
        if haves.complete():
            r = [ (0, min(bestnum,len(self.interests))) ]
        elif cutoff and len(self.interests) > self.cutoff:
            r = [ (self.cutoff, min(bestnum,len(self.interests))),
                      (0, self.cutoff) ]
        else:
            r = [ (0, min(bestnum,len(self.interests))) ]
        for lo,hi in r:
            for i in xrange(lo,hi):
                for j in self.interests[i]:
                    if haves[j] and wantfunc(j):
                        return j
        if best is not None:
            return best
        return None
    def am_I_complete(self):
        # True once every piece has been downloaded
        return self.done
    def bump(self, piece):
        """Demote ``piece`` to the back of its bucket (e.g. after a failed
        request) and clear its outstanding-request state."""
        l = self.interests[self.level_in_interests[piece]]
        pos = self.pos_in_interests[piece]
        del l[pos]
        l.append(piece)
        # re-index everything that shifted left
        for i in range(pos,len(l)):
            self.pos_in_interests[l[i]] = i
        try:
            self.started.remove(piece)
        except:
            pass
    def set_priority(self, piece, p):
        """Set the priority of ``piece`` (``-1`` blocks it entirely).

        Returns True when the downloader should try requesting more.
        """
        if self.superseed:
            return False    # don't muck with this if you're a superseed
        oldp = self.priority[piece]
        if oldp == p:
            return False
        self.priority[piece] = p
        if p == -1:
            # when setting priority -1,
            # make sure to cancel any downloads for this piece
            if not self.has[piece]:
                self._remove_from_interests(piece, True)
            return True
        if oldp == -1:
            # piece was blocked: re-insert it at its new level
            level = self.numhaves[piece] + (self.priority_step * p)
            self.level_in_interests[piece] = level
            if self.has[piece]:
                return True
            while len(self.interests) < level+1:
                self.interests.append([])
            # random insertion, same scheme as _shift_over
            l2 = self.interests[level]
            parray = self.pos_in_interests
            newp = randrange(len(l2)+1)
            if newp == len(l2):
                parray[piece] = len(l2)
                l2.append(piece)
            else:
                old = l2[newp]
                parray[old] = len(l2)
                l2.append(old)
                l2[newp] = piece
                parray[piece] = newp
            if self.removed_partials.has_key(piece):
                # a partial download was interrupted by the block: resume it
                del self.removed_partials[piece]
                self.started.append(piece)
            # now go to downloader and try requesting more
            return True
        # plain priority change: move the piece by the level delta
        numint = self.level_in_interests[piece]
        newint = numint + ((p - oldp) * self.priority_step)
        self.level_in_interests[piece] = newint
        if self.has[piece]:
            return False
        while len(self.interests) < newint+1:
            self.interests.append([])
        self._shift_over(piece, self.interests[numint], self.interests[newint])
        return False
    def is_blocked(self, piece):
        # True when the piece has been disabled via set_priority(piece, -1)
        return self.priority[piece] < 0
    def set_superseed(self):
        """Switch into super-seed mode (requires a complete download): the
        buckets are rebuilt to schedule which HAVEs to reveal to peers."""
        assert self.done
        self.superseed = True
        self.seed_got_haves = [0] * self.numpieces
        self._init_interests()  # assume everyone is disconnected
    def next_have(self, connection, looser_upload):
        """Super-seed: choose the next piece to reveal to ``connection``.

        Returns a piece index, None (nothing to send yet), or -1 as a
        signal to close the connection.
        """
        if self.seed_time is None:
            self.seed_time = clock()
            return None
        if clock() < self.seed_time+10:  # wait 10 seconds after seeing the first peers
            return None                  # to give time to grab have lists
        if not connection.upload.super_seeding:
            return None
        olddl = self.seed_connections.get(connection)
        if olddl is None:
            ip = connection.get_ip()
            olddl = self.past_ips.get(ip)
            if olddl is not None:  # peer reconnected
                self.seed_connections[connection] = olddl
                if not looser_upload:
                    self.seed_got_haves[olddl] -= 1  # penalize
        if olddl is not None:
            if looser_upload:
                num = 1     # send a new have even if it hasn't spread that piece elsewhere
            else:
                num = 2
            if self.seed_got_haves[olddl] < num:
                return None
            if not connection.upload.was_ever_interested:   # it never downloaded it?
                connection.upload.skipped_count += 1
                if connection.upload.skipped_count >= 3:    # probably another stealthed seed
                    return -1   # signal to close it
        # reveal the least-spread piece this peer does not yet have
        for tier in self.interests:
            for piece in tier:
                if not connection.download.have[piece]:
                    seedint = self.level_in_interests[piece]
                    self.level_in_interests[piece] += 1  # tweak it up one, so you don't duplicate effort
                    if seedint == len(self.interests) - 1:
                        self.interests.append([])
                    self._shift_over(piece,
                                self.interests[seedint], self.interests[seedint + 1])
                    self.seed_got_haves[piece] = 0   # reset this
                    self.seed_connections[connection] = piece
                    connection.upload.seed_have_list.append(piece)
                    return piece
        return -1   # something screwy; terminate connection
    def lost_peer(self, connection):
        """Super-seed bookkeeping for a disconnecting peer: remember its
        revealed piece by IP so a reconnect can resume."""
        olddl = self.seed_connections.get(connection)
        if olddl is None:
            return
        del self.seed_connections[connection]
        self.past_ips[connection.get_ip()] = olddl
        if self.seed_got_haves[olddl] == 1:
            self.seed_got_haves[olddl] = 0
| |
'''
reStructuredText renderer
=========================
.. versionadded:: 1.1.0
`reStructuredText <http://docutils.sourceforge.net/rst.html>`_ is an
easy-to-read, what-you-see-is-what-you-get plaintext markup syntax and parser
system.
.. note::
This widget requires the ``docutils`` package to run. Install it with
``pip`` or include it as one of your deployment requirements.
.. warning::
This widget is highly experimental. The styling and implementation should
not be considered stable until this warning has been removed.
Usage with Text
---------------
::
text = """
.. _top:
Hello world
===========
This is an **emphasized text**, some ``interpreted text``.
And this is a reference to top_::
$ print("Hello world")
"""
document = RstDocument(text=text)
The rendering will output:
.. image:: images/rstdocument.png
Usage with Source
-----------------
You can also render a rst file using the :attr:`~RstDocument.source` property::
document = RstDocument(source='index.rst')
You can reference other documents using the role ``:doc:``. For example, in the
document ``index.rst`` you can write::
Go to my next document: :doc:`moreinfo.rst`
It will generate a link that, when clicked, opens the ``moreinfo.rst``
document.
'''
__all__ = ('RstDocument', )
import os
from os.path import dirname, join, exists, abspath
from kivy.clock import Clock
from kivy.compat import PY2
from kivy.properties import ObjectProperty, NumericProperty, \
DictProperty, ListProperty, StringProperty, \
BooleanProperty, OptionProperty, AliasProperty
from kivy.lang import Builder
from kivy.utils import get_hex_from_color, get_color_from_hex
from kivy.uix.widget import Widget
from kivy.uix.scrollview import ScrollView
from kivy.uix.gridlayout import GridLayout
from kivy.uix.label import Label
from kivy.uix.image import AsyncImage, Image
from kivy.uix.videoplayer import VideoPlayer
from kivy.uix.anchorlayout import AnchorLayout
from kivy.animation import Animation
from kivy.logger import Logger
from docutils.parsers import rst
from docutils.parsers.rst import roles
from docutils import nodes, frontend, utils
from docutils.parsers.rst import Directive, directives
from docutils.parsers.rst.roles import set_classes
#
# Handle some additional roles
#
# Register the extra roles/directives only at runtime, not while building
# the Kivy documentation itself.
if 'KIVY_DOC' not in os.environ:
    class role_doc(nodes.Inline, nodes.TextElement):
        '''Inline docutils node produced by the ``:doc:`` role
        (cross-document link).'''
        pass
    class role_video(nodes.General, nodes.TextElement):
        '''Docutils node produced by the ``video`` directive.'''
        pass
    class VideoDirective(Directive):
        '''rst directive embedding a video: ``.. video:: source`` with
        optional non-negative ``:width:`` / ``:height:`` options.'''
        has_content = False
        required_arguments = 1
        optional_arguments = 0
        final_argument_whitespace = True
        option_spec = {'width': directives.nonnegative_int,
                       'height': directives.nonnegative_int}
        def run(self):
            # Emit a single role_video node carrying the source and options.
            set_classes(self.options)
            node = role_video(source=self.arguments[0], **self.options)
            return [node]
    generic_docroles = {
        'doc': role_doc}
    # Register each generic role with docutils under its CSS class name.
    for rolename, nodeclass in generic_docroles.items():
        generic = roles.GenericRole(rolename, nodeclass)
        role = roles.CustomRole(rolename, generic, {'classes': [rolename]})
        roles.register_local_role(rolename, role)
    directives.register_directive('video', VideoDirective)
Builder.load_string('''
#:import parse_color kivy.parser.parse_color
<RstDocument>:
content: content
scatter: scatter
do_scroll_x: False
canvas.before:
Color:
rgba: parse_color(root.colors['background'])
Rectangle:
pos: self.pos
size: self.size
Scatter:
id: scatter
size_hint_y: None
height: content.minimum_height
width: root.width
scale: 1
do_translation: False, False
do_scale: False
do_rotation: False
GridLayout:
id: content
cols: 1
height: self.minimum_height
width: root.width
padding: 10
<RstTitle>:
markup: True
valign: 'top'
font_size:
sp(self.document.base_font_size - self.section * (
self.document.base_font_size / 31.0 * 2))
size_hint_y: None
height: self.texture_size[1] + dp(20)
text_size: self.width, None
bold: True
canvas:
Color:
rgba: parse_color(self.document.underline_color)
Rectangle:
pos: self.x, self.y + 5
size: self.width, 1
<RstParagraph>:
markup: True
valign: 'top'
size_hint_y: None
height: self.texture_size[1] + self.my
text_size: self.width - self.mx, None
font_size: sp(self.document.base_font_size / 2.0)
<RstTerm>:
size_hint: None, None
height: label.height
anchor_x: 'left'
Label:
id: label
text: root.text
markup: True
valign: 'top'
size_hint: None, None
size: self.texture_size[0] + dp(10), self.texture_size[1] + dp(10)
font_size: sp(root.document.base_font_size / 2.0)
<RstBlockQuote>:
cols: 2
content: content
size_hint_y: None
height: content.height
Widget:
size_hint_x: None
width: 20
GridLayout:
id: content
cols: 1
size_hint_y: None
height: self.minimum_height
<RstLiteralBlock>:
cols: 1
content: content
size_hint_y: None
height: content.texture_size[1] + dp(20)
canvas:
Color:
rgb: parse_color('#cccccc')
Rectangle:
pos: self.x - 1, self.y - 1
size: self.width + 2, self.height + 2
Color:
rgb: parse_color('#eeeeee')
Rectangle:
pos: self.pos
size: self.size
Label:
id: content
markup: True
valign: 'top'
text_size: self.width - 20, None
font_name: 'data/fonts/RobotoMono-Regular.ttf'
color: (0, 0, 0, 1)
<RstList>:
cols: 2
size_hint_y: None
height: self.minimum_height
<RstListItem>:
cols: 1
size_hint_y: None
height: self.minimum_height
<RstSystemMessage>:
cols: 1
size_hint_y: None
height: self.minimum_height
canvas:
Color:
rgba: 1, 0, 0, .3
Rectangle:
pos: self.pos
size: self.size
<RstWarning>:
content: content
cols: 1
padding: 20
size_hint_y: None
height: self.minimum_height
canvas:
Color:
rgba: 1, 0, 0, .5
Rectangle:
pos: self.x + 10, self.y + 10
size: self.width - 20, self.height - 20
GridLayout:
cols: 1
id: content
size_hint_y: None
height: self.minimum_height
<RstNote>:
content: content
cols: 1
padding: 20
size_hint_y: None
height: self.minimum_height
canvas:
Color:
rgba: 0, 1, 0, .5
Rectangle:
pos: self.x + 10, self.y + 10
size: self.width - 20, self.height - 20
GridLayout:
cols: 1
id: content
size_hint_y: None
height: self.minimum_height
<RstImage>:
size_hint: None, None
size: self.texture_size[0], self.texture_size[1] + dp(10)
<RstAsyncImage>:
size_hint: None, None
size: self.texture_size[0], self.texture_size[1] + dp(10)
<RstDefinitionList>:
cols: 1
size_hint_y: None
height: self.minimum_height
font_size: sp(self.document.base_font_size / 2.0)
<RstDefinition>:
cols: 2
size_hint_y: None
height: self.minimum_height
font_size: sp(self.document.base_font_size / 2.0)
<RstFieldList>:
cols: 2
size_hint_y: None
height: self.minimum_height
<RstFieldName>:
markup: True
valign: 'top'
size_hint: 0.2, 1
color: (0, 0, 0, 1)
bold: True
text_size: self.width-10, self.height - 10
valign: 'top'
font_size: sp(self.document.base_font_size / 2.0)
<RstFieldBody>:
cols: 1
size_hint_y: None
height: self.minimum_height
<RstTable>:
size_hint_y: None
height: self.minimum_height
<RstEntry>:
cols: 1
size_hint_y: None
height: self.minimum_height
canvas:
Color:
rgb: .2, .2, .2
Line:
points: [\
self.x,\
self.y,\
self.right,\
self.y,\
self.right,\
self.top,\
self.x,\
self.top,\
self.x,\
self.y]
<RstTransition>:
size_hint_y: None
height: 20
canvas:
Color:
rgb: .2, .2, .2
Line:
points: [self.x, self.center_y, self.right, self.center_y]
<RstListBullet>:
markup: True
valign: 'top'
size_hint_x: None
width: self.texture_size[0] + dp(10)
text_size: None, self.height - dp(10)
font_size: sp(self.document.base_font_size / 2.0)
<RstEmptySpace>:
size_hint: 0.01, 0.01
<RstDefinitionSpace>:
size_hint: None, 0.1
width: 50
font_size: sp(self.document.base_font_size / 2.0)
<RstVideoPlayer>:
options: {'allow_stretch': True}
canvas.before:
Color:
rgba: (1, 1, 1, 1)
BorderImage:
source: 'atlas://data/images/defaulttheme/player-background'
pos: self.x - 25, self.y - 25
size: self.width + 50, self.height + 50
border: (25, 25, 25, 25)
''')
class RstVideoPlayer(VideoPlayer):
    '''Video player widget embedded for the ``.. video::`` directive.'''
    pass
class RstDocument(ScrollView):
    '''Base widget used to store an Rst document. See module documentation for
    more information.
    '''
    source = StringProperty(None)
    '''Filename of the RST document.
    :attr:`source` is a :class:`~kivy.properties.StringProperty` and
    defaults to None.
    '''
    source_encoding = StringProperty('utf-8')
    '''Encoding to be used for the :attr:`source` file.
    :attr:`source_encoding` is a :class:`~kivy.properties.StringProperty` and
    defaults to `utf-8`.
    .. Note::
        It is your responsibility to ensure that the value provided is a
        valid codec supported by python.
    '''
    # 'backslashreplace' is the real codecs error handler; the historical
    # misspelling 'backslashreplac' is kept so existing callers don't break.
    source_error = OptionProperty('strict',
                                  options=('strict', 'ignore', 'replace',
                                           'xmlcharrefreplace',
                                           'backslashreplace',
                                           'backslashreplac'))
    '''Error handling to be used while encoding the :attr:`source` file.
    :attr:`source_error` is an :class:`~kivy.properties.OptionProperty` and
    defaults to `strict`. Can be one of 'strict', 'ignore', 'replace',
    'xmlcharrefreplace' or 'backslashreplace'.
    '''
    text = StringProperty(None)
    '''RST markup text of the document.
    :attr:`text` is a :class:`~kivy.properties.StringProperty` and defaults to
    None.
    '''
    document_root = StringProperty(None)
    '''Root path where :doc: will search for rst documents. If no path is
    given, it will use the directory of the first loaded source file.
    :attr:`document_root` is a :class:`~kivy.properties.StringProperty` and
    defaults to None.
    '''
    base_font_size = NumericProperty(31)
    '''Font size for the biggest title, 31 by default. All other font sizes are
    derived from this.
    .. versionadded:: 1.8.0
    '''
    show_errors = BooleanProperty(False)
    '''Indicate whether RST parsers errors should be shown on the screen
    or not.
    :attr:`show_errors` is a :class:`~kivy.properties.BooleanProperty` and
    defaults to False.
    '''
    def _get_bgc(self):
        return get_color_from_hex(self.colors.background)
    def _set_bgc(self, value):
        # colors are stored as hex strings without the leading '#'
        self.colors.background = get_hex_from_color(value)[1:]
    background_color = AliasProperty(_get_bgc, _set_bgc, bind=('colors',))
    '''Specifies the background_color to be used for the RstDocument.
    .. versionadded:: 1.8.0
    :attr:`background_color` is an :class:`~kivy.properties.AliasProperty`
    for colors['background'].
    '''
    colors = DictProperty({
        'background': 'e5e6e9ff',
        'link': 'ce5c00ff',
        'paragraph': '202020ff',
        'title': '204a87ff',
        'bullet': '000000ff'})
    '''Dictionary of all the colors used in the RST rendering.
    .. warning::
        This dictionary needs special handling. You also need to call
        :meth:`RstDocument.render` if you change them after loading.
    :attr:`colors` is a :class:`~kivy.properties.DictProperty`.
    '''
    title = StringProperty('')
    '''Title of the current document.
    :attr:`title` is a :class:`~kivy.properties.StringProperty` and defaults to
    ''. It is read-only.
    '''
    toctrees = DictProperty({})
    '''Toctree of all loaded or preloaded documents. This dictionary is filled
    when a rst document is explicitly loaded or where :meth:`preload` has been
    called.
    If the document has no filename, e.g. when the document is loaded from a
    text file, the key will be ''.
    :attr:`toctrees` is a :class:`~kivy.properties.DictProperty` and defaults
    to {}.
    '''
    underline_color = StringProperty('204a9699')
    '''underline color of the titles, expressed in html color notation
    :attr:`underline_color` is a
    :class:`~kivy.properties.StringProperty` and defaults to '204a9699'.
    .. versionadded: 1.9.0
    '''
    # internals.
    content = ObjectProperty(None)
    scatter = ObjectProperty(None)
    anchors_widgets = ListProperty([])
    refs_assoc = DictProperty({})
    def __init__(self, **kwargs):
        # trigger rendering at most once per frame even if text changes often
        self._trigger_load = Clock.create_trigger(self._load_from_text, -1)
        self._parser = rst.Parser()
        self._settings = frontend.OptionParser(
            components=(rst.Parser, )).get_default_values()
        super(RstDocument, self).__init__(**kwargs)
    def on_source(self, instance, value):
        if not value:
            return
        if self.document_root is None:
            # set the documentation root to the directory name of the
            # first file
            self.document_root = abspath(dirname(value))
        self._load_from_source()
    def on_text(self, instance, value):
        self._trigger_load()
    def render(self):
        '''Force document rendering.
        '''
        self._load_from_text()
    def resolve_path(self, filename):
        '''Get the path for this filename. If the filename doesn't exist,
        it returns the document_root + filename.
        '''
        if exists(filename):
            return filename
        return join(self.document_root, filename)
    def preload(self, filename, encoding='utf-8', errors='strict'):
        '''Preload a rst file to get its toctree and its title.
        The result will be stored in :attr:`toctrees` with the ``filename`` as
        key.
        '''
        with open(filename, 'rb') as fd:
            text = fd.read().decode(encoding, errors)
        # parse the source
        document = utils.new_document('Document', self._settings)
        self._parser.parse(text, document)
        # fill the current document node
        visitor = _ToctreeVisitor(document)
        document.walkabout(visitor)
        self.toctrees[filename] = visitor.toctree
        return text
    def _load_from_source(self):
        filename = self.resolve_path(self.source)
        self.text = self.preload(filename,
                                 self.source_encoding,
                                 self.source_error)
    def _load_from_text(self, *largs):
        try:
            # clear the current widgets
            self.content.clear_widgets()
            self.anchors_widgets = []
            self.refs_assoc = {}
            # parse the source
            document = utils.new_document('Document', self._settings)
            text = self.text
            if PY2 and type(text) is str:
                text = text.decode('utf-8')
            self._parser.parse(text, document)
            # fill the current document node
            visitor = _Visitor(self, document)
            document.walkabout(visitor)
            self.title = visitor.title or 'No title'
        except Exception:
            # was a bare ``except:``; narrowed so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            Logger.exception('Rst: error while loading text')
    def on_ref_press(self, node, ref):
        self.goto(ref)
    def goto(self, ref, *largs):
        '''Scroll to the reference. If it's not found, nothing will be done.
        For this text::
            .. _myref:
            This is something I always wanted.
        You can do::
            from kivy.clock import Clock
            from functools import partial
            doc = RstDocument(...)
            Clock.schedule_once(partial(doc.goto, 'myref'), 0.1)
        .. note::
            It is preferable to delay the call of the goto if you just loaded
            the document because the layout might not be finished or the
            size of the RstDocument has not yet been determined. In
            either case, the calculation of the scrolling would be
            wrong.
            You can, however, do a direct call if the document is already
            loaded.
        .. versionadded:: 1.3.0
        '''
        # check if it's a file ?
        if ref.endswith('.rst'):
            # whether it's a valid or invalid file, let source deal with it
            self.source = ref
            return
        # get the association
        ref = self.refs_assoc.get(ref, ref)
        # search into all the nodes containing anchors
        ax = ay = None
        for node in self.anchors_widgets:
            if ref in node.anchors:
                ax, ay = node.anchors[ref]
                break
        # not found, stop here
        if ax is None:
            return
        # found, calculate the real coordinate
        # get the anchor coordinate inside widget space
        ax += node.x
        ay = node.top - ay
        # convert to a scroll distance and animate the scroll position
        ay -= self.height
        _, dy = self.convert_distance_to_scroll(0, ay)
        dy = max(0, min(1, dy))
        Animation(scroll_y=dy, d=.25, t='in_out_expo').start(self)
    def add_anchors(self, node):
        # remember widgets that carry [anchor=...] markup so goto() can
        # resolve references to them
        self.anchors_widgets.append(node)
# Plain widget subclasses used by _Visitor; all styling lives in the kv
# rules loaded above.  ``document`` back-references the owning RstDocument.
class RstTitle(Label):
    '''Section title; ``section`` (heading depth) drives the font size.'''
    section = NumericProperty(0)
    document = ObjectProperty(None)
class RstParagraph(Label):
    '''Paragraph label; ``mx``/``my`` are horizontal/vertical margins.'''
    mx = NumericProperty(10)
    my = NumericProperty(10)
    document = ObjectProperty(None)
class RstTerm(AnchorLayout):
    '''Term of a definition list.'''
    text = StringProperty('')
    document = ObjectProperty(None)
class RstBlockQuote(GridLayout):
    '''Indented block quote; children go into ``content``.'''
    content = ObjectProperty(None)
class RstLiteralBlock(GridLayout):
    '''Monospaced literal block; text goes into the ``content`` label.'''
    content = ObjectProperty(None)
class RstList(GridLayout):
    '''Container for bullet and enumerated lists.'''
    pass
class RstListItem(GridLayout):
    '''One list item (the bullet itself is a sibling RstListBullet).'''
    content = ObjectProperty(None)
class RstListBullet(Label):
    '''Bullet or number shown left of a list item.'''
    document = ObjectProperty(None)
class RstSystemMessage(GridLayout):
    '''Docutils parse-error message; only shown when show_errors is True.'''
    pass
class RstWarning(GridLayout):
    '''``.. warning::`` admonition box.'''
    content = ObjectProperty(None)
class RstNote(GridLayout):
    '''``.. note::`` admonition box.'''
    content = ObjectProperty(None)
class RstImage(Image):
    '''Image loaded from a local path.'''
    pass
class RstAsyncImage(AsyncImage):
    '''Image loaded asynchronously from an http(s) URL.'''
    pass
class RstDefinitionList(GridLayout):
    '''Container for term/definition pairs.'''
    document = ObjectProperty(None)
class RstDefinition(GridLayout):
    '''Definition body of a definition list entry.'''
    document = ObjectProperty(None)
class RstFieldList(GridLayout):
    '''Container for field name/body rows.'''
    pass
class RstFieldName(Label):
    '''Bold field name column.'''
    document = ObjectProperty(None)
class RstFieldBody(GridLayout):
    '''Field body column.'''
    pass
class RstGridLayout(GridLayout):
    '''Generic grid container.'''
    pass
class RstTable(GridLayout):
    '''Table container; cols is set from the colspec nodes.'''
    pass
class RstEntry(GridLayout):
    '''Single table cell (drawn with a border in kv).'''
    pass
class RstTransition(Widget):
    '''Horizontal rule for rst transitions (----).'''
    pass
class RstEmptySpace(Widget):
    '''Spacer widget.'''
    pass
class RstDefinitionSpace(Widget):
    '''Fixed-width indent before a definition body.'''
    document = ObjectProperty(None)
class _ToctreeVisitor(nodes.NodeVisitor):
    '''Docutils visitor that extracts a nested table of contents.
    Walks the parsed document and records every section as a dict with
    ``ids``, ``names``, ``title`` and ``children``; the result is available
    as :attr:`toctree` after ``document.walkabout(visitor)``.
    '''
    def __init__(self, *largs):
        self.toctree = self.current = []
        self.queue = []
        self.text = ''
        nodes.NodeVisitor.__init__(self, *largs)
    def push(self, tree):
        # descend: subsequent sections are appended under ``tree``
        self.queue.append(tree)
        self.current = tree
    def pop(self):
        self.current = self.queue.pop()
    def dispatch_visit(self, node):
        kind = node.__class__
        if kind is nodes.title:
            # start accumulating the title text of the current section
            self.text = ''
        elif kind is nodes.Text:
            self.text += node
        elif kind is nodes.section:
            entry = {
                'ids': node['ids'],
                'names': node['names'],
                'title': '',
                'children': []}
            # top level is a list; nested levels are section dicts
            if isinstance(self.current, dict):
                self.current['children'].append(entry)
            else:
                self.current.append(entry)
            self.push(entry)
    def dispatch_departure(self, node):
        kind = node.__class__
        if kind is nodes.title:
            self.current['title'] = self.text
        elif kind is nodes.section:
            self.pop()
class _Visitor(nodes.NodeVisitor):
    # Docutils visitor that walks the parsed rst tree and builds the Kivy
    # widget tree inside ``root.content``.  Inline markup is accumulated in
    # ``self.text`` as Kivy markup tags and flushed into the current widget
    # by :meth:`set_text` when the enclosing node departs.
    def __init__(self, root, *largs):
        self.root = root                     # the RstDocument being filled
        self.title = None                    # first title seen, if any
        self.current_list = []               # stack of parent containers
        self.current = None                  # container receiving widgets
        self.idx_list = None                 # counter for enumerated lists
        self.text = ''                       # pending inline markup text
        self.text_have_anchor = False
        self.section = 0                     # current heading depth
        self.do_strip_text = False           # True inside a paragraph
        self.substitution = {}               # |name| -> replacement node
        nodes.NodeVisitor.__init__(self, *largs)
    def push(self, widget):
        # make ``widget`` the container for subsequently created widgets
        self.current_list.append(self.current)
        self.current = widget
    def pop(self):
        self.current = self.current_list.pop()
    def dispatch_visit(self, node):
        cls = node.__class__
        if cls is nodes.document:
            self.push(self.root.content)
        elif cls is nodes.comment:
            return
        elif cls is nodes.section:
            self.section += 1
        elif cls is nodes.substitution_definition:
            # remember |name| replacements for later references
            name = node.attributes['names'][0]
            self.substitution[name] = node.children[0]
        elif cls is nodes.substitution_reference:
            node = self.substitution[node.attributes['refname']]
            self.text += node
        elif cls is nodes.title:
            label = RstTitle(section=self.section, document=self.root)
            self.current.add_widget(label)
            self.push(label)
            # assert(self.text == '')
        elif cls is nodes.Text:
            # check if parent isn't a special directive
            if hasattr(node, 'parent'):
                if node.parent.tagname == 'substitution_definition':
                    # .. |ref| replace:: something
                    return
                elif node.parent.tagname == 'substitution_reference':
                    # |ref|
                    return
                elif node.parent.tagname == 'comment':
                    # .. COMMENT
                    return
            if self.do_strip_text:
                # normalize source whitespace the way a rendered paragraph
                # would: newlines/tabs become spaces, runs collapse to one
                node = node.replace('\n', ' ')
                node = node.replace('  ', ' ')
                node = node.replace('\t', ' ')
                node = node.replace('  ', ' ')
                if node.startswith(' '):
                    node = ' ' + node.lstrip(' ')
                if node.endswith(' '):
                    node = node.rstrip(' ') + ' '
                if self.text.endswith(' ') and node.startswith(' '):
                    node = node[1:]
            self.text += node
        elif cls is nodes.paragraph:
            self.do_strip_text = True
            label = RstParagraph(document=self.root)
            if isinstance(self.current, RstEntry):
                label.mx = 10
            self.current.add_widget(label)
            self.push(label)
        elif cls is nodes.literal_block:
            box = RstLiteralBlock()
            self.current.add_widget(box)
            self.push(box)
        # inline markup: open the matching Kivy markup tag; the departure
        # handler closes it
        elif cls is nodes.emphasis:
            self.text += '[i]'
        elif cls is nodes.strong:
            self.text += '[b]'
        elif cls is nodes.literal:
            self.text += '[font=fonts/RobotoMono-Regular.ttf]'
        elif cls is nodes.block_quote:
            box = RstBlockQuote()
            self.current.add_widget(box)
            self.push(box.content)
            assert(self.text == '')
        elif cls is nodes.enumerated_list:
            box = RstList()
            self.current.add_widget(box)
            self.push(box)
            self.idx_list = 0
        elif cls is nodes.bullet_list:
            box = RstList()
            self.current.add_widget(box)
            self.push(box)
            self.idx_list = None
        elif cls is nodes.list_item:
            bullet = '-'
            if self.idx_list is not None:
                self.idx_list += 1
                bullet = '%d.' % self.idx_list
            bullet = self.colorize(bullet, 'bullet')
            item = RstListItem()
            self.current.add_widget(RstListBullet(
                text=bullet, document=self.root))
            self.current.add_widget(item)
            self.push(item)
        elif cls is nodes.system_message:
            label = RstSystemMessage()
            if self.root.show_errors:
                self.current.add_widget(label)
            self.push(label)
        elif cls is nodes.warning:
            label = RstWarning()
            self.current.add_widget(label)
            self.push(label.content)
            assert(self.text == '')
        elif cls is nodes.note:
            label = RstNote()
            self.current.add_widget(label)
            self.push(label.content)
            assert(self.text == '')
        elif cls is nodes.image:
            # docutils parser breaks path with spaces
            # e.g. "C:/my path" -> "C:/mypath"
            uri = node['uri']
            align = node.get('align', 'center')
            image_size = [
                node.get('width'),
                node.get('height')
            ]
            # use user's size if defined
            def set_size(img, size):
                img.size = [
                    size[0] or img.width,
                    size[1] or img.height
                ]
            if uri.startswith('/') and self.root.document_root:
                uri = join(self.root.document_root, uri[1:])
            if uri.startswith('http://') or uri.startswith('https://'):
                image = RstAsyncImage(source=uri)
                image.bind(on_load=lambda *a: set_size(image, image_size))
            else:
                image = RstImage(source=uri)
                set_size(image, image_size)
            # wrap in an anchor layout so 'align' is honoured
            root = AnchorLayout(
                size_hint_y=None,
                anchor_x=align,
                height=image.height
            )
            image.bind(height=root.setter('height'))
            root.add_widget(image)
            self.current.add_widget(root)
        elif cls is nodes.definition_list:
            lst = RstDefinitionList(document=self.root)
            self.current.add_widget(lst)
            self.push(lst)
        elif cls is nodes.term:
            assert(isinstance(self.current, RstDefinitionList))
            term = RstTerm(document=self.root)
            self.current.add_widget(term)
            self.push(term)
        elif cls is nodes.definition:
            assert(isinstance(self.current, RstDefinitionList))
            definition = RstDefinition(document=self.root)
            definition.add_widget(RstDefinitionSpace(document=self.root))
            self.current.add_widget(definition)
            self.push(definition)
        elif cls is nodes.field_list:
            fieldlist = RstFieldList()
            self.current.add_widget(fieldlist)
            self.push(fieldlist)
        elif cls is nodes.field_name:
            name = RstFieldName(document=self.root)
            self.current.add_widget(name)
            self.push(name)
        elif cls is nodes.field_body:
            body = RstFieldBody()
            self.current.add_widget(body)
            self.push(body)
        elif cls is nodes.table:
            # cols is incremented by each following colspec node
            table = RstTable(cols=0)
            self.current.add_widget(table)
            self.push(table)
        elif cls is nodes.colspec:
            self.current.cols += 1
        elif cls is nodes.entry:
            entry = RstEntry()
            self.current.add_widget(entry)
            self.push(entry)
        elif cls is nodes.transition:
            self.current.add_widget(RstTransition())
        elif cls is nodes.reference:
            name = node.get('name', node.get('refuri'))
            self.text += '[ref=%s][color=%s]' % (
                name, self.root.colors.get(
                    'link', self.root.colors.get('paragraph')))
            if 'refname' in node and 'name' in node:
                self.root.refs_assoc[node['name']] = node['refname']
        elif cls is nodes.target:
            name = None
            if 'ids' in node:
                name = node['ids'][0]
            elif 'names' in node:
                name = node['names'][0]
            self.text += '[anchor=%s]' % name
            self.text_have_anchor = True
        elif cls is role_doc:
            # remember where the :doc: text starts; rewritten on departure
            self.doc_index = len(self.text)
        elif cls is role_video:
            pass
    def dispatch_departure(self, node):
        cls = node.__class__
        if cls is nodes.document:
            self.pop()
        elif cls is nodes.section:
            self.section -= 1
        elif cls is nodes.title:
            assert(isinstance(self.current, RstTitle))
            if not self.title:
                self.title = self.text
            self.set_text(self.current, 'title')
            self.pop()
        elif cls is nodes.Text:
            pass
        elif cls is nodes.paragraph:
            self.do_strip_text = False
            assert(isinstance(self.current, RstParagraph))
            self.set_text(self.current, 'paragraph')
            self.pop()
        elif cls is nodes.literal_block:
            assert(isinstance(self.current, RstLiteralBlock))
            self.set_text(self.current.content, 'literal_block')
            self.pop()
        elif cls is nodes.emphasis:
            self.text += '[/i]'
        elif cls is nodes.strong:
            self.text += '[/b]'
        elif cls is nodes.literal:
            self.text += '[/font]'
        elif cls is nodes.block_quote:
            self.pop()
        elif cls is nodes.enumerated_list:
            self.idx_list = None
            self.pop()
        elif cls is nodes.bullet_list:
            self.pop()
        elif cls is nodes.list_item:
            self.pop()
        elif cls is nodes.system_message:
            self.pop()
        elif cls is nodes.warning:
            self.pop()
        elif cls is nodes.note:
            self.pop()
        elif cls is nodes.definition_list:
            self.pop()
        elif cls is nodes.term:
            assert(isinstance(self.current, RstTerm))
            self.set_text(self.current, 'term')
            self.pop()
        elif cls is nodes.definition:
            self.pop()
        elif cls is nodes.field_list:
            self.pop()
        elif cls is nodes.field_name:
            assert(isinstance(self.current, RstFieldName))
            self.set_text(self.current, 'field_name')
            self.pop()
        elif cls is nodes.field_body:
            self.pop()
        elif cls is nodes.table:
            self.pop()
        elif cls is nodes.colspec:
            pass
        elif cls is nodes.entry:
            self.pop()
        elif cls is nodes.reference:
            self.text += '[/color][/ref]'
        elif cls is role_doc:
            # turn the raw :doc:`target` text into a colored [ref=...] link,
            # using the target document's first section title when available
            docname = self.text[self.doc_index:]
            rst_docname = docname
            if rst_docname.endswith('.rst'):
                docname = docname[:-4]
            else:
                rst_docname += '.rst'
            # try to preload it
            filename = self.root.resolve_path(rst_docname)
            self.root.preload(filename)
            # if exist, use the title of the first section found in the
            # document
            title = docname
            if filename in self.root.toctrees:
                toctree = self.root.toctrees[filename]
                if len(toctree):
                    title = toctree[0]['title']
            # replace the text with a good reference
            text = '[ref=%s]%s[/ref]' % (
                rst_docname,
                self.colorize(title, 'link'))
            self.text = self.text[:self.doc_index] + text
        elif cls is role_video:
            width = node['width'] if 'width' in node.attlist() else 400
            height = node['height'] if 'height' in node.attlist() else 300
            uri = node['source']
            if uri.startswith('/') and self.root.document_root:
                uri = join(self.root.document_root, uri[1:])
            video = RstVideoPlayer(
                source=uri,
                size_hint=(None, None),
                size=(width, height))
            anchor = AnchorLayout(size_hint_y=None, height=height + 20)
            anchor.add_widget(video)
            self.current.add_widget(anchor)
    def set_text(self, node, parent):
        # flush the accumulated markup into ``node`` and reset the buffer
        text = self.text
        if parent == 'term' or parent == 'field_name':
            text = '[b]%s[/b]' % text
        # search anchors
        node.text = self.colorize(text, parent)
        node.bind(on_ref_press=self.root.on_ref_press)
        if self.text_have_anchor:
            self.root.add_anchors(node)
        self.text = ''
        self.text_have_anchor = False
    def colorize(self, text, name):
        # wrap ``text`` in the color configured for ``name`` (falls back to
        # the paragraph color)
        return '[color=%s]%s[/color]' % (
            self.root.colors.get(name, self.root.colors['paragraph']),
            text)
if __name__ == '__main__':
    # Manual smoke test: render the rst file given on the command line.
    from kivy.base import runTouchApp
    import sys
    runTouchApp(RstDocument(source=sys.argv[1]))
| |
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import logging
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
import six.moves.urllib.parse as urlparse
from keystoneclient import exceptions as keystone_exceptions
from openstack_auth import backend
from openstack_auth import utils as auth_utils
from horizon import exceptions
from horizon import messages
from horizon.utils import functions as utils
from openstack_dashboard.api import base
from openstack_dashboard import policy
LOG = logging.getLogger(__name__)
# Module-level default role holder; starts as None and is presumably filled
# in lazily elsewhere — semantics not visible in this file section.
DEFAULT_ROLE = None
# Set up our data structure for managing Identity API versions, and
# add a couple utility methods to it.
class IdentityAPIVersionManager(base.APIVersionManager):
    '''Version manager for the Identity API with keystone-specific helpers.'''
    def upgrade_v2_user(self, user):
        '''Give a keystone v2 user object a v3-style ``project_id``.
        Falls back to ``default_project_id`` then ``tenantId``; returns the
        (possibly mutated) user.
        '''
        if getattr(user, "project_id", None) is None:
            fallback = getattr(user, "tenantId", None)
            user.project_id = getattr(user, "default_project_id", fallback)
        return user
    def get_project_manager(self, *args, **kwargs):
        '''Return the project manager: ``tenants`` on v2, ``projects`` on v3.'''
        client = keystoneclient(*args, **kwargs)
        return client.tenants if VERSIONS.active < 3 else client.projects
# Singleton that tracks which Identity API versions are importable and which
# one is active for this deployment.
VERSIONS = IdentityAPIVersionManager(
    "identity", preferred_version=auth_utils.get_keystone_version())
# Import from oldest to newest so that "preferred" takes correct precedence.
try:
    from keystoneclient.v2_0 import client as keystone_client_v2
    VERSIONS.load_supported_version(2.0, {"client": keystone_client_v2})
except ImportError:
    pass
try:
    from keystoneclient.v3 import client as keystone_client_v3
    VERSIONS.load_supported_version(3, {"client": keystone_client_v3})
except ImportError:
    pass
class Service(base.APIDictWrapper):
    """Wrapper for a dict based on the service data from keystone."""
    _attrs = ['id', 'type', 'name']
    def __init__(self, service, region, *args, **kwargs):
        super(Service, self).__init__(service, *args, **kwargs)
        # resolve the endpoint URLs for the given region up front
        self.public_url = base.get_url_for_service(service, region,
                                                   'publicURL')
        self.url = base.get_url_for_service(service, region, 'internalURL')
        if self.url:
            self.host = urlparse.urlparse(self.url).hostname
        else:
            self.host = None
        self.disabled = None
        self.region = region
    def __unicode__(self):
        if self.type == "identity":
            return _("%(type)s (%(backend)s backend)") \
                % {"type": self.type, "backend": keystone_backend_name()}
        else:
            return self.type
    def __repr__(self):
        # The ``unicode()`` builtin does not exist on Python 3; call the
        # method directly so the repr works on both interpreters.
        return "<Service: %s>" % self.__unicode__()
def _get_endpoint_url(request, endpoint_type, catalog=None):
    """Return the identity endpoint URL for this request.
    Prefers the user's service catalog; falls back to the configured
    keystone URL (or the session's region endpoint). The active API version
    is always appended to the path.
    """
    if getattr(request.user, "service_catalog", None):
        url = base.url_for(request,
                           service_type='identity',
                           endpoint_type=endpoint_type)
    else:
        fallback = getattr(settings, 'OPENSTACK_KEYSTONE_URL')
        url = request.session.get('region_endpoint', fallback)
    # TODO(gabriel): When the Service Catalog no longer contains API versions
    # in the endpoints this can be removed.
    return urlparse.urljoin(url.rstrip('/'), 'v%s' % VERSIONS.active)
def keystoneclient(request, admin=False):
    """Returns a client connected to the Keystone backend.
    Several forms of authentication are supported:
        * Username + password -> Unscoped authentication
        * Username + password + tenant id -> Scoped authentication
        * Unscoped token -> Unscoped authentication
        * Unscoped token + tenant id -> Scoped authentication
        * Scoped token -> Scoped authentication
    Available services and data from the backend will vary depending on
    whether the authentication was scoped or unscoped.
    Lazy authentication if an ``endpoint`` parameter is provided.
    Calls requiring the admin endpoint should have ``admin=True`` passed in
    as a keyword argument.
    The client is cached so that subsequent API calls during the same
    request/response cycle don't have to be re-authenticated.
    """
    user = request.user
    if admin:
        # admin calls require both the policy check and the admin endpoint
        if not policy.check((("identity", "admin_required"),), request):
            raise exceptions.NotAuthorized
        endpoint_type = 'adminURL'
    else:
        endpoint_type = getattr(settings,
                                'OPENSTACK_ENDPOINT_TYPE',
                                'internalURL')
    api_version = VERSIONS.get_active_version()
    # Take care of client connection caching/fetching a new client.
    # Admin vs. non-admin clients are cached separately for token matching.
    cache_attr = "_keystoneclient_admin" if admin \
        else backend.KEYSTONE_CLIENT_ATTR
    if (hasattr(request, cache_attr) and
            (not user.token.id or
             getattr(request, cache_attr).auth_token == user.token.id)):
        conn = getattr(request, cache_attr)
    else:
        endpoint = _get_endpoint_url(request, endpoint_type)
        insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
        cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
        # lazy %-style args: the message is only built if debug is enabled
        LOG.debug("Creating a new keystoneclient connection to %s.", endpoint)
        remote_addr = request.environ.get('REMOTE_ADDR', '')
        conn = api_version['client'].Client(token=user.token.id,
                                            endpoint=endpoint,
                                            original_ip=remote_addr,
                                            insecure=insecure,
                                            cacert=cacert,
                                            auth_url=endpoint,
                                            debug=settings.DEBUG)
        setattr(request, cache_attr, conn)
    return conn
def domain_create(request, name, description=None, enabled=None):
    """Create a new keystone domain (admin endpoint)."""
    return keystoneclient(request, admin=True).domains.create(
        name, description=description, enabled=enabled)
def domain_get(request, domain_id):
    """Fetch a single domain by id (admin endpoint)."""
    return keystoneclient(request, admin=True).domains.get(domain_id)
def domain_delete(request, domain_id):
    """Delete a domain by id (admin endpoint)."""
    return keystoneclient(request, admin=True).domains.delete(domain_id)
def domain_list(request):
    """List all domains (admin endpoint)."""
    return keystoneclient(request, admin=True).domains.list()
def domain_update(request, domain_id, name=None, description=None,
                  enabled=None):
    """Update name/description/enabled flags of an existing domain."""
    return keystoneclient(request, admin=True).domains.update(
        domain_id, name, description, enabled)
def tenant_create(request, name, description=None, enabled=None,
                  domain=None, **kwargs):
    """Create a project (tenant), dispatching on the keystone version."""
    manager = VERSIONS.get_project_manager(request, admin=True)
    if VERSIONS.active < 3:
        # v2 has no domains and takes positional arguments
        return manager.create(name, description, enabled, **kwargs)
    return manager.create(name, domain,
                          description=description,
                          enabled=enabled, **kwargs)
def get_default_domain(request):
    """Gets the default domain object to use when creating Identity objects.
    Returns the domain context if set, otherwise returns the domain
    of the logged-in user.
    """
    domain_id = request.session.get("domain_context", None)
    domain_name = request.session.get("domain_context_name", None)
    # if running in Keystone V3 or later
    if VERSIONS.active >= 3 and not domain_id:
        # if no domain context set, default to users' domain
        domain_id = request.user.user_domain_id
        try:
            domain = domain_get(request, domain_id)
            domain_name = domain.name
        except Exception:
            # lazy %-style args: don't build the message unless it is logged
            LOG.warning("Unable to retrieve Domain: %s", domain_id)
    domain = base.APIDictWrapper({"id": domain_id,
                                  "name": domain_name})
    return domain
# TODO(gabriel): Is there ever a valid case for admin to be false here?
# A quick search through the codebase reveals that it's always called with
# admin=true so I suspect we could eliminate it entirely as with the other
# tenant commands.
def tenant_get(request, project, admin=True):
    """Return a single project by id (or project object)."""
    return VERSIONS.get_project_manager(request, admin=admin).get(project)
def tenant_delete(request, project):
    """Delete a project (admin endpoint)."""
    return VERSIONS.get_project_manager(request, admin=True).delete(project)
def tenant_list(request, paginate=False, marker=None, domain=None, user=None,
                admin=True, filters=None):
    """List projects, returning a ``(tenants, has_more_data)`` tuple.
    Pagination (``paginate``/``marker``) only applies on keystone v2; the v3
    branch filters by ``domain``/``user`` plus any extra ``filters``.
    """
    manager = VERSIONS.get_project_manager(request, admin=admin)
    page_size = utils.get_page_size(request)
    # fetch one extra row so we can tell whether another page exists
    limit = page_size + 1 if paginate else None
    has_more_data = False
    if VERSIONS.active < 3:
        tenants = manager.list(limit, marker)
        if paginate and len(tenants) > page_size:
            tenants.pop(-1)
            has_more_data = True
    else:
        query = {
            "domain": domain,
            "user": user
        }
        if filters is not None:
            query.update(filters)
        tenants = manager.list(**query)
    return (tenants, has_more_data)
def tenant_update(request, project, name=None, description=None,
                  enabled=None, domain=None, **kwargs):
    """Update a project; the v2 API only accepts positional arguments."""
    manager = VERSIONS.get_project_manager(request, admin=True)
    if VERSIONS.active >= 3:
        return manager.update(project, name=name, description=description,
                              enabled=enabled, domain=domain, **kwargs)
    return manager.update(project, name, description, enabled, **kwargs)
def user_list(request, project=None, domain=None, group=None, filters=None):
    """List users, optionally scoped by project/domain/group (v3 only)."""
    if VERSIONS.active < 3:
        kwargs = {"tenant_id": project}
    else:
        kwargs = {"project": project, "domain": domain, "group": group}
        if filters is not None:
            kwargs.update(filters)
    manager = keystoneclient(request, admin=True).users
    return [VERSIONS.upgrade_v2_user(u) for u in manager.list(**kwargs)]
def user_create(request, name=None, email=None, password=None, project=None,
                enabled=None, domain=None):
    """Create a user, normalizing v2 results to the v3 user shape."""
    users = keystoneclient(request, admin=True).users
    try:
        if VERSIONS.active >= 3:
            return users.create(name, password=password, email=email,
                                project=project, enabled=enabled,
                                domain=domain)
        new_user = users.create(name, password, email, project, enabled)
        return VERSIONS.upgrade_v2_user(new_user)
    except keystone_exceptions.Conflict:
        raise exceptions.Conflict()
def user_delete(request, user_id):
    """Remove the given user from the identity backend."""
    manager = keystoneclient(request, admin=True).users
    return manager.delete(user_id)
def user_get(request, user_id, admin=True):
    """Fetch a single user and normalize it to the v3 shape."""
    raw_user = keystoneclient(request, admin=admin).users.get(user_id)
    return VERSIONS.upgrade_v2_user(raw_user)
def user_update(request, user, **data):
    """Update a user's attributes.

    The v2 API updates the user model, the default project and checks
    role assignments as three separate calls; any non-Conflict failure is
    remembered and re-raised at the end so all steps get a chance to run.

    :raises keystone_exceptions.ClientException: backend is read-only.
    :raises exceptions.Conflict: the new name/email collides with another
        user.
    """
    manager = keystoneclient(request, admin=True).users
    error = None
    if not keystone_can_edit_user():
        raise keystone_exceptions.ClientException(
            405, _("Identity service does not allow editing user data."))
    # The v2 API updates user model and default project separately
    if VERSIONS.active < 3:
        # NOTE(review): assumes callers always supply 'project' in data on
        # the v2 path -- a missing key would raise KeyError here; confirm.
        project = data.pop('project')
        # Update user details
        try:
            user = manager.update(user, **data)
        except keystone_exceptions.Conflict:
            raise exceptions.Conflict()
        except Exception:
            # Remember the failure but keep going so the project update
            # below still runs.
            error = exceptions.handle(request, ignore=True)
        # Update default tenant
        try:
            user_update_tenant(request, user, project)
            user.tenantId = project
        except Exception:
            error = exceptions.handle(request, ignore=True)
        # Check for existing roles
        # Show a warning if no role exists for the project
        user_roles = roles_for_user(request, user, project)
        if not user_roles:
            messages.warning(request,
                             _('User %s has no role defined for '
                               'that project.')
                             % data.get('name', None))
        if error is not None:
            raise error
    # v3 API is so much simpler...
    else:
        try:
            user = manager.update(user, **data)
        except keystone_exceptions.Conflict:
            raise exceptions.Conflict()
def user_update_enabled(request, user, enabled):
    """Enable or disable a user account."""
    manager = keystoneclient(request, admin=True).users
    if VERSIONS.active >= 3:
        return manager.update(user, enabled=enabled)
    return manager.update_enabled(user, enabled)
def user_update_password(request, user, password, admin=True):
    """Set a new password for ``user`` (administrative reset)."""
    if not keystone_can_edit_user():
        raise keystone_exceptions.ClientException(
            405, _("Identity service does not allow editing user password."))
    manager = keystoneclient(request, admin=admin).users
    if VERSIONS.active >= 3:
        return manager.update(user, password=password)
    return manager.update_password(user, password)
def user_update_own_password(request, origpassword, password):
    """Change the logged-in user's own password (non-admin client)."""
    client = keystoneclient(request, admin=False)
    client.user_id = request.user.id
    if VERSIONS.active >= 3:
        return client.users.update_password(origpassword, password)
    return client.users.update_own_password(origpassword, password)
def user_update_tenant(request, user, project, admin=True):
    """Set the default project (tenant) for ``user``."""
    manager = keystoneclient(request, admin=admin).users
    if VERSIONS.active >= 3:
        return manager.update(user, project=project)
    return manager.update_tenant(user, project)
def group_create(request, domain_id, name, description=None):
    """Create a group inside the given domain."""
    return keystoneclient(request, admin=True).groups.create(
        domain=domain_id, name=name, description=description)
def group_get(request, group_id, admin=True):
    """Fetch a single group by its ID."""
    return keystoneclient(request, admin=admin).groups.get(group_id)
def group_delete(request, group_id):
    """Delete the group identified by ``group_id``."""
    return keystoneclient(request, admin=True).groups.delete(group_id)
def group_list(request, domain=None, project=None, user=None):
    """List groups, optionally narrowed by domain, user and project.

    Keystone cannot filter groups by project server-side, so when
    ``project`` is given the result is narrowed client-side to groups
    with at least one role assignment on that project.
    """
    manager = keystoneclient(request, admin=True).groups
    groups = manager.list(user=user, domain=domain)
    if project:
        # The original `roles and len(roles) > 0` collapses to a plain
        # truthiness test: an empty list is already falsy.
        groups = [group for group in groups
                  if roles_for_group(request, group=group.id,
                                     project=project)]
    return groups
def group_update(request, group_id, name=None, description=None):
    """Update a group's name and/or description."""
    return keystoneclient(request, admin=True).groups.update(
        group=group_id, name=name, description=description)
def add_group_user(request, group_id, user_id):
    """Add the given user to the given group."""
    users = keystoneclient(request, admin=True).users
    return users.add_to_group(group=group_id, user=user_id)
def remove_group_user(request, group_id, user_id):
    """Remove the given user from the given group."""
    users = keystoneclient(request, admin=True).users
    return users.remove_from_group(group=group_id, user=user_id)
def get_project_groups_roles(request, project):
    """Gets the groups roles in a given project.

    :param request: the request entity containing the login user information
    :param project: the project to filter the groups roles. It accepts both
                    project object resource or project ID
    :returns group_roles: a dictionary mapping the groups and their roles in
                          given project
    """
    groups_roles = collections.defaultdict(list)
    for assignment in role_assignments_list(request, project=project):
        # Assignments may also target users; keep only group assignments.
        if not hasattr(assignment, 'group'):
            continue
        groups_roles[assignment.group['id']].append(assignment.role['id'])
    return groups_roles
def role_assignments_list(request, project=None, user=None, role=None,
                          group=None, domain=None, effective=False):
    """List role assignments; only supported by the v3 API."""
    if VERSIONS.active < 3:
        raise exceptions.NotAvailable
    return keystoneclient(request, admin=True).role_assignments.list(
        project=project, user=user, role=role, group=group,
        domain=domain, effective=effective)
def role_create(request, name):
    """Create a new role with the given name."""
    return keystoneclient(request, admin=True).roles.create(name)
def role_get(request, role_id):
    """Fetch a single role by its ID."""
    return keystoneclient(request, admin=True).roles.get(role_id)
def role_update(request, role_id, name=None):
    """Rename an existing role."""
    return keystoneclient(request, admin=True).roles.update(role_id, name)
def role_delete(request, role_id):
    """Delete the role identified by ``role_id``."""
    return keystoneclient(request, admin=True).roles.delete(role_id)
def role_list(request):
    """Returns a global list of available roles."""
    manager = keystoneclient(request, admin=True).roles
    return manager.list()
def roles_for_user(request, user, project=None, domain=None):
    """Returns a list of user roles scoped to a project or domain."""
    manager = keystoneclient(request, admin=True).roles
    if VERSIONS.active >= 3:
        return manager.list(user=user, domain=domain, project=project)
    return manager.roles_for_user(user, project)
def get_domain_users_roles(request, domain):
    """Map each user ID to its list of role IDs within the given domain."""
    users_roles = collections.defaultdict(list)
    for assignment in role_assignments_list(request, domain=domain):
        # Skip group assignments; only direct user assignments matter here.
        if not hasattr(assignment, 'user'):
            continue
        users_roles[assignment.user['id']].append(assignment.role['id'])
    return users_roles
def add_domain_user_role(request, user, role, domain):
    """Adds a role for a user on a domain."""
    return keystoneclient(request, admin=True).roles.grant(
        role, user=user, domain=domain)
def remove_domain_user_role(request, user, role, domain=None):
    """Removes a given single role for a user from a domain."""
    return keystoneclient(request, admin=True).roles.revoke(
        role, user=user, domain=domain)
def get_project_users_roles(request, project):
    """Map each user ID to its list of role IDs within the given project."""
    users_roles = collections.defaultdict(list)
    if VERSIONS.active < 3:
        # v2 has no role-assignment API: walk the project's users and look
        # up each one's roles individually.
        for member in user_list(request, project=project):
            member_roles = roles_for_user(request, member.id, project)
            users_roles[member.id].extend(r.id for r in member_roles)
    else:
        for assignment in role_assignments_list(request, project=project):
            if not hasattr(assignment, 'user'):
                continue
            users_roles[assignment.user['id']].append(
                assignment.role['id'])
    return users_roles
def add_tenant_user_role(request, project=None, user=None, role=None,
                         group=None, domain=None):
    """Adds a role for a user on a tenant."""
    manager = keystoneclient(request, admin=True).roles
    if VERSIONS.active >= 3:
        return manager.grant(role, user=user, project=project,
                             group=group, domain=domain)
    return manager.add_user_role(user, role, project)
def remove_tenant_user_role(request, project=None, user=None, role=None,
                            group=None, domain=None):
    """Removes a given single role for a user from a tenant."""
    manager = keystoneclient(request, admin=True).roles
    if VERSIONS.active >= 3:
        return manager.revoke(role, user=user, project=project,
                              group=group, domain=domain)
    return manager.remove_user_role(user, role, project)
def remove_tenant_user(request, project=None, user=None, domain=None):
    """Removes all roles from a user on a tenant, removing them from it."""
    client = keystoneclient(request, admin=True)
    # NOTE(review): ``roles_for_user`` is the v2-style manager call; confirm
    # the v3 roles manager exposes it too before relying on this path under
    # Keystone v3.
    roles = client.roles.roles_for_user(user, project)
    for role in roles:
        # Revoke each role individually; the last revocation detaches the
        # user from the project entirely.
        remove_tenant_user_role(request, user=user, role=role.id,
                                project=project, domain=domain)
def roles_for_group(request, group, domain=None, project=None):
    """List the roles granted to a group on a domain or project."""
    return keystoneclient(request, admin=True).roles.list(
        group=group, domain=domain, project=project)
def add_group_role(request, role, group, domain=None, project=None):
    """Adds a role for a group on a domain or project."""
    return keystoneclient(request, admin=True).roles.grant(
        role=role, group=group, domain=domain, project=project)
def remove_group_role(request, role, group, domain=None, project=None):
    """Removes a given single role for a group from a domain or project."""
    return keystoneclient(request, admin=True).roles.revoke(
        role=role, group=group, project=project, domain=domain)
def remove_group_roles(request, group, domain=None, project=None):
    """Removes all roles from a group on a domain or project."""
    client = keystoneclient(request, admin=True)
    granted = client.roles.list(group=group, domain=domain, project=project)
    for granted_role in granted:
        remove_group_role(request, role=granted_role.id, group=group,
                          domain=domain, project=project)
def get_default_role(request):
    """Gets the default role object from Keystone and saves it as a global.

    Since this is configured in settings and should not change from request
    to request. Supports lookup by name or id.
    """
    global DEFAULT_ROLE
    default = getattr(settings, "OPENSTACK_KEYSTONE_DEFAULT_ROLE", None)
    # Only hit the API the first time; afterwards the module-level cache
    # (DEFAULT_ROLE) is returned as-is.
    if default and DEFAULT_ROLE is None:
        try:
            roles = keystoneclient(request, admin=True).roles.list()
        except Exception:
            roles = []
            exceptions.handle(request)
        for role in roles:
            # Settings may reference the role either by id or by name.
            if role.id == default or role.name == default:
                DEFAULT_ROLE = role
                break
    return DEFAULT_ROLE
def ec2_manager(request):
    """Return the EC2 credentials manager for the scoped client."""
    client = keystoneclient(request)
    if not hasattr(client, 'ec2'):
        # Keystoneclient 4.0 was released without the ec2 creds manager,
        # so instantiate one by hand in that case.
        from keystoneclient.v2_0 import ec2
        return ec2.CredentialsManager(client)
    return client.ec2
def list_ec2_credentials(request, user_id):
    """List all EC2 credential pairs owned by ``user_id``."""
    manager = ec2_manager(request)
    return manager.list(user_id)
def create_ec2_credentials(request, user_id, tenant_id):
    """Create a new EC2 access/secret pair for a user on a tenant."""
    manager = ec2_manager(request)
    return manager.create(user_id, tenant_id)
def get_user_ec2_credentials(request, user_id, access_token):
    """Fetch one of a user's EC2 credential pairs by access token."""
    manager = ec2_manager(request)
    return manager.get(user_id, access_token)
def keystone_can_edit_domain():
    """Whether the backend supports domain edits (multi-domain only)."""
    backend = getattr(settings, "OPENSTACK_KEYSTONE_BACKEND", {})
    multidomain = getattr(settings,
                          'OPENSTACK_KEYSTONE_MULTIDOMAIN_SUPPORT',
                          False)
    return backend.get('can_edit_domain', True) and multidomain
def keystone_can_edit_user():
    """Whether the configured identity backend allows editing users."""
    backend = getattr(settings, "OPENSTACK_KEYSTONE_BACKEND", {})
    return backend.get('can_edit_user', True)
def keystone_can_edit_project():
    """Whether the configured identity backend allows editing projects."""
    backend = getattr(settings, "OPENSTACK_KEYSTONE_BACKEND", {})
    return backend.get('can_edit_project', True)
def keystone_can_edit_group():
    """Whether the configured identity backend allows editing groups."""
    backend = getattr(settings, "OPENSTACK_KEYSTONE_BACKEND", {})
    return backend.get('can_edit_group', True)
def keystone_can_edit_role():
    """Whether the configured identity backend allows editing roles."""
    backend = getattr(settings, "OPENSTACK_KEYSTONE_BACKEND", {})
    return backend.get('can_edit_role', True)
def keystone_backend_name():
    """Return the configured identity backend name, or 'unknown'.

    Uses ``.get`` so a backend dict without a 'name' key degrades to
    'unknown' instead of raising KeyError, matching the defensive style
    of the ``keystone_can_edit_*`` helpers above.
    """
    backend_settings = getattr(settings, "OPENSTACK_KEYSTONE_BACKEND", {})
    return backend_settings.get('name', 'unknown')
| |
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import socket
import sys
import traceback
from oslo.config import cfg
import six
from six import moves
_PY26 = sys.version_info[0:2] == (2, 6)
from sahara.openstack.common.gettextutils import _
from sahara.openstack.common import importutils
from sahara.openstack.common import jsonutils
from sahara.openstack.common import local
# NOTE(flaper87): Pls, remove when graduating this module
# from the incubator.
from sahara.openstack.common.strutils import mask_password # noqa
# Fallback strftime format for %(asctime)s when log-date-format is unset.
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
# CLI switches shared by every service: -d/--debug and -v/--verbose.
common_cli_opts = [
    cfg.BoolOpt('debug',
                short='d',
                default=False,
                help='Print debugging output (set logging level to '
                     'DEBUG instead of default WARNING level).'),
    cfg.BoolOpt('verbose',
                short='v',
                default=False,
                help='Print more verbose output (set logging level to '
                     'INFO instead of default WARNING level).'),
]
# CLI options controlling log destinations (file/dir/syslog) and formats.
logging_cli_opts = [
    cfg.StrOpt('log-config-append',
               metavar='PATH',
               deprecated_name='log-config',
               help='The name of a logging configuration file. This file '
                    'is appended to any existing logging configuration '
                    'files. For details about logging configuration files, '
                    'see the Python logging module documentation.'),
    cfg.StrOpt('log-format',
               metavar='FORMAT',
               help='DEPRECATED. '
                    'A logging.Formatter log message format string which may '
                    'use any of the available logging.LogRecord attributes. '
                    'This option is deprecated. Please use '
                    'logging_context_format_string and '
                    'logging_default_format_string instead.'),
    cfg.StrOpt('log-date-format',
               default=_DEFAULT_LOG_DATE_FORMAT,
               metavar='DATE_FORMAT',
               help='Format string for %%(asctime)s in log records. '
                    'Default: %(default)s .'),
    cfg.StrOpt('log-file',
               metavar='PATH',
               deprecated_name='logfile',
               help='(Optional) Name of log file to output to. '
                    'If no default is set, logging will go to stdout.'),
    cfg.StrOpt('log-dir',
               deprecated_name='logdir',
               help='(Optional) The base directory used for relative '
                    '--log-file paths.'),
    cfg.BoolOpt('use-syslog',
                default=False,
                help='Use syslog for logging. '
                     'Existing syslog format is DEPRECATED during I, '
                     'and will change in J to honor RFC5424.'),
    cfg.BoolOpt('use-syslog-rfc-format',
                # TODO(bogdando) remove or use True after existing
                # syslog format deprecation in J
                default=False,
                help='(Optional) Enables or disables syslog rfc5424 format '
                     'for logging. If enabled, prefixes the MSG part of the '
                     'syslog message with APP-NAME (RFC5424). The '
                     'format without the APP-NAME is deprecated in I, '
                     'and will be removed in J.'),
    cfg.StrOpt('syslog-log-facility',
               default='LOG_USER',
               help='Syslog facility to receive log lines.')
]
generic_log_opts = [
    cfg.BoolOpt('use_stderr',
                default=True,
                help='Log output to standard error.')
]
# Default per-library log levels applied when default_log_levels is unset;
# quiets chatty third-party libraries.
DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
                      'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
                      'oslo.messaging=INFO', 'iso8601=WARN',
                      'requests.packages.urllib3.connectionpool=WARN',
                      'urllib3.connectionpool=WARN', 'websocket=WARN',
                      "keystonemiddleware=WARN", "routes.middleware=WARN",
                      "stevedore=WARN"]
# Non-CLI options controlling record formatting.
log_opts = [
    cfg.StrOpt('logging_context_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [%(request_id)s %(user_identity)s] '
                       '%(instance)s%(message)s',
               help='Format string to use for log messages with context.'),
    cfg.StrOpt('logging_default_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [-] %(instance)s%(message)s',
               help='Format string to use for log messages without context.'),
    cfg.StrOpt('logging_debug_format_suffix',
               default='%(funcName)s %(pathname)s:%(lineno)d',
               help='Data to append to log format when level is DEBUG.'),
    cfg.StrOpt('logging_exception_prefix',
               default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
                       '%(instance)s',
               help='Prefix each line of exception output with this format.'),
    cfg.ListOpt('default_log_levels',
                default=DEFAULT_LOG_LEVELS,
                help='List of logger=LEVEL pairs.'),
    cfg.BoolOpt('publish_errors',
                default=False,
                help='Enables or disables publication of error events.'),
    cfg.BoolOpt('fatal_deprecations',
                default=False,
                help='Enables or disables fatal status of deprecations.'),
    # NOTE(mikal): there are two options here because sometimes we are handed
    # a full instance (and could include more information), and other times we
    # are just handed a UUID for the instance.
    cfg.StrOpt('instance_format',
               default='[instance: %(uuid)s] ',
               help='The format for an instance that is passed with the log '
                    'message.'),
    cfg.StrOpt('instance_uuid_format',
               default='[instance: %(uuid)s] ',
               help='The format for an instance UUID that is passed with the '
                    'log message.'),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
try:
    NullHandler = logging.NullHandler
except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass
        def emit(self, record):
            pass
        def createLock(self):
            self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
    """Resolve the log destination from CONF; None means log to stdout."""
    logfile = CONF.log_file
    logdir = CONF.log_dir
    if logfile:
        return os.path.join(logdir, logfile) if logdir else logfile
    if logdir:
        binary = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, binary),)
    return None
class BaseLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter base that adds the synthetic AUDIT level."""
    def audit(self, msg, *args, **kwargs):
        # logging.AUDIT is registered at module import time (INFO + 1).
        self.log(logging.AUDIT, msg, *args, **kwargs)
    def isEnabledFor(self, level):
        if _PY26:
            # This method was added in python 2.7 (and it does the exact
            # same logic, so we need to do the exact same logic so that
            # python 2.6 has this capability as well).
            return self.logger.isEnabledFor(level)
        else:
            return super(BaseLoggerAdapter, self).isEnabledFor(level)
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers building the real logger until first use."""
    def __init__(self, name='unknown', version='unknown'):
        # Intentionally does NOT call the parent constructor: the real
        # logger is created lazily by the ``logger`` property below.
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version
    @property
    def logger(self):
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
            if six.PY3:
                # In Python 3, the code fails because the 'manager' attribute
                # cannot be found when using a LoggerAdapter as the
                # underlying logger. Work around this issue.
                self._logger.manager = self._logger.logger.manager
        return self._logger
class ContextAdapter(BaseLoggerAdapter):
    """LoggerAdapter that injects request-context data into log records."""
    # Alias so callers can use the short spelling interchangeably.
    warn = logging.LoggerAdapter.warning
    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string
        # Tracks which deprecation messages (and args) were already logged
        # so each is only warned about once.
        self._deprecated_messages_sent = dict()
    @property
    def handlers(self):
        return self.logger.handlers
    def deprecated(self, msg, *args, **kwargs):
        """Call this method when a deprecated feature is used.
        If the system is configured for fatal deprecations then the message
        is logged at the 'critical' level and :class:`DeprecatedConfig` will
        be raised.
        Otherwise, the message will be logged (once) at the 'warn' level.
        :raises: :class:`DeprecatedConfig` if the system is configured for
                 fatal deprecations.
        """
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)
        # Using a list because a tuple with dict can't be stored in a set.
        sent_args = self._deprecated_messages_sent.setdefault(msg, list())
        if args in sent_args:
            # Already logged this message, so don't log it again.
            return
        sent_args.append(args)
        self.warn(stdmsg, *args, **kwargs)
    def process(self, msg, kwargs):
        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
        #                before it can get to the python logging and
        #                possibly cause string encoding trouble
        if not isinstance(msg, six.text_type):
            msg = six.text_type(msg)
        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']
        # Prefer an explicitly-passed context; otherwise fall back to the
        # one stored in thread-local storage.
        context = kwargs.pop('context', None)
        if not context:
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))
        instance = kwargs.pop('instance', None)
        instance_uuid = (extra.get('instance_uuid') or
                         kwargs.pop('instance_uuid', None))
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        elif instance_uuid:
            instance_extra = (CONF.instance_uuid_format
                              % {'uuid': instance_uuid})
        extra['instance'] = instance_extra
        extra.setdefault('user_identity', kwargs.pop('user_identity', None))
        extra['project'] = self.project
        extra['version'] = self.version
        # Nest a copy of everything under 'extra' so JSONFormatter can
        # serialize the full set in one place.
        extra['extra'] = extra.copy()
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Formatter that serializes the full LogRecord as a JSON object."""
    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt
    def formatException(self, ei, strip_newlines=True):
        """Return the traceback as a list of lines (JSON-friendly)."""
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            # Split multi-line traceback entries and drop empty strings.
            lines = [moves.filter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines
    def format(self, record):
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}
        # ContextAdapter.process stores its merged context under 'extra'.
        if hasattr(record, 'extra'):
            message['extra'] = record.extra
        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)
        return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    """Build a sys.excepthook that logs uncaught exceptions as CRITICAL."""
    def logging_excepthook(exc_type, value, tb):
        # Attach the full exc_info so formatters can render the traceback.
        extra = {'exc_info': (exc_type, value, tb)}
        getLogger(product_name).critical(
            "".join(traceback.format_exception_only(exc_type, value)),
            **extra)
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging configuration file cannot be loaded."""

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
def _load_log_config(log_config_append):
    """Apply a logging config file, wrapping parse errors in LogConfigError."""
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except (moves.configparser.Error, KeyError) as exc:
        raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
    """Setup logging."""
    # An explicit logging config file takes precedence over conf options.
    if CONF.log_config_append:
        _load_log_config(CONF.log_config_append)
    else:
        _setup_logging_from_conf(product_name, version)
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string=None,
                 default_log_levels=None):
    """Override logging option defaults before config files are parsed."""
    # Just in case the caller is not setting the
    # default_log_level. This is insurance because
    # we introduced the default_log_level parameter
    # later in a backwards in-compatible change
    if default_log_levels is not None:
        cfg.set_defaults(
            log_opts,
            default_log_levels=default_log_levels)
    if logging_context_format_string is not None:
        cfg.set_defaults(
            log_opts,
            logging_context_format_string=logging_context_format_string)
def _find_facility_from_conf():
    """Map CONF.syslog_log_facility to a SysLogHandler facility code.

    :raises TypeError: listing the valid names when the configured value
        is not a recognized syslog facility.
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)
    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)
    if facility is None:
        # NOTE: dict.keys() returns a view on Python 3 which has no
        # .extend(); materialize it as a list before appending.
        valid_facilities = list(facility_names.keys())
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))
    return facility
class RFCSysLogHandler(logging.handlers.SysLogHandler):
    """SysLogHandler that prefixes messages with APP-NAME (RFC 5424)."""
    def __init__(self, *args, **kwargs):
        self.binary_name = _get_binary_name()
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        #  is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)
    def format(self, record):
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        #  is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        msg = logging.handlers.SysLogHandler.format(self, record)
        # Prepend the program name so syslog receivers see an APP-NAME.
        msg = self.binary_name + ' ' + msg
        return msg
def _setup_logging_from_conf(project, version):
    """(Re)build root logger handlers, formatters and levels from CONF."""
    log_root = getLogger(None).logger
    # Drop any previously-installed handlers so setup is idempotent.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)
    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)
    if CONF.use_stderr:
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)
    elif not logpath:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)
    if CONF.publish_errors:
        try:
            handler = importutils.import_object(
                "sahara.openstack.common.log_handler.PublishErrorsHandler",
                logging.ERROR)
        except ImportError:
            handler = importutils.import_object(
                "oslo.messaging.notify.log_handler.PublishErrorsHandler",
                logging.ERROR)
        log_root.addHandler(handler)
    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(project=project,
                                                  version=version,
                                                  datefmt=datefmt))
    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)
    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        logger = logging.getLogger(mod)
        # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
        # to integer code.
        if sys.version_info < (2, 7):
            level = logging.getLevelName(level_name)
            logger.setLevel(level)
        else:
            logger.setLevel(level_name)
    if CONF.use_syslog:
        try:
            facility = _find_facility_from_conf()
            # TODO(bogdando) use the format provided by RFCSysLogHandler
            #   after existing syslog format deprecation in J
            if CONF.use_syslog_rfc_format:
                syslog = RFCSysLogHandler(facility=facility)
            else:
                syslog = logging.handlers.SysLogHandler(facility=facility)
            log_root.addHandler(syslog)
        except socket.error:
            # BUG FIX: the original adjacent string literals concatenated to
            # "...that syslogis running." -- add the missing space.
            log_root.error('Unable to add syslog handler. Verify that '
                           'syslog is running.')
# Cache of ContextAdapter instances, keyed by logger name (see getLogger).
_loggers = {}
def getLogger(name='unknown', version='unknown'):
    """Return a cached ContextAdapter wrapping the stdlib logger ``name``."""
    if name not in _loggers:
        _loggers[name] = ContextAdapter(logging.getLogger(name),
                                        name,
                                        version)
    return _loggers[name]
def getLazyLogger(name='unknown', version='unknown'):
    """Returns lazy logger.
    Creates a pass-through logger that does not create the real logger
    until it is really needed and delegates all calls to the real logger
    once it is created.
    """
    return LazyAdapter(name, version)
class WritableLogger(object):
    """A thin wrapper that responds to `write` and logs."""

    def __init__(self, logger, level=logging.INFO):
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Strip the trailing newline that file-like writers typically add.
        stripped = msg.rstrip()
        self.logger.log(self.level, stripped)
class ContextFormatter(logging.Formatter):
"""A context.RequestContext aware formatter configured through flags.
The flags used to set format strings are: logging_context_format_string
and logging_default_format_string. You can also specify
logging_debug_format_suffix to append extra formatting if the log level is
debug.
For information about what variables are available for the formatter see:
http://docs.python.org/library/logging.html#formatter
If available, uses the context value stored in TLS - local.store.context
"""
def __init__(self, *args, **kwargs):
"""Initialize ContextFormatter instance
Takes additional keyword arguments which can be used in the message
format string.
:keyword project: project name
:type project: string
:keyword version: project version
:type version: string
"""
self.project = kwargs.pop('project', 'unknown')
self.version = kwargs.pop('version', 'unknown')
logging.Formatter.__init__(self, *args, **kwargs)
def format(self, record):
"""Uses contextstring if request_id is set, otherwise default."""
# NOTE(jecarey): If msg is not unicode, coerce it into unicode
# before it can get to the python logging and
# possibly cause string encoding trouble
if not isinstance(record.msg, six.text_type):
record.msg = six.text_type(record.msg)
# store project info
record.project = self.project
record.version = self.version
# store request info
context = getattr(local.store, 'context', None)
if context:
d = _dictify_context(context)
for k, v in d.items():
setattr(record, k, v)
# NOTE(sdague): default the fancier formatting params
# to an empty string so we don't throw an exception if
# they get used
for key in ('instance', 'color', 'user_identity'):
if key not in record.__dict__:
record.__dict__[key] = ''
if record.__dict__.get('request_id'):
fmt = CONF.logging_context_format_string
else:
fmt = CONF.logging_default_format_string
if (record.levelno == logging.DEBUG and
CONF.logging_debug_format_suffix):
fmt += " " + CONF.logging_debug_format_suffix
if sys.version_info < (3, 2):
self._fmt = fmt
else:
self._style = logging.PercentStyle(fmt)
self._fmt = self._style._fmt
# Cache this on the record, Logger will respect our formatted copy
if record.exc_info:
record.exc_text = self.formatException(record.exc_info, record)
return logging.Formatter.format(self, record)
def formatException(self, exc_info, record=None):
    """Format exception output with CONF.logging_exception_prefix.

    :param exc_info: standard ``(type, value, traceback)`` triple
    :param record: optional LogRecord whose attributes are interpolated
        into CONF.logging_exception_prefix on every traceback line; when
        omitted, fall back to stock logging.Formatter behavior.
    :returns: the prefixed traceback as a single newline-joined string
    """
    if not record:
        return logging.Formatter.formatException(self, exc_info)
    stringbuffer = moves.StringIO()
    traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                              None, stringbuffer)
    lines = stringbuffer.getvalue().split('\n')
    stringbuffer.close()
    # Only compute asctime when the prefix actually references it.
    if CONF.logging_exception_prefix.find('%(asctime)') != -1:
        record.asctime = self.formatTime(record, self.datefmt)
    # The prefix does not depend on the individual traceback line, so
    # interpolate it once instead of once per line (the original code
    # recomputed this invariant inside the loop).
    pl = CONF.logging_exception_prefix % record.__dict__
    return '\n'.join('%s%s' % (pl, line) for line in lines)
class ColorHandler(logging.StreamHandler):
    """StreamHandler that injects an ANSI color escape for the record level.

    The escape sequence is stored as the ``color`` attribute on the record
    so the configured format string can reference ``%(color)s``.
    """
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        # Fall back to no color for levels missing from the map instead of
        # raising KeyError; ContextFormatter likewise defaults ``color``
        # to an empty string.
        record.color = self.LEVEL_COLORS.get(record.levelno, '')
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised when a fatally deprecated config option is used."""
    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        # Pass the subclass (not Exception) to super() so the normal MRO
        # is honoured; the original super(Exception, self) skipped
        # Exception's own __init__.
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))
| |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
"""
This module contains classes to wrap Python VTK to make nice molecular plots.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Nov 27, 2011"
import os
import itertools
import math
import subprocess
import time
import numpy as np
try:
import vtk
from vtk import vtkInteractorStyleTrackballCamera
except ImportError:
# VTK not present. The Camera is to set object to avoid errors in unittest.
vtk = None
vtkInteractorStyleTrackballCamera = object
from monty.serialization import loadfn
from monty.dev import requires
from pymatgen.util.coord_utils import in_coord_list
from pymatgen.core.periodic_table import Specie
from pymatgen.core.structure import Structure
from pymatgen.core.sites import PeriodicSite
# Directory containing this module; used to locate bundled data files.
module_dir = os.path.dirname(os.path.abspath(__file__))
# Element -> RGB color schemes (e.g. "VESTA") loaded from the bundled YAML.
EL_COLORS = loadfn(os.path.join(module_dir, "ElementColorSchemes.yaml"))
class StructureVis(object):
    """
    Provides Structure object visualization using VTK.
    """

    @requires(vtk, "Visualization requires the installation of VTK with "
                   "Python bindings.")
    def __init__(self, element_color_mapping=None, show_unit_cell=True,
                 show_bonds=False, show_polyhedron=True,
                 poly_radii_tol_factor=0.5, excluded_bonding_elements=None):
        """
        Constructs a Structure Visualization.

        Args:
            element_color_mapping: Optional color mapping for the elements,
                as a dict of {symbol: rgb tuple}. For example, {"Fe": (255,
                123,0), ....} If None is specified, a default based on the
                VESTA color scheme is used.
            show_unit_cell: Set to False to not show the unit cell
                boundaries. Defaults to True.
            show_bonds: Set to True to show bonds. Defaults to False.
            show_polyhedron: Set to False to not show polyhedrons. Defaults
                to True.
            poly_radii_tol_factor: The polyhedron and bonding code uses the
                ionic radii of the elements or species to determine if two
                atoms are bonded. This specifies a tolerance scaling factor
                such that atoms which are (1 + poly_radii_tol_factor) * sum
                of ionic radii apart are still considered as bonded.
            excluded_bonding_elements: List of atom types to exclude from
                bonding determination. Defaults to an empty list. Useful
                when trying to visualize a certain atom type in the
                framework (e.g., Li in a Li-ion battery cathode material).

        Useful keyboard shortcuts implemented.
            h : Show help
            A/a : Increase/decrease cell by one unit vector in a-direction
            B/b : Increase/decrease cell by one unit vector in b-direction
            C/c : Increase/decrease cell by one unit vector in c-direction
            # : Toggle showing of polyhedrons
            - : Toggle showing of bonds
            [ : Decrease poly_radii_tol_factor by 0.05
            ] : Increase poly_radii_tol_factor by 0.05
            r : Reset camera direction
            o : Orthogonalize structure
            Up/Down : Rotate view along Up direction by 90 clock/anticlockwise
            Left/right : Rotate view along camera direction by 90
            clock/anticlockwise
        """
        # create a rendering window and renderer
        self.ren = vtk.vtkRenderer()
        self.ren_win = vtk.vtkRenderWindow()
        self.ren_win.AddRenderer(self.ren)
        self.ren.SetBackground(1, 1, 1)
        self.title = "Structure Visualizer"
        # create a renderwindowinteractor
        self.iren = vtk.vtkRenderWindowInteractor()
        self.iren.SetRenderWindow(self.ren_win)
        # mapper -> list of sites; used by the pickers to report what was
        # clicked.
        self.mapper_map = {}
        self.structure = None
        if element_color_mapping:
            self.el_color_mapping = element_color_mapping
        else:
            self.el_color_mapping = EL_COLORS["VESTA"]
        self.show_unit_cell = show_unit_cell
        self.show_bonds = show_bonds
        self.show_polyhedron = show_polyhedron
        self.poly_radii_tol_factor = poly_radii_tol_factor
        self.excluded_bonding_elements = excluded_bonding_elements if \
            excluded_bonding_elements else []
        self.show_help = True
        # Diagonal supercell scaling matrix; adjusted by the A/a, B/b and
        # C/c keyboard shortcuts.
        self.supercell = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
        self.redraw()
        style = StructureInteractorStyle(self)
        self.iren.SetInteractorStyle(style)
        # Give event callbacks a way back to this visualizer instance.
        self.ren.parent = self
def rotate_view(self, axis_ind=0, angle=0):
    """
    Rotate the camera view.

    Args:
        axis_ind: Index of axis to rotate. Defaults to 0, i.e., a-axis.
        angle: Angle to rotate by. Defaults to 0.
    """
    cam = self.ren.GetActiveCamera()
    # Map the axis index onto the matching camera rotation: 0 -> Roll,
    # 1 -> Azimuth, anything else -> Pitch.
    if axis_ind == 0:
        rotate = cam.Roll
    elif axis_ind == 1:
        rotate = cam.Azimuth
    else:
        rotate = cam.Pitch
    rotate(angle)
    self.ren_win.Render()
def write_image(self, filename="image.png", magnification=1,
                image_format="png"):
    """
    Save render window to an image.

    Args:
        filename: Filename to save to. Defaults to image.png.
        magnification: Magnification. Use it to render high res images.
        image_format: Choose between jpeg, png. Png is the default.
    """
    large_render = vtk.vtkRenderLargeImage()
    large_render.SetInput(self.ren)
    large_render.SetMagnification(magnification)
    # Anything other than "jpeg" falls back to PNG output.
    if image_format == "jpeg":
        writer = vtk.vtkJPEGWriter()
        writer.SetQuality(80)
    else:
        writer = vtk.vtkPNGWriter()
    writer.SetFileName(filename)
    writer.SetInputConnection(large_render.GetOutputPort())
    self.ren_win.Render()
    writer.Write()
    del large_render
def redraw(self, reset_camera=False):
    """
    Redraw the render window.

    Args:
        reset_camera: Set to True to reset the camera to a
            pre-determined default for each structure. Defaults to False.
    """
    # Drop all actors and rebuild the picker and help-text machinery so
    # they reflect the current scene.
    self.ren.RemoveAllViewProps()
    self.picker = None
    self.add_picker_fixed()
    self.helptxt_mapper = vtk.vtkTextMapper()
    tprops = self.helptxt_mapper.GetTextProperty()
    tprops.SetFontSize(14)
    tprops.SetFontFamilyToTimes()
    tprops.SetColor(0, 0, 0)
    if self.structure is not None:
        # Re-adding the current structure regenerates all of its actors.
        self.set_structure(self.structure, reset_camera)
    self.ren_win.Render()
def orthongonalize_structure(self):
    """Sanitize (orthogonalize) the current structure and re-render it."""
    # NOTE: the method name is misspelled ("orthongonalize") but is kept
    # as-is for backward compatibility with existing callers and the 'o'
    # keyboard shortcut.
    if self.structure is not None:
        self.set_structure(self.structure.copy(sanitize=True))
    self.ren_win.Render()
def display_help(self):
    """
    Display the help for various keyboard shortcuts.
    """
    helptxt = ["h : Toggle help",
               "A/a, B/b or C/c : Increase/decrease cell by one a,"
               " b or c unit vector", "# : Toggle showing of polyhedrons",
               "-: Toggle showing of bonds", "r : Reset camera direction",
               "[/]: Decrease or increase poly_radii_tol_factor "
               "by 0.05. Value = " + str(self.poly_radii_tol_factor),
               "Up/Down: Rotate view along Up direction by 90 "
               "clockwise/anticlockwise",
               "Left/right: Rotate view along camera direction by "
               "90 clockwise/anticlockwise", "s: Save view to image.png",
               "o: Orthogonalize structure"]
    self.helptxt_mapper.SetInput("\n".join(helptxt))
    # Anchor the help text at the lower-left corner of the window.
    self.helptxt_actor.SetPosition(10, 10)
    self.helptxt_actor.VisibilityOn()
def set_structure(self, structure, reset_camera=True, to_unit_cell=True):
    """
    Add a structure to the visualizer.

    Args:
        structure: structure to visualize
        reset_camera: Set to True to reset the camera to a default
            determined based on the structure.
        to_unit_cell: Whether or not to fall back sites into the unit cell.
    """
    self.ren.RemoveAllViewProps()
    # Objects without a lattice (e.g. molecules) are rendered as-is.
    has_lattice = hasattr(structure, "lattice")
    if has_lattice:
        # Render a supercell copy; the caller's structure is untouched.
        s = Structure.from_sites(structure, to_unit_cell=to_unit_cell)
        s.make_supercell(self.supercell, to_unit_cell=to_unit_cell)
    else:
        s = structure
    inc_coords = []
    for site in s:
        self.add_site(site)
        inc_coords.append(site.coords)
    count = 0
    labels = ["a", "b", "c"]
    colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
    if has_lattice:
        matrix = s.lattice.matrix
    if self.show_unit_cell and has_lattice:
        # Draw the origin marker, the three labelled lattice vectors, and
        # the remaining cell edges (vector sums of pairs and triples).
        self.add_text([0, 0, 0], "o")
        for vec in matrix:
            self.add_line((0, 0, 0), vec, colors[count])
            self.add_text(vec, labels[count], colors[count])
            count += 1
        for (vec1, vec2) in itertools.permutations(matrix, 2):
            self.add_line(vec1, vec1 + vec2)
        for (vec1, vec2, vec3) in itertools.permutations(matrix, 3):
            self.add_line(vec1 + vec2, vec1 + vec2 + vec3)
    if self.show_bonds or self.show_polyhedron:
        # The most electronegative element is treated as the anion; bonds
        # and polyhedra run from each non-anion site to nearby anions.
        elements = sorted(s.composition.elements, key=lambda a: a.X)
        anion = elements[-1]

        def contains_anion(site):
            # True if any specie on the site matches the anion's symbol.
            for sp in site.species_and_occu.keys():
                if sp.symbol == anion.symbol:
                    return True
            return False

        anion_radius = anion.average_ionic_radius
        for site in s:
            exclude = False
            max_radius = 0
            color = np.array([0, 0, 0])
            for sp, occu in site.species_and_occu.items():
                if sp.symbol in self.excluded_bonding_elements \
                        or sp == anion:
                    exclude = True
                    break
                max_radius = max(max_radius, sp.average_ionic_radius)
                # Occupancy-weighted blend of the species' colors.
                color = color + \
                    occu * np.array(self.el_color_mapping.get(sp.symbol,
                                                              [0, 0, 0]))
            if not exclude:
                # Bonding cutoff scaled by the tolerance factor.
                max_radius = (1 + self.poly_radii_tol_factor) * \
                    (max_radius + anion_radius)
                # NOTE(review): neighbors are looked up on the *input*
                # structure while ``site`` comes from the supercell copy
                # -- confirm this is intended for supercells other than
                # the identity.
                nn = structure.get_neighbors(site, float(max_radius))
                nn_sites = []
                for nnsite, dist in nn:
                    if contains_anion(nnsite):
                        nn_sites.append(nnsite)
                        if not in_coord_list(inc_coords, nnsite.coords):
                            self.add_site(nnsite)
                if self.show_bonds:
                    self.add_bonds(nn_sites, site)
                if self.show_polyhedron:
                    color = [i / 255 for i in color]
                    self.add_polyhedron(nn_sites, site, color)
    if self.show_help:
        self.helptxt_actor = vtk.vtkActor2D()
        self.helptxt_actor.VisibilityOn()
        self.helptxt_actor.SetMapper(self.helptxt_mapper)
        self.ren.AddActor(self.helptxt_actor)
        self.display_help()
    camera = self.ren.GetActiveCamera()
    if reset_camera:
        if has_lattice:
            # Adjust the camera for best viewing
            lengths = s.lattice.abc
            pos = (matrix[1] + matrix[2]) * 0.5 + \
                matrix[0] * max(lengths) / lengths[0] * 3.5
            camera.SetPosition(pos)
            camera.SetViewUp(matrix[2])
            camera.SetFocalPoint((matrix[0] + matrix[1] + matrix[2]) * 0.5)
        else:
            # Molecules: look at the center of mass from beyond the
            # farthest site.
            origin = s.center_of_mass
            max_site = max(
                s, key=lambda site: site.distance_from_point(origin))
            camera.SetPosition(origin + 5 * (max_site.coords - origin))
            camera.SetFocalPoint(s.center_of_mass)
    # Store the *original* structure (not the supercell) for redraws.
    self.structure = structure
    self.title = s.composition.formula
def zoom(self, factor):
    """
    Zoom the camera view by a factor.

    Args:
        factor: Zoom factor passed straight to vtkCamera.Zoom.
    """
    active_cam = self.ren.GetActiveCamera()
    active_cam.Zoom(factor)
    self.ren_win.Render()
def show(self):
    """
    Display the visualizer.
    """
    self.iren.Initialize()
    self.ren_win.SetSize(800, 800)
    self.ren_win.SetWindowName(self.title)
    self.ren_win.Render()
    # Start the interactor event loop; blocks until the window is closed.
    self.iren.Start()
def add_site(self, site):
    """
    Add a site to the render window. The site is displayed as a sphere, the
    color of which is determined based on the element. Partially occupied
    sites are displayed as a single element color, though the site info
    still shows the partial occupancy.

    Args:
        site: Site to add.
    """
    start_angle = 0
    radius = 0
    total_occu = 0
    # Occupancy-weighted average radius, preferring the ionic radius for
    # Specie instances that define one.
    for specie, occu in site.species_and_occu.items():
        radius += occu * (specie.ionic_radius
                          if isinstance(specie, Specie)
                          and specie.ionic_radius
                          else specie.average_ionic_radius)
        total_occu += occu
    vis_radius = 0.2 + 0.002 * radius
    # Each specie gets an angular wedge of the sphere proportional to its
    # occupancy.
    for specie, occu in site.species_and_occu.items():
        if not specie:
            color = (1, 1, 1)
        elif specie.symbol in self.el_color_mapping:
            color = [i / 255 for i in self.el_color_mapping[specie.symbol]]
        else:
            # BUG FIX: the original left ``color`` unset here, producing a
            # NameError (or silently reusing the previous specie's color)
            # for elements absent from the mapping. Default to black.
            color = (0, 0, 0)
        mapper = self.add_partial_sphere(site.coords, vis_radius, color,
                                         start_angle,
                                         start_angle + 360 * occu)
        self.mapper_map[mapper] = [site]
        start_angle += 360 * occu
    if total_occu < 1:
        # Render the vacant fraction of a partially occupied site in white.
        mapper = self.add_partial_sphere(site.coords, vis_radius, (1, 1, 1),
                                         start_angle,
                                         start_angle + 360 * (1 - total_occu))
        self.mapper_map[mapper] = [site]
def add_partial_sphere(self, coords, radius, color, start=0, end=360,
                       opacity=1.0):
    """
    Render a (possibly partial) sphere and return its mapper.

    Args:
        coords: Cartesian center of the sphere.
        radius: Sphere radius.
        color: RGB color for the sphere.
        start: Starting theta angle in degrees. Defaults to 0.
        end: Ending theta angle in degrees. Defaults to 360.
        opacity: Actor opacity. Defaults to 1.0.

    Returns:
        The vtkPolyDataMapper for the sphere, suitable for registering in
        ``mapper_map`` for pick lookups.
    """
    src = vtk.vtkSphereSource()
    src.SetCenter(coords)
    src.SetRadius(radius)
    # 18 x 18 tessellation is a reasonable quality/speed tradeoff.
    src.SetThetaResolution(18)
    src.SetPhiResolution(18)
    src.SetStartTheta(start)
    src.SetEndTheta(end)
    sphere_mapper = vtk.vtkPolyDataMapper()
    sphere_mapper.SetInputConnection(src.GetOutputPort())
    sphere_actor = vtk.vtkActor()
    sphere_actor.SetMapper(sphere_mapper)
    props = sphere_actor.GetProperty()
    props.SetColor(color)
    props.SetOpacity(opacity)
    self.ren.AddActor(sphere_actor)
    return sphere_mapper
def add_text(self, coords, text, color=(0, 0, 0)):
    """
    Add text at a coordinate.

    Args:
        coords: Coordinates to add text at.
        text: Text to place.
        color: Color for text as RGB. Defaults to black.
    """
    txt_source = vtk.vtkVectorText()
    txt_source.SetText(text)
    txt_mapper = vtk.vtkPolyDataMapper()
    txt_mapper.SetInputConnection(txt_source.GetOutputPort())
    # A vtkFollower always faces the active camera, so the label stays
    # readable as the view rotates.
    txt_actor = vtk.vtkFollower()
    txt_actor.SetMapper(txt_mapper)
    txt_actor.GetProperty().SetColor(color)
    txt_actor.SetPosition(coords)
    txt_actor.SetScale(0.5)
    self.ren.AddActor(txt_actor)
    txt_actor.SetCamera(self.ren.GetActiveCamera())
def add_line(self, start, end, color=(0.5, 0.5, 0.5), width=1):
    """
    Adds a line.

    Args:
        start: Starting coordinates for line.
        end: Ending coordinates for line.
        color: Color for the line as RGB. Defaults to grey.
        width: Width of line. Defaults to 1.
    """
    source = vtk.vtkLineSource()
    source.SetPoint1(start)
    source.SetPoint2(end)
    vertexIDs = vtk.vtkStringArray()
    vertexIDs.SetNumberOfComponents(1)
    vertexIDs.SetName("VertexIDs")
    # Set the vertex labels
    # NOTE(review): the endpoints are always labelled "a" and "b"
    # regardless of which vector this line represents, and the labels do
    # not appear to be consumed anywhere in this module.
    vertexIDs.InsertNextValue("a")
    vertexIDs.InsertNextValue("b")
    source.GetOutput().GetPointData().AddArray(vertexIDs)
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(source.GetOutputPort())
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetColor(color)
    actor.GetProperty().SetLineWidth(width)
    self.ren.AddActor(actor)
def add_polyhedron(self, neighbors, center, color, opacity=1.0,
                   draw_edges=False, edges_color=(0.0, 0.0, 0.0),
                   edges_linewidth=2):
    """
    Adds a polyhedron (the convex hull of the neighbor sites).

    Args:
        neighbors: Neighbors of the polyhedron (the vertices).
        center: The atom in the center of the polyhedron.
        color: RGB color for the polyhedron, or 'element' to use the color
            of the majority specie of ``center``.
        opacity: Opacity of the polyhedron.
        draw_edges: If set to True, a line will be drawn at each edge.
        edges_color: Color of the line for the edges. (BUG FIX: was a
            mutable list default shared across calls; now a tuple.)
        edges_linewidth: Width of the line drawn for the edges.
    """
    points = vtk.vtkPoints()
    conv = vtk.vtkConvexPointSet()
    for i in range(len(neighbors)):
        x, y, z = neighbors[i].coords
        points.InsertPoint(i, x, y, z)
        conv.GetPointIds().InsertId(i, i)
    grid = vtk.vtkUnstructuredGrid()
    grid.Allocate(1, 1)
    grid.InsertNextCell(conv.GetCellType(), conv.GetPointIds())
    grid.SetPoints(points)
    dsm = vtk.vtkDataSetMapper()
    # Register center + vertices so picking reports all member sites.
    polysites = [center]
    polysites.extend(neighbors)
    self.mapper_map[dsm] = polysites
    # VTK 6 removed the pipeline-style SetInput in favor of SetInputData.
    if vtk.VTK_MAJOR_VERSION <= 5:
        dsm.SetInputConnection(grid.GetProducerPort())
    else:
        dsm.SetInputData(grid)
    ac = vtk.vtkActor()
    ac.SetMapper(dsm)
    ac.GetProperty().SetOpacity(opacity)
    if color == 'element':
        # If partial occupations are involved, the color of the specie with
        # the highest occupation is used
        myoccu = 0.0
        for specie, occu in center.species_and_occu.items():
            if occu > myoccu:
                myspecie = specie
                myoccu = occu
        color = [i / 255 for i in self.el_color_mapping[myspecie.symbol]]
    ac.GetProperty().SetColor(color)
    if draw_edges:
        ac.GetProperty().SetEdgeColor(edges_color)
        ac.GetProperty().SetLineWidth(edges_linewidth)
        ac.GetProperty().EdgeVisibilityOn()
    self.ren.AddActor(ac)
def add_triangle(self, neighbors, color, center=None, opacity=0.4,
                 draw_edges=False, edges_color=(0.0, 0.0, 0.0),
                 edges_linewidth=2):
    """
    Adds a triangular surface between three atoms.

    Args:
        neighbors: Atoms between which a triangle will be drawn.
        color: RGB color for the triangle, or 'element' to use the color of
            the majority specie of ``center``.
        center: The "central atom" of the triangle.
        opacity: Opacity of the triangle.
        draw_edges: If set to True, a line will be drawn at each edge.
        edges_color: Color of the line for the edges. (BUG FIX: was a
            mutable list default shared across calls; now a tuple.)
        edges_linewidth: Width of the line drawn for the edges.

    Raises:
        ValueError: if color == 'element' but no center atom is given.
    """
    points = vtk.vtkPoints()
    triangle = vtk.vtkTriangle()
    for ii in range(3):
        points.InsertNextPoint(neighbors[ii].x, neighbors[ii].y,
                               neighbors[ii].z)
        triangle.GetPointIds().SetId(ii, ii)
    triangles = vtk.vtkCellArray()
    triangles.InsertNextCell(triangle)
    # polydata object
    trianglePolyData = vtk.vtkPolyData()
    trianglePolyData.SetPoints(points)
    trianglePolyData.SetPolys(triangles)
    # mapper. BUG FIX: the original called mapper.SetInput unconditionally,
    # which was removed in VTK 6; guard on the version like add_faces does.
    mapper = vtk.vtkPolyDataMapper()
    if vtk.VTK_MAJOR_VERSION <= 5:
        mapper.SetInput(trianglePolyData)
    else:
        mapper.SetInputData(trianglePolyData)
    ac = vtk.vtkActor()
    ac.SetMapper(mapper)
    ac.GetProperty().SetOpacity(opacity)
    if color == 'element':
        if center is None:
            raise ValueError(
                'Color should be chosen according to the central atom, '
                'and central atom is not provided')
        # If partial occupations are involved, the color of the specie with
        # the highest occupation is used
        myoccu = 0.0
        for specie, occu in center.species_and_occu.items():
            if occu > myoccu:
                myspecie = specie
                myoccu = occu
        color = [i / 255 for i in self.el_color_mapping[myspecie.symbol]]
    ac.GetProperty().SetColor(color)
    if draw_edges:
        ac.GetProperty().SetEdgeColor(edges_color)
        ac.GetProperty().SetLineWidth(edges_linewidth)
        ac.GetProperty().EdgeVisibilityOn()
    self.ren.AddActor(ac)
def add_faces(self, faces, color, opacity=0.35):
    """
    Adds a set of filled planar faces to the scene.

    Triangular faces are rendered directly; faces with more than three
    points are fan-triangulated around their centroid.

    Args:
        faces: List of faces, each a sequence of 3 or more Cartesian
            coordinates.
        color: RGB color for the faces.
        opacity: Opacity of the faces. Defaults to 0.35.

    Raises:
        ValueError: if a face has fewer than 3 points.
    """
    # NOTE: an unreachable ``elif False and len(face) == 4`` ruled-surface
    # branch from the original implementation was removed as dead code.

    def _add_triangle_actor(poly_data):
        # Shared mapper/actor boilerplate for a single triangle.
        mapper = vtk.vtkPolyDataMapper()
        if vtk.VTK_MAJOR_VERSION <= 5:
            mapper.SetInputConnection(poly_data.GetProducerPort())
        else:
            mapper.SetInputData(poly_data)
        ac = vtk.vtkActor()
        ac.SetMapper(mapper)
        ac.GetProperty().SetOpacity(opacity)
        ac.GetProperty().SetColor(color)
        self.ren.AddActor(ac)

    for face in faces:
        if len(face) == 3:
            points = vtk.vtkPoints()
            triangle = vtk.vtkTriangle()
            for ii in range(3):
                points.InsertNextPoint(face[ii][0], face[ii][1],
                                       face[ii][2])
                triangle.GetPointIds().SetId(ii, ii)
            triangles = vtk.vtkCellArray()
            triangles.InsertNextCell(triangle)
            trianglePolyData = vtk.vtkPolyData()
            trianglePolyData.SetPoints(points)
            trianglePolyData.SetPolys(triangles)
            _add_triangle_actor(trianglePolyData)
        elif len(face) > 3:
            # Fan-triangulate around the centroid.
            # BUG FIX: ``np.float`` was removed in NumPy 1.20; use the
            # builtin float instead.
            center = np.zeros(3, float)
            for site in face:
                center += site
            center /= float(len(face))
            for ii in range(len(face)):
                points = vtk.vtkPoints()
                triangle = vtk.vtkTriangle()
                points.InsertNextPoint(face[ii][0], face[ii][1],
                                       face[ii][2])
                ii2 = np.mod(ii + 1, len(face))
                points.InsertNextPoint(face[ii2][0], face[ii2][1],
                                       face[ii2][2])
                points.InsertNextPoint(center[0], center[1], center[2])
                # Renamed from the original's shadowed ``ii`` loop
                # variable for clarity.
                for jj in range(3):
                    triangle.GetPointIds().SetId(jj, jj)
                triangles = vtk.vtkCellArray()
                triangles.InsertNextCell(triangle)
                trianglePolyData = vtk.vtkPolyData()
                trianglePolyData.SetPoints(points)
                trianglePolyData.SetPolys(triangles)
                _add_triangle_actor(trianglePolyData)
        else:
            raise ValueError("Number of points for a face should be >= 3")
def add_edges(self, edges, type='line', linewidth=2,
              color=(0.0, 0.0, 0.0)):
    """
    Adds a set of line segments to the scene.

    Args:
        edges: Sequence of (start, end) Cartesian coordinate pairs.
        type: Unused; kept for backward compatibility (note it shadows
            the ``type`` builtin inside this method).
        linewidth: Width of the lines. Defaults to 2.
        color: RGB color for the lines. Defaults to black. (BUG FIX: was
            a mutable list default shared across calls; now a tuple.)
    """
    points = vtk.vtkPoints()
    lines = vtk.vtkCellArray()
    # Two points and one 2-point cell per edge.
    for iedge, edge in enumerate(edges):
        points.InsertPoint(2 * iedge, edge[0])
        points.InsertPoint(2 * iedge + 1, edge[1])
        lines.InsertNextCell(2)
        lines.InsertCellPoint(2 * iedge)
        lines.InsertCellPoint(2 * iedge + 1)
    polydata = vtk.vtkPolyData()
    polydata.SetPoints(points)
    polydata.SetLines(lines)
    mapper = vtk.vtkPolyDataMapper()
    if vtk.VTK_MAJOR_VERSION <= 5:
        mapper.SetInputConnection(polydata.GetProducerPort())
    else:
        mapper.SetInputData(polydata)
    ac = vtk.vtkActor()
    ac.SetMapper(mapper)
    ac.GetProperty().SetColor(color)
    ac.GetProperty().SetLineWidth(linewidth)
    self.ren.AddActor(ac)
def add_bonds(self, neighbors, center, color=None, opacity=None,
              radius=0.1):
    """
    Adds bonds for a site.

    Args:
        neighbors: Neighbors of the site.
        center: The site in the center for all bonds.
        color: Color of the tubes representing the bonds.
        opacity: Opacity of the tubes representing the bonds.
        radius: Radius of tubes representing the bonds.
    """
    points = vtk.vtkPoints()
    # Point 0 is the central site; points 1..n are the neighbors, and each
    # cell is a 2-point line from the center to one neighbor.
    points.InsertPoint(0, center.x, center.y, center.z)
    n = len(neighbors)
    lines = vtk.vtkCellArray()
    for i in range(n):
        points.InsertPoint(i + 1, neighbors[i].coords)
        lines.InsertNextCell(2)
        lines.InsertCellPoint(0)
        lines.InsertCellPoint(i + 1)
    pd = vtk.vtkPolyData()
    pd.SetPoints(points)
    pd.SetLines(lines)
    # Render the line set as tubes of the given radius.
    tube = vtk.vtkTubeFilter()
    if vtk.VTK_MAJOR_VERSION <= 5:
        tube.SetInputConnection(pd.GetProducerPort())
    else:
        tube.SetInputData(pd)
    tube.SetRadius(radius)
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(tube.GetOutputPort())
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    if opacity is not None:
        actor.GetProperty().SetOpacity(opacity)
    if color is not None:
        actor.GetProperty().SetColor(color)
    self.ren.AddActor(actor)
def add_picker_fixed(self):
    """Install a cell picker that reports picked site info in the text
    overlay at the lower-left corner of the window."""
    # Create a cell picker.
    picker = vtk.vtkCellPicker()

    # Create a Python function to create the text for the text mapper used
    # to display the results of picking.
    def annotate_pick(obj, event):
        if picker.GetCellId() < 0 and not self.show_help:
            # Clicked empty space: hide the overlay.
            self.helptxt_actor.VisibilityOff()
        else:
            mapper = picker.GetMapper()
            if mapper in self.mapper_map:
                output = []
                for site in self.mapper_map[mapper]:
                    # One line per site:
                    # "<species> - <frac coords> [<cartesian coords>]"
                    row = ["{} - ".format(site.species_string),
                           ", ".join(["{:.3f}".format(c)
                                      for c in site.frac_coords]),
                           "[" + ", ".join(["{:.3f}".format(c)
                                            for c in site.coords]) +
                           "]"]
                    output.append("".join(row))
                self.helptxt_mapper.SetInput("\n".join(output))
                self.helptxt_actor.SetPosition(10, 10)
                self.helptxt_actor.VisibilityOn()
                # Suppress the help overlay while pick info is shown.
                self.show_help = False

    self.picker = picker
    picker.AddObserver("EndPickEvent", annotate_pick)
    self.iren.SetPicker(picker)
def add_picker(self):
    """Legacy picker that shows picked site info as 3D follower text at
    the pick position (redraw installs add_picker_fixed instead)."""
    # Create a cell picker.
    picker = vtk.vtkCellPicker()
    # Create a Python function to create the text for the text mapper used
    # to display the results of picking.
    source = vtk.vtkVectorText()
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(source.GetOutputPort())
    follower = vtk.vtkFollower()
    follower.SetMapper(mapper)
    follower.GetProperty().SetColor((0, 0, 0))
    follower.SetScale(0.2)
    self.ren.AddActor(follower)
    follower.SetCamera(self.ren.GetActiveCamera())
    follower.VisibilityOff()

    def annotate_pick(obj, event):
        if picker.GetCellId() < 0:
            follower.VisibilityOff()
        else:
            pick_pos = picker.GetPickPosition()
            mapper = picker.GetMapper()
            if mapper in self.mapper_map:
                # NOTE(review): mapper_map values are *lists* of sites
                # (see add_site / add_polyhedron), yet this handler treats
                # the value as a single site -- likely stale code; verify
                # before relying on this picker.
                site = self.mapper_map[mapper]
                output = [site.species_string, "Frac. coords: " +
                          " ".join(["{:.4f}".format(c)
                                    for c in
                                    site.frac_coords])]
                source.SetText("\n".join(output))
                follower.SetPosition(pick_pos)
                follower.VisibilityOn()

    picker.AddObserver("EndPickEvent", annotate_pick)
    self.picker = picker
    self.iren.SetPicker(picker)
class StructureInteractorStyle(vtkInteractorStyleTrackballCamera):
    """
    A custom interactor style for visualizing structures.
    """

    def __init__(self, parent):
        # ``parent`` is the StructureVis owning this style; callbacks can
        # also reach it through renderer.parent.
        self.parent = parent
        self.AddObserver("LeftButtonPressEvent", self.leftButtonPressEvent)
        self.AddObserver("MouseMoveEvent", self.mouseMoveEvent)
        self.AddObserver("LeftButtonReleaseEvent", self.leftButtonReleaseEvent)
        self.AddObserver("KeyPressEvent", self.keyPressEvent)

    def leftButtonPressEvent(self, obj, event):
        # Track whether the mouse moves between press and release so a
        # motionless click can be treated as a pick.
        self.mouse_motion = 0
        self.OnLeftButtonDown()
        return

    def mouseMoveEvent(self, obj, event):
        self.mouse_motion = 1
        self.OnMouseMove()
        return

    def leftButtonReleaseEvent(self, obj, event):
        ren = obj.GetCurrentRenderer()
        iren = ren.GetRenderWindow().GetInteractor()
        if self.mouse_motion == 0:
            # Pure click (no drag): run the picker at the click position.
            pos = iren.GetEventPosition()
            iren.GetPicker().Pick(pos[0], pos[1], 0, ren)
        self.OnLeftButtonUp()
        return

    def keyPressEvent(self, obj, event):
        # Dispatch the keyboard shortcuts documented in
        # StructureVis.display_help.
        parent = obj.GetCurrentRenderer().parent
        sym = parent.iren.GetKeySym()
        if sym in "ABCabc":
            # Upper case grows, lower case shrinks the supercell along the
            # corresponding lattice direction (never below 1).
            if sym == "A":
                parent.supercell[0][0] += 1
            elif sym == "B":
                parent.supercell[1][1] += 1
            elif sym == "C":
                parent.supercell[2][2] += 1
            elif sym == "a":
                parent.supercell[0][0] = max(parent.supercell[0][0] - 1, 1)
            elif sym == "b":
                parent.supercell[1][1] = max(parent.supercell[1][1] - 1, 1)
            elif sym == "c":
                parent.supercell[2][2] = max(parent.supercell[2][2] - 1, 1)
            parent.redraw()
        elif sym == "numbersign":
            parent.show_polyhedron = not parent.show_polyhedron
            parent.redraw()
        elif sym == "minus":
            parent.show_bonds = not parent.show_bonds
            parent.redraw()
        elif sym == "bracketleft":
            # Precedence note: this subtracts (0.05 if factor > 0 else 0),
            # i.e. the conditional picks the decrement amount.
            parent.poly_radii_tol_factor -= 0.05 \
                if parent.poly_radii_tol_factor > 0 else 0
            parent.redraw()
        elif sym == "bracketright":
            parent.poly_radii_tol_factor += 0.05
            parent.redraw()
        elif sym == "h":
            parent.show_help = not parent.show_help
            parent.redraw()
        elif sym == "r":
            # Redraw with camera reset.
            parent.redraw(True)
        elif sym == "s":
            parent.write_image("image.png")
        elif sym == "Up":
            parent.rotate_view(1, 90)
        elif sym == "Down":
            parent.rotate_view(1, -90)
        elif sym == "Left":
            parent.rotate_view(0, -90)
        elif sym == "Right":
            parent.rotate_view(0, 90)
        elif sym == "o":
            parent.orthongonalize_structure()
            parent.redraw()
        self.OnKeyPress()
def make_movie(structures, output_filename="movie.mp4", zoom=1.0, fps=20,
               bitrate="10000k", quality=1, **kwargs):
    """
    Generate a movie from a sequence of structures using vtk and ffmpeg.

    Args:
        structures ([Structure]): sequence of structures
        output_filename (str): filename for structure output. defaults to
            movie.mp4
        zoom (float): A zoom to be applied to the visualizer. Defaults to 1.0.
        fps (int): Frames per second for the movie. Defaults to 20.
        bitrate (str): Video bitrate. Defaults to "10000k" (fairly high
            quality).
        quality (int): A quality scale. Defaults to 1.
        \\*\\*kwargs: Any kwargs supported by StructureVis to modify the images
            generated.
    """
    vis = StructureVis(**kwargs)
    vis.show_help = False
    vis.redraw()
    vis.zoom(zoom)
    # Number of digits needed to zero-pad the frame index.
    # NOTE(review): math.log10 raises for an empty ``structures`` list --
    # confirm callers never pass one.
    sigfig = int(math.floor(math.log10(len(structures))) + 1)
    filename = "image{0:0" + str(sigfig) + "d}.png"
    for i, s in enumerate(structures):
        vis.set_structure(s)
        vis.write_image(filename.format(i), 3)
    # ffmpeg consumes the frames via a printf-style pattern.
    filename = "image%0" + str(sigfig) + "d.png"
    args = ["ffmpeg", "-y", "-i", filename,
            "-q:v", str(quality), "-r", str(fps), "-b:v", str(bitrate),
            output_filename]
    # NOTE(review): Popen returns immediately -- encoding may still be in
    # progress when this function returns, and failures are not reported.
    subprocess.Popen(args)
class MultiStructuresVis(StructureVis):
    """StructureVis variant that can step through (and animate) a sequence
    of structures."""

    # Defaults for the animated movie mode; validated/copied by
    # set_animated_movie_options.
    DEFAULT_ANIMATED_MOVIE_OPTIONS = {'time_between_frames': 0.1,
                                      'looping_type': 'restart',
                                      'number_of_loops': 1,
                                      'time_between_loops': 1.0}

    def __init__(self, element_color_mapping=None, show_unit_cell=True,
                 show_bonds=False, show_polyhedron=False,
                 poly_radii_tol_factor=0.5, excluded_bonding_elements=None,
                 animated_movie_options=DEFAULT_ANIMATED_MOVIE_OPTIONS):
        """Same options as StructureVis, plus ``animated_movie_options``.

        Note: the ``animated_movie_options`` default is the class-level
        dict itself; this is only safe because set_animated_movie_options
        copies it rather than mutating it in place.
        """
        super(MultiStructuresVis, self).__init__(element_color_mapping=element_color_mapping,
                                                 show_unit_cell=show_unit_cell,
                                                 show_bonds=show_bonds, show_polyhedron=show_polyhedron,
                                                 poly_radii_tol_factor=poly_radii_tol_factor,
                                                 excluded_bonding_elements=excluded_bonding_elements)
        # Overlay actors for transient warning/info banners.
        self.warningtxt_actor = vtk.vtkActor2D()
        self.infotxt_actor = vtk.vtkActor2D()
        self.structures = None
        # Replace the base interactor style with the multi-structure-aware
        # one (adds n/p/m key bindings).
        style = MultiStructuresInteractorStyle(self)
        self.iren.SetInteractorStyle(style)
        self.istruct = 0
        self.current_structure = None
        self.set_animated_movie_options(animated_movie_options=animated_movie_options)
def set_structures(self, structures, tags=None):
    """Load the sequence of structures to browse and display the first.

    Args:
        structures: list of Structure objects.
        tags: optional list of tag dicts (see apply_tags) used to
            highlight specific sites.
    """
    self.structures = structures
    self.istruct = 0
    self.current_structure = self.structures[self.istruct]
    self.tags = tags if tags is not None else []
    # Precompute per-structure site radii (physical and display) so tags
    # can be scaled relative to each site's rendered size.
    self.all_radii = []
    self.all_vis_radii = []
    for struct in self.structures:
        struct_radii = []
        struct_vis_radii = []
        for site in struct:
            radius = 0
            # Occupancy-weighted radius, preferring the ionic radius for
            # Specie instances that define one.
            for specie, occu in site.species_and_occu.items():
                radius += occu * (specie.ionic_radius
                                  if isinstance(specie, Specie)
                                  and specie.ionic_radius
                                  else specie.average_ionic_radius)
            # Same display-radius formula as StructureVis.add_site.
            vis_radius = 0.2 + 0.002 * radius
            struct_radii.append(radius)
            struct_vis_radii.append(vis_radius)
        self.all_radii.append(struct_radii)
        self.all_vis_radii.append(struct_vis_radii)
    self.set_structure(self.current_structure, reset_camera=True, to_unit_cell=False)
def set_structure(self, structure, reset_camera=True, to_unit_cell=False):
    """Display ``structure`` (see StructureVis.set_structure), then overlay
    any active site tags."""
    super(MultiStructuresVis, self).set_structure(structure=structure, reset_camera=reset_camera,
                                                  to_unit_cell=to_unit_cell)
    self.apply_tags()
def apply_tags(self):
    """Render the highlight spheres described by ``self.tags`` on top of
    the currently displayed structure.

    Each tag is a dict with keys such as ``istruct`` ('all' or an index),
    ``site_index`` (or 'unit_cell_all'), ``cell_index``, ``color``,
    ``opacity`` and ``radius`` / ``radius_factor``.
    """
    tags = {}
    for tag in self.tags:
        istruct = tag.get('istruct', 'all')
        if istruct != 'all':
            if istruct != self.istruct:
                # Tag targets a different structure in the sequence.
                continue
        site_index = tag['site_index']
        color = tag.get('color', [0.5, 0.5, 0.5])
        opacity = tag.get('opacity', 0.5)
        if site_index == 'unit_cell_all':
            # Highlight every site of the (0, 0, 0) cell.
            struct_radii = self.all_vis_radii[self.istruct]
            for isite, site in enumerate(self.current_structure):
                vis_radius = 1.5 * tag.get('radius', struct_radii[isite])
                tags[(isite, (0, 0, 0))] = {'radius': vis_radius,
                                            'color': color,
                                            'opacity': opacity}
            continue
        cell_index = tag['cell_index']
        # Radius resolution order: explicit > factor of display radius >
        # default 1.5x the site's display radius.
        if 'radius' in tag:
            vis_radius = tag['radius']
        elif 'radius_factor' in tag:
            vis_radius = tag['radius_factor'] * self.all_vis_radii[self.istruct][site_index]
        else:
            vis_radius = 1.5 * self.all_vis_radii[self.istruct][site_index]
        tags[(site_index, cell_index)] = {'radius': vis_radius,
                                          'color': color,
                                          'opacity': opacity}
    for site_and_cell_index, tag_style in tags.items():
        isite, cell_index = site_and_cell_index
        site = self.current_structure[isite]
        if cell_index == (0, 0, 0):
            coords = site.coords
        else:
            # Build (and draw) the periodic image of the site in the
            # requested neighboring cell.
            fcoords = site.frac_coords + np.array(cell_index)
            site_image = PeriodicSite(site.species_and_occu, fcoords,
                                      self.current_structure.lattice, to_unit_cell=False,
                                      coords_are_cartesian=False,
                                      properties=site.properties)
            self.add_site(site_image)
            coords = site_image.coords
        vis_radius = tag_style['radius']
        color = tag_style['color']
        opacity = tag_style['opacity']
        self.add_partial_sphere(coords=coords, radius=vis_radius,
                                color=color, start=0, end=360,
                                opacity=opacity)
def set_animated_movie_options(self, animated_movie_options=None):
    """Validate and store the animated-movie options, falling back to the
    class defaults for any key not supplied."""
    opts = self.DEFAULT_ANIMATED_MOVIE_OPTIONS.copy()
    if animated_movie_options is not None:
        # Reject any option key that is not part of the defaults.
        unknown = set(animated_movie_options) - set(opts)
        if unknown:
            raise ValueError('Wrong option for animated movie')
        opts.update(animated_movie_options)
    self.animated_movie_options = opts
def display_help(self):
    """
    Display the help for various keyboard shortcuts.
    """
    helptxt = ["h : Toggle help",
               "A/a, B/b or C/c : Increase/decrease cell by one a,"
               " b or c unit vector", "# : Toggle showing of polyhedrons",
               "-: Toggle showing of bonds", "r : Reset camera direction",
               "[/]: Decrease or increase poly_radii_tol_factor "
               "by 0.05. Value = " + str(self.poly_radii_tol_factor),
               "Up/Down: Rotate view along Up direction by 90 "
               "clockwise/anticlockwise",
               "Left/right: Rotate view along camera direction by "
               "90 clockwise/anticlockwise", "s: Save view to image.png",
               "o: Orthogonalize structure",
               # Extra bindings handled by MultiStructuresInteractorStyle.
               "n: Move to next structure",
               "p: Move to previous structure",
               "m: Animated movie of the structures"]
    self.helptxt_mapper.SetInput("\n".join(helptxt))
    # Anchor the help text at the lower-left corner of the window.
    self.helptxt_actor.SetPosition(10, 10)
    self.helptxt_actor.VisibilityOn()
    def display_warning(self, warning):
        """Show a red, bold warning message in the lower-right corner.

        :param warning: text to display; rendered as "WARNING : <text>".
        """
        # A fresh mapper/actor pair is built on every call; the previous
        # actor (if any) stays in the renderer but its reference is dropped.
        self.warningtxt_mapper = vtk.vtkTextMapper()
        tprops = self.warningtxt_mapper.GetTextProperty()
        tprops.SetFontSize(14)
        tprops.SetFontFamilyToTimes()
        tprops.SetColor(1, 0, 0)
        tprops.BoldOn()
        # Right-justified so the text can be anchored to the right edge.
        tprops.SetJustificationToRight()
        self.warningtxt = "WARNING : {}".format(warning)
        self.warningtxt_actor = vtk.vtkActor2D()
        self.warningtxt_actor.VisibilityOn()
        self.warningtxt_actor.SetMapper(self.warningtxt_mapper)
        self.ren.AddActor(self.warningtxt_actor)
        self.warningtxt_mapper.SetInput(self.warningtxt)
        winsize = self.ren_win.GetSize()
        # 10 px in from the bottom-right corner of the render window.
        self.warningtxt_actor.SetPosition(winsize[0]-10, 10)
        self.warningtxt_actor.VisibilityOn()
def erase_warning(self):
self.warningtxt_actor.VisibilityOff()
    def display_info(self, info):
        """Show a blue, bold informational message in the upper-left corner.

        :param info: text to display; rendered as "INFO : <text>".
        """
        # A fresh mapper/actor pair is built on every call; the previous
        # actor (if any) stays in the renderer but its reference is dropped.
        self.infotxt_mapper = vtk.vtkTextMapper()
        tprops = self.infotxt_mapper.GetTextProperty()
        tprops.SetFontSize(14)
        tprops.SetFontFamilyToTimes()
        tprops.SetColor(0, 0, 1)
        tprops.BoldOn()
        # Top-aligned so the text hangs down from the anchor point.
        tprops.SetVerticalJustificationToTop()
        self.infotxt = "INFO : {}".format(info)
        self.infotxt_actor = vtk.vtkActor2D()
        self.infotxt_actor.VisibilityOn()
        self.infotxt_actor.SetMapper(self.infotxt_mapper)
        self.ren.AddActor(self.infotxt_actor)
        self.infotxt_mapper.SetInput(self.infotxt)
        winsize = self.ren_win.GetSize()
        # 10 px in from the top-left corner of the render window.
        self.infotxt_actor.SetPosition(10, winsize[1]-10)
        self.infotxt_actor.VisibilityOn()
def erase_info(self):
self.infotxt_actor.VisibilityOff()
class MultiStructuresInteractorStyle(StructureInteractorStyle):
    """Interactor style adding multi-structure navigation shortcuts.

    'n'/'p' step to the next/previous structure; 'm' plays an animated
    movie over all structures driven by parent.animated_movie_options.
    All other keys fall through to StructureInteractorStyle.
    """

    def __init__(self, parent):
        StructureInteractorStyle.__init__(self, parent=parent)

    @staticmethod
    def _show_structure(parent, istruct):
        # Switch the visualizer to structure index *istruct* (no redraw).
        parent.istruct = istruct
        parent.current_structure = parent.structures[istruct]
        parent.set_structure(parent.current_structure, reset_camera=False,
                             to_unit_cell=False)

    def keyPressEvent(self, obj, event):
        parent = obj.GetCurrentRenderer().parent
        sym = parent.iren.GetKeySym()
        if sym == "n":
            if parent.istruct == len(parent.structures) - 1:
                parent.display_warning('LAST STRUCTURE')
                parent.ren_win.Render()
            else:
                self._show_structure(parent, parent.istruct + 1)
                parent.erase_warning()
                parent.ren_win.Render()
        elif sym == "p":
            if parent.istruct == 0:
                parent.display_warning('FIRST STRUCTURE')
                parent.ren_win.Render()
            else:
                self._show_structure(parent, parent.istruct - 1)
                parent.erase_warning()
                parent.ren_win.Render()
        elif sym == "m":
            self._show_structure(parent, 0)
            parent.erase_warning()
            parent.ren_win.Render()
            nloops = parent.animated_movie_options['number_of_loops']
            tstep = parent.animated_movie_options['time_between_frames']
            tloops = parent.animated_movie_options['time_between_loops']
            if parent.animated_movie_options['looping_type'] == 'restart':
                loop_istructs = list(range(len(parent.structures)))
            elif parent.animated_movie_options['looping_type'] == 'palindrome':
                # BUG FIX: range objects cannot be concatenated with '+' in
                # Python 3; build explicit lists for the forward and reverse
                # passes before joining them.
                loop_istructs = (list(range(len(parent.structures))) +
                                 list(range(len(parent.structures) - 2, -1, -1)))
            else:
                raise ValueError('"looping_type" should be "restart" or "palindrome"')
            for iloop in range(nloops):
                for istruct in loop_istructs:
                    time.sleep(tstep)
                    self._show_structure(parent, istruct)
                    parent.display_info('Animated movie : structure {:d}/{:d} '
                                        '(loop {:d}/{:d})'.format(
                                            istruct + 1, len(parent.structures),
                                            iloop + 1, nloops))
                    parent.ren_win.Render()
                time.sleep(tloops)
            parent.erase_info()
            parent.display_info('Ended animated movie ...')
            parent.ren_win.Render()
        StructureInteractorStyle.keyPressEvent(self, obj, event)
| |
import struct
import time
from .bitcoin import Bitcoin
class SerializeDataTooShort(Exception):
    """The input buffer ended before a complete value could be decoded."""
    pass
class InvalidNetworkMagic(Exception):
    """A message did not start with the coin's expected magic bytes."""
    pass
class InvalidCommandEncoding(Exception):
    """A message header's command field was not valid ASCII."""
    pass
class MessageChecksumFailure(Exception):
    """A message payload's hash did not match the header checksum."""
    pass
class Serialize:
    """Helpers for (de)serializing Bitcoin wire-protocol primitives.

    All ``unserialize_*`` methods take a ``bytes`` buffer and return the
    decoded value together with the unconsumed remainder of the buffer.
    They raise SerializeDataTooShort when the buffer ends mid-value.
    """

    @staticmethod
    def serialize_variable_int(i):
        """Encode non-negative *i* as a Bitcoin variable-length integer."""
        if i < 0xfd:
            return struct.pack("B", i)
        if i <= 0xffff:
            return struct.pack("<BH", 0xfd, i)
        if i <= 0xffffffff:
            return struct.pack("<BL", 0xfe, i)
        return struct.pack("<BQ", 0xff, i)

    @staticmethod
    def serialize_variable_int_size(i):
        """Return the encoded length in bytes of *i*, without encoding it."""
        if i < 0xfd:
            return 1
        if i <= 0xffff:
            return 3
        if i <= 0xffffffff:
            return 5
        return 9

    @staticmethod
    def unserialize_variable_int(data):
        """Decode a variable-length integer; return (value, remainder)."""
        if len(data) == 0:
            raise SerializeDataTooShort()
        i = data[0]
        if i < 0xfd:
            return i, data[1:]
        elif i == 0xfd:
            if len(data) < 3:
                raise SerializeDataTooShort()
            return struct.unpack("<H", data[1:3])[0], data[3:]
        elif i == 0xfe:
            if len(data) < 5:
                raise SerializeDataTooShort()
            return struct.unpack("<L", data[1:5])[0], data[5:]
        else:
            if len(data) < 9:
                raise SerializeDataTooShort()
            return struct.unpack("<Q", data[1:9])[0], data[9:]

    @staticmethod
    def serialize_bytes(b):
        """Encode *b* as a length-prefixed byte string."""
        length = Serialize.serialize_variable_int(len(b))
        return length + b

    @staticmethod
    def unserialize_bytes(data):
        """Decode a length-prefixed byte string; return (bytes, remainder)."""
        length, data = Serialize.unserialize_variable_int(data)
        b = data[:length]
        return b, data[length:]

    @staticmethod
    def serialize_string(s):
        """Encode *s* as a UTF-8, length-prefixed byte string."""
        return Serialize.serialize_bytes(s.encode('utf8'))

    @staticmethod
    def unserialize_string(data):
        """Decode a UTF-8 length-prefixed string; return (str, remainder)."""
        s, data = Serialize.unserialize_bytes(data)
        return s.decode('utf8'), data

    @staticmethod
    def serialize_object(o):
        """Encode an int/bytes/str/list/dict with a one-byte type tag.

        Returns None for unsupported types; callers are expected to pass
        only the supported ones.
        """
        if isinstance(o, int):
            return b'v' + Serialize.serialize_variable_int(o)
        elif isinstance(o, bytes):
            return b'b' + Serialize.serialize_bytes(o)
        elif isinstance(o, str):
            return b's' + Serialize.serialize_string(o)
        elif isinstance(o, list):
            return b'l' + Serialize.serialize_list(o)
        elif isinstance(o, dict):
            return b'd' + Serialize.serialize_dict(o)

    @staticmethod
    def unserialize_object(data):
        """Decode one type-tagged object; return (value, remainder)."""
        t = bytes([data[0]])
        if t == b'v':
            return Serialize.unserialize_variable_int(data[1:])
        elif t == b'b':
            return Serialize.unserialize_bytes(data[1:])
        elif t == b's':
            return Serialize.unserialize_string(data[1:])
        elif t == b'l':
            return Serialize.unserialize_list(data[1:])
        elif t == b'd':
            return Serialize.unserialize_dict(data[1:])

    @staticmethod
    def serialize_list(items):
        """Encode a list as a count followed by type-tagged elements."""
        count = Serialize.serialize_variable_int(len(items))
        r = []
        for item in items:
            r.append(Serialize.serialize_object(item))
        return count + b''.join(r)

    @staticmethod
    def unserialize_list(data):
        """Decode a list; return (list, remainder)."""
        count, data = Serialize.unserialize_variable_int(data)
        r = []
        for _ in range(count):
            item, data = Serialize.unserialize_object(data)
            r.append(item)
        return r, data

    @staticmethod
    def serialize_dict(d):
        """Encode a dict as a count followed by alternating key/value objects."""
        count = Serialize.serialize_variable_int(len(d.keys()))
        r = []
        for k in d.keys():
            r.append(Serialize.serialize_object(k))
            r.append(Serialize.serialize_object(d[k]))
        return count + b''.join(r)

    @staticmethod
    def unserialize_dict(data):
        """Decode a dict; return (dict, remainder)."""
        count, data = Serialize.unserialize_variable_int(data)
        r = {}
        for _ in range(count):
            k, data = Serialize.unserialize_object(data)
            v, data = Serialize.unserialize_object(data)
            r[k] = v
        return r, data

    @staticmethod
    def serialize_network_address(address, services, with_timestamp=True):
        """Serialize a network address record.

        :param address: (dotted-quad IPv4 string, port) tuple, or None for
            the all-zero placeholder address.
        :param services: service-flags integer.
        :param with_timestamp: prepend the current time (addr-message form).
        """
        if address is not None:
            host, port_number = address
            quads = host.split(".")
            # IPv4-mapped IPv6: ten zero bytes, 0xff 0xff, then the quads.
            addr_bytes = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff,
                                int(quads[0]), int(quads[1]), int(quads[2]),
                                int(quads[3])])
            # BUG FIX: the port used to be packed from the already-rebound
            # 16-byte address buffer (address[1] == 0), so every serialized
            # address carried port 0. Pack the caller's port instead.
            port = struct.pack(">H", port_number)
        else:
            addr_bytes = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff,
                                0, 0, 0, 0])
            port = bytes([0, 0])
        if with_timestamp:
            return struct.pack("<LQ", int(time.time()), services) + addr_bytes + port
        else:
            return struct.pack("<Q", services) + addr_bytes + port

    @staticmethod
    def unserialize_network_address(data, with_timestamp=True):
        """Decode a network address record; inverse of serialize_network_address.

        Returns ((address, port), services, when, remainder) with a
        timestamp, or ((address, port), services, remainder) without one.
        """
        # BUG FIX: these used to raise the undefined name MessageTooShort;
        # SerializeDataTooShort is this module's too-short exception.
        if with_timestamp and len(data) < 30:
            raise SerializeDataTooShort()
        elif not with_timestamp and len(data) < 26:
            raise SerializeDataTooShort()
        if with_timestamp:
            when, services = struct.unpack("<LQ", data[:12])
            data = data[12:]
        else:
            services = struct.unpack("<Q", data[:8])[0]
            data = data[8:]
        address = data[:16]
        port = struct.unpack(">H", data[16:18])[0]
        # Collapse an IPv4-mapped address back to dotted-quad notation.
        if address[0:-4] == bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff]):
            address = '.'.join('{}'.format(v) for v in address[-4:])
        data = data[18:]
        if with_timestamp:
            return ((address, port), services, when, data)
        else:
            return ((address, port), services, data)

    @staticmethod
    def wrap_network_message(coin, command, payload):
        """Frame *payload* with magic, padded command, length and checksum."""
        magic = coin.NETWORK_MAGIC
        command = command[:12].encode("ascii")
        command += bytes([0] * (12 - len(command)))  # pad to 12 bytes
        length = struct.pack("<L", len(payload))
        checksum = Bitcoin.hash(payload)[:4]  # Checksum is first 4 bytes
        return magic + command + length + checksum + payload

    @staticmethod
    def unwrap_network_message(coin, data):
        '''a message is only complete if the 2nd return value is not None. We return
        command and length so that the network can decide to drop an unresponsive or
        misbehaving peer'''
        if len(data) < 24:
            return None, None, None, data
        magic = data[:4]
        if magic != coin.NETWORK_MAGIC:
            raise InvalidNetworkMagic()
        i = 0
        # BUG FIX: test the bound before indexing; with a full 12-byte
        # command the old order read data[16], the first length byte.
        while i < 12 and data[4+i] != 0:
            i += 1
        try:
            command = data[4:4+i].decode('ascii')
        except UnicodeDecodeError:
            raise InvalidCommandEncoding()
        length = struct.unpack("<L", data[16:20])[0]
        checksum = data[20:24]
        if (len(data) - 24) < length:
            # Incomplete: header parsed but the payload hasn't arrived yet.
            return command, None, length, data
        payload = data[24:24+length]
        leftover = data[24+length:]
        hash = Bitcoin.hash(payload)
        if hash[:4] != checksum:
            raise MessageChecksumFailure()
        return command, payload, length, leftover
| |
# Copyright 2013 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.networks import workflows \
as network_workflows
LOG = logging.getLogger(__name__)
class CreateSubnetInfoAction(network_workflows.CreateSubnetInfoAction):
    """Subnet-step form for creating a subnet on an *existing* network.

    The with_subnet checkbox of the parent action is hidden and forced on,
    since this workflow always creates a subnet.
    """
    with_subnet = forms.BooleanField(initial=True, required=False,
                                     widget=forms.HiddenInput())
    msg = _('Specify "Network Address"')
    class Meta:
        name = _("Subnet")
        help_text = _('Create a subnet associated with the network. '
                      'Advanced configuration is available by clicking on the '
                      '"Subnet Details" tab.')
    def clean(self):
        # Bypass the parent's clean() and run the subnet checks directly.
        cleaned_data = workflows.Action.clean(self)
        self._check_subnet_data(cleaned_data)
        return cleaned_data
class CreateSubnetInfo(network_workflows.CreateSubnetInfo):
    """Workflow step wrapping CreateSubnetInfoAction; needs the network id."""
    action_class = CreateSubnetInfoAction
    depends_on = ("network_id",)
class CreateSubnet(network_workflows.CreateNetwork):
    """Workflow that creates a subnet on an already-existing network."""
    slug = "create_subnet"
    name = _("Create Subnet")
    finalize_button_name = _("Create")
    success_message = _('Created subnet "%s".')
    failure_message = _('Unable to create subnet "%s".')
    default_steps = (CreateSubnetInfo,
                     network_workflows.CreateSubnetDetail)

    def format_status_message(self, message):
        # Prefer the human-readable subnet name; fall back to its id.
        label = self.context.get('subnet_name') or self.context.get('subnet_id')
        return message % label

    def get_success_url(self):
        network_id = self.context.get('network_id')
        return reverse("horizon:project:networks:detail", args=(network_id,))

    def get_failure_url(self):
        network_id = self.context.get('network_id')
        return reverse("horizon:project:networks:detail", args=(network_id,))

    def handle(self, request, data):
        # The parent class provides _create_subnet(); report plain success.
        return True if self._create_subnet(request, data) else False
class UpdateSubnetInfoAction(CreateSubnetInfoAction):
    """Subnet-step form for editing an existing subnet.

    The CIDR is shown read-only (it cannot be changed after creation) and
    the IP version is carried in a hidden field.
    """
    cidr = forms.IPField(label=_("Network Address"),
                         required=False,
                         initial="",
                         widget=forms.TextInput(
                             attrs={'readonly': 'readonly'}),
                         help_text=_("Network address in CIDR format "
                                     "(e.g. 192.168.0.0/24)"),
                         version=forms.IPv4 | forms.IPv6,
                         mask=True)
    # NOTE(amotoki): When 'disabled' attribute is set for the ChoiceField
    # and ValidationError is raised for POST request, the initial value of
    # the ip_version ChoiceField is not set in the re-displayed form
    # As a result, 'IPv4' is displayed even when IPv6 is used if
    # ValidationError is detected. In addition 'required=True' check complains
    # when re-POST since the value of the ChoiceField is not set.
    # Thus now I use HiddenInput for the ip_version ChoiceField as a work
    # around.
    ip_version = forms.ChoiceField(choices=[(4, 'IPv4'), (6, 'IPv6')],
                                   widget=forms.HiddenInput(),
                                   label=_("IP Version"))
    gateway_ip = forms.IPField(
        label=_("Gateway IP (optional)"),
        required=False,
        initial="",
        help_text=_("IP address of Gateway (e.g. 192.168.0.254). "
                    "Specify an explicit address to set the gateway. "
                    "If you do not want to use a gateway, "
                    "check 'Disable Gateway' below."),
        version=forms.IPv4 | forms.IPv6,
        mask=False)
    no_gateway = forms.BooleanField(label=_("Disable Gateway"),
                                    initial=False, required=False)
    class Meta:
        name = _("Subnet")
        help_text = _('Update a subnet associated with the network. '
                      'Advanced configuration are available at '
                      '"Subnet Details" tab.')
    def clean(self):
        # Validate with is_create=False so create-only checks are skipped.
        cleaned_data = workflows.Action.clean(self)
        self._check_subnet_data(cleaned_data, is_create=False)
        return cleaned_data
class UpdateSubnetInfo(CreateSubnetInfo):
    """Workflow step wrapping UpdateSubnetInfoAction; needs network and subnet ids."""
    action_class = UpdateSubnetInfoAction
    depends_on = ("network_id", "subnet_id")
class UpdateSubnetDetailAction(network_workflows.CreateSubnetDetailAction):
    """Subnet-details form for editing; allocation pools are not editable."""
    allocation_pools = forms.CharField(widget=forms.HiddenInput(),
                                       required=False)
    def __init__(self, request, context, *args, **kwargs):
        super(UpdateSubnetDetailAction, self).__init__(request, context,
                                                       *args, **kwargs)
        # TODO(amotoki): Due to Neutron bug 1362966, we cannot pass "None"
        # to Neutron. It means we cannot set IPv6 two modes to
        # "No option selected".
        # Until bug 1362966 is fixed, we disable this field.
        # if context['ip_version'] != 6:
        #     self.fields['ipv6_modes'].widget = forms.HiddenInput()
        #     self.fields['ipv6_modes'].required = False
        self.fields['ipv6_modes'].widget = forms.HiddenInput()
        self.fields['ipv6_modes'].required = False
    class Meta:
        name = _("Subnet Details")
        help_text = _('Specify additional attributes for the subnet.')
class UpdateSubnetDetail(network_workflows.CreateSubnetDetail):
    """Workflow step wrapping UpdateSubnetDetailAction."""
    action_class = UpdateSubnetDetailAction
class UpdateSubnet(network_workflows.CreateNetwork):
    """Workflow for editing an existing subnet."""
    slug = "update_subnet"
    name = _("Edit Subnet")
    finalize_button_name = _("Save")
    success_message = _('Updated subnet "%s".')
    failure_message = _('Unable to update subnet "%s".')
    success_url = "horizon:project:networks:detail"
    failure_url = "horizon:project:networks:detail"
    default_steps = (UpdateSubnetInfo,
                     UpdateSubnetDetail)

    def format_status_message(self, message):
        # Prefer the human-readable subnet name; fall back to its id.
        name = self.context.get('subnet_name') or self.context.get('subnet_id')
        return message % name

    def get_success_url(self):
        return reverse(self.success_url,
                       args=(self.context.get('network_id'),))

    def _update_subnet(self, request, data):
        """Apply the form data to the subnet via the Neutron API.

        Returns the updated subnet on success, False on failure (after
        registering a redirecting error message).
        """
        network_id = self.context.get('network_id')
        try:
            subnet_id = self.context.get('subnet_id')
            params = {}
            params['name'] = data['subnet_name']
            if data['no_gateway']:
                params['gateway_ip'] = None
            elif data['gateway_ip']:
                params['gateway_ip'] = data['gateway_ip']
            # We should send gateway_ip only when it is changed, because
            # updating gateway_ip is prohibited when the ip is used.
            # See bug 1227268.
            # BUG FIX: guard the lookup — when the gateway field is left
            # blank and "Disable Gateway" is unchecked, neither branch above
            # sets 'gateway_ip' and the old code raised KeyError here.
            if 'gateway_ip' in params:
                subnet = api.neutron.subnet_get(request, subnet_id)
                if params['gateway_ip'] == subnet.gateway_ip:
                    del params['gateway_ip']
            self._setup_subnet_parameters(params, data, is_create=False)
            subnet = api.neutron.subnet_update(request, subnet_id, **params)
            msg = _('Subnet "%s" was successfully updated.') % data['cidr']
            LOG.debug(msg)
            return subnet
        except Exception as e:
            msg = (_('Failed to update subnet "%(sub)s": '
                     ' %(reason)s') %
                   {"sub": data['cidr'], "reason": e})
            redirect = reverse(self.failure_url, args=(network_id,))
            exceptions.handle(request, msg, redirect=redirect)
            return False

    def handle(self, request, data):
        subnet = self._update_subnet(request, data)
        return True if subnet else False
| |
# Copyright (c) 2012 Citrix Systems, Inc.
# Copyright 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for host-related functions (start, reboot, etc).
"""
import re
from oslo.config import cfg
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import exception
from nova.i18n import _
from nova import objects
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.pci import pci_whitelist
from nova.virt.xenapi import pool_states
from nova.virt.xenapi import vm_utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class Host(object):
    """Implements host related operations."""
    def __init__(self, session, virtapi):
        # session: XenAPI session wrapper; virtapi: compute virt API handle.
        self._session = session
        self._virtapi = virtapi
    def host_power_action(self, _host, action):
        """Reboots or shuts down the host.

        :param action: 'reboot' or 'shutdown' (any other value raises
            KeyError from the methods lookup below).
        """
        args = {"action": jsonutils.dumps(action)}
        methods = {"reboot": "host_reboot", "shutdown": "host_shutdown"}
        response = call_xenhost(self._session, methods[action], args)
        return response.get("power_action", response)
    def host_maintenance_mode(self, host, mode):
        """Start/Stop host maintenance window. On start, it triggers
        guest VMs evacuation.
        """
        if not mode:
            return 'off_maintenance'
        # Candidate destinations: every host in the pool except ourselves.
        host_list = [host_ref for host_ref in
                     self._session.host.get_all()
                     if host_ref != self._session.host_ref]
        migrations_counter = vm_counter = 0
        ctxt = context.get_admin_context()
        for vm_ref, vm_rec in vm_utils.list_vms(self._session):
            for host_ref in host_list:
                try:
                    # Ensure only guest instances are migrated
                    uuid = vm_rec['other_config'].get('nova_uuid')
                    if not uuid:
                        name = vm_rec['name_label']
                        uuid = _uuid_find(ctxt, host, name)
                        if not uuid:
                            LOG.info(_('Instance %(name)s running on %(host)s'
                                       ' could not be found in the database:'
                                       ' assuming it is a worker VM and skip'
                                       ' ping migration to a new host'),
                                     {'name': name, 'host': host})
                            continue
                    instance = objects.Instance.get_by_uuid(ctxt, uuid)
                    vm_counter = vm_counter + 1
                    # Find the pool aggregate so we can map the XenAPI host
                    # reference back to a nova compute host name.
                    aggregate = objects.AggregateList.get_by_host(
                        ctxt, host, key=pool_states.POOL_FLAG)
                    if not aggregate:
                        msg = _('Aggregate for host %(host)s count not be'
                                ' found.') % dict(host=host)
                        raise exception.NotFound(msg)
                    dest = _host_find(ctxt, self._session, aggregate[0],
                                      host_ref)
                    instance.host = dest
                    instance.task_state = task_states.MIGRATING
                    instance.save()
                    self._session.VM.pool_migrate(vm_ref, host_ref,
                                                  {"live": "true"})
                    migrations_counter = migrations_counter + 1
                    instance.vm_state = vm_states.ACTIVE
                    instance.save()
                    # Migrated successfully; stop trying other hosts.
                    break
                except self._session.XenAPI.Failure:
                    # NOTE(review): XenAPI.Failure can only come from the
                    # pool_migrate call above, so 'instance' is bound here;
                    # roll its record back to this host.
                    LOG.exception(_('Unable to migrate VM %(vm_ref)s '
                                    'from %(host)s'),
                                  {'vm_ref': vm_ref, 'host': host})
                    instance.host = host
                    instance.vm_state = vm_states.ACTIVE
                    instance.save()
        if vm_counter == migrations_counter:
            # Every guest VM found a new home.
            return 'on_maintenance'
        else:
            raise exception.NoValidHost(reason='Unable to find suitable '
                                               'host for VMs evacuation')
    def set_host_enabled(self, enabled):
        """Sets the compute host's ability to accept new instances."""
        # Since capabilities are gone, use service table to disable a node
        # in scheduler
        cntxt = context.get_admin_context()
        service = objects.Service.get_by_args(cntxt, CONF.host,
                                              'nova-compute')
        service.disabled = not enabled
        service.disabled_reason = 'set by xenapi host_state'
        service.save()
        # Also inform the xenhost plugin on dom0.
        args = {"enabled": jsonutils.dumps(enabled)}
        response = call_xenhost(self._session, "set_host_enabled", args)
        return response.get("status", response)
    def get_host_uptime(self, _host):
        """Returns the result of calling "uptime" on the target host."""
        response = call_xenhost(self._session, "host_uptime", {})
        return response.get("uptime", response)
class HostState(object):
    """Manages information about the XenServer host this compute
    node is running on.
    """
    def __init__(self, session):
        super(HostState, self).__init__()
        self._session = session
        # Cached stats dict; refreshed by update_status().
        self._stats = {}
        self._pci_device_filter = pci_whitelist.get_pci_devices_filter()
        self.update_status()
    def _get_passthrough_devices(self):
        """Get a list pci devices that are available for pci passthtough.
        We use a plugin to get the output of the lspci command runs on dom0.
        From this list we will extract pci devices that are using the pciback
        kernel driver. Then we compare this list to the pci whitelist to get
        a new list of pci devices that can be used for pci passthrough.
        :returns: a list of pci devices available for pci passthrough.
        """
        def _compile_hex(pattern):
            """Return a compiled regular expression pattern into which we have
            replaced occurrences of hex by [\da-fA-F].
            """
            return re.compile(pattern.replace("hex", r"[\da-fA-F]"))
        def _parse_pci_device_string(dev_string):
            """Exctract information from the device string about the slot, the
            vendor and the product ID. The string is as follow:
            "Slot:\tBDF\nClass:\txxxx\nVendor:\txxxx\nDevice:\txxxx\n..."
            Return a dictionary with informations about the device.
            """
            slot_regex = _compile_hex(r"Slot:\t"
                                      r"((?:hex{4}:)?"  # Domain: (optional)
                                      r"hex{2}:"        # Bus:
                                      r"hex{2}\."       # Device.
                                      r"hex{1})")       # Function
            vendor_regex = _compile_hex(r"\nVendor:\t(hex+)")
            product_regex = _compile_hex(r"\nDevice:\t(hex+)")
            slot_id = slot_regex.findall(dev_string)
            vendor_id = vendor_regex.findall(dev_string)
            product_id = product_regex.findall(dev_string)
            if not slot_id or not vendor_id or not product_id:
                raise exception.NovaException(
                    _("Failed to parse information about"
                      " a pci device for passthrough"))
            type_pci = self._session.call_plugin_serialized(
                'xenhost', 'get_pci_type', slot_id[0])
            return {'label': '_'.join(['label',
                                       vendor_id[0],
                                       product_id[0]]),
                    'vendor_id': vendor_id[0],
                    'product_id': product_id[0],
                    'address': slot_id[0],
                    'dev_id': '_'.join(['pci', slot_id[0]]),
                    'dev_type': type_pci,
                    'status': 'available'}
        # Devices are separated by a blank line. That is why we
        # use "\n\n" as separator.
        lspci_out = self._session.call_plugin_serialized(
            'xenhost', 'get_pci_device_details')
        pci_list = lspci_out.split("\n\n")
        # For each device of the list, check if it uses the pciback
        # kernel driver and if it does, get informations and add it
        # to the list of passthrough_devices. Ignore it if the driver
        # is not pciback.
        passthrough_devices = []
        for dev_string_info in pci_list:
            if "Driver:\tpciback" in dev_string_info:
                new_dev = _parse_pci_device_string(dev_string_info)
                if self._pci_device_filter.device_assignable(new_dev):
                    passthrough_devices.append(new_dev)
        return passthrough_devices
    def get_host_stats(self, refresh=False):
        """Return the current state of the host. If 'refresh' is
        True, run the update first.
        """
        if refresh or not self._stats:
            self.update_status()
        return self._stats
    def update_status(self):
        """Since under Xenserver, a compute node runs on a given host,
        we can get host status information using xenapi.
        """
        LOG.debug("Updating host stats")
        data = call_xenhost(self._session, "host_data", {})
        if data:
            # Disk stats come from the default storage repository.
            sr_ref = vm_utils.scan_default_sr(self._session)
            sr_rec = self._session.SR.get_record(sr_ref)
            total = int(sr_rec["physical_size"])
            used = int(sr_rec["physical_utilisation"])
            data["disk_total"] = total
            data["disk_used"] = used
            data["disk_allocated"] = int(sr_rec["virtual_allocation"])
            data["disk_available"] = total - used
            data["supported_instances"] = to_supported_instances(
                data.get("host_capabilities")
            )
            # Flatten the nested host_memory dict into top-level keys.
            host_memory = data.get('host_memory', None)
            if host_memory:
                data["host_memory_total"] = host_memory.get('total', 0)
                data["host_memory_overhead"] = host_memory.get('overhead', 0)
                data["host_memory_free"] = host_memory.get('free', 0)
                data["host_memory_free_computed"] = host_memory.get(
                    'free-computed', 0)
                del data['host_memory']
            # Keep the originally-reported hostname: changing it at runtime
            # would confuse the scheduler, so log an error and ignore it.
            if (data['host_hostname'] !=
                    self._stats.get('host_hostname', data['host_hostname'])):
                LOG.error(_('Hostname has changed from %(old)s '
                            'to %(new)s. A restart is required to take effect.'
                            ) % {'old': self._stats['host_hostname'],
                                 'new': data['host_hostname']})
                data['host_hostname'] = self._stats['host_hostname']
            data['hypervisor_hostname'] = data['host_hostname']
            vcpus_used = 0
            for vm_ref, vm_rec in vm_utils.list_vms(self._session):
                vcpus_used = vcpus_used + int(vm_rec['VCPUs_max'])
            data['vcpus_used'] = vcpus_used
            data['pci_passthrough_devices'] = self._get_passthrough_devices()
            self._stats = data
def to_supported_instances(host_capabilities):
    """Convert XenServer capability strings into instance-support tuples.

    Each capability has the form "ostype-version-arch"; malformed entries
    are logged and skipped.

    :param host_capabilities: iterable of capability strings (or falsy).
    :returns: list of (arch, 'xapi', ostype) tuples.
    """
    if not host_capabilities:
        return []
    supported = []
    for capability in host_capabilities:
        try:
            ostype, _version, arch = capability.split("-")
        except ValueError:
            LOG.warning(
                _("Failed to extract instance support from %s"), capability)
        else:
            supported.append((arch, 'xapi', ostype))
    return supported
def call_xenhost(session, method, arg_dict):
    """Invoke *method* on the xenhost plugin and decode its JSON reply.

    :returns: the decoded response dict, '' for an empty reply, None when
        the reply is not valid JSON, or the XenAPI failure detail string
        when the call itself fails.
    """
    # Create a task ID as something that won't match any instance ID
    try:
        raw = session.call_plugin('xenhost', method, args=arg_dict)
        return jsonutils.loads(raw) if raw else ''
    except ValueError:
        LOG.exception(_("Unable to get updated status"))
        return None
    except session.XenAPI.Failure as e:
        LOG.error(_("The call to %(method)s returned "
                    "an error: %(e)s."), {'method': method, 'e': e})
        return e.details[1]
def _uuid_find(context, host, name_label):
    """Return instance uuid by name_label.

    Scans the instances on *host* and returns the uuid of the first one
    whose name matches, or None when there is no match.
    """
    matches = (inst.uuid
               for inst in objects.InstanceList.get_by_host(context, host)
               if inst.name == name_label)
    return next(matches, None)
def _host_find(context, session, src_aggregate, host_ref):
    """Return the host from the xenapi host reference.

    :param src_aggregate: the aggregate that the compute host being put in
                          maintenance (source of VMs) belongs to
    :param host_ref: the hypervisor host reference (destination of VMs)
    :return: the compute host that manages host_ref
    :raises exception.NoValidHost: when no aggregate entry maps to host_ref
    """
    # NOTE: this would be a lot simpler if nova-compute stored
    # CONF.host in the XenServer host's other-config map.
    # TODO(armando-migliaccio): improve according the note above
    target_uuid = session.host.get_uuid(host_ref)
    for compute_host, host_uuid in src_aggregate.metadetails.iteritems():
        if host_uuid == target_uuid:
            return compute_host
    raise exception.NoValidHost(reason='Host %(host_uuid)s could not be found '
                                'from aggregate metadata: %(metadata)s.' %
                                {'host_uuid': target_uuid,
                                 'metadata': src_aggregate.metadetails})
| |
#!/usr/bin/python
''' Richer Oled information.'''
import subprocess, time, logging, datetime, sys, threading
from weather import Weather
import keys
import importlib
# ToDo: Add scrolling, based on overflow from row0.
# Put this into a Timer.
### Display layout for tft
# ROWS 0 to 3 = Prog info
TITLE_ROW = 0 # for tft: first row used for programme info
TIMING_ROW = 7  # row used by show_timings() for elapsed-time output
NEXT_STATION_ROW = 8
#
# radio extras
# button labels
###
SCROLL_PAUSE = -5  # negative scroll pointer => pause before scrolling restarts
INFOROWUPDATEPERIOD = 60  # seconds between info-row refreshes (normal mode)
CLEANUPTIMEOUT = 5  # seconds to wait for worker threads to die in cleanup()
class InfoDisplay(threading.Thread):
''' Richer info on the oled. '''
def __init__(self, testmode = False, scrolling = False):
self.logger = logging.getLogger(__name__)
threading.Thread.__init__(self, name='infodisplay')
self.Event = threading.Event()
self.logger.info("Starting InfoDisplay class")
self.chgvol_flag = False
self.vol_string = ' '
if keys.board not in ['oled4', 'oled2', 'lcd', 'uoled', 'tft', 'emulator']:
print 'Error: display type not recognised.'
sys.exit()
print 'Infodisplay board = ',keys.board
board = importlib.import_module(keys.board)
self.myScreen = board.Screen()
if keys.board == 'oled2':
self.myScreen.set_rowcount(2)
self.myWeather = Weather(keys.wunder, keys.locn)
self.myWeather.start()
self.ending = False
self.myScreen.start()
self.rowcount, self.rowlength = self.myScreen.info()
self.writerow(TITLE_ROW, 'Starting up...'.center(self.rowlength))
# self.update_info_row()
self.lasttime = 0
self.delta = 0.001
self.scroll_pointer = SCROLL_PAUSE
self.scroll_string = ' '
self.prog = 'Info test'
if testmode:
self.timer = 2
else:
self.timer = INFOROWUPDATEPERIOD
self.scrolling = scrolling
self.told = time.clock()
def cleanup(self):
self.ending = True # must be first line.
time.sleep(1) # these delays needed to get cleanup to work
self.Event.set() # stop the display updates
try:
if self.rowcount == 2:
if self.scrolling:
self.scrollt.cancel()
time.sleep(1)
self.scrollt.cancel()
else:
self.t.cancel() # cancel timer for update display
except:
print 'Scroll timer not started'
self.myWeather.Event.set() # send the stop signal
self.myWeather.join(CLEANUPTIMEOUT) # wait for thread to finish
if self.myWeather.is_alive(): # so we timed out
print 'Weather thread did not die'
self.myScreen.Event.set()
self.myScreen.join(CLEANUPTIMEOUT) # wait for thread to finish
if self.myScreen.is_alive(): # so we timed out
print 'Screen thread did not die'
self.logger.info('Finished infodisplay cleanup.')
def clear(self):
'''Clear screen.'''
self.myScreen.clear()
def writerow(self, row, string):
if row < self.rowcount:
self.myScreen.q.put([row, string]) # add to the queue
else:
print 'Trying to write to non-existent row:', row
def run(self):
print 'Starting infodisplay thread'
myevent = False
while not myevent:
self.update_display()
myevent = self.Event.wait(self.timer) # wait for this timeout or the flag being set.
print 'Infodisplay exiting.'
def update_display(self):
'''Update the whole display, including the prog info and the status line.'''
self._update_info_row()
if self.rowcount == 2:
if self.scrolling:
self._scroll(self.prog)
else:
self.myScreen.q.put([TITLE_ROW,self.prog[:self.rowlength]]) # just show one row
else:
self._show_prog_info(self.prog)
# if not self.ending:
# print 'refreshing display update with timer = ',self.timer
# self.t = threading.Timer(self.timer, self.update_display)
# self.t.start()
# self.t.name = 'update_display'
return(0)
def _update_info_row(self):
'''Time and temperature display on the info line = bottom row.
This now repeats itself courtesy of the Timer.'''
clock = time.strftime("%H:%M")
if self.chgvol_flag:
self.myScreen.write_radio_extras(self.vol_string, ' ', True)
else:
self.myScreen.write_radio_extras(clock, self.myWeather.wunder_temperature)
self.myScreen.write_button_labels(False, False)
return(0)
def _show_prog_info(self,string):
'''Display up to 2 rows from bottom of display of the program name and details.'''
retstr, string = self._find_station_name(string)
if retstr: # if the station is recognised.
self.myScreen.q.put([TITLE_ROW,retstr.center(self.rowlength)])
else:
self.myScreen.q.put([TITLE_ROW,string[:self.rowlength].ljust(self.rowlength)])
string = string[self.rowlength:]
for i in range(TITLE_ROW+1, self.myScreen.last_prog_row+1): # run through the rest of the rows.
string = self._process_next_row(i,string)
return(0)
def _process_next_row(self, row, string):
'''Called by show_prog_info to process all rows after first row.'''
if len(string) > 0:
if string[0] == ' ': # strip off any leading space.
string = string[1:]
if row == self.myScreen.last_prog_row:
self.myScreen.q.put([row,string[:self.rowlength].ljust(self.rowlength)])
else:
self.myScreen.q.put([row,string[:self.rowlength].ljust(self.rowlength)])
string = string[self.rowlength:]
else: # nothing left to show
if row < 4:
string = ' ' # blank the last rows
self.myScreen.q.put([row,string])
self.scroll_string = ' '
return(string)
def _find_station_name(self,string):
'''Called by show_prog_info. Decode the BBC station from the proginfo.'''
a = string.split()
if a[0] == 'BBC':
if a[3] == '6':
retstr = a[0]+' '+a[2]+a[3]+' '+a[4]
remainder = string[len(retstr)+4:] # trim off the station name.
elif a[4] == 'Extra': # BBC Radio 4 Extra
retstr = a[0]+' '+a[2]+a[3]+' '+a[4]
remainder = string[len(retstr)+4:] # trim off the station name.
else:
retstr = a[0]+' '+a[2]+a[3]
remainder = string[len(retstr)+4:] # trim off the station name.
return(retstr, remainder)
else:
return(False, string)
def show_timings(self, elapsed=0, maxelapsed=0):
'''Show time gone.'''
if ((elapsed - self.lasttime) > self.delta) or ((self.lasttime - elapsed) > self.delta):
self.myScreen.writerow(TIMING_ROW,'Now={0:4.2f}s Max={1:5.2f}s'.format(elapsed, maxelapsed))
self.lasttime = elapsed
return(0)
    def _scroll(self):
        '''Scroll the programme text across the top row; re-arms itself via a Timer.'''
        # NOTE(review): time.clock() is Python 2 only (removed in Python 3.8).
        self.told = time.clock()
        # self._update_info_row()
        if len(self.prog) > 10:
            row = 0 # used to be 2
            if self.scroll_pointer < 0:
                # Negative pointer = pause phase: keep showing the start of the text.
                self.myScreen.q.put([row, self.prog[0:self.rowlength]])
            else:
                self.myScreen.q.put([row, self.prog[self.scroll_pointer:self.scroll_pointer+self.rowlength]])
            self.scroll_pointer += 1
            if self.scroll_pointer > (len(self.prog)-15):
                # Near the end of the text: wrap round and pause before scrolling again.
                self.scroll_pointer = SCROLL_PAUSE
        if not self.ending:
            # Re-arm the half-second scroll timer until cleanup sets self.ending.
            self.scrollt = threading.Timer(.5, self._scroll)
            self.scrollt.start()
            self.scrollt.name = 'scroll'
        self.tstart = time.clock()
        print 'Scroll time:',self.tstart - self.told
        self.told = self.tstart
        return(0)
def writelabels(self, next = False, stop = False):
'''Show the action labels on the screen.'''
self.logger.info('writelabels')
self.myScreen.write_button_labels(next, stop)
return(0)
if __name__ == "__main__":
logging.basicConfig(filename='./log/infodisplay.log',
filemode='w',
level=logging.INFO) #filemode means that we do not append anymore
# Default level is warning, level=logging.INFO log lots, level=logging.DEBUG log everything
logging.warning(datetime.datetime.now().strftime('%d %b %H:%M')+". Running infodisplay class as a standalone app")
print 'Infodisplay test'
myID = InfoDisplay(testmode = True)
# myID.update_display()
myID._scroll()
print 'Timer running'
string = ['String zero. Here is some text that is long enough to scroll.',
'String1. Lets put a lot of text here so that it wraps and need plenty to test scroll.',
'Strings 2.',
'Final string']
for i in range(2):
print 'String ',i
myID.prog = string[i]
time.sleep(20)
print 'cleaning up'
myID.cleanup()
print threading.enumerate()
print 'Main prog is finished.'
| |
"""
implementation of Moderna algorithm for detecting stackings
"""
import simplejson as json
from xml.dom import minidom
import sys, os, re
import numpy as np
import itertools
import string
import gzip
__email__ = "gchojnowski@genesilico.pl"
from operator import itemgetter
from string import strip
from cStringIO import StringIO
from Bio import PDB
from scipy.spatial import KDTree
from optparse import OptionParser
# *************************************************************************************************
# *************************************************************************************************
# *************************************************************************************************
# *************************************************************************************************
# Moderna stacking detector
#__author__ = "Magdalena Rother, Tomasz Puton, Kristian Rother"
#__copyright__ = "Copyright 2008, The Moderna Project"
#__credits__ = ["Janusz Bujnicki"]
#__license__ = "GPL"
#__version__ = "1.5.0"
#__maintainer__ = "Magdalena Rother"
#__email__ = "mmusiel@genesilico.pl"
#__status__ = "Production"
# ADAPTED for CCTBX objects from: StackingCalculator.py
# 27.06.2011 gchojnowski@genesilico.pl
#from numpy import array, add, cross, sqrt, arccos
#import math
# between purines and pyrimidines, the normals are reversed, because
# the rings are reversed with respect to the helix axis.
# Ring atoms spanning each base plane.  Index order matters: the plane normal
# is computed from (atoms[1]-atoms[0]) x (atoms[3]-atoms[2]) in ResidueVector.
NORMAL_SUPPORT = {
    'C':['N1','C2','N3','C4','C5','C6'],
    'U':['N1','C2','N3','C4','C5','C6'],
    'T':['N1','C2','N3','C4','C5','C6'],
    'G':['N1','C2','C4','N3','C5','C6'],
    'A':['N1','C2','C4','N3','C5','C6'],
    }
# Stacking label keyed by (normals parallel?, base2 on top of base1?).
STACKINGS = {
    (True, True): '>>',
    (True, False): '<<',
    (False, False): '<>',
    (False, True): '><',
    }
# Radians -> degrees conversion factor.
ARCPI = 180.0/np.pi
"""
A procedure for calculating stacking of RNA nucleotides.
The definition of base stacking from Major & Lemieux
MC-Annotate paper (JMB 2001, 308, p.919ff):
"Stacking between two nitrogen bases is considered
if the distance between their rings is less
than 5.5 Ang., the angle between the two normals to
the base planes is inferior to 30 deg., and the angle
between the normal of one base plane and the vector
between the center of the rings from the two
bases is less than 40 deg."
There are two classes defined here:
- ResidueVector
- StackingCalculator
The latter class should be used for calculating stacking. There are two
public methods inside StackingCalculator class that can be used
for calculating stacking:
- process_pdbfile(file_name, chain_id='A') - which runs StackingCalculator
on the RNA from the 'file_name'.
The second parameter is optional and has to be set, if the chain ID
of RNA from PDB file is different than 'A'.
"""
# *************************************************************************************************
class ResidueVector:
    """Geometry helper for one nucleotide residue.

    Holds per-atom coordinate vectors plus the base-ring centre and the
    base-plane normal, and classifies stacking against another
    ResidueVector using the Major & Lemieux / MC-Annotate criteria.
    """

    def __init__(self, residue):
        """Creates a dictionary of vectors for each atom from a ModernaResidue."""
        self.residue = residue
        self.atoms = {}
        for atom in residue:
            atom_name = atom.name.strip().upper()
            self.atoms[atom_name] = np.array(atom.get_coord())
        self.resn = residue.resname.strip()
        # None when the residue is not a recognised nucleotide type.
        self.normal_set = NORMAL_SUPPORT.get(self.resn.upper())
        self.normal = None
        self.center = None

    # vector placeholder functions
    # code snatched from Scientific.Geometry
    def angle(self, vec_a, vec_b):
        """Angle between two vectors in degrees (cosine clamped to [-1, 1])."""
        cosa = np.add.reduce(vec_a*vec_b) / \
            np.sqrt(np.add.reduce(vec_a*vec_a) *
                    np.add.reduce(vec_b*vec_b))
        cosa = max(-1., min(1., cosa))
        return np.arccos(cosa) * ARCPI

    def is_valid(self):
        """Checks if all necessary atoms are present."""
        if self.normal_set:
            for name in self.normal_set:
                # dict.has_key() was removed in Python 3; ``in`` is equivalent.
                if name not in self.atoms:
                    return False
        return True

    def calculate_vectors(self):
        """
        Constructs the normal vectors for nucleotide bases.
        Returns a tuple of vectors, the first pointing
        from O to the center of the six-ring of the according base,
        and the second being the normal
        vector according to the definition of Major & Thibault 2006.
        Assumes the residue has a complete set of atoms.
        """
        # sum all six atom vectors up to get center point.
        asum = np.array([0.0, 0.0, 0.0])
        for atomname in self.normal_set:
            if atomname not in self.atoms:
                self.normal = None
                return
            asum += self.atoms[atomname]
        self.center = asum / 6.0
        # get two pairs of atoms spanning a plane
        # and calculate the normal vector
        atoma = self.atoms[self.normal_set[1]] - self.atoms[self.normal_set[0]]
        atomb = self.atoms[self.normal_set[3]] - self.atoms[self.normal_set[2]]
        self.normal = np.cross(atoma, atomb)
        self.normal = self.normal/np.sqrt(np.add.reduce(self.normal*self.normal))

    def calc_angles(self, rvec):
        """
        Calculates whether the distance and angles between the vectors are OK.
        Returns a tuple of (dist,nn_angle,n1cc_angle,n2cc_angle) or all-None.
        """
        # calculate the distance between the two ring centers
        ccvec = rvec.center - self.center
        dist = np.sqrt(np.add.reduce(ccvec*ccvec))  # vector length
        # check whether the distance is small enough to allow stacking
        if 0.0 < dist < 5.5:
            # check whether the angles are in the allowed range
            nn_angle = self.angle(self.normal, rvec.normal)
            if (nn_angle < 30 or nn_angle > 150):
                n1cc_angle = self.angle(self.normal, ccvec)
                n2cc_angle = self.angle(rvec.normal, ccvec)
                return (dist, nn_angle, n1cc_angle, n2cc_angle)
        return (None, None, None, None)

    def get_stacking(self, rvec):
        """
        Returns dictionary with one of the types
        (<<, >>, <>, ><) for the two residues.
        Or None, if they are not stacked.
        """
        if self.normal is None or rvec.normal is None:
            return None
        distance, nn_ang, n1cc_ang, n2cc_ang = self.calc_angles(rvec)
        if distance and (n1cc_ang < 40 or n1cc_ang > 140
                         or n2cc_ang < 40 or n2cc_ang > 140):
            # find out whether the normals are opposed or straight
            # (pointing in the same direction).
            if nn_ang < 30:
                straight = True
            elif nn_ang > 150:
                straight = False
            else:
                return None  # invalid normal angle
            # find out whether base2 is on top of base1:
            # does following base1's normal bring one closer to base2?
            n1c2 = rvec.center - self.center - self.normal
            n1c2dist = np.sqrt(np.add.reduce(n1c2*n1c2))  # vector length
            is_up = n1c2dist < distance
            stacktype = STACKINGS[(straight, is_up)]
            return stacktype
# *************************************************************************************************
# *************************************************************************************************
# *************************************************************************************************
# *************************************************************************************************
# Driver: finds stacked residue pairs in a structure and prints them in one
# of three output formats (plain / FR3D / old FR3D).
class contacts:
    def __init__(self, structure):
        # structure: parsed PDB structure providing get_residues().
        self.structure = structure
    # -------------------------------------------------------------------------
    def calc_stacking(self, resi_1, resi_2):
        '''Return the stacking type string for two residues, or None.'''
        residue_vector1 = ResidueVector(resi_1)
        residue_vector2 = ResidueVector(resi_2)
        if not residue_vector1.normal_set or not residue_vector2.normal_set:
            # At least one residue is not a recognised nucleotide.
            return None
        else:
            residue_vector1.calculate_vectors()
            residue_vector2.calculate_vectors()
            return residue_vector1.get_stacking(residue_vector2)
    # -------------------------------------------------------------------------
    def run(self,use_fr3d_format=False):
        '''Detect and print every stacked pair found in the structure.'''
        # The OLD fr3d format is kept around but permanently disabled here.
        use_fr3d_format_old = False
        if use_fr3d_format_old:
            print "# stacking"
        elif use_fr3d_format:
            pass
        else:
            print "Stackings -------------------------------------------------------"
        residues = [r for r in self.structure.get_residues()]
        n = len(residues)
        # One reference coordinate per residue: the P atom when present,
        # otherwise the first atom seen.
        r_coord = []
        for r in residues:
            p = None
            for a in r:
                if p is None:
                    p = a.get_coord()
                if a.name.strip() == "P":
                    p = a.get_coord()
            r_coord.append(p)
        # KD-tree over reference coordinates for fast neighbour queries.
        tree = KDTree(r_coord)
        residues_name = [ "%s%s" % (r.get_parent().get_id(), r.get_id()[1]) for r in residues]
        n_types = [r.get_resname().strip() for r in residues]
        num = 0
        for i in xrange(n):
            # Only the 100 nearest residues are candidates; i>=j skips the
            # pair's mirror image and self-pairs.
            for j in tree.query(r_coord[i], k=100)[1]:
                if i>=j or j>=len(residues):
                    continue
                stacking = self.calc_stacking(residues[i], residues[j])
                if stacking is not None:
                    if use_fr3d_format:
                        # example:
                        # "0_A_1_U","s35","0_A_2_G"
                        num += 1
                        n_type_i = n_types[i]
                        n_type_j = n_types[j]
                        # residues_name is "<chain><resnum>"; split back apart.
                        chain_i = residues_name[i][0]
                        num_i = residues_name[i][1:]
                        chain_j = residues_name[j][0]
                        num_j = residues_name[j][1:]
                        print '"1_%s_%s_%s","%s","1_%s_%s_%s"' % (chain_i,num_i,n_type_i,stacking,chain_j,num_j,n_type_j)
                    elif use_fr3d_format_old:
                        # example:
                        # 2210 G1003A(A) - G1003(A) - s53 - 0
                        num += 1
                        n_type_i = n_types[i]
                        n_type_j = n_types[j]
                        print "%-4d %s(%s) - %s(%s) - %s - 0" % (num, residues_name[i],n_type_i,residues_name[j],n_type_j,stacking)
                    else:
                        print "%s-%s : %s" % (residues_name[i],residues_name[j],stacking)
def parse_args():
    """Set up and run command-line option parsing.

    Returns the (parser, options, args) triple from optparse.
    """
    opt_parser = OptionParser(
        description="""some experiments with classifier generation""")
    opt_parser.add_option("--use-fr3d-format",
                          dest="use_fr3d_format",
                          action='store_true',
                          default=False,
                          help="use fr3d output format")
    opt_parser.add_option("--use-fr3d-format-old",
                          dest="use_fr3d_format_old",
                          action='store_true',
                          default=False,
                          help="use fr3d output format (OLD)")
    options, args = opt_parser.parse_args()
    return (opt_parser, options, args)
def process_structures(filename, use_fr3d_format):
    """Parse a (possibly gzipped) PDB file and print its stackings.

    :param filename: path to a PDB file; ``*.gz`` files are decompressed
    :param use_fr3d_format: forwarded to contacts.run()
    """
    # The old pattern "^.*.gz$" had an unescaped dot, so any name merely
    # ending in "gz" (e.g. "foogz") was wrongly treated as gzip-compressed.
    if filename.endswith('.gz'):
        f = gzip.open(filename)
    else:
        f = filename
    parser = PDB.PDBParser()
    structure = parser.get_structure("c", f)
    contacts_obj = contacts(structure)
    contacts_obj.run(use_fr3d_format)
if __name__=="__main__":
(_parser,options,args) = parse_args()
process_structures(args[0],options.use_fr3d_format)
| |
"""Project forms"""
from random import choice
from django import forms
from django.conf import settings
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from guardian.shortcuts import assign
from readthedocs.builds.constants import TAG
from readthedocs.core.utils import trigger_build
from readthedocs.redirects.models import Redirect
from readthedocs.projects import constants
from readthedocs.projects.models import Project, EmailHook, WebHook, Domain
from readthedocs.privacy.loader import AdminPermission
class ProjectForm(forms.ModelForm):
    """Project form

    :param user: If provided, add this user as a project user on save
    """

    required_css_class = "required"

    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user', None)
        super(ProjectForm, self).__init__(*args, **kwargs)

    def save(self, commit=True):
        """Save the project, attaching ``self.user`` as a member if needed."""
        project = super(ProjectForm, self).save(commit)
        if commit and self.user:
            # Only add the user when not already associated with the project.
            already_member = project.users.filter(pk=self.user.pk).exists()
            if not already_member:
                project.users.add(self.user)
        return project
class ProjectTriggerBuildMixin(object):
    """Mixin to trigger build on form save

    This should be replaced with signals instead of calling trigger_build
    explicitly.
    """

    def save(self, commit=True):
        """Trigger build on commit save"""
        project = super(ProjectTriggerBuildMixin, self).save(commit)
        if not commit:
            return project
        # Kick off a documentation build for the freshly-saved project.
        trigger_build(project=project)
        return project
class ProjectBackendForm(forms.Form):
    """Get the import backend"""
    # Name of the import backend selected during the project import wizard.
    backend = forms.CharField()
class ProjectBasicsForm(ProjectForm):
    """Form for basic project fields"""

    class Meta:
        model = Project
        fields = ('name', 'repo', 'repo_type')

    def __init__(self, *args, **kwargs):
        show_advanced = kwargs.pop('show_advanced', False)
        super(ProjectBasicsForm, self).__init__(*args, **kwargs)
        if show_advanced:
            # Optional checkbox letting the user continue to advanced options.
            self.fields['advanced'] = forms.BooleanField(
                required=False,
                label=_('Edit advanced project options'),
            )
        repo_widget = self.fields['repo'].widget
        repo_widget.attrs['placeholder'] = self.placehold_repo()
        repo_widget.attrs['required'] = True

    def clean_name(self):
        """Reject names whose slug collides with an existing project."""
        name = self.cleaned_data.get('name', '')
        if not self.instance.pk:
            potential_slug = slugify(name)
            if Project.objects.filter(slug=potential_slug).exists():
                raise forms.ValidationError(
                    _('Invalid project name, a project already exists with that name'))
        return name

    def clean_repo(self):
        """Basic sanity checks on the repository URL."""
        repo = self.cleaned_data.get('repo', '').strip()
        pvt_repos = getattr(settings, 'ALLOW_PRIVATE_REPOS', False)
        # Shell metacharacters are never valid in a clone URL.
        if '&&' in repo or '|' in repo:
            raise forms.ValidationError(_(u'Invalid character in repo name'))
        if '@' in repo and not pvt_repos:
            raise forms.ValidationError(
                _(u'It looks like you entered a private repo - please use the '
                  u'public (http:// or git://) clone url'))
        return repo

    def placehold_repo(self):
        """Random example clone URL used as the repo field placeholder."""
        examples = (
            'https://bitbucket.org/cherrypy/cherrypy',
            'https://bitbucket.org/birkenfeld/sphinx',
            'https://bitbucket.org/hpk42/tox',
            'https://github.com/zzzeek/sqlalchemy.git',
            'https://github.com/django/django.git',
            'https://github.com/fabric/fabric.git',
            'https://github.com/ericholscher/django-kong.git',
        )
        return choice(examples)
class ProjectExtraForm(ProjectForm):
    """Additional project information form"""

    class Meta:
        model = Project
        # Descriptive metadata only; build configuration lives in
        # ProjectAdvancedForm.
        fields = (
            'description',
            'documentation_type',
            'language', 'programming_language',
            'project_url',
            'tags',
        )
class ProjectAdvancedForm(ProjectTriggerBuildMixin, ProjectForm):
    """Advanced project option form"""

    # Interpreter used when creating the build virtualenv (beta feature).
    python_interpreter = forms.ChoiceField(
        choices=constants.PYTHON_CHOICES, initial='python',
        help_text=_("(Beta) The Python interpreter used to create the virtual "
                    "environment."))

    class Meta:
        model = Project
        fields = (
            # Standard build edits
            'use_virtualenv',
            'requirements_file',
            'single_version',
            'conf_py_file',
            'default_branch',
            'default_version',
            'enable_pdf_build',
            'enable_epub_build',
            # Privacy
            'privacy_level',
            # 'version_privacy_level',
            # Python specific
            'use_system_packages',
            'python_interpreter',
            # Fringe
            'analytics_code',
            # Version Support
            # 'num_major', 'num_minor', 'num_point',
        )

    def clean_conf_py_file(self):
        """Require any configured path to actually point at a conf.py file."""
        filename = self.cleaned_data.get('conf_py_file', '').strip()
        if not filename:
            return filename
        if 'conf.py' not in filename:
            raise forms.ValidationError(
                _('Your configuration file is invalid, make sure it contains '
                  'conf.py in it.'))
        return filename
class UpdateProjectForm(ProjectTriggerBuildMixin, ProjectBasicsForm,
                        ProjectExtraForm):
    # Combined basics + extra metadata form; triggers a build on save via
    # ProjectTriggerBuildMixin.

    class Meta:
        model = Project
        fields = (
            # Basics
            'name', 'repo', 'repo_type',
            # Extra
            # 'allow_comments',
            # 'comment_moderation',
            'description',
            'documentation_type',
            'language', 'programming_language',
            'project_url',
            'tags',
        )
class DualCheckboxWidget(forms.CheckboxInput):
    """Checkbox with link to the version's built documentation"""

    def __init__(self, version, attrs=None, check_test=bool):
        super(DualCheckboxWidget, self).__init__(attrs, check_test)
        self.version = version

    def render(self, name, value, attrs=None):
        """Render the checkbox followed by the built-state icon."""
        pieces = (
            super(DualCheckboxWidget, self).render(name, value, attrs),
            self.render_icon(),
        )
        return mark_safe(u'%s%s' % pieces)

    def render_icon(self):
        """Render the built/uploaded state icon snippet for the version."""
        return render_to_string(
            'projects/includes/icon_built.html',
            {
                'MEDIA_URL': settings.MEDIA_URL,
                'built': self.version.built,
                'uploaded': self.version.uploaded,
                'url': self.version.get_absolute_url(),
            })
class BaseVersionsForm(forms.Form):
    """Form for versions page"""

    def save(self):
        """Persist per-version changes, then the project's default version."""
        versions = self.project.versions.all()
        for version in versions:
            self.save_version(version)
        default_version = self.cleaned_data.get('default-version', None)
        if default_version:
            self.project.default_version = default_version
            self.project.save()

    def save_version(self, version):
        """Save version if there has been a change, trigger a rebuild"""
        new_value = self.cleaned_data.get('version-%s' % version.slug, None)
        privacy_level = self.cleaned_data.get('privacy-%s' % version.slug,
                                              None)
        if ((new_value is None or
             new_value == version.active) and (
                privacy_level is None or
                privacy_level == version.privacy_level)):
            return
        # Only overwrite fields actually present in the submitted data.
        # Previously a change to one field clobbered the other with None.
        if new_value is not None:
            version.active = new_value
        if privacy_level is not None:
            version.privacy_level = privacy_level
        version.save()
        if version.active and not version.built and not version.uploaded:
            trigger_build(project=self.project, version=version)
def build_versions_form(project):
    """Versions form with a list of versions and version privacy levels"""
    attrs = {'project': project}
    versions_qs = project.versions.all()  # Admin page, so show all versions
    active = versions_qs.filter(active=True)
    if active.exists():
        # Let the user pick the default version among the active ones.
        attrs['default-version'] = forms.ChoiceField(
            label=_("Default Version"),
            choices=[(v.slug, v.verbose_name) for v in active],
            initial=project.get_default_version(),
        )
    for version in versions_qs:
        if version.type == TAG:
            label = "%s (%s)" % (version.verbose_name, version.identifier[:8])
        else:
            label = version.verbose_name
        # One activation checkbox and one privacy selector per version.
        attrs['version-%s' % version.slug] = forms.BooleanField(
            label=label,
            widget=DualCheckboxWidget(version),
            initial=version.active,
            required=False,
        )
        attrs['privacy-%s' % version.slug] = forms.ChoiceField(
            # This isn't a real label, but just a slug for the template
            label="privacy",
            choices=constants.PRIVACY_CHOICES,
            initial=version.privacy_level,
        )
    return type('VersionsForm', (BaseVersionsForm,), attrs)
class BaseUploadHTMLForm(forms.Form):
    """Base form for uploading a zip of pre-built HTML for a version."""

    content = forms.FileField(label=_("Zip file of HTML"))
    overwrite = forms.BooleanField(required=False,
                                   label=_("Overwrite existing HTML?"))

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request', None)
        super(BaseUploadHTMLForm, self).__init__(*args, **kwargs)

    def clean(self):
        """Refuse to clobber an active version and require a .zip upload."""
        version_slug = self.cleaned_data['version']
        filename = self.request.FILES['content']
        version = self.project.versions.get(slug=version_slug)

        # Validation
        if version.active and not self.cleaned_data.get('overwrite', False):
            raise forms.ValidationError(_("That version is already active!"))
        # Previously checked endswith('zip'), which also accepted names like
        # "foogz"-style "foozip" that are not actually zip archives.
        if not filename.name.endswith('.zip'):
            raise forms.ValidationError(_("Must upload a zip file."))
        return self.cleaned_data
def build_upload_html_form(project):
    """Upload HTML form with list of versions to upload HTML for"""
    attrs = {'project': project}
    active = project.versions.public()
    if active.exists():
        # Offer every publicly-visible version as an upload target.
        version_choices = [(v.slug, v.verbose_name) for v in active]
        attrs['version'] = forms.ChoiceField(
            label=_("Version of the project you are uploading HTML for"),
            choices=version_choices,
        )
    return type('UploadHTMLForm', (BaseUploadHTMLForm,), attrs)
class SubprojectForm(forms.Form):
    """Project subproject form"""

    subproject = forms.CharField()

    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user')
        self.parent = kwargs.pop('parent')
        super(SubprojectForm, self).__init__(*args, **kwargs)

    def clean_subproject(self):
        """Resolve the submitted slug to a Project the user administers.

        Raises ValidationError when no such project exists or the user is
        not an admin of it; otherwise returns the Project instance (not the
        slug) so ``save`` can use it directly.
        """
        slug = self.cleaned_data['subproject']
        candidates = Project.objects.filter(slug=slug)
        if not candidates.exists():
            raise forms.ValidationError(
                (_("Project %(name)s does not exist") % {'name': slug}))
        project = candidates.first()
        if not AdminPermission.is_admin(self.user, project):
            raise forms.ValidationError(_(
                'You need to be admin of {name} in order to add it as '
                'a subproject.'.format(name=slug)))
        return project

    def save(self):
        """Create and return the parent -> subproject relationship."""
        return self.parent.add_subproject(self.cleaned_data['subproject'])
class UserForm(forms.Form):
    """Project user association form"""

    user = forms.CharField()

    def __init__(self, *args, **kwargs):
        self.project = kwargs.pop('project', None)
        super(UserForm, self).__init__(*args, **kwargs)

    def clean_user(self):
        """Resolve the username, stashing the User instance on the form."""
        username = self.cleaned_data['user']
        matches = User.objects.filter(username=username)
        if not matches.exists():
            raise forms.ValidationError(
                _("User %(name)s does not exist") % {'name': username})
        self.user = matches[0]
        return username

    def save(self):
        """Attach the validated user to the project with view permission."""
        self.project.users.add(self.user)
        # Force update of permissions
        assign('view_project', self.user, self.project)
        return self.user
class EmailHookForm(forms.Form):
    """Project email notification form"""

    email = forms.EmailField()

    def __init__(self, *args, **kwargs):
        self.project = kwargs.pop('project', None)
        super(EmailHookForm, self).__init__(*args, **kwargs)

    def clean_email(self):
        """Fetch or create the EmailHook for this address + project."""
        hook, _created = EmailHook.objects.get_or_create(
            email=self.cleaned_data['email'], project=self.project)
        self.email = hook
        return self.email

    def save(self):
        """Register the hook as a notification target; returns the project."""
        self.project.emailhook_notifications.add(self.email)
        return self.project
class WebHookForm(forms.Form):
    """Project webhook form"""

    url = forms.URLField()

    def __init__(self, *args, **kwargs):
        self.project = kwargs.pop('project', None)
        super(WebHookForm, self).__init__(*args, **kwargs)

    def clean_url(self):
        """Fetch or create the WebHook for this URL + project."""
        hook, _created = WebHook.objects.get_or_create(
            url=self.cleaned_data['url'], project=self.project)
        self.webhook = hook
        return self.webhook

    def save(self):
        """Register the webhook as a notification target; returns the project."""
        self.project.webhook_notifications.add(self.webhook)
        return self.project
class TranslationForm(forms.Form):
    """Project translation form"""

    project = forms.CharField()

    def __init__(self, *args, **kwargs):
        self.parent = kwargs.pop('parent', None)
        super(TranslationForm, self).__init__(*args, **kwargs)

    def clean_project(self):
        """Resolve the slug to a Project, stashed as ``self.subproject``."""
        translation_slug = self.cleaned_data['project']
        matches = Project.objects.filter(slug=translation_slug)
        if not matches.exists():
            raise forms.ValidationError(
                (_("Project %(name)s does not exist")
                 % {'name': translation_slug}))
        self.subproject = matches[0]
        return translation_slug

    def save(self):
        """Attach the validated project as a translation of the parent."""
        return self.parent.translations.add(self.subproject)
class RedirectForm(forms.ModelForm):
    """Form for project redirects"""

    class Meta:
        model = Redirect
        fields = ['redirect_type', 'from_url', 'to_url']

    def __init__(self, *args, **kwargs):
        self.project = kwargs.pop('project', None)
        super(RedirectForm, self).__init__(*args, **kwargs)

    def save(self, **_ignored):
        """Create the redirect directly on the stashed project.

        Deliberately bypasses ModelForm.save() so the new Redirect is always
        bound to ``self.project``.
        """
        data = self.cleaned_data
        return Redirect.objects.create(
            project=self.project,
            redirect_type=data['redirect_type'],
            from_url=data['from_url'],
            to_url=data['to_url'],
        )
class DomainForm(forms.ModelForm):
    """Form to add or edit a custom domain for a project."""

    project = forms.CharField(widget=forms.HiddenInput(), required=False)

    class Meta:
        model = Domain
        exclude = ['machine', 'cname', 'count']

    def __init__(self, *args, **kwargs):
        self.project = kwargs.pop('project', None)
        super(DomainForm, self).__init__(*args, **kwargs)

    def clean_project(self):
        """Always use the project stashed at construction time."""
        return self.project

    def clean_canonical(self):
        """Allow at most one canonical domain per project."""
        canonical = self.cleaned_data['canonical']
        # Use .get(): if the 'url' field failed validation it is absent from
        # cleaned_data and the old subscript raised KeyError (a 500 error).
        url = self.cleaned_data.get('url')
        if canonical and Domain.objects.filter(
                project=self.project, canonical=True
        ).exclude(url=url).exists():
            raise forms.ValidationError(_(u'Only 1 Domain can be canonical at a time.'))
        return canonical
| |
"""Utility functions used by projects"""
import fnmatch
import os
import subprocess
import traceback
import logging
from httplib2 import Http
import redis
from django.conf import settings
from django.core.cache import cache
log = logging.getLogger(__name__)
def version_from_slug(slug, version):
    """Fetch a Version either over the REST API or straight from the DB.

    The ``DONT_HIT_DB`` setting (default True) decides which path is used.
    Imports are local to avoid circular imports.
    """
    from readthedocs.builds.models import Version
    from readthedocs.projects import tasks
    from readthedocs.restapi.client import api
    if not getattr(settings, 'DONT_HIT_DB', True):
        return Version.objects.get(project__slug=slug, slug=version)
    version_data = api.version().get(project=slug, slug=version)['results'][0]
    return tasks.make_api_version(version_data)
def symlink(project):
    """This is here to avoid circular imports in models.py"""
    from readthedocs.projects import symlinks
    log.info("Symlinking %s", project)
    # Always refresh the cname/translation/subproject links.
    for link_fn in (symlinks.symlink_cnames,
                    symlinks.symlink_translations,
                    symlinks.symlink_subprojects):
        link_fn(project)
    # Single-version projects get a flattened layout; otherwise remove it.
    if project.single_version:
        symlinks.symlink_single_version(project)
    else:
        symlinks.remove_symlink_single_version(project)
def update_static_metadata(project_pk):
    """This is here to avoid circular imports in models.py"""
    # Defer to the task queue so the metadata write happens asynchronously.
    from readthedocs.projects import tasks
    tasks.update_static_metadata.delay(project_pk)
def find_file(filename):
    """Recursively find matching file from the current working path

    :param filename: Filename (fnmatch pattern) to match
    :returns: A list of matching file paths.
    """
    matches = []
    for root, __, filenames in os.walk('.'):
        # Use a distinct loop variable: the original rebound ``filename``,
        # clobbering the search pattern for every subsequent directory.
        for matched in fnmatch.filter(filenames, filename):
            matches.append(os.path.join(root, matched))
    return matches
def run(*commands, **kwargs):
    """Run one or more commands

    If more than one command is given, then this is equivalent to
    chaining them together with ``&&``; if all commands succeed, then
    ``(status, out, err)`` will represent the last successful command.
    If one command failed, then ``(status, out, err)`` will represent
    the failed command.

    :param commands: one or more command strings
    :param shell: keyword-only; run the command through the shell
    :returns: ``(status, out, err)``
    """
    environment = os.environ.copy()
    environment['READTHEDOCS'] = 'True'
    # Strip variables that would leak our own Python environment into the
    # subprocess (e.g. breaks pip installs inside the build virtualenv).
    for env_var in ('DJANGO_SETTINGS_MODULE', 'PYTHONPATH', 'PYTHONHOME'):
        if env_var in environment:
            del environment[env_var]
    cwd = os.getcwd()
    if not commands:
        raise ValueError("run() requires one or more command-line strings")
    shell = kwargs.get('shell', False)

    for command in commands:
        if shell:
            log.info("Running commands in a shell")
            run_command = command
        else:
            run_command = command.split()
        log.info("Running: '%s' [%s]", command, cwd)
        try:
            p = subprocess.Popen(run_command, shell=shell, cwd=cwd,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE, env=environment)
            out, err = p.communicate()
            ret = p.returncode
        except OSError:
            out = ''
            err = traceback.format_exc()
            ret = -1
            log.error("Command failed", exc_info=True)
        if ret != 0:
            # Stop at the first failure so the returned triple describes the
            # failed command, as documented.  (Previously the loop kept going
            # and returned the *last* command's result instead.)
            break

    return (ret, out, err)
def safe_write(filename, contents):
    """Normalize and write to filename

    Write ``contents`` to the given ``filename``. If the filename's
    directory does not exist, it is created. Contents are written as UTF-8,
    ignoring any characters that cannot be encoded as UTF-8.

    :param filename: Filename to write to
    :param contents: File contents to write to file
    """
    dirname = os.path.dirname(filename)
    # Guard the empty-dirname case: os.makedirs('') raises for bare
    # filenames with no directory component.
    if dirname and not os.path.exists(dirname):
        os.makedirs(dirname)
    # Binary mode: we write already-encoded UTF-8 bytes.  (The old version
    # opened in text mode and also called close() inside the with-block,
    # which the context manager already handles.)
    with open(filename, 'wb') as fh:
        fh.write(contents.encode('utf-8', 'ignore'))
def purge_version(version, mainsite=False, subdomain=False, cname=False):
    """Issue Varnish PURGE requests for every URL a version is served under.

    No-op unless the ``VARNISH_SERVERS`` setting is configured.  The flags
    select which hostnames to purge: the project subdomain, the main site,
    and/or any custom CNAMEs recorded in redis.
    """
    varnish_servers = getattr(settings, 'VARNISH_SERVERS', None)
    h = Http()
    if varnish_servers:
        for server in varnish_servers:
            if subdomain:
                # Send a request to the Server, to purge the URL of the Host.
                host = "%s.readthedocs.org" % version.project.slug
                headers = {'Host': host}
                url = "/en/%s/*" % version.slug
                to_purge = "http://%s%s" % (server, url)
                log.info("Purging %s on %s", url, host)
                h.request(to_purge, method="PURGE", headers=headers)
            if mainsite:
                headers = {'Host': "readthedocs.org"}
                url = "/docs/%s/en/%s/*" % (version.project.slug, version.slug)
                to_purge = "http://%s%s" % (server, url)
                log.info("Purging %s on readthedocs.org", url)
                h.request(to_purge, method="PURGE", headers=headers)
                # Also purge the project's root docs listing.
                root_url = "/docs/%s/" % version.project.slug
                to_purge = "http://%s%s" % (server, root_url)
                log.info("Purging %s on readthedocs.org", root_url)
                h.request(to_purge, method="PURGE", headers=headers)
            if cname:
                try:
                    redis_client = cache.get_client(None)
                    # Custom domains for the project live in a redis set.
                    for cnamed in redis_client.smembers('rtd_slug:v1:%s'
                                                        % version.project.slug):
                        headers = {'Host': cnamed}
                        url = "/en/%s/*" % version.slug
                        to_purge = "http://%s%s" % (server, url)
                        log.info("Purging %s on %s", url, cnamed)
                        h.request(to_purge, method="PURGE", headers=headers)
                        root_url = "/"
                        to_purge = "http://%s%s" % (server, root_url)
                        log.info("Purging %s on %s", root_url, cnamed)
                        h.request(to_purge, method="PURGE", headers=headers)
                except (AttributeError, redis.exceptions.ConnectionError):
                    # Best effort: an unreachable redis must not break purging.
                    pass
class DictObj(object):
    """Attribute-style access onto the instance dict.

    Missing attributes resolve to None instead of raising AttributeError.
    """

    def __getattr__(self, attr):
        # Only called for attributes NOT found normally, so set attributes
        # behave as usual and unknown ones fall back to None.
        return self.__dict__.get(attr)
# Prevent saving the temporary Project instance
def _new_save(*dummy_args, **dummy_kwargs):
log.warning("Called save on a non-real object.")
return 0
def make_api_version(version_data):
    """Make mock Version instance from API return"""
    from readthedocs.builds.models import Version
    # Strip read-only / hypermedia keys the model constructor can't take.
    for key in ('resource_uri', 'absolute_url', 'downloads'):
        version_data.pop(key, None)
    # The nested project payload becomes a mock Project instance.
    version_data['project'] = make_api_project(version_data['project'])
    ver = Version(**version_data)
    # Ensure the mock can never be persisted by accident.
    ver.save = _new_save
    return ver
def make_api_project(project_data):
    """Make mock Project instance from API return"""
    from readthedocs.projects.models import Project
    # Strip relational / hypermedia keys the model constructor can't take.
    for key in ('users', 'resource_uri', 'absolute_url', 'downloads',
                'main_language_project', 'related_projects'):
        project_data.pop(key, None)
    project = Project(**project_data)
    # Ensure the mock can never be persisted by accident.
    project.save = _new_save
    return project
| |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script tests the installer with test cases specified in the config file.
For each test case, it checks that the machine states after the execution of
each command match the expected machine states. For more details, take a look at
the design documentation at http://goo.gl/Q0rGM6
"""
import argparse
import datetime
import inspect
import json
import os
import subprocess
import sys
import time
import traceback
import unittest
import _winreg
from variable_expander import VariableExpander
import verifier_runner
def LogMessage(message):
  """Logs a message to stderr.

  Args:
    message: The message string to be logged.
  """
  # Identify the caller so the log line points at the call site, not here.
  caller = inspect.getframeinfo(inspect.currentframe().f_back)
  stamp = datetime.datetime.now().strftime('%m%d/%H%M%S')
  sys.stderr.write('[%s:%s(%s)] %s\n' % (
      stamp, os.path.basename(caller.filename), caller.lineno, message))
class Config:
  """Describes the machine states, actions, and test cases.

  Attributes:
    states: A dictionary where each key is a state name and the associated
        value is a property dictionary describing that state.
    actions: A dictionary where each key is an action name and the
        associated value is the action's command.
    tests: An array of test cases.
  """

  def __init__(self):
    # Start empty; the config parser populates these from the JSON file.
    self.states, self.actions, self.tests = {}, {}, []
class InstallerTest(unittest.TestCase):
  """Tests a test case in the config file."""

  def __init__(self, name, test, config, variable_expander, quiet):
    """Constructor.

    Args:
      name: The name of this test.
      test: An array of alternating state names and action names, starting and
          ending with state names.
      config: The Config object.
      variable_expander: A VariableExpander object.
      quiet: If true, suppress the per-action/per-state log messages.
    """
    super(InstallerTest, self).__init__()
    self._name = name
    self._test = test
    self._config = config
    self._variable_expander = variable_expander
    self._quiet = quiet
    self._verifier_runner = verifier_runner.VerifierRunner()
    # Cleared at the end of a successful run so tearDown() only cleans the
    # machine after a failure.
    self._clean_on_teardown = True

  def __str__(self):
    """Returns a string representing the test case.

    Returns:
      A string created by joining state names and action names together with
      ' -> ', for example, 'Test: clean -> install chrome -> chrome_installed'.
    """
    return '%s: %s\n' % (self._name, ' -> '.join(self._test))

  def id(self):
    """Returns the name of the test."""
    # Overridden from unittest.TestCase so that id() contains the name of the
    # test case from the config file in place of the name of this class's test
    # function.
    return unittest.TestCase.id(self).replace(self._testMethodName, self._name)

  def runTest(self):
    """Run the test case."""
    # |test| is an array of alternating state names and action names, starting
    # and ending with state names. Therefore, its length must be odd.
    self.assertEqual(1, len(self._test) % 2,
                     'The length of test array must be odd')
    # Verify the starting state before performing any action.
    state = self._test[0]
    self._VerifyState(state)
    # Starting at index 1, we loop through pairs of (action, state).
    for i in range(1, len(self._test), 2):
      action = self._test[i]
      if not self._quiet:
        LogMessage('Beginning action %s' % action)
      RunCommand(self._config.actions[action], self._variable_expander)
      if not self._quiet:
        LogMessage('Finished action %s' % action)
      state = self._test[i + 1]
      self._VerifyState(state)
    # If the test makes it here, it means it was successful, because RunCommand
    # and _VerifyState throw an exception on failure.
    self._clean_on_teardown = False

  def tearDown(self):
    """Cleans up the machine if the test case fails."""
    if self._clean_on_teardown:
      RunCleanCommand(True, self._variable_expander)

  def shortDescription(self):
    """Overridden from unittest.TestCase.

    We return None as the short description to suppress its printing.
    The default implementation of this method returns the docstring of the
    runTest method, which is not useful since it's the same for every test case.
    The description from the __str__ method is informative enough.
    """
    return None

  def _VerifyState(self, state):
    """Verifies that the current machine state matches a given state.

    Args:
      state: A state name.
    """
    if not self._quiet:
      LogMessage('Verifying state %s' % state)
    try:
      self._verifier_runner.VerifyAll(self._config.states[state],
                                      self._variable_expander)
    except AssertionError as e:
      # If an AssertionError occurs, we intercept it and add the state name
      # to the error message so that we know where the test fails.
      raise AssertionError("In state '%s', %s" % (state, e))
def RunCommand(command, variable_expander):
  """Runs the given command from the current file's directory.

  This function throws an Exception if the command returns with non-zero exit
  status.

  Args:
    command: A command to run. It is expanded using Expand.
    variable_expander: A VariableExpander object.
  """
  cmd = variable_expander.Expand(command)
  script_dir = os.path.dirname(os.path.abspath(__file__))
  status = subprocess.call(cmd, shell=True, cwd=script_dir)
  if status != 0:
    raise Exception('Command %s returned non-zero exit status %s' % (
        cmd, status))
def DeleteGoogleUpdateRegistration(system_level, registry_subkey,
                                   variable_expander):
  """Deletes Chrome's registration with Google Update.

  Deletes the 'pv' value under the (expanded) product subkey in the 32-bit
  registry view. Missing keys are treated as already-clean.

  Args:
    system_level: True if system-level Chrome is to be deleted.
    registry_subkey: The pre-expansion registry subkey for the product.
    variable_expander: A VariableExpander object.
  """
  root = (_winreg.HKEY_LOCAL_MACHINE if system_level
          else _winreg.HKEY_CURRENT_USER)
  key_name = variable_expander.Expand(registry_subkey)
  try:
    # KEY_WOW64_32KEY forces the 32-bit registry view -- presumably where
    # Google Update keeps its registrations on 64-bit Windows; verify.
    key_handle = _winreg.OpenKey(root, key_name, 0,
                                 _winreg.KEY_SET_VALUE |
                                 _winreg.KEY_WOW64_32KEY)
    _winreg.DeleteValue(key_handle, 'pv')
  except WindowsError:
    # The key isn't present, so there is no value to delete.
    pass
def RunCleanCommand(force_clean, variable_expander):
  """Puts the machine in the clean state (i.e. Chrome not installed).

  Uninstall failures are logged and otherwise ignored: cleanup is
  best-effort.

  Args:
    force_clean: A boolean indicating whether to force cleaning existing
        installations.
    variable_expander: A VariableExpander object.
  """
  # A list of (system_level, product_name, product_switch, registry_subkey)
  # tuples for the possible installed products.
  data = [
      (False, '$CHROME_LONG_NAME', '',
       '$CHROME_UPDATE_REGISTRY_SUBKEY'),
      (True, '$CHROME_LONG_NAME', '--system-level',
       '$CHROME_UPDATE_REGISTRY_SUBKEY'),
  ]
  if variable_expander.Expand('$SUPPORTS_SXS') == 'True':
    data.append((False, '$CHROME_LONG_NAME_SXS', '',
                 '$CHROME_UPDATE_REGISTRY_SUBKEY_SXS'))
  interactive_option = '--interactive' if not force_clean else ''
  for system_level, product_name, product_switch, registry_subkey in data:
    command = ('python uninstall_chrome.py '
               '--chrome-long-name="%s" '
               '--no-error-if-absent %s %s' %
               (product_name, product_switch, interactive_option))
    try:
      RunCommand(command, variable_expander)
    except Exception:
      # BUG FIX: this was a bare `except:`, which also swallowed
      # KeyboardInterrupt and SystemExit. Log the failure and continue,
      # but let process-exiting exceptions propagate.
      message = traceback.format_exception(*sys.exc_info())
      message.insert(0, 'Error cleaning up an old install with:\n')
      LogMessage(''.join(message))
    if force_clean:
      DeleteGoogleUpdateRegistration(system_level, registry_subkey,
                                     variable_expander)
def MergePropertyDictionaries(current_property, new_property):
  """Merges the new property dictionary into the current property dictionary.

  This is different from general dictionary merging in that, in case there are
  keys with the same name, we merge values together in the first level, and we
  override earlier values in the second level. For more details, take a look at
  http://goo.gl/uE0RoR

  Args:
    current_property: The property dictionary to be modified.
    new_property: The new property dictionary.
  """
  # NOTE: iteritems() and dict(a.items() + b.items()) were Python-2-only;
  # items() plus copy-and-update behave identically and also run on Python 3.
  for key, value in new_property.items():
    if key not in current_property:
      current_property[key] = value
    else:
      assert(isinstance(current_property[key], dict) and
             isinstance(value, dict))
      # This merges two dictionaries together. In case there are keys with
      # the same name, the latter will override the former.
      merged = dict(current_property[key])
      merged.update(value)
      current_property[key] = merged
def FilterConditionalElem(elem, condition_name, variable_expander):
  """Returns True if a conditional element should be processed.

  Args:
    elem: A dictionary.
    condition_name: The name of the condition property in |elem|.
    variable_expander: A variable expander used to evaluate conditions.

  Returns:
    True if |elem| should be processed.
  """
  if condition_name not in elem:
    return True
  expanded_condition = variable_expander.Expand(elem[condition_name])
  # Evaluate with a stripped-down namespace so the expression can only
  # reference the boolean literals.
  restricted_globals = {'__builtins__': {'False': False, 'True': True}}
  return eval(expanded_condition, restricted_globals)
def ParsePropertyFiles(directory, filenames, variable_expander):
  """Parses an array of .prop files.

  Args:
    directory: The directory where the Config file and all Property files
        reside in.
    filenames: An array of Property filenames.
    variable_expander: A variable expander used to evaluate conditions.

  Returns:
    A property dictionary created by merging all property dictionaries
    specified in the array.
  """
  current_property = {}
  for filename in filenames:
    path = os.path.join(directory, filename)
    # BUG FIX: json.load(open(path)) leaked the file handle until GC;
    # use a context manager so it is closed promptly.
    with open(path) as prop_file:
      new_property = json.load(prop_file)
    if not FilterConditionalElem(new_property, 'Condition', variable_expander):
      continue
    # Remove any Condition from the property dict before merging since it
    # serves no purpose from here on out.
    if 'Condition' in new_property:
      del new_property['Condition']
    MergePropertyDictionaries(current_property, new_property)
  return current_property
def ParseConfigFile(filename, variable_expander):
  """Parses a .config file.

  Args:
    filename: A Config filename.
    variable_expander: A VariableExpander object used to evaluate test
        conditions.

  Returns:
    A Config object.
  """
  with open(filename, 'r') as fp:
    config_data = json.load(fp)
  directory = os.path.dirname(os.path.abspath(filename))
  config = Config()
  config.tests = config_data['tests']
  # Drop conditional tests that should not be run in the current configuration.
  config.tests = filter(lambda t: FilterConditionalElem(t, 'condition',
                                                        variable_expander),
                        config.tests)
  # NOTE(review): 'states' and 'actions' are unpacked as (name, value) pairs,
  # so the config JSON presumably stores them as arrays of two-element
  # arrays rather than objects -- verify against the config format doc.
  for state_name, state_property_filenames in config_data['states']:
    config.states[state_name] = ParsePropertyFiles(directory,
                                                   state_property_filenames,
                                                   variable_expander)
  for action_name, action_command in config_data['actions']:
    config.actions[action_name] = action_command
  return config
def main():
  """Parses options, builds the installer test suite, runs it, reports.

  Returns:
    Process exit code: 0 when every test passed, 1 otherwise.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--build-dir', default='out',
                      help='Path to main build directory (the parent of the '
                      'Release or Debug directory)')
  parser.add_argument('--target', default='Release',
                      help='Build target (Release or Debug)')
  parser.add_argument('--force-clean', action='store_true', default=False,
                      help='Force cleaning existing installations')
  parser.add_argument('-q', '--quiet', action='store_true', default=False,
                      help='Reduce test runner output')
  parser.add_argument('--write-full-results-to', metavar='FILENAME',
                      help='Path to write the list of full results to.')
  parser.add_argument('--config', metavar='FILENAME',
                      help='Path to test configuration file')
  parser.add_argument('test', nargs='*',
                      help='Name(s) of tests to run.')
  args = parser.parse_args()
  if not args.config:
    parser.error('missing mandatory --config FILENAME argument')
  # Both the current and the next-version installers must have been built.
  mini_installer_path = os.path.join(args.build_dir, args.target,
                                     'mini_installer.exe')
  assert os.path.exists(mini_installer_path), ('Could not find file %s' %
                                               mini_installer_path)
  next_version_mini_installer_path = os.path.join(
      args.build_dir, args.target, 'next_version_mini_installer.exe')
  assert os.path.exists(next_version_mini_installer_path), (
      'Could not find file %s' % next_version_mini_installer_path)
  suite = unittest.TestSuite()
  variable_expander = VariableExpander(mini_installer_path,
                                       next_version_mini_installer_path)
  config = ParseConfigFile(args.config, variable_expander)
  # Start every run from a known-clean machine state.
  RunCleanCommand(args.force_clean, variable_expander)
  for test in config.tests:
    # If tests were specified via |tests|, their names are formatted like so:
    test_name = '%s/%s/%s' % (InstallerTest.__module__,
                              InstallerTest.__name__,
                              test['name'])
    if not args.test or test_name in args.test:
      suite.addTest(InstallerTest(test['name'], test['traversal'], config,
                                  variable_expander, args.quiet))
  verbosity = 2 if not args.quiet else 1
  result = unittest.TextTestRunner(verbosity=verbosity).run(suite)
  # Optionally dump the Chromium JSON test-results format for dashboards.
  if args.write_full_results_to:
    with open(args.write_full_results_to, 'w') as fp:
      json.dump(_FullResults(suite, result, {}), fp, indent=2)
      fp.write('\n')
  return 0 if result.wasSuccessful() else 1
# TODO(dpranke): Find a way for this to be shared with the mojo and other tests.
# Separator between components of a hierarchical test name, and the
# path_delimiter advertised in the JSON results.
TEST_SEPARATOR = '.'
def _FullResults(suite, result, metadata):
  """Convert the unittest results to the Chromium JSON test result format.

  This matches run-webkit-tests (the layout tests) and the flakiness dashboard.
  """
  full_results = {
      'interrupted': False,
      'path_delimiter': TEST_SEPARATOR,
      'version': 3,
      'seconds_since_epoch': time.time(),
  }
  # Fold "key=value" metadata strings into the top-level dict.
  for md in metadata:
    key, val = md.split('=', 1)
    full_results[key] = val

  all_test_names = _AllTestNames(suite)
  failed_test_names = _FailedTestNames(result)
  full_results['num_failures_by_type'] = {
      'FAIL': len(failed_test_names),
      'PASS': len(all_test_names) - len(failed_test_names),
  }

  # Per-test outcomes are stored as a trie keyed on name components.
  trie = {}
  full_results['tests'] = trie
  for test_name in all_test_names:
    outcome = {'expected': 'PASS'}
    if test_name in failed_test_names:
      outcome['actual'] = 'FAIL'
      outcome['is_unexpected'] = True
    else:
      outcome['actual'] = 'PASS'
    _AddPathToTrie(trie, test_name, outcome)

  return full_results
def _AllTestNames(suite):
test_names = []
# _tests is protected pylint: disable=W0212
for test in suite._tests:
if isinstance(test, unittest.suite.TestSuite):
test_names.extend(_AllTestNames(test))
else:
test_names.append(test.id())
return test_names
def _FailedTestNames(result):
return set(test.id() for test, _ in result.failures + result.errors)
def _AddPathToTrie(trie, path, value):
  """Stores |value| in |trie| under the TEST_SEPARATOR-delimited |path|."""
  parts = path.split(TEST_SEPARATOR)
  node = trie
  # Walk/create intermediate levels, then bind the leaf.
  for directory in parts[:-1]:
    node = node.setdefault(directory, {})
  node[parts[-1]] = value
# Script entry point: exit status mirrors the suite result.
if __name__ == '__main__':
  sys.exit(main())
| |
from __future__ import with_statement
import sys, os, re
import subprocess
import py
import tox
from tox._config import DepConfig
class CreationConfig:
    """Snapshot of the settings a virtualenv was created with.

    Serialized to/from a small text file so tox can decide whether an
    existing environment still matches its configuration.
    """

    def __init__(self, md5, python, version, distribute, sitepackages,
                 develop, deps):
        self.md5 = md5                  # digest of the interpreter executable
        self.python = python            # path of the interpreter
        self.version = version          # tox version that created the env
        self.distribute = distribute
        self.sitepackages = sitepackages
        self.develop = develop
        self.deps = deps                # list of (digest, depstring) pairs

    def writeconfig(self, path):
        """Writes this config to ``path`` (creating the file if needed)."""
        lines = ["%s %s" % (self.md5, self.python)]
        lines.append("%s %d %d %d" % (self.version, self.distribute,
                                      self.sitepackages, self.develop))
        for dep in self.deps:
            lines.append("%s %s" % dep)
        path.ensure()
        path.write("\n".join(lines))

    @classmethod
    def readconfig(cls, path):
        """Reads a config previously written by writeconfig().

        Returns None when the file is missing or malformed (best-effort).
        """
        try:
            lines = path.readlines(cr=0)
            value = lines.pop(0).split(None, 1)
            md5, python = value
            version, distribute, sitepackages, develop = lines.pop(0).split(
                None, 3)
            distribute = bool(int(distribute))
            sitepackages = bool(int(sitepackages))
            develop = bool(int(develop))
            deps = []
            for line in lines:
                # BUG FIX: the per-dependency digest used to be read into
                # ``md5``, clobbering the interpreter digest parsed from the
                # header line whenever deps were present.
                dep_md5, depstring = line.split(None, 1)
                deps.append((dep_md5, depstring))
            return CreationConfig(md5, python, version,
                                  distribute, sitepackages, develop, deps)
        except KeyboardInterrupt:
            raise
        except:
            # Any parse error means "no usable cached config".
            return None

    def matches(self, other):
        """True if ``other`` holds exactly the same creation settings."""
        return (other and self.md5 == other.md5
                and self.python == other.python
                and self.version == other.version
                and self.distribute == other.distribute
                and self.sitepackages == other.sitepackages
                and self.develop == other.develop
                and self.deps == other.deps)
class VirtualEnv(object):
    """A single tox test environment backed by a virtualenv on disk.

    Wraps creation/recreation of the virtualenv, dependency and package
    installation, and running the configured test commands, reporting
    through the owning session.
    """

    def __init__(self, envconfig=None, session=None):
        # envconfig: per-environment settings parsed from tox.ini.
        # session: tox session used for actions, reporting and helpers.
        self.envconfig = envconfig
        self.session = session
        self.path = envconfig.envdir
        # File remembering the settings this venv was created with; used
        # by update() to decide between reuse and recreation.
        self.path_config = self.path.join(".tox-config1")

    @property
    def name(self):
        # The environment name (e.g. "py27").
        return self.envconfig.envname

    def __repr__(self):
        return "<VirtualEnv at %r>" %(self.path)

    def getcommandpath(self, name=None, venv=True, cwd=None):
        """Resolves ``name`` to an executable path.

        Order: the env's python when ``name`` is None, absolute paths
        as-is, "./"-relative paths against ``cwd``, the venv's bin dir
        (when ``venv``), then the system PATH. A PATH hit from outside
        the venv warns unless whitelisted.

        Raises tox.exception.InvocationError when nothing is found.
        """
        if name is None:
            return self.envconfig.envpython
        name = str(name)
        if os.path.isabs(name):
            return name
        if os.path.split(name)[0] == ".":
            p = cwd.join(name)
            if p.check():
                return str(p)
        p = None
        if venv:
            p = py.path.local.sysfind(name, paths=[self.envconfig.envbindir])
        if p is not None:
            return p
        p = py.path.local.sysfind(name)
        if p is None:
            raise tox.exception.InvocationError(
                "could not find executable %r" % (name,))
        # p is not found in virtualenv script/bin dir
        if venv:
            if not self.is_allowed_external(p):
                self.session.report.warning(
                    "test command found but not installed in testenv\n"
                    "  cmd: %s\n"
                    "  env: %s\n"
                    "Maybe forgot to specify a dependency?" % (p,
                        self.envconfig.envdir))
        return str(p) # will not be rewritten for reporting

    def is_allowed_external(self, p):
        """Checks executable ``p`` against the whitelist_externals patterns."""
        tryadd = [""]
        if sys.platform == "win32":
            # Also try the Windows executable extensions (.EXE, .BAT, ...).
            tryadd += [os.path.normcase(x)
                       for x in os.environ['PATHEXT'].split(os.pathsep)]
            p = py.path.local(os.path.normcase(str(p)))
        for x in self.envconfig.whitelist_externals:
            for add in tryadd:
                if p.fnmatch(x + add):
                    return True
        return False

    def _ispython3(self):
        # Crude check based on the configured interpreter name.
        return "python3" in str(self.envconfig.basepython)

    def update(self, action=None):
        """ return status string for updating actual venv to match configuration.
        if status string is empty, all is ok.
        """
        if action is None:
            action = self.session.newaction(self, "update")
        # NOTE(review): ``report`` and ``name`` are assigned but never used.
        report = self.session.report
        name = self.envconfig.envname
        rconfig = CreationConfig.readconfig(self.path_config)
        if not self.envconfig.recreate and rconfig and \
           rconfig.matches(self._getliveconfig()):
            # Cached creation config still matches: keep the existing venv.
            action.info("reusing", self.envconfig.envdir)
            return
        if rconfig is None:
            action.setactivity("create", self.envconfig.envdir)
        else:
            action.setactivity("recreate", self.envconfig.envdir)
        try:
            self.create(action)
        except tox.exception.UnsupportedInterpreter:
            return sys.exc_info()[1]
        except tox.exception.InterpreterNotFound:
            return sys.exc_info()[1]
        try:
            self.install_deps(action)
        except tox.exception.InvocationError:
            # NOTE(review): the captured exception ``v`` is unused.
            v = sys.exc_info()[1]
            return "could not install deps %s" %(self.envconfig.deps,)

    def _getliveconfig(self):
        """Builds a CreationConfig snapshot of the current configuration."""
        python = self.envconfig._basepython_info.executable
        md5 = getdigest(python)
        version = tox.__version__
        distribute = self.envconfig.distribute
        sitepackages = self.envconfig.sitepackages
        develop = self.envconfig.develop
        deps = []
        for dep in self._getresolvedeps():
            raw_dep = dep.name
            # NOTE(review): this reassignment clobbers the interpreter digest
            # computed above, so the final CreationConfig.md5 is the last
            # dep's digest whenever deps exist -- looks unintended; verify.
            md5 = getdigest(raw_dep)
            deps.append((md5, raw_dep))
        return CreationConfig(md5, python, version,
                              distribute, sitepackages, develop, deps)

    def _getresolvedeps(self):
        """Returns deps with default-indexserver names resolved via session."""
        l = []
        for dep in self.envconfig.deps:
            if dep.indexserver is None:
                res = self.session._resolve_pkg(dep.name)
                if res != dep.name:
                    dep = dep.__class__(res)
            l.append(dep)
        return l

    def getsupportedinterpreter(self):
        return self.envconfig.getsupportedinterpreter()

    def create(self, action=None):
        """Creates the virtualenv via ``<sys.executable> -mvirtualenv``."""
        #if self.getcommandpath("activate").dirpath().check():
        #    return
        if action is None:
            action = self.session.newaction(self, "create")
        config_interpreter = self.getsupportedinterpreter()
        args = [sys.executable, '-mvirtualenv']
        if self.envconfig.distribute:
            args.append("--distribute")
        else:
            args.append("--setuptools")
        if self.envconfig.sitepackages:
            args.append('--system-site-packages')
        # add interpreter explicitly, to prevent using
        # default (virtualenv.ini)
        args.extend(['--python', str(config_interpreter)])
        #if sys.platform == "win32":
        #    f, path, _ = py.std.imp.find_module("virtualenv")
        #    f.close()
        #    args[:1] = [str(config_interpreter), str(path)]
        #else:
        self.session.make_emptydir(self.path)
        basepath = self.path.dirpath()
        basepath.ensure(dir=1)
        args.append(self.path.basename)
        self._pcall(args, venv=False, action=action, cwd=basepath)
        # Marks the env as fresh so the next package install runs in
        # full (with-deps) mode; see developpkg()/installpkg().
        self.just_created = True

    def finish(self):
        """Persists the live creation config for later reuse checks."""
        self._getliveconfig().writeconfig(self.path_config)

    def _needs_reinstall(self, setupdir, action):
        """True if setup.py/setup.cfg changed since the egg-info was built."""
        setup_py = setupdir.join('setup.py')
        setup_cfg = setupdir.join('setup.cfg')
        args = [self.envconfig.envpython, str(setup_py), '--name']
        # Ask setup.py for the project name to locate its egg-info dir.
        output = action.popen(args, cwd=setupdir, redirect=False,
                              returnout=True)
        name = output.strip()
        egg_info = setupdir.join('.'.join((name, 'egg-info')))
        for conf_file in (setup_py, setup_cfg):
            if (not egg_info.check() or (conf_file.check()
                and conf_file.mtime() > egg_info.mtime())):
                return True
        return False

    def developpkg(self, setupdir, action):
        """Installs the package under test in develop (editable) mode."""
        assert action is not None
        if getattr(self, 'just_created', False):
            action.setactivity("develop-inst", setupdir)
            self.finish()
            extraopts = []
        else:
            if not self._needs_reinstall(setupdir, action):
                # Sources unchanged: nothing to do.
                action.setactivity("develop-inst-noop", setupdir)
                return
            action.setactivity("develop-inst-nodeps", setupdir)
            extraopts = ['--no-deps']
        self._install(['-e', setupdir], extraopts=extraopts, action=action)

    def installpkg(self, sdistpath, action):
        """Installs the built sdist of the package under test."""
        assert action is not None
        if getattr(self, 'just_created', False):
            action.setactivity("inst", sdistpath)
            self.finish()
            extraopts = []
        else:
            # Deps are already present; force-reinstall just the package.
            action.setactivity("inst-nodeps", sdistpath)
            extraopts = ['-U', '--no-deps']
        self._install([sdistpath], extraopts=extraopts, action=action)

    def install_deps(self, action=None):
        """Installs the environment's configured dependencies."""
        if action is None:
            action = self.session.newaction(self, "install_deps")
        deps = self._getresolvedeps()
        if deps:
            depinfo = ", ".join(map(str, deps))
            action.setactivity("installdeps",
                "%s" % depinfo)
            self._install(deps, action=action)

    def _installopts(self, indexserver):
        """Builds common install options (index url, download cache)."""
        l = []
        if indexserver:
            l += ["-i", indexserver]
        if self.envconfig.downloadcache:
            self.envconfig.downloadcache.ensure(dir=1)
            l.append("--download-cache=%s" % self.envconfig.downloadcache)
        return l

    def run_install_command(self, packages, options=(),
                            indexserver=None, action=None,
                            extraenv=None):
        """Runs the configured install_command for ``packages``.

        Substitutes the '{packages}' / '{opts}' placeholders, strips pip's
        virtualenv-guard variables from the environment, and forces UTF-8
        I/O encoding in the child process.
        """
        argv = self.envconfig.install_command[:]
        # use pip-script on win32 to avoid the executable locking
        i = argv.index('{packages}')
        argv[i:i+1] = packages
        if '{opts}' in argv:
            i = argv.index('{opts}')
            argv[i:i+1] = list(options)
        for x in ('PIP_RESPECT_VIRTUALENV', 'PIP_REQUIRE_VIRTUALENV'):
            try:
                del os.environ[x]
            except KeyError:
                pass
        env = dict(PYTHONIOENCODING='utf_8')
        if extraenv is not None:
            env.update(extraenv)
        self._pcall(argv, cwd=self.envconfig.config.toxinidir,
                    extraenv=env, action=action)

    def _install(self, deps, extraopts=None, action=None):
        """Installs ``deps``, grouped per index server, via install_command."""
        if not deps:
            return
        # d: indexserver -> package names; l: indexservers in first-seen order.
        d = {}
        l = []
        for dep in deps:
            if isinstance(dep, (str, py.path.local)):
                dep = DepConfig(str(dep), None)
            assert isinstance(dep, DepConfig), dep
            if dep.indexserver is None:
                ixserver = self.envconfig.config.indexserver['default']
            else:
                ixserver = dep.indexserver
            d.setdefault(ixserver, []).append(dep.name)
            if ixserver not in l:
                l.append(ixserver)
            assert ixserver.url is None or isinstance(ixserver.url, str)
        for ixserver in l:
            if self.envconfig.config.option.sethome:
                # Run with an artificial HOME so setuptools sees the index
                # url as well; see hack_home_env().
                extraenv = hack_home_env(
                    homedir=self.envconfig.envtmpdir.join("pseudo-home"),
                    index_url = ixserver.url)
            else:
                extraenv = {}
            packages = d[ixserver]
            options = self._installopts(ixserver.url)
            if extraopts:
                options.extend(extraopts)
            self.run_install_command(packages=packages, options=options,
                                     action=action, extraenv=extraenv)

    def _getenv(self, extraenv={}):
        # NOTE(review): the mutable default is only read, never mutated, so
        # it is harmless here -- but ``extraenv=None`` would be safer.
        env = os.environ.copy()
        setenv = self.envconfig.setenv
        if setenv:
            env.update(setenv)
        env.update(extraenv)
        return env

    def test(self, redirect=False):
        """Runs the configured test commands inside the environment.

        Sets ``self.status`` to 0 on success, an error string when a command
        fails, or "keyboardinterrupt" (which is also re-raised).
        """
        action = self.session.newaction(self, "runtests")
        with action:
            self.status = 0
            self.session.make_emptydir(self.envconfig.envtmpdir)
            cwd = self.envconfig.changedir
            env = self._getenv()
            # Display PYTHONHASHSEED to assist with reproducibility.
            action.setactivity("runtests", "PYTHONHASHSEED=%r" % env.get('PYTHONHASHSEED'))
            for i, argv in enumerate(self.envconfig.commands):
                # have to make strings as _pcall changes argv[0] to a local()
                # happens if the same environment is invoked twice
                message = "commands[%s] | %s" % (i, ' '.join(
                    [str(x) for x in argv]))
                action.setactivity("runtests", message)
                try:
                    self._pcall(argv, cwd=cwd, action=action, redirect=redirect)
                except tox.exception.InvocationError:
                    val = sys.exc_info()[1]
                    self.session.report.error(str(val))
                    self.status = "commands failed"
                except KeyboardInterrupt:
                    self.status = "keyboardinterrupt"
                    self.session.report.error(self.status)
                    raise

    def _pcall(self, args, venv=True, cwd=None, extraenv={},
               action=None, redirect=True):
        """Runs ``args`` with argv[0] resolved and the venv bin dir on PATH."""
        # Drop variables that would interfere with the child interpreter.
        for name in ("VIRTUALENV_PYTHON", "PYTHONDONTWRITEBYTECODE"):
            try:
                del os.environ[name]
            except KeyError:
                pass
        assert cwd
        cwd.ensure(dir=1)
        old = self.patchPATH()
        try:
            args[0] = self.getcommandpath(args[0], venv, cwd)
            env = self._getenv(extraenv)
            return action.popen(args, cwd=cwd, env=env, redirect=redirect)
        finally:
            # Always restore PATH, even when the command fails.
            os.environ['PATH'] = old

    def patchPATH(self):
        """Prepends the venv bin dir to PATH; returns the previous PATH."""
        oldPATH = os.environ['PATH']
        bindir = str(self.envconfig.envbindir)
        os.environ['PATH'] = os.pathsep.join([bindir, oldPATH])
        self.session.report.verbosity2("setting PATH=%s" % os.environ["PATH"])
        return oldPATH
def getdigest(path):
    """Returns a content hash of the file at ``path``.

    A path that is not an existing file yields a dummy all-zero digest so
    callers can still compare values.
    """
    target = py.path.local(path)
    if target.check(file=1):
        return target.computehash()
    return "0" * 32
def hack_home_env(homedir, index_url=None):
    """Returns an env-var dict pointing HOME at an artificial home directory.

    XXX HACK (this could also live with tox itself, consider)
    if tox uses pip on a package that requires setup_requires
    the index url set with pip is usually not recognized
    because it is setuptools executing very early.
    We therefore run the tox command in an artifical home
    directory and set .pydistutils.cfg and pip.conf files
    accordingly.
    """
    if not homedir.check():
        homedir.ensure(dir=1)
    env = dict(HOME=str(homedir))
    effective_url = index_url or os.environ.get("TOX_INDEX_URL")
    if effective_url:
        homedir.join(".pydistutils.cfg").write(
            "[easy_install]\n"
            "index_url = %s\n" % effective_url)
        env["PIP_INDEX_URL"] = effective_url
        env["TOX_INDEX_URL"] = effective_url
    return env
| |
from nose.tools import assert_raises, eq_, ok_
from mock import patch, Mock
from django.contrib.auth.models import User
from django.http import Http404
from django.test import TestCase
from us_ignite.common.tests import utils
from us_ignite.events import views
from us_ignite.events.forms import EventURLFormSet
from us_ignite.events.models import Event
from us_ignite.events.tests import fixtures
from us_ignite.profiles.tests.fixtures import get_user
class TestEventDetailView(TestCase):
    """Tests for the ``event_detail`` view."""

    @patch('us_ignite.events.views.get_object_or_404')
    def test_missing_event_raises_404(self, mock_get):
        # A missing slug propagates the Http404 from get_object_or_404.
        mock_get.side_effect = Http404
        request = utils.get_request(
            'get', '/event/abc/', user=utils.get_anon_mock())
        assert_raises(Http404, views.event_detail, request, 'abc')
        mock_get.assert_called_once_with(
            Event, slug__exact='abc')

    @patch('us_ignite.events.views.get_object_or_404')
    def test_not_visible_event_raises_404(self, mock_get):
        # Calling Mock(spec=Event) yields an instance-like mock.
        mock_instance = Mock(spec=Event)()
        mock_instance.is_visible_by.return_value = False
        mock_get.return_value = mock_instance
        request = utils.get_request(
            'get', '/event/abc/', user=utils.get_anon_mock())
        assert_raises(Http404, views.event_detail, request, 'abc')
        mock_get.assert_called_once_with(
            Event, slug__exact='abc')
        # NOTE(review): on mock versions that predate assert_called_once
        # this is a plain attribute call that always passes -- consider
        # assert_called_once_with(...); verify the installed mock version.
        mock_instance.is_visible_by.assert_called_once()

    @patch('us_ignite.events.views.get_object_or_404')
    def test_get_request_is_valid(self, mock_get):
        # A visible event renders the detail template with full context.
        mock_instance = Mock(spec=Event)()
        mock_instance.audiences.all.return_value = []
        mock_instance.is_visible_by.return_value = True
        mock_instance.hubs.all.return_value = []
        mock_get.return_value = mock_instance
        request = utils.get_request(
            'get', '/event/abc/', user=utils.get_anon_mock())
        response = views.event_detail(request, 'abc')
        mock_get.assert_called_once_with(
            Event, slug__exact='abc')
        mock_instance.is_visible_by.assert_called_once_with(request.user)
        eq_(response.status_code, 200)
        eq_(sorted(response.context_data.keys()),
            sorted(['hub_list', 'is_owner', 'object',
                    'audience_list', 'url_list']))
        eq_(response.template_name, 'events/object_detail.html')
class TestEventDetailICSView(TestCase):
    """Tests for the ``event_detail_ics`` calendar-export view."""

    @patch('us_ignite.events.views.get_object_or_404')
    def test_missing_event_raises_404(self, mock_get):
        mock_get.side_effect = Http404
        request = utils.get_request(
            'get', '/event/abc/ics/', user=utils.get_anon_mock())
        assert_raises(Http404, views.event_detail_ics, request, 'abc')
        # The ICS view resolves against the ``published`` manager only.
        mock_get.assert_called_once_with(Event.published, slug__exact='abc')

    def test_valid_event_returns_calendar(self):
        # A published event is served as a downloadable .ics attachment.
        user = get_user('ignite-user')
        event = fixtures.get_event(
            user=user, slug='abc', status=Event.PUBLISHED)
        request = utils.get_request(
            'get', '/event/abc/ics/', user=utils.get_anon_mock())
        response = views.event_detail_ics(request, 'abc')
        eq_(response.status_code, 200)
        ok_('attachment; filename="' in response['Content-Disposition'])
        eq_(response['Content-Type'], 'text/calendar')
        ok_(response.content)
class TestEventAddView(TestCase):
    """Tests for the ``event_add`` view."""

    def _tear_down(self):
        # Explicit cleanup helper (not an automatic tearDown): removes the
        # DB rows created by a test.
        for model in [Event, User]:
            model.objects.all().delete()

    def test_add_event_requires_auth(self):
        # Anonymous users are redirected to the login page.
        request = utils.get_request(
            'get', '/event/add/', user=utils.get_anon_mock())
        response = views.event_add(request)
        eq_(response.status_code, 302)
        eq_(response['Location'], utils.get_login_url('/event/add/'))

    def test_event_add_detail(self):
        # An authenticated GET renders the add form.
        request = utils.get_request(
            'get', '/event/add/', user=utils.get_user_mock())
        response = views.event_add(request)
        eq_(response.status_code, 200)
        eq_(response.template_name, 'events/object_add.html')
        eq_(sorted(response.context_data.keys()), ['form', 'formset', ])

    def test_invalid_payload_fails(self):
        # BUG FIX: this method was named ``invalid_payload_fails`` (missing
        # the ``test_`` prefix), so the test runner never collected it and
        # the assertion was never executed.
        user = get_user('ignite-user')
        request = utils.get_request(
            'post', '/event/add/', data={}, user=user)
        response = views.event_add(request)
        eq_(response.status_code, 200)
        ok_(response.context_data['form'].errors)
        self._tear_down()

    def test_valid_payload_succeeds(self):
        # A complete POST creates the event and redirects to its page.
        user = get_user('ignite-user')
        data = {
            'name': 'Gigabit community',
            'status': Event.DRAFT,
            'start_datetime': '2013-12-14 14:30:59',
            'address': 'London UK',
            'scope': 1,
            'description': 'Gigabit event',
            'timezone': 'US/Eastern',
        }
        formset_data = utils.get_inline_payload(EventURLFormSet)
        data.update(formset_data)
        request = utils.get_request(
            'post', '/event/add/', data=data, user=user)
        request._messages = utils.TestMessagesBackend(request)
        response = views.event_add(request)
        eq_(response.status_code, 302)
        event = Event.objects.get(name='Gigabit community')
        eq_(response['Location'], event.get_absolute_url())
        self._tear_down()
class TestEventListView(TestCase):
    """Tests for the ``event_list`` view."""

    @patch('us_ignite.events.models.Event.published.filter')
    def test_event_list_request_is_successful(self, mock_filter):
        # With no published events the listing still renders successfully.
        mock_filter.return_value = []
        request = utils.get_request(
            'get', '/events/', user=utils.get_anon_mock())
        response = views.event_list(request)
        eq_(response.status_code, 200)
        eq_(response.template_name, 'events/object_list.html')
        eq_(sorted(response.context_data.keys()),
            sorted(['page', 'timeframe', 'featured_list']))
        # NOTE(review): assert_called_once silently passes on mock versions
        # that predate it (plain attribute call) -- verify installed mock.
        mock_filter.assert_called_once()
class TestEventEditView(TestCase):
    """Tests for the ``event_edit`` view."""

    def _tear_down(self):
        # Explicit cleanup helper (not an automatic tearDown).
        for model in [Event, User]:
            model.objects.all().delete()

    def test_add_event_requires_auth(self):
        # Anonymous users are redirected to the login page.
        request = utils.get_request(
            'get', '/event/foo/edit/', user=utils.get_anon_mock())
        response = views.event_edit(request, 'foo')
        eq_(response.status_code, 302)
        eq_(response['Location'], utils.get_login_url('/event/foo/edit/'))

    def test_event_edit_requires_owner(self):
        # A non-owner requesting the edit page gets a 404.
        user = get_user('ignite-user')
        event = fixtures.get_event(
            user=user, slug='foo', status=Event.PUBLISHED)
        request = utils.get_request(
            'get', event.get_absolute_url(), user=get_user('other'))
        assert_raises(Http404, views.event_edit, request, 'foo')
        self._tear_down()

    def test_event_detail_is_successful(self):
        # The owner can load the edit form.
        user = get_user('ignite-user')
        event = fixtures.get_event(
            user=user, slug='foo', status=Event.PUBLISHED)
        request = utils.get_request('get', event.get_absolute_url(), user=user)
        response = views.event_edit(request, 'foo')
        eq_(response.status_code, 200)
        eq_(response.template_name, 'events/object_edit.html')
        eq_(sorted(response.context_data.keys()),
            ['form', 'formset', 'object'])

    def test_event_invalid_payload_fails(self):
        # An empty POST re-renders the form with validation errors.
        user = get_user('ignite-user')
        event = fixtures.get_event(
            user=user, slug='foo', status=Event.PUBLISHED)
        request = utils.get_request(
            'post', event.get_absolute_url(), data={}, user=user)
        response = views.event_edit(request, 'foo')
        eq_(response.status_code, 200)
        eq_(response.template_name, 'events/object_edit.html')
        ok_(response.context_data['form'].errors)

    def test_valid_payload_is_saved(self):
        # A complete POST saves and redirects back to the event page.
        user = get_user('ignite-user')
        event = fixtures.get_event(
            user=user, slug='foo', status=Event.PUBLISHED)
        data = {
            'name': 'Gigabit community',
            'status': Event.DRAFT,
            'start_datetime': '2013-12-14 14:30:59',
            'address': 'London UK',
            'scope': 1,
            'description': 'Gigabit event',
            'timezone': 'US/Eastern',
        }
        formset_data = utils.get_inline_payload(EventURLFormSet)
        data.update(formset_data)
        request = utils.get_request(
            'post', event.get_absolute_url(), data=data, user=user)
        request._messages = utils.TestMessagesBackend(request)
        response = views.event_edit(request, 'foo')
        eq_(response.status_code, 302)
        eq_(response['Location'], event.get_absolute_url())
| |
import sys
from _pytest.doctest import DoctestItem, DoctestModule, DoctestTextfile
import py
import pytest
class TestDoctests:
    def test_collect_testtextfile(self, testdir):
        # Both a directory scan and an explicitly-named .txt path yield a
        # single DoctestTextfile item; the explicitly-passed non-matching
        # file also collects to one item.
        w = testdir.maketxtfile(whatever="")
        checkfile = testdir.maketxtfile(test_something="""
            alskdjalsdk
            >>> i = 5
            >>> i-1
            4
        """)
        for x in (testdir.tmpdir, checkfile):
            #print "checking that %s returns custom items" % (x,)
            items, reprec = testdir.inline_genitems(x)
            assert len(items) == 1
            assert isinstance(items[0], DoctestTextfile)
        items, reprec = testdir.inline_genitems(w)
        assert len(items) == 1
    def test_collect_module_empty(self, testdir):
        # A module with no doctests yields no items under --doctest-modules.
        path = testdir.makepyfile(whatever="#")
        for p in (path, testdir.tmpdir):
            items, reprec = testdir.inline_genitems(p,
                '--doctest-modules')
            assert len(items) == 0
    def test_collect_module_single_modulelevel_doctest(self, testdir):
        # A module-level docstring containing '>>>' yields exactly one
        # DoctestItem parented by a DoctestModule.
        path = testdir.makepyfile(whatever='""">>> pass"""')
        for p in (path, testdir.tmpdir):
            items, reprec = testdir.inline_genitems(p,
                '--doctest-modules')
            assert len(items) == 1
            assert isinstance(items[0], DoctestItem)
            assert isinstance(items[0].parent, DoctestModule)
def test_collect_module_two_doctest_one_modulelevel(self, testdir):
path = testdir.makepyfile(whatever="""
'>>> x = None'
def my_func():
">>> magic = 42 "
""")
for p in (path, testdir.tmpdir):
items, reprec = testdir.inline_genitems(p,
'--doctest-modules')
assert len(items) == 2
assert isinstance(items[0], DoctestItem)
assert isinstance(items[1], DoctestItem)
assert isinstance(items[0].parent, DoctestModule)
assert items[0].parent is items[1].parent
def test_collect_module_two_doctest_no_modulelevel(self, testdir):
path = testdir.makepyfile(whatever="""
'# Empty'
def my_func():
">>> magic = 42 "
def unuseful():
'''
# This is a function
# >>> # it doesn't have any doctest
'''
def another():
'''
# This is another function
>>> import os # this one does have a doctest
'''
""")
for p in (path, testdir.tmpdir):
items, reprec = testdir.inline_genitems(p,
'--doctest-modules')
assert len(items) == 2
assert isinstance(items[0], DoctestItem)
assert isinstance(items[1], DoctestItem)
assert isinstance(items[0].parent, DoctestModule)
assert items[0].parent is items[1].parent
def test_simple_doctestfile(self, testdir):
p = testdir.maketxtfile(test_doc="""
>>> x = 1
>>> x == 1
False
""")
reprec = testdir.inline_run(p, )
reprec.assertoutcome(failed=1)
def test_new_pattern(self, testdir):
p = testdir.maketxtfile(xdoc ="""
>>> x = 1
>>> x == 1
False
""")
reprec = testdir.inline_run(p, "--doctest-glob=x*.txt")
reprec.assertoutcome(failed=1)
def test_doctest_unexpected_exception(self, testdir):
testdir.maketxtfile("""
>>> i = 0
>>> 0 / i
2
""")
result = testdir.runpytest("--doctest-modules")
result.stdout.fnmatch_lines([
"*unexpected_exception*",
"*>>> i = 0*",
"*>>> 0 / i*",
"*UNEXPECTED*ZeroDivision*",
])
def test_doctest_linedata_missing(self, testdir):
testdir.tmpdir.join('hello.py').write(py.code.Source("""
class Fun(object):
@property
def test(self):
'''
>>> a = 1
>>> 1/0
'''
"""))
result = testdir.runpytest("--doctest-modules")
result.stdout.fnmatch_lines([
"*hello*",
"*EXAMPLE LOCATION UNKNOWN, not showing all tests of that example*",
"*1/0*",
"*UNEXPECTED*ZeroDivision*",
"*1 failed*",
])
def test_doctest_unex_importerror(self, testdir):
testdir.tmpdir.join("hello.py").write(py.code.Source("""
import asdalsdkjaslkdjasd
"""))
testdir.maketxtfile("""
>>> import hello
>>>
""")
result = testdir.runpytest("--doctest-modules")
result.stdout.fnmatch_lines([
"*>>> import hello",
"*UNEXPECTED*ImportError*",
"*import asdals*",
])
def test_doctestmodule(self, testdir):
p = testdir.makepyfile("""
'''
>>> x = 1
>>> x == 1
False
'''
""")
reprec = testdir.inline_run(p, "--doctest-modules")
reprec.assertoutcome(failed=1)
def test_doctestmodule_external_and_issue116(self, testdir):
p = testdir.mkpydir("hello")
p.join("__init__.py").write(py.code.Source("""
def somefunc():
'''
>>> i = 0
>>> i + 1
2
'''
"""))
result = testdir.runpytest(p, "--doctest-modules")
result.stdout.fnmatch_lines([
'004 *>>> i = 0',
'005 *>>> i + 1',
'*Expected:',
"* 2",
"*Got:",
"* 1",
"*:5: DocTestFailure"
])
def test_txtfile_failing(self, testdir):
p = testdir.maketxtfile("""
>>> i = 0
>>> i + 1
2
""")
result = testdir.runpytest(p, "-s")
result.stdout.fnmatch_lines([
'001 >>> i = 0',
'002 >>> i + 1',
'Expected:',
" 2",
"Got:",
" 1",
"*test_txtfile_failing.txt:2: DocTestFailure"
])
def test_txtfile_with_fixtures(self, testdir):
p = testdir.maketxtfile("""
>>> dir = getfixture('tmpdir')
>>> type(dir).__name__
'LocalPath'
""")
reprec = testdir.inline_run(p, )
reprec.assertoutcome(passed=1)
def test_txtfile_with_usefixtures_in_ini(self, testdir):
testdir.makeini("""
[pytest]
usefixtures = myfixture
""")
testdir.makeconftest("""
import pytest
@pytest.fixture
def myfixture(monkeypatch):
monkeypatch.setenv("HELLO", "WORLD")
""")
p = testdir.maketxtfile("""
>>> import os
>>> os.environ["HELLO"]
'WORLD'
""")
reprec = testdir.inline_run(p, )
reprec.assertoutcome(passed=1)
def test_doctestmodule_with_fixtures(self, testdir):
p = testdir.makepyfile("""
'''
>>> dir = getfixture('tmpdir')
>>> type(dir).__name__
'LocalPath'
'''
""")
reprec = testdir.inline_run(p, "--doctest-modules")
reprec.assertoutcome(passed=1)
def test_doctestmodule_three_tests(self, testdir):
p = testdir.makepyfile("""
'''
>>> dir = getfixture('tmpdir')
>>> type(dir).__name__
'LocalPath'
'''
def my_func():
'''
>>> magic = 42
>>> magic - 42
0
'''
def unuseful():
pass
def another():
'''
>>> import os
>>> os is os
True
'''
""")
reprec = testdir.inline_run(p, "--doctest-modules")
reprec.assertoutcome(passed=3)
def test_doctestmodule_two_tests_one_fail(self, testdir):
p = testdir.makepyfile("""
class MyClass:
def bad_meth(self):
'''
>>> magic = 42
>>> magic
0
'''
def nice_meth(self):
'''
>>> magic = 42
>>> magic - 42
0
'''
""")
reprec = testdir.inline_run(p, "--doctest-modules")
reprec.assertoutcome(failed=1, passed=1)
def test_ignored_whitespace(self, testdir):
testdir.makeini("""
[pytest]
doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE
""")
p = testdir.makepyfile("""
class MyClass:
'''
>>> a = "foo "
>>> print(a)
foo
'''
pass
""")
reprec = testdir.inline_run(p, "--doctest-modules")
reprec.assertoutcome(passed=1)
def test_non_ignored_whitespace(self, testdir):
testdir.makeini("""
[pytest]
doctest_optionflags = ELLIPSIS
""")
p = testdir.makepyfile("""
class MyClass:
'''
>>> a = "foo "
>>> print(a)
foo
'''
pass
""")
reprec = testdir.inline_run(p, "--doctest-modules")
reprec.assertoutcome(failed=1, passed=0)
def test_ignored_whitespace_glob(self, testdir):
testdir.makeini("""
[pytest]
doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE
""")
p = testdir.maketxtfile(xdoc="""
>>> a = "foo "
>>> print(a)
foo
""")
reprec = testdir.inline_run(p, "--doctest-glob=x*.txt")
reprec.assertoutcome(passed=1)
def test_non_ignored_whitespace_glob(self, testdir):
testdir.makeini("""
[pytest]
doctest_optionflags = ELLIPSIS
""")
p = testdir.maketxtfile(xdoc="""
>>> a = "foo "
>>> print(a)
foo
""")
reprec = testdir.inline_run(p, "--doctest-glob=x*.txt")
reprec.assertoutcome(failed=1, passed=0)
def test_ignore_import_errors_on_doctest(self, testdir):
p = testdir.makepyfile("""
import asdf
def add_one(x):
'''
>>> add_one(1)
2
'''
return x + 1
""")
reprec = testdir.inline_run(p, "--doctest-modules",
"--doctest-ignore-import-errors")
reprec.assertoutcome(skipped=1, failed=1, passed=0)
def test_junit_report_for_doctest(self, testdir):
"""
#713: Fix --junit-xml option when used with --doctest-modules.
"""
p = testdir.makepyfile("""
def foo():
'''
>>> 1 + 1
3
'''
pass
""")
reprec = testdir.inline_run(p, "--doctest-modules",
"--junit-xml=junit.xml")
reprec.assertoutcome(failed=1)
@pytest.mark.parametrize('config_mode', ['ini', 'comment'])
def test_allow_unicode(self, testdir, config_mode):
"""Test that doctests which output unicode work in all python versions
tested by pytest when the ALLOW_UNICODE option is used (either in
the ini file or by an inline comment).
"""
if config_mode == 'ini':
testdir.makeini('''
[pytest]
doctest_optionflags = ALLOW_UNICODE
''')
comment = ''
else:
comment = '#doctest: +ALLOW_UNICODE'
testdir.maketxtfile(test_doc="""
>>> b'12'.decode('ascii') {comment}
'12'
""".format(comment=comment))
testdir.makepyfile(foo="""
def foo():
'''
>>> b'12'.decode('ascii') {comment}
'12'
'''
""".format(comment=comment))
reprec = testdir.inline_run("--doctest-modules")
reprec.assertoutcome(passed=2)
def test_unicode_string(self, testdir):
"""Test that doctests which output unicode fail in Python 2 when
the ALLOW_UNICODE option is not used. The same test should pass
in Python 3.
"""
testdir.maketxtfile(test_doc="""
>>> b'12'.decode('ascii')
'12'
""")
reprec = testdir.inline_run()
passed = int(sys.version_info[0] >= 3)
reprec.assertoutcome(passed=passed, failed=int(not passed))
class TestDoctestSkips:
    """
    If all examples in a doctest are skipped due to the SKIP option, then
    the tests should be SKIPPED rather than PASSED. (#957)
    """

    @pytest.fixture(params=['text', 'module'])
    def makedoctest(self, testdir, request):
        # Writes the same doctest either as a .txt file or as a module
        # docstring, so each test below runs in both modes.
        def makeit(doctest):
            mode = request.param
            if mode == 'text':
                testdir.maketxtfile(doctest)
            else:
                assert mode == 'module'
                testdir.makepyfile('"""\n%s"""' % doctest)
        return makeit

    def test_one_skipped(self, testdir, makedoctest):
        makedoctest("""
            >>> 1 + 1 # doctest: +SKIP
            2
            >>> 2 + 2
            4
        """)
        reprec = testdir.inline_run("--doctest-modules")
        reprec.assertoutcome(passed=1)

    def test_one_skipped_failed(self, testdir, makedoctest):
        # A failure in a non-skipped example still fails the doctest.
        makedoctest("""
            >>> 1 + 1 # doctest: +SKIP
            2
            >>> 2 + 2
            200
        """)
        reprec = testdir.inline_run("--doctest-modules")
        reprec.assertoutcome(failed=1)

    def test_all_skipped(self, testdir, makedoctest):
        # Every example skipped -> the whole doctest reports as skipped.
        makedoctest("""
            >>> 1 + 1 # doctest: +SKIP
            2
            >>> 2 + 2 # doctest: +SKIP
            200
        """)
        reprec = testdir.inline_run("--doctest-modules")
        reprec.assertoutcome(skipped=1)
class TestDoctestAutoUseFixtures:
    """Autouse fixtures of every scope must work with doctest items."""

    SCOPES = ['module', 'session', 'class', 'function']

    def test_doctest_module_session_fixture(self, testdir):
        """Test that session fixtures are initialized for doctest modules (#768)
        """
        # session fixture which changes some global data, which will
        # be accessed by doctests in a module
        testdir.makeconftest("""
            import pytest
            import sys

            @pytest.yield_fixture(autouse=True, scope='session')
            def myfixture():
                assert not hasattr(sys, 'pytest_session_data')
                sys.pytest_session_data = 1
                yield
                del sys.pytest_session_data
        """)
        testdir.makepyfile(foo="""
            import sys

            def foo():
              '''
              >>> assert sys.pytest_session_data == 1
              '''

            def bar():
              '''
              >>> assert sys.pytest_session_data == 1
              '''
        """)
        result = testdir.runpytest("--doctest-modules")
        result.stdout.fnmatch_lines('*2 passed*')

    @pytest.mark.parametrize('scope', SCOPES)
    @pytest.mark.parametrize('enable_doctest', [True, False])
    def test_fixture_scopes(self, testdir, scope, enable_doctest):
        """Test that auto-use fixtures work properly with doctest modules.
        See #1057 and #1100.
        """
        testdir.makeconftest('''
            import pytest

            @pytest.fixture(autouse=True, scope="{scope}")
            def auto(request):
                return 99
        '''.format(scope=scope))
        testdir.makepyfile(test_1='''
            def test_foo():
                """
                >>> getfixture('auto') + 1
                100
                """
            def test_bar():
                assert 1
        ''')
        params = ('--doctest-modules',) if enable_doctest else ()
        # One extra pass (the docstring doctest) when doctests are enabled.
        passes = 3 if enable_doctest else 2
        result = testdir.runpytest(*params)
        result.stdout.fnmatch_lines(['*=== %d passed in *' % passes])

    @pytest.mark.parametrize('scope', SCOPES)
    @pytest.mark.parametrize('autouse', [True, False])
    @pytest.mark.parametrize('use_fixture_in_doctest', [True, False])
    def test_fixture_module_doctest_scopes(self, testdir, scope, autouse,
                                           use_fixture_in_doctest):
        """Test that auto-use fixtures work properly with doctest files.
        See #1057 and #1100.
        """
        testdir.makeconftest('''
            import pytest

            @pytest.fixture(autouse={autouse}, scope="{scope}")
            def auto(request):
                return 99
        '''.format(scope=scope, autouse=autouse))
        if use_fixture_in_doctest:
            testdir.maketxtfile(test_doc="""
                >>> getfixture('auto')
                99
            """)
        else:
            testdir.maketxtfile(test_doc="""
                >>> 1 + 1
                2
            """)
        result = testdir.runpytest('--doctest-modules')
        assert 'FAILURES' not in str(result.stdout.str())
        result.stdout.fnmatch_lines(['*=== 1 passed in *'])

    @pytest.mark.parametrize('scope', SCOPES)
    def test_auto_use_request_attributes(self, testdir, scope):
        """Check that all attributes of a request in an autouse fixture
        behave as expected when requested for a doctest item.
        """
        testdir.makeconftest('''
            import pytest

            @pytest.fixture(autouse=True, scope="{scope}")
            def auto(request):
                if "{scope}" == 'module':
                    assert request.module is None
                if "{scope}" == 'class':
                    assert request.cls is None
                if "{scope}" == 'function':
                    assert request.function is None
                return 99
        '''.format(scope=scope))
        testdir.maketxtfile(test_doc="""
            >>> 1 + 1
            2
        """)
        result = testdir.runpytest('--doctest-modules')
        assert 'FAILURES' not in str(result.stdout.str())
        result.stdout.fnmatch_lines(['*=== 1 passed in *'])
| |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys

# Python 3 removed ``basestring``; alias it to ``str`` so the isinstance
# checks below work on both major versions. Compare ``sys.version_info``
# (a tuple) rather than the ``sys.version`` string: lexical string
# comparison is fragile (e.g. "10.0" < "3").
if sys.version_info[0] >= 3:
    basestring = str
from pyspark.rdd import RDD, ignore_unicode_prefix
from pyspark.mllib.common import callMLlibFunc, JavaModelWrapper
from pyspark.mllib.linalg import Matrix, _convert_to_vector
from pyspark.mllib.regression import LabeledPoint
from pyspark.mllib.stat.test import ChiSqTestResult, KolmogorovSmirnovTestResult
__all__ = ['MultivariateStatisticalSummary', 'Statistics']
class MultivariateStatisticalSummary(JavaModelWrapper):
    """
    Trait for multivariate statistical summary of a data matrix.

    Each method delegates to the wrapped JVM summarizer (via ``self.call``)
    and converts the result to a Python value.
    """

    def mean(self):
        """Column-wise mean vector."""
        return self.call("mean").toArray()

    def variance(self):
        """Column-wise variance vector."""
        return self.call("variance").toArray()

    def count(self):
        """Number of rows (vectors) in the summarized data."""
        return int(self.call("count"))

    def numNonzeros(self):
        """Per-column count of non-zero entries."""
        return self.call("numNonzeros").toArray()

    def max(self):
        """Column-wise maximum values."""
        return self.call("max").toArray()

    def min(self):
        """Column-wise minimum values."""
        return self.call("min").toArray()

    def normL1(self):
        """Column-wise L1 norm."""
        return self.call("normL1").toArray()

    def normL2(self):
        """Column-wise Euclidean (L2) norm."""
        return self.call("normL2").toArray()
class Statistics(object):

    @staticmethod
    def colStats(rdd):
        """
        Computes column-wise summary statistics for the input RDD[Vector].

        :param rdd: an RDD[Vector] for which column-wise summary statistics
                    are to be computed.
        :return: :class:`MultivariateStatisticalSummary` object containing
                 column-wise summary statistics.

        >>> from pyspark.mllib.linalg import Vectors
        >>> rdd = sc.parallelize([Vectors.dense([2, 0, 0, -2]),
        ...                       Vectors.dense([4, 5, 0, 3]),
        ...                       Vectors.dense([6, 7, 0, 8])])
        >>> cStats = Statistics.colStats(rdd)
        >>> cStats.mean()
        array([ 4., 4., 0., 3.])
        >>> cStats.variance()
        array([ 4., 13., 0., 25.])
        >>> cStats.count()
        3
        >>> cStats.numNonzeros()
        array([ 3., 2., 0., 3.])
        >>> cStats.max()
        array([ 6., 7., 0., 8.])
        >>> cStats.min()
        array([ 2., 0., 0., -2.])
        """
        # Delegate to the JVM implementation and wrap the returned model.
        cStats = callMLlibFunc("colStats", rdd.map(_convert_to_vector))
        return MultivariateStatisticalSummary(cStats)

    @staticmethod
    def corr(x, y=None, method=None):
        """
        Compute the correlation (matrix) for the input RDD(s) using the
        specified method.
        Methods currently supported: I{pearson (default), spearman}.

        If a single RDD of Vectors is passed in, a correlation matrix
        comparing the columns in the input RDD is returned. Use C{method=}
        to specify the method to be used for single RDD input.
        If two RDDs of floats are passed in, a single float is returned.

        :param x: an RDD of vector for which the correlation matrix is to be computed,
                  or an RDD of float of the same cardinality as y when y is specified.
        :param y: an RDD of float of the same cardinality as x.
        :param method: String specifying the method to use for computing correlation.
                       Supported: `pearson` (default), `spearman`
        :return: Correlation matrix comparing columns in x.

        >>> x = sc.parallelize([1.0, 0.0, -2.0], 2)
        >>> y = sc.parallelize([4.0, 5.0, 3.0], 2)
        >>> zeros = sc.parallelize([0.0, 0.0, 0.0], 2)
        >>> abs(Statistics.corr(x, y) - 0.6546537) < 1e-7
        True
        >>> Statistics.corr(x, y) == Statistics.corr(x, y, "pearson")
        True
        >>> Statistics.corr(x, y, "spearman")
        0.5
        >>> from math import isnan
        >>> isnan(Statistics.corr(x, zeros))
        True
        >>> from pyspark.mllib.linalg import Vectors
        >>> rdd = sc.parallelize([Vectors.dense([1, 0, 0, -2]), Vectors.dense([4, 5, 0, 3]),
        ...                       Vectors.dense([6, 7, 0, 8]), Vectors.dense([9, 0, 0, 1])])
        >>> pearsonCorr = Statistics.corr(rdd)
        >>> print(str(pearsonCorr).replace('nan', 'NaN'))
        [[ 1. 0.05564149 NaN 0.40047142]
        [ 0.05564149 1. NaN 0.91359586]
        [ NaN NaN 1. NaN]
        [ 0.40047142 0.91359586 NaN 1. ]]
        >>> spearmanCorr = Statistics.corr(rdd, method="spearman")
        >>> print(str(spearmanCorr).replace('nan', 'NaN'))
        [[ 1. 0.10540926 NaN 0.4 ]
        [ 0.10540926 1. NaN 0.9486833 ]
        [ NaN NaN 1. NaN]
        [ 0.4 0.9486833 NaN 1. ]]
        >>> try:
        ...     Statistics.corr(rdd, "spearman")
        ...     print("Method name as second argument without 'method=' shouldn't be allowed.")
        ... except TypeError:
        ...     pass
        """
        # Check inputs to determine whether a single value or a matrix is needed for output.
        # Since it's legal for users to use the method name as the second argument, we need to
        # check if y is used to specify the method name instead.
        if type(y) == str:
            raise TypeError("Use 'method=' to specify method name.")

        if not y:
            # Single-RDD case: correlation matrix of the columns of x.
            return callMLlibFunc("corr", x.map(_convert_to_vector), method).toArray()
        else:
            # Two-RDD case: a single correlation value between x and y.
            return callMLlibFunc("corr", x.map(float), y.map(float), method)

    @staticmethod
    @ignore_unicode_prefix
    def chiSqTest(observed, expected=None):
        """
        If `observed` is Vector, conduct Pearson's chi-squared goodness
        of fit test of the observed data against the expected distribution,
        or against the uniform distribution (by default), with each category
        having an expected frequency of `1 / len(observed)`.

        If `observed` is matrix, conduct Pearson's independence test on the
        input contingency matrix, which cannot contain negative entries or
        columns or rows that sum up to 0.

        If `observed` is an RDD of LabeledPoint, conduct Pearson's independence
        test for every feature against the label across the input RDD.
        For each feature, the (feature, label) pairs are converted into a
        contingency matrix for which the chi-squared statistic is computed.
        All label and feature values must be categorical.

        .. note:: `observed` cannot contain negative values

        :param observed: it could be a vector containing the observed categorical
                         counts/relative frequencies, or the contingency matrix
                         (containing either counts or relative frequencies),
                         or an RDD of LabeledPoint containing the labeled dataset
                         with categorical features. Real-valued features will be
                         treated as categorical for each distinct value.
        :param expected: Vector containing the expected categorical counts/relative
                         frequencies. `expected` is rescaled if the `expected` sum
                         differs from the `observed` sum.
        :return: ChiSquaredTest object containing the test statistic, degrees
                 of freedom, p-value, the method used, and the null hypothesis.

        >>> from pyspark.mllib.linalg import Vectors, Matrices
        >>> observed = Vectors.dense([4, 6, 5])
        >>> pearson = Statistics.chiSqTest(observed)
        >>> print(pearson.statistic)
        0.4
        >>> pearson.degreesOfFreedom
        2
        >>> print(round(pearson.pValue, 4))
        0.8187
        >>> pearson.method
        u'pearson'
        >>> pearson.nullHypothesis
        u'observed follows the same distribution as expected.'

        >>> observed = Vectors.dense([21, 38, 43, 80])
        >>> expected = Vectors.dense([3, 5, 7, 20])
        >>> pearson = Statistics.chiSqTest(observed, expected)
        >>> print(round(pearson.pValue, 4))
        0.0027

        >>> data = [40.0, 24.0, 29.0, 56.0, 32.0, 42.0, 31.0, 10.0, 0.0, 30.0, 15.0, 12.0]
        >>> chi = Statistics.chiSqTest(Matrices.dense(3, 4, data))
        >>> print(round(chi.statistic, 4))
        21.9958

        >>> data = [LabeledPoint(0.0, Vectors.dense([0.5, 10.0])),
        ...         LabeledPoint(0.0, Vectors.dense([1.5, 20.0])),
        ...         LabeledPoint(1.0, Vectors.dense([1.5, 30.0])),
        ...         LabeledPoint(0.0, Vectors.dense([3.5, 30.0])),
        ...         LabeledPoint(0.0, Vectors.dense([3.5, 40.0])),
        ...         LabeledPoint(1.0, Vectors.dense([3.5, 40.0])),]
        >>> rdd = sc.parallelize(data, 4)
        >>> chi = Statistics.chiSqTest(rdd)
        >>> print(chi[0].statistic)
        0.75
        >>> print(chi[1].statistic)
        1.5
        """
        if isinstance(observed, RDD):
            # RDD[LabeledPoint]: per-feature independence test, one result
            # per feature.
            if not isinstance(observed.first(), LabeledPoint):
                raise ValueError("observed should be an RDD of LabeledPoint")
            jmodels = callMLlibFunc("chiSqTest", observed)
            return [ChiSqTestResult(m) for m in jmodels]

        if isinstance(observed, Matrix):
            # Contingency matrix: independence test.
            jmodel = callMLlibFunc("chiSqTest", observed)
        else:
            # Vector of observed counts: goodness-of-fit test.
            if expected and len(expected) != len(observed):
                raise ValueError("`expected` should have same length with `observed`")
            jmodel = callMLlibFunc("chiSqTest", _convert_to_vector(observed), expected)
        return ChiSqTestResult(jmodel)

    @staticmethod
    @ignore_unicode_prefix
    def kolmogorovSmirnovTest(data, distName="norm", *params):
        """
        Performs the Kolmogorov-Smirnov (KS) test for data sampled from
        a continuous distribution. It tests the null hypothesis that
        the data is generated from a particular distribution.

        The given data is sorted and the Empirical Cumulative
        Distribution Function (ECDF) is calculated
        which for a given point is the number of points having a CDF
        value lesser than it divided by the total number of points.

        Since the data is sorted, this is a step function
        that rises by (1 / length of data) for every ordered point.

        The KS statistic gives us the maximum distance between the
        ECDF and the CDF. Intuitively if this statistic is large, the
        probability that the null hypothesis is true becomes small.
        For specific details of the implementation, please have a look
        at the Scala documentation.

        :param data: RDD, samples from the data
        :param distName: string, currently only "norm" is supported.
                         (Normal distribution) to calculate the
                         theoretical distribution of the data.
        :param params: additional values which need to be provided for
                       a certain distribution.
                       If not provided, the default values are used.
        :return: KolmogorovSmirnovTestResult object containing the test
                 statistic, degrees of freedom, p-value,
                 the method used, and the null hypothesis.

        >>> kstest = Statistics.kolmogorovSmirnovTest
        >>> data = sc.parallelize([-1.0, 0.0, 1.0])
        >>> ksmodel = kstest(data, "norm")
        >>> print(round(ksmodel.pValue, 3))
        1.0
        >>> print(round(ksmodel.statistic, 3))
        0.175
        >>> ksmodel.nullHypothesis
        u'Sample follows theoretical distribution'

        >>> data = sc.parallelize([2.0, 3.0, 4.0])
        >>> ksmodel = kstest(data, "norm", 3.0, 1.0)
        >>> print(round(ksmodel.pValue, 3))
        1.0
        >>> print(round(ksmodel.statistic, 3))
        0.175
        """
        if not isinstance(data, RDD):
            raise TypeError("data should be an RDD, got %s." % type(data))
        if not isinstance(distName, basestring):
            raise TypeError("distName should be a string, got %s." % type(distName))

        # Distribution parameters are passed to the JVM as a list of floats.
        params = [float(param) for param in params]
        return KolmogorovSmirnovTestResult(
            callMLlibFunc("kolmogorovSmirnovTest", data, distName, params))
def _test():
    """Run this module's doctests against a throwaway local SparkSession."""
    import doctest
    import numpy
    from pyspark.sql import SparkSession

    try:
        # Numpy 1.14+ changed its repr format; pin the legacy one so the
        # doctest expected outputs keep matching.
        numpy.set_printoptions(legacy='1.13')
    except TypeError:
        pass
    spark = (SparkSession.builder
             .master("local[4]")
             .appName("mllib.stat.statistics tests")
             .getOrCreate())
    test_globals = globals().copy()
    test_globals['sc'] = spark.sparkContext
    failure_count, test_count = doctest.testmod(
        globs=test_globals, optionflags=doctest.ELLIPSIS)
    spark.stop()
    if failure_count:
        sys.exit(-1)
if __name__ == "__main__":
_test()
| |
from __future__ import absolute_import, unicode_literals
import os
from django import VERSION as DJANGO_VERSION
from django.utils.translation import ugettext_lazy as _
######################
# MEZZANINE SETTINGS #
######################
# The following settings are already defined with default values in
# the ``defaults.py`` module within each of Mezzanine's apps, but are
# common enough to be put here, commented out, for conveniently
# overriding. Please consult the settings documentation for a full list
# of settings Mezzanine implements:
# http://mezzanine.jupo.org/docs/configuration.html#default-settings
# Controls the ordering and grouping of the admin menu.
#
# ADMIN_MENU_ORDER = (
# ("Content", ("pages.Page", "blog.BlogPost",
# "generic.ThreadedComment", (_("Media Library"), "media-library"),)),
# ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")),
# ("Users", ("auth.User", "auth.Group",)),
# )
# A three item sequence, each containing a sequence of template tags
# used to render the admin dashboard.
#
# DASHBOARD_TAGS = (
# ("blog_tags.quick_blog", "mezzanine_tags.app_list"),
# ("comment_tags.recent_comments",),
# ("mezzanine_tags.recent_actions",),
# )
# A sequence of templates used by the ``page_menu`` template tag. Each
# item in the sequence is a three item sequence, containing a unique ID
# for the template, a label for the template, and the template path.
# These templates are then available for selection when editing which
# menus a page should appear in. Note that if a menu template is used
# that doesn't appear in this setting, all pages will appear in it.
# PAGE_MENU_TEMPLATES = (
# (1, _("Top navigation bar"), "pages/menus/dropdown.html"),
# (2, _("Left-hand tree"), "pages/menus/tree.html"),
# (3, _("Footer"), "pages/menus/footer.html"),
# )
# A sequence of fields that will be injected into Mezzanine's (or any
# library's) models. Each item in the sequence is a four item sequence.
# The first two items are the dotted path to the model and its field
# name to be added, and the dotted path to the field class to use for
# the field. The third and fourth items are a sequence of positional
# args and a dictionary of keyword args, to use when creating the
# field instance. When specifying the field class, the path
# ``django.models.db.`` can be omitted for regular Django model fields.
#
# EXTRA_MODEL_FIELDS = (
# (
# # Dotted path to field.
# "mezzanine.blog.models.BlogPost.image",
# # Dotted path to field class.
# "somelib.fields.ImageField",
# # Positional args for field class.
# (_("Image"),),
# # Keyword args for field class.
# {"blank": True, "upload_to": "blog"},
# ),
# # Example of adding a field to *all* of Mezzanine's content types:
# (
# "mezzanine.pages.models.Page.another_field",
# "IntegerField", # 'django.db.models.' is implied if path is omitted.
# (_("Another name"),),
# {"blank": True, "default": 1},
# ),
# )
# Setting to turn on featured images for blog posts. Defaults to False.
#
# BLOG_USE_FEATURED_IMAGE = True
# If True, the django-modeltranslation will be added to the
# INSTALLED_APPS setting.
USE_MODELTRANSLATION = False
########################
# MAIN DJANGO SETTINGS #
########################
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en"
# Supported languages
LANGUAGES = (
('en', _('English')),
)
# A boolean that turns on/off debug mode. When set to ``True``, stack traces
# are displayed for error pages. Should always be set to ``False`` in
# production. Best set to ``True`` in local_settings.py
DEBUG = False
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",)
# The numeric mode to set newly-uploaded files to. The value should be
# a mode you'd pass directly to os.chmod.
FILE_UPLOAD_PERMISSIONS = 0o644
#############
# DATABASES #
#############
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.",
# DB name or path to database file if using sqlite3.
"NAME": "",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
#########
# PATHS #
#########
# Full filesystem path to the project.
PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_APP = os.path.basename(PROJECT_APP_PATH)
PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH)
# Every cache key will get prefixed with this value - here we set it to
# the name of the directory the project is in to try and use something
# project specific.
CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = "/static/"
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/"))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = STATIC_URL + "media/"
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip("/").split("/"))
# Package/module name to import the root urlpatterns from for the project.
ROOT_URLCONF = "%s.urls" % PROJECT_APP
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [
os.path.join(PROJECT_ROOT, "templates")
],
"OPTIONS": {
"context_processors": [
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.static",
"django.template.context_processors.media",
"django.template.context_processors.request",
"django.template.context_processors.tz",
"mezzanine.conf.context_processors.settings",
"mezzanine.pages.context_processors.page",
],
"builtins": [
"mezzanine.template.loader_tags",
],
"loaders": [
"mezzanine.template.loaders.host_themes.Loader",
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
]
},
},
]
if DJANGO_VERSION < (1, 9):
del TEMPLATES[0]["OPTIONS"]["builtins"]
################
# APPLICATIONS #
################
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.messages",
"django.contrib.redirects",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.sitemaps",
"django.contrib.staticfiles",
"mezzanine.boot",
"mezzanine.conf",
"mezzanine.core",
"mezzanine.generic",
"mezzanine.pages",
"mezzanine.blog",
"mezzanine.forms",
"mezzanine.galleries",
"mezzanine.twitter",
# "mezzanine.accounts",
)
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE = (
"mezzanine.core.middleware.UpdateCacheMiddleware",
'django.contrib.sessions.middleware.SessionMiddleware',
# Uncomment if using internationalisation or localisation
# 'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
"mezzanine.core.request.CurrentRequestMiddleware",
"mezzanine.core.middleware.RedirectFallbackMiddleware",
"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware",
"mezzanine.core.middleware.SitePermissionMiddleware",
"mezzanine.pages.middleware.PageMiddleware",
"mezzanine.core.middleware.FetchFromCacheMiddleware",
)
if DJANGO_VERSION < (1, 10):
MIDDLEWARE_CLASSES = MIDDLEWARE
del MIDDLEWARE
# Store these package names here as they may change in the future since
# at the moment we are using custom forks of them.
PACKAGE_NAME_FILEBROWSER = "filebrowser_safe"
PACKAGE_NAME_GRAPPELLI = "grappelli_safe"
#########################
# OPTIONAL APPLICATIONS #
#########################
# These will be added to ``INSTALLED_APPS``, only if available.
OPTIONAL_APPS = (
"debug_toolbar",
"django_extensions",
"compressor",
PACKAGE_NAME_FILEBROWSER,
PACKAGE_NAME_GRAPPELLI,
)
##################
# LOCAL SETTINGS #
##################
# Allow any settings to be defined in local_settings.py which should be
# ignored in your version control system allowing for settings to be
# defined per machine.
# Instead of doing "from .local_settings import *", we use exec so that
# local_settings has full access to everything defined in this module.
# Also force into sys.modules so it's visible to Django's autoreload.
# Load local_settings.py via exec (rather than a plain import) so it has
# full access to everything already defined in this module; register the
# module in sys.modules so Django's autoreload can see it.
f = os.path.join(PROJECT_APP_PATH, "local_settings.py")
if os.path.exists(f):
    import sys
    import types
    module_name = "%s.local_settings" % PROJECT_APP
    # types.ModuleType replaces the deprecated imp.new_module (the ``imp``
    # module was removed in Python 3.12) with identical behavior.
    module = types.ModuleType(module_name)
    module.__file__ = f
    sys.modules[module_name] = module
    exec(open(f, "rb").read())
####################
# DYNAMIC SETTINGS #
####################
# set_dynamic_settings() will rewrite globals based on what has been
# defined so far, in order to provide some better defaults where
# applicable. We also allow this settings module to be imported
# without Mezzanine installed, as the case may be when using the
# fabfile, where setting the dynamic settings below isn't strictly
# required.
try:
    from mezzanine.utils.conf import set_dynamic_settings
except ImportError:
    pass
else:
    # Only rewrite settings when Mezzanine is actually importable.
    set_dynamic_settings(globals())
| |
'''Utility functions used in other modules.'''
# Built-ins
from collections import OrderedDict, Callable
import copy
import difflib
import functools
import os
import pickle
import types
import numpy as np
import pandas as pd
from . import paths
DEFAULT_DPI = 300
'''
The DPI to use when generating all image figures.
'''
def fuzzy_find(needle, haystack):
    '''
    Find the longest matching subsequence of needle within haystack.

    Returns the corresponding index from the beginning of needle.

    Parameters
    ----------
    needle : str
    haystack : str

    Returns
    -------
    index : int
    '''
    matcher = difflib.SequenceMatcher(a=haystack, b=needle)
    match = matcher.find_longest_match(0, len(haystack), 0, len(needle))
    # Offset the haystack position so that index 0 lines up with the start
    # of needle rather than the start of the matched fragment.
    return match.a + match.size - len(needle)
def make_folder(data=None, folder_name=None, sub='Output'):
    '''
    Build (and create) a default figure-output folder.

    When no explicit folder_name is given, one is derived from
    paths.FIGURES_DIR, the data set's name (or 'All' when data is None),
    and sub.

    Parameters
    ----------
    data : object, optional
        When given, its .name attribute is used as a path component.
    folder_name : str, optional
    sub : str, optional

    Returns
    -------
    folder_name : str
    '''
    if folder_name is None:
        component = 'All' if data is None else data.name
        folder_name = os.path.join(paths.FIGURES_DIR, component, sub)

    return makedirs(folder_name)
def makedirs(folder_name=None):
    '''
    Creates a folder if it does not exist.

    Parameters
    ----------
    folder_name : str, optional

    Returns
    -------
    folder_name : str
    '''
    if not folder_name:
        return folder_name

    try:
        os.makedirs(folder_name)
    except OSError:
        # Best-effort: the folder may already exist.
        pass

    return folder_name
def norm(channels):
    '''
    Converts a list of channels to their normalized names.

    Parameters
    ----------
    channels : list of str or dict of (str, str) or None

    Returns
    -------
    new_channels : list of str or dict of str, str
    '''
    if channels is None:
        return None

    # Order matters: check str before the container types.
    if isinstance(channels, str):
        return '{}_norm'.format(channels)

    if isinstance(channels, list):
        return [norm(name) for name in channels]

    if isinstance(channels, (dict, OrderedDict)):
        return OrderedDict(
            (key, norm(val)) for key, val in channels.items()
        )
def which(program):
    '''
    Checks if a program exists in PATH's list of directories.

    Parameters
    ----------
    program : str

    Returns
    -------
    path : str or None
    '''
    def _executable(candidate):
        # Must be a regular file with the execute bit set.
        return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

    directory, _ = os.path.split(program)

    if directory:
        # A path with a directory part is checked directly, not via PATH.
        return program if _executable(program) else None

    for directory in os.environ['PATH'].split(os.pathsep):
        candidate = os.path.join(directory.strip('\''), program)
        if _executable(candidate):
            return candidate

    return None
def flatten_set(lst):
    '''
    Flattens an Iterable with arbitrary nesting into a single set.

    Parameters
    ----------
    lst : Iterable

    Returns
    -------
    flattened : set

    Examples
    --------
    >>> utils.flatten_set([0, [1, 2], [[3]], 'string'])
    set([0, 1, 2, 3, 'string'])
    '''
    _nested = (list, tuple, set, types.GeneratorType, pd.Series, np.ndarray)

    if not isinstance(lst, _nested):
        # Scalars (including strings) become a one-element set.
        return set([lst])

    flattened = set()

    for item in lst:
        flattened.update(flatten_set(item))

    return flattened
def flatten_list(lst):
    '''
    Flattens an Iterable with arbitrary nesting into a single list.

    Parameters
    ----------
    lst : Iterable

    Returns
    -------
    flattened : list

    Examples
    --------
    >>> utils.flatten_list([0, [1, 2], [[3]], 'string'])
    [0, 1, 2, 3, 'string']
    '''
    _nested = (list, tuple, set, types.GeneratorType, pd.Series, np.ndarray)

    if not isinstance(lst, _nested):
        # Scalars (including strings) become a one-element list.
        return [lst]

    flattened = []

    for item in lst:
        flattened.extend(flatten_list(item))

    return flattened
class DefaultOrderedDict(OrderedDict):
    '''
    An OrderedDict that, like collections.defaultdict, fills in missing
    keys with the result of calling default_factory.

    Source: http://stackoverflow.com/a/6190500/562769
    '''

    def __init__(self, default_factory=None, *a, **kw):
        # Use the callable() builtin instead of collections.Callable, which
        # was removed from the collections namespace in Python 3.10.
        if (default_factory is not None and
                not callable(default_factory)):
            raise TypeError('first argument must be callable')

        OrderedDict.__init__(self, *a, **kw)
        self.default_factory = default_factory

    def __getitem__(self, key):
        try:
            return OrderedDict.__getitem__(self, key)
        except KeyError:
            return self.__missing__(key)

    def __missing__(self, key):
        # Mirror defaultdict: create, store, and return the default value.
        if self.default_factory is None:
            raise KeyError(key)

        self[key] = value = self.default_factory()
        return value

    def __reduce__(self):
        if self.default_factory is None:
            args = tuple()
        else:
            args = self.default_factory,

        # Pickle protocol: the fifth element must be an *iterator* over
        # (key, value) pairs, not a dict view.
        return type(self), args, None, None, iter(self.items())

    def copy(self):
        return self.__copy__()

    def __copy__(self):
        return type(self)(self.default_factory, self)

    def __deepcopy__(self, memo):
        # Deep-copy a concrete list of items (a dict view cannot be
        # deep-copied on Python 3) and thread `memo` through so shared /
        # cyclic references are handled correctly.
        return type(self)(self.default_factory,
                          copy.deepcopy(list(self.items()), memo))

    def __repr__(self):
        # Use the real class name (the original hard-coded a wrong one).
        return '%s(%s, %s)' % (
            type(self).__name__,
            self.default_factory,
            OrderedDict.__repr__(self),
        )
def memoize(func):
    '''
    Memoize a function, saving its returned value for a given set of parameters
    in an in-memory cache.

    The cache is exposed as the ``cache`` attribute of the returned function.

    Examples
    --------
    >>> from pyproteome import utils
    >>> @utils.memoize
    ... def download_data(species):
    ...     ...  # Fetch / calculate the return value once

    Parameters
    ----------
    func : func

    Returns
    -------
    memorized : func
    '''
    func.cache = {}

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Key on the string form of the arguments; this also tolerates
        # unhashable argument values (e.g. lists).
        key = str(args) + str(kwargs)

        try:
            return func.cache[key]
        except KeyError:
            func.cache[key] = func(*args, **kwargs)
            return func.cache[key]

    return wrapper
PICKLE_DIR = '.pyproteome'
'''
Default directory to use for saving / loading pickle files.
'''


def save(name, val=None):
    '''
    Save a variable using the pickle module.

    Parameters
    ----------
    name : str
        The name to use for data storage.
    val : object, optional

    Returns
    -------
    val : object
    '''
    # Make sure the pickle directory exists before writing into it.
    makedirs(PICKLE_DIR)

    target = os.path.join(PICKLE_DIR, '{}.pkl'.format(name))

    with open(target, 'wb') as handle:
        pickle.dump(val, handle)

    return val
def load(name, default=None):
    '''
    Load a variable using the pickle module.

    Parameters
    ----------
    name : str
        The name to use for data storage.
    default : object, optional
        Returned when the pickle file is missing or unreadable.

    Returns
    -------
    val : object
    '''
    source = os.path.join(PICKLE_DIR, '{}.pkl'.format(name))

    try:
        with open(source, 'rb') as handle:
            return pickle.load(handle)
    except (
        OSError, pickle.UnpicklingError, IOError,
        AttributeError, EOFError, ImportError, IndexError,
    ):
        # Fall back to the default on any load failure.
        return default
def adjust_text(*args, **kwargs):
    '''
    Wraps importing and calling :func:`adjustText.adjust_text`.

    The import is deferred so that adjustText stays an optional dependency;
    it is only required when this function is actually called.
    '''
    from adjustText import adjust_text as at
    return at(*args, **kwargs)
def get_name(proteins):
    '''
    Generates a shortened version of a protein name. For peptides
    that map to multiple proteins, this function finds the longest
    common prefix (excluding digits) that matches all proteins.

    Gene names are sorted before joining. When a non-empty common prefix
    exists, the per-gene remainders are joined with '/'; otherwise the
    full names are joined with ' / '.

    Parameters
    ----------
    proteins : :class:`.data_sets.protein.Proteins`

    Returns
    -------
    str

    Examples
    --------
    >>> pyp.utils.get_name(
    ...     protein.Proteins([
    ...         protein.Protein(gene='Dpysl2'),
    ...         protein.Protein(gene='Dpysl3'),
    ...     ])
    ... )
    'Dpysl2/3'
    >>> pyp.utils.get_name(
    ...     protein.Proteins([
    ...         protein.Protein(gene='Tuba1a'),
    ...         protein.Protein(gene='Tuba1b'),
    ...         protein.Protein(gene='Tuba4a'),
    ...         protein.Protein(gene='Tuba8'),
    ...     ])
    ... )
    'Tuba1a/1b/4a/8'
    '''
    genes = sorted(proteins.genes)
    prefix = ''
    sep = ' / '

    if len(genes) > 1:
        prefix = os.path.commonprefix(genes)

        # Trim the prefix back to just before its last digit, so numeric
        # suffixes stay with the per-gene remainders.
        for ind in range(len(prefix) - 1, -1, -1):
            if prefix[ind].isdigit():
                prefix = prefix[:ind]
                break

        if prefix:
            sep = '/'

    return prefix + sep.join(gene[len(prefix):] for gene in genes)
def stars(p, ns='ns'):
    '''
    Calculate the stars to indicate significant changes.

    \\*\\*\\*\\* : p < 1e-4

    \\*\\*\\* : p < 1e-3

    \\*\\* : p < 1e-2

    \\* : p < 5e-2

    ns : not significant

    Parameters
    ----------
    p : float
    ns : str, optional

    Returns
    -------
    str
    '''
    # Thresholds are checked from most to least significant.
    for cutoff, label in (
        (1e-4, '****'),
        (1e-3, '***'),
        (1e-2, '**'),
        (5e-2, '*'),
    ):
        if p < cutoff:
            return label

    return ns
| |
#!/usr/bin/python
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Summary:
mlab_export.py serializes RRD data collected by the M-Lab, collectd plugin.
mlab_export.py supports many options for use by a human operator. However,
typical usage should be automated using something like crond. Because typical
usage is automated, all parameters have sensible defaults, tuned to running on
M-Lab.
Overview of default operation:
* Script finds all RRD files under --rrddir_prefix.
* Script opens a file for output under --output_dir (optionally compressed).
* Script reads values from each RRD between --ts_start and --ts_end.
* Script determines ts_start and ts_end from 'mtime' on LAST_EXPORT_FILENAME.
- On first run, LAST_EXPORT_FILENAME is created. ts_end is set to nearest
hour from current time, and ts_start is set to ts_end minus --length.
- On later runs, ts_start is set to the 'mtime' of LAST_EXPORT_FILENAME.
And, ts_end is set to the most recent hour.
* Script only exports metrics defined in the export_metrics.conf config file.
* On success, script sets mtime on LAST_EXPORT_FILENAME to ts_end, in
preparation for the next run.
Examples:
# Default operation should not require additional parameters.
./mlab_export.py
# Useful for testing, --noupdate exports the last hour without the
# side-effects of modifying the mtime of LAST_EXPORT_FILENAME.
./mlab_export.py --noupdate --output example.json
# To export a different set of metrics than the global default.
./mlab_export.py --noupdate --output example.json --export_metrics metrics.cfg
# To export "pretty" json more suitable for debugging.
./mlab_export.py --noupdate --pretty_json --output example.json
# List rrd file names, the raw metric names, or the canonical metric names.
./mlab_export.py --show_rrdfile
./mlab_export.py --show_rawmetric --show_metric
# Show the collectd-nagios command line for checking each metric.
./mlab_export.py --show_nagios
"""
import ConfigParser
import contextlib
import fcntl
import gzip
import json
import logging
import os
import socket
import sys
import time
# Third-party modules.
import gflags as flags
import rrdtool
# Module constants. HOSTNAME is populated by init_global() and METRIC_MAP
# by init_args() at startup.
COLLECTD_INTERVAL = 10
EXPORT_DIR = '/var/spool/mlab_utility'
EXPORT_LOCKFILE = os.path.join(EXPORT_DIR, 'mlab_export.lock')
HOSTNAME = None
LAST_EXPORT_FILENAME = '/var/lib/collectd/lastexport.tstamp'
LIST_OPTIONS = ('rrdfile', 'metric', 'metric_raw', 'metric_skipped')
METRIC_MAP = None
METRIC_MAP_CONF = '/usr/share/collectd-mlab/export_metrics.conf'
RRD_PREFIX = '/var/lib/collectd/rrd/'
# Command-line flags (gflags). Defaults are tuned for automated M-Lab runs.
flags.DEFINE_string('rrddir_prefix', RRD_PREFIX,
                    'Root directory of RRD files to export.')
flags.DEFINE_integer('length',
                     3600,
                     'Length of time to export in seconds. Length should be '
                     'a multiple of step.',
                     lower_bound=0)
flags.DEFINE_integer(
    'step',
    COLLECTD_INTERVAL,
    'Time between RRD values in seconds. This value '
    'must equal the value in the collectd config. Inaccurate values will not '
    'change the intervals of exported data, but could result in samples being '
    'skipped at the beginning or end of the export window.',
    lower_bound=1)
flags.DEFINE_integer(
    'ts_start',
    None,
    'Timestamp to start export, in seconds since the epoch. Only use this '
    'option for debugging. Normally ts_start is calculated automatically from '
    'the previous export end time. If given, ts_start should be a multiple of '
    'step.',
    lower_bound=0)
flags.DEFINE_integer(
    'ts_end',
    None,
    'Timestamp to end export, in seconds since the epoch. Only use this option '
    'for debugging. Normally, ts_end is calculated automatically from: '
    'ts_start + length.',
    lower_bound=0)
flags.DEFINE_integer(
    'ts_offset',
    600,
    'Amount of time (seconds) that must have passed after '
    'ts_end to ensure that values cached by collectd have been flushed to disk '
    'before attempting an export.',
    lower_bound=0)
flags.DEFINE_multistring('ignored_experiments', [],
                         'List of experiment names to ignore. Experiment '
                         'must be in "slice.site" form not "site_slice".')
flags.DEFINE_bool('pretty_json', None,
                  'Add extra indenting to json output (for debugging).')
flags.DEFINE_string('output_dir', EXPORT_DIR,
                    'Root directory of json output files.')
flags.DEFINE_string('output', None,
                    'Name of json output file. Set automatically if not given.')
flags.DEFINE_string(
    'export_metrics', METRIC_MAP_CONF, 'File name with metric map. The metric '
    'map defines canonical metric names for raw, metric names taken from '
    'collectd RRD files.')
flags.DEFINE_bool('verbose', False, 'Increase verbosity level.')
flags.DEFINE_bool('show_nagios', False,
                  'Shows collectd-nagios commands to monitor metrics.')
flags.DEFINE_bool('show_rrdfile', False,
                  'Shows the RRD files opened during export.')
flags.DEFINE_bool('show_metric', False,
                  'Shows the canonical metric names during export.')
flags.DEFINE_bool(
    'show_rawmetric', False, 'Shows the raw metric name before translating to '
    'the canonical name. This can be helpful to know before adding new metrics '
    'to the export metrics configuration file.')
flags.DEFINE_bool(
    'show_skipped', False, 'Shows the raw metric names that are not exported. '
    'This option may be helpful when adding new metrics to the export_metrics.')
flags.DEFINE_bool('update', True,
                  'Update timestamps on successful export. Update is always '
                  'disabled when any --show_* option is enabled.')
flags.DEFINE_bool(
    'compress', False,
    'Compresses output and adds .gz extension to output filename.')
flags.DEFINE_string('suffix', 'metrics',
                    ('The suffix is appended to file names during export, e.g. '
                     '*-<suffix>.json.gz, and the suffix is used as a section '
                     'header in the "--export_metrics" configuration file.'))
flags.DEFINE_bool('counts', False,
                  ('Export metric counts rather than rates. Counts are '
                   'recovered by multiplying rates by the stepsize.'))
class Error(Exception):
  """Base error type for this file."""
  pass


class TimeOptionError(Error):
  """An error related to export times or ranges."""
  pass


class LockFileError(Error):
  """An exclusive lock could not be acquired for a lock file."""
  pass


def init_global():
  """Initializes module globals and default logging; call once at startup."""
  global HOSTNAME
  # NOTE: This should be the hostname of root context, not slice context.
  HOSTNAME = socket.gethostname()
  logging.basicConfig(format='%(message)s', level=logging.INFO)
class LockFile(object):
  """Provides a file-based lock."""

  def __init__(self, filename):
    self._filename = filename
    self._handle = None

  def __enter__(self):
    """Acquires file lock on filename.

    Raises:
      LockFileError, if the lock cannot be acquired.
    """
    try:
      self._handle = open(self._filename, 'w')
    except IOError as err:
      raise LockFileError(err)
    try:
      fcntl.flock(self._handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError as err:
      # Close the handle so a failed lock attempt does not leak an open
      # file descriptor (the original left it open).
      self._handle.close()
      raise LockFileError(err)

  def __exit__(self, *args):
    """Releases file lock on filename."""
    # Closing the handle implicitly releases the flock.
    self._handle.close()
def get_mtime(last_export_filename):
  """Returns the file mtime, or zero if last_export_filename was created.

  Args:
    last_export_filename: str, absolute path to last export time stamp file.
  Returns:
    int, 0 if the file was created, or the mtime of existing file.
  Raises:
    IOError, if last_export_filename does not exist and cannot be created.
    OSError, if last_export_filename exists but stat info cannot be read.
  """
  if os.path.exists(last_export_filename):
    return int(os.stat(last_export_filename).st_mtime)
  # First run: create an empty timestamp file and report "no timestamp" so
  # the caller can fall back to a default start time.
  open(last_export_filename, 'w').close()
  return 0
def update_mtime(last_export_filename, mtime):
  """Updates the atime & mtime on the export timestamp file.

  Args:
    last_export_filename: str, absolute path to last export time stamp file.
    mtime: int, timestamp in seconds since epoch; used for file atime & mtime.
  Raises:
    OSError, if last_export_filename mtime cannot be updated.
  """
  # Both atime and mtime are set to the export end time.
  os.utime(last_export_filename, (mtime, mtime))
def align_timestamp(timestamp, step):
"""Adjusts 'timestamp' down to a multiple of 'step' size.
Args:
timestamp: int, timestamp in seconds since the epoch.
step: int, interval to align.
Returns:
int, timestamp adjusted to multiple of step.
"""
return timestamp - (timestamp % step)
def default_start_time(options, ts_previous):
  """Calculates a default start timestamp.

  Args:
    options: flags.FlagValues, the runtime options. These values are read:
        options.length, options.step, options.update, options.ts_offset.
    ts_previous: int, timestamp in seconds since epoch of last
        successful export. On first run, this value should be zero.
  Returns:
    int, timestamp in seconds since the epoch.
  """
  if ts_previous:
    # Typical case: resume from the end time of the previous run.
    return align_timestamp(ts_previous, options.step)

  # No previous timestamp, so this is a "first run" scenario.
  now = int(time.time())
  if options.update:
    # Likely: first automated export. Start at previous 'length'-aligned time.
    start = align_timestamp(now, options.length) - options.length
  else:
    # Unlikely: first run by a user. Start as close to 'now' as possible.
    start = now - options.length - options.ts_offset
  # Align start ts to a multiple of step size (just in case).
  return align_timestamp(start, options.step)
def default_end_time(step):
  """Calculates a default end timestamp aligned to step based on current time.

  Args:
    step: int, interval to align end time.
  Returns:
    int, timestamp in seconds since the epoch.
  """
  now = int(time.time())
  return align_timestamp(now, step)
def assert_start_and_end_times(options):
  """Performs a sanity check on start and end timestamps.

  This method asserts that ts_start precedes ts_end and that the
  difference between them equals options.length.

  Args:
    options: flags.FlagValues, the runtime options. These values are read:
        options.length, options.ts_start, options.ts_end.
  Raises:
    TimeOptionError, if a start & end time constraint is violated.
  """
  # Always check if basic constraints are respected.
  if options.ts_end <= options.ts_start:
    raise TimeOptionError('Start time must precede end time.')
  if options.ts_end - options.ts_start != options.length:
    # The original message printed '<' even though the check is for
    # inequality; use '!=' so the error reflects the actual constraint.
    msg = (
        'Difference between ts_start and ts_end times must equal length: ' +
        '%s - %s = %s != %s' %
        (options.ts_end, options.ts_start,
         (options.ts_end - options.ts_start), options.length))
    raise TimeOptionError(msg)
  logging.debug('Exporting: %s to %s', time.ctime(options.ts_start),
                time.ctime(options.ts_end))
def default_output_name(ts_start, ts_end, output_dir, rsync_name, suffix):
  """Creates a default output filename based on time range and output dir.

  Filenames are formatted with time stamps as:
     <output_dir>/<rsync_name>/YYYY/MM/DD/<HOSTNAME>/
         <ts_start>-to-<ts_end>-<suffix>.json

  The YYYY, MM, DD in the path are taken from ts_start. Both <ts_start>
  and <ts_end> are formatted as YYYYMMDDTHH:MM:SS (UTC).

  NOTE(review): the strftime pattern embeds ':' characters in the file
  name; confirm downstream consumers expect colons before changing it.

  Args:
    ts_start: int, starting timestamp of export in seconds since the epoch.
    ts_end: int, ending timestamp of export in seconds since the epoch.
    output_dir: str, base path of directory for output.
    rsync_name: str, a directory path for the rsync dropbox.
    suffix: str, the suffix to append to the end of a file name, e.g.
        <ts_start>-to-<ts_end>-<suffix>.json
  Returns:
    str, absolute path of generated output file name.
  """
  start_stamp = time.strftime('%Y%m%dT%H:%M:%S', time.gmtime(ts_start))
  end_stamp = time.strftime('%Y%m%dT%H:%M:%S', time.gmtime(ts_end))
  basename = '%s-to-%s-%s.json' % (start_stamp, end_stamp, suffix)
  date_path = time.strftime('%Y/%m/%d', time.gmtime(ts_start))
  return os.path.join(output_dir, rsync_name, date_path, HOSTNAME, basename)
def make_output_dirs(output_name):
  """Creates directory path to filename, if it does not exist.

  Args:
    output_name: str, absolute path of an output file.
  Raises:
    OSError, if directory cannot be created.
  """
  parent = os.path.dirname(output_name)
  if not parent:
    # Bare filename: nothing to create.
    return
  if not os.path.exists(parent):
    os.makedirs(parent)
def get_canonical_names(filename, value_name, options):
  """Converts raw filename and value names from RRD into canonical export names.

  Args:
    filename: str, the absolute path of an rrd file.
    value_name: str, the name of the value being exported from the RRD.
    options: flags.FlagValues, the runtime options. This method uses
        options.rrddir_prefix and all option.show_* flags.
  Returns:
    (str, str, str), with HOSTNAME, experiment name, metric name.
  """
  # Strip rrddir_prefix, remove rrd extension, and split directory components.
  short_filename = filename.replace(options.rrddir_prefix, '', 1)
  short_filename, _ = os.path.splitext(short_filename)
  file_fields = short_filename.split(os.path.sep)

  # The zeroth field is always the context hostname.
  if HOSTNAME == file_fields[0]:
    # The root context represents whole-system metrics.
    experiment = 'system'
  else:
    # A slice hostname. Everything remaining after stripping hostname.
    experiment = file_fields[0].replace('.' + HOSTNAME, '')

  metric_raw = '.'.join(file_fields[1:])
  if value_name != 'value':
    metric_raw += '.' + value_name
  # NOTE: convert the raw metric name into the canonical form, or None.
  metric = METRIC_MAP.get(metric_raw, None)

  # Optionally print extra information.
  if options.show_nagios:
    cmd = ('collectd-nagios -s $COLLECTD_UNIXSOCK -H {host} ' +
           '-n {metric} -d {value} [-w <l:h> -c <l:h>]')
    # BUG FIX: the original passed the list itself to os.path.join, which
    # printed the list repr instead of a joined path (and raises TypeError
    # on Python 3). Join the components explicitly.
    cmd = cmd.format(host=file_fields[0],
                     metric=os.path.sep.join(file_fields[1:]),
                     value=value_name)
    logging.info(cmd)
  if options.show_rrdfile:
    logging.info('rrdfile: %s', filename)
  if options.show_rawmetric:
    logging.info('metric_raw: %s', metric_raw)
  if options.show_skipped and not metric:
    logging.info('metric_skipped: %s', metric_raw)
  if options.show_metric and metric:
    logging.info('metric: %s', metric)

  return (HOSTNAME, experiment, metric)
def get_json_record(hostname, experiment, metric, timestamps, values, scale):
  """Creates a dict suitable for export to json.

  Args:
    hostname: str, hostname of host system.
    experiment: str, name of experiment running on host.
    metric: str, the canonical metric name for values.
    timestamps: iterable of int, timestamps corresponding to each value.
    values: iterable of float, values corresponding to each timestamp.
    scale: int, a constant used to scale values.
  Returns:
    dict, with keys for hostname, experiment, metric, and sample.
  """
  logging.debug('%s %s %s', hostname, experiment, metric)
  return {
      'hostname': hostname,
      'experiment': experiment,
      'metric': metric,
      'sample': get_json_record_samples(timestamps, values, scale),
  }
def get_json_record_samples(timestamps, values, scale):
  """Converts sequences of timestamps and values for a json record.

  The timestamps and values arguments must be the same length. Each value is
  multiplied by scale and the result is saved. Samples whose value is None
  (gaps in the RRD) are skipped.

  Args:
    timestamps: iterable of int, timestamps corresponding to each value.
    values: iterable of float, values corresponding to each timestamp.
    scale: int, a constant used to scale values.
  Returns:
    list of dict, each dict has keys timestamp and value.
  """
  assert (len(timestamps) == len(values))
  samples = []
  # zip replaces the Python-2-only xrange/index loop; it behaves the same
  # on Python 2 and keeps the function usable under Python 3.
  for timestamp, value in zip(timestamps, values):
    if value is not None:
      samples.append({'timestamp': timestamp,
                      'value': value * scale})
  return samples
def write_json_record(fd_output, record, pretty_json):
  """Writes json record to fd_output.

  Args:
    fd_output: file object open for writing, the record is written to this fd.
    record: dict, the record of data to serialize as json.
    pretty_json: bool, whether to write the json with extra spacing.
  """
  # A falsy indent produces compact single-line json. Each record is
  # terminated with a newline separator.
  serialized = json.dumps(record, indent=pretty_json)
  fd_output.write(serialized + '\n')
def get_rrd_files(rrddir_prefix):
  """Returns the absolute path of all rrd files found under rrddir_prefix.

  Args:
    rrddir_prefix: str, base directory where rrd files are stored.
  Returns:
    list of str, where each element is the absolute path to a single rrd file.
  """
  found = []
  for dirpath, _, basenames in os.walk(rrddir_prefix):
    found.extend(
        os.path.abspath(os.path.join(dirpath, basename))
        for basename in basenames if basename.endswith('.rrd'))
  return found
def rrd_list(options):
  """Processes all options.show_* flags without performing an export."""
  start, end = str(options.ts_start), str(options.ts_end)
  for filename in get_rrd_files(options.rrddir_prefix):
    # Only the value names are needed; timestamps and data are discarded.
    _, value_names, _ = rrdtool.fetch(filename, 'AVERAGE',
                                      '--start', start, '--end', end)
    for value_name in value_names:
      get_canonical_names(filename, value_name, options)
def rrd_export(options):
  """Exports all RRD data.

  Raises:
    OSError, if output directory cannot be created.
    IOError, if output file cannot be created or written.
  """
  open_func = open
  if options.compress:
    open_func = gzip.open
    options.output += '.gz'
  make_output_dirs(options.output)
  # When exporting counts, recover them by scaling rates by the step size.
  scale = options.step if options.counts else 1
  with contextlib.closing(open_func(options.output, 'w')) as fd_output:
    for filename in get_rrd_files(options.rrddir_prefix):
      time_range, value_names, data = rrdtool.fetch(
          filename, 'AVERAGE', '--start', str(options.ts_start), '--end',
          str(options.ts_end))
      # time_range is a (start, end, step) tuple: i.e. arguments to range.
      timestamps = range(*time_range)
      # data is a list of rows; zip transposes it into per-metric columns.
      # BUG FIX: wrap in list() so values[i] works on Python 3 as well
      # (Python 2 zip already returned a list, so behavior is unchanged).
      values = list(zip(*data))
      for i, value_name in enumerate(value_names):
        hostname, experiment, metric = get_canonical_names(
            filename, value_name, options)
        if metric is None or experiment in options.ignored_experiments:
          continue
        record = get_json_record(hostname, experiment, metric,
                                 timestamps, values[i], scale)
        write_json_record(fd_output, record, options.pretty_json)
def read_metric_map(filename, section):
  """Reads content of metric name conversion configuration file.

  The format of filename should be supported by python ConfigParser. The file
  must contain at least one section named <section>.

  Example:
    [metrics]
    raw_metric.name: canonical_metric.name

  Args:
    filename: str, the name of the metrics configuration.
    section: str, the section name that defines the metric mapping.
  Returns:
    dict, keys are raw metric names, values are canonical metric names.
  Exits:
    When filename is missing, has bad configuration, or is missing metrics
    section.
  """
  # ConfigParser.read ignores non-existent files, so check that the file
  # exists.
  if not os.path.exists(filename):
    logging.error('Config file does not exist: %s', filename)
    sys.exit(1)

  parser = ConfigParser.SafeConfigParser()
  try:
    # Catch parsing or format errors.
    parser.read(filename)
  except ConfigParser.Error as err:
    logging.error('Error while reading %s: %s', filename, err)
    sys.exit(1)

  if not parser.has_section(section):
    logging.error('Config file is missing "[%s]" section' % section)
    sys.exit(1)

  return dict(parser.items(section))
def any_show_options(options):
  """Checks if any show options are True."""
  show_flags = (options.show_nagios, options.show_rrdfile,
                options.show_rawmetric, options.show_metric,
                options.show_skipped)
  return any(show_flags)
def init_args(options, ts_previous):
  """Initializes flags with default values and asserts sanity checks.

  Args:
    options: flags.FlagValues, the unprocessed defaults from flags.FLAGS.
    ts_previous: int, timestamp in seconds since epoch of last export.
  Returns:
    flags.FlagValues, options with updated defaults.
  """
  global METRIC_MAP
  # The metric-map section name is the suffix (e.g. "[metrics]").
  METRIC_MAP = read_metric_map(options.export_metrics, options.suffix)
  if options.verbose:
    logging.basicConfig(level=logging.DEBUG)
  if options.ts_start is None:
    options.ts_start = default_start_time(options, ts_previous)
  if options.ts_end is None:
    options.ts_end = default_end_time(options.length)
  # Recompute length from the final (possibly user-supplied) bounds.
  options.length = options.ts_end - options.ts_start
  if any_show_options(options):
    # Show-only runs must not advance the export timestamp.
    options.update = False
  if options.rrddir_prefix[-1] != os.path.sep:
    # Ensure that the last character of rrddir_prefix includes path separator.
    options.rrddir_prefix += os.path.sep
  assert_start_and_end_times(options)
  if options.output is None:
    rsync_name = options.suffix
    if options.suffix == 'metrics':
      # A legacy name. Ideally, suffix and rsync_name should be the same.
      rsync_name = 'utilization'
    options.output = default_output_name(options.ts_start, options.ts_end,
                                         options.output_dir, rsync_name,
                                         options.suffix)
  return options
def parse_args(ts_previous):
  """Parses command line arguments and initialize defaults.

  Args:
    ts_previous: int, timestamp in seconds since epoch of last successful
        export. On first run, this value should be zero.
  Returns:
    flags.FlagValues, all options.
  """
  try:
    # Parses flags. Any remaining args are unused.
    flags.FLAGS(sys.argv)
  except flags.FlagsError, err:
    # NOTE(review): Python-2-only except syntax; this module targets py2.
    logging.error('%s\nUsage: %s ARGS\n%s', err, sys.argv[0], flags.FLAGS)
    sys.exit(1)
  try:
    return init_args(flags.FLAGS, ts_previous)
  except TimeOptionError as err:
    logging.error(err)
    sys.exit(1)
def main():
  """Entry point: acquires the export lock, then lists or exports metrics."""
  init_global()
  try:
    # The lock file prevents concurrent exports (e.g. overlapping cron runs).
    with LockFile(EXPORT_LOCKFILE):
      options = parse_args(get_mtime(LAST_EXPORT_FILENAME))
      if any_show_options(options):
        rrd_list(options)
      else:
        rrd_export(options)
      # Update last_export mtime only after everything completes
      # successfully.
      if options.update:
        update_mtime(LAST_EXPORT_FILENAME, options.ts_end)
  except (OSError, IOError) as err:
    logging.error('Export failure: %s', err)
    sys.exit(1)
  except LockFileError as err:
    logging.error('Failed to acquire lockfile %s: %s', EXPORT_LOCKFILE, err)
    sys.exit(1)


if __name__ == '__main__':  # pragma: no cover.
  main()
| |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for LocalFileSystem."""
import filecmp
import os
import shutil
import tempfile
import unittest
import mock
from apache_beam.io import localfilesystem
from apache_beam.io.filesystem import BeamIOError
def _gen_fake_join(separator):
"""Returns a callable that joins paths with the given separator."""
def _join(first_path, *paths):
return separator.join((first_path.rstrip(separator),) + paths)
return _join
def _gen_fake_split(separator):
"""Returns a callable that splits a with the given separator."""
def _split(path):
sep_index = path.rfind(separator)
if sep_index >= 0:
return (path[:sep_index], path[sep_index + 1:])
else:
return (path, '')
return _split
class LocalFileSystemTest(unittest.TestCase):
  def setUp(self):
    # Fresh temp directory and filesystem instance for every test.
    self.tmpdir = tempfile.mkdtemp()
    self.fs = localfilesystem.LocalFileSystem()
  def tearDown(self):
    # Remove everything the test created under the temp directory.
    shutil.rmtree(self.tmpdir)
  def test_scheme(self):
    # scheme() is None for the local filesystem, on both the instance and
    # the class.
    self.assertIsNone(self.fs.scheme())
    self.assertIsNone(localfilesystem.LocalFileSystem.scheme())
  @mock.patch('apache_beam.io.localfilesystem.os')
  def test_unix_path_join(self, *unused_mocks):
    # Test joining of Unix paths.
    # os is mocked so the join behaves like Unix regardless of host platform.
    localfilesystem.os.path.join.side_effect = _gen_fake_join('/')
    self.assertEqual('/tmp/path/to/file',
                     self.fs.join('/tmp/path', 'to', 'file'))
    self.assertEqual('/tmp/path/to/file',
                     self.fs.join('/tmp/path', 'to/file'))
  @mock.patch('apache_beam.io.localfilesystem.os')
  def test_windows_path_join(self, *unused_mocks):
    # Test joining of Windows paths.
    # os is mocked so the join behaves like Windows regardless of platform.
    localfilesystem.os.path.join.side_effect = _gen_fake_join('\\')
    self.assertEqual(r'C:\tmp\path\to\file',
                     self.fs.join(r'C:\tmp\path', 'to', 'file'))
    self.assertEqual(r'C:\tmp\path\to\file',
                     self.fs.join(r'C:\tmp\path', r'to\file'))
@mock.patch('apache_beam.io.localfilesystem.os')
def test_unix_path_split(self, os_mock):
os_mock.path.abspath.side_effect = lambda a: a
os_mock.path.split.side_effect = _gen_fake_split('/')
self.assertEqual(('/tmp/path/to', 'file'),
self.fs.split('/tmp/path/to/file'))
# Actual os.path.split will split following to '/' and 'tmp' when run in
# Unix.
self.assertEqual(('', 'tmp'),
self.fs.split('/tmp'))
@mock.patch('apache_beam.io.localfilesystem.os')
def test_windows_path_split(self, os_mock):
os_mock.path.abspath = lambda a: a
os_mock.path.split.side_effect = _gen_fake_split('\\')
self.assertEqual((r'C:\tmp\path\to', 'file'),
self.fs.split(r'C:\tmp\path\to\file'))
# Actual os.path.split will split following to 'C:\' and 'tmp' when run in
# Windows.
self.assertEqual((r'C:', 'tmp'),
self.fs.split(r'C:\tmp'))
def test_mkdirs(self):
path = os.path.join(self.tmpdir, 't1/t2')
self.fs.mkdirs(path)
self.assertTrue(os.path.isdir(path))
def test_mkdirs_failed(self):
path = os.path.join(self.tmpdir, 't1/t2')
self.fs.mkdirs(path)
# Check IOError if existing directory is created
with self.assertRaises(IOError):
self.fs.mkdirs(path)
with self.assertRaises(IOError):
self.fs.mkdirs(os.path.join(self.tmpdir, 't1'))
def test_match_file(self):
path = os.path.join(self.tmpdir, 'f1')
open(path, 'a').close()
# Match files in the temp directory
result = self.fs.match([path])[0]
files = [f.path for f in result.metadata_list]
self.assertEqual(files, [path])
def test_match_file_empty(self):
path = os.path.join(self.tmpdir, 'f2') # Does not exist
# Match files in the temp directory
result = self.fs.match([path])[0]
files = [f.path for f in result.metadata_list]
self.assertEqual(files, [])
def test_match_file_exception(self):
# Match files with None so that it throws an exception
with self.assertRaises(BeamIOError) as error:
self.fs.match([None])
self.assertTrue(
error.exception.message.startswith('Match operation failed'))
self.assertEqual(error.exception.exception_details.keys(), [None])
def test_match_directory(self):
path1 = os.path.join(self.tmpdir, 'f1')
path2 = os.path.join(self.tmpdir, 'f2')
open(path1, 'a').close()
open(path2, 'a').close()
# Match both the files in the directory
path = os.path.join(self.tmpdir, '*')
result = self.fs.match([path])[0]
files = [f.path for f in result.metadata_list]
self.assertEqual(files, [path1, path2])
def test_match_directory(self):
result = self.fs.match([self.tmpdir])[0]
files = [f.path for f in result.metadata_list]
self.assertEqual(files, [self.tmpdir])
def test_copy(self):
path1 = os.path.join(self.tmpdir, 'f1')
path2 = os.path.join(self.tmpdir, 'f2')
with open(path1, 'a') as f:
f.write('Hello')
self.fs.copy([path1], [path2])
self.assertTrue(filecmp.cmp(path1, path2))
def test_copy_error(self):
path1 = os.path.join(self.tmpdir, 'f1')
path2 = os.path.join(self.tmpdir, 'f2')
with self.assertRaises(BeamIOError) as error:
self.fs.copy([path1], [path2])
self.assertTrue(
error.exception.message.startswith('Copy operation failed'))
self.assertEqual(error.exception.exception_details.keys(), [(path1, path2)])
def test_copy_directory(self):
path_t1 = os.path.join(self.tmpdir, 't1')
path_t2 = os.path.join(self.tmpdir, 't2')
self.fs.mkdirs(path_t1)
self.fs.mkdirs(path_t2)
path1 = os.path.join(path_t1, 'f1')
path2 = os.path.join(path_t2, 'f1')
with open(path1, 'a') as f:
f.write('Hello')
self.fs.copy([path_t1], [path_t2])
self.assertTrue(filecmp.cmp(path1, path2))
def test_rename(self):
path1 = os.path.join(self.tmpdir, 'f1')
path2 = os.path.join(self.tmpdir, 'f2')
with open(path1, 'a') as f:
f.write('Hello')
self.fs.rename([path1], [path2])
self.assertTrue(self.fs.exists(path2))
self.assertFalse(self.fs.exists(path1))
def test_rename_error(self):
path1 = os.path.join(self.tmpdir, 'f1')
path2 = os.path.join(self.tmpdir, 'f2')
with self.assertRaises(BeamIOError) as error:
self.fs.rename([path1], [path2])
self.assertTrue(
error.exception.message.startswith('Rename operation failed'))
self.assertEqual(error.exception.exception_details.keys(), [(path1, path2)])
def test_rename_directory(self):
path_t1 = os.path.join(self.tmpdir, 't1')
path_t2 = os.path.join(self.tmpdir, 't2')
self.fs.mkdirs(path_t1)
path1 = os.path.join(path_t1, 'f1')
path2 = os.path.join(path_t2, 'f1')
with open(path1, 'a') as f:
f.write('Hello')
self.fs.rename([path_t1], [path_t2])
self.assertTrue(self.fs.exists(path_t2))
self.assertFalse(self.fs.exists(path_t1))
self.assertTrue(self.fs.exists(path2))
self.assertFalse(self.fs.exists(path1))
def test_exists(self):
path1 = os.path.join(self.tmpdir, 'f1')
path2 = os.path.join(self.tmpdir, 'f2')
with open(path1, 'a') as f:
f.write('Hello')
self.assertTrue(self.fs.exists(path1))
self.assertFalse(self.fs.exists(path2))
def test_delete(self):
path1 = os.path.join(self.tmpdir, 'f1')
with open(path1, 'a') as f:
f.write('Hello')
self.assertTrue(self.fs.exists(path1))
self.fs.delete([path1])
self.assertFalse(self.fs.exists(path1))
def test_delete_error(self):
path1 = os.path.join(self.tmpdir, 'f1')
with self.assertRaises(BeamIOError) as error:
self.fs.delete([path1])
self.assertTrue(
error.exception.message.startswith('Delete operation failed'))
self.assertEqual(error.exception.exception_details.keys(), [path1])
| |
from __future__ import print_function, division
from collections import defaultdict
from .basic import Basic
from .compatibility import cmp_to_key, reduce, is_sequence, range
from .logic import _fuzzy_group, fuzzy_or, fuzzy_not, fuzzy_and
from .singleton import S
from .operations import AssocOp
from .cache import cacheit
from .numbers import ilcm, igcd
from .expr import Expr
# Key for sorting commutative args in canonical order
_args_sortkey = cmp_to_key(Basic.compare)


def _addsort(args):
    """Sort ``args`` in place into the canonical order used by Add.

    The ordering comes from ``Basic.compare`` via ``_args_sortkey``.
    """
    # in-place sorting of args
    args.sort(key=_args_sortkey)
def _unevaluated_Add(*args):
    """Return a well-formed unevaluated Add: Numbers are collected and
    put in slot 0 and args are sorted. Use this when args have changed
    but you still want to return an unevaluated Add.

    Examples
    ========

    >>> from sympy.core.add import _unevaluated_Add as uAdd
    >>> from sympy import S, Add
    >>> from sympy.abc import x, y
    >>> a = uAdd(*[S(1.0), x, S(2)])
    >>> a.args[0]
    3.00000000000000
    >>> a.args[1]
    x

    Beyond the Number being in slot 0, there is no other assurance of
    order for the arguments since they are hash sorted. So, for testing
    purposes, output produced by this in some other function can only
    be tested against the output of this function or as one of several
    options:

    >>> opts = (Add(x, y, evaluate=False), Add(y, x, evaluate=False))
    >>> a = uAdd(x, y)
    >>> assert a in opts and a == uAdd(x, y)
    """
    # Fix: the doctest previously used the invalid keyword ``evaluated=False``;
    # Add accepts ``evaluate=False``.
    args = list(args)
    newargs = []
    co = S.Zero
    while args:
        a = args.pop()
        if a.is_Add:
            # this will keep nesting from building up
            # so that x + (x + 1) -> x + x + 1 (3 args)
            args.extend(a.args)
        elif a.is_Number:
            co += a
        else:
            newargs.append(a)
    _addsort(newargs)
    if co:
        # collected Number goes in slot 0
        newargs.insert(0, co)
    return Add._from_args(newargs)
class Add(Expr, AssocOp):
    """Expression class for addition; ``x + y`` produces ``Add(x, y)``."""

    __slots__ = []

    is_Add = True

    @classmethod
    def flatten(cls, seq):
        """
        Takes the sequence "seq" of nested Adds and returns a flatten list.

        Returns: (commutative_part, noncommutative_part, order_symbols)

        Applies associativity, all terms are commutable with respect to
        addition.

        NB: the removal of 0 is already handled by AssocOp.__new__

        See also
        ========

        sympy.core.mul.Mul.flatten
        """
        from sympy.calculus.util import AccumBounds
        rv = None
        # Fast path: Rational + Mul stays unevaluated.
        if len(seq) == 2:
            a, b = seq
            if b.is_Rational:
                a, b = b, a
            if a.is_Rational:
                if b.is_Mul:
                    rv = [a, b], [], None
            if rv:
                if all(s.is_commutative for s in rv[0]):
                    return rv
                return [], rv[0], None
        terms = {}  # term -> coeff
                    # e.g. x**2 -> 5 for ... + 5*x**2 + ...
        coeff = S.Zero  # coefficient (Number or zoo) to always be in slot 0
                        # e.g. 3 + ...
        order_factors = []
        for o in seq:
            # O(x)
            if o.is_Order:
                # drop this O() if an already-seen O() absorbs it
                for o1 in order_factors:
                    if o1.contains(o):
                        o = None
                        break
                if o is None:
                    continue
                # keep only order factors not absorbed by the new one
                order_factors = [o] + [
                    o1 for o1 in order_factors if not o.contains(o1)]
                continue
            # 3 or NaN
            elif o.is_Number:
                if (o is S.NaN or coeff is S.ComplexInfinity and
                        o.is_finite is False):
                    # we know for sure the result will be nan
                    return [S.NaN], [], None
                if coeff.is_Number:
                    coeff += o
                    if coeff is S.NaN:
                        # we know for sure the result will be nan
                        return [S.NaN], [], None
                continue
            elif isinstance(o, AccumBounds):
                # AccumBounds defines its own addition with numbers
                coeff = o.__add__(coeff)
                continue
            elif o is S.ComplexInfinity:
                if coeff.is_finite is False:
                    # we know for sure the result will be nan
                    return [S.NaN], [], None
                coeff = S.ComplexInfinity
                continue
            # Add([...])
            elif o.is_Add:
                # NB: here we assume Add is always commutative
                seq.extend(o.args)  # TODO zerocopy?
                continue
            # Mul([...])
            elif o.is_Mul:
                c, s = o.as_coeff_Mul()
            # check for unevaluated Pow, e.g. 2**3 or 2**(-1/2)
            elif o.is_Pow:
                b, e = o.as_base_exp()
                if b.is_Number and (e.is_Integer or
                                    (e.is_Rational and e.is_negative)):
                    # evaluate the Pow and re-process the result
                    seq.append(b**e)
                    continue
                c, s = S.One, o
            else:
                # everything else
                c = S.One
                s = o
            # now we have:
            # o = c*s, where
            #
            # c is a Number
            # s is an expression with number factor extracted
            # let's collect terms with the same s, so e.g.
            # 2*x**2 + 3*x**2 -> 5*x**2
            if s in terms:
                terms[s] += c
                if terms[s] is S.NaN:
                    # we know for sure the result will be nan
                    return [S.NaN], [], None
            else:
                terms[s] = c
        # now let's construct new args:
        # [2*x**2, x**3, 7*x**4, pi, ...]
        newseq = []
        noncommutative = False
        for s, c in terms.items():
            # 0*s
            if c is S.Zero:
                continue
            # 1*s
            elif c is S.One:
                newseq.append(s)
            # c*s
            else:
                if s.is_Mul:
                    # Mul, already keeps its arguments in perfect order.
                    # so we can simply put c in slot0 and go the fast way.
                    cs = s._new_rawargs(*((c,) + s.args))
                    newseq.append(cs)
                elif s.is_Add:
                    # we just re-create the unevaluated Mul
                    newseq.append(Mul(c, s, evaluate=False))
                else:
                    # alternatively we have to call all Mul's machinery (slow)
                    newseq.append(Mul(c, s))
            noncommutative = noncommutative or not s.is_commutative
        # oo, -oo
        if coeff is S.Infinity:
            # finite/nonnegative terms are absorbed by +oo
            newseq = [f for f in newseq if not
                      (f.is_nonnegative or f.is_real and f.is_finite)]
        elif coeff is S.NegativeInfinity:
            # finite/nonpositive terms are absorbed by -oo
            newseq = [f for f in newseq if not
                      (f.is_nonpositive or f.is_real and f.is_finite)]
        if coeff is S.ComplexInfinity:
            # zoo might be
            #   infinite_real + finite_im
            #   finite_real + infinite_im
            #   infinite_real + infinite_im
            # addition of a finite real or imaginary number won't be able to
            # change the zoo nature; adding an infinite quantity would result
            # in a NaN condition if it had sign opposite of the infinite
            # portion of zoo, e.g., infinite_real - infinite_real.
            newseq = [c for c in newseq if not (c.is_finite and
                                                c.is_real is not None)]
        # process O(x)
        if order_factors:
            newseq2 = []
            for t in newseq:
                for o in order_factors:
                    # x + O(x) -> O(x)
                    if o.contains(t):
                        t = None
                        break
                # x + O(x**2) -> x + O(x**2)
                if t is not None:
                    newseq2.append(t)
            newseq = newseq2 + order_factors
            # 1 + O(1) -> O(1)
            for o in order_factors:
                if o.contains(coeff):
                    coeff = S.Zero
                    break
        # order args canonically
        _addsort(newseq)
        # current code expects coeff to be first
        if coeff is not S.Zero:
            newseq.insert(0, coeff)
        # we are done
        if noncommutative:
            return [], newseq, None
        else:
            return newseq, [], None
@classmethod
def class_key(cls):
"""Nice order of classes"""
return 3, 1, cls.__name__
def as_coefficients_dict(a):
"""Return a dictionary mapping terms to their Rational coefficient.
Since the dictionary is a defaultdict, inquiries about terms which
were not present will return a coefficient of 0. If an expression is
not an Add it is considered to have a single term.
Examples
========
>>> from sympy.abc import a, x
>>> (3*x + a*x + 4).as_coefficients_dict()
{1: 4, x: 3, a*x: 1}
>>> _[a]
0
>>> (3*a*x).as_coefficients_dict()
{a*x: 3}
"""
d = defaultdict(list)
for ai in a.args:
c, m = ai.as_coeff_Mul()
d[m].append(c)
for k, v in d.items():
if len(v) == 1:
d[k] = v[0]
else:
d[k] = Add(*v)
di = defaultdict(int)
di.update(d)
return di
@cacheit
def as_coeff_add(self, *deps):
"""
Returns a tuple (coeff, args) where self is treated as an Add and coeff
is the Number term and args is a tuple of all other terms.
Examples
========
>>> from sympy.abc import x
>>> (7 + 3*x).as_coeff_add()
(7, (3*x,))
>>> (7*x).as_coeff_add()
(0, (7*x,))
"""
if deps:
l1 = []
l2 = []
for f in self.args:
if f.has(*deps):
l2.append(f)
else:
l1.append(f)
return self._new_rawargs(*l1), tuple(l2)
coeff, notrat = self.args[0].as_coeff_add()
if coeff is not S.Zero:
return coeff, notrat + self.args[1:]
return S.Zero, self.args
def as_coeff_Add(self):
"""Efficiently extract the coefficient of a summation. """
coeff, args = self.args[0], self.args[1:]
if coeff.is_Number:
if len(args) == 1:
return coeff, args[0]
else:
return coeff, self._new_rawargs(*args)
else:
return S.Zero, self
# Note, we intentionally do not implement Add.as_coeff_mul(). Rather, we
# let Expr.as_coeff_mul() just always return (S.One, self) for an Add. See
# issue 5524.
@cacheit
def _eval_derivative(self, s):
return self.func(*[a.diff(s) for a in self.args])
def _eval_nseries(self, x, n, logx):
terms = [t.nseries(x, n=n, logx=logx) for t in self.args]
return self.func(*terms)
def _matches_simple(self, expr, repl_dict):
# handle (w+3).matches('x+5') -> {w: x+2}
coeff, terms = self.as_coeff_add()
if len(terms) == 1:
return terms[0].matches(expr - coeff, repl_dict)
return
def matches(self, expr, repl_dict={}, old=False):
return AssocOp._matches_commutative(self, expr, repl_dict, old)
@staticmethod
def _combine_inverse(lhs, rhs):
"""
Returns lhs - rhs, but treats arguments like symbols, so things like
oo - oo return 0, instead of a nan.
"""
from sympy import oo, I, expand_mul
if lhs == oo and rhs == oo or lhs == oo*I and rhs == oo*I:
return S.Zero
return expand_mul(lhs - rhs)
    @cacheit
    def as_two_terms(self):
        """Return head and tail of self.

        This is the most efficient way to get the head and tail of an
        expression.

        - if you want only the head, use self.args[0];
        - if you want to process the arguments of the tail then use
          self.as_coeff_add() which gives the head and a tuple containing
          the arguments of the tail when treated as an Add.
        - if you want the coefficient when self is treated as a Mul
          then use self.as_coeff_mul()[0]

        >>> from sympy.abc import x, y
        >>> (3 + x + y).as_two_terms()
        (3, x + y)
        """
        # Single-arg Add (e.g. from _new_rawargs): head is zero.
        if len(self.args) == 1:
            return S.Zero, self
        return self.args[0], self._new_rawargs(*self.args[1:])
    def as_numer_denom(self):
        """Return (numerator, denominator) of self viewed as one fraction."""
        # clear rational denominator
        content, expr = self.primitive()
        ncon, dcon = content.as_numer_denom()
        # collect numerators and denominators of the terms
        nd = defaultdict(list)
        for f in expr.args:
            ni, di = f.as_numer_denom()
            nd[di].append(ni)
        # put infinity in the numerator
        if S.Zero in nd:
            n = nd.pop(S.Zero)
            assert len(n) == 1
            n = n[0]
            nd[S.One].append(n/S.Zero)
        # check for quick exit
        if len(nd) == 1:
            d, n = nd.popitem()
            return self.func(
                *[_keep_coeff(ncon, ni) for ni in n]), _keep_coeff(dcon, d)
        # sum up the terms having a common denominator
        for d, n in nd.items():
            if len(n) == 1:
                nd[d] = n[0]
            else:
                nd[d] = self.func(*n)
        # assemble single numerator and denominator:
        # numerator = sum over terms of (its numerator * all OTHER denoms),
        # denominator = product of all denominators
        denoms, numers = [list(i) for i in zip(*iter(nd.items()))]
        n, d = self.func(*[Mul(*(denoms[:i] + [numers[i]] + denoms[i + 1:]))
                           for i in range(len(numers))]), Mul(*denoms)
        return _keep_coeff(ncon, n), _keep_coeff(dcon, d)
def _eval_is_polynomial(self, syms):
return all(term._eval_is_polynomial(syms) for term in self.args)
def _eval_is_rational_function(self, syms):
return all(term._eval_is_rational_function(syms) for term in self.args)
def _eval_is_algebraic_expr(self, syms):
return all(term._eval_is_algebraic_expr(syms) for term in self.args)
# assumption methods
_eval_is_real = lambda self: _fuzzy_group(
(a.is_real for a in self.args), quick_exit=True)
_eval_is_complex = lambda self: _fuzzy_group(
(a.is_complex for a in self.args), quick_exit=True)
_eval_is_antihermitian = lambda self: _fuzzy_group(
(a.is_antihermitian for a in self.args), quick_exit=True)
_eval_is_finite = lambda self: _fuzzy_group(
(a.is_finite for a in self.args), quick_exit=True)
_eval_is_hermitian = lambda self: _fuzzy_group(
(a.is_hermitian for a in self.args), quick_exit=True)
_eval_is_integer = lambda self: _fuzzy_group(
(a.is_integer for a in self.args), quick_exit=True)
_eval_is_rational = lambda self: _fuzzy_group(
(a.is_rational for a in self.args), quick_exit=True)
_eval_is_algebraic = lambda self: _fuzzy_group(
(a.is_algebraic for a in self.args), quick_exit=True)
_eval_is_commutative = lambda self: _fuzzy_group(
a.is_commutative for a in self.args)
    def _eval_is_imaginary(self):
        """Fuzzy test whether the sum is purely imaginary."""
        nz = []    # real addends known to be nonzero
        im_I = []  # imaginary addends, each multiplied by I (so they are real)
        for a in self.args:
            if a.is_real:
                if a.is_zero:
                    pass
                elif a.is_zero is False:
                    nz.append(a)
                else:
                    # real but zero-ness unknown: undecidable
                    return
            elif a.is_imaginary:
                im_I.append(a*S.ImaginaryUnit)
            elif (S.ImaginaryUnit*a).is_real:
                im_I.append(a*S.ImaginaryUnit)
            else:
                return
        if self.func(*nz).is_zero:
            # real part vanishes; imaginary iff the I-part does not vanish
            return fuzzy_not(self.func(*im_I).is_zero)
        elif self.func(*nz).is_zero is False:
            # nonzero real part: cannot be purely imaginary
            return False
    def _eval_is_zero(self):
        """Fuzzy test whether the sum is zero."""
        nz = []          # real addends known to be nonzero
        z = 0            # count of addends known to be zero
        im_or_z = False  # saw a term whose product with I is real
        im = False       # saw an imaginary term
        for a in self.args:
            if a.is_real:
                if a.is_zero:
                    z += 1
                elif a.is_zero is False:
                    nz.append(a)
                else:
                    # real but zero-ness unknown: undecidable
                    return
            elif a.is_imaginary:
                im = True
            elif (S.ImaginaryUnit*a).is_real:
                im_or_z = True
            else:
                return
        if z == len(self.args):
            # every addend is zero
            return True
        if self.func(*nz).is_zero:
            if not im_or_z and not im:
                return True
            if im and not im_or_z:
                # zero real part plus a genuinely imaginary part
                return False
        if self.func(*nz).is_zero is False:
            return False
def _eval_is_odd(self):
l = [f for f in self.args if not (f.is_even is True)]
if not l:
return False
if l[0].is_odd:
return self._new_rawargs(*l[1:]).is_even
def _eval_is_irrational(self):
for t in self.args:
a = t.is_irrational
if a:
others = list(self.args)
others.remove(t)
if all(x.is_rational is True for x in others):
return True
return None
if a is None:
return
return False
    def _eval_is_positive(self):
        """Fuzzy test whether the sum is positive."""
        from sympy.core.exprtools import _monotonic_sign
        if self.is_number:
            # numeric sums are handled generically
            return super(Add, self)._eval_is_positive()
        c, a = self.as_coeff_Add()
        if not c.is_zero:
            # try bounding the symbolic part via monotonicity
            v = _monotonic_sign(a)
            if v is not None:
                s = v + c
                if s.is_positive and a.is_nonnegative:
                    return True
            if len(self.free_symbols) == 1:
                v = _monotonic_sign(self)
                if v is not None and v.is_positive:
                    return True
        pos = nonneg = nonpos = unknown_sign = False
        saw_INF = set()
        args = [a for a in self.args if not a.is_zero]
        if not args:
            # empty sum is zero, not positive
            return False
        for a in args:
            ispos = a.is_positive
            infinite = a.is_infinite
            if infinite:
                saw_INF.add(fuzzy_or((ispos, a.is_nonnegative)))
                if True in saw_INF and False in saw_INF:
                    # +oo and -oo together: undecidable (nan)
                    return
            if ispos:
                pos = True
                continue
            elif a.is_nonnegative:
                nonneg = True
                continue
            elif a.is_nonpositive:
                nonpos = True
                continue
            if infinite is None:
                return
            unknown_sign = True
        if saw_INF:
            if len(saw_INF) > 1:
                return
            # sign of the single infinite direction decides
            return saw_INF.pop()
        elif unknown_sign:
            return
        elif not nonpos and not nonneg and pos:
            return True
        elif not nonpos and pos:
            return True
        elif not pos and not nonneg:
            return False
def _eval_is_nonnegative(self):
from sympy.core.exprtools import _monotonic_sign
if not self.is_number:
c, a = self.as_coeff_Add()
if not c.is_zero and a.is_nonnegative:
v = _monotonic_sign(a)
if v is not None:
s = v + c
if s.is_nonnegative:
return True
if len(self.free_symbols) == 1:
v = _monotonic_sign(self)
if v is not None and v.is_nonnegative:
return True
def _eval_is_nonpositive(self):
from sympy.core.exprtools import _monotonic_sign
if not self.is_number:
c, a = self.as_coeff_Add()
if not c.is_zero and a.is_nonpositive:
v = _monotonic_sign(a)
if v is not None:
s = v + c
if s.is_nonpositive:
return True
if len(self.free_symbols) == 1:
v = _monotonic_sign(self)
if v is not None and v.is_nonpositive:
return True
    def _eval_is_negative(self):
        """Fuzzy test whether the sum is negative (mirror of is_positive)."""
        from sympy.core.exprtools import _monotonic_sign
        if self.is_number:
            # numeric sums are handled generically
            return super(Add, self)._eval_is_negative()
        c, a = self.as_coeff_Add()
        if not c.is_zero:
            # try bounding the symbolic part via monotonicity
            v = _monotonic_sign(a)
            if v is not None:
                s = v + c
                if s.is_negative and a.is_nonpositive:
                    return True
            if len(self.free_symbols) == 1:
                v = _monotonic_sign(self)
                if v is not None and v.is_negative:
                    return True
        neg = nonpos = nonneg = unknown_sign = False
        saw_INF = set()
        args = [a for a in self.args if not a.is_zero]
        if not args:
            # empty sum is zero, not negative
            return False
        for a in args:
            isneg = a.is_negative
            infinite = a.is_infinite
            if infinite:
                saw_INF.add(fuzzy_or((isneg, a.is_nonpositive)))
                if True in saw_INF and False in saw_INF:
                    # opposing infinities: undecidable (nan)
                    return
            if isneg:
                neg = True
                continue
            elif a.is_nonpositive:
                nonpos = True
                continue
            elif a.is_nonnegative:
                nonneg = True
                continue
            if infinite is None:
                return
            unknown_sign = True
        if saw_INF:
            if len(saw_INF) > 1:
                return
            # sign of the single infinite direction decides
            return saw_INF.pop()
        elif unknown_sign:
            return
        elif not nonneg and not nonpos and neg:
            return True
        elif not nonneg and neg:
            return True
        elif not neg and not nonpos:
            return False
    def _eval_subs(self, old, new):
        """Substitute an Add ``old`` with ``new`` inside this Add, handling
        partial overlaps like (a+b+c).subs(b+c, x) -> a + x."""
        if not old.is_Add:
            return None
        coeff_self, terms_self = self.as_coeff_Add()
        coeff_old, terms_old = old.as_coeff_Add()
        if coeff_self.is_Rational and coeff_old.is_Rational:
            if terms_self == terms_old:   # (2 + a).subs( 3 + a, y) -> -1 + y
                return self.func(new, coeff_self, -coeff_old)
            if terms_self == -terms_old:  # (2 + a).subs(-3 - a, y) -> -1 - y
                return self.func(-new, coeff_self, coeff_old)
        if coeff_self.is_Rational and coeff_old.is_Rational \
                or coeff_self == coeff_old:
            args_old, args_self = self.func.make_args(
                terms_old), self.func.make_args(terms_self)
            if len(args_old) < len(args_self):  # (a+b+c).subs(b+c,x) -> a+x
                self_set = set(args_self)
                old_set = set(args_old)
                if old_set < self_set:
                    # old's terms are a proper subset: replace them by new
                    ret_set = self_set - old_set
                    return self.func(new, coeff_self, -coeff_old,
                                     *[s._subs(old, new) for s in ret_set])
                args_old = self.func.make_args(
                    -terms_old)     # (a+b+c+d).subs(-b-c,x) -> a-x+d
                old_set = set(args_old)
                if old_set < self_set:
                    ret_set = self_set - old_set
                    return self.func(-new, coeff_self, coeff_old,
                                     *[s._subs(old, new) for s in ret_set])
def removeO(self):
args = [a for a in self.args if not a.is_Order]
return self._new_rawargs(*args)
def getO(self):
args = [a for a in self.args if a.is_Order]
if args:
return self._new_rawargs(*args)
    @cacheit
    def extract_leading_order(self, symbols, point=None):
        """
        Returns the leading term and its order.

        Examples
        ========

        >>> from sympy.abc import x
        >>> (x + 1 + 1/x**5).extract_leading_order(x)
        ((x**(-5), O(x**(-5))),)
        >>> (1 + x).extract_leading_order(x)
        ((1, O(1)),)
        >>> (x + x**2).extract_leading_order(x)
        ((x, O(x)),)
        """
        from sympy import Order
        lst = []
        symbols = list(symbols if is_sequence(symbols) else [symbols])
        if not point:
            point = [0]*len(symbols)
        # pair each addend with its Order at the expansion point
        seq = [(f, Order(f, *zip(symbols, point))) for f in self.args]
        for ef, of in seq:
            # discard this term if an already-kept term dominates it
            for e, o in lst:
                if o.contains(of) and o != of:
                    of = None
                    break
            if of is None:
                continue
            # keep this term and drop any previously-kept terms it dominates
            new_lst = [(ef, of)]
            for e, o in lst:
                if of.contains(o) and o != of:
                    continue
                new_lst.append((e, o))
            lst = new_lst
        return tuple(lst)
def as_real_imag(self, deep=True, **hints):
"""
returns a tuple representing a complex number
Examples
========
>>> from sympy import I
>>> (7 + 9*I).as_real_imag()
(7, 9)
>>> ((1 + I)/(1 - I)).as_real_imag()
(0, 1)
>>> ((1 + 2*I)*(1 + 3*I)).as_real_imag()
(-5, 5)
"""
sargs, terms = self.args, []
re_part, im_part = [], []
for term in sargs:
re, im = term.as_real_imag(deep=deep)
re_part.append(re)
im_part.append(im)
return (self.func(*re_part), self.func(*im_part))
    def _eval_as_leading_term(self, x):
        """Return the leading term of the sum with respect to ``x``."""
        from sympy import expand_mul, factor_terms
        old = self
        expr = expand_mul(self)
        if not expr.is_Add:
            return expr.as_leading_term(x)
        # remember infinite addends in case leading-term analysis yields NaN
        infinite = [t for t in expr.args if t.is_infinite]
        expr = expr.func(*[t.as_leading_term(x) for t in expr.args]).removeO()
        if not expr:
            # simple leading term analysis gave us 0 but we have to send
            # back a term, so compute the leading term (via series)
            return old.compute_leading_term(x)
        elif expr is S.NaN:
            return old.func._from_args(infinite)
        elif not expr.is_Add:
            return expr
        else:
            plain = expr.func(*[s for s, _ in expr.extract_leading_order(x)])
            rv = factor_terms(plain, fraction=False)
            rv_simplify = rv.simplify()
            # if it simplifies to an x-free expression, return that;
            # tests don't fail if we don't but it seems nicer to do this
            if x not in rv_simplify.free_symbols:
                if rv_simplify.is_zero and plain.is_zero is not True:
                    # cancellation hid the true leading term; retry on the rest
                    return (expr - plain)._eval_as_leading_term(x)
                return rv_simplify
            return rv
def _eval_adjoint(self):
return self.func(*[t.adjoint() for t in self.args])
def _eval_conjugate(self):
return self.func(*[t.conjugate() for t in self.args])
def _eval_transpose(self):
return self.func(*[t.transpose() for t in self.args])
def __neg__(self):
return self.func(*[-t for t in self.args])
def _sage_(self):
s = 0
for x in self.args:
s += x._sage_()
return s
    def primitive(self):
        """
        Return ``(R, self/R)`` where ``R`` is the Rational GCD of ``self``.

        ``R`` is collected only from the leading coefficient of each term.

        Examples
        ========

        >>> from sympy.abc import x, y

        >>> (2*x + 4*y).primitive()
        (2, x + 2*y)

        >>> (2*x/3 + 4*y/9).primitive()
        (2/9, 3*x + 2*y)

        >>> (2*x/3 + 4.2*y).primitive()
        (1/3, 2*x + 12.6*y)

        No subprocessing of term factors is performed:

        >>> ((2 + 2*x)*x + 2).primitive()
        (1, x*(2*x + 2) + 2)

        Recursive subprocessing can be done with the as_content_primitive()
        method:

        >>> ((2 + 2*x)*x + 2).as_content_primitive()
        (2, x*(x + 1) + 1)

        See also: primitive() function in polytools.py
        """
        terms = []
        inf = False  # whether zoo appears among the terms
        for a in self.args:
            c, m = a.as_coeff_Mul()
            if not c.is_Rational:
                c = S.One
                m = a
            inf = inf or m is S.ComplexInfinity
            # store coefficient as (numerator, denominator, rest)
            terms.append((c.p, c.q, m))
        if not inf:
            ngcd = reduce(igcd, [t[0] for t in terms], 0)
            dlcm = reduce(ilcm, [t[1] for t in terms], 1)
        else:
            # skip zero denominators contributed by infinite terms
            ngcd = reduce(igcd, [t[0] for t in terms if t[1]], 0)
            dlcm = reduce(ilcm, [t[1] for t in terms if t[1]], 1)
        if ngcd == dlcm == 1:
            return S.One, self
        if not inf:
            for i, (p, q, term) in enumerate(terms):
                terms[i] = _keep_coeff(Rational((p//ngcd)*(dlcm//q)), term)
        else:
            for i, (p, q, term) in enumerate(terms):
                if q:
                    terms[i] = _keep_coeff(Rational((p//ngcd)*(dlcm//q)), term)
                else:
                    terms[i] = _keep_coeff(Rational(p, q), term)
        # we don't need a complete re-flattening since no new terms will join
        # so we just use the same sort as is used in Add.flatten. When the
        # coefficient changes, the ordering of terms may change, e.g.
        #     (3*x, 6*y) -> (2*y, x)
        #
        # We do need to make sure that term[0] stays in position 0, however.
        #
        if terms[0].is_Number or terms[0] is S.ComplexInfinity:
            c = terms.pop(0)
        else:
            c = None
        _addsort(terms)
        if c:
            terms.insert(0, c)
        return Rational(ngcd, dlcm), self._new_rawargs(*terms)
    def as_content_primitive(self, radical=False):
        """Return the tuple (R, self/R) where R is the positive Rational
        extracted from self. If radical is True (default is False) then
        common radicals will be removed and included as a factor of the
        primitive expression.

        Examples
        ========

        >>> from sympy import sqrt
        >>> (3 + 3*sqrt(2)).as_content_primitive()
        (3, 1 + sqrt(2))

        Radical content can also be factored out of the primitive:

        >>> (2*sqrt(2) + 4*sqrt(10)).as_content_primitive(radical=True)
        (2, sqrt(2)*(1 + 2*sqrt(5)))

        See docstring of Expr.as_content_primitive for more examples.
        """
        # recursively extract content from each addend, then from the sum
        con, prim = self.func(*[_keep_coeff(*a.as_content_primitive(
            radical=radical)) for a in self.args]).primitive()
        if radical and prim.is_Add:
            # look for common radicals that can be removed
            args = prim.args
            rads = []
            common_q = None
            for m in args:
                term_rads = defaultdict(list)
                for ai in Mul.make_args(m):
                    if ai.is_Pow:
                        b, e = ai.as_base_exp()
                        if e.is_Rational and b.is_Integer:
                            # group radicands by root index q of b**(p/q)
                            term_rads[e.q].append(abs(int(b))**e.p)
                if not term_rads:
                    break
                if common_q is None:
                    common_q = set(term_rads.keys())
                else:
                    common_q = common_q & set(term_rads.keys())
                    if not common_q:
                        break
                rads.append(term_rads)
            else:
                # process rads
                # keep only those in common_q
                for r in rads:
                    for q in list(r.keys()):
                        if q not in common_q:
                            r.pop(q)
                    for q in r:
                        r[q] = prod(r[q])
                # find the gcd of bases for each q
                G = []
                for q in common_q:
                    g = reduce(igcd, [r[q] for r in rads], 0)
                    if g != 1:
                        G.append(g**Rational(1, q))
                if G:
                    G = Mul(*G)
                    args = [ai/G for ai in args]
                    prim = G*prim.func(*args)
        return con, prim
@property
def _sorted_args(self):
from sympy.core.compatibility import default_sort_key
return tuple(sorted(self.args, key=lambda w: default_sort_key(w)))
def _eval_difference_delta(self, n, step):
from sympy.series.limitseq import difference_delta as dd
return self.func(*[dd(a, n, step) for a in self.args])
from .mul import Mul, _keep_coeff, prod
from sympy.core.numbers import Rational
| |
# Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010-2011, Eucalyptus Systems, Inc.
# Copyright (c) 2011, Nexenta Systems Inc.
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# Copyright (c) 2010, Google, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.pyami.config import Config, BotoConfigLocations
from boto.storage_uri import BucketStorageUri, FileStorageUri
import boto.plugin
import datetime
import os
import platform
import re
import sys
import logging
import logging.config
from boto.compat import urlparse
from boto.exception import InvalidUriError
__version__ = '2.33.0'
Version = __version__  # for backward compatibility
# Eagerly call strptime once at import time to work around the
# thread-safety bug in its lazy first import:
# http://bugs.python.org/issue7980
datetime.datetime.strptime('', '')
# User-Agent string sent with every request: boto version plus the local
# Python version and platform.
UserAgent = 'Boto/%s Python/%s %s/%s' % (
    __version__,
    platform.python_version(),
    platform.system(),
    platform.release()
)
config = Config()
# Regex to disallow buckets violating charset or not [3..255] chars total.
BUCKET_NAME_RE = re.compile(r'^[a-zA-Z0-9][a-zA-Z0-9\._-]{1,253}[a-zA-Z0-9]$')
# Regex to disallow buckets with individual DNS labels longer than 63.
TOO_LONG_DNS_NAME_COMP = re.compile(r'[-_a-z0-9]{64}')
# URI suffix "#<digits>" identifying a GCS object generation.
GENERATION_RE = re.compile(r'(?P<versionless_uri_str>.+)'
                           r'#(?P<generation>[0-9]+)$')
# URI suffix "#<id>" identifying an S3 object version.
VERSION_RE = re.compile('(?P<versionless_uri_str>.+)#(?P<version_id>.+)$')
ENDPOINTS_PATH = os.path.join(os.path.dirname(__file__), 'endpoints.json')
def init_logging():
    """Load logging configuration from any readable boto config file.

    Candidate locations are tried in order; missing files or invalid
    logging configurations are skipped, since configuring logging this
    way is strictly optional (best effort).
    """
    # Fix: renamed loop variable from ``file`` (shadowed the builtin) and
    # narrowed the bare ``except:`` — which also swallowed SystemExit and
    # KeyboardInterrupt — to ``except Exception``.
    for path in BotoConfigLocations:
        try:
            logging.config.fileConfig(os.path.expanduser(path))
        except Exception:
            pass
class NullHandler(logging.Handler):
    """Logging handler that silently discards every record.

    Installed on boto's loggers so that importing boto never triggers the
    "no handlers could be found" warning when the application has not
    configured logging.
    """

    def emit(self, record):
        # Deliberately a no-op.
        pass
# Module-level loggers; NullHandler keeps them quiet until the
# application (or set_file_logger/set_stream_logger) configures output.
log = logging.getLogger('boto')
perflog = logging.getLogger('boto.perf')
log.addHandler(NullHandler())
perflog.addHandler(NullHandler())
# Best-effort: pick up any logging config from boto config files.
init_logging()
def set_file_logger(name, filepath, level=logging.INFO, format_string=None):
    """Convenience helper: send boto's log output to the file *filepath*.

    Creates (or fetches) the logger *name*, attaches a FileHandler at
    *level* with *format_string* (a default format is used when none is
    given), and rebinds the module-level ``log`` to it.
    """
    global log
    fmt = format_string or "%(asctime)s %(name)s [%(levelname)s]:%(message)s"
    handler = logging.FileHandler(filepath)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(fmt))
    logger = logging.getLogger(name)
    logger.setLevel(level)
    logger.addHandler(handler)
    log = logger
def set_stream_logger(name, level=logging.DEBUG, format_string=None):
    """Convenience helper: send boto's log output to a stream (stderr).

    Creates (or fetches) the logger *name*, attaches a StreamHandler at
    *level* with *format_string* (a default format is used when none is
    given), and rebinds the module-level ``log`` to it.
    """
    global log
    fmt = format_string or "%(asctime)s %(name)s [%(levelname)s]:%(message)s"
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(fmt))
    logger = logging.getLogger(name)
    logger.setLevel(level)
    logger.addHandler(handler)
    log = logger
def connect_sqs(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Simple Queue Service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.sqs.connection.SQSConnection`
    :return: A connection to Amazon's SQS
    """
    from boto.sqs.connection import SQSConnection
    connection = SQSConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_s3(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Simple Storage Service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Amazon's S3
    """
    from boto.s3.connection import S3Connection
    connection = S3Connection(aws_access_key_id, aws_secret_access_key,
                              **kwargs)
    return connection
def connect_gs(gs_access_key_id=None, gs_secret_access_key=None, **kwargs):
    """Create a client for Google Cloud Storage.

    :type gs_access_key_id: string
    :param gs_access_key_id: Your Google Cloud Storage Access Key ID

    :type gs_secret_access_key: string
    :param gs_secret_access_key: Your Google Cloud Storage Secret Access Key

    :rtype: :class:`boto.gs.connection.GSConnection`
    :return: A connection to Google's Storage service
    """
    from boto.gs.connection import GSConnection
    connection = GSConnection(gs_access_key_id, gs_secret_access_key,
                              **kwargs)
    return connection
def connect_ec2(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Elastic Compute Cloud.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ec2.connection.EC2Connection`
    :return: A connection to Amazon's EC2
    """
    from boto.ec2.connection import EC2Connection
    connection = EC2Connection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_elb(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Elastic Load Balancing service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ec2.elb.ELBConnection`
    :return: A connection to Amazon's Load Balancing Service
    """
    from boto.ec2.elb import ELBConnection
    connection = ELBConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_autoscale(aws_access_key_id=None, aws_secret_access_key=None,
                      **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ec2.autoscale.AutoScaleConnection`
    :return: A connection to Amazon's Auto Scaling Service

    :type use_block_device_types: bool
    :param use_block_device_types: Specifies whether to return described
        Launch Configs with block device mappings containing block device
        types, or a list of old style block device mappings (deprecated).
        This defaults to false for compatibility with the old incorrect
        style.
    """
    from boto.ec2.autoscale import AutoScaleConnection
    return AutoScaleConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
def connect_cloudwatch(aws_access_key_id=None, aws_secret_access_key=None,
                       **kwargs):
    """Create a client for Amazon's EC2 Monitoring (CloudWatch) service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ec2.cloudwatch.CloudWatchConnection`
    :return: A connection to Amazon's EC2 Monitoring service
    """
    from boto.ec2.cloudwatch import CloudWatchConnection
    connection = CloudWatchConnection(aws_access_key_id,
                                      aws_secret_access_key, **kwargs)
    return connection
def connect_sdb(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's SimpleDB service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.sdb.connection.SDBConnection`
    :return: A connection to Amazon's SDB
    """
    from boto.sdb.connection import SDBConnection
    connection = SDBConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_fps(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Flexible Payments Service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.fps.connection.FPSConnection`
    :return: A connection to FPS
    """
    from boto.fps.connection import FPSConnection
    connection = FPSConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_mturk(aws_access_key_id=None, aws_secret_access_key=None,
                  **kwargs):
    """Create a client for Amazon's Mechanical Turk service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.mturk.connection.MTurkConnection`
    :return: A connection to MTurk
    """
    from boto.mturk.connection import MTurkConnection
    connection = MTurkConnection(aws_access_key_id, aws_secret_access_key,
                                 **kwargs)
    return connection
def connect_cloudfront(aws_access_key_id=None, aws_secret_access_key=None,
                       **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.cloudfront.CloudFrontConnection`
    :return: A connection to Amazon's CloudFront service
    """
    from boto.cloudfront import CloudFrontConnection
    return CloudFrontConnection(aws_access_key_id, aws_secret_access_key,
                                **kwargs)
def connect_vpc(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Virtual Private Cloud service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.vpc.VPCConnection`
    :return: A connection to VPC
    """
    from boto.vpc import VPCConnection
    connection = VPCConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_rds(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Relational Database Service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.rds.RDSConnection`
    :return: A connection to RDS
    """
    from boto.rds import RDSConnection
    connection = RDSConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_rds2(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's RDS using the newer layer1 API.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.rds2.layer1.RDSConnection`
    :return: A connection to RDS
    """
    from boto.rds2.layer1 import RDSConnection
    connection = RDSConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs
    )
    return connection
def connect_emr(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Elastic MapReduce service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.emr.EmrConnection`
    :return: A connection to Elastic mapreduce
    """
    from boto.emr import EmrConnection
    connection = EmrConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_sns(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Simple Notification Service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.sns.SNSConnection`
    :return: A connection to Amazon's SNS
    """
    from boto.sns import SNSConnection
    connection = SNSConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_iam(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Identity and Access Management service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.iam.IAMConnection`
    :return: A connection to Amazon's IAM
    """
    from boto.iam import IAMConnection
    connection = IAMConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_route53(aws_access_key_id=None, aws_secret_access_key=None,
                    **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.route53.Route53Connection`
    :return: A connection to Amazon's Route53 DNS Service
    """
    from boto.route53 import Route53Connection
    return Route53Connection(aws_access_key_id, aws_secret_access_key,
                             **kwargs)
def connect_cloudformation(aws_access_key_id=None, aws_secret_access_key=None,
                           **kwargs):
    """Create a client for Amazon's CloudFormation service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.cloudformation.CloudFormationConnection`
    :return: A connection to Amazon's CloudFormation Service
    """
    from boto.cloudformation import CloudFormationConnection
    connection = CloudFormationConnection(aws_access_key_id,
                                          aws_secret_access_key, **kwargs)
    return connection
def connect_euca(host=None, aws_access_key_id=None, aws_secret_access_key=None,
                 port=8773, path='/services/Eucalyptus', is_secure=False,
                 **kwargs):
    """Connect to a Eucalyptus service.

    :type host: string
    :param host: the host name or ip address of the Eucalyptus server

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ec2.connection.EC2Connection`
    :return: A connection to Eucalyptus server
    """
    from boto.ec2 import EC2Connection
    from boto.ec2.regioninfo import RegionInfo

    # Anything not supplied as an argument falls back to the boto config.
    if not aws_access_key_id:
        aws_access_key_id = config.get('Credentials', 'euca_access_key_id',
                                       None)
    if not aws_secret_access_key:
        aws_secret_access_key = config.get('Credentials',
                                           'euca_secret_access_key', None)
    if not host:
        host = config.get('Boto', 'eucalyptus_host', None)

    region = RegionInfo(name='eucalyptus', endpoint=host)
    return EC2Connection(aws_access_key_id, aws_secret_access_key,
                         region=region, port=port, path=path,
                         is_secure=is_secure, **kwargs)
def connect_glacier(aws_access_key_id=None, aws_secret_access_key=None,
                    **kwargs):
    """Create a client for Amazon's Glacier archival storage service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.glacier.layer2.Layer2`
    :return: A connection to Amazon's Glacier Service
    """
    from boto.glacier.layer2 import Layer2
    connection = Layer2(aws_access_key_id, aws_secret_access_key, **kwargs)
    return connection
def connect_ec2_endpoint(url, aws_access_key_id=None,
                         aws_secret_access_key=None,
                         **kwargs):
    """
    Connect to an EC2 Api endpoint.  Additional arguments are passed
    through to connect_ec2.

    :type url: string
    :param url: A url for the ec2 api endpoint to connect to

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ec2.connection.EC2Connection`
    :return: A connection to Eucalyptus server
    """
    from boto.ec2.regioninfo import RegionInfo

    purl = urlparse(url)
    kwargs['port'] = purl.port
    kwargs['host'] = purl.hostname
    kwargs['path'] = purl.path
    # Infer TLS from the URL scheme unless the caller was explicit.
    # ('not in' instead of the non-idiomatic "not ... in ...".)
    if 'is_secure' not in kwargs:
        kwargs['is_secure'] = (purl.scheme == "https")

    kwargs['region'] = RegionInfo(name=purl.hostname,
                                  endpoint=purl.hostname)
    kwargs['aws_access_key_id'] = aws_access_key_id
    kwargs['aws_secret_access_key'] = aws_secret_access_key

    # 'return' is a statement, not a function; no parentheses needed.
    return connect_ec2(**kwargs)
def connect_walrus(host=None, aws_access_key_id=None,
                   aws_secret_access_key=None,
                   port=8773, path='/services/Walrus', is_secure=False,
                   **kwargs):
    """Connect to a Walrus service (Eucalyptus' S3-compatible storage).

    :type host: string
    :param host: the host name or ip address of the Walrus server

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to Walrus
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    # Anything not supplied as an argument falls back to the boto config.
    if not aws_access_key_id:
        aws_access_key_id = config.get('Credentials', 'euca_access_key_id',
                                       None)
    if not aws_secret_access_key:
        aws_secret_access_key = config.get('Credentials',
                                           'euca_secret_access_key', None)
    if not host:
        host = config.get('Boto', 'walrus_host', None)

    return S3Connection(aws_access_key_id, aws_secret_access_key,
                        host=host, port=port, path=path,
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
def connect_ses(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Simple Email Service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ses.SESConnection`
    :return: A connection to Amazon's SES
    """
    from boto.ses import SESConnection
    connection = SESConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_sts(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """Create a client for Amazon's Security Token Service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.sts.STSConnection`
    :return: A connection to Amazon's STS
    """
    from boto.sts import STSConnection
    connection = STSConnection(aws_access_key_id, aws_secret_access_key,
                               **kwargs)
    return connection
def connect_ia(ia_access_key_id=None, ia_secret_access_key=None,
               is_secure=False, **kwargs):
    """Connect to the Internet Archive via their S3-like API.

    :type ia_access_key_id: string
    :param ia_access_key_id: Your IA Access Key ID. This will also look
        in your boto config file for an entry in the Credentials
        section called "ia_access_key_id"

    :type ia_secret_access_key: string
    :param ia_secret_access_key: Your IA Secret Access Key. This will also
        look in your boto config file for an entry in the Credentials
        section called "ia_secret_access_key"

    :rtype: :class:`boto.s3.connection.S3Connection`
    :return: A connection to the Internet Archive
    """
    from boto.s3.connection import S3Connection
    from boto.s3.connection import OrdinaryCallingFormat

    # Config-file credentials take precedence; the arguments act as the
    # defaults handed to config.get().
    access_key = config.get('Credentials', 'ia_access_key_id',
                            ia_access_key_id)
    secret_key = config.get('Credentials', 'ia_secret_access_key',
                            ia_secret_access_key)
    return S3Connection(access_key, secret_key,
                        host='s3.us.archive.org',
                        calling_format=OrdinaryCallingFormat(),
                        is_secure=is_secure, **kwargs)
def connect_dynamodb(aws_access_key_id=None,
                     aws_secret_access_key=None,
                     **kwargs):
    """Create a Layer2 client for Amazon's DynamoDB.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.dynamodb.layer2.Layer2`
    :return: A connection to the Layer2 interface for DynamoDB.
    """
    from boto.dynamodb.layer2 import Layer2
    connection = Layer2(aws_access_key_id, aws_secret_access_key, **kwargs)
    return connection
def connect_swf(aws_access_key_id=None,
                aws_secret_access_key=None,
                **kwargs):
    """Create a Layer1 client for Amazon's Simple Workflow service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.swf.layer1.Layer1`
    :return: A connection to the Layer1 interface for SWF.
    """
    from boto.swf.layer1 import Layer1
    connection = Layer1(aws_access_key_id, aws_secret_access_key, **kwargs)
    return connection
def connect_cloudsearch(aws_access_key_id=None,
                        aws_secret_access_key=None,
                        **kwargs):
    """Create a Layer2 client for Amazon's CloudSearch service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.cloudsearch.layer2.Layer2`
    :return: A connection to Amazon's CloudSearch service
    """
    from boto.cloudsearch.layer2 import Layer2
    connection = Layer2(aws_access_key_id, aws_secret_access_key, **kwargs)
    return connection
def connect_cloudsearch2(aws_access_key_id=None,
                         aws_secret_access_key=None,
                         **kwargs):
    """Create a Layer2 client for Amazon's CloudSearch2 service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.cloudsearch2.layer2.Layer2`
    :return: A connection to Amazon's CloudSearch2 service
    """
    from boto.cloudsearch2.layer2 import Layer2
    connection = Layer2(aws_access_key_id, aws_secret_access_key, **kwargs)
    return connection
def connect_beanstalk(aws_access_key_id=None,
                      aws_secret_access_key=None,
                      **kwargs):
    """Create a Layer1 client for Amazon's Elastic Beanstalk service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.beanstalk.layer1.Layer1`
    :return: A connection to Amazon's Elastic Beanstalk service
    """
    from boto.beanstalk.layer1 import Layer1
    connection = Layer1(aws_access_key_id, aws_secret_access_key, **kwargs)
    return connection
def connect_elastictranscoder(aws_access_key_id=None,
                              aws_secret_access_key=None,
                              **kwargs):
    """Create a client for Amazon's Elastic Transcoder service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.ets.layer1.ElasticTranscoderConnection`
    :return: A connection to Amazon's Elastic Transcoder service
    """
    from boto.elastictranscoder.layer1 import ElasticTranscoderConnection
    connection = ElasticTranscoderConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs)
    return connection
def connect_opsworks(aws_access_key_id=None,
                     aws_secret_access_key=None,
                     **kwargs):
    """
    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.opsworks.layer1.OpsWorksConnection`
    :return: A connection to Amazon's OpsWorks service
    """
    from boto.opsworks.layer1 import OpsWorksConnection
    return OpsWorksConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs)
def connect_redshift(aws_access_key_id=None,
                     aws_secret_access_key=None,
                     **kwargs):
    """Create a client for Amazon's Redshift data-warehouse service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.redshift.layer1.RedshiftConnection`
    :return: A connection to Amazon's Redshift service
    """
    from boto.redshift.layer1 import RedshiftConnection
    connection = RedshiftConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs)
    return connection
def connect_support(aws_access_key_id=None,
                    aws_secret_access_key=None,
                    **kwargs):
    """Create a client for Amazon's Support service.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.support.layer1.SupportConnection`
    :return: A connection to Amazon's Support service
    """
    from boto.support.layer1 import SupportConnection
    connection = SupportConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs)
    return connection
def connect_cloudtrail(aws_access_key_id=None,
                       aws_secret_access_key=None,
                       **kwargs):
    """
    Connect to AWS CloudTrail

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.cloudtrail.layer1.CloudTrailConnection`
    :return: A connection to the AWS Cloudtrail service
    """
    from boto.cloudtrail.layer1 import CloudTrailConnection
    return CloudTrailConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs
    )
def connect_directconnect(aws_access_key_id=None,
                          aws_secret_access_key=None,
                          **kwargs):
    """Connect to AWS DirectConnect.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.directconnect.layer1.DirectConnectConnection`
    :return: A connection to the AWS DirectConnect service
    """
    from boto.directconnect.layer1 import DirectConnectConnection
    connection = DirectConnectConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs)
    return connection
def connect_kinesis(aws_access_key_id=None,
                    aws_secret_access_key=None,
                    **kwargs):
    """
    Connect to Amazon Kinesis

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.kinesis.layer1.KinesisConnection`
    :return: A connection to the Amazon Kinesis service
    """
    from boto.kinesis.layer1 import KinesisConnection
    return KinesisConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs
    )
def connect_logs(aws_access_key_id=None,
                 aws_secret_access_key=None,
                 **kwargs):
    """
    Connect to Amazon CloudWatch Logs

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.logs.layer1.CloudWatchLogsConnection`
    :return: A connection to the Amazon CloudWatch Logs service
    """
    from boto.logs.layer1 import CloudWatchLogsConnection
    return CloudWatchLogsConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs
    )
def connect_route53domains(aws_access_key_id=None,
                           aws_secret_access_key=None,
                           **kwargs):
    """
    Connect to Amazon Route 53 Domains

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.route53.domains.layer1.Route53DomainsConnection`
    :return: A connection to the Amazon Route 53 Domains service
    """
    from boto.route53.domains.layer1 import Route53DomainsConnection
    return Route53DomainsConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs
    )
def connect_cognito_identity(aws_access_key_id=None,
                             aws_secret_access_key=None,
                             **kwargs):
    """
    Connect to Amazon Cognito Identity

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.cognito.identity.layer1.CognitoIdentityConnection`
    :return: A connection to the Amazon Cognito Identity service
    """
    from boto.cognito.identity.layer1 import CognitoIdentityConnection
    return CognitoIdentityConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs
    )
def connect_cognito_sync(aws_access_key_id=None,
                         aws_secret_access_key=None,
                         **kwargs):
    """
    Connect to Amazon Cognito Sync

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.cognito.sync.layer1.CognitoSyncConnection`
    :return: A connection to the Amazon Cognito Sync service
    """
    from boto.cognito.sync.layer1 import CognitoSyncConnection
    return CognitoSyncConnection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        **kwargs
    )
def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
                bucket_storage_uri_class=BucketStorageUri,
                suppress_consec_slashes=True, is_latest=False):
    """
    Instantiate a StorageUri from a URI string.

    :type uri_str: string
    :param uri_str: URI naming bucket + optional object.
    :type default_scheme: string
    :param default_scheme: default scheme for scheme-less URIs.
    :type debug: int
    :param debug: debug level to pass in to boto connection (range 0..2).
    :type validate: bool
    :param validate: whether to check for bucket name validity.
    :type bucket_storage_uri_class: BucketStorageUri interface.
    :param bucket_storage_uri_class: Allows mocking for unit tests.
    :param suppress_consec_slashes: If provided, controls whether
        consecutive slashes will be suppressed in key paths.
    :type is_latest: bool
    :param is_latest: whether this versioned object represents the
        current version.

    We allow validate to be disabled to allow caller
    to implement bucket-level wildcarding (outside the boto library;
    see gsutil).

    :rtype: :class:`boto.StorageUri` subclass
    :return: StorageUri subclass for given URI.

    ``uri_str`` must be one of the following formats:

    * gs://bucket/name
    * gs://bucket/name#ver
    * s3://bucket/name
    * gs://bucket
    * s3://bucket
    * filename (which could be a Unix path like /a/b/c or a Windows path like
      C:\a\b\c)

    The last example uses the default scheme ('file', unless overridden).
    """
    version_id = None
    generation = None

    # Manually parse URI components instead of using urlparse because
    # what we're calling URIs don't really fit the standard syntax for URIs
    # (the latter includes an optional host/net location part).
    end_scheme_idx = uri_str.find('://')
    if end_scheme_idx == -1:
        scheme = default_scheme.lower()
        path = uri_str
    else:
        scheme = uri_str[0:end_scheme_idx].lower()
        path = uri_str[end_scheme_idx + 3:]

    if scheme not in ['file', 's3', 'gs']:
        raise InvalidUriError('Unrecognized scheme "%s"' % scheme)
    if scheme == 'file':
        # For file URIs we have no bucket name, and use the complete path
        # (minus 'file://') as the object name.
        is_stream = False
        if path == '-':
            # '-' conventionally names a stream rather than a file on disk.
            is_stream = True
        return FileStorageUri(path, debug, is_stream)
    else:
        path_parts = path.split('/', 1)
        bucket_name = path_parts[0]
        object_name = ''
        # If validate enabled, ensure the bucket name is valid, to avoid
        # possibly confusing other parts of the code. (For example if we didn't
        # catch bucket names containing ':', when a user tried to connect to
        # the server with that name they might get a confusing error about
        # non-integer port numbers.)
        if (validate and bucket_name and
            (not BUCKET_NAME_RE.match(bucket_name)
             or TOO_LONG_DNS_NAME_COMP.search(bucket_name))):
            raise InvalidUriError('Invalid bucket name in URI "%s"' % uri_str)
        if scheme == 'gs':
            # GS URIs may carry a '#<generation>' suffix; strip it and
            # re-split the versionless remainder into bucket/object parts.
            match = GENERATION_RE.search(path)
            if match:
                md = match.groupdict()
                versionless_uri_str = md['versionless_uri_str']
                path_parts = versionless_uri_str.split('/', 1)
                generation = int(md['generation'])
        elif scheme == 's3':
            # S3 URIs may carry a '#<version_id>' suffix instead.
            match = VERSION_RE.search(path)
            if match:
                md = match.groupdict()
                versionless_uri_str = md['versionless_uri_str']
                path_parts = versionless_uri_str.split('/', 1)
                version_id = md['version_id']
        else:
            # Defensive only: unreachable, since the scheme was already
            # validated against ['file', 's3', 'gs'] above.
            raise InvalidUriError('Unrecognized scheme "%s"' % scheme)
        if len(path_parts) > 1:
            object_name = path_parts[1]
        return bucket_storage_uri_class(
            scheme, bucket_name, object_name, debug,
            suppress_consec_slashes=suppress_consec_slashes,
            version_id=version_id, generation=generation, is_latest=is_latest)
def storage_uri_for_key(key):
    """Returns a StorageUri for the given key.

    :type key: :class:`boto.s3.key.Key` or subclass
    :param key: URI naming bucket + optional object.
    """
    if not isinstance(key, boto.s3.key.Key):
        raise InvalidUriError('Requested key (%s) is not a subclass of '
                              'boto.s3.key.Key' % str(type(key)))
    provider = key.bucket.connection.provider.get_provider_name()
    return storage_uri('%s://%s/%s' % (provider, key.bucket.name, key.name))
# Give registered boto plugins a chance to hook in, using the loaded config.
boto.plugin.load_plugins(config)
| |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
It's used to accept requests from the device to spawn and kill instances of the
chrome test server on the host.
"""
# pylint: disable=W0702
import BaseHTTPServer
import json
import logging
import os
import select
import struct
import subprocess
import sys
import threading
import time
import urlparse
from devil.android import forwarder
from devil.android import ports
from pylib import constants
from pylib.constants import host_paths
# Paths that are needed to import necessary modules when launching a
# testserver: third-party libs plus the chrome/sync testserver sources.
os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
    % (os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'),
       os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
       os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'pyftpdlib',
                    'src'),
       os.path.join(host_paths.DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'),
       os.path.join(host_paths.DIR_SOURCE_ROOT, 'sync', 'tools', 'testserver')))

# Maps a server type name to the testserver command-line flag selecting it;
# an empty string means no flag is passed.
SERVER_TYPES = {
    'http': '',
    'ftp': '-f',
    'sync': '',  # Sync uses its own script, and doesn't take a server type arg.
    'tcpecho': '--tcp-echo',
    'udpecho': '--udp-echo',
}

# The timeout (in seconds) of starting up the Python test server.
TEST_SERVER_STARTUP_TIMEOUT = 10
def _WaitUntil(predicate, max_attempts=5):
"""Blocks until the provided predicate (function) is true.
Returns:
Whether the provided predicate was satisfied once (before the timeout).
"""
sleep_time_sec = 0.025
for _ in xrange(1, max_attempts):
if predicate():
return True
time.sleep(sleep_time_sec)
sleep_time_sec = min(1, sleep_time_sec * 2) # Don't wait more than 1 sec.
return False
def _CheckPortAvailable(port):
  """Returns True if |port| becomes available on the host within the retry window."""
  return _WaitUntil(lambda: ports.IsHostPortAvailable(port))


def _CheckPortNotAvailable(port):
  """Returns True if |port| is not available (i.e. something is bound to it)."""
  return _WaitUntil(lambda: not ports.IsHostPortAvailable(port))


def _CheckDevicePortStatus(device, port):
  """Returns whether the provided port is used on |device|."""
  return _WaitUntil(lambda: ports.IsDevicePortUsed(device, port))
def _GetServerTypeCommandLine(server_type):
  """Maps a server type name onto its testserver command-line flag.

  Args:
    server_type: the server type to be used (e.g. 'http').

  Returns:
    A string containing the command-line argument.

  Raises:
    NotImplementedError: if |server_type| is not a known type.
    Exception: for 'udpecho', which cannot be forwarded to the device.
  """
  try:
    type_flag = SERVER_TYPES[server_type]
  except KeyError:
    raise NotImplementedError('Unknown server type: %s' % server_type)
  if server_type == 'udpecho':
    raise Exception('Please do not run UDP echo tests because we do not '
                    'have a UDP forwarder tool.')
  return type_flag
class TestServerThread(threading.Thread):
"""A thread to run the test server in a separate process."""
  def __init__(self, ready_event, arguments, device, tool):
    """Initialize TestServerThread with the following argument.

    Args:
      ready_event: event which will be set when the test server is ready.
      arguments: dictionary of arguments to run the test server.
      device: An instance of DeviceUtils.
      tool: instance of runtime error detection tool.
    """
    threading.Thread.__init__(self)
    # Set by this thread once a stop request has been handled.
    self.wait_event = threading.Event()
    self.stop_flag = False
    self.ready_event = ready_event
    self.ready_event.clear()
    self.arguments = arguments
    self.device = device
    self.tool = tool
    self.test_server_process = None
    self.is_ready = False
    # Host port requested by the caller; may be 0 ("pick any free port").
    self.host_port = self.arguments['port']
    assert isinstance(self.host_port, int)
    # The forwarder device port now is dynamically allocated.
    self.forwarder_device_port = 0
    # Anonymous pipe in order to get port info from test server.
    self.pipe_in = None
    self.pipe_out = None
    self.process = None
    self.command_line = []
def _WaitToStartAndGetPortFromTestServer(self):
"""Waits for the Python test server to start and gets the port it is using.
The port information is passed by the Python test server with a pipe given
by self.pipe_out. It is written as a result to |self.host_port|.
Returns:
Whether the port used by the test server was successfully fetched.
"""
assert self.host_port == 0 and self.pipe_out and self.pipe_in
(in_fds, _, _) = select.select([self.pipe_in, ], [], [],
TEST_SERVER_STARTUP_TIMEOUT)
if len(in_fds) == 0:
logging.error('Failed to wait to the Python test server to be started.')
return False
# First read the data length as an unsigned 4-byte value. This
# is _not_ using network byte ordering since the Python test server packs
# size as native byte order and all Chromium platforms so far are
# configured to use little-endian.
# TODO(jnd): Change the Python test server and local_test_server_*.cc to
# use a unified byte order (either big-endian or little-endian).
data_length = os.read(self.pipe_in, struct.calcsize('=L'))
if data_length:
(data_length,) = struct.unpack('=L', data_length)
assert data_length
if not data_length:
logging.error('Failed to get length of server data.')
return False
port_json = os.read(self.pipe_in, data_length)
if not port_json:
logging.error('Failed to get server data.')
return False
logging.info('Got port json data: %s', port_json)
port_json = json.loads(port_json)
if port_json.has_key('port') and isinstance(port_json['port'], int):
self.host_port = port_json['port']
return _CheckPortNotAvailable(self.host_port)
logging.error('Failed to get port information from the server data.')
return False
def _GenerateCommandLineArguments(self):
"""Generates the command line to run the test server.
Note that all options are processed by following the definitions in
testserver.py.
"""
if self.command_line:
return
args_copy = dict(self.arguments)
# Translate the server type.
type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type'))
if type_cmd:
self.command_line.append(type_cmd)
# Use a pipe to get the port given by the instance of Python test server
# if the test does not specify the port.
assert self.host_port == args_copy['port']
if self.host_port == 0:
(self.pipe_in, self.pipe_out) = os.pipe()
self.command_line.append('--startup-pipe=%d' % self.pipe_out)
# Pass the remaining arguments as-is.
for key, values in args_copy.iteritems():
if not isinstance(values, list):
values = [values]
for value in values:
if value is None:
self.command_line.append('--%s' % key)
else:
self.command_line.append('--%s=%s' % (key, value))
def _CloseUnnecessaryFDsForTestServerProcess(self):
# This is required to avoid subtle deadlocks that could be caused by the
# test server child process inheriting undesirable file descriptors such as
# file lock file descriptors.
for fd in xrange(0, 1024):
if fd != self.pipe_out:
try:
os.close(fd)
except:
pass
def run(self):
logging.info('Start running the thread!')
self.wait_event.clear()
self._GenerateCommandLineArguments()
command = host_paths.DIR_SOURCE_ROOT
if self.arguments['server-type'] == 'sync':
command = [os.path.join(command, 'sync', 'tools', 'testserver',
'sync_testserver.py')] + self.command_line
else:
command = [os.path.join(command, 'net', 'tools', 'testserver',
'testserver.py')] + self.command_line
logging.info('Running: %s', command)
# Disable PYTHONUNBUFFERED because it has a bad interaction with the
# testserver. Remove once this interaction is fixed.
unbuf = os.environ.pop('PYTHONUNBUFFERED', None)
# Pass DIR_SOURCE_ROOT as the child's working directory so that relative
# paths in the arguments are resolved correctly.
self.process = subprocess.Popen(
command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess,
cwd=host_paths.DIR_SOURCE_ROOT)
if unbuf:
os.environ['PYTHONUNBUFFERED'] = unbuf
if self.process:
if self.pipe_out:
self.is_ready = self._WaitToStartAndGetPortFromTestServer()
else:
self.is_ready = _CheckPortNotAvailable(self.host_port)
if self.is_ready:
forwarder.Forwarder.Map([(0, self.host_port)], self.device, self.tool)
# Check whether the forwarder is ready on the device.
self.is_ready = False
device_port = forwarder.Forwarder.DevicePortForHostPort(self.host_port)
if device_port and _CheckDevicePortStatus(self.device, device_port):
self.is_ready = True
self.forwarder_device_port = device_port
# Wake up the request handler thread.
self.ready_event.set()
# Keep thread running until Stop() gets called.
_WaitUntil(lambda: self.stop_flag, max_attempts=sys.maxint)
if self.process.poll() is None:
self.process.kill()
forwarder.Forwarder.UnmapDevicePort(self.forwarder_device_port, self.device)
self.process = None
self.is_ready = False
if self.pipe_out:
os.close(self.pipe_in)
os.close(self.pipe_out)
self.pipe_in = None
self.pipe_out = None
logging.info('Test-server has died.')
self.wait_event.set()
def Stop(self):
"""Blocks until the loop has finished.
Note that this must be called in another thread.
"""
if not self.process:
return
self.stop_flag = True
self.wait_event.wait()
class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """A handler used to process http GET/POST request."""

  def _SendResponse(self, response_code, response_reason, additional_headers,
                    contents):
    """Generates a response sent to the client from the provided parameters.

    Args:
      response_code: number of the response status.
      response_reason: string of reason description of the response.
      additional_headers: dict of additional headers. Each key is the name of
                          the header, each value is the content of the header.
      contents: string of the contents we want to send to client.
    """
    self.send_response(response_code, response_reason)
    self.send_header('Content-Type', 'text/html')
    # Specify the content-length as without it the http(s) response will not
    # be completed properly (and the browser keeps expecting data).
    self.send_header('Content-Length', len(contents))
    for header_name in additional_headers:
      self.send_header(header_name, additional_headers[header_name])
    self.end_headers()
    self.wfile.write(contents)
    self.wfile.flush()

  def _StartTestServer(self):
    """Starts the test server thread."""
    logging.info('Handling request to spawn a test server.')
    content_type = self.headers.getheader('content-type')
    if content_type != 'application/json':
      raise Exception('Bad content-type for start request.')
    content_length = self.headers.getheader('content-length')
    if not content_length:
      content_length = 0
    try:
      content_length = int(content_length)
    except:
      raise Exception('Bad content-length for start request.')
    logging.info(content_length)
    # The POST body is a JSON dictionary of test server arguments.
    test_server_argument_json = self.rfile.read(content_length)
    logging.info(test_server_argument_json)
    # Only one test server may exist at a time.
    assert not self.server.test_server_instance
    ready_event = threading.Event()
    self.server.test_server_instance = TestServerThread(
        ready_event,
        json.loads(test_server_argument_json),
        self.server.device,
        self.server.tool)
    self.server.test_server_instance.setDaemon(True)
    self.server.test_server_instance.start()
    # Block until the server thread reports success or failure.
    ready_event.wait()
    if self.server.test_server_instance.is_ready:
      self._SendResponse(200, 'OK', {}, json.dumps(
          {'port': self.server.test_server_instance.forwarder_device_port,
           'message': 'started'}))
      logging.info('Test server is running on port: %d.',
                   self.server.test_server_instance.host_port)
    else:
      self.server.test_server_instance.Stop()
      self.server.test_server_instance = None
      self._SendResponse(500, 'Test Server Error.', {}, '')
      logging.info('Encounter problem during starting a test server.')

  def _KillTestServer(self):
    """Stops the test server instance."""
    # There should only ever be one test server at a time. This may do the
    # wrong thing if we try and start multiple test servers.
    if not self.server.test_server_instance:
      return
    port = self.server.test_server_instance.host_port
    logging.info('Handling request to kill a test server on port: %d.', port)
    self.server.test_server_instance.Stop()
    # Make sure the status of test server is correct before sending response.
    if _CheckPortAvailable(port):
      self._SendResponse(200, 'OK', {}, 'killed')
      logging.info('Test server on port %d is killed', port)
    else:
      self._SendResponse(500, 'Test Server Error.', {}, '')
      logging.info('Encounter problem during killing a test server.')
    self.server.test_server_instance = None

  def do_POST(self):
    # Dispatch POST requests; only /start is recognized.
    parsed_path = urlparse.urlparse(self.path)
    action = parsed_path.path
    logging.info('Action for POST method is: %s.', action)
    if action == '/start':
      self._StartTestServer()
    else:
      self._SendResponse(400, 'Unknown request.', {}, '')
      logging.info('Encounter unknown request: %s.', action)

  def do_GET(self):
    # Dispatch GET requests; /kill and /ping are recognized.
    parsed_path = urlparse.urlparse(self.path)
    action = parsed_path.path
    params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
    logging.info('Action for GET method is: %s.', action)
    for param in params:
      logging.info('%s=%s', param, params[param][0])
    if action == '/kill':
      self._KillTestServer()
    elif action == '/ping':
      # The ping handler is used to check whether the spawner server is ready
      # to serve the requests. We don't need to test the status of the test
      # server when handling ping request.
      self._SendResponse(200, 'OK', {}, 'ready')
      logging.info('Handled ping request and sent response.')
    else:
      self._SendResponse(400, 'Unknown request', {}, '')
      logging.info('Encounter unknown request: %s.', action)
class SpawningServer(object):
  """The class used to start/stop a http server."""

  def __init__(self, test_server_spawner_port, device, tool):
    logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
                                            SpawningServerRequestHandler)
    # Stash state on the HTTPServer instance so request handlers (which only
    # see self.server) can reach the device, tool and current test server.
    self.server.device = device
    self.server.tool = tool
    self.server.test_server_instance = None
    self.server.build_type = constants.GetBuildType()

  def _Listen(self):
    # Runs in the listener thread; blocks in serve_forever until shutdown().
    logging.info('Starting test server spawner')
    self.server.serve_forever()

  def Start(self):
    """Starts the test server spawner."""
    # Daemon thread so the spawner never blocks interpreter exit.
    listener_thread = threading.Thread(target=self._Listen)
    listener_thread.setDaemon(True)
    listener_thread.start()

  def Stop(self):
    """Stops the test server spawner.

    Also cleans the server state.
    """
    self.CleanupState()
    self.server.shutdown()

  def CleanupState(self):
    """Cleans up the spawning server state.

    This should be called if the test server spawner is reused,
    to avoid sharing the test server instance.
    """
    if self.server.test_server_instance:
      self.server.test_server_instance.Stop()
      self.server.test_server_instance = None
| |
import os
import sys
import time
import logging
import signal
import tempfile
import inspect
from watchdog.observers import Observer
from watchdog.events import LoggingEventHandler, FileSystemEventHandler
from django import conf
from django.conf import settings
from django.utils.module_loading import import_module, import_string
import subprocess
class EventHandler(FileSystemEventHandler):
    """Watchdog handler that forwards every filesystem event to a Watcher."""

    def __init__(self, watcher, *args, **kwargs):
        super(EventHandler, self).__init__(*args, **kwargs)
        self.watcher = watcher

    def on_any_event(self, event):
        # Delegate all event types to the owning watcher.
        self.watcher.process_changes(event)
class Watcher(object):
    """Watches Django app directories for .styl changes and recompiles CSS.

    NOTE(review): this module is Python 2 only (``print`` statement,
    ``except Exception, e`` syntax, the ``reload`` builtin).
    """

    handler = None
    command = None           # optional Django management command (for stdout)
    blocked = False
    stout_prefix = 'stylus'  # label printed in front of every console message
    configs = []             # list of [source, css_output, last_css] triples

    def __init__(self, command=None, *args, **kwargs):
        #self.handler = WatcherHandler(self)
        self.command = command
        self.observer = Observer()
        self.event_handler = EventHandler(self)
        # self.notifier.max_user_watches=16384
        self.process_settings()
        app_root = os.path.abspath(settings.SITE_ROOT)
        # self.print_head('Watching \033[94m%s\033[0m' % (app_root))
        paths = self.get_watched_paths()
        # Register a recursive watch on every collected path.
        for appname, path in paths:
            #try:
            #self.schedule(self.handler, path, recursive=True)
            self.observer.schedule(self.event_handler, path, recursive=True)
            self.print_head('Watching \033[94m%s\033[0m' % (appname))
            # except Exception, e:
            # self.print_error('Watching %s error : %s' % (appname, str(e)))

    def process_changes(self, event):
        # Recompile whenever any .styl file is touched.
        if event.src_path.endswith('.styl'):
            self.generate_css()
        # if hasattr(event, 'dest_path'):
        # print event.dest_path
        # self.process_settings()
        # diff_cmd_stream = os.popen("git diff --name-only")
        # diffs = diff_cmd_stream.read()
        # if ".styl" in diffs:
        # self.print_head('Changes detected')
        # else:
        # self.print_head("No changes")

    def process_settings(self):
        """Reload Django settings and rebuild self.configs.

        Each entry of settings.STYLUS_WATCHER is expected to be a
        (source, css_output) pair; relative paths are resolved against
        settings.SITE_ROOT.
        """
        reload(conf)
        self.configs = []
        # Shadows the module-level `settings` import with the freshly
        # reloaded configuration object.
        settings = conf.settings
        # NOTE(review): this condition looks inverted — when STYLUS_WATCHER is
        # missing, evaluating `'watcher' in settings.STYLUS_WATCHER` raises
        # AttributeError instead of printing the error message. Confirm intent.
        if not hasattr(settings, 'STYLUS_WATCHER') and 'watcher' in settings.STYLUS_WATCHER:
            self.print_error('settings.STYLUS_WATCHER is missing')
        else:
            configs = settings.STYLUS_WATCHER
            for config in configs:
                try:
                    source = config[0]
                    css_output = config[1]
                    content = None
                    # Resolve the source relative to SITE_ROOT if needed.
                    if not os.path.isfile(source):
                        source = os.path.join(settings.SITE_ROOT, config[0])
                        if not os.path.isfile(source):
                            self.print_error('Source is missing "%s"' % source)
                            source = None
                    # Resolve the output directory relative to SITE_ROOT too.
                    css_output_dir = os.path.dirname(css_output)
                    if not os.path.isdir(css_output_dir):
                        css_output_dir = os.path.join(settings.SITE_ROOT, css_output_dir)
                        css_output = os.path.join(settings.SITE_ROOT, css_output)
                        if not os.path.isdir(css_output_dir):
                            self.print_error('CSS output folder is missing "%s"' % css_output)
                            css_output = None
                    # Remember the current CSS so unchanged output is skipped.
                    if os.path.isfile(css_output):
                        f = open(css_output, 'r')
                        content = f.read()
                        f.close()
                    if source and css_output:
                        self.configs.append([source, css_output, content])
                except Exception, e:
                    # print config
                    self.print_error(u'Invalid config for stylus watcher "%s"' % (e.message))

    def generate_css(self, compress=True):
        """Compile every configured .styl source into its CSS output file.

        A generated prologue defines SOURCE_ROOT/DJANGO_ROOT and an
        import_app() helper before the source is piped through the external
        `stylus` compiler.  NOTE(review): the indentation inside the stylus
        template strings below was reconstructed — confirm against history.
        """
        for config in self.configs:
            #try:
            source = config[0]
            css_output = config[1]
            self.print_process('Compiling css from %s to %s' % (source, css_output))
            f = open(source, 'r')
            initial = f.read()
            f.close()
            shortcuts_path = os.path.join(os.path.dirname(__file__), 'shortcuts', 'shortcuts.styl')
            styl = """
SOURCE_ROOT = '%s/'
DJANGO_ROOT = '%s/'
@import '%s'
import_app(appname, path)
    if !appname
        return
""" % (os.path.abspath(os.path.dirname(source)), settings.DJANGO_ROOT, shortcuts_path)
            for appname, path in self.get_watched_paths(recursive=False):
                styl += """
    else if appname == '%s'
        @import '%s/'+path
""" % (appname, os.path.join(path))
            styl += """
    else
        @import SOURCE_ROOT+appname
"""
            styl += initial
            # Write the assembled stylus program to a temp file and compile it.
            tmp = tempfile.NamedTemporaryFile(mode='w+b', delete=False)
            tmp.write(styl)
            tmp.close()
            cmd = "stylus%s < %s" % (' --compress' if compress else '', tmp.name)#, css_output)
            # self.print_process('Executing %s' % cmd)
            pipe = subprocess.Popen(cmd,
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell = True)
            css = "".join([ line for line in pipe.stdout ])
            errors = "".join([ line for line in pipe.stderr ])
            self.print_error(errors)
            os.unlink(tmp.name)
            csslen = len(css)
            if csslen == 0 or errors:
                self.print_error("^ Error in stylus compilation")
            else:
                # Only rewrite the output file when the CSS actually changed.
                if css != config[2]:
                    # self.print_success("Done (%s chars)." % csslen)
                    f = open(css_output, 'w')
                    #self.print_process('Pushing css into %s' % css_output)
                    f.write(css)
                    f.close()
                    config[2] = css
                    self.print_success("Done.")
                else:
                    self.print_success("No changes, abort.")
            #except Exception, e:
            #self.print_error('Error during css generation for "%s" : %s' % (config, str(e)))

    def get_watched_paths(self, recursive=True):
        """Collect (label, directory) pairs to watch.

        Includes every configured source directory, the project directory,
        the static dirs and each importable INSTALLED_APPS entry.  When
        ``recursive`` is True, paths nested inside another watched path are
        pruned.
        """
        app_paths = []
        for config in self.configs:
            source_dir = os.path.abspath(os.path.dirname(config[0]))
            app_paths.append(
                (config[0], source_dir)
            )
        #styl_path = os.path.join(settings.DJANGO_ROOT, 'styl')
        project_path = settings.DJANGO_ROOT
        if os.path.exists(project_path):
            app_paths.append((project_path, project_path))
        for path in settings.STATICFILES_DIRS:
            #styl_path = os.path.join(path, 'styl')
            styl_path = path
            if os.path.exists(styl_path):
                app_paths.append((styl_path, styl_path))
        for appname in settings.INSTALLED_APPS:
            try:
                # Newer Django app configs are dotted class paths; fall back
                # to a plain module import.
                try:
                    app = import_string(appname)
                except:
                    app = import_module(appname)
                if inspect.ismodule(app):
                    pass
                elif inspect.isclass(app):
                    app = import_module(app.name)
                else:
                    raise Exception
                #styl_path = os.path.join(os.path.dirname(app.__file__), 'styl')
                styl_path = os.path.dirname(app.__file__)
                if os.path.exists(styl_path):
                    app_paths.append((appname, styl_path))
            except Exception:
                self.print_error(u"Failed to import %s (%s)" % (appname, app))
        if recursive:
            # NOTE(review): removing items from app_paths while iterating it
            # can skip entries; consider building a filtered copy instead.
            for path in app_paths:
                for path2 in app_paths:
                    if path[1] != path2[1] and path2[1].startswith(path[1]):
                        app_paths.remove(path2)
        return app_paths

    def sigterm(self, signum, frame):
        # Stop the observer thread cleanly on SIGTERM/SIGINT, then exit.
        self.observer.stop()
        self.observer.join()
        exit(0)

    def watch(self, paths=[]):
        # NOTE(review): mutable default argument; `paths` is also unused here.
        signal.signal(signal.SIGTERM, self.sigterm)
        signal.signal(signal.SIGINT , self.sigterm)
        logging.basicConfig(level=logging.INFO,
                            format='%(asctime)s - %(message)s',
                            datefmt='%Y-%m-%d %H:%M:%S')
        self.observer.start()
        # Idle loop: the observer does its work on its own thread.
        try:
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            self.observer.stop()
        self.observer.join()

    def print_r(self, pattern, str):
        # Route output through the management command's stdout when present,
        # otherwise fall back to a plain print.
        output = pattern % (self.stout_prefix, str)
        if self.command:
            try:
                self.command.stdout.write(output)
            except:
                self.command.stdout.write(output.decode('utf8', 'ignore'))
            self.command.stdout.flush()
        else:
            print output

    def print_head(self, str):
        self.print_r("\033[95m[%s]\033[0m %s", str)

    def print_process(self, str):
        self.print_r("\033[95m[%s]\033[0m \033[93m%s\033[0m", str)

    def print_success(self, str):
        self.print_r("\033[95m[%s]\033[0m \033[92m%s\033[0m", str)

    def print_error(self, str):
        self.print_r("\033[95m[%s]\033[0m \033[91m%s\033[0m", str)
def getext(filename):
    "Get the file extension."
    _, extension = os.path.splitext(filename)
    return extension.lower()
| |
# Copyright 2014 Josh Pieper, jjp@pobox.com.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Inverse kinematics for 3dof legs.'''
import math
from ..tf.tf import Point3D
class Configuration(object):
    """Per-leg geometry and servo limits for the 3dof IK solver.

    All plain (non-callable, non-dunder) class attributes are treated as
    persistable settings by get_attributes().
    """
    coxa_min_deg = None
    coxa_idle_deg = None
    coxa_max_deg = None
    coxa_length_mm = None
    coxa_sign = 1
    coxa_ident = None

    femur_min_deg = None
    femur_idle_deg = None
    femur_max_deg = None
    femur_length_mm = None
    femur_sign = 1
    femur_ident = None

    tibia_min_deg = None
    tibia_idle_deg = None
    tibia_max_deg = None
    tibia_length_mm = None
    tibia_sign = 1
    tibia_ident = None

    servo_speed_dps = 360.0

    @staticmethod
    def get_attributes():
        """Return the names of all persistable settings."""
        def _is_setting(attr_name, attr_value):
            return (not attr_name.startswith('__') and
                    not callable(attr_value) and
                    not isinstance(attr_value, staticmethod))
        return [attr_name
                for attr_name, attr_value in Configuration.__dict__.iteritems()
                if _is_setting(attr_name, attr_value)]

    def write_settings(self, config, group_name):
        """Store every setting into a new section of a ConfigParser."""
        config.add_section(group_name)
        for attr_name in Configuration.get_attributes():
            config.set(group_name, attr_name, getattr(self, attr_name))

    @staticmethod
    def read_settings(config, group_name):
        """Build a Configuration from a ConfigParser section; sign/ident
        settings are read as ints, everything else as floats."""
        result = Configuration()
        for attr_name in Configuration.get_attributes():
            if not config.has_option(group_name, attr_name):
                continue
            if attr_name.endswith('sign') or attr_name.endswith('ident'):
                setattr(result, attr_name, config.getint(group_name, attr_name))
            else:
                setattr(result, attr_name, config.getfloat(group_name, attr_name))
        return result
class JointAngles(object):
    """A solved set of joint angles for one leg, plus its Configuration."""
    config = None
    coxa_deg = None   # positive is rotating clockwise viewed from top
    femur_deg = None  # positive is rotating upward
    tibia_deg = None  # positive is rotating upward

    def command_dict(self):
        '''Return a dictionary mapping servo identifiers to commands
        in degrees.  This is the same format as the servo_controller
        module uses.'''
        cfg = self.config
        return {cfg.coxa_ident: self.coxa_deg,
                cfg.femur_ident: self.femur_deg,
                cfg.tibia_ident: self.tibia_deg}
def lizard_3dof_ik(point_mm, config):
    '''Given a target end position in 3D coordinate space, return the
    required joint angles for a 3 degree of freedom lizard style
    leg.

    +y is away from the shoulder
    +x is clockwise from shoulder
    +z is up

    If no solution is possible, return None.
    '''
    # Solve for the coxa first, as it has only a single solution.
    coxa_deg = (config.coxa_sign *
                math.degrees(math.atan2(point_mm.x, point_mm.y)) +
                config.coxa_idle_deg)
    # Reject targets outside the coxa's mechanical range.
    if (coxa_deg < config.coxa_min_deg or
            coxa_deg > config.coxa_max_deg):
        return None
    # x-coordinate of femur/tibia pair after rotating to 0 coxa
    true_x = (math.sqrt(point_mm.x ** 2 + point_mm.y ** 2) -
              config.coxa_length_mm)
    im = math.sqrt(point_mm.z ** 2 + true_x ** 2)
    # The new femur/tibia pair makes a triangle where the 3rd side is
    # the hypotenuse of the right triangle composed of z and im, lets
    # call it c.
    #
    #  --\  femur
    #  |\ --\
    #  | \   --\
    #  |  --    |
    # z| im\    | tibia
    #  |    --\ |
    #  ----------
    #    true_x
    #
    # im = math.sqrt(z ** 2 + true_x ** 2)
    #
    # Then, we can use the law of cosines to find the angle opposite
    # im, which is the angle between the femur and tibia.
    #
    # im ** 2 = a ** 2 + b ** 2 + 2 * a * b * cos(C)
    #
    # Solving for C yields:
    #
    # C = acos((im ** 2 - a ** 2 - b ** 2) / (2 * a * b))
    tibia_cos = ((im ** 2 -
                  config.tibia_length_mm ** 2 -
                  config.femur_length_mm ** 2) /
                 (2 * config.tibia_length_mm * config.femur_length_mm))
    # Out-of-range cosine means the target is beyond (or inside) reach.
    if tibia_cos < -1.0 or tibia_cos > 1.0:
        return None
    # For our purposes, a 0 tibia angle should equate to a right angle
    # with the femur, so subtract off 90 degrees.
    tibia_deg = (config.tibia_sign *
                 math.degrees(0.5 * math.pi - math.acos(tibia_cos)) +
                 config.tibia_idle_deg)
    if (tibia_deg < config.tibia_min_deg or
            tibia_deg > config.tibia_max_deg):
        return None
    # To solve for the femur angle, we first get the angle opposite
    # true_x, then the angle opposite the tibia.
    true_x_deg = math.degrees(math.atan2(true_x, -point_mm.z))
    # Then the angle opposite the tibia is also found the via the law
    # of cosines.
    #
    # tibia ** 2 = femur ** 2 + im ** 2 + 2 * femur * im * cos(femur_im)
    #
    # femur_im = acos ( (tibia ** 2 - im ** 2 - femur ** 2) /
    #                   (2 * femur * im) )
    femur_im_cos = -(config.tibia_length_mm ** 2 -
                     config.femur_length_mm ** 2 -
                     im ** 2) / (2 * config.femur_length_mm * im)
    if femur_im_cos < -1.0 or femur_im_cos > 1.0:
        return None
    femur_im_deg = math.degrees(math.acos(femur_im_cos))
    femur_deg = (config.femur_sign * ((femur_im_deg + true_x_deg) - 90.0) +
                 config.femur_idle_deg)
    if (femur_deg < config.femur_min_deg or
            femur_deg > config.femur_max_deg):
        return None
    # All three joints are within limits; package up the solution.
    result = JointAngles()
    result.config = config
    result.coxa_deg = coxa_deg
    result.femur_deg = femur_deg
    result.tibia_deg = tibia_deg
    return result
class LizardIk(object):
    """Thin object wrapper binding a Configuration to lizard_3dof_ik."""

    def __init__(self, config):
        self.config = config

    def do_ik(self, point_mm):
        # Delegate to the module-level solver with the stored config.
        return lizard_3dof_ik(point_mm, self.config)

    def worst_case_speed_mm_s(self, point_mm, direction_mm=None):
        '''Return the worst case linear velocity the end effector can
        achieve in the given orientation.'''
        step = 0.01
        nominal = self.do_ik(point_mm)
        if nominal is None:
            return None
        servo_step = step * self.config.servo_speed_dps
        result = None

        def update(result, advanced_servo_deg, nominal_servo_deg):
            # This closure does not mutate the outer |result|; callers must
            # rebind it with the return value.
            # NOTE(review): when the angles are equal this returns None
            # rather than |result|, resetting the running minimum — confirm
            # whether that is intended.
            if advanced_servo_deg == nominal_servo_deg:
                return
            this_speed = (servo_step /
                          abs(advanced_servo_deg - nominal_servo_deg))
            if result is None or this_speed < result:
                result = this_speed
            return result

        if direction_mm:
            # Probe only along the requested (normalized) direction.
            normalized = direction_mm.scaled(1.0 / direction_mm.length())
            consider = [normalized.scaled(step)]
        else:
            # Probe along each axis.  NOTE(review): the un-parenthesized
            # tuple-of-tuples iterable below is Python 2 only syntax.
            consider = [Point3D(*val) for val in
                        (step, 0., 0.), (0., step, 0.), (0., 0., step)]

        for advance in consider:
            advanced = self.do_ik(point_mm + advance)
            if advanced is None:
                return None
            result = update(result, advanced.coxa_deg, nominal.coxa_deg)
            result = update(result, advanced.femur_deg, nominal.femur_deg)
            result = update(result, advanced.tibia_deg, nominal.tibia_deg)
        return result

    def servo_speed_dps(self):
        # Convenience accessor for the configured servo speed.
        return self.config.servo_speed_dps

    def largest_change_deg(self, result1, result2):
        # Largest absolute per-joint difference between two JointAngles.
        return max(abs(result1.coxa_deg - result2.coxa_deg),
                   abs(result1.femur_deg - result2.femur_deg),
                   abs(result1.tibia_deg - result2.tibia_deg))
| |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""PTransforms for supporting Kinesis streaming in Python pipelines.
These transforms are currently supported by Beam Flink and Spark portable
runners.
**Setup**
Transforms provided in this module are cross-language transforms
implemented in the Beam Java SDK. During the pipeline construction, Python SDK
will connect to a Java expansion service to expand these transforms.
To facilitate this, a small amount of setup is needed before using these
transforms in a Beam Python pipeline.
There are several ways to setup cross-language Kinesis transforms.
* Option 1: use the default expansion service
* Option 2: specify a custom expansion service
See below for details regarding each of these options.
*Option 1: Use the default expansion service*
This is the recommended and easiest setup option for using Python Kinesis
transforms. This option is only available for Beam 2.25.0 and later.
This option requires following pre-requisites before running the Beam
pipeline.
* Install Java runtime in the computer from where the pipeline is constructed
and make sure that 'java' command is available.
In this option, Python SDK will either download (for released Beam version) or
build (when running from a Beam Git clone) an expansion service jar and use
that to expand transforms. Currently Kinesis transforms use the
'beam-sdks-java-io-kinesis-expansion-service' jar for this purpose.
*Option 2: specify a custom expansion service*
In this option, you startup your own expansion service and provide that as
a parameter when using the transforms provided in this module.
This option requires following pre-requisites before running the Beam
pipeline.
* Startup your own expansion service.
* Update your pipeline to provide the expansion service address when
initiating Kinesis transforms provided in this module.
Flink Users can use the built-in Expansion Service of the Flink Runner's
Job Server. If you start Flink's Job Server, the expansion service will be
started on port 8097. For a different address, please set the
expansion_service parameter.
**More information**
For more information regarding cross-language transforms see:
- https://beam.apache.org/roadmap/portability/
For more information specific to Flink runner see:
- https://beam.apache.org/documentation/runners/flink/
"""
# pytype: skip-file
import logging
import time
from typing import Mapping
from typing import NamedTuple
from typing import Optional
from apache_beam import BeamJarExpansionService
from apache_beam import ExternalTransform
from apache_beam import NamedTupleBasedPayloadBuilder
# Public names exported by this module (used by `from ... import *`).
__all__ = [
    'WriteToKinesis',
    'ReadDataFromKinesis',
    'InitialPositionInStream',
    'WatermarkPolicy',
]
def default_io_expansion_service():
  """Return the default expansion service for Kinesis transforms.

  The Gradle target names the bundled Kinesis expansion-service jar, which
  is downloaded (released SDK) or built (Git clone) on demand.
  """
  gradle_target = 'sdks:java:io:kinesis:expansion-service:shadowJar'
  return BeamJarExpansionService(gradle_target)
class WriteToKinesisSchema(NamedTuple):
  """Payload schema for WriteToKinesis; field order must match the Java side."""
  stream_name: str
  aws_access_key: str
  aws_secret_key: str
  region: str
  partition_key: str
  service_endpoint: Optional[str]
  verify_certificate: Optional[bool]
  producer_properties: Optional[Mapping[str, str]]
class WriteToKinesis(ExternalTransform):
  """
  An external PTransform which writes byte array stream to Amazon Kinesis.

  Experimental; no backwards compatibility guarantees.
  """
  URN = 'beam:external:java:kinesis:write:v1'

  def __init__(
      self,
      stream_name,
      aws_access_key,
      aws_secret_key,
      region,
      partition_key,
      service_endpoint=None,
      verify_certificate=None,
      producer_properties=None,
      expansion_service=None,
  ):
    """
    Initializes a write operation to Kinesis.

    :param stream_name: Kinesis stream name.
    :param aws_access_key: Kinesis access key.
    :param aws_secret_key: Kinesis access key secret.
    :param region: AWS region. Example: 'us-east-1'.
    :param service_endpoint: Kinesis service endpoint
    :param verify_certificate: Enable or disable certificate verification.
        Never set to False on production. True by default.
    :param partition_key: Specify default partition key.
    :param producer_properties: Specify the configuration properties for Kinesis
        Producer Library (KPL) as dictionary.
        Example: {'CollectionMaxCount': '1000', 'ConnectTimeout': '10000'}
    :param expansion_service: The address (host:port) of the ExpansionService.
    """
    # Bundle all parameters into the cross-language payload expected by the
    # Java expansion service.
    payload = NamedTupleBasedPayloadBuilder(
        WriteToKinesisSchema(
            stream_name=stream_name,
            aws_access_key=aws_access_key,
            aws_secret_key=aws_secret_key,
            region=region,
            partition_key=partition_key,
            service_endpoint=service_endpoint,
            verify_certificate=verify_certificate,
            producer_properties=producer_properties,
        ))
    super().__init__(
        self.URN,
        payload,
        expansion_service or default_io_expansion_service(),
    )
class ReadFromKinesisSchema(NamedTuple):
  """Payload schema for ReadDataFromKinesis; field order must match Java."""
  stream_name: str
  aws_access_key: str
  aws_secret_key: str
  region: str
  service_endpoint: Optional[str]
  verify_certificate: Optional[bool]
  max_num_records: Optional[int]
  max_read_time: Optional[int]
  initial_position_in_stream: Optional[str]
  initial_timestamp_in_stream: Optional[int]
  request_records_limit: Optional[int]
  up_to_date_threshold: Optional[int]
  max_capacity_per_shard: Optional[int]
  watermark_policy: Optional[str]
  watermark_idle_duration_threshold: Optional[int]
  rate_limit: Optional[int]
class ReadDataFromKinesis(ExternalTransform):
  """
  An external PTransform which reads byte array stream from Amazon Kinesis.

  Experimental; no backwards compatibility guarantees.
  """
  URN = 'beam:external:java:kinesis:read_data:v1'

  def __init__(
      self,
      stream_name,
      aws_access_key,
      aws_secret_key,
      region,
      service_endpoint=None,
      verify_certificate=None,
      max_num_records=None,
      max_read_time=None,
      initial_position_in_stream=None,
      initial_timestamp_in_stream=None,
      request_records_limit=None,
      up_to_date_threshold=None,
      max_capacity_per_shard=None,
      watermark_policy=None,
      watermark_idle_duration_threshold=None,
      rate_limit=None,
      expansion_service=None,
  ):
    """
    Initializes a read operation from Kinesis.

    :param stream_name: Kinesis stream name.
    :param aws_access_key: Kinesis access key.
    :param aws_secret_key: Kinesis access key secret.
    :param region: AWS region. Example: 'us-east-1'.
    :param service_endpoint: Kinesis service endpoint
    :param verify_certificate: Enable or disable certificate verification.
        Never set to False on production. True by default.
    :param max_num_records: Specifies to read at most a given number of records.
        Must be greater than 0.
    :param max_read_time: Specifies to read records during x milliseconds.
    :param initial_timestamp_in_stream: Specify reading beginning at the given
        timestamp in milliseconds. Must be in the past.
    :param initial_position_in_stream: Specify reading from some initial
        position in stream. Possible values:
        LATEST - Start after the most recent data record (fetch new data).
        TRIM_HORIZON - Start from the oldest available data record.
        AT_TIMESTAMP - Start from the record at or after the specified
        server-side timestamp.
    :param request_records_limit: Specifies the maximum number of records in
        GetRecordsResult returned by GetRecords call which is limited by 10K
        records. If should be adjusted according to average size of data record
        to prevent shard overloading. More at:
        docs.aws.amazon.com/kinesis/latest/APIReference/API_GetRecords.html
    :param up_to_date_threshold: Specifies how late in milliseconds records
        consumed by this source can be to still be considered on time. Defaults
        to zero.
    :param max_capacity_per_shard: Specifies the maximum number of messages per
        one shard. Defaults to 10'000.
    :param watermark_policy: Specifies the watermark policy. Possible values:
        PROCESSING_TYPE, ARRIVAL_TIME. Defaults to ARRIVAL_TIME.
    :param watermark_idle_duration_threshold: Use only when watermark policy is
        ARRIVAL_TIME. Denotes the duration for which the watermark can be idle.
        Passed in milliseconds.
    :param rate_limit: Sets fixed rate policy for given milliseconds value. By
        default there is no rate limit.
    :param expansion_service: The address (host:port) of the ExpansionService.
    """
    # Validate enum-like parameters early, before contacting the expansion
    # service.
    WatermarkPolicy.validate_param(watermark_policy)
    InitialPositionInStream.validate_param(initial_position_in_stream)
    if watermark_idle_duration_threshold:
      # The idle-duration threshold is only meaningful for ARRIVAL_TIME.
      assert WatermarkPolicy.ARRIVAL_TIME == watermark_policy
    if request_records_limit:
      # Kinesis GetRecords caps Limit at 10,000 records per call.
      assert 0 < request_records_limit <= 10000
    initial_timestamp_in_stream = int(
        initial_timestamp_in_stream) if initial_timestamp_in_stream else None
    # The timestamp is expressed in milliseconds while time.time() returns
    # seconds, so scale before comparing.  Warn when the timestamp lies in
    # the future, since the API requires it to be in the past.
    # (The previous check compared milliseconds against seconds and fired
    # when the timestamp WAS in the past, so it never triggered usefully.)
    if (initial_timestamp_in_stream and
        initial_timestamp_in_stream > time.time() * 1000):
      logging.warning('Provided timestamp emplaced not in the past.')
    super().__init__(
        self.URN,
        NamedTupleBasedPayloadBuilder(
            ReadFromKinesisSchema(
                stream_name=stream_name,
                aws_access_key=aws_access_key,
                aws_secret_key=aws_secret_key,
                region=region,
                service_endpoint=service_endpoint,
                verify_certificate=verify_certificate,
                max_num_records=max_num_records,
                max_read_time=max_read_time,
                initial_position_in_stream=initial_position_in_stream,
                initial_timestamp_in_stream=initial_timestamp_in_stream,
                request_records_limit=request_records_limit,
                up_to_date_threshold=up_to_date_threshold,
                max_capacity_per_shard=max_capacity_per_shard,
                watermark_policy=watermark_policy,
                watermark_idle_duration_threshold=
                watermark_idle_duration_threshold,
                rate_limit=rate_limit,
            )),
        expansion_service or default_io_expansion_service(),
    )
class InitialPositionInStream:
  """Enumeration of valid starting positions for reading a Kinesis stream."""
  LATEST = 'LATEST'
  TRIM_HORIZON = 'TRIM_HORIZON'
  AT_TIMESTAMP = 'AT_TIMESTAMP'

  # Explicit whitelist of valid values.  hasattr()-based validation would
  # wrongly accept any class attribute name (e.g. 'validate_param' or
  # '__init__') as a valid initial position.
  _VALID = frozenset((LATEST, TRIM_HORIZON, AT_TIMESTAMP))

  @staticmethod
  def validate_param(param):
    """Raise RuntimeError if param is neither falsy nor a known position."""
    if param and param not in InitialPositionInStream._VALID:
      raise RuntimeError('Invalid initial position in stream: {}'.format(param))
class WatermarkPolicy:
  """Enumeration of valid watermark policies for the Kinesis source."""
  PROCESSING_TYPE = 'PROCESSING_TYPE'
  ARRIVAL_TIME = 'ARRIVAL_TIME'

  # Explicit whitelist of valid values.  hasattr()-based validation would
  # wrongly accept any class attribute name (e.g. 'validate_param') as a
  # valid policy.
  _VALID = frozenset((PROCESSING_TYPE, ARRIVAL_TIME))

  @staticmethod
  def validate_param(param):
    """Raise RuntimeError if param is neither falsy nor a known policy."""
    if param and param not in WatermarkPolicy._VALID:
      raise RuntimeError('Invalid watermark policy: {}'.format(param))
| |
#!/usr/bin/python
#
# bsg_booth_4_block_gen < number of continuous rows of 4 partial products >
#
#
#
# This script generates sections of partial product arrays for use in
# multipliers. (See Computer Arthmetic Google Doc.)
#
#
#
#
import sys;
def emit_module_header (name, input_args, output_args) :
    # Print the Verilog module header: "module <name> (" followed by one
    # port declaration per line.  The trailing comma on the first print
    # (Python 2 syntax) suppresses the newline so the first port continues
    # on the same line.
    print "module " + name + " (",
    my_list = []
    for x in input_args :
        my_list.append("input "+x+"\n");
    for x in output_args :
        my_list.append("output "+x+"\n");
    # Join the declarations with a comma aligned under the opening "(",
    # i.e. indented by len("module <name> (").
    print (" "*(len(name)+8)+",").join(my_list);
    print ");";
def emit_module_footer( ) :
    # Terminate the generated Verilog module.
    print "endmodule";
def emit_wire_definition (name) :
    # Emit a single Verilog wire declaration, followed by a newline.
    print "wire " + name + "; "
def emit_wire_definition_nocr (name) :
    # Same as emit_wire_definition, but the trailing comma (Python 2)
    # suppresses the newline so several declarations can share one line.
    print "wire " + name + "; ",
def emit_gate_instance (gate_str, arg_list ) :
    # Expand the #N placeholders in the gate template via gate_instance()
    # and print the resulting instantiation line.
    print gate_instance(gate_str,arg_list);
def queue_gate_instance(out_dict, gate_str, arg_list, order):
    """Expand a gate template and record it in out_dict, keyed by the
    rendered instantiation text and mapped to its emission order."""
    rendered = gate_instance(gate_str, arg_list)
    out_dict[rendered] = order
def gate_instance(gate_str, arg_list):
    """Substitute the positional placeholders #0, #1, ... in a gate
    template string with the corresponding entries of arg_list."""
    for index, value in enumerate(arg_list):
        gate_str = gate_str.replace("#%d" % index, value)
    return gate_str
def access_bit(name, bit):
    """Render a Verilog single-bit select, e.g. ('s_o', 3) -> 's_o[3]'."""
    return "{}[{}]".format(name, bit)
def access_2D_bit(name, word, bit):
    """Flatten a 2-D select on SDN_i (3 bits per word) into a 1-D index,
    appending the logical 2-D form as a Verilog comment.  Any other signal
    name yields the string "error"."""
    if name != "SDN_i":
        return "error"
    flat = word * 3 + bit
    return "{0}[{1}]/*{0}[{2}][{3}]*/".format(name, flat, word, bit)
def access_3D_bit(name, dof, word, bit):
    """Flatten a 3-D select on y_vec_i (4 words x 2 bits per dof) into a
    1-D index, appending the logical 3-D form as a Verilog comment.  Any
    other signal name yields the string "error"."""
    if name != "y_vec_i":
        return "error"
    words_per_dof = 4
    bits_per_word = 2
    flat = bits_per_word * (dof * words_per_dof + word) + bit
    return "{0}[{1}] /*{0}[{2}][{3}][{4}]*/".format(name, flat, dof, word, bit)
def param_bits_all(name, bit):
    """Render a Verilog packed-range declaration: '[bit-1:0] name'."""
    return "[{}:0] {}".format(bit - 1, name)
def param_bits_2D_all(name, words, bit):
    """Render a 2-D Verilog packed-range declaration:
    '[words-1:0][bit-1:0] name'."""
    return "[{}:0][{}:0] {}".format(words - 1, bit - 1, name)
def param_bits_3D_all(name, words, bit, zop):
    """Render a 3-D Verilog packed-range declaration:
    '[words-1:0][bit-1:0][zop-1:0] name'."""
    return "[{}:0][{}:0][{}:0] {}".format(words - 1, bit - 1, zop - 1, name)
def ident_name_word_bit(name, word, bit):
    """Build a wire identifier qualified by word and bit: name_w<w>_b<b>."""
    return "{}_w{}_b{}".format(name, word, bit)
def ident_name_bit_port(name, bit, port):
    """Build a wire identifier qualified by bit and port: name_b<b>_p<p>."""
    return "{}_b{}_p{}".format(name, bit, port)
def ident_name_word_bit_port(name, word, bit, port):
    """Build a wire identifier qualified by word, bit and port:
    name_w<w>_b<b>_p<p>."""
    return "{}_w{}_b{}_p{}".format(name, word, bit, port)
def ident_name_bit(name, bit):
    """Build an identifier qualified by a single bit index: name_b<b>."""
    return "{}_b{}".format(name, bit)
def emit_rp_group_begin (name) :
    # Open a Synopsys relative-placement group pragma for the named group.
    print "// synopsys rp_group (" + name + ")"
def emit_rp_group_end (name) :
    # Close the Synopsys relative-placement group opened by
    # emit_rp_group_begin().
    print "// synopsys rp_endgroup (" + name +")"
def emit_rp_fill (params):
    # Emit a Synopsys rp_fill placement pragma; params (e.g. "0 3 RX") are
    # passed through verbatim.
    print "// synopsys rp_fill (" + params +")"
# NOTE: for symmetric pins, assume that earlier ones are always faster.
# For example, for AOI22 A's are faster than B's and A0 is faster than A1.
# fixme: the code currently assumes that the A input of ADDFHX's are the slowest
#        input, which is true in TSMC. We should fix the code so that we swizzle
#        the order in the below string, rather than in the code base, which is
#        just confusing.

# Target fabrication process; becomes part of the generated module name.
fab = "tsmc_40"

# Gate instantiation templates consumed by gate_instance(): placeholder #0
# is the instance name, the remaining #N placeholders are port connections.
aoi22 = "AOI22X1 #0 (.A0(#1), .A1(#2), .B0(#3), .B1(#4), .Y(#5) );"
xnor2 = "XNOR2X1 #0 (.A (#1), .B (#2), .Y (#3) );"
addf  = "ADDFHX1 #0 (.A (#1), .B (#2), .CI (#3), .S(#4), .CO(#5) );"

# Physical layout of one partial-product row (two placement columns):
#
#    AOI22 (3)   XNOR2 (2)   CSA   AOI22 (1)   XNOR2 (0)
#    XNOR2 (3)   AOI22 (2)   CSA   XNOR2 (1)   AOI22 (0)
#
def generate_booth_4_block ( rows ) :
    # Generate a Verilog module holding <rows> contiguous rows of a radix-4
    # Booth-encoded partial-product array, annotated with Synopsys
    # relative-placement pragmas.  Output goes to stdout.
    module_name = ident_name_bit("bsg_rp_"+fab+"_booth_4_block",rows);
    # Ports: SDN_i is a flattened 5x3 array of Booth select signals,
    # y_vec_i a flattened rows x 4 x 2 array of encoded multiplier digits;
    # cr_i / cl_o chain carries between adjacent blocks, c_o / s_o are the
    # per-row carry and sum outputs.
    emit_module_header (module_name
        , [ param_bits_all("SDN_i",5*3) + " /*" + param_bits_2D_all("SDN_i",5,3)+ "*/"
          , "cr_i"
          , param_bits_all("y_vec_i",rows*4*2) + " /*" + param_bits_3D_all("y_vec_i",rows,4,2) + "*/"
          ]
        , [ "cl_o", param_bits_all("c_o",rows), param_bits_all("s_o",rows)]
        );
    column = 0
    emit_rp_group_begin("b4b")
    for pos in range (0,rows) :
        print ""
        # Per-row intermediate nets: partial products (pp), AOI stage
        # outputs (aoi), the carry link to the next row (cl), and the
        # first-adder sum (s0).
        print "wire " + ",".join([ident_name_word_bit("pp",pos,b) for b in range(0,4)])+";"
        print "wire " + ",".join([ident_name_word_bit("aoi",pos,b) for b in range(0,4)])+";"
        print "wire " + ",".join([ident_name_word_bit("cl",pos,b) for b in range(0,1)])+";"
        print "wire " + ",".join([ident_name_word_bit("s0",pos,b) for b in range(0,1)])+";"
        # First placement row (even) of this partial-product row.
        emit_rp_fill("0 " + str(pos*2) + " RX");
        #3
        emit_gate_instance(xnor2
            , [ ident_name_word_bit ("xnor2" ,pos ,3)
              , ident_name_word_bit("aoi" ,pos ,3)
              , access_2D_bit ("SDN_i" ,3+1,0)
              , ident_name_word_bit("pp" ,pos ,3)
              ]);
        #2
        emit_gate_instance(aoi22
            , [ ident_name_word_bit ("aoi2" ,pos,2)
              , access_2D_bit ("SDN_i" ,2+1,1)
              , access_3D_bit ("y_vec_i" ,pos,2,0)
              , access_2D_bit ("SDN_i" ,2+1,2)
              , access_3D_bit ("y_vec_i" ,pos,2,1)
              , ident_name_word_bit("aoi" ,pos,2)
              ]);
        # insert ADDFH here
        # fixme define these bits
        # First full adder: compresses pp[3..1]; its carry feeds the next
        # row (or cl_o on the last row).
        emit_gate_instance(addf
            , [ ident_name_word_bit("add42", pos, 0)
              , ident_name_word_bit("pp", pos, 3)
              , ident_name_word_bit("pp", pos, 2)
              , ident_name_word_bit("pp", pos, 1)
              , ident_name_word_bit("s0", pos, 0)
              , ident_name_word_bit("cl", pos, 0) if (pos < rows-1) else "cl_o"
              ]);
        #1
        emit_gate_instance(xnor2
            , [ ident_name_word_bit ("xnor2",pos,1)
              , ident_name_word_bit("aoi" ,pos,1)
              , access_2D_bit ("SDN_i", 1+1,0)
              , ident_name_word_bit("pp" ,pos,1)
              ]);
        #0
        emit_gate_instance(aoi22
            , [ ident_name_word_bit ("aoi2" ,pos ,0)
              , access_2D_bit ("SDN_i" ,0+1 ,1)
              , access_3D_bit ("y_vec_i" ,pos ,0,0)
              , access_2D_bit ("SDN_i" ,0+1 ,2)
              , access_3D_bit ("y_vec_i" ,pos ,0,1)
              , ident_name_word_bit("aoi" ,pos ,0)
              ]);
        # Second placement row (odd) of this partial-product row.
        emit_rp_fill("0 " + str(pos*2+1) + " RX");
        #3
        emit_gate_instance(aoi22
            , [ ident_name_word_bit ("aoi2" ,pos,3)
              , access_2D_bit ("SDN_i" ,3+1,1)
              , access_3D_bit ("y_vec_i" ,pos,3 ,0)
              , access_2D_bit ("SDN_i" ,3+1,2)
              , access_3D_bit ("y_vec_i" ,pos,3 ,1)
              , ident_name_word_bit("aoi" ,pos,3)
              ]);
        #2
        emit_gate_instance(xnor2
            , [ ident_name_word_bit ("xnor2" ,pos ,2)
              , ident_name_word_bit("aoi" ,pos ,2)
              , access_2D_bit ("SDN_i" ,2+1,0)
              , ident_name_word_bit("pp" ,pos ,2)
              ]);
        # insert ADDF here
        # Second full adder: combines pp[0], the first adder's sum, and the
        # carry from the previous row (or cr_i on row 0) into s_o / c_o.
        emit_gate_instance(addf
            , [ ident_name_word_bit("add42", pos, 1)
              , ident_name_word_bit("pp" , pos, 0)
              , ident_name_word_bit("s0" , pos, 0)
              , ident_name_word_bit("cl" , pos-1,0) if (pos > 0) else "cr_i"
              , access_bit("s_o", pos)
              , access_bit("c_o", pos)
              ]);
        #1
        emit_gate_instance(aoi22
            , [ ident_name_word_bit ("aoi2" ,pos,1)
              , access_2D_bit ("SDN_i" ,1+1,1)
              , access_3D_bit ("y_vec_i" ,pos,1 ,0)
              , access_2D_bit ("SDN_i" ,1+1,2)
              , access_3D_bit ("y_vec_i" ,pos,1 ,1)
              , ident_name_word_bit("aoi" ,pos,1)
              ]);
        #0
        emit_gate_instance(xnor2
            , [ ident_name_word_bit ("xnor2",pos,0)
              , ident_name_word_bit("aoi" ,pos,0)
              , access_2D_bit ("SDN_i", 0+1,0)
              , ident_name_word_bit("pp" ,pos,0)
              ]);
    emit_rp_group_end("b4b")
    emit_module_footer()
# Script entry point: expects exactly one argument, the number of rows.
if len(sys.argv) == 2 :
    generate_booth_4_block (int(sys.argv[1]));
else :
    print "Usage: " + sys.argv[0] + " rows";
| |
#!/usr/bin/env python
##########################################################################
#
# Copyright 2011 Jose Fonseca
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
'''Run two retrace instances in parallel, comparing generated snapshots.
'''
import math
import optparse
import os.path
import subprocess
import platform
import sys
from PIL import Image
from snapdiff import Comparer
from highlight import AutoHighlighter
import jsondiff
# Null file, to use when we're not interested in subprocesses output.
# (Opened explicitly per platform; subprocess.DEVNULL is not available on
# the Python 2 this script targets.)
if platform.system() == 'Windows':
    NULL = open('NUL:', 'wb')
else:
    NULL = open('/dev/null', 'wb')
class RetraceRun:
    """Wraps a running retrace subprocess that streams PNM snapshots,
    each tagged with its call number, on stdout."""

    def __init__(self, process):
        self.process = process

    def nextSnapshot(self):
        """Read the next snapshot; returns (image, callNo), or (None, None)
        once the subprocess's output is exhausted."""
        snapshot, note = read_pnm(self.process.stdout)
        if snapshot is None:
            return None, None
        return snapshot, int(note.strip())

    def terminate(self):
        """Kill the subprocess, ignoring the race where it already exited."""
        try:
            self.process.terminate()
        except OSError:
            # Avoid http://bugs.python.org/issue14252
            pass
class Retracer:
    # Builds and launches glretrace (or another retrace executable)
    # invocations with a fixed set of trailing arguments and an optional
    # environment override.  (Python 2 code: note `except OSError, ex`
    # and dict.iteritems below.)

    def __init__(self, retraceExe, args, env=None):
        self.retraceExe = retraceExe
        self.args = args
        self.env = env

    def _retrace(self, args, stdout=subprocess.PIPE):
        # Launch the retrace executable with `args` followed by the
        # instance's fixed arguments; echo the command line to stderr.
        cmd = [
            self.retraceExe,
        ] + args + self.args
        if self.env:
            for name, value in self.env.iteritems():
                sys.stderr.write('%s=%s ' % (name, value))
        sys.stderr.write(' '.join(cmd) + '\n')
        try:
            # stderr is discarded into the module-level NULL device.
            return subprocess.Popen(cmd, env=self.env, stdout=stdout, stderr=NULL)
        except OSError, ex:
            sys.stderr.write('error: failed to execute %s: %s\n' % (cmd[0], ex.strerror))
            sys.exit(1)

    def retrace(self, args):
        # Run a plain retrace to completion and return its exit code.
        # NOTE(review): the `args` parameter is ignored here (an empty list
        # is passed to _retrace) — looks intentional upstream, but confirm.
        p = self._retrace([])
        p.wait()
        return p.returncode

    def snapshot(self, call_nos):
        # Start a retrace that writes snapshots for the given callset to
        # stdout ('-s -'); wrap the process in a RetraceRun for consumption.
        process = self._retrace([
            '-s', '-',
            '-S', call_nos,
        ])
        return RetraceRun(process)

    def dump_state(self, call_no):
        '''Get the state dump at the specified call no.'''
        p = self._retrace([
            '-D', str(call_no),
        ])
        state = jsondiff.load(p.stdout)
        p.wait()
        return state.get('parameters', {})

    def diff_state(self, ref_call_no, src_call_no, stream):
        '''Compare the state between two calls.'''
        ref_state = self.dump_state(ref_call_no)
        src_state = self.dump_state(src_call_no)
        stream.flush()
        differ = jsondiff.Differ(stream)
        differ.visit(ref_state, src_state)
        stream.write('\n')
def read_pnm(stream):
    '''Read a PNM from the stream, and return the image object, and the comment.

    Supports the standard binary greymap/pixmap magics (P5/P6), the float
    variants (Pf/PF), and a nonstandard 4-channel float variant (PX).
    Returns (None, None) at end of stream.  8-bit images come back as PIL
    Image objects; float images as numpy arrays of shape
    (height, width, channels).
    '''
    magic = stream.readline()
    if not magic:
        # End of stream: no more snapshots.
        return None, None
    magic = magic.rstrip()
    if magic == 'P5':
        channels = 1
        bytesPerChannel = 1
        mode = 'L'
    elif magic == 'P6':
        channels = 3
        bytesPerChannel = 1
        mode = 'RGB'
    elif magic == 'Pf':
        channels = 1
        bytesPerChannel = 4
        mode = 'R'
    elif magic == 'PF':
        channels = 3
        bytesPerChannel = 4
        mode = 'RGB'
    elif magic == 'PX':
        channels = 4
        bytesPerChannel = 4
        mode = 'RGB'
    else:
        raise Exception('Unsupported magic `%s`' % magic)
    # Accumulate '#' comment lines (the retrace writes the call number here).
    comment = ''
    line = stream.readline()
    while line.startswith('#'):
        comment += line[1:]
        line = stream.readline()
    width, height = map(int, line.strip().split())
    maximum = int(stream.readline().strip())
    # Sanity-check the maxval declared in the header against the format.
    if bytesPerChannel == 1:
        assert maximum == 255
    else:
        assert maximum == 1
    data = stream.read(height * width * channels * bytesPerChannel)
    if bytesPerChannel == 4:
        # Image magic only supports single channel floating point images, so
        # represent the image as numpy arrays
        # NOTE(review): numpy.fromstring is deprecated in modern numpy
        # (frombuffer is the replacement) — fine for the py2-era numpy this
        # script targets.
        import numpy
        pixels = numpy.fromstring(data, dtype=numpy.float32)
        pixels.resize((height, width, channels))
        return pixels, comment
    image = Image.frombuffer(mode, (width, height), data, 'raw', mode, 0, 1)
    return image, comment
def dumpNumpyImage(output, pixels, filename):
    '''Save a floating point numpy image to *filename* as an 8-bit PNG.

    :param output: text stream, used only by the disabled debug dump below.
    :param pixels: numpy array of shape (height, width, channels) with
        float values nominally in [0, 1].
    :param filename: destination image path.
    '''
    height, width, channels = pixels.shape
    import numpy
    # Quantize floats to 8-bit with saturation.
    pixels = (pixels*255).clip(0, 255).astype('uint8')
    if 0:
        # XXX: Doesn't work somehow
        im = Image.fromarray(pixels)
    else:
        # http://code.activestate.com/recipes/577591-conversion-of-pil-image-and-numpy-array/
        pixels = pixels.reshape(height*width, channels)
        if channels == 4:
            mode = 'RGBA'
        else:
            if channels < 3:
                # Pad missing channels with 255 so PIL can treat the buffer
                # as RGB.  (Fixed: this line previously referenced the
                # undefined names `arr` and `heigth`, and `channels` was
                # never updated, so the assert below always fired.)
                pixels = numpy.c_[pixels, 255*numpy.ones((height * width, 3 - channels), numpy.uint8)]
                channels = 3
            assert channels == 3
            mode = 'RGB'
        im = Image.frombuffer(mode, (width, height), pixels.tostring(), 'raw', mode, 0, 1)
    im.save(filename)
    if 0:
        # Debug aid: dump quantized pixel values to stdout.
        for y in range(height):
            output.write(' ')
            for x in range(width):
                for c in range(channels):
                    output.write('%0.9g,' % pixels[y, x, c])
                output.write(' ')
            output.write('\n')
def parse_env(optparser, entries):
    '''Build an environment dictionary from a list of NAME=VALUE entries,
    layered on top of a copy of os.environ.  Returns None when no entries
    were given; reports malformed entries via optparser.error().'''
    if not entries:
        return None
    env = os.environ.copy()
    for entry in entries:
        name, sep, value = entry.partition('=')
        if not sep:
            optparser.error('invalid environment entry %r' % entry)
        env[name] = value
    return env
def main():
    '''Main program.

    Runs the reference and source retraces in lockstep, compares each pair
    of snapshots, and reports a per-call precision figure; mismatching
    calls optionally get their images and state diffs dumped.
    '''
    global options
    # Parse command line options
    optparser = optparse.OptionParser(
        usage='\n\t%prog [options] -- [glretrace options] <trace>',
        version='%%prog')
    optparser.add_option(
        '-r', '--retrace', metavar='PROGRAM',
        type='string', dest='retrace', default='glretrace',
        help='retrace command [default: %default]')
    optparser.add_option(
        '--ref-driver', metavar='DRIVER',
        type='string', dest='ref_driver', default=None,
        help='force reference driver')
    optparser.add_option(
        '--src-driver', metavar='DRIVER',
        type='string', dest='src_driver', default=None,
        help='force source driver')
    optparser.add_option(
        '--ref-arg', metavar='OPTION',
        type='string', action='append', dest='ref_args', default=[],
        help='pass argument to reference retrace')
    optparser.add_option(
        '--src-arg', metavar='OPTION',
        type='string', action='append', dest='src_args', default=[],
        help='pass argument to source retrace')
    optparser.add_option(
        '--ref-env', metavar='NAME=VALUE',
        type='string', action='append', dest='ref_env', default=[],
        help='add variable to reference environment')
    optparser.add_option(
        '--src-env', metavar='NAME=VALUE',
        type='string', action='append', dest='src_env', default=[],
        help='add variable to source environment')
    optparser.add_option(
        '--diff-prefix', metavar='PATH',
        type='string', dest='diff_prefix', default='.',
        help='prefix for the difference images')
    optparser.add_option(
        '-t', '--threshold', metavar='BITS',
        type="float", dest="threshold", default=12.0,
        help="threshold precision [default: %default]")
    optparser.add_option(
        '-S', '--snapshot-frequency', metavar='CALLSET',
        type="string", dest="snapshot_frequency", default='draw',
        help="calls to compare [default: %default]")
    optparser.add_option(
        '--diff-state',
        action='store_true', dest='diff_state', default=False,
        help='diff state between failing calls')
    optparser.add_option(
        '-o', '--output', metavar='FILE',
        type="string", dest="output",
        help="output file [default: stdout]")
    (options, args) = optparser.parse_args(sys.argv[1:])
    ref_env = parse_env(optparser, options.ref_env)
    src_env = parse_env(optparser, options.src_env)
    if not args:
        optparser.error("incorrect number of arguments")
    if options.ref_driver:
        options.ref_args.insert(0, '--driver=' + options.ref_driver)
    if options.src_driver:
        options.src_args.insert(0, '--driver=' + options.src_driver)
    refRetracer = Retracer(options.retrace, options.ref_args + args, ref_env)
    srcRetracer = Retracer(options.retrace, options.src_args + args, src_env)
    if options.output:
        output = open(options.output, 'wt')
    else:
        output = sys.stdout
    highligher = AutoHighlighter(output)
    highligher.write('call\tprecision\n')
    # Track the most recent failing/passing call numbers so state diffs can
    # compare a failing call against the last known-good one.
    last_bad = -1
    last_good = 0
    refRun = refRetracer.snapshot(options.snapshot_frequency)
    try:
        srcRun = srcRetracer.snapshot(options.snapshot_frequency)
        try:
            while True:
                # Get the reference image
                refImage, refCallNo = refRun.nextSnapshot()
                if refImage is None:
                    break
                # Get the source image
                srcImage, srcCallNo = srcRun.nextSnapshot()
                if srcImage is None:
                    break
                # Both retraces snapshot the same callset, so the streams
                # must stay in lockstep.
                assert refCallNo == srcCallNo
                callNo = refCallNo
                # Compare the two images
                if isinstance(refImage, Image.Image) and isinstance(srcImage, Image.Image):
                    # Using PIL
                    numpyImages = False
                    comparer = Comparer(refImage, srcImage)
                    precision = comparer.precision()
                else:
                    # Using numpy (for floating point images)
                    # TODO: drop PIL when numpy path becomes general enough
                    import numpy
                    assert not isinstance(refImage, Image.Image)
                    assert not isinstance(srcImage, Image.Image)
                    numpyImages = True
                    assert refImage.shape == srcImage.shape
                    # Precision in bits, derived from the mean squared error
                    # (epsilon avoids log(0) on identical images).
                    diffImage = numpy.square(srcImage - refImage)
                    height, width, channels = diffImage.shape
                    square_error = numpy.sum(diffImage)
                    square_error += numpy.finfo(numpy.float32).eps
                    rel_error = square_error / float(height*width*channels)
                    bits = -math.log(rel_error)/math.log(2.0)
                    precision = bits
                mismatch = precision < options.threshold
                if mismatch:
                    highligher.color(highligher.red)
                    highligher.bold()
                highligher.write('%u\t%f\n' % (callNo, precision))
                if mismatch:
                    highligher.normal()
                if mismatch:
                    if options.diff_prefix:
                        # Dump ref/src (and, for PIL images, diff) images
                        # under a zero-padded per-call prefix.
                        prefix = os.path.join(options.diff_prefix, '%010u' % callNo)
                        prefix_dir = os.path.dirname(prefix)
                        if not os.path.isdir(prefix_dir):
                            os.makedirs(prefix_dir)
                        if numpyImages:
                            dumpNumpyImage(output, refImage, prefix + '.ref.png')
                            dumpNumpyImage(output, srcImage, prefix + '.src.png')
                        else:
                            refImage.save(prefix + '.ref.png')
                            srcImage.save(prefix + '.src.png')
                            comparer.write_diff(prefix + '.diff.png')
                    # Only diff state once per run of failures (when the
                    # previous call was good).
                    if last_bad < last_good and options.diff_state:
                        srcRetracer.diff_state(last_good, callNo, output)
                    last_bad = callNo
                else:
                    last_good = callNo
                highligher.flush()
        finally:
            srcRun.terminate()
    finally:
        refRun.terminate()
# Script entry point.
if __name__ == '__main__':
    main()
| |
# -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError)
from .exceptions import (
HTTPError, RequestException, MissingSchema, InvalidURL,
ChunkedEncodingError, ContentDecodingError, ConnectionError,
StreamConsumedError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring)
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,               # 301
    codes.found,               # 302
    codes.other,               # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

#: Maximum number of redirects followed before giving up.
DEFAULT_REDIRECT_LIMIT = 30

#: Default chunk sizes (bytes) for content iteration helpers; the
#: consumers live further down in this module.
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512

# Alias the serializer so the name `json` stays usable as a parameter name
# (e.g. Request(json=...)) without losing access to json.dumps.
json_dumps = json.dumps
class RequestEncodingMixin(object):
    """Mixin providing URL-path and body encoding helpers.

    Expects ``self.url`` to be provided by the class it is mixed into.
    """
    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            # A bare host still needs to request the root path.
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.

        Strings and file-like objects are returned unchanged.
        """

        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, 'read'):
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                # Allow a scalar value or an iterable of values per key;
                # a bare string counts as a scalar, not an iterable.
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.

        Returns a ``(body, content_type)`` pair.
        :raises ValueError: if no files are given, or data is a string.
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        # Regular form fields go in first, one RequestField-compatible
        # (name, value) tuple per value.
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))

        for (k, v) in files:
            # support for explicit filename: v may be a bare file object,
            # or a (filename, fileobj[, content_type[, headers]]) tuple.
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v
            # Wrap raw string/bytes payloads in a file-like object so the
            # uniform fp.read() below works.
            if isinstance(fp, str):
                fp = StringIO(fp)
            if isinstance(fp, bytes):
                fp = BytesIO(fp)

            rf = RequestField(name=k, data=fp.read(),
                              filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type
class RequestHooksMixin(object):
    """Mixin providing hook (de)registration on a ``self.hooks`` dict."""

    def register_hook(self, event, hook):
        """Properly register a hook.

        :param event: name of an event already present in ``self.hooks``.
        :param hook: a callable, or an iterable of callables (non-callable
            items in an iterable are silently skipped).
        :raises ValueError: if *event* is not a known hook event.
        """
        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        # Use the callable() builtin instead of isinstance(hook,
        # collections.Callable): collections.Callable was removed from the
        # collections top level in Python 3.10 (it lives in collections.abc).
        if callable(hook):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if callable(h))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
    :param json: json for the body to attach to the request (if data is not specified).
    :param params: dictionary of URL parameters to append to the URL.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """
    def __init__(self, method=None, url=None, headers=None, files=None,
                 data=None, params=None, auth=None, cookies=None, hooks=None,
                 json=None):
        # Replace unset collection arguments with fresh empty containers so
        # the rest of the pipeline can treat them uniformly.
        if data is None:
            data = []
        if files is None:
            files = []
        if headers is None:
            headers = {}
        if params is None:
            params = {}
        if hooks is None:
            hooks = {}

        # Start from the default hook registry, then fold in the caller's
        # hooks through register_hook() so they are validated.
        self.hooks = default_hooks()
        for event, hook in list(hooks.items()):
            self.register_hook(event=event, hook=hook)

        self.method = method
        self.url = url
        self.headers = headers
        self.files = files
        self.data = data
        self.json = json
        self.params = params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return '<Request [%s]>' % self.method

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Generated from either a :class:`Request <Request>` object or manually.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> r = req.prepare()
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()

    def prepare(self, method=None, url=None, headers=None, files=None,
                data=None, params=None, auth=None, cookies=None, hooks=None,
                json=None):
        """Prepares the entire request with the given parameters."""

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)
        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return '<PreparedRequest [%s]>' % (self.method)

    def copy(self):
        # Shallow-ish copy: headers and cookies are duplicated; the body and
        # hooks are shared with the original.
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = self._cookies.copy() if self._cookies is not None else None
        p.body = self.body
        p.hooks = self.hooks
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            # HTTP methods are case-sensitive and conventionally uppercase.
            self.method = self.method.upper()

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindy call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/kennethreitz/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode('utf8')
        else:
            url = unicode(url) if is_py2 else str(url)

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ':' in url and not url.lower().startswith('http'):
            self.url = url
            return

        # Support for unicode domain names and paths.
        scheme, auth, host, port, path, query, fragment = parse_url(url)

        if not scheme:
            raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
                                "Perhaps you meant http://{0}?".format(url))

        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)

        # Only want to apply IDNA to the hostname
        try:
            host = host.encode('idna').decode('utf-8')
        except UnicodeError:
            raise InvalidURL('URL has an invalid label.')

        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)

        # Bare domains aren't valid URLs.
        if not path:
            path = '/'

        # On Python 2, urlunparse needs consistently byte-typed components.
        if is_py2:
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')

            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')

            if isinstance(path, str):
                path = path.encode('utf-8')

            if isinstance(query, str):
                query = query.encode('utf-8')

            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')

        # Merge explicit params into any query string already in the URL.
        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        if headers:
            self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
        else:
            self.headers = CaseInsensitiveDict()

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None
        length = None

        if json is not None:
            content_type = 'application/json'
            body = json_dumps(json)

        # A "stream" is any iterable body that isn't one of the standard
        # container/string types.
        is_stream = all([
            hasattr(data, '__iter__'),
            not isinstance(data, (basestring, list, tuple, dict))
        ])

        try:
            length = super_len(data)
        except (TypeError, AttributeError, UnsupportedOperation):
            length = None

        if is_stream:
            body = data

            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

            if length is not None:
                self.headers['Content-Length'] = builtin_str(length)
            else:
                # Streams of unknown length are sent chunked.
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                # `data` overrides a json body if both are given.
                if data and json is None:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, 'read'):
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ('content-type' not in self.headers):
                self.headers['Content-Type'] = content_type

        self.body = body

    def prepare_content_length(self, body):
        # Derive Content-Length from the body: by seeking for file-like
        # objects, by super_len() otherwise; GET/HEAD without a body get
        # no Content-Length, everything else defaults to '0'.
        if hasattr(body, 'seek') and hasattr(body, 'tell'):
            body.seek(0, 2)
            self.headers['Content-Length'] = builtin_str(body.tell())
            body.seek(0, 0)
        elif body is not None:
            l = super_len(body)
            if l:
                self.headers['Content-Length'] = builtin_str(l)
        elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
            self.headers['Content-Length'] = '0'

    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data."""

        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers['Cookie'] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        for event in hooks:
            self.register_hook(event, hooks[event])
class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    # Attributes that make up the picklable state of a Response (see
    # __getstate__/__setstate__ below).  ``raw`` is deliberately absent:
    # the underlying urllib3 object cannot be pickled.
    __attrs__ = [
        '_content',
        'status_code',
        'headers',
        'url',
        'history',
        'encoding',
        'reason',
        'cookies',
        'elapsed',
        'request',
    ]

    def __init__(self):
        super(Response, self).__init__()

        # ``False`` is the "not read yet" sentinel; once the body has been
        # read, _content holds bytes (or None if reading was impossible).
        self._content = False
        self._content_consumed = False

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        # This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta)
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None

    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content

        # Only the attributes listed in __attrs__ are pickled; missing
        # ones are stored as None.
        return dict(
            (attr, getattr(self, attr, None))
            for attr in self.__attrs__
        )

    def __setstate__(self, state):
        # Restore the pickled attributes verbatim.
        for name, value in state.items():
            setattr(self, name, value)

        # pickled objects do not have .raw
        setattr(self, '_content_consumed', True)
        setattr(self, 'raw', None)

    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)

    def __bool__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        # Python 3 truthiness hook.
        return self.ok

    def __nonzero__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        # Python 2 truthiness hook (same semantics as __bool__).
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        # Iterates the body in 128-byte chunks.
        return self.iter_content(128)

    @property
    def ok(self):
        # True unless raise_for_status() would raise (i.e. status < 400).
        try:
            self.raise_for_status()
        except RequestException:
            return False
        return True

    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
        return ('location' in self.headers and self.status_code in REDIRECT_STATI)

    @property
    def is_permanent_redirect(self):
        """True if this Response is one of the permanent versions of redirect."""
        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library"""
        # Note: this reads the full body via self.content.
        return chardet.detect(self.content)['encoding']

    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data. When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses. The chunk size is the number of bytes it should
        read into memory. This is not necessarily the length of each item
        returned as decoding can take place.
        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """
        def generate():
            # Stream straight from the raw connection, translating the
            # low-level urllib3 exceptions into requests' own types.
            try:
                # Special case for urllib3.
                try:
                    for chunk in self.raw.stream(chunk_size, decode_content=True):
                        yield chunk
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
            except AttributeError:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        # _content is still the boolean sentinel while the stream is
        # unread; consumed + sentinel means the stream is gone for good.
        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()

        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)

        stream_chunks = generate()

        # Re-slice already-read content, or stream fresh from the wire.
        chunks = reused_chunks if self._content_consumed else stream_chunks

        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)

        return chunks

    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.
        """
        # ``pending`` carries an incomplete trailing line between chunks.
        pending = None

        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):

            if pending is not None:
                chunk = pending + chunk
            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()

            # If the chunk did not end on a line boundary, hold the last
            # partial line back until the next chunk arrives.
            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            for line in lines:
                yield line

        if pending is not None:
            yield pending

    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Read the contents.
            try:
                if self._content_consumed:
                    raise RuntimeError(
                        'The content for this response was already consumed')

                if self.status_code == 0:
                    self._content = None
                else:
                    self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()

            except AttributeError:
                # self.raw was never set (no underlying stream).
                self._content = None

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.
        If Response.encoding is None, encoding will be guessed using
        ``chardet``.
        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """
        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return str('')

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')

        return content

    def json(self, **kwargs):
        """Returns the json-encoded content of a response, if any.
        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        """

        if not self.encoding and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return json.loads(self.content.decode(encoding), **kwargs)
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
        return json.loads(self.text, **kwargs)

    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get('link')

        # l = MultiDict()
        l = {}

        if header:
            links = parse_header_links(header)

            for link in links:
                # Prefer the 'rel' attribute as the key; fall back to the URL.
                key = link.get('rel') or link.get('url')
                l[key] = link

        return l

    def raise_for_status(self):
        """Raises stored :class:`HTTPError`, if one occurred."""

        http_error_msg = ''

        if 400 <= self.status_code < 500:
            http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)

        elif 500 <= self.status_code < 600:
            http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.
        *Note: Should not normally need to be called explicitly.*
        """
        return self.raw.release_conn()
| |
"""
mpsim module. Contains the ModpathSim class. Note that the user can access
the ModpathSim class as `flopy.modpath.ModpathSim`.
Additional information for this MODFLOW/MODPATH package can be found at the `Online
MODFLOW Guide
<http://water.usgs.gov/ogw/modflow/MODFLOW-2005-Guide/index.html?dis.htm>`_.
"""
import numpy as np
from ..pakbase import Package
from ..utils import Util2d, Util3d
class ModpathSim(Package):
    """
    MODPATH Simulation File Package Class.

    Parameters
    ----------
    model : model object
        The model object (of type :class:`flopy.modflow.mf.Modflow`) to which
        this package will be added.
    extension : string
        Filename extension (default is 'mpsim')

    Attributes
    ----------
    heading : str
        Text string written to top of package input file.

    Methods
    -------

    See Also
    --------

    Notes
    -----

    Examples
    --------

    >>> import flopy
    >>> m = flopy.modpath.Modpath()
    >>> dis = flopy.modpath.ModpathSim(m)

    """

    def __init__(self, model, mp_name_file='mp.nam', mp_list_file='mp.list',
                 option_flags=None,
                 ref_time=0, ref_time_per_stp=None, stop_time=None,
                 group_name=None, group_placement=None,
                 release_times=None,
                 group_region=None, mask_nlay=None,
                 mask_layer=None, mask_1lay=None, face_ct=None,
                 ifaces=None, part_ct=None,
                 time_ct=1, release_time_incr=1, time_pts=None,
                 particle_cell_cnt=None,
                 cell_bd_ct=1, bud_loc=None, trace_id=1, stop_zone=1,
                 zone=1,
                 retard_fac=1.0, retard_fcCB=1.0, extension='mpsim'):
        # The former mutable default arguments are replaced with None
        # sentinels so a default list is never shared across instances.
        if option_flags is None:
            option_flags = [1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1]
        if ref_time_per_stp is None:
            ref_time_per_stp = [0, 1, 1.0]
        if group_name is None:
            group_name = ['group_1']
        if group_placement is None:
            group_placement = [[1, 1, 1, 0, 1, 1]]
        if release_times is None:
            release_times = [[1, 1]]
        if group_region is None:
            group_region = [[1, 1, 1, 1, 1, 1]]
        if mask_nlay is None:
            mask_nlay = [1]
        if mask_layer is None:
            mask_layer = [1]
        if mask_1lay is None:
            mask_1lay = [1]
        if face_ct is None:
            face_ct = [1]
        if ifaces is None:
            ifaces = [[6, 1, 1]]
        if part_ct is None:
            part_ct = [[1, 1, 1]]
        if time_pts is None:
            time_pts = [1]
        if particle_cell_cnt is None:
            particle_cell_cnt = [[2, 2, 2]]
        if bud_loc is None:
            bud_loc = [[1, 1, 1, 1]]

        # Call ancestor's init to set self.parent, extension, name and unit number
        Package.__init__(self, model, extension, 'MPSIM', 32)
        nrow, ncol, nlay, nper = self.parent.mf.nrow_ncol_nlay_nper
        self.heading1 = '# MPSIM for Modpath, generated by Flopy.'
        self.heading2 = '#'
        # NOTE(review): the mp_name_file/mp_list_file arguments are accepted
        # but not used -- both file names are derived from the model name.
        self.mp_name_file = '{}.{}'.format(model.name, 'mpnam')
        self.mp_list_file = '{}.{}'.format(model.name, 'mplst')
        # Map the 12 MPSIM option flags onto their documented MODPATH 6 names
        # so write_file() can test options by name instead of by position.
        options_list = ['SimulationType', 'TrackingDirection',
                        'WeakSinkOption', 'WeakSourceOption',
                        'ReferenceTimeOption', 'StopOption',
                        'ParticleGenerationOption', 'TimePointOption',
                        'BudgetOutputOption', 'ZoneArrayOption',
                        'RetardationOption',
                        'AdvectiveObservationsOption']
        self.option_flags = option_flags
        options_dict = dict(zip(options_list, option_flags))
        self.options_dict = options_dict
        # Output/auxiliary file names, all derived from the model name.
        self.endpoint_file = '{}.{}'.format(model.name, 'mpend')
        self.pathline_file = '{}.{}'.format(model.name, 'mppth')
        self.time_ser_file = '{}.{}'.format(model.name, 'mp.tim_ser')
        # Fixed: the extension used to start with a '.', producing a
        # double dot ('name..mp.advobs') in the advobs file name.
        self.advobs_file = '{}.{}'.format(model.name, 'mp.advobs')
        self.ref_time = ref_time
        self.ref_time_per_stp = ref_time_per_stp
        self.stop_time = stop_time
        self.group_ct = len(group_name)
        self.group_name = group_name
        self.group_placement = group_placement
        self.release_times = release_times
        self.group_region = group_region
        self.mask_nlay = mask_nlay
        self.mask_layer = mask_layer
        self.mask_1lay = mask_1lay
        self.face_ct = face_ct
        self.ifaces = ifaces
        self.part_ct = part_ct
        self.strt_file = '{}.{}'.format(model.name, 'loc')
        self.time_ct = time_ct
        self.release_time_incr = release_time_incr
        self.time_pts = time_pts
        self.particle_cell_cnt = particle_cell_cnt
        self.cell_bd_ct = cell_bd_ct
        self.bud_loc = bud_loc
        self.trace_file = '{}.{}'.format(model.name, 'trace_file.txt')
        self.trace_id = trace_id
        self.stop_zone = stop_zone
        self.zone = zone
        self.retard_fac = retard_fac
        self.retard_fcCB = retard_fcCB

        # self.mask_nlay = Util3d(model,(nlay,nrow,ncol),np.int,\
        # mask_nlay,name='mask_nlay',locat=self.unit_number[0])
        # self.mask_1lay = Util3d(model,(nlay,nrow,ncol),np.int,\
        # mask_1lay,name='mask_1lay',locat=self.unit_number[0])
        # self.stop_zone = Util3d(model,(nlay,nrow,ncol),np.int,\
        # stop_zone,name='stop_zone',locat=self.unit_number[0])
        # self.retard_fac = Util3d(model,(nlay,nrow,ncol),np.float32,\
        # retard_fac,name='retard_fac',locat=self.unit_number[0])
        # self.retard_fcCB = Util3d(model,(nlay,nrow,ncol),np.float32,\
        # retard_fcCB,name='retard_fcCB',locat=self.unit_number[0])

        self.parent.add_package(self)

    def write_file(self):
        """
        Write the package file.

        Item numbers and CamelCase variable names correspond to the
        MODPATH 6 input instructions.

        Returns
        -------
        None

        """
        nrow, ncol, nlay, nper = self.parent.mf.nrow_ncol_nlay_nper
        with open(self.fn_path, 'w') as f_sim:
            # item 0
            f_sim.write('#{0:s}\n#{1:s}\n'.format(self.heading1, self.heading2))
            # item 1
            f_sim.write('{0:s}\n'.format(self.mp_name_file))
            # item 2
            f_sim.write('{0:s}\n'.format(self.mp_list_file))
            # item 3
            for i in range(12):
                f_sim.write('{0:4d}'.format(self.option_flags[i]))
            f_sim.write('\n')
            # item 4
            f_sim.write('{0:s}\n'.format(self.endpoint_file))
            # item 5
            if self.options_dict['SimulationType'] == 2:
                f_sim.write('{0:s}\n'.format(self.pathline_file))
            # item 6
            if self.options_dict['SimulationType'] == 3:
                f_sim.write('{0:s}\n'.format(self.time_ser_file))
            # item 7
            # Fixed: 'self.option_dict' (a typo raising AttributeError) is
            # now 'self.options_dict'.
            if self.options_dict['AdvectiveObservationsOption'] == 2 and \
                    self.options_dict['SimulationType'] == 3:
                f_sim.write('{0:s}\n'.format(self.advobs_file))
            # item 8
            if self.options_dict['ReferenceTimeOption'] == 1:
                f_sim.write('{0:f}\n'.format(self.ref_time))
            # item 9
            if self.options_dict['ReferenceTimeOption'] == 2:
                Period, Step, TimeFraction = self.ref_time_per_stp
                f_sim.write(
                    '{0:d} {1:d} {2:f}\n'.format(Period, Step, TimeFraction))
            # item 10
            if self.options_dict['StopOption'] == 3:
                f_sim.write('{0:f}\n'.format(self.stop_time))

            if self.options_dict['ParticleGenerationOption'] == 1:
                # item 11
                f_sim.write('{0:d}\n'.format(self.group_ct))
                for i in range(self.group_ct):
                    # item 12
                    f_sim.write('{0:s}\n'.format(self.group_name[i]))
                    # item 13
                    Grid, GridCellRegionOption, PlacementOption, ReleaseStartTime, ReleaseOption, CHeadOption = \
                        self.group_placement[i]
                    f_sim.write(
                        '{0:d} {1:d} {2:d} {3:f} {4:d} {5:d}\n'.format(Grid,
                                                                       GridCellRegionOption,
                                                                       PlacementOption,
                                                                       ReleaseStartTime,
                                                                       ReleaseOption,
                                                                       CHeadOption))
                    # item 14
                    if ReleaseOption == 2:
                        ReleasePeriodLength, ReleaseEventCount = \
                            self.release_times[i]
                        f_sim.write('{0:f} {1:d}\n'.format(ReleasePeriodLength,
                                                           ReleaseEventCount))
                    # item 15 (region written one-based; stored zero-based)
                    if GridCellRegionOption == 1:
                        MinLayer, MinRow, MinColumn, MaxLayer, MaxRow, MaxColumn = \
                            self.group_region[i]
                        f_sim.write('{0:d} {1:d} {2:d} {3:d} {4:d} {5:d}\n'.format(
                            MinLayer + 1, MinRow + 1, MinColumn + 1,
                            MaxLayer + 1, MaxRow + 1, MaxColumn + 1))
                    # item 16
                    if GridCellRegionOption == 2:
                        f_sim.write(self.mask_nlay[i].get_file_entry())
                    # item 17
                    if GridCellRegionOption == 3:
                        # Fixed: '{0:s}' raised ValueError for the integer
                        # default; '{0}' formats both ints and strings.
                        f_sim.write('{0}\n'.format(self.mask_layer[i]))
                        # item 18
                        f_sim.write(self.mask_1lay[i].get_file_entry())
                    # item 19 and 20
                    if PlacementOption == 1:
                        f_sim.write('{0:d}\n'.format(self.face_ct[i]))
                        # item 20
                        for j in range(self.face_ct[i]):
                            IFace, ParticleRowCount, ParticleColumnCount = \
                                self.ifaces[i][j]
                            f_sim.write('{0:d} {1:d} {2:d} \n'.format(IFace,
                                                                      ParticleRowCount,
                                                                      ParticleColumnCount))
                    # item 21
                    elif PlacementOption == 2:
                        ParticleLayerCount, ParticleRowCount, ParticleColumnCount = \
                            self.particle_cell_cnt[i]
                        f_sim.write(
                            '{0:d} {1:d} {2:d} \n'.format(ParticleLayerCount,
                                                          ParticleRowCount,
                                                          ParticleColumnCount))

            # item 22
            if self.options_dict['ParticleGenerationOption'] == 2:
                f_sim.write('{0:s}\n'.format(self.strt_file))

            if self.options_dict['TimePointOption'] != 1:
                # item 23
                if self.options_dict['TimePointOption'] == 2 or \
                        self.options_dict['TimePointOption'] == 3:
                    f_sim.write('{0:d}\n'.format(self.time_ct))
                # item 24
                if self.options_dict['TimePointOption'] == 2:
                    f_sim.write('{0:f}\n'.format(self.release_time_incr))
                # item 25
                if self.options_dict['TimePointOption'] == 3:
                    for r in range(self.time_ct):
                        f_sim.write('{0:f}\n'.format(self.time_pts[r]))

            # Fixed: the original '!= 1 or != 2' was always True; the intent
            # (items 26-29 only apply for options 3 and 4) requires 'and'.
            if self.options_dict['BudgetOutputOption'] != 1 and \
                    self.options_dict['BudgetOutputOption'] != 2:
                # item 26
                if self.options_dict['BudgetOutputOption'] == 3:
                    f_sim.write('{0:d}\n'.format(self.cell_bd_ct))
                    # item 27 (locations written one-based; stored zero-based)
                    for k in range(self.cell_bd_ct):
                        Grid, Layer, Row, Column = self.bud_loc[k]
                        f_sim.write(
                            '{0:d} {1:d} {2:d} {3:d} \n'.format(Grid, Layer + 1,
                                                                Row + 1,
                                                                Column + 1))
                if self.options_dict['BudgetOutputOption'] == 4:
                    # item 28
                    f_sim.write('{0:s}\n'.format(self.trace_file))
                    # item 29
                    # Fixed: '{0:s}' raised ValueError for the integer
                    # default trace_id.
                    f_sim.write('{0}\n'.format(self.trace_id))

            if self.options_dict['ZoneArrayOption'] != 1:
                # item 30
                # Fixed: '{0:s}' raised ValueError for the integer default.
                f_sim.write('{0}\n'.format(self.stop_zone))
                # item 31
                # Fixed: item 31 is the zone array; the original wrote
                # self.stop_zone.get_file_entry(), which crashes because
                # stop_zone is the scalar written as item 30.
                f_sim.write(self.zone.get_file_entry())

            if self.options_dict['RetardationOption'] != 1:
                # item 32
                f_sim.write(self.retard_fac.get_file_entry())
                # item 33
                f_sim.write(self.retard_fcCB.get_file_entry())
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
class DeploymentsOperations(object):
"""DeploymentsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: The API version to use for this operation. Constant value: "2017-05-10".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-05-10"
self.config = config
    def delete(
            self, resource_group_name, deployment_name, custom_headers=None, raw=False, **operation_config):
        """Deletes a deployment from the deployment history.
        A template deployment that is currently running cannot be deleted.
        Deleting a template deployment removes the associated deployment
        operations. Deleting a template deployment does not affect the state of
        the resource group. This is an asynchronous operation that returns a
        status of 202 until the template deployment is successfully deleted.
        The Location response header contains the URI that is used to obtain
        the status of the process. While the process is running, a call to the
        URI in the Location header returns a status of 202. When the process
        finishes, the URI in the Location header returns a status of 204 on
        success. If the asynchronous request failed, the URI in the Location
        header returns an error-level status code.
        :param resource_group_name: The name of the resource group with the
         deployment to delete. The name is case insensitive.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment to delete.
        :type deployment_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :rtype:
         :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
         instance that returns None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Client-side correlation id for request tracing.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        def long_running_send():
            # Initial DELETE call that starts the asynchronous operation.
            request = self._client.delete(url, query_parameters)
            return self._client.send(request, header_parameters, **operation_config)

        def get_long_running_status(status_link, headers=None):
            # Poll the status URI the service returned (Location header).
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)

        def get_long_running_output(response):
            # 202 = deletion still in progress, 204 = deleted; anything else
            # is surfaced as a CloudError.
            if response.status_code not in [202, 204]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            if raw:
                client_raw_response = ClientRawResponse(None, response)
                return client_raw_response

        # raw=True bypasses the poller: issue the first call only and return
        # its (raw) outcome immediately.
        if raw:
            response = long_running_send()
            return get_long_running_output(response)

        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
def check_existence(
self, resource_group_name, deployment_name, custom_headers=None, raw=False, **operation_config):
"""Checks whether the deployment exists.
:param resource_group_name: The name of the resource group with the
deployment to check. The name is case insensitive.
:type resource_group_name: str
:param deployment_name: The name of the deployment to check.
:type deployment_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: bool
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern='^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.head(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [204, 404]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = (response.status_code == 204)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
    def create_or_update(
            self, resource_group_name, deployment_name, properties, custom_headers=None, raw=False, **operation_config):
        """Deploys resources to a resource group.
        You can provide the template and parameters directly in the request or
        link to JSON files.
        :param resource_group_name: The name of the resource group to deploy
         the resources to. The name is case insensitive. The resource group
         must already exist.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment.
        :type deployment_name: str
        :param properties: The deployment properties.
        :type properties: :class:`DeploymentProperties
         <azure.mgmt.resource.resources.v2017_05_10.models.DeploymentProperties>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :rtype:
         :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
         instance that returns :class:`DeploymentExtended
         <azure.mgmt.resource.resources.v2017_05_10.models.DeploymentExtended>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Wrap the bare properties in the Deployment envelope the API expects.
        parameters = models.Deployment(properties=properties)

        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Client-side correlation id for request tracing.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(parameters, 'Deployment')

        # Construct and send request
        def long_running_send():
            # Initial PUT call that starts the asynchronous deployment.
            request = self._client.put(url, query_parameters)
            return self._client.send(
                request, header_parameters, body_content, **operation_config)

        def get_long_running_status(status_link, headers=None):
            # Poll the status URI the service returned.
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)

        def get_long_running_output(response):
            # 200 (updated) and 201 (created) both carry a DeploymentExtended
            # payload; any other status is surfaced as a CloudError.
            if response.status_code not in [200, 201]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            deserialized = None

            if response.status_code == 200:
                deserialized = self._deserialize('DeploymentExtended', response)
            if response.status_code == 201:
                deserialized = self._deserialize('DeploymentExtended', response)

            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response

            return deserialized

        # raw=True bypasses the poller: issue the first call only and return
        # its (raw) outcome immediately.
        if raw:
            response = long_running_send()
            return get_long_running_output(response)

        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
def get(
self, resource_group_name, deployment_name, custom_headers=None, raw=False, **operation_config):
"""Gets a deployment.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param deployment_name: The name of the deployment to get.
:type deployment_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`DeploymentExtended
<azure.mgmt.resource.resources.v2017_05_10.models.DeploymentExtended>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern='^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DeploymentExtended', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def cancel(
self, resource_group_name, deployment_name, custom_headers=None, raw=False, **operation_config):
"""Cancels a currently running template deployment.
You can cancel a deployment only if the provisioningState is Accepted
or Running. After the deployment is canceled, the provisioningState is
set to Canceled. Canceling a template deployment stops the currently
running template deployment and leaves the resource group partially
deployed.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param deployment_name: The name of the deployment to cancel.
:type deployment_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/cancel'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern='^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
    def validate(
            self, resource_group_name, deployment_name, properties, custom_headers=None, raw=False, **operation_config):
        """Validates whether the specified template is syntactically correct
        and will be accepted by Azure Resource Manager.

        :param resource_group_name: The name of the resource group the
         template will be deployed to. The name is case insensitive.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment.
        :type deployment_name: str
        :param properties: The deployment properties.
        :type properties: :class:`DeploymentProperties
         <azure.mgmt.resource.resources.v2017_05_10.models.DeploymentProperties>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`DeploymentValidateResult
         <azure.mgmt.resource.resources.v2017_05_10.models.DeploymentValidateResult>`
         or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Wrap the caller-supplied properties in the Deployment envelope the
        # service expects as the request body.
        parameters = models.Deployment(properties=properties)
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/validate'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Unique id correlating this call in client and service logs.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(parameters, 'Deployment')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        # 400 is an expected outcome here: validation failures come back as a
        # DeploymentValidateResult describing the errors, not as a CloudError.
        if response.status_code not in [200, 400]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('DeploymentValidateResult', response)
        if response.status_code == 400:
            deserialized = self._deserialize('DeploymentValidateResult', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def export_template(
            self, resource_group_name, deployment_name, custom_headers=None, raw=False, **operation_config):
        """Exports the template used for specified deployment.

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment from which to get
         the template.
        :type deployment_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`DeploymentExportResult
         <azure.mgmt.resource.resources.v2017_05_10.models.DeploymentExportResult>`
         or :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/{deploymentName}/exportTemplate'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern='^[-\w\._\(\)]+$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Unique id correlating this call in client and service logs.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request (POST with no body for this operation).
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('DeploymentExportResult', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def list_by_resource_group(
            self, resource_group_name, filter=None, top=None, custom_headers=None, raw=False, **operation_config):
        """Get all the deployments for a resource group.

        :param resource_group_name: The name of the resource group with the
         deployments to get. The name is case insensitive.
        :type resource_group_name: str
        :param filter: The filter to apply on the operation. For example, you
         can use $filter=provisioningState eq '{state}'.
        :type filter: str
        :param top: The number of results to get. If null is passed, returns
         all deployments.
        :type top: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`DeploymentExtendedPaged
         <azure.mgmt.resource.resources.v2017_05_10.models.DeploymentExtendedPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            # First page: build the list URL and query string.  Later pages:
            # the service returns an absolute nextLink that already embeds
            # the query parameters, so it is used verbatim.
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.Resources/deployments/'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response
        # Deserialize response: the Paged collection pulls pages lazily
        # through internal_paging as the caller iterates.
        deserialized = models.DeploymentExtendedPaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.DeploymentExtendedPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
| |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Dependency imports
import numpy as np
from scipy import stats as sp_stats
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import samplers
from tensorflow_probability.python.internal import test_util
tfd = tfp.distributions
@test_util.test_all_tf_execution_regimes
class LaplaceTest(test_util.TestCase):
  """Tests for tfd.Laplace, checked against scipy.stats.laplace."""

  def testLaplaceShape(self):
    loc = tf.constant([3.0] * 5)
    scale = tf.constant(11.0)
    laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True)
    self.assertEqual(self.evaluate(laplace.batch_shape_tensor()), (5,))
    self.assertEqual(laplace.batch_shape, tf.TensorShape([5]))
    self.assertAllEqual(self.evaluate(laplace.event_shape_tensor()), [])
    self.assertEqual(laplace.event_shape, tf.TensorShape([]))

  def testLaplaceLogPDF(self):
    batch_size = 6
    loc = tf.constant([2.0] * batch_size)
    scale = tf.constant([3.0] * batch_size)
    loc_v = 2.0
    scale_v = 3.0
    x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float32)
    laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True)
    log_pdf = laplace.log_prob(x)
    self.assertEqual(log_pdf.shape, (6,))
    expected_log_pdf = sp_stats.laplace.logpdf(x, loc_v, scale=scale_v)
    self.assertAllClose(self.evaluate(log_pdf), expected_log_pdf)
    pdf = laplace.prob(x)
    self.assertEqual(pdf.shape, (6,))
    self.assertAllClose(self.evaluate(pdf), np.exp(expected_log_pdf))

  def testLaplaceLogPDFMultidimensional(self):
    batch_size = 6
    loc = tf.constant([[2.0, 4.0]] * batch_size)
    scale = tf.constant([[3.0, 4.0]] * batch_size)
    loc_v = np.array([2.0, 4.0])
    scale_v = np.array([3.0, 4.0])
    x = np.array([[2.5, 2.5, 4.0, 0.1, 1.0, 2.0]], dtype=np.float32).T
    laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True)
    log_pdf = laplace.log_prob(x)
    log_pdf_values = self.evaluate(log_pdf)
    self.assertEqual(log_pdf.shape, (6, 2))
    pdf = laplace.prob(x)
    pdf_values = self.evaluate(pdf)
    self.assertEqual(pdf.shape, (6, 2))
    expected_log_pdf = sp_stats.laplace.logpdf(x, loc_v, scale=scale_v)
    self.assertAllClose(log_pdf_values, expected_log_pdf)
    self.assertAllClose(pdf_values, np.exp(expected_log_pdf))

  def testLaplaceLogPDFMultidimensionalBroadcasting(self):
    # Same as the multidimensional case but with a scalar scale that must
    # broadcast against the (6, 2) loc.
    batch_size = 6
    loc = tf.constant([[2.0, 4.0]] * batch_size)
    scale = tf.constant(3.0)
    loc_v = np.array([2.0, 4.0])
    scale_v = 3.0
    x = np.array([[2.5, 2.5, 4.0, 0.1, 1.0, 2.0]], dtype=np.float32).T
    laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True)
    log_pdf = laplace.log_prob(x)
    log_pdf_values = self.evaluate(log_pdf)
    self.assertEqual(log_pdf.shape, (6, 2))
    pdf = laplace.prob(x)
    pdf_values = self.evaluate(pdf)
    self.assertEqual(pdf.shape, (6, 2))
    expected_log_pdf = sp_stats.laplace.logpdf(x, loc_v, scale=scale_v)
    self.assertAllClose(log_pdf_values, expected_log_pdf)
    self.assertAllClose(pdf_values, np.exp(expected_log_pdf))

  def testLaplaceCDF(self):
    batch_size = 6
    loc = tf.constant([2.0] * batch_size)
    scale = tf.constant([3.0] * batch_size)
    loc_v = 2.0
    scale_v = 3.0
    x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float32)
    laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True)
    cdf = laplace.cdf(x)
    self.assertEqual(cdf.shape, (6,))
    expected_cdf = sp_stats.laplace.cdf(x, loc_v, scale=scale_v)
    self.assertAllClose(self.evaluate(cdf), expected_cdf)

  def testLaplaceLogCDF(self):
    batch_size = 6
    loc = tf.constant([2.0] * batch_size)
    scale = tf.constant([3.0] * batch_size)
    loc_v = 2.0
    scale_v = 3.0
    x = np.array([-2.5, 2.5, -4.0, 0.1, 1.0, 2.0], dtype=np.float32)
    laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True)
    cdf = laplace.log_cdf(x)
    self.assertEqual(cdf.shape, (6,))
    expected_cdf = sp_stats.laplace.logcdf(x, loc_v, scale=scale_v)
    self.assertAllClose(self.evaluate(cdf), expected_cdf)

  def testLaplaceQuantile(self):
    # Endpoints 0 and 1 must map to -inf/+inf; interior points round-trip
    # through the cdf.
    qs = self.evaluate(
        tf.concat(
            [[0., 1],
             samplers.uniform([10], minval=.1, maxval=.9,
                              seed=test_util.test_seed())],
            axis=0))
    d = tfd.Laplace(loc=1., scale=1.3, validate_args=True)
    vals = d.quantile(qs)
    self.assertAllClose([-np.inf, np.inf], vals[:2])
    self.assertAllClose(qs[2:], d.cdf(vals[2:]))

  def testLaplaceLogSurvivalFunction(self):
    batch_size = 6
    loc = tf.constant([2.0] * batch_size)
    scale = tf.constant([3.0] * batch_size)
    loc_v = 2.0
    scale_v = 3.0
    x = np.array([-2.5, 2.5, -4.0, 0.1, 1.0, 2.0], dtype=np.float32)
    laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True)
    sf = laplace.log_survival_function(x)
    self.assertEqual(sf.shape, (6,))
    expected_sf = sp_stats.laplace.logsf(x, loc_v, scale=scale_v)
    self.assertAllClose(self.evaluate(sf), expected_sf)

  def testLaplaceMean(self):
    loc_v = np.array([1.0, 3.0, 2.5])
    scale_v = np.array([1.0, 4.0, 5.0])
    laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True)
    self.assertEqual(laplace.mean().shape, (3,))
    expected_means = sp_stats.laplace.mean(loc_v, scale=scale_v)
    self.assertAllClose(self.evaluate(laplace.mean()), expected_means)

  def testLaplaceMode(self):
    loc_v = np.array([0.5, 3.0, 2.5])
    scale_v = np.array([1.0, 4.0, 5.0])
    laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True)
    self.assertEqual(laplace.mode().shape, (3,))
    self.assertAllClose(self.evaluate(laplace.mode()), loc_v)

  def testLaplaceVariance(self):
    loc_v = np.array([1.0, 3.0, 2.5])
    scale_v = np.array([1.0, 4.0, 5.0])
    laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True)
    self.assertEqual(laplace.variance().shape, (3,))
    expected_variances = sp_stats.laplace.var(loc_v, scale=scale_v)
    self.assertAllClose(self.evaluate(laplace.variance()), expected_variances)

  def testLaplaceStd(self):
    loc_v = np.array([1.0, 3.0, 2.5])
    scale_v = np.array([1.0, 4.0, 5.0])
    laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True)
    self.assertEqual(laplace.stddev().shape, (3,))
    expected_stddev = sp_stats.laplace.std(loc_v, scale=scale_v)
    self.assertAllClose(self.evaluate(laplace.stddev()), expected_stddev)

  def testLaplaceEntropy(self):
    loc_v = np.array([1.0, 3.0, 2.5])
    scale_v = np.array([1.0, 4.0, 5.0])
    laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True)
    self.assertEqual(laplace.entropy().shape, (3,))
    expected_entropy = sp_stats.laplace.entropy(loc_v, scale=scale_v)
    self.assertAllClose(self.evaluate(laplace.entropy()), expected_entropy)

  def testLaplaceSample(self):
    loc_v = 4.0
    scale_v = 3.0
    loc = tf.constant(loc_v)
    scale = tf.constant(scale_v)
    n = 100000
    laplace = tfd.Laplace(loc=loc, scale=scale, validate_args=True)
    samples = laplace.sample(n, seed=test_util.test_seed())
    sample_values = self.evaluate(samples)
    self.assertEqual(samples.shape, (n,))
    self.assertEqual(sample_values.shape, (n,))
    self.assertAllClose(
        sample_values.mean(),
        sp_stats.laplace.mean(loc_v, scale=scale_v),
        rtol=0.05,
        atol=0.)
    self.assertAllClose(
        sample_values.var(),
        sp_stats.laplace.var(loc_v, scale=scale_v),
        rtol=0.05,
        atol=0.)
    self.assertTrue(self._kstest(loc_v, scale_v, sample_values))

  @test_util.numpy_disable_gradient_test
  def testLaplaceFullyReparameterized(self):
    # Gradients of samples w.r.t. both parameters must exist
    # (reparameterized sampling).
    loc = tf.constant(4.0)
    scale = tf.constant(3.0)
    _, [grad_loc, grad_scale] = tfp.math.value_and_gradient(
        lambda l, s: tfd.Laplace(loc=l, scale=s, validate_args=True).sample(  # pylint: disable=g-long-lambda
            100, seed=test_util.test_seed()), [loc, scale])
    self.assertIsNotNone(grad_loc)
    self.assertIsNotNone(grad_scale)

  def testLaplaceSampleMultiDimensional(self):
    loc_v = np.array([np.arange(1, 101, dtype=np.float32)])  # 1 x 100
    scale_v = np.array([np.arange(1, 11, dtype=np.float32)]).T  # 10 x 1
    laplace = tfd.Laplace(loc=loc_v, scale=scale_v, validate_args=True)
    n = 10000
    samples = laplace.sample(n, seed=test_util.test_seed())
    sample_values = self.evaluate(samples)
    self.assertEqual(samples.shape, (n, 10, 100))
    self.assertEqual(sample_values.shape, (n, 10, 100))
    zeros = np.zeros_like(loc_v + scale_v)  # 10 x 100
    loc_bc = loc_v + zeros
    scale_bc = scale_v + zeros
    self.assertAllClose(
        sample_values.mean(axis=0),
        sp_stats.laplace.mean(loc_bc, scale=scale_bc),
        rtol=0.35,
        atol=0.)
    self.assertAllClose(
        sample_values.var(axis=0),
        sp_stats.laplace.var(loc_bc, scale=scale_bc),
        rtol=0.10,
        atol=0.)
    fails = 0
    trials = 0
    for ai, a in enumerate(np.reshape(loc_v, [-1])):
      for bi, b in enumerate(np.reshape(scale_v, [-1])):
        s = sample_values[:, bi, ai]
        trials += 1
        fails += 0 if self._kstest(a, b, s) else 1
    self.assertLess(fails, trials * 0.03)

  def _kstest(self, loc, scale, samples):
    # Uses the Kolmogorov-Smirnov test for goodness of fit.
    ks, _ = sp_stats.kstest(samples, sp_stats.laplace(loc, scale=scale).cdf)
    # Return True when the test passes.
    return ks < 0.02

  def testLaplacePdfOfSampleMultiDims(self):
    laplace = tfd.Laplace(loc=[7., 11.], scale=[[5.], [6.]], validate_args=True)
    num = 50000
    samples = laplace.sample(num, seed=test_util.test_seed())
    pdfs = laplace.prob(samples)
    sample_vals, pdf_vals = self.evaluate([samples, pdfs])
    self.assertEqual(samples.shape, (num, 2, 2))
    self.assertEqual(pdfs.shape, (num, 2, 2))
    self._assertIntegral(sample_vals[:, 0, 0], pdf_vals[:, 0, 0], err=0.02)
    self._assertIntegral(sample_vals[:, 0, 1], pdf_vals[:, 0, 1], err=0.02)
    self._assertIntegral(sample_vals[:, 1, 0], pdf_vals[:, 1, 0], err=0.02)
    self._assertIntegral(sample_vals[:, 1, 1], pdf_vals[:, 1, 1], err=0.02)
    self.assertAllClose(
        sp_stats.laplace.mean(
            [[7., 11.], [7., 11.]], scale=np.array([[5., 5.], [6., 6.]])),
        sample_vals.mean(axis=0),
        rtol=0.05,
        atol=0.)
    self.assertAllClose(
        sp_stats.laplace.var([[7., 11.], [7., 11.]],
                             scale=np.array([[5., 5.], [6., 6.]])),
        sample_vals.var(axis=0),
        rtol=0.05,
        atol=0.)

  def _assertIntegral(self, sample_vals, pdf_vals, err=1e-3):
    # Trapezoid-rule integral of the empirical pdf; should be ~1.
    s_p = zip(sample_vals, pdf_vals)
    prev = (0, 0)
    total = 0
    for k in sorted(s_p, key=lambda x: x[0]):
      pair_pdf = (k[1] + prev[1]) / 2
      total += (k[0] - prev[0]) * pair_pdf
      prev = k
    self.assertNear(1., total, err=err)

  def testLaplaceNonPositiveInitializationParamsRaises(self):
    loc_v = tf.constant(0.0, name='loc')
    scale_v = tf.constant(-1.0, name='scale')
    with self.assertRaisesOpError('Argument `scale` must be positive.'):
      laplace = tfd.Laplace(
          loc=loc_v, scale=scale_v, validate_args=True)
      self.evaluate(laplace.mean())
    loc_v = tf.constant(1.0, name='loc')
    scale_v = tf.constant(0.0, name='scale')
    with self.assertRaisesOpError('Argument `scale` must be positive.'):
      laplace = tfd.Laplace(
          loc=loc_v, scale=scale_v, validate_args=True)
      self.evaluate(laplace.mean())
    scale = tf.Variable([1., 2., -3.])
    self.evaluate(scale.initializer)
    with self.assertRaisesOpError('Argument `scale` must be positive.'):
      d = tfd.Laplace(loc=0, scale=scale, validate_args=True)
      self.evaluate(d.sample(seed=test_util.test_seed()))

  def testLaplaceLaplaceKL(self):
    batch_size = 6
    event_size = 3
    a_loc = np.array([[0.5] * event_size] * batch_size, dtype=np.float32)
    a_scale = np.array([[0.1] * event_size] * batch_size, dtype=np.float32)
    b_loc = np.array([[0.4] * event_size] * batch_size, dtype=np.float32)
    b_scale = np.array([[0.2] * event_size] * batch_size, dtype=np.float32)
    a = tfd.Laplace(loc=a_loc, scale=a_scale, validate_args=True)
    b = tfd.Laplace(loc=b_loc, scale=b_scale, validate_args=True)
    # Closed-form KL(a || b) for two Laplace distributions.
    distance = tf.abs(a_loc - b_loc)
    ratio = a_scale / b_scale
    true_kl = (-tf.math.log(ratio) - 1 + distance / b_scale +
               ratio * tf.exp(-distance / a_scale))
    kl = tfd.kl_divergence(a, b)
    x = a.sample(int(1e4), seed=test_util.test_seed())
    kl_sample = tf.reduce_mean(a.log_prob(x) - b.log_prob(x), axis=0)
    true_kl_, kl_, kl_sample_ = self.evaluate([true_kl, kl, kl_sample])
    self.assertAllClose(true_kl_, kl_, atol=1e-5, rtol=1e-5)
    self.assertAllClose(true_kl_, kl_sample_, atol=0., rtol=1e-1)
    zero_kl = tfd.kl_divergence(a, a)
    true_zero_kl_, zero_kl_ = self.evaluate([tf.zeros_like(true_kl), zero_kl])
    self.assertAllEqual(true_zero_kl_, zero_kl_)

  @test_util.tf_tape_safety_test
  def testGradientThroughParams(self):
    loc = tf.Variable([-5., 0., 5.])
    scale = tf.Variable(2.)
    d = tfd.Laplace(loc=loc, scale=scale, validate_args=True)
    with tf.GradientTape() as tape:
      loss = -d.log_prob([1., 2., 3.])
    grad = tape.gradient(loss, d.trainable_variables)
    self.assertLen(grad, 2)
    self.assertAllNotNone(grad)

  def testAssertsPositiveScaleAfterMutation(self):
    scale = tf.Variable([1., 2., 3.])
    d = tfd.Laplace(loc=0., scale=scale, validate_args=True)
    self.evaluate([v.initializer for v in d.variables])
    with self.assertRaisesOpError('Argument `scale` must be positive.'):
      with tf.control_dependencies([scale.assign([1., 2., -3.])]):
        self.evaluate(tfd.Laplace(loc=0., scale=1.).kl_divergence(d))

  def testAssertParamsAreFloats(self):
    loc = tf.convert_to_tensor(0, dtype=tf.int32)
    scale = tf.convert_to_tensor(1, dtype=tf.int32)
    with self.assertRaisesRegexp(ValueError, 'Expected floating point'):
      tfd.Laplace(loc=loc, scale=scale)
# Entry point when the test file is run directly.
if __name__ == '__main__':
  test_util.main()
| |
print('Importing command definitions...')
from net.canarymod import Canary
from net.canarymod import LineTracer
from net.canarymod.api.world.blocks import BlockType
from net.canarymod.api.world.effects import Particle
from net.canarymod.api import GameMode
from net.canarymod.api.world.position import Location
from net.canarymod.api.world.position import Position
from net.canarymod.commandsys import Command, CommandListener, CanaryCommand
from net.canarymod.chat import MessageReceiver
from net.canarymod.plugin import Priority, PluginListener
from net.canarymod.hook import Dispatcher
from time import *
from random import *
from math import *
# Server and default-world handles used by all the helper functions below.
SERVER = Canary.getServer()
WORLD = SERVER.getDefaultWorld()
# Day-cycle times, in world ticks; pass to time().
MORNING = 2000
NOON = 6000
EVENING = 14000
NIGHT = 18000
#full list of BlockTypes available in JavaDocs on canarymod.net
AIR = BlockType.Air
STONE = BlockType.Stone
GRASS = BlockType.Grass
DIRT = BlockType.Dirt
COBBLESTONE = BlockType.Cobble
WOOD_PLANKS = BlockType.OakWood
SAPLING = BlockType.OakSapling
BEDROCK = BlockType.Bedrock
WATER_FLOWING = BlockType.WaterFlowing
# WATER/LAVA alias the flowing variants, which is what setblock usually wants.
WATER = WATER_FLOWING
WATER_STATIONARY = BlockType.Water
LAVA_FLOWING = BlockType.LavaFlowing
LAVA = LAVA_FLOWING
LAVA_STATIONARY = BlockType.Lava
SAND = BlockType.Sand
GRAVEL = BlockType.Gravel
GOLD_ORE = BlockType.GoldOre
IRON_ORE = BlockType.IronOre
COAL_ORE = BlockType.CoalOre
WOOD = BlockType.OakLog
LEAVES = BlockType.OakLeaves
GLASS = BlockType.Glass
LAPIS_LAZULI_ORE = BlockType.LapisOre
LAPIS_LAZULI_BLOCK = BlockType.LapisBlock
SANDSTONE = BlockType.Sandstone
BED = BlockType.Bed
COBWEB = BlockType.Web
GRASS_TALL = BlockType.TallGrass
WOOL = BlockType.WhiteWool
FLOWER_YELLOW = BlockType.Dandelion
FLOWER_CYAN = BlockType.BlueOrchid
MUSHROOM_BROWN = BlockType.BrownMushroom
MUSHROOM_RED = BlockType.RedMushroom
GOLD_BLOCK = BlockType.GoldBlock
IRON_BLOCK = BlockType.IronBlock
STONE_SLAB_DOUBLE = BlockType.DoubleStoneSlab
STONE_SLAB = BlockType.StoneSlab
BRICK_BLOCK = BlockType.BrickBlock
TNT = BlockType.TNT
BOOKSHELF = BlockType.Bookshelf
MOSS_STONE = BlockType.MossyCobble
OBSIDIAN = BlockType.Obsidian
TORCH = BlockType.Torch
FIRE = BlockType.FireBlock
STAIRS_WOOD = BlockType.OakStairs
CHEST = BlockType.Chest
DIAMOND_ORE = BlockType.DiamondOre
DIAMOND_BLOCK = BlockType.DiamondBlock
CRAFTING_TABLE = BlockType.Workbench
FARMLAND = BlockType.Farmland
FURNACE_INACTIVE = BlockType.Furnace
FURNACE_ACTIVE = BlockType.BurningFurnace
DOOR_WOOD = BlockType.WoodenDoor
LADDER = BlockType.Ladder
STAIRS_COBBLESTONE = BlockType.StoneStairs
DOOR_IRON = BlockType.IronDoor
REDSTONE_ORE = BlockType.RedstoneOre
SNOW = BlockType.Snow
ICE = BlockType.Ice
SNOW_BLOCK = BlockType.SnowBlock
CACTUS = BlockType.Cactus
CLAY = BlockType.Clay
SUGAR_CANE = BlockType.Reed
FENCE = BlockType.Fence
GLOWSTONE_BLOCK = BlockType.GlowStone
STONE_BRICK = BlockType.StoneBrick
GLASS_PANE = BlockType.GlassPane
MELON = BlockType.Melon
FENCE_GATE = BlockType.FenceGate
def pos(positionable):
    """Return the Position of any positionable game object."""
    where = positionable.getPosition()
    return where
def parseargswithpos(args, kwargs, asint=True, ledger=None):
    """Parse the positional-argument convention shared by the world helpers.

    args may start with either a Position object or three x/y/z values.
    Remaining positional args and kwargs are mapped through ledger, a dict
    of result_key -> [kwarg_name, positional_offset, default].

    Returns a dict with 'x', 'y', 'z' plus one entry per ledger key.
    """
    # None sentinel instead of a shared mutable default dict.
    if ledger is None:
        ledger = {}
    results = {}
    if isinstance(args[0], Position):
        base = 1
        if asint:
            pos = (args[0].getBlockX(), args[0].getBlockY(), args[0].getBlockZ())
        else:
            pos = (args[0].getX(), args[0].getY(), args[0].getZ())
    else:
        base = 3
        tr = [args[0], args[1], args[2]]
        if asint:
            pos = (int(tr[0]), int(tr[1]), int(tr[2]))
        else:
            # Bug fix: 'pos' was never assigned on this path, so calling with
            # scalar coordinates and asint=False raised NameError.  Mirror the
            # Position branch, which yields float coordinates.
            pos = (float(tr[0]), float(tr[1]), float(tr[2]))
    results['x'] = pos[0]
    results['y'] = pos[1]
    results['z'] = pos[2]
    # items() works on both Jython 2 and Python 3 (iteritems() is 2-only).
    for k, v in ledger.items():
        results[k] = kwargs.get(v[0], None)
        if results[k] is None:
            if len(args) > base + v[1]:
                results[k] = args[base + v[1]]
            else:
                results[k] = v[2]
    return results
def getplayer(name):
    """Look up a connected player by name."""
    player = SERVER.getPlayer(name)
    return player
def randomplayer():
    """Pick a uniformly random online player."""
    names = SERVER.getPlayerNameList()
    picked = choice(names)
    return getplayer(picked)
def yell(message):
    """Broadcast a chat message to every player on the server."""
    SERVER.broadcastMessage(message)
def time(time):
    """Set the world clock, in ticks (see MORNING/NOON/EVENING/NIGHT)."""
    WORLD.setTime(time)
def weather(rainsnow, thunder):
    """Set the weather.

    rainsnow -- True to start rain/snow, False to stop it.
    thunder  -- True to start thunder, False to stop it.
    """
    WORLD.setRaining(rainsnow)
    # Bug fix: the Java API method is camelCase setThundering (matching
    # setRaining above); lowercase 'setthundering' raised AttributeError.
    WORLD.setThundering(thunder)
def explosion(*args, **kwargs):
    """Create an explosion at (x, y, z); optional 'power' defaults to 8."""
    opts = parseargswithpos(args, kwargs, ledger={'power': ['power', 0, 8]})
    WORLD.makeExplosion(None, opts['x'], opts['y'], opts['z'], opts['power'], True)
def teleport(*args, **kwargs):
    """Teleport a player (default 'GameStartSchool') to (x, y, z)."""
    opts = parseargswithpos(args, kwargs, ledger={'whom': ['whom', 0, 'GameStartSchool']})
    target = getplayer(opts['whom'])
    target.teleportTo(opts['x'], opts['y'], opts['z'])
def setblock(*args, **kwargs):
    """Place a block of 'type' (default cobblestone) at (x, y, z)."""
    opts = parseargswithpos(args, kwargs, ledger={'type': ['type', 0, BlockType.Cobble]})
    WORLD.setBlockAt(opts['x'], opts['y'], opts['z'], opts['type'])
def cube(*args, **kwargs):
    """Fill a solid cube anchored at (x, y, z); edge length capped at 12."""
    opts = parseargswithpos(args, kwargs, ledger={
        'type': ['type', 0, BlockType.Cobble],
        'size': ['size', 1, 4]})
    edge = min(opts['size'], 12)
    for dx in range(edge):
        for dy in range(edge):
            for dz in range(edge):
                setblock(dx + opts['x'], dy + opts['y'], dz + opts['z'], opts['type'])
def bolt(*args, **kwargs):
    """Strike lightning at (x, y, z)."""
    opts = parseargswithpos(args, kwargs)
    WORLD.makeLightningBolt(opts['x'], opts['y'], opts['z'])
def bless(*args, **kwargs):
    """Spawn a particle effect at (x, y, z) (default: redstone dust)."""
    opts = parseargswithpos(args, kwargs, ledger={
        'type': ['type', 0, Particle.Type.REDSTONE],
        'vx': ['vx', 1, 1],
        'vy': ['vy', 2, 1],
        'vz': ['vz', 3, 1],
        'sp': ['sp', 4, 100],
        'q': ['q', 5, 100]})
    effect = Particle(opts['x'], opts['y'], opts['z'], opts['vx'], opts['vy'],
                      opts['vz'], opts['sp'], opts['q'], opts['type'])
    WORLD.spawnParticle(effect)
def lookingat(player):
    """Return the block the player's crosshair is pointing at."""
    tracer = LineTracer(player)
    return tracer.getTargetBlock()
class ChatCommand(Command):
    """Pure-data Command implementation configured via constructor kwargs.

    CanaryMod's Command interface is a set of accessor methods; this class
    stores each value at construction time and returns it on request.
    """
    def __init__(self, names, min=2, max=2, permissions=None, toolTip="", description="", parent="", helpLookup="", searchTerms=None, version=1):
        # None sentinels for permissions/searchTerms: the original used
        # mutable list defaults, which are shared across every instance
        # (classic mutable-default pitfall).
        self.var_names = names
        self.var_min = min
        self.var_max = max
        self.var_permissions = [""] if permissions is None else permissions
        self.var_toolTip = toolTip
        self.var_description = description
        self.var_parent = parent
        self.var_helpLookup = helpLookup
        self.var_searchTerms = [] if searchTerms is None else searchTerms
        self.var_version = version
    # Accessors required by the Command interface.
    def aliases(self): return self.var_names
    def permissions(self): return self.var_permissions
    def toolTip(self): return self.var_toolTip
    def description(self): return self.var_description
    def parent(self): return self.var_parent
    def helpLookup(self): return self.var_helpLookup
    def min(self): return self.var_min
    def max(self): return self.var_max
    def searchTerms(self): return self.var_searchTerms
    def version(self): return self.var_version
class CanaryChatCommand(CanaryCommand):
    """Adapter binding a plain Python callback to a CanaryCommand."""
    def __init__(self, chatcommand, owner, execfunc):
        # Bug fix: the original wrote super(CanaryCommand, self), which
        # resolves the MRO *after* CanaryCommand and skips CanaryCommand's
        # own __init__.  super()'s first argument must be the class being
        # defined.
        super(CanaryChatCommand, self).__init__(chatcommand, owner, None)
        self.execfunc = execfunc
    def execute(self, caller, parameters):
        # Invoked by the command system; forward to the stored callback.
        self.execfunc(caller, parameters)
class PluginEventListener(PluginListener):
    # Marker listener: registerhook() needs a PluginListener instance to
    # attach dispatchers to, but it carries no behavior of its own.
    pass
class EventDispatcher(Dispatcher):
    # Adapts CanaryMod's Dispatcher to an arbitrary Python callable.
    # NOTE(review): does not call Dispatcher.__init__ -- appears to work
    # under Jython, but confirm the base class needs no initialization.
    def __init__(self, execfunc):
        self.execfunc = execfunc
    def execute(self, listener, hook):
        # Invoked by the hook system; forward (listener, hook) to the callable.
        self.execfunc(listener, hook)
def registercommand(name, min, max, execfunc):
    """Register chat command /name (min/max argument counts) handled by execfunc(caller, params)."""
    # Use like this:
    # >>> def functiontest(caller, params):
    # ... yell(params[1])
    # >>> registercommand("test", 2, 2, functiontest)
    Canary.commands().registerCommand(CanaryChatCommand(ChatCommand(names=[name], min=min, max=max), SERVER, execfunc), SERVER, True)
def registerhook(hookCls, execfunc):
    """Subscribe execfunc(listener, hook) to every fired hook of type hookCls."""
    # Use like this:
    # >>> from net.canarymod.hook.player import BlockDestroyHook
    # >>> def hookfunc(listener, hook):
    # ... yell(str(hook.getBlock().getType()))
    # >>> registerhook(BlockDestroyHook,hookfunc)
    Canary.hooks().registerHook(PluginEventListener(), Canary.manager().getPlugin('CanaryConsole'), hookCls, EventDispatcher(execfunc), Priority.NORMAL)
| |
#!/usr/bin/env python
import os
import glob
from optparse import OptionParser
import fnmatch
__version__ = '0.7.1'
class Line:
def __init__(self, dir):
self.raw_select_rule = set()
self.raw_ignore_rule = set()
self.final_file = set()
self.final_file_dic = {}
self.line_count = 0
self.file_count = 0
self.start_dir = dir
self.rule_on_file = {}
def get_line_select(self):
if not os.path.exists('line.select'):
self.raw_select_rule.add('*')
return
for line in open('line.select'):
if line[0] == '#':
continue
if line[len(line) - 1] == '/':
line = line[:len(line) - 1]
if line[0] == '!':
self.raw_ignore_rule.add(line[1:].strip())
else:
self.raw_select_rule.add(line.strip())
def find_files(self, path, mode):
file_and_folder = glob.glob(path + os.sep + '*')
folders = filter(self.folder_filter, file_and_folder)
files = filter(self.file_filter, file_and_folder)
if mode == 'filtrate':
files = filter(self.select_filter, files)
for file in files:
self.add_file(file)
for folder in folders:
if mode == 'filtrate':
if not self.select_filter(folder):
if not self.ignore_filter(folder):
self.find_files(folder, 'filtrate')
else:
if not self.ignore_filter(folder):
self.find_files(folder, 'all')
else:
if not self.ignore_filter(folder):
self.find_files(folder, 'all')
def select_filter(self, path):
should_select = False
for select_rule in self.raw_select_rule:
if fnmatch.fnmatch(path, self.start_dir + os.sep + select_rule):
should_select = True
break
return should_select
def ignore_filter(self, path):
should_ignore = False
for ignore_rule in self.raw_ignore_rule:
if fnmatch.fnmatch(path, self.start_dir + os.sep + ignore_rule):
should_ignore = True
break
return should_ignore
def file_filter(self, path):
return os.path.isfile(path)
def folder_filter(self, path):
return os.path.isdir(path)
def add_file(self, file):
if not self.ignore_filter(file):
self.final_file.add(os.path.abspath(file))
self.final_file_dic[file] = self.analyze_file(file)
def analyze_file(self, file):
file_line_count = self.read_line_count(file)
self.line_count += file_line_count
self.file_count += 1
return file_line_count
def read_line_count(self, file_name):
count = 0
for file_line in open(file_name):
count += 1
return count
def show_result(self):
print('file count: %d' % self.file_count)
print('line count: %d' % self.line_count)
def show_detail_result(self):
sorted_list = sorted(self.final_file_dic.items(), key=lambda d: d[0], reverse=False)
for one_file in sorted_list:
file_name, file_lines = one_file
file_name = file_name[len(self.start_dir) + 1:]
print('%-50s %10s' % (file_name, str(file_lines)))
self.show_result()
def get_rule_on_file(self):
file_path = self.start_dir + os.sep + 'line.select'
if not os.path.exists(file_path):
return
i = 0
for line in open(file_path):
self.rule_on_file[i] = line
i += 1
def show_rule_on_file(self):
    """Print the rules found in 'line.select', or a notice when none exist."""
    self.get_rule_on_file()
    if not self.rule_on_file:
        print("Not found 'line.select' or no rules in it")
        return
    print("Here are the rules in 'line.select' under " + self.start_dir + os.sep + ":")
    for index in range(0, len(self.rule_on_file)):
        if index in self.rule_on_file:
            print(self.rule_on_file.get(index).strip())
def add_rule_to_file(self, rule):
    """Append *rule* to 'line.select' (creating the file if needed),
    skipping exact duplicates.

    Fixes over the original:
    - the append-mode file handle was opened but never closed; both write
      paths now use ``with`` blocks.
    - duplicate detection compared raw stored lines (which keep their
      trailing newline) against the bare rule string, so duplicates were
      silently appended; compare stripped lines instead.
    """
    file_path = self.start_dir + os.sep + 'line.select'
    if not os.path.exists(file_path):
        with open(file_path, "w") as rule_file:
            rule_file.write(rule)
        print('Add successfully')
        return
    self.get_rule_on_file()
    for existing_line in self.rule_on_file.values():
        if existing_line.strip() == rule:
            print('This rule already exists')
            return
    with open(file_path, "a") as rule_file:
        rule_file.write('\n' + rule)
    print('Add successfully')
def delete_rule_from_file(self, rule):
    """Remove *rule* from 'line.select' if present and rewrite the file.

    Prints a status message in every case (missing file, missing rule,
    or successful deletion).

    Fixes over the original:
    - ``dict.has_key`` is Python-2-only; use the ``in`` operator.
    - rule matching compared raw lines (with trailing newline) against the
      bare rule string, so only a final newline-less line could match;
      compare stripped lines instead.
    - the rewrite now uses a ``with`` block so the handle is always closed.
    """
    file_path = self.start_dir + os.sep + 'line.select'
    if not os.path.exists(file_path):
        print("'line.select' does not exist")
        return
    self.get_rule_on_file()
    target_index = None
    for index, line in self.rule_on_file.items():
        if line.strip() == rule:
            target_index = index
            break
    if target_index is None:
        print("This rule doesn't exist")
        return
    del self.rule_on_file[target_index]
    with open(file_path, "w") as rule_file:
        # Indices are no longer contiguous after the delete, hence the
        # range over the original length and the membership check.
        for index in range(0, len(self.rule_on_file) + 1):
            if index in self.rule_on_file:
                rule_file.write(self.rule_on_file.get(index))
    print('Delete successfully')
def _main():
    """Command-line entry point: parse options and run the line counter."""
    parser = OptionParser(
        usage="%prog [options] [args]",
        version="%prog " + __version__,
        description="Analyze the amount of lines and files under current "
                    "directory following the rules in 'line.select' or "
                    "analyze all files if 'line.select' doesn't exist")
    parser.add_option("-d", "--detail", action="store_true", dest="d_flag",
                      default=False, help="show more detail in the result")
    parser.add_option("-s", "--show", action="store_true", dest="show_rule_flag",
                      default=False, help="show rules in 'line.select'")
    options, _args = parser.parse_args()

    counter = Line(os.getcwd())
    if options.show_rule_flag:
        # Only display the configured rules; no scan.
        counter.show_rule_on_file()
    else:
        print('Search in ' + os.getcwd() + os.sep)
        counter.get_line_select()
        counter.find_files(os.getcwd(), 'filtrate')
        if options.d_flag:
            counter.show_detail_result()
        else:
            counter.show_result()


if __name__ == '__main__':
    _main()
| |
#!/usr/bin/env python3
"""
Manage activities
Copyright (C) 2020 Anders Lowinger, anders@abundo.se
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
import queue
import threading
import random
import requests
from orderedattrdict import AttrDict
import PyQt5.QtCore as QtCore
from logger import log
from settings import sett
import util
import lib.db as db
class ActivityMgr(QtCore.QObject):
    """Keeps the local list of activities and syncs it with the server.

    Public methods are intended to be called from the GUI thread; the
    actual sync work runs in a dedicated background thread that is fed
    commands through ``toThreadQ``.  ``sig`` is emitted whenever the
    activity list has been (re)loaded.
    """

    # Emitted after the in-memory activity list changes (initial load or sync).
    sig = QtCore.pyqtSignal()

    def __init__(self, localdb=None):
        super().__init__()
        self.localdb = localdb
        self.periodicsync_timer = None   # threading.Timer driving autosync
        self.activities = []             # ordered list of activity rows
        self.activities_id = {}          # server_id -> activity row
        self.toThreadQ = queue.Queue()   # commands for the worker thread
        self.t = threading.Thread(target=self.run)
        self.t.setName("ActivityMgr")
        self.t.daemon = True
        self.t.start()

        # React to settings changes, and apply the current settings now.
        sett.updated.connect(self.handle_settings)
        self.handle_settings()

    def handle_settings(self):
        """
        Handle changes in settings

        Starts, restarts or stops the periodic sync timer depending on
        ``sett.activity_sync_interval``.
        """
        if sett.activity_sync_interval:
            if self.periodicsync_timer:
                # has the interval changed?
                # NOTE(review): the running timer's interval includes random
                # jitter (see _start_periodicsync_timer), so this comparison
                # against the bare setting almost always differs and the
                # timer is recreated on every settings update — confirm this
                # is intended.
                if self.periodicsync_timer.interval != sett.activity_sync_interval:
                    self.periodicsync_timer.cancel()
                    self.periodicsync_timer = None
            if self.periodicsync_timer is None:
                log.debug(f"ActivityMgr starting autosync timer, interval {sett.activity_sync_interval}")
                self._start_periodicsync_timer()
        else:
            # Interval unset/zero -> autosync disabled.
            if self.periodicsync_timer:
                log.debug("ActivityMgr stopping autosync timer")
                self.periodicsync_timer.cancel()
                self.periodicsync_timer = None

    def _start_periodicsync_timer(self):
        """Arm a one-shot timer that fires periodic_sync after the configured
        interval, randomized by +/-10% jitter (at least 1 second)."""
        jitter = sett.activity_sync_interval // 10  # 10% jitter
        if jitter < 1:
            jitter = 1
        interval = sett.activity_sync_interval + random.randint(-jitter, jitter)
        log.debug(f"ActivityMgr interval {interval} jitter {jitter}")
        self.periodicsync_timer = threading.Timer(interval, self.periodic_sync)
        self.periodicsync_timer.daemon = True
        self.periodicsync_timer.setName("ActivityMgr.Timer")
        self.periodicsync_timer.start()

    def periodic_sync(self):
        """Timer callback: request a sync and re-arm the timer."""
        log.debug("ActivityMgr.periodic_sync triggered")
        self.sync()
        self._start_periodicsync_timer()

    def init(self):
        """
        Load the list of activities from local db
        """
        self._loadList()
        self.sig.emit()

    def get(self, activityid):
        """Return the activity with the given server id, or None."""
        if activityid in self.activities_id:
            return self.activities_id[activityid]
        return None

    def getList(self):
        """Return the (shared) list of loaded activities."""
        return self.activities

    def save(self):
        """Persist every in-memory activity to the local database."""
        log.debugf(log.DEBUG_ACTIVITYMGR, "Saving activities")
        for activity in self.activities:
            log.debugf(log.DEBUG_ACTIVITYMGR, f"Storing activity {activity.name}")
            try:
                self.localdb.update("activity", d=activity, primary_key="_id")
            except db.DbException as err:
                log.error(f"Cant save activity in local database, {err}")

    def _loadList(self):
        """Reload ``activities``/``activities_id`` from the local database,
        active entries first, then by name."""
        sql = "SELECT * FROM activity ORDER BY active desc,name"
        activities = self.localdb.select_all(sql)
        self.activities.clear()
        self.activities_id.clear()
        for activity in activities:
            self.activities.append(activity)
            self.activities_id[activity.server_id] = activity

    def sync(self):
        """
        Sync the local database with the one on the server

        Asynchronous: only queues a request for the worker thread.
        """
        self.toThreadQ.put("sync")

    def stop(self):
        """Cancel the autosync timer and ask the worker thread to exit."""
        if self.periodicsync_timer and self.periodicsync_timer.is_alive():
            self.periodicsync_timer.cancel()
        self.toThreadQ.put("quit")

    ##############################################################################
    #
    # Everything below is running in a different thread
    #
    ##############################################################################

    def _do_sync(self):
        """
        Runs as a separate thread

        Pulls all activities from the server and reconciles them with the
        local database: updates changed rows, inserts new ones, then
        reloads the in-memory list and emits ``sig``.
        """
        # Get list of all activities on server
        try:
            r = requests.get(f"{sett.server_url}/api/activity")
            srv_activities = r.json()
            srv_activities = srv_activities["data"]
        except requests.exceptions.RequestException as err:
            log.error(f"Cannot load list of activities from server {err}")
            return

        for srv_activity in srv_activities:
            srv_activity = AttrDict(srv_activity)
            log.debug(f"Server activity {srv_activity}")
            sql = "SELECT * FROM activity WHERE server_id=?"
            local_activity = self.localdb.select_one(sql, (srv_activity["_id"],))
            if local_activity:
                # we have the activity locally, check if changed
                changes = []
                for attr in ["name", "description", "active"]:
                    if getattr(local_activity, attr) != getattr(srv_activity, attr):
                        changes.append(attr)
                if changes:
                    tmp = str(srv_activity).replace("\n", " ")
                    log.debugf(log.DEBUG_ACTIVITYMGR, f"Updating local copy of activity, changed columns {changes}, {tmp}")
                    # NOTE(review): 'description' differences are detected but
                    # not copied over here — confirm whether that is intended.
                    local_activity.name = srv_activity["name"]
                    local_activity.server_id = srv_activity["_id"]
                    local_activity.active = srv_activity["active"]
                    try:
                        self.localdb.update("activity", d=local_activity, primary_key="_id")
                    except db.DbException as err:
                        log.error(f"Cannot update local activity {err}")
                        return
            else:
                # new activity
                log.debugf(log.DEBUG_ACTIVITYMGR, f"New activity '{srv_activity.name}' on server, saving in local database")
                # Remember the server's id; let the local db assign its own.
                srv_activity.server_id = srv_activity._id
                srv_activity._id = -1
                try:
                    self.localdb.insert("activity", d=srv_activity, primary_key="_id")
                except db.DbException as err:
                    log.error(f"Cannot save new activity in local database {err}")
                    return
        self._loadList()
        self.sig.emit()

    def run(self):
        """
        Runs as a separate thread

        Worker loop: blocks on ``toThreadQ`` and dispatches commands
        ("sync"/"quit").
        """
        log.debugf(log.DEBUG_ACTIVITYMGR, "Starting activitymgr thread")
        while True:
            req = self.toThreadQ.get()
            log.debugf(log.DEBUG_ACTIVITYMGR, f"activitymgr, request={req}")
            if req == "quit":
                log.debugf(log.DEBUG_ACTIVITYMGR, "activitymgr thread stopping")
                return
            elif req == "sync":
                # connect to database, we have a separate connection in this thread to
                # simplify database operations
                self.localdb = util.openLocalDatabase2()
                self._do_sync()
            else:
                log.error(f"activitymgr thread, unknown command {req}")
if __name__ == "__main__":
    # Module test: exercise the manager against an in-memory database.
    import time
    from PyQt5.Qt import QApplication

    app = util.createQApplication()
    localdb = util.openLocalDatabase2(":memory:")
    activityMgr = ActivityMgr(localdb=localdb)
    activityMgr.init()
    activityMgr.sync()
    activityMgr.stop()
    # Pump the Qt event loop until the worker thread has drained its queue.
    while not activityMgr.toThreadQ.empty():
        QApplication.processEvents()
        time.sleep(0.5)
| |
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from collections import Counter
from datetime import timedelta
from operator import attrgetter
import dateutil.parser
from flask import flash, jsonify, request, session
from pytz import utc
from werkzeug.exceptions import BadRequest, Forbidden, NotFound
from indico.core.errors import UserValueError
from indico.modules.events.contributions import Contribution
from indico.modules.events.contributions.operations import create_contribution, delete_contribution, update_contribution
from indico.modules.events.management.util import flash_if_unregistered
from indico.modules.events.sessions.controllers.management.sessions import RHCreateSession, RHSessionREST
from indico.modules.events.sessions.forms import SessionForm
from indico.modules.events.sessions.models.blocks import SessionBlock
from indico.modules.events.sessions.operations import delete_session_block, update_session, update_session_block
from indico.modules.events.timetable.controllers import (RHManageTimetableBase, RHManageTimetableEntryBase,
SessionManagementLevel)
from indico.modules.events.timetable.controllers.manage import RHBreakREST
from indico.modules.events.timetable.forms import (BaseEntryForm, BreakEntryForm, ContributionEntryForm,
SessionBlockEntryForm)
from indico.modules.events.timetable.legacy import (TimetableSerializer, serialize_contribution, serialize_day_update,
serialize_entry_update, serialize_session)
from indico.modules.events.timetable.models.breaks import Break
from indico.modules.events.timetable.models.entries import TimetableEntryType
from indico.modules.events.timetable.operations import (create_break_entry, create_session_block_entry,
delete_timetable_entry, fit_session_block_entry,
move_timetable_entry, schedule_contribution,
swap_timetable_entry, update_break_entry,
update_timetable_entry, update_timetable_entry_object)
from indico.modules.events.timetable.reschedule import RescheduleMode, Rescheduler
from indico.modules.events.timetable.util import (find_next_start_dt, get_session_block_entries,
get_time_changes_notifications, shift_following_entries)
from indico.modules.events.util import get_field_values, get_random_color, track_time_changes
from indico.util.date_time import as_utc, iterdays
from indico.util.i18n import _
from indico.util.string import handle_legacy_description
from indico.web.forms.base import FormDefaults
from indico.web.util import jsonify_data, jsonify_form, jsonify_template
class RHLegacyTimetableAddEntryBase(RHManageTimetableBase):
    """Base request handler for adding a new entry to the timetable.

    Parses the target day (required) and the optional session block from
    the query string and provides shared form defaults/parameters.
    """

    session_management_level = SessionManagementLevel.manage

    def _process_args(self):
        RHManageTimetableBase._process_args(self)
        # Day on which the new entry will be created.
        self.day = dateutil.parser.parse(request.args['day']).date()
        self.session_block = None
        if 'session_block_id' in request.args:
            self.session_block = self.event.get_session_block(request.args['session_block_id'])
            if not self.session_block:
                raise BadRequest

    def _get_form_defaults(self, **kwargs):
        """Build FormDefaults whose location data is inherited from
        *location_parent* (or the event when no parent is given)."""
        location_parent = kwargs.pop('location_parent', None)
        inherited_location = location_parent.location_data if location_parent else self.event.location_data
        inherited_location['inheriting'] = True
        return FormDefaults(location_data=inherited_location, **kwargs)

    def _get_form_params(self):
        # Keyword arguments shared by all entry forms.
        return {'event': self.event,
                'session_block': self.session_block,
                'day': self.day}
class RHLegacyTimetableAddBreak(RHLegacyTimetableAddEntryBase):
    """Add a break to the timetable."""

    session_management_level = SessionManagementLevel.coordinate

    def _get_default_colors(self):
        """Return the most common break colors used in this event, or a
        random color when the event has no breaks yet."""
        breaks = Break.query.filter(Break.timetable_entry.has(event=self.event)).all()
        common_colors = Counter(b.colors for b in breaks)
        most_common = common_colors.most_common(1)
        colors = most_common[0][0] if most_common else get_random_color(self.event)
        return colors

    def _process(self):
        colors = self._get_default_colors()
        defaults = self._get_form_defaults(colors=colors, location_parent=self.session_block)
        form = BreakEntryForm(obj=defaults, **self._get_form_params())
        if form.validate_on_submit():
            # Creating the entry may auto-extend surrounding timeframes;
            # collect those changes to notify the user.
            with track_time_changes(auto_extend=True, user=session.user) as changes:
                entry = create_break_entry(self.event, form.data, session_block=self.session_block)
            notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo, entry=entry)
            return jsonify_data(update=serialize_entry_update(entry, session_=self.session),
                                notifications=notifications, flash=False)
        return jsonify_form(form, fields=form._display_fields)
class RHLegacyTimetableAddContribution(RHLegacyTimetableAddEntryBase):
    """Create a new contribution and schedule it on the timetable."""

    session_management_level = SessionManagementLevel.manage

    def _process(self):
        defaults = self._get_form_defaults(location_parent=self.session_block)
        form = ContributionEntryForm(obj=defaults, to_schedule=True, **self._get_form_params())
        if form.validate_on_submit():
            contrib = Contribution()
            with track_time_changes(auto_extend=True, user=session.user) as changes:
                # Warn about speakers/authors who are not registered.
                with flash_if_unregistered(self.event, lambda: contrib.person_links):
                    contrib = create_contribution(self.event, form.data, session_block=self.session_block,
                                                  extend_parent=True)
            entry = contrib.timetable_entry
            notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo, entry=entry)
            return jsonify_data(entries=[serialize_entry_update(entry, session_=self.session)],
                                notifications=notifications)
        # Invalid/initial form: don't commit, just render the form.
        self.commit = False
        return jsonify_template('events/contributions/forms/contribution.html', form=form, fields=form._display_fields)
class RHLegacyTimetableAddSessionBlock(RHLegacyTimetableAddEntryBase):
    """Add a session block to the timetable."""

    session_management_level = SessionManagementLevel.coordinate_with_blocks

    def _process_args(self):
        RHLegacyTimetableAddEntryBase._process_args(self)
        # Session context comes from the URL or from 'parent_session_id'.
        self.parent_session = self.session or self.event.get_session(request.args['parent_session_id'])
        if not self.parent_session:
            raise NotFound

    def _process(self):
        defaults = self._get_form_defaults(location_parent=self.parent_session)
        form = SessionBlockEntryForm(obj=defaults, **self._get_form_params())
        if form.validate_on_submit():
            with track_time_changes(auto_extend=True, user=session.user) as changes:
                entry = create_session_block_entry(self.parent_session, form.data)
            notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo, entry=entry)
            return jsonify_data(update=serialize_entry_update(entry, session_=self.session),
                                notifications=notifications, flash=False)
        # Invalid/initial form: don't commit, just render the form.
        self.commit = False
        return jsonify_form(form, fields=form._display_fields, disabled_until_change=False)
class RHLegacyTimetableDeleteEntry(RHManageTimetableEntryBase):
    """Delete a timetable entry.

    For session blocks and (non-conference) contributions the underlying
    object is deleted as well; otherwise only the timetable entry goes.
    """

    @property
    def session_management_level(self):
        # Required privilege level depends on what is being deleted.
        if self.entry.type == TimetableEntryType.SESSION_BLOCK:
            return SessionManagementLevel.coordinate_with_blocks
        elif self.entry.type == TimetableEntryType.CONTRIBUTION and self.event.type != 'conference':
            return SessionManagementLevel.coordinate_with_contribs
        else:
            return SessionManagementLevel.coordinate

    def _process(self):
        # Remember day/parent before deletion so the client view can be updated.
        day = self.entry.start_dt.astimezone(self.entry.event.tzinfo).date()
        block = self.entry.parent
        if self.entry.type == TimetableEntryType.SESSION_BLOCK:
            delete_session_block(self.entry.session_block)
        elif self.entry.type == TimetableEntryType.CONTRIBUTION and self.event.type != 'conference':
            delete_contribution(self.entry.contribution)
        else:
            delete_timetable_entry(self.entry)
        return jsonify_data(update=serialize_day_update(self.event, day, block=block, session_=self.session),
                            flash=False)
class RHLegacyTimetableEditEntry(RHManageTimetableEntryBase):
    """Edit a timetable entry (contribution, break or session block).

    With ``edit_session=1`` in the query string, the session itself is
    edited instead of the selected session block.
    """

    @property
    def session_management_level(self):
        # Required privilege level depends on what is being edited.
        if self.edit_session:
            return SessionManagementLevel.manage
        elif self.entry.type == TimetableEntryType.SESSION_BLOCK:
            return SessionManagementLevel.coordinate_with_blocks
        elif self.entry.type == TimetableEntryType.CONTRIBUTION:
            return SessionManagementLevel.coordinate_with_contribs
        else:
            return SessionManagementLevel.coordinate

    def _process_args(self):
        RHManageTimetableEntryBase._process_args(self)
        self.edit_session = request.args.get('edit_session') == '1'

    def _process(self):
        form = None
        parent_session_block = self.entry.parent.object if self.entry.parent else None
        if self.entry.contribution:
            # --- contribution entry ---
            contrib = self.entry.contribution
            tt_entry_dt = self.entry.start_dt.astimezone(self.event.tzinfo)
            form = ContributionEntryForm(obj=FormDefaults(contrib, time=tt_entry_dt.time()),
                                         event=self.event, contrib=contrib, to_schedule=False,
                                         day=tt_entry_dt.date(), session_block=parent_session_block)
            if form.validate_on_submit():
                with track_time_changes(auto_extend=True, user=session.user) as changes:
                    with flash_if_unregistered(self.event, lambda: contrib.person_links):
                        update_contribution(contrib, *get_field_values(form.data))
                notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo, entry=self.entry)
                return jsonify_data(update=serialize_entry_update(self.entry, session_=self.session),
                                    notifications=notifications)
            elif not form.is_submitted():
                # On initial GET, migrate legacy-formatted descriptions.
                handle_legacy_description(form.description, contrib)
            return jsonify_template('events/contributions/forms/contribution.html', form=form,
                                    fields=form._display_fields)
        elif self.entry.break_:
            # --- break entry ---
            break_ = self.entry.break_
            tt_entry_dt = self.entry.start_dt.astimezone(self.event.tzinfo)
            form = BreakEntryForm(obj=FormDefaults(break_, time=tt_entry_dt.time()), event=self.event,
                                  day=tt_entry_dt.date(), session_block=parent_session_block)
            if form.validate_on_submit():
                with track_time_changes(auto_extend=True, user=session.user) as changes:
                    update_break_entry(break_, form.data)
                notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo, entry=self.entry)
                return jsonify_data(update=serialize_entry_update(self.entry, session_=self.session),
                                    notifications=notifications, flash=False)
        elif self.entry.session_block:
            if self.edit_session:
                # --- edit the session itself ---
                session_ = self.entry.session_block.session
                form = SessionForm(obj=FormDefaults(session_), event=self.event)
                if form.validate_on_submit():
                    update_session(session_, form.data)
                    return jsonify_data(update=serialize_entry_update(self.entry, session_=self.session), flash=False)
            else:
                # --- edit the session block ---
                block = self.entry.session_block
                tt_entry_dt = self.entry.start_dt.astimezone(self.event.tzinfo)
                form = SessionBlockEntryForm(obj=FormDefaults(block, time=tt_entry_dt.time()),
                                             event=self.event, session_block=block, to_schedule=False,
                                             day=tt_entry_dt.date())
                if form.validate_on_submit():
                    with track_time_changes(auto_extend=True, user=session.user) as changes:
                        update_session_block(block, form.data)
                    notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo,
                                                                   entry=self.entry)
                    return jsonify_data(update=serialize_entry_update(self.entry, session_=self.session),
                                        notifications=notifications, flash=False)
        # Invalid/initial form: don't commit, just render the form.
        self.commit = False
        return jsonify_form(form, fields=getattr(form, '_display_fields', None))
class RHLegacyTimetableEditEntryTime(RHManageTimetableEntryBase):
    """Edit only the start time/duration of a timetable entry, optionally
    shifting the entries that follow it."""

    @property
    def session_management_level(self):
        if self.entry.type == TimetableEntryType.SESSION_BLOCK:
            return SessionManagementLevel.coordinate_with_blocks
        else:
            return SessionManagementLevel.coordinate

    def _process(self):
        item = self.entry.object
        entry_dt = self.entry.start_dt.astimezone(self.event.tzinfo)
        form = BaseEntryForm(obj=FormDefaults(item, time=entry_dt.time()), day=entry_dt.date(),
                             event=self.event, entry=self.entry,
                             session_block=self.entry.parent.object if self.entry.parent else None)
        # NOTE(review): form.data is read (and 'shift_later' popped) before
        # validate_on_submit() — confirm this ordering is intentional.
        data = form.data
        shift_later = data.pop('shift_later')

        if form.validate_on_submit():
            with track_time_changes(auto_extend=True, user=session.user) as changes:
                if shift_later:
                    # Move everything after this entry by the same delta.
                    new_end_dt = form.start_dt.data + form.duration.data
                    shift = new_end_dt - self.entry.end_dt
                    shift_following_entries(self.entry, shift, session_=self.session)
                if self.entry.contribution:
                    update_timetable_entry(self.entry, {'start_dt': form.start_dt.data})
                    update_contribution(item, {'duration': form.duration.data})
                elif self.entry.break_:
                    update_break_entry(item, data)
                elif self.entry.session_block:
                    update_session_block(item, data)
            notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo, entry=self.entry)
            return jsonify_data(update=serialize_entry_update(self.entry, session_=self.session),
                                notifications=notifications, flash=False)
        # Invalid/initial form: don't commit, just render the form.
        self.commit = False
        return jsonify_form(form, back_button=False, disabled_until_change=True)
class RHLegacyTimetableAddSession(RHCreateSession):
    """Create a session, returning the legacy-serialized session data."""

    def _get_response(self, new_session):
        return jsonify_data(session=serialize_session(new_session))
class RHLegacyTimetableGetUnscheduledContributions(RHManageTimetableBase):
    """Return the event's unscheduled contributions, optionally limited to
    the session a given session block belongs to."""

    session_management_level = SessionManagementLevel.coordinate

    def _process_args(self):
        RHManageTimetableBase._process_args(self)
        self.session_id = None
        if 'session_block_id' in request.args:
            self.session_id = SessionBlock.get_one(request.args['session_block_id']).session_id
            # The block must belong to the session in the URL (if any).
            if self.session and self.session.id != self.session_id:
                raise BadRequest

    def _process(self):
        contributions = Contribution.query.with_parent(self.event).filter_by(is_scheduled=False)
        # session_id is None for top-level requests, matching contributions
        # without a session.
        contributions = [c for c in contributions if c.session_id == self.session_id]
        return jsonify(contributions=[serialize_contribution(x) for x in contributions])
class RHLegacyTimetableScheduleContribution(RHManageTimetableBase):
    """Schedule one or more existing contributions on a given day, either
    at the top level or inside a session block."""

    session_management_level = SessionManagementLevel.coordinate

    def _process_args(self):
        RHManageTimetableBase._process_args(self)
        self.session_block = None
        if 'block_id' in request.view_args:
            self.session_block = self.event.get_session_block(request.view_args['block_id'])
            if self.session_block is None:
                raise NotFound
        if self.session and self.session != self.session_block.session:
            raise BadRequest

    def _process(self):
        data = request.json
        # Strict payload validation: exactly the required keys plus an
        # optional session_block_id.  (dict.viewkeys: this module targets
        # Python 2.)
        required_keys = {'contribution_ids', 'day'}
        allowed_keys = required_keys | {'session_block_id'}
        if set(data.viewkeys()) > allowed_keys:
            raise BadRequest('Invalid keys found')
        elif required_keys > set(data.viewkeys()):
            raise BadRequest('Required keys missing')
        entries = []
        day = dateutil.parser.parse(data['day']).date()
        query = Contribution.query.with_parent(self.event).filter(Contribution.id.in_(data['contribution_ids']))
        with track_time_changes(auto_extend='end', user=session.user) as changes:
            for contribution in query:
                if self.session and contribution.session_id != self.session.id:
                    raise Forbidden('Contribution not assigned to this session')
                # Find the first free slot in the block (or on the day).
                start_dt = find_next_start_dt(contribution.duration,
                                              obj=self.session_block or self.event,
                                              day=None if self.session_block else day,
                                              force=True)
                entry = self._schedule(contribution, start_dt)
                if entry.end_dt.astimezone(entry.event.tzinfo).date() > day:
                    raise UserValueError(_("Contribution '{}' could not be scheduled since it doesn't fit on this day.")
                                         .format(contribution.title))
                entries.append(entry)
        notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo)
        return jsonify_data(update=serialize_entry_update(entries[0], session_=self.session) if entries else None,
                            notifications=notifications, flash=False)

    def _schedule(self, contrib, start_dt):
        return schedule_contribution(contrib, start_dt, session_block=self.session_block, extend_parent=True)
class RHLegacyTimetableReschedule(RHManageTimetableBase):
    """Reschedule a whole day (or a session/session block on that day)
    according to the requested mode/gap, via the Rescheduler."""

    # JSON schema used to validate the request payload.
    _json_schema = {
        'type': 'object',
        'properties': {
            'mode': {'type': 'string', 'enum': ['none', 'time', 'duration']},
            'day': {'type': 'string', 'format': 'date'},
            'gap': {'type': 'integer', 'minimum': 0},
            'fit_blocks': {'type': 'boolean'},
            'session_block_id': {'type': 'integer'},
            'session_id': {'type': 'integer'}
        },
        'required': ['mode', 'day', 'gap', 'fit_blocks']
    }

    @property
    def session_management_level(self):
        # Narrower scope (a single block) needs a lower privilege level.
        if self.session_block:
            return SessionManagementLevel.coordinate
        elif self.session:
            return SessionManagementLevel.coordinate_with_blocks
        else:
            return SessionManagementLevel.none

    def _process_args(self):
        RHManageTimetableBase._process_args(self)
        self.validate_json(self._json_schema)
        self.day = dateutil.parser.parse(request.json['day']).date()
        self.session_block = None
        if request.json.get('session_block_id') is not None:
            self.session_block = self.event.get_session_block(request.json['session_block_id'], scheduled_only=True)
            if self.session_block is None:
                raise NotFound
            if self.session and self.session != self.session_block.session:
                raise BadRequest
        elif request.json.get('session_id') is not None:
            # The explicit session id must match the session from the URL.
            if self.session.id != request.json['session_id']:
                raise BadRequest

    def _process(self):
        # When a block is targeted, the rescheduler works on the block, not
        # the whole session.
        sess = self.session if not self.session_block else None
        rescheduler = Rescheduler(self.event, RescheduleMode[request.json['mode']], self.day,
                                  session=sess, session_block=self.session_block, fit_blocks=request.json['fit_blocks'],
                                  gap=timedelta(minutes=request.json['gap']))
        with track_time_changes(auto_extend='end', user=session.user) as changes:
            rescheduler.run()
        notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo)
        for notification in notifications:
            flash(notification, 'highlight')
        return jsonify_data(flash=False)
class RHLegacyTimetableFitBlock(RHManageTimetableBase):
    """Shrink/extend a session block so it exactly fits its contents."""

    session_management_level = SessionManagementLevel.coordinate_with_blocks

    def _process_args(self):
        RHManageTimetableBase._process_args(self)
        self.session_block = self.event.get_session_block(request.view_args['block_id'], scheduled_only=True)
        if self.session_block is None:
            raise NotFound
        if self.session and self.session != self.session_block.session:
            raise BadRequest

    def _process(self):
        with track_time_changes():
            fit_session_block_entry(self.session_block.timetable_entry)
        return jsonify_data(flash=False)
class RHLegacyTimetableMoveEntry(RHManageTimetableEntryBase):
    """Moves a TimetableEntry into a Session or top-level timetable"""

    def _process_GET(self):
        # Render the dialog offering possible move targets.
        current_day = dateutil.parser.parse(request.args.get('day')).date()
        return jsonify_template('events/timetable/move_entry.html', event=self.event,
                                top_level_entries=self._get_session_timetable_entries(),
                                current_day=current_day, timetable_entry=self.entry,
                                parent_entry=self.entry.parent)

    def _process_POST(self):
        self.serializer = TimetableSerializer(self.event, management=True)
        with track_time_changes(auto_extend=True, user=session.user) as changes:
            entry_data = self._move_entry(request.json)
        rv = dict(serialize_entry_update(self.entry), **entry_data)
        notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo, entry=self.entry)
        return jsonify_data(flash=False, entry=rv, notifications=notifications)

    def _move_entry(self, data):
        """Perform the move.  ``parent_id`` moves the entry into a session
        block; ``day`` moves it to the top level of another day.  Returns
        serialization data describing the move for the client."""
        rv = {}
        if data.get('parent_id'):
            rv['old'] = self.serializer.serialize_timetable_entry(self.entry)
            parent_timetable_entry = self.event.timetable_entries.filter_by(id=data['parent_id']).one()
            move_timetable_entry(self.entry, parent=parent_timetable_entry)
            rv['session'] = rv['slotEntry'] = self.serializer.serialize_session_block_entry(parent_timetable_entry)
        elif data.get('day'):
            rv['old'] = self.serializer.serialize_timetable_entry(self.entry)
            new_date = as_utc(dateutil.parser.parse(data['day']))
            move_timetable_entry(self.entry, day=new_date)
        return rv

    def _get_session_timetable_entries(self):
        # Map each event day to its scheduled session block entries.
        entries = {}
        for day in iterdays(self.event.start_dt, self.event.end_dt):
            entries[day.date()] = get_session_block_entries(self.event, day)
        return entries
class RHLegacyTimetableShiftEntries(RHManageTimetableEntryBase):
    """Move an entry to a new start time, shifting all following entries
    by the same delta."""

    @property
    def session_management_level(self):
        if self.entry.type == TimetableEntryType.SESSION_BLOCK:
            return SessionManagementLevel.coordinate_with_blocks
        else:
            return SessionManagementLevel.coordinate

    def _process(self):
        # Client sends a naive local time; localize to the event tz, store UTC.
        new_start_dt = (self.event.tzinfo.localize(dateutil.parser.parse(request.form.get('startDate')))
                        .astimezone(utc))
        shift = new_start_dt - self.entry.start_dt
        with track_time_changes(auto_extend=True, user=session.user) as changes:
            shift_following_entries(self.entry, shift, session_=self.session)
            self.entry.move(new_start_dt)
        notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo, entry=self.entry)
        return jsonify_data(flash=False, update=serialize_entry_update(self.entry, session_=self.session),
                            notifications=notifications)
class RHLegacyTimetableSwapEntries(RHManageTimetableEntryBase):
    """Swap an entry with its immediate neighbor (direction from the form).

    Refused for entries that overlap others in parallel.
    """

    @property
    def session_management_level(self):
        if self.entry.type == TimetableEntryType.SESSION_BLOCK:
            return SessionManagementLevel.coordinate_with_blocks
        else:
            return SessionManagementLevel.coordinate

    def _process_args(self):
        RHManageTimetableEntryBase._process_args(self)
        if self.entry.is_parallel(in_session=self.session is not None):
            raise BadRequest

    def _process(self):
        direction = request.form['direction']
        with track_time_changes():
            swap_timetable_entry(self.entry, direction, session_=self.session)
        return jsonify_data(flash=False, update=serialize_entry_update(self.entry, session_=self.session))
class RHLegacyTimetableEditEntryDateTime(RHManageTimetableEntryBase):
    """Changes the start_dt of a `TimetableEntry`"""

    @property
    def session_management_level(self):
        if self.entry.type == TimetableEntryType.SESSION_BLOCK:
            return SessionManagementLevel.coordinate_with_blocks
        else:
            return SessionManagementLevel.coordinate

    def _process(self):
        # Client sends naive local times; localize to the event tz, store UTC.
        new_start_dt = self.event.tzinfo.localize(
            dateutil.parser.parse(request.form.get('startDate'))).astimezone(utc)
        new_end_dt = self.event.tzinfo.localize(dateutil.parser.parse(request.form.get('endDate'))).astimezone(utc)
        new_duration = new_end_dt - new_start_dt
        is_session_block = self.entry.type == TimetableEntryType.SESSION_BLOCK
        tzinfo = self.event.tzinfo
        if is_session_block and new_end_dt.astimezone(tzinfo).date() != self.entry.start_dt.astimezone(tzinfo).date():
            raise UserValueError(_('Session block cannot span more than one day'))
        with track_time_changes(auto_extend=True, user=session.user) as changes:
            update_timetable_entry_object(self.entry, {'duration': new_duration})
            if is_session_block:
                self.entry.move(new_start_dt)
            if not is_session_block:
                update_timetable_entry(self.entry, {'start_dt': new_start_dt})
            # A shortened block must still contain its children; raising
            # inside the context manager rolls the changes back.
            if is_session_block and self.entry.children:
                if new_end_dt < max(self.entry.children, key=attrgetter('end_dt')).end_dt:
                    raise UserValueError(_("Session block cannot be shortened this much because contributions contained "
                                           "wouldn't fit."))
        notifications = get_time_changes_notifications(changes, tzinfo=self.event.tzinfo, entry=self.entry)
        return jsonify_data(flash=False, update=serialize_entry_update(self.entry, session_=self.session),
                            notifications=notifications)
class RHLegacyTimetableEditSession(RHSessionREST):
    """Session REST endpoint that additionally returns updated timetable
    entry data for all of the session's blocks."""

    def _process_args(self):
        RHSessionREST._process_args(self)
        # Whether the request came from the session-level timetable view.
        self.is_session_timetable = request.args.get('is_session_timetable') == '1'

    def _process_PATCH(self):
        RHSessionREST._process_PATCH(self)
        entries = [serialize_entry_update(block.timetable_entry,
                                          session_=(self.session if self.is_session_timetable else None))
                   for block in self.session.blocks]
        return jsonify_data(entries=entries, flash=False)
class RHLegacyTimetableBreakREST(RHBreakREST):
    """Break edit endpoint for the legacy timetable UI."""

    def _process_PATCH(self):
        RHBreakREST._process_PATCH(self)
        updated = serialize_entry_update(self.entry)
        return jsonify_data(entries=[updated], flash=False)
| |
#
# QtHelp.py -- customized Qt widgets and convenience functions
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import time
import os
# PySide or PyQt4: choose one or the other, but not both
# 'choose' tries PyQt4 first, then falls back to PySide.
toolkit = 'choose'
#toolkit = 'pyside'
#toolkit = 'pyqt4'

has_pyqt4 = False
has_pyside = False

if toolkit in ('pyqt4', 'choose'):
    try:
        import sip
        # API v2 makes QString transparently map to native Python str.
        for cl in ('QString', ):
            sip.setapi(cl, 2)
        from PyQt4 import QtCore, QtGui
        has_pyqt4 = True
        # QtWebKit is optional; ignore if the binding lacks it.
        try:
            from PyQt4 import QtWebKit
        except ImportError:
            pass
        # for Matplotlib
        os.environ['QT_API'] = 'pyqt'
    except ImportError:
        pass

if toolkit in ('pyside', 'choose') and (not has_pyqt4):
    try:
        from PySide import QtCore, QtGui
        has_pyside = True
        # QtWebKit is optional; ignore if the binding lacks it.
        try:
            from PySide import QtWebKit
        except ImportError:
            pass
        # for Matplotlib
        os.environ['QT_API'] = 'pyside'
    except ImportError:
        pass

# Without either binding the rest of this module cannot work.
if (not has_pyside) and (not has_pyqt4):
    raise ImportError("Please install pyqt4 or pyside")
from ginga.misc import Bunch, Callback
# Qt style sheet applied to every workspace notebook to tighten margins.
tabwidget_style = """
QTabWidget::pane { margin: 0px,0px,0px,0px; padding: 0px; }
QMdiSubWindow { margin: 0px; padding: 2px; }
"""
class TopLevel(QtGui.QWidget):
    """Top-level window that quits the associated application on close."""

    def closeEvent(self, event):
        # Quit only if an application object was attached via setApp().
        app = getattr(self, 'app', None)
        if app:
            app.quit()

    def setApp(self, app):
        """Attach the application object whose quit() runs at close time."""
        self.app = app
class TabWidget(QtGui.QTabWidget):
    """Thin QTabWidget subclass used for tabbed workspaces (see make_ws)."""
    pass
class StackedWidget(QtGui.QStackedWidget):
    """QStackedWidget exposing a tab-like API (tab labels are ignored)."""

    def addTab(self, widget, label):
        # `label` is accepted for API compatibility but never shown.
        self.addWidget(widget)

    def removeTab(self, index):
        page = self.widget(index)
        self.removeWidget(page)
class Workspace(QtGui.QMdiArea):
    """MDI area exposing the tab-like API Desktop expects of a workspace."""

    def __init__(self):
        super(Workspace, self).__init__()
        self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
        self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
        self.setViewMode(QtGui.QMdiArea.TabbedView)

    def addTab(self, widget, label):
        """Wrap *widget* in an MDI subwindow titled *label* and show it."""
        w = self.addSubWindow(widget)
        w.setWindowTitle(label)
        w.show()

    def indexOf(self, widget):
        """Return the subwindow index holding *widget*, or -1 if absent.

        Bug fix: subWindowList() yields QMdiSubWindow wrappers, not the
        contained widgets, so searching the wrapper list for *widget*
        always raised ValueError (i.e. always returned -1).  Compare
        against each wrapper's ``.widget()`` instead.
        """
        try:
            widgets = [sw.widget() for sw in self.subWindowList()]
            return widgets.index(widget)
        except (IndexError, ValueError):
            return -1

    def widget(self, index):
        """Return the widget contained in the subwindow at *index*."""
        sw = list(self.subWindowList())[index]
        return sw.widget()

    def tabBar(self):
        # MDI areas have no tab bar; callers test the result for None.
        return None

    def setCurrentIndex(self, index):
        """Activate the subwindow at *index*."""
        w = list(self.subWindowList())[index]
        self.setActiveSubWindow(w)

    def sizeHint(self):
        return QtCore.QSize(300, 300)
class ComboBox(QtGui.QComboBox):
    """Combo box with helpers for alphabetically ordered item management."""

    def insert_alpha(self, text):
        """Insert *text* keeping the item list in ascending order."""
        index = 0
        while True:
            current = self.itemText(index)
            if len(current) == 0:
                # Ran off the end without finding a larger item.
                break
            if current > text:
                self.insertItem(index, text)
                return
            index += 1
        self.addItem(text)

    def delete_alpha(self, text):
        """Remove the item matching *text*."""
        self.removeItem(self.findText(text))

    def show_text(self, text):
        """Select the item matching *text*."""
        self.setCurrentIndex(self.findText(text))

    def append_text(self, text):
        """Append *text* at the end, ignoring ordering."""
        self.addItem(text)
class VBox(QtGui.QWidget):
    """Widget with a margin-less vertical box layout."""

    def __init__(self, *args, **kwdargs):
        super(VBox, self).__init__(*args, **kwdargs)
        box = QtGui.QVBoxLayout()
        # Qt's default margins waste space; zero them.
        box.setContentsMargins(0, 0, 0, 0)
        self.setLayout(box)

    def addWidget(self, w, **kwdargs):
        self.layout().addWidget(w, **kwdargs)

    def setSpacing(self, val):
        self.layout().setSpacing(val)
class HBox(QtGui.QWidget):
    """Widget with a margin-less horizontal box layout."""

    def __init__(self, *args, **kwdargs):
        super(HBox, self).__init__(*args, **kwdargs)
        box = QtGui.QHBoxLayout()
        # Qt's default margins waste space; zero them.
        box.setContentsMargins(0, 0, 0, 0)
        self.setLayout(box)

    def addWidget(self, w, **kwdargs):
        self.layout().addWidget(w, **kwdargs)

    def setSpacing(self, val):
        self.layout().setSpacing(val)
class Frame(QtGui.QFrame):
    """Raised box frame with an optional centered title label on top."""

    def __init__(self, title=None):
        super(Frame, self).__init__()
        self.setFrameStyle(QtGui.QFrame.Box | QtGui.QFrame.Raised)
        vbox = QtGui.QVBoxLayout()
        # Keep margins tight; Qt defaults are generous.
        vbox.setContentsMargins(2, 2, 2, 2)
        self.setLayout(vbox)
        self.label = None
        if title:
            lbl = QtGui.QLabel(title)
            lbl.setAlignment(QtCore.Qt.AlignHCenter)
            vbox.addWidget(lbl, stretch=0)
            self.label = lbl

    def getLabel(self):
        """Return the title label, or None if the frame has no title."""
        return self.label

    def addWidget(self, w, **kwdargs):
        self.layout().addWidget(w, **kwdargs)
class Dialog(QtGui.QDialog):
    """Non-modal dialog with a content area and a bottom row of buttons.

    Each ``(name, val)`` pair in *buttons* becomes a push button; clicking
    it invokes ``callback(dialog, val)``.
    """

    def __init__(self, title=None, flags=None, buttons=None,
                 callback=None):
        # NOTE(review): `title` and `flags` are currently unused; kept for
        # interface compatibility with existing callers.
        QtGui.QDialog.__init__(self)
        self.setModal(False)

        vbox = QtGui.QVBoxLayout()
        self.setLayout(vbox)

        # Content area packed above the button row.
        self.content = QtGui.QWidget()
        vbox.addWidget(self.content, stretch=1)

        hbox_w = QtGui.QWidget()
        hbox = QtGui.QHBoxLayout()
        hbox_w.setLayout(hbox)

        def mklocal(val):
            # Bind `val` per button; a bare lambda would late-bind the
            # loop variable.
            def cb():
                callback(self, val)
            return cb

        # Fix: tolerate the documented default buttons=None instead of
        # raising TypeError when iterating it.
        for name, val in (buttons or []):
            btn = QtGui.QPushButton(name)
            if callback:
                btn.clicked.connect(mklocal(val))
            hbox.addWidget(btn, stretch=0)

        vbox.addWidget(hbox_w, stretch=0)

    def get_content_area(self):
        """Return the widget into which dialog content should be packed."""
        return self.content
class Desktop(Callback.Callbacks):
def __init__(self):
super(Desktop, self).__init__()
# for tabs
self.tab = Bunch.caselessDict()
self.tabcount = 0
self.notebooks = Bunch.caselessDict()
for name in ('page-switch', 'page-select'):
self.enable_callback(name)
self.popmenu = None
# --- Tab Handling ---
def make_ws(self, name=None, group=1, show_tabs=True, show_border=False,
detachable=True, tabpos=None, scrollable=True, closeable=False,
wstype='nb'):
if tabpos == None:
tabpos = QtGui.QTabWidget.North
if wstype == 'mdi':
nb = Workspace()
elif show_tabs:
nb = TabWidget()
nb.setTabPosition(tabpos)
nb.setUsesScrollButtons(scrollable)
nb.setTabsClosable(closeable)
nb.setMovable(True) # reorderable
nb.setAcceptDrops(True)
nb.currentChanged.connect(lambda idx: self.switch_page(idx, nb))
tb = nb.tabBar()
## tb.setAcceptDrops(True)
tb.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
tb.connect(tb, QtCore.SIGNAL('customContextMenuRequested(const QPoint&)'),
lambda point: self.on_context_menu(nb, point))
else:
nb = StackedWidget()
nb.currentChanged.connect(lambda idx: self.switch_page(idx, nb))
nb.setStyleSheet (tabwidget_style)
if not name:
name = str(time.time())
self.notebooks[name] = Bunch.Bunch(nb=nb, name=name, nbtype=wstype)
return self.notebooks[name]
def get_nb(self, name):
return self.notebooks[name].nb
def on_context_menu(self, nb, point):
# create context menu
popmenu = QtGui.QMenu(nb)
submenu = QtGui.QMenu(popmenu)
submenu.setTitle("Take Tab")
popmenu.addMenu(submenu)
tabnames = self.tab.keys()
tabnames.sort()
for tabname in tabnames:
item = QtGui.QAction(tabname, nb)
item.triggered.connect(self._mk_take_tab_cb(tabname, nb))
submenu.addAction(item)
popmenu.exec_(nb.mapToGlobal(point))
self.popmenu = popmenu
def add_tab(self, tab_w, widget, group, labelname, tabname=None,
data=None):
"""NOTE: use add_page() instead."""
self.tabcount += 1
if not tabname:
tabname = labelname
if self.tab.has_key(tabname):
tabname = 'tab%d' % self.tabcount
tab_w.addTab(widget, labelname)
self.tab[tabname] = Bunch.Bunch(widget=widget, name=labelname,
tabname=tabname, data=data,
group=group)
return tabname
def add_page(self, nbname, widget, group, labelname, tabname=None):
tab_w = self.get_nb(nbname)
return self.add_tab(tab_w, widget, group, labelname, tabname=tabname)
def _find_nb(self, tabname):
widget = self.tab[tabname].widget
for bnch in self.notebooks.values():
nb = bnch.nb
page_num = nb.indexOf(widget)
if page_num < 0:
continue
return (nb, page_num)
return (None, None)
def _find_tab(self, widget):
for key, bnch in self.tab.items():
if widget == bnch.widget:
return bnch
return None
def select_cb(self, widget, event, name, data):
self.make_callback('page-select', name, data)
def raise_tab(self, tabname):
nb, index = self._find_nb(tabname)
widget = self.tab[tabname].widget
if (nb != None) and (index >= 0):
nb.setCurrentIndex(index)
def highlight_tab(self, tabname, onoff):
nb, index = self._find_nb(tabname)
if nb:
tb = nb.tabBar()
if tb == None:
return
widget = tb.tabButton(index, QtGui.QTabBar.RightSide)
if widget == None:
return
name = self.tab[tabname].name
if onoff:
widget.setStyleSheet('QPushButton {color: palegreen}')
else:
widget.setStyleSheet('QPushButton {color: grey}')
def remove_tab(self, tabname):
nb, index = self._find_nb(tabname)
widget = self.tab[tabname].widget
if (nb != None) and (index >= 0):
nb.removeTab(index)
def create_toplevel_ws(self, width, height, x=None, y=None):
# create main frame
root = TopLevel()
## root.setTitle(title)
# TODO: this needs to be more sophisticated
layout = QtGui.QVBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
root.setLayout(layout)
menubar = QtGui.QMenuBar()
layout.addWidget(menubar, stretch=0)
# create a Window pulldown menu, and add it to the menu bar
winmenu = menubar.addMenu("Window")
## item = QtGui.QAction("Take Tab", menubar)
## item.triggered.connect(self.gui_load_file)
## winmenu.addAction(item)
sep = QtGui.QAction(menubar)
sep.setSeparator(True)
winmenu.addAction(sep)
quititem = QtGui.QAction("Quit", menubar)
winmenu.addAction(quititem)
bnch = self.make_ws(group=1)
bnch.root = root
layout.addWidget(bnch.nb, stretch=1)
root.closeEvent = lambda event: self.close_page(bnch, event)
quititem.triggered.connect(lambda: self._close_page(bnch))
root.show()
root.resize(width, height)
if x != None:
root.moveTo(x, y)
return True
def detach_page(self, source, widget, x, y, group):
# Detach page to new top-level workspace
## page = self.widgetToPage(widget)
## if not page:
## return None
width, height = widget.size()
## self.logger.info("detaching page %s" % (page.name))
bnch = self.create_toplevel_ws(width, height, x=x, y=y)
return bnch.nb
def _mk_take_tab_cb(self, tabname, to_nb):
def _foo():
nb, index = self._find_nb(tabname)
widget = self.tab[tabname].widget
if (nb != None) and (index >= 0):
nb.removeTab(index)
to_nb.addTab(widget, tabname)
return _foo
def _close_page(self, bnch):
num_children = bnch.nb.count()
if num_children == 0:
del self.notebooks[bnch.name]
root = bnch.root
bnch.root = None
root.destroy()
return True
def close_page(self, bnch, event):
num_children = bnch.nb.count()
if num_children == 0:
del self.notebooks[bnch.name]
#bnch.root.destroy()
event.accept()
else:
event.ignore()
return True
def switch_page(self, page_num, nbw):
pagew = nbw.currentWidget()
bnch = self._find_tab(pagew)
if bnch != None:
self.make_callback('page-switch', bnch.name, bnch.data)
return False
def make_desktop(self, layout, widgetDict=None):
if widgetDict == None:
widgetDict = {}
def process_common_params(widget, inparams):
params = Bunch.Bunch(name=None, height=-1, width=-1)
params.update(inparams)
if params.name:
widgetDict[params.name] = widget
if ((params.width >= 0) or (params.height >= 0)) and \
isinstance(widget, QtGui.QWidget):
if params.width < 0:
width = widget.width()
else:
width = params.width
if params.height < 0:
height = widget.height()
else:
height = params.height
widget.resize(width, height)
def make_widget(kind, paramdict, args, pack):
#print "ARGS ARE ", args
kind = kind.lower()
# Process workspace parameters
params = Bunch.Bunch(name=None, title=None, height=-1,
width=-1, group=1, show_tabs=True,
show_border=False, scrollable=True,
detachable=True, wstype='nb',
tabpos=QtGui.QTabWidget.North)
params.update(paramdict)
#print "PARAMS ARE", params
if kind == 'widget':
widget = args[0]
elif kind == 'ws':
group = int(params.group)
widget = self.make_ws(name=params.name, group=group,
show_tabs=params.show_tabs,
show_border=params.show_border,
detachable=params.detachable,
tabpos=params.tabpos,
wstype=params.wstype,
scrollable=params.scrollable).nb
#debug(widget)
# If a title was passed as a parameter, then make a frame to
# wrap the widget using the title.
if params.title:
fr = Frame(params.title)
fr.layout().addWidget(widget, stretch=1)
pack(fr)
else:
pack(widget)
process_common_params(widget, params)
if (kind in ('ws', 'mdi')) and (len(args) > 0):
# <-- Notebook ws specified a sub-layout. We expect a list
# of tabname, layout pairs--iterate over these and add them
# to the workspace as tabs.
print "ws args=", args
for tabname, layout in args[0]:
def pack(w):
# ?why should group be the same as parent group?
self.add_tab(widget, w, group,
tabname, tabname.lower())
make(layout, pack)
return widget
# Horizontal adjustable panel
def horz(params, cols, pack):
if len(cols) >= 2:
hpaned = QtGui.QSplitter()
hpaned.setOrientation(QtCore.Qt.Horizontal)
for col in cols:
make(col, lambda w: hpaned.addWidget(w))
widget = hpaned
elif len(cols) == 1:
widget = QtGui.QWidget()
layout = QtGui.QHBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
make(cols[0], lambda w: layout.addWidget(w, stretch=1))
widget.setLayout(layout)
#widget.show()
process_common_params(widget, params)
pack(widget)
# Vertical adjustable panel
def vert(params, rows, pack):
if len(rows) >= 2:
vpaned = QtGui.QSplitter()
vpaned.setOrientation(QtCore.Qt.Vertical)
for row in rows:
make(row, lambda w: vpaned.addWidget(w))
widget = vpaned
elif len(rows) == 1:
widget = QtGui.QWidget()
layout = QtGui.QVBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
make(rows[0], lambda w: layout.addWidget(w, stretch=1))
widget.setLayout(layout)
#widget.show()
process_common_params(widget, params)
pack(widget)
# Horizontal fixed array
def hbox(params, cols, pack):
widget = QtGui.QWidget()
layout = QtGui.QHBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
widget.setLayout(layout)
for dct in cols:
if isinstance(dct, dict):
stretch = dct.get('stretch', 0)
col = dct.get('col', None)
else:
# assume a list defining the col
stretch = align = 0
col = dct
if col != None:
make(col, lambda w: layout.addWidget(w,
stretch=stretch))
process_common_params(widget, params)
#widget.show()
pack(widget)
# Vertical fixed array
def vbox(params, rows, pack):
widget = QtGui.QWidget()
layout = QtGui.QVBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
widget.setLayout(layout)
for dct in rows:
if isinstance(dct, dict):
stretch = dct.get('stretch', 0)
row = dct.get('row', None)
else:
# assume a list defining the row
stretch = align = 0
row = dct
if row != None:
make(row, lambda w: layout.addWidget(w,
stretch=stretch))
process_common_params(widget, params)
#widget.show()
pack(widget)
def make(constituents, pack):
kind = constituents[0]
params = constituents[1]
if len(constituents) > 2:
rest = constituents[2:]
else:
rest = []
if kind == 'vpanel':
vert(params, rest, pack)
elif kind == 'hpanel':
horz(params, rest, pack)
elif kind == 'vbox':
vbox(params, rest, pack)
elif kind == 'hbox':
hbox(params, rest, pack)
elif kind in ('ws', 'mdi', 'widget'):
make_widget(kind, params, rest, pack)
widget33 = QtGui.QWidget()
layout33 = QtGui.QVBoxLayout()
layout33.setContentsMargins(0, 0, 0, 0)
widget33.setLayout(layout33)
make(layout, lambda w: layout33.addWidget(w, stretch=1))
#widget33.show()
return widget33
def _name_mangle(name, pfx=''):
newname = []
for c in name.lower():
if not (c.isalpha() or c.isdigit() or (c == '_')):
newname.append('_')
else:
newname.append(c)
return pfx + ''.join(newname)
def _make_widget(tup, ns):
    """Create a (label, widget) pair from a caption tuple.

    *tup* is (title, wtype); a title starting with '@' hides the label.
    The constructed control is registered in the namespace dict *ns*
    under the mangled title.  Returns (w1, w2); when `swap` is set the
    control comes first and the (empty) label second.
    """
    swap = False
    title = tup[0]
    if not title.startswith('@'):
        name = _name_mangle(title)
        w1 = QtGui.QLabel(title + ':')
        w1.setAlignment(QtCore.Qt.AlignRight)
    else:
        # invisible title
        swap = True
        name = _name_mangle(title[1:])
        w1 = QtGui.QLabel('')

    wtype = tup[1]
    if wtype == 'label':
        w2 = QtGui.QLabel('')
        w2.setAlignment(QtCore.Qt.AlignLeft)
    elif wtype == 'xlabel':
        # Like 'label' but registered with an 'lbl_' prefix.
        w2 = QtGui.QLabel('')
        w2.setAlignment(QtCore.Qt.AlignLeft)
        name = 'lbl_' + name
    elif wtype == 'entry':
        w2 = QtGui.QLineEdit()
        w2.setMaxLength(12)
    elif wtype == 'combobox':
        w2 = ComboBox()
    elif wtype == 'spinbutton':
        w2 = QtGui.QSpinBox()
    elif wtype == 'spinfloat':
        w2 = QtGui.QDoubleSpinBox()
    elif wtype == 'vbox':
        w2 = VBox()
    elif wtype == 'hbox':
        w2 = HBox()
    elif wtype == 'hscale':
        w2 = QtGui.QSlider(QtCore.Qt.Horizontal)
    elif wtype == 'vscale':
        w2 = QtGui.QSlider(QtCore.Qt.Vertical)
    elif wtype == 'checkbutton':
        # Button-like controls carry their own caption, so the label is
        # blanked and the pair is swapped.
        w1 = QtGui.QLabel('')
        w2 = QtGui.QCheckBox(title)
        swap = True
    elif wtype == 'radiobutton':
        w1 = QtGui.QLabel('')
        w2 = QtGui.QRadioButton(title)
        swap = True
    elif wtype == 'togglebutton':
        w1 = QtGui.QLabel('')
        w2 = QtGui.QPushButton(title)
        w2.setCheckable(True)
        swap = True
    elif wtype == 'button':
        w1 = QtGui.QLabel('')
        w2 = QtGui.QPushButton(title)
        swap = True
    elif wtype == 'spacer':
        w1 = QtGui.QLabel('')
        w2 = QtGui.QLabel('')
    else:
        raise Exception("Bad wtype=%s" % wtype)

    if swap:
        w1, w2 = w2, w1
        ns[name] = w1
    else:
        ns[name] = w2
    return (w1, w2)
def build_info(captions):
    """Lay out caption tuples in a grid of label/control pairs.

    Returns (container_widget, bunch) where the bunch maps mangled names
    to the created controls.
    """
    numrows = len(captions)
    # Widest row determines the number of grid columns.
    numcols = max([0] + [len(tup) for tup in captions])

    widget = QtGui.QWidget()
    table = QtGui.QGridLayout()
    widget.setLayout(table)
    table.setVerticalSpacing(2)
    table.setHorizontalSpacing(4)
    table.setContentsMargins(2, 2, 2, 2)

    wb = Bunch.Bunch()
    for row, tup in enumerate(captions):
        # Each (title, wtype) pair occupies two adjacent grid cells.
        for col in range(0, numcols, 2):
            if col < len(tup):
                w1, w2 = _make_widget(tup[col:col + 2], wb)
                table.addWidget(w1, row, col)
                table.addWidget(w2, row, col + 1)
    return widget, wb
def debug(widget):
    """Developer aid: print every 'set*' attribute name of *widget*."""
    foo = dir(widget)
    print "---- %s ----" % str(widget)
    for x in foo:
        if x.startswith('set'):
            print x
def children(layout):
    """Return the items of a Qt layout as a Python list.

    Qt layouts only expose items via itemAt(index), which yields None past
    the end; walk indices until that sentinel appears.
    """
    items = []
    i = 0
    while True:
        child = layout.itemAt(i)
        if child == None:
            break
        items.append(child)
        i += 1
    return items
def removeWidget(layout, widget):
    """Remove *widget* from *layout* and detach it from its parent.

    Looks the widget up among the layout's items; if found, removes the
    owning layout item and reparents the widget to None.  No-op (with a
    diagnostic print) when the widget is not in the layout.
    """
    print "removing %s from list" % widget
    kids = children(layout)
    # Map each QLayoutItem to the widget it holds for the membership test.
    kids2 = map(lambda item: item.widget(), kids)
    print "children are", kids2
    if widget in kids2:
        idx = kids2.index(widget)
        w = kids[idx]
        #layout.removeWidget(widget)
        print "removing item"
        # Remove the layout item (not the widget) so the layout re-flows.
        layout.removeItem(w)
        widget.setParent(None)
        #print "deleting widget"
        #widget.delete()
    else:
        print "widget is not present"
#END
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
# Callback type accepted via the `cls` kwarg on every operation.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

# Shared serializer; validation is done server-side for generated clients.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_check_existence_request(
    resource_group_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HEAD request probing whether a resource group exists."""
    api_version = "2018-05-01"

    # Expand the URL template with validated path arguments.
    url = _format_url_section(
        kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
    )

    query = kwargs.pop("params", {})  # type: Dict[str, Any]
    query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    return HttpRequest(
        method="HEAD",
        url=url,
        params=query,
        **kwargs
    )
def build_create_or_update_request(
    resource_group_name: str,
    subscription_id: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates or updates a resource group."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    api_version = "2018-05-01"
    accept = "application/json"

    # Expand the URL template with validated path arguments.
    url = _format_url_section(
        kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
    )

    query = kwargs.pop("params", {})  # type: Dict[str, Any]
    query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="PUT",
        url=url,
        params=query,
        headers=headers,
        json=json,
        content=content,
        **kwargs
    )
def build_delete_request_initial(
    resource_group_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the initial DELETE request of the delete long-running operation."""
    api_version = "2018-05-01"

    # Expand the URL template with validated path arguments.
    url = _format_url_section(
        kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
    )

    query = kwargs.pop("params", {})  # type: Dict[str, Any]
    query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    return HttpRequest(
        method="DELETE",
        url=url,
        params=query,
        **kwargs
    )
def build_get_request(
    resource_group_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request retrieving a single resource group."""
    api_version = "2018-05-01"
    accept = "application/json"

    # Expand the URL template with validated path arguments.
    url = _format_url_section(
        kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
    )

    query = kwargs.pop("params", {})  # type: Dict[str, Any]
    query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query,
        headers=headers,
        **kwargs
    )
def build_update_request(
    resource_group_name: str,
    subscription_id: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the PATCH request updating a resource group in place."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    api_version = "2018-05-01"
    accept = "application/json"

    # Expand the URL template with validated path arguments.
    url = _format_url_section(
        kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
    )

    query = kwargs.pop("params", {})  # type: Dict[str, Any]
    query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="PATCH",
        url=url,
        params=query,
        headers=headers,
        json=json,
        content=content,
        **kwargs
    )
def build_export_template_request(
    resource_group_name: str,
    subscription_id: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the POST request exporting a resource group as a template."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    api_version = "2018-05-01"
    accept = "application/json"

    # Expand the URL template with validated path arguments.
    url = _format_url_section(
        kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/exportTemplate'),
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
    )

    query = kwargs.pop("params", {})  # type: Dict[str, Any]
    query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="POST",
        url=url,
        params=query,
        headers=headers,
        json=json,
        content=content,
        **kwargs
    )
def build_list_request(
    subscription_id: str,
    *,
    filter: Optional[str] = None,
    top: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request listing the subscription's resource groups."""
    api_version = "2018-05-01"
    accept = "application/json"

    # Expand the URL template with validated path arguments.
    url = _format_url_section(
        kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
    )

    query = kwargs.pop("params", {})  # type: Dict[str, Any]
    # Optional OData filtering/paging parameters.
    if filter is not None:
        query['$filter'] = _SERIALIZER.query("filter", filter, 'str')
    if top is not None:
        query['$top'] = _SERIALIZER.query("top", top, 'int')
    query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query,
        headers=headers,
        **kwargs
    )
class ResourceGroupsOperations(object):
"""ResourceGroupsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.resources.v2018_05_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def check_existence(
self,
resource_group_name: str,
**kwargs: Any
) -> bool:
"""Checks whether a resource group exists.
:param resource_group_name: The name of the resource group to check. The name is case
insensitive.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool, or the result of cls(response)
:rtype: bool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_check_existence_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
template_url=self.check_existence.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
return 200 <= response.status_code <= 299
check_existence.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'} # type: ignore
@distributed_trace
def create_or_update(
self,
resource_group_name: str,
parameters: "_models.ResourceGroup",
**kwargs: Any
) -> "_models.ResourceGroup":
"""Creates or updates a resource group.
:param resource_group_name: The name of the resource group to create or update. Can include
alphanumeric, underscore, parentheses, hyphen, period (except at end), and Unicode characters
that match the allowed characters.
:type resource_group_name: str
:param parameters: Parameters supplied to the create or update a resource group.
:type parameters: ~azure.mgmt.resource.resources.v2018_05_01.models.ResourceGroup
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ResourceGroup, or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2018_05_01.models.ResourceGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ResourceGroup')
request = build_create_or_update_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create_or_update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ResourceGroup', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ResourceGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'} # type: ignore
def _delete_initial(
    self,
    resource_group_name: str,
    **kwargs: Any
) -> None:
    """Issue the initial (non-polling) DELETE call for the delete LRO.

    Accepts 200/202 from the service; any other status is mapped to the
    appropriate azure.core exception.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))

    # Build the request from the operation metadata URL, then normalize it
    # for the pipeline and expand the URL template.
    request = _convert_request(build_delete_request_initial(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        template_url=self._delete_initial.metadata['url'],
    ))
    request.url = self._client.format_url(request.url)

    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in (200, 202):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # No body to deserialize; only invoke the custom callback when supplied.
    if cls:
        return cls(pipeline_response, None, {})

_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'}  # type: ignore
@distributed_trace
def begin_delete(
    self,
    resource_group_name: str,
    **kwargs: Any
) -> LROPoller[None]:
    """Deletes a resource group.

    When you delete a resource group, all of its resources are also deleted. Deleting a resource
    group deletes all of its template deployments and currently stored operations.

    :param resource_group_name: The name of the resource group to delete. The name is case
     insensitive.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: make the initial DELETE call. The identity 'cls'
        # callback keeps the raw pipeline response so the poller can drive
        # the long-running operation from it.
        raw_result = self._delete_initial(
            resource_group_name=resource_group_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Drop the per-call error map before kwargs are forwarded to the polling method.
    kwargs.pop('error_map', None)

    def get_long_running_output(pipeline_response):
        # Final deserialization callback; this operation returns no body,
        # so only a caller-supplied 'cls' produces a value.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller instead of starting a new operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'}  # type: ignore
@distributed_trace
def get(
    self,
    resource_group_name: str,
    **kwargs: Any
) -> "_models.ResourceGroup":
    """Gets a resource group.

    :param resource_group_name: The name of the resource group to get. The name is case
     insensitive.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroup, or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2018_05_01.models.ResourceGroup
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ResourceGroup"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))

    # Build the GET request from the operation metadata, normalize it, and
    # expand the URL template.
    request = _convert_request(build_get_request(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        template_url=self.get.metadata['url'],
    ))
    request.url = self._client.format_url(request.url)

    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ResourceGroup', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'}  # type: ignore
@distributed_trace
def update(
    self,
    resource_group_name: str,
    parameters: "_models.ResourceGroupPatchable",
    **kwargs: Any
) -> "_models.ResourceGroup":
    """Updates a resource group.

    Resource groups can be updated through a simple PATCH operation to a group address. The format
    of the request is the same as that for creating a resource group. If a field is unspecified,
    the current value is retained.

    :param resource_group_name: The name of the resource group to update. The name is case
     insensitive.
    :type resource_group_name: str
    :param parameters: Parameters supplied to update a resource group.
    :type parameters: ~azure.mgmt.resource.resources.v2018_05_01.models.ResourceGroupPatchable
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroup, or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2018_05_01.models.ResourceGroup
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ResourceGroup"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

    # Serialize the patch body, then build/normalize the PATCH request.
    body = self._serialize.body(parameters, 'ResourceGroupPatchable')
    request = _convert_request(build_update_request(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        content_type=content_type,
        json=body,
        template_url=self.update.metadata['url'],
    ))
    request.url = self._client.format_url(request.url)

    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ResourceGroup', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}'}  # type: ignore
@distributed_trace
def export_template(
    self,
    resource_group_name: str,
    parameters: "_models.ExportTemplateRequest",
    **kwargs: Any
) -> "_models.ResourceGroupExportResult":
    """Captures the specified resource group as a template.

    :param resource_group_name: The name of the resource group to export as a template.
    :type resource_group_name: str
    :param parameters: Parameters for exporting the template.
    :type parameters: ~azure.mgmt.resource.resources.v2018_05_01.models.ExportTemplateRequest
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: ResourceGroupExportResult, or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2018_05_01.models.ResourceGroupExportResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ResourceGroupExportResult"]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

    # Serialize the export request body, then build/normalize the POST request.
    body = self._serialize.body(parameters, 'ExportTemplateRequest')
    request = _convert_request(build_export_template_request(
        resource_group_name=resource_group_name,
        subscription_id=self._config.subscription_id,
        content_type=content_type,
        json=body,
        template_url=self.export_template.metadata['url'],
    ))
    request.url = self._client.format_url(request.url)

    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('ResourceGroupExportResult', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

export_template.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/exportTemplate'}  # type: ignore
@distributed_trace
def list(
    self,
    filter: Optional[str] = None,
    top: Optional[int] = None,
    **kwargs: Any
) -> Iterable["_models.ResourceGroupListResult"]:
    """Gets all the resource groups for a subscription.

    :param filter: The filter to apply on the operation.:code:`<br>`:code:`<br>`You can filter by
     tag names and values. For example, to filter for a tag name and value, use $filter=tagName eq
     'tag1' and tagValue eq 'Value1'.
    :type filter: str
    :param top: The number of results to return. If null is passed, returns all resource groups.
    :type top: int
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either ResourceGroupListResult or the result of
     cls(response)
    :rtype:
     ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2018_05_01.models.ResourceGroupListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ResourceGroupListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    def prepare_request(next_link=None):
        # First page: use the operation's templated URL. Subsequent pages:
        # use the service-supplied next_link as the template and force GET.
        if not next_link:
            request = build_list_request(
                subscription_id=self._config.subscription_id,
                filter=filter,
                top=top,
                template_url=self.list.metadata['url'],
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
        else:
            request = build_list_request(
                subscription_id=self._config.subscription_id,
                filter=filter,
                top=top,
                template_url=next_link,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            request.method = "GET"
        return request
    def extract_data(pipeline_response):
        # Deserialize one page and hand ItemPaged the (next_link, items) pair.
        deserialized = self._deserialize("ResourceGroupListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)
    def get_next(next_link=None):
        # Fetch a single page, raising on any non-200 status.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response
    return ItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups'}  # type: ignore
| |
import bs
import bsUtils
import random
def bsGetAPIVersion():
    """Return the BombSquad mod-API version this module targets."""
    return 4
def bsGetGames():
    """Return the list of game-activity classes this module provides."""
    return [GravityFalls]
class FlyMessage(object):
    """Marker message telling a spaz to apply an upward impulse.

    Sent repeatedly by GravityFalls.raisePlayer and handled by
    AntiGravityPlayerSpaz.handleMessage; carries no payload.
    """
    pass
class Icon(bs.Actor):
    """Per-player HUD icon: character portrait, name, optional lives counter.

    Anchored to the bottom of the screen; can flash on death and dims when
    the player is out of lives.
    """
    def __init__(self,player,position,scale,showLives=True,showDeath=True,
                 nameScale=1.0,nameMaxWidth=115.0,flatness=1.0,shadow=1.0):
        bs.Actor.__init__(self)
        self._player = player
        self._showLives = showLives
        self._showDeath = showDeath
        self._nameScale = nameScale
        self._outlineTex = bs.getTexture('characterIconMask')
        icon = player.getIcon()
        # character portrait image node
        self.node = bs.newNode('image',
                               owner=self,
                               attrs={'texture':icon['texture'],
                                      'tintTexture':icon['tintTexture'],
                                      'tintColor':icon['tintColor'],
                                      'vrDepth':400,
                                      'tint2Color':icon['tint2Color'],
                                      'maskTexture':self._outlineTex,
                                      'opacity':1.0,
                                      'absoluteScale':True,
                                      'attach':'bottomCenter'})
        # player name, colored with the (clamped-safe) team color
        self._nameText = bs.newNode('text',
                                    owner=self.node,
                                    attrs={'text':bs.Lstr(value=player.getName()),
                                           'color':bs.getSafeColor(player.getTeam().color),
                                           'hAlign':'center',
                                           'vAlign':'center',
                                           'vrDepth':410,
                                           'maxWidth':nameMaxWidth,
                                           'shadow':shadow,
                                           'flatness':flatness,
                                           'hAttach':'center',
                                           'vAttach':'bottom'})
        if self._showLives:
            # 'xN' lives counter; text filled in by updateForLives()
            self._livesText = bs.newNode('text',
                                         owner=self.node,
                                         attrs={'text':'x0',
                                                'color':(1,1,0.5),
                                                'hAlign':'left',
                                                'vrDepth':430,
                                                'shadow':1.0,
                                                'flatness':1.0,
                                                'hAttach':'center',
                                                'vAttach':'bottom'})
        self.setPositionAndScale(position,scale)
    def setPositionAndScale(self,position,scale):
        """Move/scale the icon and its attached name and lives text nodes."""
        self.node.position = position
        self.node.scale = [70.0*scale]
        self._nameText.position = (position[0],position[1]+scale*52.0)
        self._nameText.scale = 1.0*scale*self._nameScale
        if self._showLives:
            self._livesText.position = (position[0]+scale*10.0,position[1]-scale*43.0)
            self._livesText.scale = 1.0*scale
    def updateForLives(self):
        """Refresh the lives counter; dim the whole icon at zero lives."""
        if self._player.exists():
            lives = self._player.gameData['lives']
        else: lives = 0
        if self._showLives:
            # counter shows remaining *extra* lives, hence lives-1
            if lives > 0: self._livesText.text = 'x'+str(lives-1)
            else: self._livesText.text = ''
        if lives == 0:
            self._nameText.opacity = 0.2
            self.node.color = (0.7,0.3,0.3)
            self.node.opacity = 0.2
    def handlePlayerSpawned(self):
        """Restore full opacity when our player (re)spawns."""
        if not self.node.exists(): return
        self.node.opacity = 1.0
        self.updateForLives()
    def handlePlayerDied(self):
        """Flash the icon on death (if enabled), then refresh lives shortly after."""
        if not self.node.exists(): return
        if self._showDeath:
            bs.animate(self.node,'opacity',{0:1.0,50:0.0,100:1.0,150:0.0,200:1.0,250:0.0,
                                            300:1.0,350:0.0,400:1.0,450:0.0,500:1.0,550:0.2})
            lives = self._player.gameData['lives']
            if lives == 0: bs.gameTimer(600,self.updateForLives)
class AntiGravityPlayerSpaz(bs.PlayerSpaz):
    """PlayerSpaz variant that responds to FlyMessage with an impulse.

    The raw 'impulse' node message below uses the engine's positional
    signature (target position followed by direction/magnitude values);
    presumably the net effect is an upward push -- confirm against the
    BombSquad node API before changing any of the numbers.
    """
    def handleMessage(self,m):
        if isinstance(m, FlyMessage):
            try:
                self.node.handleMessage("impulse",self.node.position[0],self.node.position[1]+.5,self.node.position[2],0,5,0,
                                        3,10,0,0,
                                        0,5,0)
            except Exception: pass  # node may already be gone; best-effort by design
        else: bs.PlayerSpaz.handleMessage(self,m)
class GravityFalls(bs.TeamGameActivity):
    """Elimination-style minigame where live players are repeatedly pushed upward.

    Each player gets a configurable number of lives; raisePlayer() re-sends a
    FlyMessage to every live spaz on a 50ms timer. The last team/player with
    lives remaining wins; score is survival time in seconds.

    NOTE(review): this module uses Python 2 syntax (see _printLives) and
    targets BombSquad's embedded py2 runtime.
    """
    @classmethod
    def getName(cls):
        return 'Gravity Falls'
    @classmethod
    def getScoreInfo(cls):
        # 'noneIsWinner': teams still alive at the end (no recorded time) win
        return {'scoreName':'Survived',
                'scoreType':'seconds',
                'noneIsWinner':True}
    @classmethod
    def getDescription(cls,sessionType):
        return 'Last remaining alive wins.'
    @classmethod
    def supportsSessionType(cls,sessionType):
        # playable in both teams and free-for-all sessions
        return True if (issubclass(sessionType,bs.TeamsSession)
                        or issubclass(sessionType,bs.FreeForAllSession)) else False
    @classmethod
    def getSupportedMaps(cls,sessionType):
        return bs.getMapsSupportingPlayType("melee")
    @classmethod
    def getSettings(cls,sessionType):
        """Host-configurable settings; teams sessions get two extra options."""
        settings = [("Lives Per Player",{'default':1,'minValue':1,'maxValue':10,'increment':1}),
                    ("Time Limit",{'choices':[('None',0),('1 Minute',60),
                                              ('2 Minutes',120),('5 Minutes',300),
                                              ('10 Minutes',600),('20 Minutes',1200)],'default':0}),
                    ("Respawn Times",{'choices':[('Shorter',0.25),('Short',0.5),('Normal',1.0),('Long',2.0),('Longer',4.0)],'default':1.0}),
                    ("Epic Mode",{'default':False})]
        if issubclass(sessionType,bs.TeamsSession):
            settings.append(("Solo Mode",{'default':False}))
            settings.append(("Balance Total Lives",{'default':False}))
        return settings
    def __init__(self,settings):
        bs.TeamGameActivity.__init__(self,settings)
        if self.settings['Epic Mode']: self._isSlowMotion = True
        # on-screen credit text at the bottom of the screen
        self.info = bs.NodeActor(bs.newNode('text',
                                            attrs={'vAttach': 'bottom',
                                                   'hAlign': 'center',
                                                   'vrDepth': 0,
                                                   'color': (0,.2,0),
                                                   'shadow': 1.0,
                                                   'flatness': 1.0,
                                                   'position': (0,0),
                                                   'scale': 0.8,
                                                   'text': "Created by MattZ45986 on Github",
                                                   }))
        self.announcePlayerDeaths = True
        # 'Solo Mode' only exists in teams sessions; default to off elsewhere
        try: self._soloMode = settings['Solo Mode']
        except Exception: self._soloMode = False
        self._scoreBoard = bs.ScoreBoard()
    def getInstanceDescription(self):
        return 'Last team standing wins.' if isinstance(self.getSession(),bs.TeamsSession) else 'Last one standing wins.'
    def getInstanceScoreBoardDescription(self):
        return 'last team standing wins' if isinstance(self.getSession(),bs.TeamsSession) else 'last one standing wins'
    def onTransitionIn(self):
        bs.TeamGameActivity.onTransitionIn(self, music='Epic' if self.settings['Epic Mode'] else 'Survival')
        # reference point for survival-time scoring (see handleMessage)
        self._startGameTime = bs.getGameTime()
    def onTeamJoin(self,team):
        # survivalSeconds stays None while the team has anyone alive
        team.gameData['survivalSeconds'] = None
        team.gameData['spawnOrder'] = []
    def _updateSoloMode(self):
        # for both teams, find the first player on the spawn order list with lives remaining
        # and spawn them if they're not alive
        for team in self.teams:
            # prune dead players from the spawn order
            team.gameData['spawnOrder'] = [p for p in team.gameData['spawnOrder'] if p.exists()]
            for player in team.gameData['spawnOrder']:
                if player.gameData['lives'] > 0:
                    if not player.isAlive(): self.spawnPlayer(player)
                    break
    def _updateIcons(self):
        """Lay out player icons for the current session type and mode."""
        # in free-for-all mode, everyone is just lined up along the bottom
        if isinstance(self.getSession(),bs.FreeForAllSession):
            count = len(self.teams)
            xOffs = 85
            x = xOffs*(count-1) * -0.5
            for i,team in enumerate(self.teams):
                if len(team.players) == 1:
                    player = team.players[0]
                    for icon in player.gameData['icons']:
                        icon.setPositionAndScale((x,30),0.7)
                        icon.updateForLives()
                x += xOffs
        # in teams mode we split up teams
        else:
            if self._soloMode:
                # first off, clear out all icons
                for player in self.players:
                    player.gameData['icons'] = []
                # now for each team, cycle through our available players adding icons
                for team in self.teams:
                    if team.getID() == 0:
                        x = -60
                        xOffs = -78
                    else:
                        x = 60
                        xOffs = 78
                    isFirst = True
                    testLives = 1
                    while True:
                        playersWithLives = [p for p in team.gameData['spawnOrder'] if p.exists() and p.gameData['lives'] >= testLives]
                        if len(playersWithLives) == 0: break
                        for player in playersWithLives:
                            # first (up-next) icon is large; queued ones shrink
                            player.gameData['icons'].append(Icon(player,
                                                                 position=(x,(40 if isFirst else 25)),
                                                                 scale=1.0 if isFirst else 0.5,
                                                                 nameMaxWidth=130 if isFirst else 75,
                                                                 nameScale=0.8 if isFirst else 1.0,
                                                                 flatness=0.0 if isFirst else 1.0,
                                                                 shadow=0.5 if isFirst else 1.0,
                                                                 showDeath=True if isFirst else False,
                                                                 showLives=False))
                            x += xOffs * (0.8 if isFirst else 0.56)
                            isFirst = False
                        testLives += 1
            # non-solo mode
            else:
                for team in self.teams:
                    if team.getID() == 0:
                        x = -50
                        xOffs = -85
                    else:
                        x = 50
                        xOffs = 85
                    for player in team.players:
                        for icon in player.gameData['icons']:
                            icon.setPositionAndScale((x,30),0.7)
                            icon.updateForLives()
                        x += xOffs
    def _getSpawnPoint(self,player):
        # in solo-mode, if there's an existing live player on the map, spawn at whichever
        # spot is farthest from them (keeps the action spread out)
        # NOTE(review): the inner loop variable shadows the 'player' argument.
        if self._soloMode:
            livingPlayer = None
            for team in self.teams:
                for player in team.players:
                    if player.isAlive():
                        p = player.actor.node.position
                        livingPlayer = player
                        livingPlayerPos = p
                        break
            if livingPlayer:
                playerPos = bs.Vector(*livingPlayerPos)
                points = []
                for team in self.teams:
                    startPos = bs.Vector(*self.getMap().getStartPosition(team.getID()))
                    points.append([(startPos-playerPos).length(),startPos])
                # sorted by distance; take the farthest start position
                points.sort()
                return points[-1][1]
            else:
                return None
        else:
            return None
    def spawnPlayer(self,player):
        """Spawn (or respawn) *player* high above the map center."""
        self.spawnPlayerSpaz(player,(0,5,0))
        if not self._soloMode:
            bs.gameTimer(300,bs.Call(self._printLives,player))
        # if we have any icons, update their state
        for icon in player.gameData['icons']:
            icon.handlePlayerSpawned()
    def spawnPlayerSpaz(self,player,position=(0,0,0),angle=None):
        """Create an AntiGravityPlayerSpaz for *player* and start the rise timer."""
        name = player.getName()
        color = player.color
        highlight = player.highlight
        lightColor = bsUtils.getNormalizedColor(color)
        displayColor = bs.getSafeColor(color,targetIntensity=0.75)
        spaz = AntiGravityPlayerSpaz(color=color,
                                     highlight=highlight,
                                     character=player.character,
                                     player=player)
        player.setActor(spaz)
        # in co-op on these maps, also collide with the invisible wall material
        if isinstance(self.getSession(),bs.CoopSession) and self.getMap().getName() in ['Courtyard','Tower D']:
            mat = self.getMap().preloadData['collideWithWallMaterial']
            spaz.node.materials += (mat,)
            spaz.node.rollerMaterials += (mat,)
        spaz.node.name = name
        spaz.node.nameColor = displayColor
        spaz.connectControlsToPlayer()
        spaz.handleMessage(bs.StandMessage(position,angle if angle is not None else random.uniform(0,360)))
        t = bs.getGameTime()
        bs.playSound(self._spawnSound,1,position=spaz.node.position)
        # brief spawn flash in the player's color
        light = bs.newNode('light',attrs={'color':lightColor})
        spaz.node.connectAttr('position',light,'position')
        bsUtils.animate(light,'intensity',{0:0,250:1,500:0})
        bs.gameTimer(500,light.delete)
        # begin the repeating anti-gravity push after a 1s grace period
        bs.gameTimer(1000,bs.Call(self.raisePlayer, player))
        return spaz
    def _printLives(self,player):
        # pop up the player's remaining *extra* lives above their head
        if not player.exists() or not player.isAlive(): return
        try: pos = player.actor.node.position
        except Exception,e:
            print 'EXC getting player pos in bsElim',e
            return
        bs.PopupText('x'+str(player.gameData['lives']-1),color=(1,1,0,1),
                     offset=(0,-0.8,0),randomOffset=0.0,scale=1.8,position=pos).autoRetain()
    def onPlayerJoin(self, player):
        # no longer allowing mid-game joiners here... too easy to exploit
        if self.hasBegun():
            player.gameData['lives'] = 0
            player.gameData['icons'] = []
            # make sure our team has survival seconds set if they're all dead
            # (otherwise blocked new ffa players would be considered 'still alive' in score tallying)
            if self._getTotalTeamLives(player.getTeam()) == 0 and player.getTeam().gameData['survivalSeconds'] is None:
                player.getTeam().gameData['survivalSeconds'] = 0
            bs.screenMessage(bs.Lstr(resource='playerDelayedJoinText',subs=[('${PLAYER}',player.getName(full=True))]),color=(0,1,0))
            return
        player.gameData['lives'] = self.settings['Lives Per Player']
        if self._soloMode:
            player.gameData['icons'] = []
            player.getTeam().gameData['spawnOrder'].append(player)
            self._updateSoloMode()
        else:
            # create our icon and spawn
            player.gameData['icons'] = [Icon(player,position=(0,50),scale=0.8)]
            if player.gameData['lives'] > 0:
                self.spawnPlayer(player)
        # dont waste time doing this until begin
        if self.hasBegun():
            self._updateIcons()
    def onPlayerLeave(self,player):
        bs.TeamGameActivity.onPlayerLeave(self,player)
        player.gameData['icons'] = None
        # remove us from spawn-order
        if self._soloMode:
            if player in player.getTeam().gameData['spawnOrder']:
                player.getTeam().gameData['spawnOrder'].remove(player)
        # update icons in a moment since our team will be gone from the list then
        bs.gameTimer(0, self._updateIcons)
    def raisePlayer(self, player):
        """Send one FlyMessage and reschedule every 50ms while the player lives."""
        player.actor.handleMessage(FlyMessage())
        if player.isAlive():
            bs.gameTimer(50,bs.Call(self.raisePlayer,player))
        # NOTE(review): dead code left as a bare string literal by the author.
        """spaz.node.handleMessage("impulse",spaz.node.position[0],spaz.node.position[1],spaz.node.position[2],
        0,8,0,
        2,6,0,0,0,8,0)"""
    def onBegin(self):
        bs.TeamGameActivity.onBegin(self)
        self.setupStandardTimeLimit(self.settings['Time Limit'])
        self.setupStandardPowerupDrops()
        if self._soloMode:
            # 'vs' label shown between the two solo spawn-queue columns
            self._vsText = bs.NodeActor(bs.newNode("text",
                                                   attrs={'position':(0,105),
                                                          'hAttach':"center",
                                                          'hAlign':'center',
                                                          'maxWidth':200,
                                                          'shadow':0.5,
                                                          'vrDepth':390,
                                                          'scale':0.6,
                                                          'vAttach':"bottom",
                                                          'color':(0.8,0.8,0.3,1.0),
                                                          'text':bs.Lstr(resource='vsText')}))
        # if balance-team-lives is on, add lives to the smaller team until total lives match
        if (isinstance(self.getSession(),bs.TeamsSession)
                and self.settings['Balance Total Lives']
                and len(self.teams[0].players) > 0
                and len(self.teams[1].players) > 0):
            if self._getTotalTeamLives(self.teams[0]) < self._getTotalTeamLives(self.teams[1]):
                lesserTeam = self.teams[0]
                greaterTeam = self.teams[1]
            else:
                lesserTeam = self.teams[1]
                greaterTeam = self.teams[0]
            addIndex = 0
            # distribute the extra lives round-robin across the smaller team
            while self._getTotalTeamLives(lesserTeam) < self._getTotalTeamLives(greaterTeam):
                lesserTeam.players[addIndex].gameData['lives'] += 1
                addIndex = (addIndex + 1) % len(lesserTeam.players)
        self._updateIcons()
        # we could check game-over conditions at explicit trigger points,
        # but lets just do the simple thing and poll it...
        bs.gameTimer(1000, self._update, repeat=True)
    def _getTotalTeamLives(self,team):
        return sum(player.gameData['lives'] for player in team.players)
    def handleMessage(self,m):
        if isinstance(m,bs.PlayerSpazDeathMessage):
            bs.TeamGameActivity.handleMessage(self, m) # augment standard behavior
            player = m.spaz.getPlayer()
            player.gameData['lives'] -= 1
            if player.gameData['lives'] < 0:
                bs.printError('Got lives < 0 in Elim; this shouldnt happen. solo:'+str(self._soloMode))
                player.gameData['lives'] = 0
            # if we have any icons, update their state
            for icon in player.gameData['icons']:
                icon.handlePlayerDied()
            # play big death sound on our last death or for every one in solo mode
            if self._soloMode or player.gameData['lives'] == 0:
                bs.playSound(bs.Spaz.getFactory().singlePlayerDeathSound)
            # if we hit zero lives, we're dead (and our team might be too)
            if player.gameData['lives'] == 0:
                # if the whole team is now dead, mark their survival time..
                #if all(teammate.gameData['lives'] == 0 for teammate in player.getTeam().players):
                if self._getTotalTeamLives(player.getTeam()) == 0:
                    player.getTeam().gameData['survivalSeconds'] = (bs.getGameTime()-self._startGameTime)/1000
            else:
                # otherwise, in regular mode, respawn..
                if not self._soloMode:
                    self.respawnPlayer(player)
                # in solo, put ourself at the back of the spawn order
                if self._soloMode:
                    player.getTeam().gameData['spawnOrder'].remove(player)
                    player.getTeam().gameData['spawnOrder'].append(player)
    def _update(self):
        # NOTE(review): duplicates _updateSoloMode plus an icon refresh.
        if self._soloMode:
            # for both teams, find the first player on the spawn order list with lives remaining
            # and spawn them if they're not alive
            for team in self.teams:
                # prune dead players from the spawn order
                team.gameData['spawnOrder'] = [p for p in team.gameData['spawnOrder'] if p.exists()]
                for player in team.gameData['spawnOrder']:
                    if player.gameData['lives'] > 0:
                        if not player.isAlive():
                            self.spawnPlayer(player)
                            self._updateIcons()
                        break
        # if we're down to 1 or fewer living teams, start a timer to end the game
        # (allows the dust to settle and draws to occur if deaths are close enough)
        if len(self._getLivingTeams()) < 2:
            self._roundEndTimer = bs.Timer(500,self.endGame)
    def _getLivingTeams(self):
        return [team for team in self.teams if len(team.players) > 0 and any(player.gameData['lives'] > 0 for player in team.players)]
    def endGame(self):
        if self.hasEnded(): return
        results = bs.TeamGameResults()
        self._vsText = None # kill our 'vs' if its there
        for team in self.teams:
            results.setTeamScore(team, team.gameData['survivalSeconds'])
        self.end(results=results)
| |
#!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
'''This file generates shell code for the setup.SHELL scripts to set environment variables'''
from __future__ import print_function
import argparse
import copy
import os
import platform
import sys
# environment at generation time
CMAKE_PREFIX_PATH = '/opt/ros/groovy'.split(';')
# devel space of the generating workspace; prepended so it wins lookups
setup_dir = '/opt/ros/groovy/sandbox/arduo_joy/build/devel'
if setup_dir and setup_dir not in CMAKE_PREFIX_PATH:
    CMAKE_PREFIX_PATH.insert(0, setup_dir)
CMAKE_PREFIX_PATH = os.pathsep.join(CMAKE_PREFIX_PATH)
# marker file identifying a catkin workspace root
CATKIN_MARKER_FILE = '.catkin'
system = platform.system()
IS_DARWIN = (system == 'Darwin')
IS_WINDOWS = (system == 'Windows')
# subfolder of workspace prepended to CMAKE_PREFIX_PATH
ENV_VAR_SUBFOLDERS = {
    'CMAKE_PREFIX_PATH': '',
    'CPATH': 'include',
    'LD_LIBRARY_PATH' if not IS_DARWIN else 'DYLD_LIBRARY_PATH': 'lib',
    'PATH': 'bin',
    'PKG_CONFIG_PATH': 'lib/pkgconfig',
    'PYTHONPATH': 'lib/python2.7/dist-packages',
}
def rollback_env_variables(environ, env_var_subfolders):
    '''
    Generate shell code that resets environment variables by undoing
    modifications based on all workspaces in CMAKE_PREFIX_PATH.
    Modifications performed by environment hooks are not covered.
    '''
    # roll back against a snapshot so earlier rollbacks don't affect later ones
    snapshot = copy.copy(environ)
    lines = []
    for name in sorted(env_var_subfolders):
        rolled_back = _rollback_env_variable(snapshot, name, env_var_subfolders[name])
        if rolled_back is None:
            continue
        environ[name] = rolled_back
        lines.append(assignment(name, rolled_back))
    if lines:
        lines.insert(0, comment('reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH'))
    return lines
def _rollback_env_variable(environ, name, subfolder):
    '''
    For each catkin workspace in CMAKE_PREFIX_PATH remove the first entry from
    env[NAME] matching workspace + subfolder.

    :param subfolder: str '' or subfoldername that may start with '/'
    :returns: the updated value of the environment variable, or None if nothing was removed.
    '''
    remaining = [p for p in environ.get(name, '').split(os.pathsep) if p]
    modified = False
    if subfolder:
        # normalize: strip at most one leading and one trailing separator
        if subfolder.startswith(os.path.sep) or (os.path.altsep and subfolder.startswith(os.path.altsep)):
            subfolder = subfolder[1:]
        if subfolder.endswith(os.path.sep) or (os.path.altsep and subfolder.endswith(os.path.altsep)):
            subfolder = subfolder[:-1]
    seps = [os.path.sep, os.path.altsep]
    for ws_path in _get_workspaces(environ, include_fuerte=True):
        target = os.path.join(ws_path, subfolder) if subfolder else ws_path
        for candidate in remaining:
            # compare with any single trailing separator trimmed off
            trimmed = candidate[:-1] if candidate and candidate[-1] in seps else candidate
            if trimmed == target:
                remaining.remove(candidate)
                modified = True
                break
    return os.pathsep.join(remaining) if modified else None
def _get_workspaces(environ, include_fuerte=False):
    '''
    Based on CMAKE_PREFIX_PATH return all catkin workspaces.

    :param include_fuerte: The flag if paths starting with '/opt/ros/fuerte' should be considered workspaces, ``bool``
    '''
    prefix_paths = [p for p in environ.get('CMAKE_PREFIX_PATH', '').split(os.pathsep) if p]

    def _is_workspace(path):
        # fuerte installs predate the marker file, so optionally whitelist them
        if include_fuerte and path.startswith('/opt/ros/fuerte'):
            return True
        return os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE))

    return [p for p in prefix_paths if _is_workspace(p)]
def prepend_env_variables(environ, env_var_subfolders, workspaces):
    '''
    Generate shell code to prepend environment variables
    for the all workspaces.
    '''
    ws_paths = [p for p in workspaces.split(os.pathsep) if p]
    lines = [comment('prepend folders of workspaces to environment variables')]
    # CMAKE_PREFIX_PATH comes first and receives the workspace roots themselves
    lines.append(prepend(environ, 'CMAKE_PREFIX_PATH',
                         _prefix_env_variable(environ, 'CMAKE_PREFIX_PATH', ws_paths, '')))
    # remaining variables get workspace-root + their per-variable subfolder
    for name in sorted(k for k in env_var_subfolders if k != 'CMAKE_PREFIX_PATH'):
        lines.append(prepend(environ, name,
                             _prefix_env_variable(environ, name, ws_paths, env_var_subfolders[name])))
    return lines
def _prefix_env_variable(environ, name, paths, subfolder):
'''
Return the prefix to prepend to the environment variable NAME, adding any path in NEW_PATHS_STR without creating duplicate or empty items.
'''
value = environ[name] if name in environ else ''
environ_paths = [path for path in value.split(os.pathsep) if path]
checked_paths = []
for path in paths:
if subfolder:
path = os.path.join(path, subfolder)
# exclude any path already in env and any path we already added
if path not in environ_paths and path not in checked_paths:
checked_paths.append(path)
prefix_str = os.pathsep.join(checked_paths)
if prefix_str != '' and environ_paths:
prefix_str += os.pathsep
return prefix_str
def assignment(key, value):
    """Return a shell statement assigning *value* to variable *key* for the current platform."""
    if IS_WINDOWS:
        return 'set %s=%s' % (key, value)
    return 'export %s="%s"' % (key, value)
def comment(msg):
    """Return *msg* formatted as a shell comment for the current platform."""
    template = 'REM %s' if IS_WINDOWS else '# %s'
    return template % msg
def prepend(environ, key, prefix):
    """Return shell code prepending *prefix* to variable *key*.

    Falls back to a plain assignment when the variable is unset or empty,
    so no dangling separator is produced.
    """
    if not environ.get(key):
        return assignment(key, prefix)
    if IS_WINDOWS:
        return 'set %s=%s%%%s%%' % (key, prefix, key)
    return 'export %s="%s$%s"' % (key, prefix, key)
def find_env_hooks(environ, cmake_prefix_path):
    '''
    Generate shell code with found environment hooks
    for the all workspaces.

    :param environ: the current environment mapping (CATKIN_SHELL is read from it)
    :param cmake_prefix_path: os.pathsep-separated list of prefixes to scan
    :returns: list of shell code lines assigning _CATKIN_ENVIRONMENT_HOOKS
    '''
    lines = []
    lines.append(comment('found environment hooks in workspaces'))
    generic_env_hooks = []
    specific_env_hooks = []
    # map filename -> recorded hook path, used to drop same-named hooks
    # seen in a previously visited workspace
    generic_env_hooks_by_filename = {}
    specific_env_hooks_by_filename = {}
    # generic hooks: '.sh' scripts ('.bat' on Windows); shell-specific hooks
    # use the extension of the shell recorded in CATKIN_SHELL (if any)
    generic_env_hook_ext = 'bat' if IS_WINDOWS else 'sh'
    specific_env_hook_ext = environ['CATKIN_SHELL'] if not IS_WINDOWS and 'CATKIN_SHELL' in environ and environ['CATKIN_SHELL'] else None
    # remove non-workspace paths
    workspaces = [path for path in cmake_prefix_path.split(os.pathsep) if path and os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE))]
    # iterate in reverse: a hook from a workspace listed earlier in
    # cmake_prefix_path is processed later and replaces a same-named hook
    for workspace in reversed(workspaces):
        env_hook_dir = os.path.join(workspace, 'etc', 'catkin', 'profile.d')
        if os.path.isdir(env_hook_dir):
            for filename in sorted(os.listdir(env_hook_dir)):
                if filename.endswith('.%s' % generic_env_hook_ext):
                    generic_env_hooks.append(os.path.join(env_hook_dir, filename))
                    # remove previous env hook with same name if present
                    if filename in generic_env_hooks_by_filename:
                        generic_env_hooks.remove(generic_env_hooks_by_filename[filename])
                    generic_env_hooks_by_filename[filename] = generic_env_hooks[-1]
                elif specific_env_hook_ext is not None and filename.endswith('.%s' % specific_env_hook_ext):
                    specific_env_hooks.append(os.path.join(env_hook_dir, filename))
                    # remove previous env hook with same name if present
                    if filename in specific_env_hooks_by_filename:
                        specific_env_hooks.remove(specific_env_hooks_by_filename[filename])
                    specific_env_hooks_by_filename[filename] = specific_env_hooks[-1]
    # expose the ordered hook list to the sourcing setup script
    lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS', os.pathsep.join(generic_env_hooks + specific_env_hooks)))
    return lines
def _parse_arguments(args=None):
parser = argparse.ArgumentParser(description='Generates code blocks for the setup.SHELL script.')
parser.add_argument('--extend', action='store_true', help='Skip unsetting previous environment variables to extend context')
return parser.parse_known_args(args=args)[0]
if __name__ == '__main__':
    try:
        args = _parse_arguments()
    except Exception as e:
        # argument errors are reported on stderr and abort with a non-zero code
        print(e, file=sys.stderr)
        exit(1)
    # snapshot of the environment at generation time
    environ = dict(os.environ)
    lines = []
    # undo changes from previously sourced workspaces unless extending the context
    if not args.extend:
        lines += rollback_env_variables(environ, ENV_VAR_SUBFOLDERS)
    lines += prepend_env_variables(environ, ENV_VAR_SUBFOLDERS, CMAKE_PREFIX_PATH)
    lines += find_env_hooks(environ, CMAKE_PREFIX_PATH)
    # emit the generated shell code for the calling setup script to evaluate
    print('\n'.join(lines))
    sys.exit(0)
| |
"""
Optimade support.
"""
import logging
import sys
from collections import namedtuple
from os.path import join
from typing import Dict, List, Optional, Union, Set
from urllib.parse import urlparse
import requests
from tqdm import tqdm
# from retrying import retry
from pymatgen.core.periodic_table import DummySpecies
from pymatgen.core.structure import Structure
from pymatgen.util.provenance import StructureNL
# TODO: importing optimade-python-tool's data structures will make more sense
# Lightweight record describing a single OPTIMADE provider endpoint.
Provider = namedtuple("Provider", ["name", "base_url", "description", "homepage", "prefix"])
# Module-level logger; warnings and errors are emitted to stdout.
_logger = logging.getLogger(__name__)
_handler = logging.StreamHandler(sys.stdout)
_logger.addHandler(_handler)
_logger.setLevel(logging.WARNING)
class OptimadeRester:
    """
    Class to call OPTIMADE-compliant APIs, see https://optimade.org and [1].
    This class is ready to use but considered in-development and subject to change.
    [1] Andersen, C.W., *et al*.
    OPTIMADE, an API for exchanging materials data.
    Sci Data 8, 217 (2021). https://doi.org/10.1038/s41597-021-00974-z
    """

    # regenerate on-demand from official providers.json using OptimadeRester.refresh_aliases()
    # these aliases are provided as a convenient shortcut for users of the OptimadeRester class
    aliases: Dict[str, str] = {
        "aflow": "http://aflow.org/API/optimade/",
        "cod": "https://www.crystallography.net/cod/optimade",
        "mcloud.2dstructures": "https://aiida.materialscloud.org/2dstructures/optimade",
        "mcloud.2dtopo": "https://aiida.materialscloud.org/2dtopo/optimade",
        "mcloud.curated-cofs": "https://aiida.materialscloud.org/curated-cofs/optimade",
        "mcloud.li-ion-conductors": "https://aiida.materialscloud.org/li-ion-conductors/optimade",
        "mcloud.optimade-sample": "https://aiida.materialscloud.org/optimade-sample/optimade",
        "mcloud.pyrene-mofs": "https://aiida.materialscloud.org/pyrene-mofs/optimade",
        "mcloud.scdm": "https://aiida.materialscloud.org/autowannier/optimade",
        "mcloud.sssp": "https://aiida.materialscloud.org/sssplibrary/optimade",
        "mcloud.stoceriaitf": "https://aiida.materialscloud.org/stoceriaitf/optimade",
        "mcloud.tc-applicability": "https://aiida.materialscloud.org/tc-applicability/optimade",
        "mcloud.threedd": "https://aiida.materialscloud.org/3dd/optimade",
        "mp": "https://optimade.materialsproject.org",
        "mpds": "https://api.mpds.io",
        "nmd": "https://nomad-lab.eu/prod/rae/optimade/",
        "odbx": "https://optimade.odbx.science",
        "omdb.omdb_production": "http://optimade.openmaterialsdb.se",
        "oqmd": "http://oqmd.org/optimade/",
        "tcod": "https://www.crystallography.net/tcod/optimade",
    }

    # The set of OPTIMADE fields that are required to define a `pymatgen.core.Structure`
    mandatory_response_fields: Set[str] = {"lattice_vectors", "cartesian_site_positions", "species", "species_at_sites"}

    def __init__(self, aliases_or_resource_urls: Optional[Union[str, List[str]]] = None, timeout: int = 5):
        """
        OPTIMADE is an effort to provide a standardized interface to retrieve information
        from many different materials science databases.
        This is a client to retrieve structures from OPTIMADE v1 compliant endpoints. It
        does not yet support all features of the OPTIMADE v1 specification but is intended
        as a way to quickly search an endpoint in a way familiar to users of pymatgen without
        needing to know the full OPTIMADE specification.
        For advanced usage, please see the OPTIMADE documentation at optimade.org and
        consider calling the APIs directly.
        For convenience, known OPTIMADE endpoints have been given aliases in pymatgen to save
        typing the full URL. The current list of aliases is:
        aflow, cod, mcloud.sssp, mcloud.2dstructures, mcloud.2dtopo, mcloud.tc-applicability,
        mcloud.threedd, mcloud.scdm, mcloud.curated-cofs, mcloud.optimade-sample, mcloud.stoceriaitf,
        mcloud.pyrene-mofs, mcloud.li-ion-conductors, mp, odbx, omdb.omdb_production, oqmd, tcod
        To refresh this list of aliases, generated from the current list of OPTIMADE providers
        at optimade.org, call the refresh_aliases() method.
        Args:
            aliases_or_resource_urls: the alias or structure resource URL or a list of
            aliases or resource URLs, if providing the resource URL directly it should not
            be an index, this interface can only currently access the "v1/structures"
            information from the specified resource URL
            timeout: number of seconds before an attempted request is abandoned, a good
            timeout is useful when querying many providers, some of which may be offline
        """
        # TODO: maybe we should use the nice pydantic models from optimade-python-tools
        # for response validation, and use the Lark parser for filter validation
        self.session = requests.Session()
        self._timeout = timeout  # seconds
        if isinstance(aliases_or_resource_urls, str):
            aliases_or_resource_urls = [aliases_or_resource_urls]
        # this stores a dictionary with keys provider id (in the same format as the aliases)
        # and values as the corresponding URL
        self.resources = {}
        if not aliases_or_resource_urls:
            aliases_or_resource_urls = list(self.aliases.keys())
            _logger.warning(
                "Connecting to all known OPTIMADE providers, this will be slow. Please connect to only the "
                f"OPTIMADE providers you want to query. Choose from: {', '.join(self.aliases.keys())}"
            )
        for alias_or_resource_url in aliases_or_resource_urls:
            if alias_or_resource_url in self.aliases:
                self.resources[alias_or_resource_url] = self.aliases[alias_or_resource_url]
            elif self._validate_provider(alias_or_resource_url):
                # TODO: unclear what the key should be here, the "prefix" is for the root provider,
                # may need to walk back to the index for the given provider to find the correct identifier
                self.resources[alias_or_resource_url] = alias_or_resource_url
            else:
                _logger.error(f"The following is not a known alias or a valid url: {alias_or_resource_url}")
        self._providers = {url: self._validate_provider(provider_url=url) for url in self.resources.values()}

    def __repr__(self):
        return f"OptimadeRester connected to: {', '.join(self.resources.values())}"

    def __str__(self):
        return self.describe()

    def describe(self):
        """
        Provides human-readable information about the resources being searched by the OptimadeRester.
        """
        provider_text = "\n".join(map(str, (provider for provider in self._providers.values() if provider)))
        description = f"OptimadeRester connected to:\n{provider_text}"
        return description

    # @retry(stop_max_attempt_number=3, wait_random_min=1000, wait_random_max=2000)
    def _get_json(self, url):
        """
        Retrieves JSON, will attempt to (politely) try again on failure subject to a
        random delay and a maximum number of attempts.
        """
        return self.session.get(url, timeout=self._timeout).json()

    @staticmethod
    def _build_filter(
        elements: Union[str, List[str]] = None,
        nelements: int = None,
        nsites: int = None,
        chemical_formula_anonymous: str = None,
        chemical_formula_hill: str = None,
    ):
        """
        Convenience method to build an OPTIMADE filter.

        Each non-None argument contributes one parenthesized clause; clauses
        are joined with " AND ". nsites and nelements also accept a
        [min, max] list/tuple for a closed range.
        """
        filters = []
        if elements:
            if isinstance(elements, str):
                elements = [elements]
            elements_str = ", ".join([f'"{el}"' for el in elements])
            filters.append(f"(elements HAS ALL {elements_str})")
        if nsites:
            if isinstance(nsites, (list, tuple)):
                filters.append(f"(nsites>={min(nsites)} AND nsites<={max(nsites)})")
            else:
                filters.append(f"(nsites={int(nsites)})")
        if nelements:
            if isinstance(nelements, (list, tuple)):
                filters.append(f"(nelements>={min(nelements)} AND nelements<={max(nelements)})")
            else:
                filters.append(f"(nelements={int(nelements)})")
        if chemical_formula_anonymous:
            filters.append(f'(chemical_formula_anonymous="{chemical_formula_anonymous}")')
        if chemical_formula_hill:
            # BUGFIX: previously interpolated chemical_formula_anonymous here,
            # producing a filter on the wrong value
            filters.append(f'(chemical_formula_hill="{chemical_formula_hill}")')
        return " AND ".join(filters)

    def get_structures(
        self,
        elements: Union[List[str], str] = None,
        nelements: int = None,
        nsites: int = None,
        chemical_formula_anonymous: str = None,
        chemical_formula_hill: str = None,
    ) -> Dict[str, Dict[str, Structure]]:
        """
        Retrieve Structures from OPTIMADE providers.
        Not all functionality of OPTIMADE is currently exposed in this convenience method. To
        use a custom filter, call get_structures_with_filter().
        Args:
            elements: List of elements
            nelements: Number of elements, e.g. 4 or [2, 5] for the range >=2 and <=5
            nsites: Number of sites, e.g. 4 or [2, 5] for the range >=2 and <=5
            chemical_formula_anonymous: Anonymous chemical formula
            chemical_formula_hill: Chemical formula following Hill convention
        Returns: Dict of (Dict Structures keyed by that database's id system) keyed by provider
        """
        optimade_filter = self._build_filter(
            elements=elements,
            nelements=nelements,
            nsites=nsites,
            chemical_formula_anonymous=chemical_formula_anonymous,
            chemical_formula_hill=chemical_formula_hill,
        )
        return self.get_structures_with_filter(optimade_filter)

    def get_snls(
        self,
        elements: Union[List[str], str] = None,
        nelements: int = None,
        nsites: int = None,
        chemical_formula_anonymous: str = None,
        chemical_formula_hill: str = None,
        additional_response_fields: Union[str, List[str], Set[str]] = None,
    ) -> Dict[str, Dict[str, StructureNL]]:
        """
        Retrieve StructureNL from OPTIMADE providers.
        A StructureNL is an object provided by pymatgen which combines Structure with
        associated metadata, such as the URL is was downloaded from and any additional namespaced
        data.
        Not all functionality of OPTIMADE is currently exposed in this convenience method. To
        use a custom filter, call get_structures_with_filter().
        Args:
            elements: List of elements
            nelements: Number of elements, e.g. 4 or [2, 5] for the range >=2 and <=5
            nsites: Number of sites, e.g. 4 or [2, 5] for the range >=2 and <=5
            chemical_formula_anonymous: Anonymous chemical formula
            chemical_formula_hill: Chemical formula following Hill convention
            additional_response_fields: Any additional fields desired from the OPTIMADE API,
            these will be stored under the `'_optimade'` key in each `StructureNL.data` dictionary.
        Returns: Dict of (Dict of StructureNLs keyed by that database's id system) keyed by provider
        """
        optimade_filter = self._build_filter(
            elements=elements,
            nelements=nelements,
            nsites=nsites,
            chemical_formula_anonymous=chemical_formula_anonymous,
            chemical_formula_hill=chemical_formula_hill,
        )
        return self.get_snls_with_filter(optimade_filter, additional_response_fields=additional_response_fields)

    def get_structures_with_filter(self, optimade_filter: str) -> Dict[str, Dict[str, Structure]]:
        """
        Get structures satisfying a given OPTIMADE filter.
        Args:
            optimade_filter: An OPTIMADE-compliant filter
        Returns: Dict of Structures keyed by that database's id system
        """
        all_snls = self.get_snls_with_filter(optimade_filter)
        all_structures = {}
        for identifier, snls_dict in all_snls.items():
            all_structures[identifier] = {k: snl.structure for k, snl in snls_dict.items()}
        return all_structures

    def get_snls_with_filter(
        self,
        optimade_filter: str,
        additional_response_fields: Union[str, List[str], Set[str]] = None,
    ) -> Dict[str, Dict[str, StructureNL]]:
        """
        Get structures satisfying a given OPTIMADE filter.
        Args:
            optimade_filter: An OPTIMADE-compliant filter
            additional_response_fields: Any additional fields desired from the OPTIMADE API
        Returns: Dict of Structures keyed by that database's id system
        """
        all_snls = {}
        fields = self._handle_response_fields(additional_response_fields)
        for identifier, resource in self.resources.items():
            url = join(resource, f"v1/structures?filter={optimade_filter}&response_fields={fields}")
            try:
                json = self._get_json(url)
                structures = self._get_snls_from_resource(json, url, identifier)
                pbar = tqdm(total=json["meta"].get("data_returned", 0), desc=identifier, initial=len(structures))
                # TODO: check spec for `more_data_available` boolean, may simplify this conditional
                if ("links" in json) and ("next" in json["links"]) and (json["links"]["next"]):
                    while "next" in json["links"] and json["links"]["next"]:
                        next_link = json["links"]["next"]
                        if isinstance(next_link, dict) and "href" in next_link:
                            next_link = next_link["href"]
                        json = self._get_json(next_link)
                        additional_structures = self._get_snls_from_resource(json, url, identifier)
                        structures.update(additional_structures)
                        pbar.update(len(additional_structures))
                # close the progress bar once pagination is exhausted so its
                # output does not interleave with the next provider's bar
                pbar.close()
                if structures:
                    all_snls[identifier] = structures
            except Exception as exc:
                # TODO: manually inspect failures to either (a) correct a bug or (b) raise more appropriate error
                _logger.error(
                    f"Could not retrieve required information from provider {identifier} and url {url}: {exc}"
                )
        return all_snls

    @staticmethod
    def _get_snls_from_resource(json, url, identifier) -> Dict[str, StructureNL]:
        """Parse one page of an OPTIMADE structures response into StructureNLs keyed by id."""
        snls = {}
        exceptions = set()

        def _sanitize_symbol(symbol):
            # map OPTIMADE placeholder species onto pymatgen dummy species
            if symbol == "vacancy":
                symbol = DummySpecies("X_vacancy", oxidation_state=None)
            elif symbol == "X":
                symbol = DummySpecies("X", oxidation_state=None)
            return symbol

        def _get_comp(sp_dict):
            return {
                _sanitize_symbol(symbol): conc
                for symbol, conc in zip(sp_dict["chemical_symbols"], sp_dict["concentration"])
            }

        for data in json["data"]:
            # TODO: check the spec! and remove this try/except (are all providers following spec?)
            # e.g. can check data["type"] == "structures"
            try:
                # e.g. COD
                structure = Structure(
                    lattice=data["attributes"]["lattice_vectors"],
                    species=[_get_comp(d) for d in data["attributes"]["species"]],
                    coords=data["attributes"]["cartesian_site_positions"],
                    coords_are_cartesian=True,
                )
                # Grab any custom fields or non-mandatory fields if they were requested
                namespaced_data = {
                    k: v
                    for k, v in data["attributes"].items()
                    if k.startswith("_") or k not in {"lattice_vectors", "species", "cartesian_site_positions"}
                }
                # TODO: follow `references` to add reference information here
                snl = StructureNL(
                    structure,
                    authors={},
                    history=[{"name": identifier, "url": url, "description": {"id": data["id"]}}],
                    data={"_optimade": namespaced_data},
                )
                snls[data["id"]] = snl
            # TODO: bare exception, remove...
            except Exception:
                try:
                    # e.g. MP (all ordered, no vacancies)
                    structure = Structure(
                        lattice=data["attributes"]["lattice_vectors"],
                        species=data["attributes"]["species_at_sites"],
                        coords=data["attributes"]["cartesian_site_positions"],
                        coords_are_cartesian=True,
                    )
                    # Grab any custom fields or non-mandatory fields if they were requested
                    namespaced_data = {
                        k: v
                        for k, v in data["attributes"].items()
                        if k.startswith("_") or k not in {"lattice_vectors", "species", "cartesian_site_positions"}
                    }
                    # TODO: follow `references` to add reference information here
                    snl = StructureNL(
                        structure,
                        authors={},
                        history=[{"name": identifier, "url": url, "description": {"id": data["id"]}}],
                        data={"_optimade": namespaced_data},
                    )
                    snls[data["id"]] = snl
                except Exception as exc:
                    if str(exc) not in exceptions:
                        exceptions.add(str(exc))
        if exceptions:
            _logger.error(f'Failed to parse returned data for {url}: {", ".join(exceptions)}')
        return snls

    def _validate_provider(self, provider_url) -> Optional[Provider]:
        """
        Checks that a given URL is indeed an OPTIMADE provider,
        returning None if it is not a provider, or the provider
        prefix if it is.
        TODO: careful reading of OPTIMADE specification required
        TODO: add better exception handling, intentionally permissive currently
        """

        def is_url(url):
            """
            Basic URL validation thanks to https://stackoverflow.com/a/52455972
            """
            try:
                result = urlparse(url)
                return all([result.scheme, result.netloc])
            except ValueError:
                return False

        if not is_url(provider_url):
            _logger.warning(f"An invalid url was supplied: {provider_url}")
            return None
        # compute url outside the try block so it is always bound when the
        # except clause references it
        url = join(provider_url, "v1/info")
        try:
            provider_info_json = self._get_json(url)
        except Exception as exc:
            _logger.warning(f"Failed to parse {url} when validating: {exc}")
            return None
        try:
            return Provider(
                name=provider_info_json["meta"].get("provider", {}).get("name", "Unknown"),
                base_url=provider_url,
                description=provider_info_json["meta"].get("provider", {}).get("description", "Unknown"),
                homepage=provider_info_json["meta"].get("provider", {}).get("homepage"),
                prefix=provider_info_json["meta"].get("provider", {}).get("prefix", "Unknown"),
            )
        except Exception as exc:
            _logger.warning(f"Failed to extract required information from {url}: {exc}")
            return None

    def _parse_provider(self, provider, provider_url) -> Dict[str, Provider]:
        """
        Used internally to update the list of providers or to
        check a given URL is valid.
        It does not raise exceptions but will instead _logger.warning and provide
        an empty dictionary in the case of invalid data.
        In future, when the specification is sufficiently well adopted,
        we might be more strict here.
        Args:
            provider: the provider prefix
            provider_url: An OPTIMADE provider URL
        Returns:
            A dictionary of keys (in format of "provider.database") to
            Provider objects.
        """
        # compute url outside the try block so it is always bound when the
        # except clause references it
        url = join(provider_url, "v1/links")
        try:
            provider_link_json = self._get_json(url)
        except Exception as exc:
            _logger.error(f"Failed to parse {url} when following links: {exc}")
            return {}

        def _parse_provider_link(provider, provider_link_json):
            """No validation attempted."""
            ps = {}
            try:
                d = [d for d in provider_link_json["data"] if d["attributes"]["link_type"] == "child"]
                for link in d:
                    key = f"{provider}.{link['id']}" if provider != link["id"] else provider
                    if link["attributes"]["base_url"]:
                        ps[key] = Provider(
                            name=link["attributes"]["name"],
                            base_url=link["attributes"]["base_url"],
                            description=link["attributes"]["description"],
                            homepage=link["attributes"].get("homepage"),
                            prefix=link["attributes"].get("prefix"),
                        )
            except Exception:
                # print(f"Failed to parse {provider}: {exc}")
                # Not all providers parse yet.
                pass
            return ps

        return _parse_provider_link(provider, provider_link_json)

    def _handle_response_fields(self, additional_response_fields: Union[str, List[str], Set[str]] = None) -> str:
        """
        Used internally to handle the mandatory and additional response fields.
        Args:
            additional_response_fields: A set of additional fields to request.
        Returns:
            A string of comma-separated OPTIMADE response fields.
        """
        if isinstance(additional_response_fields, str):
            additional_response_fields = [additional_response_fields]
        if not additional_response_fields:
            additional_response_fields = set()
        return ",".join(set(additional_response_fields).union(self.mandatory_response_fields))

    def refresh_aliases(self, providers_url="https://providers.optimade.org/providers.json"):
        """
        Updates available OPTIMADE structure resources based on the current list of OPTIMADE
        providers.
        """
        json = self._get_json(providers_url)
        providers_from_url = {
            entry["id"]: entry["attributes"]["base_url"] for entry in json["data"] if entry["attributes"]["base_url"]
        }
        structure_providers = {}
        for provider, provider_link in providers_from_url.items():
            structure_providers.update(self._parse_provider(provider, provider_link))
        self.aliases = {alias: provider.base_url for alias, provider in structure_providers.items()}

    # TODO: revisit context manager logic here and in MPRester
    def __enter__(self):
        """
        Support for "with" context.
        """
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Support for "with" context.
        """
        self.session.close()
| |
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Simple REST server that takes commands in a JSON payload
Interface to the :py:class:`~luigi.scheduler.CentralPlannerScheduler` class.
See :doc:`/central_scheduler` for more info.
"""
#
# Description: Added codes for visualization of how long each task takes
# running-time until it reaches the next status (failed or done)
# At "{base_url}/tasklist", all completed(failed or done) tasks are shown.
# At "{base_url}/tasklist", a user can select one specific task to see
# how its running-time has changed over time.
# At "{base_url}/tasklist/{task_name}", it visualizes a multi-bar graph
# that represents the changes of the running-time for a selected task
# up to the next status (failed or done).
# This visualization let us know how the running-time of the specific task
# has changed over time.
#
# Copyright 2015 Naver Corp.
# Author Yeseul Park (yeseul.park@navercorp.com)
#
import atexit
import json
import logging
import mimetypes
import os
import posixpath
import signal
import sys
import datetime
import time
import pkg_resources
import tornado.httpclient
import tornado.httpserver
import tornado.ioloop
import tornado.netutil
import tornado.web
from luigi import configuration
from luigi.scheduler import CentralPlannerScheduler
logger = logging.getLogger("luigi.server")
class RPCHandler(tornado.web.RequestHandler):
    """
    Handle remote scheduling calls using rpc.RemoteSchedulerResponder.
    """

    def initialize(self, scheduler):
        self._scheduler = scheduler

    def get(self, method):
        raw_payload = self.get_argument('data', default="{}")
        call_kwargs = json.loads(raw_payload)
        # TODO: we should probably denote all methods on the scheduler that are "API-level"
        # versus internal methods. Right now you can do a REST method call to any method
        # defined on the scheduler, which is pretty bad from a security point of view.
        if not hasattr(self._scheduler, method):
            self.send_error(404)
            return
        result = getattr(self._scheduler, method)(**call_kwargs)
        # wrap all json responses in a dictionary
        self.write({"response": result})

    # POST requests are handled identically to GET requests
    post = get
class BaseTaskHistoryHandler(tornado.web.RequestHandler):
    # Common base for the task-history views: stores the scheduler and
    # points tornado's template loader at the package's bundled templates.
    def initialize(self, scheduler):
        self._scheduler = scheduler

    def get_template_path(self):
        # templates ship inside the luigi package next to this module
        return pkg_resources.resource_filename(__name__, 'templates')
class AllRunHandler(BaseTaskHistoryHandler):
    """Render the menu page listing every task name with recorded runs."""

    def get(self):
        # show all tasks with their name list to be selected
        # why all tasks? the duration of the event history of a selected task
        # can be more than 24 hours.
        run_names = [task.name for task in self._scheduler.task_history.find_all_runs()]
        self.render("menu.html", tasknames=run_names)
class SelectedRunHandler(BaseTaskHistoryHandler):
    # Renders the running-time history page for the task named in the URL,
    # serialising per-status event data for the multi-bar graph.
    def get(self, name):
        tasks = {}
        statusResults = {}
        taskResults = []
        # get all tasks that has been updated
        all_tasks = self._scheduler.task_history.find_all_runs()
        # get events history for all tasks
        all_tasks_event_history = self._scheduler.task_history.find_all_events()
        for task in all_tasks:
            task_seq = task.id
            task_name = task.name
            # build the dictionary, tasks with index: id, value: task_name
            tasks[task_seq] = str(task_name)
        for task in all_tasks_event_history:
            # if the name of user-selected task is in tasks, get its task_id
            if tasks.get(task.task_id) == str(name):
                status = str(task.event_name)
                if status not in statusResults:
                    statusResults[status] = []
                # append the id, task_id, ts, y with 0, next_process with null
                # for the status(running/failed/done) of the selected task
                statusResults[status].append(({
                    'id': str(task.id), 'task_id': str(task.task_id),
                    'x': from_utc(str(task.ts)), 'y': 0, 'next_process': ''}))
                # append the id, task_name, task_id, status, datetime, timestamp
                # for the selected task
                taskResults.append({
                    'id': str(task.id), 'taskName': str(name), 'task_id': str(task.task_id),
                    'status': str(task.event_name), 'datetime': str(task.ts),
                    'timestamp': from_utc(str(task.ts))})
        # serialise to JSON, then unescape so the template can embed the data
        # directly in the rendered page
        statusResults = json.dumps(statusResults)
        taskResults = json.dumps(taskResults)
        statusResults = tornado.escape.xhtml_unescape(str(statusResults))
        taskResults = tornado.escape.xhtml_unescape(str(taskResults))
        self.render('history.html', name=name, statusResults=statusResults, taskResults=taskResults)
def from_utc(utcTime, fmt=None):
    """convert UTC time string to time.struct_time: change datetime.datetime to time, return time.struct_time type"""
    # try the explicit format when given, otherwise fall back to the two
    # supported timestamp layouts (with and without microseconds)
    if fmt is None:
        candidate_formats = ["%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"]
    else:
        candidate_formats = [fmt]
    for candidate in candidate_formats:
        try:
            parsed = datetime.datetime.strptime(utcTime, candidate)
        except ValueError:
            continue
        # NOTE: mktime interprets the struct in local time despite the name
        return int(time.mktime(parsed.timetuple()))
    raise ValueError("No UTC format matches {}".format(utcTime))
class RecentRunHandler(BaseTaskHistoryHandler):
    """Render the most recent task runs."""

    def get(self):
        latest_runs = self._scheduler.task_history.find_latest_runs()
        self.render("recent.html", tasks=latest_runs)
class ByNameHandler(BaseTaskHistoryHandler):
    """Render every recorded run of the task with the given name."""

    def get(self, name):
        matching_runs = self._scheduler.task_history.find_all_by_name(name)
        self.render("recent.html", tasks=matching_runs)
class ByIdHandler(BaseTaskHistoryHandler):
    """Render the details page for a single task identified by its id."""

    def get(self, id):
        found = self._scheduler.task_history.find_task_by_id(id)
        self.render("show.html", task=found)
class ByParamsHandler(BaseTaskHistoryHandler):
    """Render runs of a named task filtered by the JSON parameters in 'data'."""

    def get(self, name):
        raw_payload = self.get_argument('data', default="{}")
        params = json.loads(raw_payload)
        matching_runs = self._scheduler.task_history.find_all_by_parameters(name, session=None, **params)
        self.render("recent.html", tasks=matching_runs)
class StaticFileHandler(tornado.web.RequestHandler):
    """Serve files bundled under the luigi package's ``static`` directory."""

    def get(self, path):
        # Path checking taken from Flask's safe_join function:
        # https://github.com/mitsuhiko/flask/blob/1d55b8983/flask/helpers.py#L563-L587
        normalized = posixpath.normpath(path)
        if os.path.isabs(normalized) or normalized.startswith(".."):
            return self.send_error(404)
        content_type = mimetypes.types_map.get(os.path.splitext(normalized)[1])
        if content_type is not None:
            self.set_header("Content-Type", content_type)
        data = pkg_resources.resource_string(__name__, os.path.join("static", normalized))
        self.write(data)
class RootPathHandler(BaseTaskHistoryHandler):
    """Redirect the root URL to the configured visualiser front page."""

    def get(self):
        # only the "d3" setting selects the d3 visualiser; "svg" and any
        # other value fall back to the default page
        if self._scheduler._config.visualization_graph == "d3":
            self.redirect("/static/visualiser/index.d3.html")
        else:
            self.redirect("/static/visualiser/index.html")
def app(scheduler):
    """Build the tornado Application serving the RPC API, visualiser and history pages."""
    settings = {
        "static_path": os.path.join(os.path.dirname(__file__), "static"),
        "unescape": tornado.escape.xhtml_unescape,
    }
    # every history/RPC handler receives the scheduler via initialize()
    scheduler_kwargs = {"scheduler": scheduler}
    handlers = [
        (r'/api/(.*)', RPCHandler, scheduler_kwargs),
        (r'/static/(.*)', StaticFileHandler),
        (r'/', RootPathHandler, scheduler_kwargs),
        (r'/tasklist', AllRunHandler, scheduler_kwargs),
        (r'/tasklist/(.*?)', SelectedRunHandler, scheduler_kwargs),
        (r'/history', RecentRunHandler, scheduler_kwargs),
        (r'/history/by_name/(.*?)', ByNameHandler, scheduler_kwargs),
        (r'/history/by_id/(.*?)', ByIdHandler, scheduler_kwargs),
        (r'/history/by_params/(.*?)', ByParamsHandler, scheduler_kwargs),
    ]
    return tornado.web.Application(handlers, **settings)
def _init_api(scheduler, responder=None, api_port=None, address=None, unix_socket=None):
    """Bind the HTTP API to a TCP port (or unix socket) and start serving.

    Returns the bound socket names. Useful for connecting client in test scenarios.
    """
    if responder:
        raise Exception('The "responder" argument is no longer supported')
    if unix_socket is None:
        bound_sockets = tornado.netutil.bind_sockets(api_port, address=address)
    else:
        bound_sockets = [tornado.netutil.bind_unix_socket(unix_socket)]
    server = tornado.httpserver.HTTPServer(app(scheduler))
    server.add_sockets(bound_sockets)
    return [sock.getsockname() for sock in bound_sockets]
def run(api_port=8082, address=None, unix_socket=None, scheduler=None, responder=None):
    """
    Runs one instance of the API server.

    :param api_port: TCP port to serve on (ignored when unix_socket is given)
    :param address: optional address to bind to
    :param unix_socket: optional unix socket path to serve on instead of TCP
    :param scheduler: scheduler instance; a fresh CentralPlannerScheduler when None
    :param responder: deprecated, rejected by _init_api
    """
    if scheduler is None:
        scheduler = CentralPlannerScheduler()
    # load scheduler state
    scheduler.load()
    _init_api(
        scheduler=scheduler,
        responder=responder,
        api_port=api_port,
        address=address,
        unix_socket=unix_socket,
    )
    # prune work DAG every 60 seconds
    pruner = tornado.ioloop.PeriodicCallback(scheduler.prune, 60000)
    pruner.start()
    # signal handler may run before exit_handler is defined below only if a
    # signal arrives between these statements; in practice registration of the
    # OS handlers happens after both closures exist
    def shutdown_handler(signum, frame):
        exit_handler()
        sys.exit(0)
    # registered with atexit so state is dumped on any interpreter shutdown,
    # not only on a caught signal
    @atexit.register
    def exit_handler():
        logger.info("Scheduler instance shutting down")
        scheduler.dump()
        stop()
    signal.signal(signal.SIGINT, shutdown_handler)
    signal.signal(signal.SIGTERM, shutdown_handler)
    if os.name == 'nt':
        # Windows has no SIGQUIT; SIGBREAK is its closest equivalent
        signal.signal(signal.SIGBREAK, shutdown_handler)
    else:
        signal.signal(signal.SIGQUIT, shutdown_handler)
    logger.info("Scheduler starting up")
    # blocks until stop() is called on the IOLoop
    tornado.ioloop.IOLoop.instance().start()
def stop():
    """Stop the global tornado IOLoop driving the server."""
    loop = tornado.ioloop.IOLoop.instance()
    loop.stop()
if __name__ == "__main__":
run()
| |
import inspect
import functools
import warnings
from distutils.version import LooseVersion
import uuid
import marshmallow as ma
from marshmallow import validate, fields
from sqlalchemy.dialects import postgresql, mysql, mssql
from sqlalchemy.orm import SynonymProperty
import sqlalchemy as sa
from .exceptions import ModelConversionError
from .fields import Related, RelatedList
_META_KWARGS_DEPRECATED = LooseVersion(ma.__version__) >= LooseVersion("3.10.0")
def _is_field(value):
    """Return True when *value* is a marshmallow Field subclass (a class, not an instance)."""
    if not isinstance(value, type):
        return False
    return issubclass(value, fields.Field)
def _base_column(column):
"""Unwrap proxied columns"""
if column not in column.base_columns and len(column.base_columns) == 1:
[base] = column.base_columns
return base
return column
def _has_default(column):
return (
column.default is not None
or column.server_default is not None
or _is_auto_increment(column)
)
def _is_auto_increment(column):
return column.table is not None and column is column.table._autoincrement_column
def _postgres_array_factory(converter, data_type):
    """Build a List-field factory for a postgres ARRAY column type."""
    item_field = converter._get_field_class_for_data_type(data_type.item_type)
    return functools.partial(fields.List, item_field)
def _set_meta_kwarg(field_kwargs, key, value):
    """Store an extra kwarg where the installed marshmallow expects it:
    inside ``metadata`` for >= 3.10, at the top level otherwise."""
    target = field_kwargs["metadata"] if _META_KWARGS_DEPRECATED else field_kwargs
    target[key] = value
def _field_update_kwargs(field_class, field_kwargs, kwargs):
if not kwargs:
return field_kwargs
if isinstance(field_class, functools.partial):
# Unwrap partials, assuming that they bind a Field to arguments
field_class = field_class.func
possible_field_keywords = {
key
for cls in inspect.getmro(field_class)
for key, param in inspect.signature(cls).parameters.items()
if param.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
or param.kind is inspect.Parameter.KEYWORD_ONLY
}
for k, v in kwargs.items():
if k in possible_field_keywords:
field_kwargs[k] = v
else:
_set_meta_kwarg(field_kwargs, k, v)
return field_kwargs
class ModelConverter:
    """Class that converts a SQLAlchemy model into a dictionary of corresponding
    marshmallow `Fields <marshmallow.fields.Field>`.
    """

    # SQLAlchemy (and dialect-specific) column types mapped either to a
    # marshmallow field class or to a factory called as factory(converter,
    # data_type) returning one (see postgresql.ARRAY).
    SQLA_TYPE_MAPPING = {
        sa.Enum: fields.Field,
        sa.JSON: fields.Raw,
        postgresql.BIT: fields.Integer,
        postgresql.OID: fields.Integer,
        postgresql.UUID: fields.UUID,
        postgresql.MACADDR: fields.String,
        postgresql.INET: fields.String,
        postgresql.CIDR: fields.String,
        postgresql.JSON: fields.Raw,
        postgresql.JSONB: fields.Raw,
        postgresql.HSTORE: fields.Raw,
        postgresql.ARRAY: _postgres_array_factory,
        postgresql.MONEY: fields.Decimal,
        postgresql.DATE: fields.Date,
        postgresql.TIME: fields.Time,
        mysql.BIT: fields.Integer,
        mysql.YEAR: fields.Integer,
        mysql.SET: fields.List,
        mysql.ENUM: fields.Field,
        mysql.INTEGER: fields.Integer,
        mysql.DATETIME: fields.DateTime,
        mssql.BIT: fields.Integer,
        mssql.UNIQUEIDENTIFIER: fields.UUID,
    }
    # Relationship direction name -> True when the related side is a
    # collection (and the field should be wrapped in a RelatedList).
    DIRECTION_MAPPING = {"MANYTOONE": False, "MANYTOMANY": True, "ONETOMANY": True}

    def __init__(self, schema_cls=None):
        # Optional Schema class; its TYPE_MAPPING takes precedence over
        # marshmallow's default mapping (see `type_mapping`).
        self.schema_cls = schema_cls

    @property
    def type_mapping(self):
        """Python type -> marshmallow field class mapping in effect."""
        if self.schema_cls:
            return self.schema_cls.TYPE_MAPPING
        else:
            return ma.Schema.TYPE_MAPPING

    def fields_for_model(
        self,
        model,
        *,
        include_fk=False,
        include_relationships=False,
        fields=None,
        exclude=None,
        base_fields=None,
        dict_cls=dict,
    ):
        """Generate a dict of field_name -> Field for the mapped *model*.

        NOTE: the ``fields`` parameter (an allow-list of field names) shadows
        the module-level marshmallow ``fields`` import within this method.
        """
        result = dict_cls()
        base_fields = base_fields or {}
        for prop in model.__mapper__.iterate_properties:
            key = self._get_field_name(prop)
            if self._should_exclude_field(prop, fields=fields, exclude=exclude):
                # Allow marshmallow to validate and exclude the field key.
                result[key] = None
                continue
            if isinstance(prop, SynonymProperty):
                continue
            if hasattr(prop, "columns"):
                if not include_fk:
                    # Only skip a column if there is no overridden column
                    # which does not have a Foreign Key.
                    for column in prop.columns:
                        if not column.foreign_keys:
                            break
                    else:
                        continue
            if not include_relationships and hasattr(prop, "direction"):
                continue
            field = base_fields.get(key) or self.property2field(prop)
            if field:
                result[key] = field
        return result

    def fields_for_table(
        self,
        table,
        *,
        include_fk=False,
        fields=None,
        exclude=None,
        base_fields=None,
        dict_cls=dict,
    ):
        """Generate a dict of field_name -> Field for a plain Table object."""
        result = dict_cls()
        base_fields = base_fields or {}
        for column in table.columns:
            key = self._get_field_name(column)
            if self._should_exclude_field(column, fields=fields, exclude=exclude):
                # Allow marshmallow to validate and exclude the field key.
                result[key] = None
                continue
            if not include_fk and column.foreign_keys:
                continue
            # Overridden fields are specified relative to key generated by
            # self._get_key_for_column(...), rather than keys in source model
            field = base_fields.get(key) or self.column2field(column)
            if field:
                result[key] = field
        return result

    def property2field(self, prop, *, instance=True, field_class=None, **kwargs):
        """Convert a SQLAlchemy Property to a Field instance (or class when
        ``instance=False``); collection relationships are wrapped in RelatedList."""
        # handle synonyms
        # Attribute renamed "_proxied_object" in 1.4
        for attr in ("_proxied_property", "_proxied_object"):
            proxied_obj = getattr(prop, attr, None)
            if proxied_obj is not None:
                prop = proxied_obj
        field_class = field_class or self._get_field_class_for_property(prop)
        if not instance:
            return field_class
        field_kwargs = self._get_field_kwargs_for_property(prop)
        _field_update_kwargs(field_class, field_kwargs, kwargs)
        ret = field_class(**field_kwargs)
        if (
            hasattr(prop, "direction")
            and self.DIRECTION_MAPPING[prop.direction.name]
            and prop.uselist is True
        ):
            related_list_kwargs = _field_update_kwargs(
                RelatedList, self.get_base_kwargs(), kwargs
            )
            ret = RelatedList(ret, **related_list_kwargs)
        return ret

    def column2field(self, column, *, instance=True, **kwargs):
        """Convert a SQLAlchemy Column to a Field instance (or class when
        ``instance=False``)."""
        field_class = self._get_field_class_for_column(column)
        if not instance:
            return field_class
        field_kwargs = self.get_base_kwargs()
        self._add_column_kwargs(field_kwargs, column)
        _field_update_kwargs(field_class, field_kwargs, kwargs)
        return field_class(**field_kwargs)

    def field_for(self, model, property_name, **kwargs):
        """Convert a single named property of *model* to a Field.

        Handles association proxies (objects exposing ``remote_attr``) by
        converting the remote property and, when the local side is a
        collection, wrapping the result in a RelatedList.
        """
        target_model = model
        prop_name = property_name
        attr = getattr(model, property_name)
        remote_with_local_multiplicity = False
        if hasattr(attr, "remote_attr"):
            target_model = attr.target_class
            prop_name = attr.value_attr
            remote_with_local_multiplicity = attr.local_attr.prop.uselist
        prop = target_model.__mapper__.get_property(prop_name)
        converted_prop = self.property2field(prop, **kwargs)
        if remote_with_local_multiplicity:
            related_list_kwargs = _field_update_kwargs(
                RelatedList, self.get_base_kwargs(), kwargs
            )
            return RelatedList(converted_prop, **related_list_kwargs)
        else:
            return converted_prop

    def _get_field_name(self, prop_or_column):
        # The mapper key doubles as the generated schema field name.
        return prop_or_column.key

    def _get_field_class_for_column(self, column):
        return self._get_field_class_for_data_type(column.type)

    def _get_field_class_for_data_type(self, data_type):
        """Resolve the marshmallow field class for a SQLAlchemy type instance.

        Resolution order: SQLA_TYPE_MAPPING (walking the type's MRO), then the
        schema's python_type mapping, then the type's ``impl`` (for
        TypeDecorator-style wrappers). Raises ModelConversionError otherwise.
        """
        field_cls = None
        types = inspect.getmro(type(data_type))
        # First search for a field class from self.SQLA_TYPE_MAPPING
        for col_type in types:
            if col_type in self.SQLA_TYPE_MAPPING:
                field_cls = self.SQLA_TYPE_MAPPING[col_type]
                if callable(field_cls) and not _is_field(field_cls):
                    # Entry is a factory, not a Field subclass.
                    field_cls = field_cls(self, data_type)
                break
        else:
            # Try to find a field class based on the column's python_type
            try:
                python_type = data_type.python_type
            except NotImplementedError:
                python_type = None
            if python_type in self.type_mapping:
                field_cls = self.type_mapping[python_type]
            else:
                if hasattr(data_type, "impl"):
                    return self._get_field_class_for_data_type(data_type.impl)
                raise ModelConversionError(
                    f"Could not find field column of type {types[0]}."
                )
        return field_cls

    def _get_field_class_for_property(self, prop):
        # Relationships become Related fields; plain columns are resolved by type.
        if hasattr(prop, "direction"):
            field_cls = Related
        else:
            column = _base_column(prop.columns[0])
            field_cls = self._get_field_class_for_column(column)
        return field_cls

    def _merge_validators(self, defaults, new):
        """Combine generated validators with user-provided ones; a user
        validator replaces any default of the same class."""
        new_classes = [validator.__class__ for validator in new]
        return [
            validator
            for validator in defaults
            if validator.__class__ not in new_classes
        ] + new

    def _get_field_kwargs_for_property(self, prop):
        """Compute the constructor kwargs for the field generated from *prop*."""
        kwargs = self.get_base_kwargs()
        if hasattr(prop, "columns"):
            column = _base_column(prop.columns[0])
            self._add_column_kwargs(kwargs, column)
            prop = column
        if hasattr(prop, "direction"):  # Relationship property
            self._add_relationship_kwargs(kwargs, prop)
        if getattr(prop, "doc", None):  # Useful for documentation generation
            _set_meta_kwarg(kwargs, "description", prop.doc)
        info = getattr(prop, "info", dict())
        overrides = info.get("marshmallow")
        if overrides is not None:
            warnings.warn(
                'Passing `info={"marshmallow": ...}` is deprecated. '
                "Use `SQLAlchemySchema` and `auto_field` instead.",
                DeprecationWarning,
            )
            # NOTE: this local name shadows the marshmallow.validate module import.
            validate = overrides.pop("validate", [])
            kwargs["validate"] = self._merge_validators(
                kwargs["validate"], validate
            )  # Ensure we do not override the generated validators.
            kwargs.update(overrides)  # Override other kwargs.
        return kwargs

    def _add_column_kwargs(self, kwargs, column):
        """Add keyword arguments to kwargs (in-place) based on the passed in
        `Column <sqlalchemy.schema.Column>`.
        """
        if hasattr(column, "nullable"):
            if column.nullable:
                kwargs["allow_none"] = True
            kwargs["required"] = not column.nullable and not _has_default(column)
        # If there is no nullable attribute, we are dealing with a property
        # that does not derive from the Column class. Mark as dump_only.
        else:
            kwargs["dump_only"] = True

        if hasattr(column.type, "enums") and not kwargs.get("dump_only"):
            kwargs["validate"].append(validate.OneOf(choices=column.type.enums))

        # Add a length validator if a max length is set on the column
        # Skip UUID columns
        # (see https://github.com/marshmallow-code/marshmallow-sqlalchemy/issues/54)
        if hasattr(column.type, "length") and not kwargs.get("dump_only"):
            column_length = column.type.length
            if column_length is not None:
                try:
                    python_type = column.type.python_type
                except (AttributeError, NotImplementedError):
                    python_type = None
                if not python_type or not issubclass(python_type, uuid.UUID):
                    kwargs["validate"].append(validate.Length(max=column_length))

        if getattr(column.type, "asdecimal", False):
            kwargs["places"] = getattr(column.type, "scale", None)

    def _add_relationship_kwargs(self, kwargs, prop):
        """Add keyword arguments to kwargs (in-place) based on the passed in
        relationship `Property`.
        """
        nullable = True
        for pair in prop.local_remote_pairs:
            if not pair[0].nullable:
                if prop.uselist is True:
                    nullable = False
                break
        kwargs.update({"allow_none": nullable, "required": not nullable})

    def _should_exclude_field(self, column, fields=None, exclude=None):
        # ``fields`` is an allow-list; ``exclude`` a deny-list. Both optional.
        key = self._get_field_name(column)
        if fields and key not in fields:
            return True
        if exclude and key in exclude:
            return True
        return False

    def get_base_kwargs(self):
        """Baseline kwargs shared by every generated field."""
        kwargs = {"validate": []}
        if _META_KWARGS_DEPRECATED:
            # marshmallow >= 3.10: extra metadata must live in this dict.
            kwargs["metadata"] = {}
        return kwargs
default_converter = ModelConverter()
fields_for_model = default_converter.fields_for_model
"""Generate a dict of field_name: `marshmallow.fields.Field` pairs for the given model.
Note: SynonymProperties are ignored. Use an explicit field if you want to include a synonym.
:param model: The SQLAlchemy model
:param bool include_fk: Whether to include foreign key fields in the output.
:param bool include_relationships: Whether to include relationships fields in the output.
:return: dict of field_name: Field instance pairs
"""
property2field = default_converter.property2field
"""Convert a SQLAlchemy `Property` to a field instance or class.
:param Property prop: SQLAlchemy Property.
:param bool instance: If `True`, return `Field` instance, computing relevant kwargs
from the given property. If `False`, return the `Field` class.
:param kwargs: Additional keyword arguments to pass to the field constructor.
:return: A `marshmallow.fields.Field` class or instance.
"""
column2field = default_converter.column2field
"""Convert a SQLAlchemy `Column <sqlalchemy.schema.Column>` to a field instance or class.
:param sqlalchemy.schema.Column column: SQLAlchemy Column.
:param bool instance: If `True`, return `Field` instance, computing relevant kwargs
from the given property. If `False`, return the `Field` class.
:return: A `marshmallow.fields.Field` class or instance.
"""
field_for = default_converter.field_for
"""Convert a property for a mapped SQLAlchemy class to a marshmallow `Field`.
Example: ::
date_created = field_for(Author, 'date_created', dump_only=True)
author = field_for(Book, 'author')
:param type model: A SQLAlchemy mapped class.
:param str property_name: The name of the property to convert.
:param kwargs: Extra keyword arguments to pass to `property2field`
:return: A `marshmallow.fields.Field` class or instance.
"""
| |
"""Tests for autoreload extension.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012 IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
import sys
import tempfile
import textwrap
import shutil
import random
import time
from io import StringIO
import nose.tools as nt
import IPython.testing.tools as tt
from IPython.testing.decorators import skipif
from IPython.extensions.autoreload import AutoreloadMagics
from IPython.core.events import EventManager, pre_run_cell
#-----------------------------------------------------------------------------
# Test fixture
#-----------------------------------------------------------------------------
noop = lambda *a, **kw: None
class FakeShell(object):
    """Minimal stand-in for an IPython shell: a bare namespace plus just
    enough event plumbing for the autoreload magics to run."""

    def __init__(self):
        self.ns = {}
        # BUG FIX: this was EventManager(self, {'pre_run_cell', pre_run_cell})
        # -- a *set* literal (comma typo for a colon), which also registered
        # the callback function object itself as a bogus event name.
        # EventManager expects the event *names*; a dict iterates its keys.
        self.events = EventManager(self, {'pre_run_cell': pre_run_cell})
        self.auto_magics = AutoreloadMagics(shell=self)
        self.events.register('pre_run_cell', self.auto_magics.pre_run_cell)

    # The magics registration machinery calls these; the tests don't need them.
    register_magics = set_hook = noop

    def run_code(self, code):
        """Execute *code* in the fake user namespace, firing the pre-run
        event and the post-execute autoreload hook around it."""
        self.events.trigger('pre_run_cell')
        exec(code, self.ns)
        self.auto_magics.post_execute_hook()

    def push(self, items):
        """Merge *items* into the fake user namespace."""
        self.ns.update(items)

    def magic_autoreload(self, parameter):
        """Equivalent of running `%autoreload <parameter>`."""
        self.auto_magics.autoreload(parameter)

    def magic_aimport(self, parameter, stream=None):
        """Equivalent of running `%aimport <parameter>`, then reloading."""
        self.auto_magics.aimport(parameter, stream=stream)
        self.auto_magics.post_execute_hook()
class Fixture(object):
    """Fixture for creating test module files"""

    test_dir = None
    old_sys_path = None
    # Alphabet for random module names. FIX: the original string was
    # "abcdefghijklmopqrstuvwxyz..." -- it accidentally omitted 'n'.
    filename_chars = "abcdefghijklmnopqrstuvwxyz0123456789"

    def setUp(self):
        # Fresh temp dir on sys.path so generated modules are importable.
        self.test_dir = tempfile.mkdtemp()
        self.old_sys_path = list(sys.path)
        sys.path.insert(0, self.test_dir)
        self.shell = FakeShell()

    def tearDown(self):
        shutil.rmtree(self.test_dir)
        sys.path = self.old_sys_path

        self.test_dir = None
        self.old_sys_path = None
        self.shell = None

    def get_module(self):
        """Pick a fresh random module name; return (module_name, file_name)."""
        module_name = "tmpmod_" + "".join(random.sample(self.filename_chars, 20))
        if module_name in sys.modules:
            del sys.modules[module_name]
        file_name = os.path.join(self.test_dir, module_name + ".py")
        return module_name, file_name

    def write_file(self, filename, content):
        """
        Write a file, and force a timestamp difference of at least one second

        Notes
        -----
        Python's .pyc files record the timestamp of their compilation
        with a time resolution of one second.

        Therefore, we need to force a timestamp difference between .py
        and .pyc, without having the .py file be timestamped in the
        future, and without changing the timestamp of the .pyc file
        (because that is stored in the file).  The only reliable way
        to achieve this seems to be to sleep.
        """
        # Sleep one second + eps
        time.sleep(1.05)

        # Write (context manager replaces the original try/finally close)
        with open(filename, 'w') as f:
            f.write(content)

    def new_module(self, code):
        """Create a new module file containing *code*; return (name, path)."""
        mod_name, mod_fn = self.get_module()
        with open(mod_fn, 'w') as f:
            f.write(code)
        return mod_name, mod_fn
#-----------------------------------------------------------------------------
# Test automatic reloading
#-----------------------------------------------------------------------------
class TestAutoreload(Fixture):
    """End-to-end tests driving %autoreload / %aimport against real module
    files written to a temp directory (see Fixture)."""

    @skipif(sys.version_info < (3, 6))
    def test_reload_enums(self):
        # Reloading a module that redefines an Enum subclass must not print
        # an autoreload failure.
        import enum
        mod_name, mod_fn = self.new_module(textwrap.dedent("""
            from enum import Enum
            class MyEnum(Enum):
                A = 'A'
                B = 'B'
            """))
        self.shell.magic_autoreload("2")
        self.shell.magic_aimport(mod_name)
        self.write_file(mod_fn, textwrap.dedent("""
            from enum import Enum
            class MyEnum(Enum):
                A = 'A'
                B = 'B'
                C = 'C'
            """))
        with tt.AssertNotPrints(('[autoreload of %s failed:' % mod_name), channel='stderr'):
            self.shell.run_code("pass")  # trigger another reload

    def _check_smoketest(self, use_aimport=True):
        """
        Functional test for the automatic reloader using either
        '%autoreload 1' or '%autoreload 2'
        """
        mod_name, mod_fn = self.new_module("""
x = 9
z = 123 # this item will be deleted
def foo(y):
    return y + 3
class Baz(object):
    def __init__(self, x):
        self.x = x
    def bar(self, y):
        return self.x + y
    @property
    def quux(self):
        return 42
    def zzz(self):
        '''This method will be deleted below'''
        return 99
class Bar: # old-style class: weakref doesn't work for it on Python < 2.7
    def foo(self):
        return 1
""")

        #
        # Import module, and mark for reloading
        #
        if use_aimport:
            self.shell.magic_autoreload("1")
            self.shell.magic_aimport(mod_name)
            stream = StringIO()
            self.shell.magic_aimport("", stream=stream)
            nt.assert_in(("Modules to reload:\n%s" % mod_name), stream.getvalue())

            with nt.assert_raises(ImportError):
                self.shell.magic_aimport("tmpmod_as318989e89ds")
        else:
            self.shell.magic_autoreload("2")
            self.shell.run_code("import %s" % mod_name)
            stream = StringIO()
            self.shell.magic_aimport("", stream=stream)
            nt.assert_true("Modules to reload:\nall-except-skipped" in
                           stream.getvalue())
        nt.assert_in(mod_name, self.shell.ns)

        mod = sys.modules[mod_name]

        #
        # Test module contents
        #
        # Hold pre-reload references: superreload should patch these in place.
        old_foo = mod.foo
        old_obj = mod.Baz(9)
        old_obj2 = mod.Bar()

        def check_module_contents():
            nt.assert_equal(mod.x, 9)
            nt.assert_equal(mod.z, 123)

            nt.assert_equal(old_foo(0), 3)
            nt.assert_equal(mod.foo(0), 3)

            obj = mod.Baz(9)
            nt.assert_equal(old_obj.bar(1), 10)
            nt.assert_equal(obj.bar(1), 10)
            nt.assert_equal(obj.quux, 42)
            nt.assert_equal(obj.zzz(), 99)

            obj2 = mod.Bar()
            nt.assert_equal(old_obj2.foo(), 1)
            nt.assert_equal(obj2.foo(), 1)

        check_module_contents()

        #
        # Simulate a failed reload: no reload should occur and exactly
        # one error message should be printed
        #
        self.write_file(mod_fn, """
a syntax error
""")

        with tt.AssertPrints(('[autoreload of %s failed:' % mod_name), channel='stderr'):
            self.shell.run_code("pass") # trigger reload
        with tt.AssertNotPrints(('[autoreload of %s failed:' % mod_name), channel='stderr'):
            self.shell.run_code("pass") # trigger another reload
        check_module_contents()

        #
        # Rewrite module (this time reload should succeed)
        #
        self.write_file(mod_fn, """
x = 10
def foo(y):
    return y + 4
class Baz(object):
    def __init__(self, x):
        self.x = x
    def bar(self, y):
        return self.x + y + 1
    @property
    def quux(self):
        return 43
class Bar: # old-style class
    def foo(self):
        return 2
""")

        # Rebind to the post-reload expectations (shadows the earlier helper).
        def check_module_contents():
            nt.assert_equal(mod.x, 10)
            nt.assert_false(hasattr(mod, 'z'))

            nt.assert_equal(old_foo(0), 4) # superreload magic!
            nt.assert_equal(mod.foo(0), 4)

            obj = mod.Baz(9)
            nt.assert_equal(old_obj.bar(1), 11) # superreload magic!
            nt.assert_equal(obj.bar(1), 11)
            nt.assert_equal(old_obj.quux, 43)
            nt.assert_equal(obj.quux, 43)

            nt.assert_false(hasattr(old_obj, 'zzz'))
            nt.assert_false(hasattr(obj, 'zzz'))

            obj2 = mod.Bar()
            nt.assert_equal(old_obj2.foo(), 2)
            nt.assert_equal(obj2.foo(), 2)

        self.shell.run_code("pass") # trigger reload
        check_module_contents()

        #
        # Another failure case: deleted file (shouldn't reload)
        #
        os.unlink(mod_fn)

        self.shell.run_code("pass") # trigger reload
        check_module_contents()

        #
        # Disable autoreload and rewrite module: no reload should occur
        #
        if use_aimport:
            self.shell.magic_aimport("-" + mod_name)
            stream = StringIO()
            self.shell.magic_aimport("", stream=stream)
            nt.assert_true(("Modules to skip:\n%s" % mod_name) in
                           stream.getvalue())

            # This should succeed, although no such module exists
            self.shell.magic_aimport("-tmpmod_as318989e89ds")
        else:
            self.shell.magic_autoreload("0")

        self.write_file(mod_fn, """
x = -99
""")

        self.shell.run_code("pass") # trigger reload
        self.shell.run_code("pass")
        check_module_contents()

        #
        # Re-enable autoreload: reload should now occur
        #
        if use_aimport:
            self.shell.magic_aimport(mod_name)
        else:
            self.shell.magic_autoreload("")

        self.shell.run_code("pass") # trigger reload
        nt.assert_equal(mod.x, -99)

    def test_smoketest_aimport(self):
        self._check_smoketest(use_aimport=True)

    def test_smoketest_autoreload(self):
        self._check_smoketest(use_aimport=False)
| |
# See https://zulip.readthedocs.io/en/latest/subsystems/caching.html for docs
from functools import wraps
from django.utils.lru_cache import lru_cache
from django.core.cache import cache as djcache
from django.core.cache import caches
from django.conf import settings
from django.db.models import Q
from django.core.cache.backends.base import BaseCache
from django.http import HttpRequest
from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar, Tuple
from zerver.lib.utils import statsd, statsd_key, make_safe_digest
import time
import base64
import random
import sys
import os
import hashlib
if False:
# These modules have to be imported for type annotations but
# they cannot be imported at runtime due to cyclic dependency.
from zerver.models import UserProfile, Realm, Message
ReturnT = TypeVar('ReturnT')  # Useful for matching return types via Callable[..., ReturnT]

# Raised by get_cache_with_key-decorated functions on a cache miss; an
# exception is required because None is a legitimate cached value.
class NotFoundInCache(Exception):
    pass
# Per-process counters for remote cache round trips; a start/finish pair
# brackets every cache operation in this module.
remote_cache_time_start = 0.0
remote_cache_total_time = 0.0
remote_cache_total_requests = 0

def get_remote_cache_time() -> float:
    """Total seconds this process has spent inside cache operations."""
    return remote_cache_total_time

def get_remote_cache_requests() -> int:
    """Total number of cache operations this process has completed."""
    return remote_cache_total_requests

def remote_cache_stats_start() -> None:
    """Mark the start of a cache operation."""
    global remote_cache_time_start
    remote_cache_time_start = time.time()

def remote_cache_stats_finish() -> None:
    """Mark the end of a cache operation, accumulating count and elapsed time."""
    global remote_cache_total_time, remote_cache_total_requests
    elapsed = time.time() - remote_cache_time_start
    remote_cache_total_requests += 1
    remote_cache_total_time += elapsed
def get_or_create_key_prefix() -> str:
    """Return this deployment's memcached key prefix, creating and persisting
    a random one in var/remote_cache_prefix on first use."""
    if settings.CASPER_TESTS:
        # This sets the prefix for the benefit of the Casper tests.
        #
        # Having a fixed key is OK since we don't support running
        # multiple copies of the casper tests at the same time anyway.
        return 'casper_tests:'
    elif settings.TEST_SUITE:
        # The Python tests overwrite KEY_PREFIX on each test, but use
        # this codepath as well, just to save running the more complex
        # code below for reading the normal key prefix.
        return 'django_tests_unused:'

    # directory `var` should exist in production
    os.makedirs(os.path.join(settings.DEPLOY_ROOT, "var"), exist_ok=True)

    filename = os.path.join(settings.DEPLOY_ROOT, "var", "remote_cache_prefix")
    try:
        # O_CREAT | O_EXCL means exactly one process wins the race to create
        # the file; every other process lands in the except branch below.
        fd = os.open(filename, os.O_CREAT | os.O_EXCL | os.O_RDWR, 0o444)
        random_hash = hashlib.sha256(str(random.getrandbits(256)).encode('utf-8')).digest()
        prefix = base64.b16encode(random_hash)[:32].decode('utf-8').lower() + ':'
        # This does close the underlying file
        with os.fdopen(fd, 'w') as f:
            f.write(prefix + "\n")
    except OSError:
        # The file already exists; poll until the winning process has
        # finished writing it.
        tries = 1
        while tries < 10:  # NOTE(review): this allows at most 9 attempts, not 10
            with open(filename, 'r') as f:
                prefix = f.readline()[:-1]
            if len(prefix) == 33:
                # 32 hex chars + trailing ':' => a complete prefix.
                break
            tries += 1
            prefix = ''
            time.sleep(0.5)

    if not prefix:
        print("Could not read remote cache key prefix file")
        sys.exit(1)

    return prefix

KEY_PREFIX = get_or_create_key_prefix()  # type: str
def bounce_key_prefix_for_testing(test_name: str) -> None:
    """Give each test its own cache namespace, derived from the test name
    and this process's pid."""
    global KEY_PREFIX
    raw_prefix = test_name + ':' + str(os.getpid()) + ':'
    # We are taking the hash of the KEY_PREFIX to decrease the size of the key.
    # Memcached keys should have a length of less than 256.
    KEY_PREFIX = hashlib.sha1(raw_prefix.encode('utf-8')).hexdigest() + ":"
def get_cache_backend(cache_name: Optional[str]) -> BaseCache:
    """Map a cache name to its Django backend; None selects the default cache."""
    return djcache if cache_name is None else caches[cache_name]
def get_cache_with_key(
        keyfunc: Callable[..., str],
        cache_name: Optional[str]=None
) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]:
    """
    Like cache_with_key, but for reading only: returns the cached value for
    the computed key, and raises NotFoundInCache on a miss.  An exception is
    used (rather than returning None) because None is a valid cached value.
    """
    def decorator(func: Callable[..., ReturnT]) -> Callable[..., ReturnT]:
        @wraps(func)
        # BUG FIX: the wrapper's return annotation was Callable[..., ReturnT],
        # but it returns the cached value itself, i.e. ReturnT (matching the
        # correctly-annotated wrapper in cache_with_key).
        def func_with_caching(*args: Any, **kwargs: Any) -> ReturnT:
            key = keyfunc(*args, **kwargs)
            val = cache_get(key, cache_name=cache_name)
            # Values are stored as 1-tuples, so a cached None is still "found".
            if val is not None:
                return val[0]
            raise NotFoundInCache()
        return func_with_caching
    return decorator
def cache_with_key(
        keyfunc: Callable[..., str], cache_name: Optional[str]=None,
        timeout: Optional[int]=None, with_statsd_key: Optional[str]=None
) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]:
    """Decorator which applies Django caching to a function.

    Decorator argument is a function which computes a cache key
    from the original function's arguments. You are responsible
    for avoiding collisions with other uses of this decorator or
    other uses of caching."""

    def decorator(func: Callable[..., ReturnT]) -> Callable[..., ReturnT]:
        @wraps(func)
        def func_with_caching(*args: Any, **kwargs: Any) -> ReturnT:
            key = keyfunc(*args, **kwargs)
            cached = cache_get(key, cache_name=cache_name)

            # Report a hit/miss metric for this lookup to statsd.
            extra = ".dbcache" if cache_name == 'database' else ""
            metric_key = statsd_key(key) if with_statsd_key is None else with_statsd_key
            status = "hit" if cached is not None else "miss"
            statsd.incr("cache%s.%s.%s" % (extra, metric_key, status))

            # Values are singleton tuples so that we can distinguish
            # a result of None from a missing key.
            if cached is not None:
                return cached[0]

            result = func(*args, **kwargs)
            cache_set(key, result, cache_name=cache_name, timeout=timeout)
            return result

        return func_with_caching

    return decorator
def cache_set(key: str, val: Any, cache_name: Optional[str]=None, timeout: Optional[int]=None) -> None:
    """Store val under the KEY_PREFIX-namespaced key, timing the round trip.

    The value is wrapped in a 1-tuple so that a stored None can be
    distinguished from a missing key on read.
    """
    remote_cache_stats_start()
    backend = get_cache_backend(cache_name)
    backend.set(KEY_PREFIX + key, (val,), timeout=timeout)
    remote_cache_stats_finish()
def cache_get(key: str, cache_name: Optional[str]=None) -> Any:
    """Fetch the wrapped (1-tuple) value for key; None indicates a miss."""
    remote_cache_stats_start()
    backend = get_cache_backend(cache_name)
    result = backend.get(KEY_PREFIX + key)
    remote_cache_stats_finish()
    return result
def cache_get_many(keys: List[str], cache_name: Optional[str]=None) -> Dict[str, Any]:
    """Bulk-fetch values; the returned dict uses the caller's un-prefixed keys."""
    prefixed = [KEY_PREFIX + key for key in keys]
    remote_cache_stats_start()
    found = get_cache_backend(cache_name).get_many(prefixed)
    remote_cache_stats_finish()
    # Strip KEY_PREFIX back off so callers see the keys they passed in.
    return {key[len(KEY_PREFIX):]: value for key, value in found.items()}
def cache_set_many(items: Dict[str, Any], cache_name: Optional[str]=None,
                   timeout: Optional[int]=None) -> None:
    """Bulk-store values, namespacing every key with KEY_PREFIX."""
    prefixed_items = {KEY_PREFIX + key: value for key, value in items.items()}
    remote_cache_stats_start()
    get_cache_backend(cache_name).set_many(prefixed_items, timeout=timeout)
    remote_cache_stats_finish()
def cache_delete(key: str, cache_name: Optional[str]=None) -> None:
    """Remove a single KEY_PREFIX-namespaced key from the cache."""
    remote_cache_stats_start()
    backend = get_cache_backend(cache_name)
    backend.delete(KEY_PREFIX + key)
    remote_cache_stats_finish()
def cache_delete_many(items: Iterable[str], cache_name: Optional[str]=None) -> None:
    """Bulk-remove KEY_PREFIX-namespaced keys from the cache."""
    remote_cache_stats_start()
    prefixed = (KEY_PREFIX + item for item in items)
    get_cache_backend(cache_name).delete_many(prefixed)
    remote_cache_stats_finish()
# Generic_bulk_cached fetch and its helpers
# Type variables for generic_bulk_cached_fetch: lookup key, item, and the
# (possibly compressed) on-the-wire cache representation of an item.
ObjKT = TypeVar('ObjKT')
ItemT = TypeVar('ItemT')
CompressedItemT = TypeVar('CompressedItemT')

# Identity/default hooks used when the caller doesn't customize
# (de)serialization, id extraction, or cache-value transformation.
def default_extractor(obj: CompressedItemT) -> ItemT:
    """Identity: cached values are used as-is."""
    return obj  # type: ignore # Need a type assert that ItemT=CompressedItemT

def default_setter(obj: ItemT) -> CompressedItemT:
    """Identity: items are cached as-is."""
    return obj  # type: ignore # Need a type assert that ItemT=CompressedItemT

def default_id_fetcher(obj: ItemT) -> ObjKT:
    """Default lookup key: the object's .id attribute."""
    return obj.id  # type: ignore # Need ItemT/CompressedItemT typevars to be a Django protocol

def default_cache_transformer(obj: ItemT) -> ItemT:
    """Identity: the database object itself is what gets cached."""
    return obj
# Required Arguments are as follows:
# * object_ids: The list of object ids to look up
# * cache_key_function: object_id => cache key
# * query_function: [object_ids] => [objects from database]
# Optional keyword arguments:
# * setter: Function to call before storing items to cache (e.g. compression)
# * extractor: Function to call on items returned from cache
# (e.g. decompression). Should be the inverse of the setter
# function.
# * id_fetcher: Function mapping an object from database => object_id
# (in case we're using a key more complex than obj.id)
# * cache_transformer: Function mapping an object from database =>
# value for cache (in case the values that we're caching are some
# function of the objects, not the objects themselves)
def generic_bulk_cached_fetch(
        cache_key_function: Callable[[ObjKT], str],
        query_function: Callable[[List[ObjKT]], Iterable[Any]],
        object_ids: Iterable[ObjKT],
        extractor: Callable[[CompressedItemT], ItemT] = default_extractor,
        setter: Callable[[ItemT], CompressedItemT] = default_setter,
        id_fetcher: Callable[[ItemT], ObjKT] = default_id_fetcher,
        cache_transformer: Callable[[ItemT], ItemT] = default_cache_transformer
) -> Dict[ObjKT, ItemT]:
    """Fetch objects for object_ids, reading from the cache where possible
    and querying (then caching) only the misses.

    :param cache_key_function: object_id => cache key
    :param query_function: [object_ids] => [objects from database]
    :param extractor: applied to items read from the cache (e.g. decompression);
        inverse of setter
    :param setter: applied to items before writing them to the cache
    :param id_fetcher: database object => object_id (for keys more complex
        than obj.id)
    :param cache_transformer: database object => value to cache (when the
        cached values are a function of the objects, not the objects themselves)
    :return: dict of object_id => item for every id that was found
    """
    # ROBUSTNESS FIX: materialize once.  The body iterates object_ids three
    # times, which would silently drop data for a one-shot iterator/generator
    # argument (the parameter is typed Iterable).
    object_ids = list(object_ids)
    cache_keys = {}  # type: Dict[ObjKT, str]
    for object_id in object_ids:
        cache_keys[object_id] = cache_key_function(object_id)
    cached_objects_compressed = cache_get_many([cache_keys[object_id]
                                                for object_id in object_ids])  # type: Dict[str, Tuple[CompressedItemT]]
    cached_objects = {}  # type: Dict[str, ItemT]
    for (key, val) in cached_objects_compressed.items():
        cached_objects[key] = extractor(cached_objects_compressed[key][0])
    needed_ids = [object_id for object_id in object_ids if
                  cache_keys[object_id] not in cached_objects]
    db_objects = query_function(needed_ids)

    items_for_remote_cache = {}  # type: Dict[str, Tuple[CompressedItemT]]
    for obj in db_objects:
        key = cache_keys[id_fetcher(obj)]
        item = cache_transformer(obj)
        items_for_remote_cache[key] = (setter(item),)
        cached_objects[key] = item
    if len(items_for_remote_cache) > 0:
        cache_set_many(items_for_remote_cache)
    return dict((object_id, cached_objects[cache_keys[object_id]]) for object_id in object_ids
                if cache_keys[object_id] in cached_objects)
def cache(func: Callable[..., ReturnT]) -> Callable[..., ReturnT]:
    """Decorator which applies Django caching to a function.

    Uses a key based on the function's name, filename, and
    the repr() of its arguments."""
    func_uniqifier = '%s-%s' % (func.__code__.co_filename, func.__name__)

    @wraps(func)
    def keyfunc(*args: Any, **kwargs: Any) -> str:
        # Django complains about spaces because memcached rejects them;
        # '-' is the escape character, so it must itself be doubled first.
        raw_key = func_uniqifier + repr((args, kwargs))
        return raw_key.replace('-', '--').replace(' ', '-s')

    return cache_with_key(keyfunc)(func)
# ---- Cache key builders ----
# Each helper returns the canonical cache key for one kind of cached object.
def preview_url_cache_key(url: str) -> str:
    return "preview_url:%s" % (make_safe_digest(url),)

def display_recipient_cache_key(recipient_id: int) -> str:
    return "display_recipient_dict:%d" % (recipient_id,)

def user_profile_by_email_cache_key(email: str) -> str:
    # See the comment in zerver/lib/avatar_hash.py:gravatar_hash for why we
    # are proactively encoding email addresses even though they will
    # with high likelihood be ASCII-only for the foreseeable future.
    return 'user_profile_by_email:%s' % (make_safe_digest(email.strip()),)

def user_profile_cache_key_id(email: str, realm_id: int) -> str:
    return u"user_profile:%s:%s" % (make_safe_digest(email.strip()), realm_id,)

def user_profile_cache_key(email: str, realm: 'Realm') -> str:
    return user_profile_cache_key_id(email, realm.id)

def bot_profile_cache_key(email: str) -> str:
    return "bot_profile:%s" % (make_safe_digest(email.strip()),)

def user_profile_by_id_cache_key(user_profile_id: int) -> str:
    return "user_profile_by_id:%s" % (user_profile_id,)

def user_profile_by_api_key_cache_key(api_key: str) -> str:
    return "user_profile_by_api_key:%s" % (api_key,)
# UserProfile fields included in the per-realm realm_user_dicts cache entry;
# a save touching any of these invalidates that entry (see flush_user_profile).
realm_user_dict_fields = [
    'id', 'full_name', 'short_name', 'email',
    'avatar_source', 'avatar_version', 'is_active',
    'is_realm_admin', 'is_bot', 'realm_id', 'timezone',
    'date_joined', 'is_guest', 'bot_owner_id'
]  # type: List[str]

def realm_user_dicts_cache_key(realm_id: int) -> str:
    return "realm_user_dicts:%s" % (realm_id,)

def get_realm_used_upload_space_cache_key(realm: 'Realm') -> str:
    return u'realm_used_upload_space:%s' % (realm.id,)

def active_user_ids_cache_key(realm_id: int) -> str:
    return "active_user_ids:%s" % (realm_id,)

def active_non_guest_user_ids_cache_key(realm_id: int) -> str:
    return "active_non_guest_user_ids:%s" % (realm_id,)
bot_dict_fields = ['id', 'full_name', 'short_name', 'bot_type', 'email',
'is_active', 'default_sending_stream__name',
'realm_id',
'default_events_register_stream__name',
'default_all_public_streams', 'api_key',
'bot_owner__email', 'avatar_source',
'avatar_version'] # type: List[str]
def bot_dicts_in_realm_cache_key(realm: 'Realm') -> str:
    """Cache key for the list of bot dicts in a realm."""
    realm_id = realm.id
    return "bot_dicts_in_realm:%s" % (realm_id,)
def get_stream_cache_key(stream_name: str, realm_id: int) -> str:
    """Cache key for a Stream, keyed by realm id and the digested,
    case-folded stream name."""
    name_digest = make_safe_digest(stream_name.strip().lower())
    return "stream_by_realm_and_name:%s:%s" % (realm_id, name_digest)
def delete_user_profile_caches(user_profiles: Iterable['UserProfile']) -> None:
    """Flush every cache entry keyed on any of the given user profiles:
    by delivery email, by id, by each API key, and by (email, realm)."""
    # Imported here to avoid cyclic dependency.
    from zerver.lib.users import get_all_api_keys
    keys = []
    for user_profile in user_profiles:
        keys.append(user_profile_by_email_cache_key(user_profile.delivery_email))
        keys.append(user_profile_by_id_cache_key(user_profile.id))
        keys.extend(user_profile_by_api_key_cache_key(api_key)
                    for api_key in get_all_api_keys(user_profile))
        keys.append(user_profile_cache_key(user_profile.email, user_profile.realm))
    cache_delete_many(keys)
def delete_display_recipient_cache(user_profile: 'UserProfile') -> None:
    """Flush the display_recipient cache entry for every recipient the
    user has a Subscription row for."""
    # Imported here to avoid cyclic dependency.
    from zerver.models import Subscription
    subscriptions = Subscription.objects.filter(user_profile=user_profile)
    recipient_ids = subscriptions.values_list('recipient_id', flat=True)
    cache_delete_many([display_recipient_cache_key(recipient_id)
                       for recipient_id in recipient_ids])
def changed(kwargs: Any, fields: List[str]) -> bool:
    """Return True if a save signal's update_fields intersects fields.

    A save with update_fields=None (a full save, create, or delete)
    always counts as changed.
    """
    update_fields = kwargs.get('update_fields')
    if update_fields is None:
        # adds/deletes should invalidate the cache
        return True
    updated = set(update_fields)
    return any(field in updated for field in fields)
# Called by models.py to flush the user_profile cache whenever we save
# a user_profile object
def flush_user_profile(sender: Any, **kwargs: Any) -> None:
    """post_save/post_delete handler: invalidate every cache derived from
    a UserProfile.

    Uses changed() against kwargs['update_fields'] so that targeted
    saves only flush the caches whose source fields were actually
    touched; a full save (update_fields=None) flushes everything.
    """
    user_profile = kwargs['instance']
    # Always flush the per-user keys (by email, id, API key, realm).
    delete_user_profile_caches([user_profile])
    # Invalidate our active_users_in_realm info dict if any user has changed
    # the fields in the dict or become (in)active
    if changed(kwargs, realm_user_dict_fields):
        cache_delete(realm_user_dicts_cache_key(user_profile.realm_id))
    if changed(kwargs, ['is_active']):
        cache_delete(active_user_ids_cache_key(user_profile.realm_id))
        cache_delete(active_non_guest_user_ids_cache_key(user_profile.realm_id))
    if changed(kwargs, ['is_guest']):
        cache_delete(active_non_guest_user_ids_cache_key(user_profile.realm_id))
    # Fields that feed into display_recipient rendering.
    if changed(kwargs, ['email', 'full_name', 'short_name', 'id', 'is_mirror_dummy']):
        delete_display_recipient_cache(user_profile)
    # Invalidate our bots_in_realm info dict if any bot has
    # changed the fields in the dict or become (in)active
    if user_profile.is_bot and changed(kwargs, bot_dict_fields):
        cache_delete(bot_dicts_in_realm_cache_key(user_profile.realm))
    # Invalidate realm-wide alert words cache if any user in the realm has changed
    # alert words
    if changed(kwargs, ['alert_words']):
        cache_delete(realm_alert_words_cache_key(user_profile.realm))
        cache_delete(realm_alert_words_automaton_cache_key(user_profile.realm))
# Called by models.py to flush various caches whenever we save
# a Realm object.  The main tricky thing here is that Realm info is
# generally cached indirectly through user_profile objects.
def flush_realm(sender: Any, **kwargs: Any) -> None:
    """post_save handler: flush caches derived from a Realm.

    NOTE(review): this reads kwargs["update_fields"] directly instead of
    using changed(); it assumes the signal always supplies that key,
    which is true for Django's post_save.  Also, the description caches
    are flushed both in the deactivation/string_id branch and in the
    changed(['description']) branch — redundant when both apply, but
    harmless.
    """
    realm = kwargs['instance']
    # Realm data is cached per-user, so flush every active user's keys.
    users = realm.get_active_users()
    delete_user_profile_caches(users)
    # A deactivation or a rename of the realm's string_id invalidates
    # essentially every realm-scoped cache.
    if realm.deactivated or (kwargs["update_fields"] is not None and
                             "string_id" in kwargs['update_fields']):
        cache_delete(realm_user_dicts_cache_key(realm.id))
        cache_delete(active_user_ids_cache_key(realm.id))
        cache_delete(bot_dicts_in_realm_cache_key(realm))
        cache_delete(realm_alert_words_cache_key(realm))
        cache_delete(realm_alert_words_automaton_cache_key(realm))
        cache_delete(active_non_guest_user_ids_cache_key(realm.id))
        cache_delete(realm_rendered_description_cache_key(realm))
        cache_delete(realm_text_description_cache_key(realm))
    if changed(kwargs, ['description']):
        cache_delete(realm_rendered_description_cache_key(realm))
        cache_delete(realm_text_description_cache_key(realm))
def realm_alert_words_cache_key(realm: 'Realm') -> str:
    """Cache key for the realm-wide alert words, keyed by string_id."""
    subdomain = realm.string_id
    return "realm_alert_words:%s" % (subdomain,)
def realm_alert_words_automaton_cache_key(realm: 'Realm') -> str:
    """Cache key for the realm's alert-words automaton, keyed by string_id."""
    subdomain = realm.string_id
    return "realm_alert_words_automaton:%s" % (subdomain,)
def realm_rendered_description_cache_key(realm: 'Realm') -> str:
    """Cache key for the realm's rendered description, keyed by string_id."""
    subdomain = realm.string_id
    return "realm_rendered_description:%s" % (subdomain,)
def realm_text_description_cache_key(realm: 'Realm') -> str:
    """Cache key for the realm's plain-text description, keyed by string_id."""
    subdomain = realm.string_id
    return "realm_text_description:%s" % (subdomain,)
# Called by models.py to flush the stream cache whenever we save a stream
# object.
def flush_stream(sender: Any, **kwargs: Any) -> None:
    """post_save handler: refresh the stream cache entry and, when the
    stream may have been renamed, flush the realm's bot dicts cache.

    Bug fix: the previous condition parsed as
        update_fields is None or ('name' in update_fields and <bots exist>)
    because `and` binds tighter than `or`, so a full save flushed the
    bot_dicts cache even when no bot referenced the stream.  The intended
    grouping — (full save OR name changed) AND some bot uses the stream
    as a default — is applied here.
    """
    from zerver.models import UserProfile
    stream = kwargs['instance']
    # Proactively repopulate the per-stream cache entry.
    items_for_remote_cache = {}
    items_for_remote_cache[get_stream_cache_key(stream.name, stream.realm_id)] = (stream,)
    cache_set_many(items_for_remote_cache)
    # bot_dicts embed default stream names, so a rename invalidates them.
    if (kwargs.get('update_fields') is None or 'name' in kwargs['update_fields']) and \
            UserProfile.objects.filter(
                Q(default_sending_stream=stream) |
                Q(default_events_register_stream=stream)).exists():
        cache_delete(bot_dicts_in_realm_cache_key(stream.realm))
def flush_used_upload_space_cache(sender: Any, **kwargs: Any) -> None:
    """Signal handler: drop the realm's used-upload-space total whenever
    an attachment is created or deleted (created is None on delete)."""
    attachment = kwargs['instance']
    created = kwargs.get("created")
    if created is None or created is True:
        cache_delete(get_realm_used_upload_space_cache_key(attachment.owner.realm))
def to_dict_cache_key_id(message_id: int) -> str:
    """Cache key for a message's to_dict representation, by message id."""
    key_format = 'message_dict:%d'
    return key_format % message_id
def to_dict_cache_key(message: 'Message') -> str:
    """Cache key for a Message object's to_dict representation."""
    message_id = message.id
    return to_dict_cache_key_id(message_id)
def open_graph_description_cache_key(content: Any, request: HttpRequest) -> str:
    """Cache key for a page's Open Graph description, keyed on the
    digested request path (the content argument is not part of the key)."""
    path_digest = make_safe_digest(request.META['PATH_INFO'])
    return 'open_graph_description_path:%s' % (path_digest,)
def flush_message(sender: Any, **kwargs: Any) -> None:
    """post_save handler: drop the cached to_dict of the saved message."""
    saved_message = kwargs['instance']
    cache_delete(to_dict_cache_key_id(saved_message.id))
def flush_submessage(sender: Any, **kwargs: Any) -> None:
    """post_save handler for SubMessage: flush the parent message's
    cached to_dict, since submessages are cached only as part of their
    parent message."""
    submessage = kwargs['instance']
    parent_message_id = submessage.message_id
    cache_delete(to_dict_cache_key_id(parent_message_id))
# Type alias for a plain function decorator (callable -> callable).
DECORATOR = Callable[[Callable[..., Any]], Callable[..., Any]]
def ignore_unhashable_lru_cache(maxsize: int=128, typed: bool=False) -> DECORATOR:
    """
    This is a wrapper over lru_cache function. It adds following features on
    top of lru_cache:

        * It will not cache result of functions with unhashable arguments.
        * It will clear cache whenever zerver.lib.cache.KEY_PREFIX changes.
    """
    internal_decorator = lru_cache(maxsize=maxsize, typed=typed)
    def decorator(user_function: Callable[..., Any]) -> Callable[..., Any]:
        if settings.DEVELOPMENT and not settings.TEST_SUITE:  # nocoverage
            # In the development environment, we want every file
            # change to refresh the source files from disk.
            return user_function
        cache_enabled_user_function = internal_decorator(user_function)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            # The current KEY_PREFIX is stashed as an attribute on the
            # lru_cache-wrapped function so we can detect when it changes.
            if not hasattr(cache_enabled_user_function, 'key_prefix'):
                cache_enabled_user_function.key_prefix = KEY_PREFIX
            if cache_enabled_user_function.key_prefix != KEY_PREFIX:
                # Clear cache when cache.KEY_PREFIX changes. This is used in
                # tests.
                cache_enabled_user_function.cache_clear()
                cache_enabled_user_function.key_prefix = KEY_PREFIX
            try:
                return cache_enabled_user_function(*args, **kwargs)
            except TypeError:
                # args or kwargs contains an element which is unhashable. In
                # this case we don't cache the result.
                pass
            # Deliberately calling this function from outside of exception
            # handler to get a more descriptive traceback. Otherwise the
            # traceback can include the exception from
            # cache_enabled_user_function as well.
            return user_function(*args, **kwargs)
        # Re-expose lru_cache's introspection hooks on the wrapper.
        setattr(wrapper, 'cache_info', cache_enabled_user_function.cache_info)
        setattr(wrapper, 'cache_clear', cache_enabled_user_function.cache_clear)
        return wrapper
    return decorator
def dict_to_items_tuple(user_function: Callable[..., Any]) -> Callable[..., Any]:
    """Decorator: replace each positional dict argument with a sorted
    tuple of its items (a hashable stand-in for the dict)."""
    def as_items_tuple(value: Any) -> Any:
        if isinstance(value, dict):
            return tuple(sorted(value.items()))
        return value

    def wrapper(*args: Any, **kwargs: Any) -> Any:
        converted = (as_items_tuple(value) for value in args)
        return user_function(*converted, **kwargs)
    return wrapper
def items_tuple_to_dict(user_function: Callable[..., Any]) -> Callable[..., Any]:
    """Decorator: convert tuple-of-pairs arguments (positional and
    keyword) back into dicts; tuples that are not valid item sequences
    pass through unchanged."""
    def as_dict(value: Any) -> Any:
        if isinstance(value, tuple):
            try:
                return dict(value)
            except TypeError:
                pass
        return value

    def wrapper(*args: Any, **kwargs: Any) -> Any:
        restored_args = tuple(as_dict(value) for value in args)
        restored_kwargs = {name: as_dict(value) for name, value in kwargs.items()}
        return user_function(*restored_args, **restored_kwargs)
    return wrapper
| |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""RL-LIM Experiments on three synthetic datasets.
Recovering local dynamics using RL-LIM with Synthetic datasets
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import numpy as np
from sklearn import linear_model
from rllim import data_loading
from rllim import rllim
from rllim import rllim_metrics
def main(args):
  """Runs the RL-LIM synthetic-data experiment end to end.

  Steps: load the synthetic dataset, fit an interpretable (ridge)
  baseline, train the instance-wise weight estimator, produce
  interpretable predictions and local explanations, then report
  fidelity (MAE) and AWD metrics with plots.

  Args:
    args: data_name, train_no, probe_no, test_no,
      seed, hyperparameters, network parameters
  """
  data_name = args.data_name
  # Sample counts and feature dimensionality for each split.
  dict_no = {
      'train': args.train_no,
      'probe': args.probe_no,
      'test': args.test_no,
      'dim': args.dim_no,
  }
  # Network / training hyper-parameters of the weight estimator.
  parameters = {
      'hidden_dim': args.hidden_dim,
      'iterations': args.iterations,
      'num_layers': args.num_layers,
      'batch_size': args.batch_size,
      'batch_size_inner': args.batch_size_inner,
      'lambda': args.hyper_lambda,
  }
  checkpoint_file_name = args.checkpoint_file_name
  # Loads data.
  x_train, y_train, x_probe, y_probe, x_test, y_test, c_test = \
      data_loading.load_synthetic_data(data_name, dict_no, args.seed)
  print('Finish ' + str(data_name) + ' data loading')
  # Trains the interpretable baseline model.
  baseline = linear_model.Ridge(alpha=1)
  baseline.fit(x_train, y_train)
  print('Finished interpretable baseline training.')
  # Defines the locally interpretable model and trains the
  # instance-wise weight estimator.
  interp_model = linear_model.Ridge(alpha=1)
  rllim_class = rllim.Rllim(x_train, y_train, x_probe, y_probe,
                            parameters, interp_model,
                            baseline, checkpoint_file_name)
  rllim_class.rllim_train()
  print('Finished instance-wise weight estimator training.')
  # Interpretable inference: local explanations (test_coef) and
  # interpretable predictions (test_y_fit).
  test_y_fit, test_coef = \
      rllim_class.rllim_interpreter(x_train, y_train, x_test, interp_model)
  print('Finished interpretable predictions and local explanations.')
  # Fidelity of the interpretable predictions.
  mae = rllim_metrics.fidelity_metrics(y_test, test_y_fit, metric='mae')
  print('fidelity of RL-LIM in terms of MAE: ' + str(np.round(mae, 4)))
  # Absolute Weight Differences between ground-truth local dynamics and
  # the dynamics estimated by RL-LIM.
  awd = rllim_metrics.awd_metric(c_test, test_coef)
  print('AWD of RL-LIM: ' + str(np.round(awd, 4)))
  # Fidelity and AWD plots.
  rllim_metrics.plot_result(x_test, data_name, y_test, test_y_fit,
                            c_test, test_coef,
                            metric='mae', criteria='Fidelity')
  rllim_metrics.plot_result(x_test, data_name, y_test, test_y_fit,
                            c_test, test_coef,
                            metric='mae', criteria='AWD')
if __name__ == '__main__':
  # CLI options as (flag, help text, default, type), registered in the
  # documented order so --help output is unchanged.
  arg_specs = [
      ('--data_name', 'Synthetic data name (Syn1 or Syn2 or Syn3)',
       'Syn1', str),
      ('--train_no', 'number of training samples', 1000, int),
      ('--probe_no', 'number of probe samples', 100, int),
      ('--test_no', 'number of testing samples', 1000, int),
      ('--dim_no', 'number of feature dimensions', 11, int),
      ('--seed', 'random seed', 0, int),
      ('--hyper_lambda', 'main hyper-parameter of RL-LIM (lambda)',
       1.0, float),
      ('--hidden_dim', 'dimensions of hidden states', 100, int),
      ('--num_layers', 'number of network layers', 5, int),
      ('--iterations', 'number of iterations', 2000, int),
      ('--batch_size', 'number of batch size for RL', 900, int),
      ('--batch_size_inner', 'number of batch size for inner iterations',
       10, int),
      ('--checkpoint_file_name',
       'file name for saving and loading the trained model',
       './tmp/model.ckpt', str),
  ]
  parser = argparse.ArgumentParser()
  for flag, help_text, default, value_type in arg_specs:
    parser.add_argument(flag, help=help_text, default=default,
                        type=value_type)
  args_in = parser.parse_args()
  # Calls main function
  main(args_in)
| |
import math
def divisors(number):
    """Return a sorted list of all proper divisors of number.

    Pairs each divisor i <= sqrt(number) with its cofactor number // i,
    turning the previous O(n) scan of 1..number//2 into O(sqrt(n)).
    The number itself is excluded and non-positive inputs yield [],
    matching the old behaviour.
    """
    if number < 1:
        return []
    small = []   # divisors <= sqrt(number), ascending
    large = []   # divisors > sqrt(number), collected descending
    i = 1
    while i * i <= number:
        if number % i == 0:
            if i != number:
                small.append(i)
            partner = number // i
            if partner != number and partner != i:
                large.append(partner)
        i += 1
    # small are all < large, so ascending order is small + reversed(large).
    return small + large[::-1]
def binary_1(number):
    """Return the number of 1s in the binary expansion of number.

    Uses the built-in binary formatting instead of a manual
    divide-and-test loop; non-positive inputs yield 0, as before.
    """
    if number <= 0:
        return 0
    return bin(number).count("1")
def seq(sequence, method, val, err):
    """ Asserts that method(element) is val for every element,
    failing with err (plus the offending number) otherwise. """
    for element in sequence:
        result = method(element)
        assert result is val, "Number: "+str(element)+" <- "+err
'''
HAPPY
Definition: One can take the sum of the squares of the digits of a number.
Those numbers are happy for which iterating this operation eventually
leads to 1.
First ten: 1, 7, 10, 13, 19, 23, 28, 31, 32, 44
There are 1441 happy numbers below 10,000.
'''
def happy(number):
    """Return True if number is happy.

    Repeatedly replace the number with the sum of the squares of its
    digits; happy numbers reach 1, unhappy numbers fall into the cycle
    containing 4 (4 -> 16 -> 37 -> 58 -> 89 -> 145 -> 42 -> 20 -> 4).

    Bug fix: the previous version treated every single-digit
    intermediate value other than 1 as unhappy, but 7 is happy, so
    numbers whose chain passes through 7 (e.g. 2111, whose digit
    squares sum to 7) were misclassified.
    """
    if number < 1:
        # Non-positive inputs were (and are) reported unhappy.
        return False
    while number != 1 and number != 4:
        total = 0
        while number > 0:
            total += (number % 10) ** 2
            number //= 10
        number = total
    return number == 1
def test_happy():
    """ Checks happy() against known happy and unhappy numbers. """
    happy_ones = [1, 7, 10, 13, 19, 23, 28, 31, 32, 44]
    unhappy_ones = [2, 8, 11, 14, 20, 22, 29, 33, 34, 45]
    seq(happy_ones, happy, True,
        "happy number from test sequence is not happy")
    seq(unhappy_ones, happy, False,
        "not happy number is happy")
'''
LUCKY
Definition: To build the lucky number sequence, start with natural numbers.
Delete every second number, leaving 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, ... .
The second number remaining is 3, so delete every third number, leaving 1, 3, 7, 9, 13, 15, 19, 21, ... .
The next number remaining is 7, so delete every 7th number, leaving 1, 3, 7, 9, 13, 15, 21, ... .
The next number remaining is 9, so delete every ninth number, etc.
Those numbers were lucky they weren't crossed out.
First ten: 1, 3, 7, 9, 13, 15, 21, 25, 31, 33
There are 1118 lucky numbers below 10,000.
Another definition: https://en.wikipedia.org/wiki/Lucky_number
'''
def lucky(number):
    """ Returns True if number is lucky (survives the lucky-number sieve).

    Builds a survival table up to `number` and repeatedly crosses out
    every k-th survivor, where k is the survivor found at the current
    sieve position, until `number` is either eliminated or proven to
    survive.
    """
    # Even numbers are eliminated by the sieve's first pass, so they are
    # never lucky.  (The first pass below zeroes the even indices.)
    if number % 2 == 0:
        return False
    # sequence[i] == 1 means i is still a survivor of the sieve.
    sequence = [1]*(number+1)
    for i in range(len(sequence)):
        # First sieve pass: cross out every second number (the evens).
        if i % 2 == 0:
            sequence[i] = 0
    #print(sequence)
    position = 2
    unlucky = 0
    while unlucky < number and position < number:
        count = 0
        # Find the survivor at `position`; its value is the step size of
        # the next elimination round.
        for i in range(len(sequence)):
            if sequence[i] == 1:
                count = count +1
                if count == position:
                    unlucky = i
                    break
        #print("unlucky", unlucky)
        # Cross out every `unlucky`-th remaining survivor.
        count = 0
        for i in range(len(sequence)):
            if sequence[i] == 1:
                count = count + 1
                if count == unlucky:
                    sequence[i] = 0
                    count = 0
        #print(sequence)
        position = position + 1
        #print("position", position)
    # If number was eliminated already then it is unlucky.
    if sequence[number] == 0:
        return False
    '''
    for i in range(len(sequence)):
        if sequence[i] == 1:
            print("lucky", i)
    exit()
    '''
    return sequence[number] == 1
def test_lucky():
    """ Checks lucky() against known lucky and unlucky numbers. """
    lucky_ones = [1, 3, 7, 9, 13, 15, 21, 25, 31, 33]
    unlucky_ones = [2, 4, 8, 10, 14, 16, 23, 27, 34, 36]
    seq(lucky_ones, lucky, True,
        "lucky number from test sequence is not lucky")
    seq(unlucky_ones, lucky, False,
        "not lucky number is lucky")
'''
PERFECT
Definition: The number n is perfect if the sum of all its positive divisors except itself is equal to n.
Less than perfect numbers are called deficient, too perfect -- abundant.
First ten: 6, 28, 496, 8128, 33550336, 8589869056, 137438691328, 2305843008139952128,
2658455991569831744654692615953842176, 191561942608236107294793378084303638130997321548169216
There are 4 perfect numbers below 10,000.
'''
def perfect(number):
    """ Returns True when the aliquot sum (sum of proper divisors)
    equals the number itself. """
    aliquot_sum = sum(divisors(number))
    return aliquot_sum == number
def test_perfect():
    """ Checks perfect() against the perfect numbers below 10,000. """
    known_perfect = [6, 28, 496, 8128]
    seq(known_perfect, perfect, True,
        "perfect number from test sequence is not perfect")
    # The larger known perfect numbers (33550336, 8589869056, ...) are
    # skipped here because checking them would be far too slow.
    not_perfect = [2, 4, 8, 10, 14, 16, 23, 27, 34, 36]
    seq(not_perfect, perfect, False,
        "not perfect number is perfect")
'''
DEFICIENT
Definition: The number n is deficient if the sum of all its positive divisors except itself is less than n.
Compare with perfect and abundant numbers.
First ten: 1, 2, 3, 4, 5, 7, 8, 9, 10, 11
There are 7508 deficient numbers below 10,000.
'''
def deficient(number):
    """ Returns True when the aliquot sum (sum of proper divisors)
    falls short of the number itself. """
    aliquot_sum = sum(divisors(number))
    return aliquot_sum < number
def test_deficient():
    """ Checks deficient() against known deficient and non-deficient numbers. """
    deficient_ones = [1, 2, 3, 4, 5, 7, 8, 9, 10, 11]
    non_deficient = [6, 12, 18, 20, 24, 30, 36, 40, 42, 48, 54]
    seq(deficient_ones, deficient, True,
        "deficient number from test sequence is not deficient")
    seq(non_deficient, deficient, False,
        "not deficient number is deficient")
'''
ABUNDANT
Definition: The number n is abundant if the sum of all its positive divisors except itself is more than n.
They are abundant above perfection, not to mention deficiency. See perfect and deficient numbers.
First ten: 12, 18, 20, 24, 30, 36, 40, 42, 48, 54
There are 2487 abundant numbers below 10,000.
'''
def abundant(number):
    """ Returns True when the aliquot sum (sum of proper divisors)
    exceeds the number itself. """
    aliquot_sum = sum(divisors(number))
    return aliquot_sum > number
def test_abundant():
    """ Checks abundant() against known abundant and non-abundant numbers. """
    abundant_ones = [12, 18, 20, 24, 30, 36, 40, 42, 48, 54]
    non_abundant = [6, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11]
    seq(abundant_ones, abundant, True,
        "abundant number from test sequence is not abundant")
    seq(non_abundant, abundant, False,
        "not abundant number is abundant")
'''
FIBONACCI
Definition: Fibonacci numbers are numbers that form the Fibonacci sequence. The Fibonacci sequence is defined as starting with 1, 1 and then each next term is the sum of the two preceding ones.
Fibonacci numbers are very common in nature. For example, a pineapple has 8 spirals if you count one way, and 13 if you count the other way.
First ten: 1, 1, 2, 3, 5, 8, 13, 21, 34, 55
There are 19 different Fibonacci numbers below 10,000.
'''
def fibonacci(number):
    """ Returns True if number appears in the Fibonacci sequence,
    generating terms until one reaches or passes the number. """
    if number == 1:
        return True
    prev, curr = 1, 2
    while curr < number:
        prev, curr = curr, prev + curr
    return curr == number
def test_fibonacci():
    """ Checks fibonacci() against known members and non-members. """
    fib_ones = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
    non_fib = [6, 7, 9, 10, 11]
    seq(fib_ones, fibonacci, True,
        "fibonacci number from test sequence is not fibonacci")
    seq(non_fib, fibonacci, False,
        "not fibonacci number is fibonacci")
'''
PRIME
Definition: A prime is a positive integer greater than 1 that is divisible by no positive integers other than 1 and itself.
Prime numbers are opposite to composite numbers.
First ten: 2, 3, 5, 7, 11, 13, 17, 19, 23, 29
There are 1229 primes below 10,000.
'''
def prime(number):
    """Return True if number is prime.

    Trial division with candidate*candidate <= number keeps the bound
    exact without float square roots.  Bug fix: the previous version
    returned True for 0 and for negative numbers, because its
    trial-division range was empty and 1 was the only special case.
    """
    if number < 2:
        return False
    candidate = 2
    while candidate * candidate <= number:
        if number % candidate == 0:
            return False
        candidate += 1
    return True
def test_prime():
    """ Checks prime() against known primes and composites. """
    primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
    composites = [4, 6, 8, 9, 12, 14, 15, 16, 18, 20]
    seq(primes, prime, True,
        "prime number from test sequence is not prime")
    seq(composites, prime, False,
        "not prime number is prime")
'''
TRIANGULAR
Definition: If you start with n points on a line, then draw n-1 points above and between, then n-2 above and between them, and so on, you will get a triangle of points. The number of points in this triangle is a triangle number.
Compare to square, pentagonal and tetrahedral numbers.
First ten: 1, 3, 6, 10, 15, 21, 28, 36, 45, 55
There are 140 triangular numbers below 10,000.
Test method: http://mathforum.org/library/drmath/view/57162.html
'''
def triangular(number):
    """Return True if number is triangular, i.e. equals k(k+1)/2.

    Fixes: removed a leftover debug print(), and the float square-root
    estimate is now verified against its integer neighbours so rounding
    error cannot misclassify large inputs.
    """
    if number == 1:
        return True
    doubled = 2 * number
    root = round(math.sqrt(doubled))
    # Check the estimate and both neighbours exactly in integers.
    for k in (root - 1, root, root + 1):
        if k * (k + 1) == doubled:
            return True
    return False
def test_triangular():
    """ Checks triangular() against known members and non-members. """
    triangular_ones = [1, 3, 6, 10, 15, 21, 28, 36, 45, 55]
    non_triangular = [4, 8, 9, 12, 14, 16, 18, 20]
    seq(triangular_ones, triangular, True,
        "triangular number from test sequence is not triangular")
    seq(non_triangular, triangular, False,
        "not triangular number is triangular")
'''
SQUARE
Definition: The number n is a square if it is the square of an integer.
First ten: 1, 4, 9, 16, 25, 36, 49, 64, 81, 100
There are 99 squares below 10,000.
'''
def square(number):
    """Return True if number is the square of an integer.

    The float square-root estimate is verified against its integer
    neighbours, so float rounding cannot misclassify very large
    numbers the way a bare round(math.sqrt(n)) check could.
    """
    if number == 1:
        return True
    root = int(math.sqrt(number))
    for r in (root - 1, root, root + 1):
        if r * r == number:
            return True
    return False
def test_square():
    """ Checks square() against known squares and non-squares. """
    squares = [1, 4, 9, 16, 25, 36, 49, 64, 81, 100]
    non_squares = [8, 12, 14, 18, 20]
    seq(squares, square, True,
        "square number from test sequence is not square")
    seq(non_squares, square, False,
        "not square number is square")
'''
CUBE
Definition: The number n is a cube if it is the cube of an integer.
First ten: 1, 8, 27, 64, 125, 216, 343, 512, 729, 1000
There are 21 cube numbers below 10,000.
'''
def cube(number):
    """Return True if number is the cube of an integer.

    Fixes: round(number ** (1/3)) loses float precision for large
    values, so the estimate is verified against its integer neighbours;
    negative numbers (cubes of negative integers) are handled instead
    of raising TypeError on the complex float power.
    """
    if number < 0:
        # (-k)**3 == -(k**3), so check the magnitude.
        return cube(-number)
    estimate = round(number ** (1.0 / 3))
    for root in (estimate - 1, estimate, estimate + 1):
        if root ** 3 == number:
            return True
    return False
def test_cube():
    """ Checks cube() against known cubes and non-cubes. """
    cubes = [1, 8, 27, 64, 125, 216, 343, 512, 729, 1000]
    non_cubes = [4, 9, 16, 25, 36, 49, 81, 100]
    seq(cubes, cube, True,
        "cube number from test sequence is not cube")
    seq(non_cubes, cube, False,
        "not cube number is cube")
'''
ODD
Definition: A number is odd if it is not divisible by 2.
Numbers that are not odd are even. Compare with another pair -- evil and odious numbers.
First ten: 1, 3, 5, 7, 9, 11, 13, 15, 17, 19
There are 5000 odd numbers below 10,000.
'''
def odd(number):
    """ Returns True when number is not evenly divisible by 2. """
    return number % 2 != 0
def test_odd():
    """ Checks odd() against odd and even numbers. """
    odd_ones = [1, 3, 5, 7, 9, 11, 13, 15, 17, 19]
    even_ones = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
    seq(odd_ones, odd, True,
        "odd number from test sequence is not odd")
    seq(even_ones, odd, False,
        "not odd number is odd")
'''
EVEN
Definition: A number is even if it is divisible by 2.
Numbers that are not even are odd. Compare with another pair -- evil and odious numbers.
First ten: 2, 4, 6, 8, 10, 12, 14, 16, 18, 20
There are 4999 even numbers below 10,000.
'''
def even(number):
    """ Returns True when number is evenly divisible by 2. """
    return not number % 2
def test_even():
    """ Checks even() against even and odd numbers. """
    even_ones = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
    odd_ones = [1, 3, 5, 7, 9, 11, 13, 15, 17, 19]
    seq(even_ones, even, True,
        "even number from test sequence is not even")
    seq(odd_ones, even, False,
        "not even number is even")
'''
REPUNIT
Definition: A repunit is an integer in which every digit is one.
The term repunit comes from combining "repeated" and "unit".
First ten: 1, 11, 111, 1111, 11111, 111111, 1111111, 11111111, 111111111, 1111111111
There are 4 repunits below 10,000.
'''
def repunit(number):
    """ Returns True when every decimal digit of number is 1. """
    return set(str(number)) == {"1"}
def test_repunit():
    """ Checks repunit() against repunits and non-repunits. """
    repunits = [1, 11, 111, 1111, 11111, 111111, 1111111, 11111111,
                111111111, 1111111111]
    non_repunits = [3, 5, 7, 9, 13, 15, 17, 19]
    seq(repunits, repunit, True,
        "repunit number from test sequence is not repunit")
    seq(non_repunits, repunit, False,
        "not repunit number is repunit")
'''
LAZY CATERER
Definition: The n-th lazy caterer number is the maximum number of pieces a (circular) pizza can be cut into with n (straight-line) cuts.
Unlike the situation with cake, everybody gets the toppings.
First ten: 2, 4, 7, 11, 16, 22, 29, 37, 46, 56
There are 140 lazy caterer numbers below 10,000.
Formula: https://en.wikipedia.org/wiki/Lazy_caterer%27s_sequence
'''
def lazy_caterer(number):
    """Return True if number is a lazy caterer number.

    The n-th lazy caterer number is (n**2 + n + 2) / 2; n**2 + n is
    always even, so floor division is exact.  The previous float
    division could lose precision for very large n.
    """
    n = 1
    pieces = 2  # n = 1 -> (1 + 1 + 2) // 2 == 2
    while pieces < number:
        n += 1
        pieces = (n * n + n + 2) // 2
    return pieces == number
def test_lazy_caterer():
    """ Checks lazy_caterer() against known members and non-members. """
    caterer_ones = [2, 4, 7, 11, 16, 22, 29, 37, 46, 56]
    non_caterer = [3, 5, 9, 13, 15, 17, 19]
    seq(caterer_ones, lazy_caterer, True,
        "lazy_caterer number from test sequence is not lazy_caterer")
    seq(non_caterer, lazy_caterer, False,
        "not lazy_caterer number is lazy_caterer")
'''
ODIOUS
Definition: The number n is odious if it has an odd number of 1's in its binary expansion.
Guess what evil numbers are.
First ten: 1, 2, 4, 7, 8, 11, 13, 14, 16, 19
There are 5000 odious numbers below 10,000.
'''
def odious(number):
    """ Returns True when the binary expansion holds an odd count of 1s. """
    ones = binary_1(number)
    return odd(ones)
def test_odious():
    """ Checks odious() against odious and evil numbers. """
    odious_ones = [1, 2, 4, 7, 8, 11, 13, 14, 16, 19]
    evil_ones = [3, 5, 6, 9, 10, 12, 15, 17, 18, 20]
    seq(odious_ones, odious, True,
        "odious number from test sequence is not odious")
    seq(evil_ones, odious, False,
        "not odious number is odious")
'''
EVIL
Definition: The number n is evil if it has an even number of 1's in its binary expansion.
Guess what odious numbers are.
First ten: 3, 5, 6, 9, 10, 12, 15, 17, 18, 20
There are 4999 evil numbers below 10,000.
'''
def evil(number):
    """ Returns True when the binary expansion holds an even count of 1s. """
    ones = binary_1(number)
    return even(ones)
def test_evil():
    """ Checks evil() against evil and odious numbers. """
    evil_ones = [3, 5, 6, 9, 10, 12, 15, 17, 18, 20]
    odious_ones = [1, 2, 4, 7, 8, 11, 13, 14, 16, 19]
    seq(evil_ones, evil, True,
        "evil number from test sequence is not evil")
    seq(odious_ones, evil, False,
        "not evil number is evil")
'''
UNDULATING
Definition: Undulating numbers are numbers of the form abababab... in base 10.
This property is significant starting from 3-digit numbers, so we will not consider numbers below 100.
First ten: 101, 111, 121, 131, 141, 151, 161, 171, 181, 191
There are 180 undulating numbers below 10,000.
'''
def undulating(number):
    """ Returns True for numbers of the form ababab... in base 10;
    numbers below 100 are excluded by definition. """
    if number < 100:
        return False
    digits = str(number)
    return all(digits[i] == digits[i + 2] for i in range(len(digits) - 2))
def test_undulating():
    """ Checks undulating() against members and non-members. """
    undulating_ones = [101, 111, 121, 131, 141, 151, 161, 171, 181, 191]
    non_undulating = [1, 2, 4, 7, 8, 11, 13, 14, 16, 19, 345, 1234]
    seq(undulating_ones, undulating, True,
        "undulating number from test sequence is not undulating")
    seq(non_undulating, undulating, False,
        "not undulating number is undulating")
'''
TWIN
Definition: A prime number is called a twin prime if there exists another prime number differing from it by 2.
First ten: 3, 5, 7, 11, 13, 17, 19, 29, 31, 41
There are 409 twin primes below 10,000.
'''
def twin(number):
    """ Returns True when number is prime and has a prime neighbour at
    distance 2 (either side). """
    if number < 3 or not prime(number):
        return False
    return prime(number + 2) or prime(number - 2)
def test_twin():
    """ Checks twin() against twin primes and non-twins. """
    twins = [3, 5, 7, 11, 13, 17, 19, 29, 31, 41]
    non_twins = [1, 2, 4, 8, 14, 16, 15, 21]
    seq(twins, twin, True,
        "twin number from test sequence is not twin")
    seq(non_twins, twin, False,
        "not twin number is twin")
'''
TETRAHEDRAL (PYRAMIDAL)
Definition: A tetrahedral number is the number of balls you can put in a triangular pyramid.
This is the space generalization of triangular and square numbers.
First ten: 1, 4, 10, 20, 35, 56, 84, 120, 165, 220
There are 38 tetrahedral numbers below 10,000.
Formula: https://en.wikipedia.org/wiki/Tetrahedral_number
'''
def tetrahedral(number):
    """Return True if number is tetrahedral.

    The n-th tetrahedral number is n(n+1)(n+2)/6, always an integer,
    so floor division keeps the arithmetic exact; the previous float
    division could lose precision for very large n.
    """
    n = 1
    t = 1  # first tetrahedral number
    while t < number:
        n += 1
        t = n * (n + 1) * (n + 2) // 6
    return t == number
def test_tetrahedral():
    """ Checks tetrahedral() against members and non-members. """
    tetra_ones = [1, 4, 10, 20, 35, 56, 84, 120, 165, 220]
    non_tetra = [3, 5, 9, 13, 15, 17, 19]
    seq(tetra_ones, tetrahedral, True,
        "tetrahedral number from test sequence is not tetrahedral")
    seq(non_tetra, tetrahedral, False,
        "not tetrahedral number is tetrahedral")
'''
PRONIC (HETEROMECIC)
Definition: The number is called pronic if it is the product of two consecutive numbers.
They are twice triangular numbers.
First ten: 2, 6, 12, 20, 30, 42, 56, 72, 90, 110
There are 99 pronic numbers below 10,000.
'''
def pronic(number):
    """ Returns True when number is the product of two consecutive
    integers (k * (k+1)). """
    base = round(math.sqrt(number))
    return number in (base * (base + 1), base * (base - 1))
def test_pronic():
    """ Checks pronic() against members and non-members. """
    pronic_ones = [2, 6, 12, 20, 30, 42, 56, 72, 90, 110]
    non_pronic = [3, 5, 9, 13, 15, 17, 19]
    seq(pronic_ones, pronic, True,
        "pronic number from test sequence is not pronic")
    seq(non_pronic, pronic, False,
        "not pronic number is pronic")
'''
PRIMORIAL
Definition: The p-primorial is the product of all primes less than or equal to p. It is sometimes denoted by p#.
Compare to compositorials and factorials.
First ten: 2, 6, 30, 210, 2310, 30030, 510510, 9699690, 223092870, 6469693230
There are 5 primorials below 10,000.
'''
def primorial(number):
    """ Returns True when number is a primorial, i.e. the product of
    all primes up to some prime p. """
    product = 1
    candidate = 2
    while product < number:
        if prime(candidate):
            product *= candidate
        candidate += 1
    return product == number
def test_primorial():
    """ Checks primorial() against members and non-members. """
    primorials = [2, 6, 30, 210, 2310, 30030, 510510, 9699690,
                  223092870, 6469693230]
    non_primorials = [3, 5, 9, 13, 15, 17, 19]
    seq(primorials, primorial, True,
        "primorial number from test sequence is not primorial")
    seq(non_primorials, primorial, False,
        "not primorial number is primorial")
'''
PALINDROME
Definition: A palindrome is a number that reads the same forward or backward.
First ten: 1, 2, 3, 4, 5, 6, 7, 8, 9, 11
There are 198 palindromic numbers below 10,000.
'''
def palindrome(number):
    """ Returns True when the decimal digits read the same both ways. """
    digits = str(number)
    return digits == digits[::-1]
def test_palindrome():
    """ Checks palindrome() against members and non-members. """
    palindromes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 11]
    non_palindromes = [15, 17, 19]
    seq(palindromes, palindrome, True,
        "palindrome number from test sequence is not palindrome")
    seq(non_palindromes, palindrome, False,
        "not palindrome number is palindrome")
'''
PALINDROMIC PRIME
Definition: A palindromic prime is a prime which is a palindrome.
In base 2 Mersenne primes are palindromic primes.
First ten: 2, 3, 5, 7, 11, 101, 131, 151, 181, 191
There are 20 palindromic primes below 10,000.
'''
def palindromic_prime(number):
    """ Returns True when number is both a palindrome and a prime. """
    return palindrome(number) and prime(number)
def test_palindromic_prime():
    """ Checks palindromic_prime() against members and non-members. """
    pal_primes = [2, 3, 5, 7, 11, 101, 131, 151, 181, 191]
    non_pal_primes = [15, 17, 19]
    seq(pal_primes, palindromic_prime, True,
        "palindromic_prime number from test sequence is not palindromic_prime")
    seq(non_pal_primes, palindromic_prime, False,
        "not palindromic_prime number is palindromic_prime")
'''
PENTAGONAL
Definition: Pentagonal numbers are of the form n(3n - 1)/2.
Pentagonal numbers are to pentagons what triangular numbers are to triangles and square numbers are to squares.
First ten: 1, 5, 12, 22, 35, 51, 70, 92, 117, 145
There are 81 pentagonal numbers below 10,000.
'''
def pentagonal(number):
    """Return True if number is pentagonal.

    The n-th pentagonal number is n(3n - 1)/2; 3n**2 - n is always
    even, so floor division keeps the arithmetic exact.  The previous
    float division could lose precision for very large n.
    """
    n = 1
    p = 1  # first pentagonal number
    while p < number:
        n += 1
        p = n * (3 * n - 1) // 2
    return p == number
def test_pentagonal():
    """ Tests pentagonal method """
    # seq() (helper defined earlier in this file) checks the predicate against
    # each listed number.
    seq([1, 5, 12, 22, 35, 51, 70, 92, 117, 145], pentagonal, True,
        "pentagonal number from test sequence is not pentagonal")
    seq([3, 9, 13, 15, 17, 19], pentagonal, False,
        "not pentagonal number is pentagonal")
'''
APOCALYPTIC POWER
Definition: The number n is called an apocalyptic power if 2^n contains the consecutive digits 666 (in decimal).
First ten: 157, 192, 218, 220, 222, 224, 226, 243, 245, 247
There are 6485 apocalyptic powers below 10,000.
'''
def apocalyptic_power(number):
    """Return True if the decimal expansion of 2**number contains '666'."""
    return '666' in str(2 ** number)
def test_apocalyptic_power():
    """ Tests apocalyptic_power method """
    # seq() (helper defined earlier in this file) checks the predicate against
    # each listed number.
    seq([157, 192, 218, 220, 222, 224, 226, 243, 245, 247], apocalyptic_power, True,
        "apocalyptic_power number from test sequence is not apocalyptic_power")
    seq([3, 9, 13, 15, 17, 19], apocalyptic_power, False,
        "not apocalyptic_power number is apocalyptic_power")
'''
AUTOMORPHIC (CURIOUS)
Definition: The number n is called an automorphic number if (the decimal expansion of) n^2 ends with n. These numbers are also called curious.
It is curious, how for a k-digit automorphic number n there is another automorphic number -- 10^k + 1 - n. For this to work with n=1, you have to treat 1 as a zero-digit number.
First ten: 1, 5, 6, 25, 76, 376, 625, 9376, 90625, 109376
There are 8 automorphic numbers below 10,000.
'''
def automorphic(number):
    """Return True if the decimal expansion of number**2 ends with number."""
    digits = str(number)
    return str(number * number).endswith(digits)
def test_automorphic():
    """ Tests automorphic method """
    # seq() (helper defined earlier in this file) checks the predicate against
    # each listed number.
    seq([1, 5, 6, 25, 76, 376, 625, 9376, 90625, 109376], automorphic, True,
        "automorphic number from test sequence is not automorphic")
    seq([3, 9, 13, 15, 17, 19, 11111], automorphic, False,
        "not automorphic number is automorphic")
'''
CAKE
Definition: The n-th cake number is the maximum number of pieces a (cylindrical) cake can be cut into with n (straight-plane) cuts.
Unfortunately, not everybody gets the frosting. If you cut pizza rather than cake, you get lazy caterer's numbers.
First ten: 2, 4, 8, 15, 26, 42, 64, 93, 130, 176
There are 39 cake numbers below 10,000.
Formula: https://en.wikipedia.org/wiki/Cake_number
'''
def cake(number):
    """Return True if *number* is a cake number.

    The n-th cake number (maximum pieces from n straight planar cuts of a
    cylindrical cake) is (n**3 + 5n + 6) / 6; walk n upward until the value
    meets or passes *number*. (The original comment mislabelled this the
    lazy caterer formula.)
    """
    n = 1
    while True:
        # n**3 + 5n + 6 is always divisible by 6, so // is exact and avoids
        # float rounding on very large inputs.
        p = (n**3 + 5*n + 6) // 6
        if p == number:
            return True
        if p > number:
            return False
        n += 1
def test_cake():
    """ Tests cake method """
    # seq() (helper defined earlier in this file) checks the predicate against
    # each listed number.
    seq([2, 4, 8, 15, 26, 42, 64, 93, 130, 176], cake, True,
        "cake number from test sequence is not cake")
    seq([3, 5, 9, 13, 17, 19], cake, False,
        "not cake number is cake")
'''
COMPOSITE
Definition: A positive integer greater than 1 that is not prime is called composite.
Composite numbers are opposite to prime numbers.
First ten: 4, 6, 8, 9, 10, 12, 14, 15, 16, 18
There are 8769 composite numbers below 10,000.
'''
def composite(number):
    """Return True if *number* is composite: an integer > 1 that is not prime.

    The original returned ``not prime(number)``, which wrongly reports 0 and 1
    (neither prime nor composite) as composite; the definition above requires
    the value to be greater than 1.
    """
    return number > 1 and not prime(number)
def test_composite():
    """ Tests composite method """
    # seq() (helper defined earlier in this file) checks the predicate against
    # each listed number.
    seq([4, 6, 8, 9, 10, 12, 14, 15, 16, 18], composite, True,
        "composite number from test sequence is not composite")
    seq([3, 5, 13, 17, 19], composite, False,
        "not composite number is composite")
'''
FACTORIAL
Definition: The n-th factorial is the product of the first n natural numbers.
The factorial deserved an exclamation mark for its notation: k! = 1*2*3*...*k.
First ten: 1, 2, 6, 24, 120, 720, 5040, 40320, 362880, 3628800
There are 7 factorials below 10,000.
'''
def factorial(number):
    """Return True if number equals k! for some natural number k."""
    product = 1
    step = 1
    # Grow 1, 2, 6, 24, ... until we reach or pass the target.
    while product < number:
        step += 1
        product *= step
    return product == number
def test_factorial():
    """ Tests factorial method """
    # seq() (helper defined earlier in this file) checks the predicate against
    # each listed number.
    seq([1, 2, 6, 24, 120, 720, 5040, 40320, 362880, 3628800], factorial, True,
        "factorial number from test sequence is not factorial")
    seq([7, 9, 10, 11, 1111], factorial, False,
        "not factorial number is factorial")
'''
NARCISSISTIC
Definition: A k-digit number n is called narcissistic if it is equal to the sum of k-th powers of its digits. They are also called Plus Perfect numbers.
First ten: 1, 2, 3, 4, 5, 6, 7, 8, 9, 153
There are 16 narcissistic numbers below 10,000.
'''
def narcissistic(number):
    """Return True if a k-digit number equals the sum of the k-th powers of its digits."""
    digits = str(number)
    width = len(digits)
    return number == sum(int(d) ** width for d in digits)
def test_narcissistic():
    """ Tests narcissistic method """
    # seq() (helper defined earlier in this file) checks the predicate against
    # each listed number.
    seq([1, 2, 3, 4, 5, 6, 7, 8, 9, 153], narcissistic, True,
        "narcissistic number from test sequence is not narcissistic")
    seq([10, 11, 111], narcissistic, False,
        "not narcissistic number is narcissistic")
| |
import unittest
from mock import Mock
from cartodb_services.geocodio import GeocodioGeocoder
from cartodb_services.geocodio import GeocodioBulkGeocoder
from cartodb_services.tools.exceptions import ServiceException
from credentials import geocodio_api_key
# Deliberately bad token used to exercise the auth-failure paths.
INVALID_TOKEN = 'invalid_token'
# Full single-string addresses (semicolon-separated components).
VALID_ADDRESS_1 = 'Lexington Ave; New York; US'
VALID_ADDRESS_2 = 'E 14th St; New York; US'
VALID_ADDRESS_3 = '652 Lombard Street; San Francisco; California; United States'
# The same three addresses split into searchtext / city / state / country
# components for the component-based geocode() calls.
VALID_SEARCH_TEXT_1='Lexington Ave'
VALID_CITY_1='New York'
VALID_STATE_PROVINCE_1='New York'
VALID_COUNTRY_1='US'
VALID_SEARCH_TEXT_2='E 14th St'
VALID_CITY_2='New York'
VALID_STATE_PROVINCE_2='New York'
VALID_COUNTRY_2='US'
VALID_SEARCH_TEXT_3='652 Lombard Street'
VALID_CITY_3='San Francisco'
VALID_STATE_PROVINCE_3='California'
VALID_COUNTRY_3='United States'
# Expected coordinates; compared at two decimal places via PRECISION_FORMAT.
WELL_KNOWN_LONGITUDE_1 = -73.96
WELL_KNOWN_LATITUDE_1 = 40.77
WELL_KNOWN_LONGITUDE_2 = -74.00
WELL_KNOWN_LATITUDE_2 = 40.75
WELL_KNOWN_LONGITUDE_3 = -122.41
WELL_KNOWN_LATITUDE_3 = 37.80
# Identifiers carried through the bulk-geocode request/response tuples.
SEARCH_ID_1 = 1
SEARCH_ID_2 = 2
PRECISION_FORMAT = '%.2f'
class GeocodioGeocoderTestCase(unittest.TestCase):
    """Integration tests for GeocodioGeocoder / GeocodioBulkGeocoder.

    NOTE(review): these tests call the live Geocodio HTTP API with the key
    from credentials.geocodio_api_key(), so they require network access and
    a valid key. Coordinates are compared at two decimal places
    (PRECISION_FORMAT) to tolerate small upstream drift.
    """

    def setUp(self):
        # Fresh single-address and bulk geocoders for every test.
        self.geocoder = GeocodioGeocoder(token=geocodio_api_key(), logger=Mock())
        self.bulk_geocoder = GeocodioBulkGeocoder(token=geocodio_api_key(), logger=Mock())

    ### NON BULK
    def test_invalid_token(self):
        # An invalid token must surface as a ServiceException.
        invalid_geocoder = GeocodioGeocoder(token=INVALID_TOKEN, logger=Mock())
        with self.assertRaises(ServiceException):
            invalid_geocoder.geocode(VALID_ADDRESS_1)

    def test_valid_requests(self):
        # geocode() returns a coordinate pair: [0] longitude, [1] latitude
        # (per the WELL_KNOWN_* comparisons below).
        place = self.geocoder.geocode(VALID_ADDRESS_1)
        self.assertEqual(PRECISION_FORMAT % place[0], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_1)
        self.assertEqual(PRECISION_FORMAT % place[1], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_1)
        place = self.geocoder.geocode(VALID_ADDRESS_2)
        self.assertEqual(PRECISION_FORMAT % place[0], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_2)
        self.assertEqual(PRECISION_FORMAT % place[1], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_2)
        place = self.geocoder.geocode(VALID_ADDRESS_3)
        self.assertEqual(PRECISION_FORMAT % place[0], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_3)
        self.assertEqual(PRECISION_FORMAT % place[1], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_3)

    def test_valid_request_components(self):
        # Same lookups as above, with the address split into its components.
        place = self.geocoder.geocode(searchtext=VALID_SEARCH_TEXT_1,
                                      city=VALID_CITY_1,
                                      state_province=VALID_STATE_PROVINCE_1,
                                      country=VALID_COUNTRY_1)
        self.assertEqual(PRECISION_FORMAT % place[0], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_1)
        self.assertEqual(PRECISION_FORMAT % place[1], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_1)
        place = self.geocoder.geocode(searchtext=VALID_SEARCH_TEXT_2,
                                      city=VALID_CITY_2,
                                      state_province=VALID_STATE_PROVINCE_2,
                                      country=VALID_COUNTRY_2)
        self.assertEqual(PRECISION_FORMAT % place[0], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_2)
        self.assertEqual(PRECISION_FORMAT % place[1], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_2)
        place = self.geocoder.geocode(searchtext=VALID_SEARCH_TEXT_3,
                                      city=VALID_CITY_3,
                                      state_province=VALID_STATE_PROVINCE_3,
                                      country=VALID_COUNTRY_3)
        self.assertEqual(PRECISION_FORMAT % place[0], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_3)
        self.assertEqual(PRECISION_FORMAT % place[1], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_3)

    def test_valid_request_namedplace(self):
        place = self.geocoder.geocode(searchtext='New York')
        assert place

    def test_valid_request_namedplace2(self):
        place = self.geocoder.geocode(searchtext='New York', country='us')
        assert place

    def test_odd_characters(self):
        # Quotes and semicolons in the search text must not break the request.
        place = self.geocoder.geocode(searchtext='New York; "USA"')
        assert place

    def test_empty_request(self):
        # Empty / missing components yield an empty result, not an error.
        place = self.geocoder.geocode(searchtext='', country=None, city=None, state_province=None)
        assert place == []

    def test_empty_search_text_request(self):
        place = self.geocoder.geocode(searchtext=' ', country='us', city=None, state_province="")
        assert place == []

    def test_unknown_place_request(self):
        place = self.geocoder.geocode(searchtext='[unknown]', country='ch', state_province=None, city=None)
        assert place == []

    ### BULK ONE
    # Single-entry batches: each request tuple is
    # (search_id, searchtext, city, state_province, country) and each result
    # row is (search_id, longitude, latitude) per the assertions below.
    def test_invalid_token_bulk_one(self):
        invalid_geocoder = GeocodioBulkGeocoder(token=INVALID_TOKEN, logger=Mock())
        with self.assertRaises(ServiceException):
            invalid_geocoder._batch_geocode([(SEARCH_ID_1, VALID_ADDRESS_1, None, None, None)])

    def test_valid_request_bulk_one(self):
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, VALID_ADDRESS_1, None, None, None)])
        self.assertEqual(place[0][0], SEARCH_ID_1)
        self.assertEqual(PRECISION_FORMAT % place[0][1], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_1)
        self.assertEqual(PRECISION_FORMAT % place[0][2], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_1)
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, VALID_ADDRESS_2, None, None, None)])
        self.assertEqual(place[0][0], SEARCH_ID_1)
        self.assertEqual(PRECISION_FORMAT % place[0][1], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_2)
        self.assertEqual(PRECISION_FORMAT % place[0][2], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_2)
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, VALID_ADDRESS_3, None, None, None)])
        self.assertEqual(place[0][0], SEARCH_ID_1)
        self.assertEqual(PRECISION_FORMAT % place[0][1], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_3)
        self.assertEqual(PRECISION_FORMAT % place[0][2], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_3)

    def test_valid_request_components_bulk_one(self):
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, VALID_SEARCH_TEXT_1, VALID_CITY_1, VALID_STATE_PROVINCE_1, VALID_COUNTRY_1)])
        self.assertEqual(place[0][0], SEARCH_ID_1)
        self.assertEqual(PRECISION_FORMAT % place[0][1], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_1)
        self.assertEqual(PRECISION_FORMAT % place[0][2], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_1)
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, VALID_SEARCH_TEXT_2, VALID_CITY_2, VALID_STATE_PROVINCE_2, VALID_COUNTRY_2)])
        self.assertEqual(place[0][0], SEARCH_ID_1)
        self.assertEqual(PRECISION_FORMAT % place[0][1], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_2)
        self.assertEqual(PRECISION_FORMAT % place[0][2], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_2)
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, VALID_SEARCH_TEXT_3, VALID_CITY_3, VALID_STATE_PROVINCE_3, VALID_COUNTRY_3)])
        self.assertEqual(place[0][0], SEARCH_ID_1)
        self.assertEqual(PRECISION_FORMAT % place[0][1], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_3)
        self.assertEqual(PRECISION_FORMAT % place[0][2], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_3)

    def test_valid_request_namedplace_bulk_one(self):
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, 'New York', None, None, None)])
        assert place

    def test_valid_request_namedplace2_bulk_one(self):
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, 'New York', 'us', None, None)])
        assert place

    def test_odd_characters_bulk_one(self):
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, 'New York; "USA"', None, None, None)])
        assert place

    def test_empty_request_bulk_one(self):
        # Unresolvable single entries come back as (search_id, None, None).
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, '', None, None, None)])
        assert place == [(SEARCH_ID_1, None, None)]

    def test_empty_search_text_request_bulk_one(self):
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, ' ', 'us', None, "")])
        assert place == [(SEARCH_ID_1, None, None)]

    def test_unknown_place_request_bulk_one(self):
        place = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, '[unknown]', 'ch', None, None)])
        assert place == [(SEARCH_ID_1, None, None)]

    ### BULK MANY
    # Multi-entry batches: each result row is (search_id, [lon, lat], metadata)
    # per the assertions below; unresolvable rows are (search_id, [], {}).
    def test_invalid_token_bulk_many(self):
        invalid_geocoder = GeocodioBulkGeocoder(token=INVALID_TOKEN, logger=Mock())
        with self.assertRaises(ServiceException):
            invalid_geocoder._batch_geocode([(SEARCH_ID_1, VALID_ADDRESS_1, None, None, None),
                                             (SEARCH_ID_2, VALID_ADDRESS_2, None, None, None)])

    def test_valid_request_bulk_many(self):
        places = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, VALID_ADDRESS_1, None, None, None),
                                                    (SEARCH_ID_2, VALID_ADDRESS_2, None, None, None)])
        self.assertEqual(places[0][0], SEARCH_ID_1)
        self.assertEqual(PRECISION_FORMAT % places[0][1][0], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_1)
        self.assertEqual(PRECISION_FORMAT % places[0][1][1], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_1)
        self.assertEqual(places[1][0], SEARCH_ID_2)
        self.assertEqual(PRECISION_FORMAT % places[1][1][0], PRECISION_FORMAT % WELL_KNOWN_LONGITUDE_2)
        self.assertEqual(PRECISION_FORMAT % places[1][1][1], PRECISION_FORMAT % WELL_KNOWN_LATITUDE_2)

    def test_valid_request_components_bulk_many(self):
        places = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, VALID_SEARCH_TEXT_1, VALID_CITY_1, VALID_STATE_PROVINCE_1, VALID_COUNTRY_1),
                                                    (SEARCH_ID_2, VALID_SEARCH_TEXT_2, VALID_CITY_2, VALID_STATE_PROVINCE_2, VALID_COUNTRY_2)])
        self.assertEqual(places[0][0], SEARCH_ID_1)
        self.assertEqual(places[1][0], SEARCH_ID_2)

    def test_valid_request_namedplace_bulk_many(self):
        places = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, 'New York', None, None, None),
                                                    (SEARCH_ID_2, 'Los Angeles', None, None, None)])
        assert places
        self.assertEqual(places[0][0], SEARCH_ID_1)
        self.assertEqual(places[1][0], SEARCH_ID_2)

    def test_valid_request_namedplace2_bulk_many(self):
        places = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, 'New York', 'us', None, None),
                                                    (SEARCH_ID_2, 'Los Angeles', None, None, None)])
        assert places
        self.assertEqual(places[0][0], SEARCH_ID_1)
        self.assertEqual(places[1][0], SEARCH_ID_2)

    def test_odd_characters_bulk_many(self):
        places = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, 'New York; "USA"', None, None, None),
                                                    (SEARCH_ID_2, 'Los Angeles', None, None, None)])
        assert places
        self.assertEqual(places[0][0], SEARCH_ID_1)
        self.assertEqual(places[1][0], SEARCH_ID_2)

    def test_empty_request_bulk_many(self):
        places = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, '', None, None, None),
                                                    (SEARCH_ID_2, '', None, None, None)])
        assert places == [(SEARCH_ID_1, [], {}), (SEARCH_ID_2, [], {})]

    def test_empty_search_text_request_bulk_many(self):
        places = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, ' ', 'us', None, ""),
                                                    (SEARCH_ID_2, ' ', 'us', None, "")])
        assert places == [(SEARCH_ID_1, [], {}), (SEARCH_ID_2, [], {})]

    def test_unknown_place_request_bulk_many(self):
        places = self.bulk_geocoder._batch_geocode([(SEARCH_ID_1, '[unknown]', 'ch', None, None),
                                                    (SEARCH_ID_2, '[unknown]', 'ch', None, None)])
        assert places == [(SEARCH_ID_1, [], {}), (SEARCH_ID_2, [], {})]
| |
#!/usr/bin/python
#
# Copyright (C) 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Builds a VoltAir.apk, currently in "Release" mode."""
import argparse
import os
import shutil
import subprocess
import sys
class RunProcessError(Exception):
    """Raised when a subprocess exits with a non-zero status code.

    Attributes:
      process_str: the command that was run, as a single string.
      return_code: non-zero status code returned by the process.
    """

    def __init__(self, process_str, return_code):
        """Record the failing command and its exit status."""
        Exception.__init__(self)
        self.process_str = process_str
        self.return_code = return_code

    def __str__(self):
        return "Failed subprocess (returned: {0}, CWD: {1}): {2}".format(
            self.return_code, os.getcwd(), self.process_str)
def RunSubprocess(argv, rundir=None):
    """Runs a process, printing important info, such as the command.

    Args:
      argv: list of arguments to process. Zeroeth arg is process to run.
      rundir: (optional) Directory in which to run the process.

    NOTE(review): the docstring used to advertise a RunProcessError on
    failure, but the return-code check below is commented out, so non-zero
    exits are currently ignored and "Subprocess succeeded." is printed
    regardless of the exit status.
    """
    try:
        if rundir is not None:
            Pushd(rundir)
        joined_argv = " ".join(argv)
        # Python 2 print-to-stream syntax (this whole script is Python 2).
        print >> sys.stderr, "Running subprocess: %s" % joined_argv
        process = subprocess.Popen(argv)
        process.wait()
        print >> sys.stderr, "Subprocess succeeded."
    finally:
        # Always restore the working directory, even if Popen raised.
        if rundir is not None:
            Popd()
    #if process.returncode:
    #  raise RunProcessError(joined_argv, process.returncode)
def SetEnvironment(sdk_root, ndk_root, ndk_platform):
    """Export the environment variables the Android/Qt build tools read.

    Args:
      sdk_root: Android SDK location
      ndk_root: Android NDK location
      ndk_platform: Android platform (e.g. android-18)
    """
    os.environ.update({
        "ANDROID_NDK_PLATFORM": ndk_platform,
        "ANDROID_HOME": sdk_root,
        "ANDROID_SDK_ROOT": sdk_root,
        "ANDROID_NDK_ROOT": ndk_root,
    })
_dirstack = []
def Pushd(directory):
    """Change into *directory*, saving the current directory for Popd().

    Bug fix: the original appended *directory* itself to _dirstack, so a
    later Popd() chdir'd back into the directory being left instead of
    restoring the caller's previous working directory (RunSubprocess's
    finally-Popd relied on that restore). Pushing os.getcwd() makes the
    Pushd/Popd pair behave like a real pushd/popd stack; Popd needs no
    change.

    Args:
      directory: directory to change into
    """
    _dirstack.append(os.getcwd())
    os.chdir(directory)
def Popd():
    """Pop the top of the directory stack and chdir into it.

    NOTE(review): nothing is returned, despite the original docstring's
    claim; os.chdir's result (None) is discarded.
    """
    os.chdir(_dirstack.pop())
def RunLiquidFunNdkBuild(cwd, ndk_root):
    """Builds the liquidfun module with ndk-build.

    Args:
      cwd: working directory in which to run build.
      ndk_root: Android NDK location
    """
    # V=1 asks ndk-build for verbose command echoing.
    RunSubprocess([ndk_root + "/ndk-build", "V=1"], cwd)
def RunQmake(qt_root, voltair_root):
    """Run the 'qmake' command to generate Makefiles for the project.

    Args:
      qt_root: Qt framework location
      voltair_root: VoltAir project location.
    """
    # -r recurses into subprojects; -spec selects the Android g++ mkspec.
    qmake = [
        qt_root + "/android_armv7/bin/qmake",
        voltair_root + "/VoltAir/VoltAir.pro",
        "-r",
        "-spec",
        "android-g++"
    ]
    RunSubprocess(qmake)
def RunMake(num_processes):
    """Run the 'make' command.

    Args:
      num_processes: How many processes to use to run 'make' command.
        Passed straight through as the -j argument (a string, as it comes
        directly from argparse).
    """
    RunSubprocess(["make", "-j", num_processes])
def RunMakeInstall():
    """Run 'make install', staging files under ./android-build."""
    RunSubprocess(["make",
                   "install", "INSTALL_ROOT=%s/android-build" % os.getcwd()])
def RunAndroidDeployQt(qt_root, voltair_root, build_dir, ant, ndk_platform,
                       jdk):
    """Run the 'androiddeployqt' command to package the APK.

    Args:
      qt_root: Qt framework location
      voltair_root: VoltAir project location.
      build_dir: Directory in which app was built. Need to run command here.
      ant: ant binary location
      ndk_platform: Android platform (e.g. android-18)
      jdk: Java Development Kit location

    NOTE(review): voltair_root is accepted but not used in the command below.
    """
    androiddeployqt = [
        qt_root + "/android_armv7/bin/androiddeployqt",
        "--input",
        os.getcwd() + "/android-libVoltAir.so-deployment-settings.json",
        "--output",
        os.getcwd() + "/android-build",
        "--deployment",
        "bundled",    # bundle Qt libs inside the APK
        "--ant",
        ant,
        "--debug",    # produces the QtApp-debug-unaligned.apk picked up later
        "--android-platform",
        ndk_platform,
        "--jdk",
        jdk
    ]
    RunSubprocess(androiddeployqt, build_dir)
def RenameApk(dst_apk):
    """Move the generated APK to a more reasonable name.

    Args:
      dst_apk: name of destination apk

    NOTE(review): the source is QtApp-debug-unaligned.apk (matching the
    --debug flag passed to androiddeployqt); the module docstring's claim
    of a "Release" build looks stale.
    """
    src_apk = os.getcwd() + "/android-build/bin/QtApp-debug-unaligned.apk"
    print >> sys.stderr, "Renaming %s to %s" % (src_apk, dst_apk)
    os.rename(src_apk, dst_apk)
def RemoveBuildDir(build_dir):
    """Recursively delete the build directory and everything beneath it.

    Args:
      build_dir: Directory in which app was built.
    """
    shutil.rmtree(build_dir)
def main():
    """Parse arguments, build liquidfun then VoltAir, and emit the APK.

    Returns:
      0 on success, or the failing subprocess's return code.
    """
    parser = argparse.ArgumentParser(description="Build VoltAir.apk")
    parser.add_argument("--build-dir", required=True,
                        help="directory in which to build")
    parser.add_argument("--voltair-root", default=".",
                        help="Location of VoltAir source code")
    parser.add_argument("--liquidfun-root", required=True,
                        help="directory in which liquidfun is located")
    parser.add_argument("--qt-root", required=True, help="Dir for Qt (>= Qt 5.3)")
    parser.add_argument("--jdk", required=True,
                        help="Java Development Kit location")
    parser.add_argument("--sdk-root", required=True, help="Android SDK location")
    parser.add_argument("--ndk-root", required=True, help="Android NDK location")
    parser.add_argument("--ndk-platform", default="android-18",
                        help="Android platform (must be >= android-18)")
    parser.add_argument("--ant", required=True, help="ant binary location")
    parser.add_argument("--num-processes", default="1",
                        help="Number of processes to use for 'make'")
    parser.add_argument("--output-apk", "-o", required=True,
                        help="Destination of produced APK")

    class ParsedArgs(object):
        """Class used to collect arguments from argparse."""
        pass
    # These calls will leave the parsed_args object with one attr per option. It
    # also will exit if there were issues parsing the arguments.
    parsed_args = ParsedArgs()
    parser.parse_args(namespace=parsed_args)
    # Make sure the root paths are absolute.
    orig_cwd = os.getcwd()
    voltair_root = os.path.join(orig_cwd, parsed_args.voltair_root)
    liquidfun_root = os.path.join(orig_cwd, parsed_args.liquidfun_root)
    qt_root = os.path.join(orig_cwd, parsed_args.qt_root)
    # Qt's deployment script requires you build into subdir of voltair_root.
    build_dir = os.path.join(voltair_root, parsed_args.build_dir)
    # NOTE(review): the guard against a pre-existing build dir is commented
    # out, so stale build directories are silently reused.
    #if os.path.exists(build_dir):
    #  err = "Error: Target directory %s exists. Exiting" % build_dir
    #  print >> sys.stderr, err
    #  return -1
    try:
        # Move to liquidfun directory and build it.
        print "liquidfun_root" + liquidfun_root
        RunLiquidFunNdkBuild(liquidfun_root + "/Box2D", parsed_args.ndk_root)
        # Create and move to voltair build target dir and start building.
        if not os.path.exists(build_dir):
            os.mkdir(build_dir)
        Pushd(build_dir)
        # What follows are the required steps for building an APK in Qt. First
        # we need to set up the environment as certain of the steps read it.
        SetEnvironment(parsed_args.sdk_root, parsed_args.ndk_root,
                       parsed_args.ndk_platform)
        # 'qmake' is Qt's project builder. It will construct a Makefile that is
        # then run.
        RunQmake(qt_root, voltair_root)
        # Run the makefile, which results in the building of the libVoltAir.so file.
        RunMake(parsed_args.num_processes)
        # This places the libVoltAir.so file in the appropriate directory.
        RunMakeInstall()
        # Runs 'androiddeployqt' which constructs the APK from the libVoltAir.so and
        # the other files specified in the Qt project (e.g. java files,
        # AndroidManifest.xml, and so on).
        RunAndroidDeployQt(qt_root, voltair_root, build_dir, parsed_args.ant,
                           parsed_args.ndk_platform, parsed_args.jdk)
        # Move the APK androiddeployqt produced to a more descriptive location
        # (i.e. VoltAir.apk).
        RenameApk(parsed_args.output_apk)
    except RunProcessError, error:  # Python 2 except syntax.
        print >> sys.stderr, str(error)
        return error.return_code
    finally:
        # NOTE(review): cleanup is commented out, so the build tree is left in
        # place and the working directory is not restored.
        #Popd()
        # Remove the build dir in order to leave qt tree as we found it.
        #RemoveBuildDir(build_dir)
        print ""
    return 0
if __name__ == "__main__":
sys.exit(main())
| |
"""feedfinder: Find the Web feed for a Web page
http://www.aaronsw.com/2002/feedfinder/
Usage:
feed(uri) - returns feed found for a URI
feeds(uri) - returns all feeds found for a URI
>>> import feedfinder
>>> feedfinder.feed('scripting.com')
'http://scripting.com/rss.xml'
>>>
>>> feedfinder.feeds('scripting.com')
['http://delong.typepad.com/sdj/atom.xml',
'http://delong.typepad.com/sdj/index.rdf',
'http://delong.typepad.com/sdj/rss.xml']
>>>
Can also use from the command line. Feeds are returned one per line:
$ python feedfinder.py diveintomark.org
http://diveintomark.org/xml/atom.xml
How it works:
0. At every step, feeds are minimally verified to make sure they are really feeds.
1. If the URI points to a feed, it is simply returned; otherwise
the page is downloaded and the real fun begins.
2. Feeds pointed to by LINK tags in the header of the page (autodiscovery)
3. <A> links to feeds on the same server ending in ".rss", ".rdf", ".xml", or
".atom"
4. <A> links to feeds on the same server containing "rss", "rdf", "xml", or "atom"
5. <A> links to feeds on external servers ending in ".rss", ".rdf", ".xml", or
".atom"
6. <A> links to feeds on external servers containing "rss", "rdf", "xml", or "atom"
7. Try some guesses about common places for feeds (index.xml, atom.xml, etc.).
8. As a last ditch effort, we search Syndic8 for feeds matching the URI
"""
__version__ = "1.371"
__date__ = "2006-04-24"
__maintainer__ = "Aaron Swartz (me@aaronsw.com)"
__author__ = "Mark Pilgrim (http://diveintomark.org)"
__copyright__ = "Copyright 2002-4, Mark Pilgrim; 2006 Aaron Swartz"
__license__ = "Python"
__credits__ = """Abe Fettig for a patch to sort Syndic8 feeds by popularity
Also Jason Diamond, Brian Lalor for bug reporting and patches"""
_debug = 0
import sgmllib, urllib.request, urllib.parse, urllib.error, urllib.parse, re, sys, urllib.robotparser
import requests
from io import StringIO
from lxml import etree
# XML-RPC support allows feedfinder to query Syndic8 for possible matches.
# Python 2.3 now comes with this module by default, otherwise you can download it
try:
    import xmlrpc.client # http://www.pythonware.com/products/xmlrpc/
except ImportError:
    # NOTE(review): sets the legacy name 'xmlrpclib', but the rest of this
    # module references xmlrpc.client directly; the Syndic8 lookup is wrapped
    # in a broad except, so on ImportError that feature just silently fails.
    xmlrpclib = None

# Legacy shim for ancient Pythons lacking the dict() builtin.
# NOTE(review): 'not dict' is always False today (dict is a truthy type
# object), so this branch is dead code.
if not dict:
    def dict(aList):
        rc = {}
        for k, v in aList:
            rc[k] = v
        return rc
def _debuglog(message):
    """Print *message* only when the module-level _debug flag is set."""
    if not _debug:
        return
    print(message)
class URLGatekeeper:
    """Track robots.txt rules across multiple servers.

    Holds a urllib opener (for its User-Agent string/headers) and caches one
    RobotFileParser per domain.
    """

    def __init__(self):
        self.rpcache = {} # a dictionary of RobotFileParser objects, by domain
        self.urlopener = urllib.request.build_opener()
        # Custom UA string; also sent as the User-Agent header below and used
        # as the agent name for robots.txt checks.
        self.urlopener.version = "NewsBlur Feed Finder (Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_1) AppleWebKit/534.48.3 (KHTML, like Gecko) Version/5.1 Safari/534.48.3)"
        _debuglog(self.urlopener.version)
        self.urlopener.addheaders = [('User-Agent', self.urlopener.version)]
        # self.urlopener.addheaders = [('User-Agent', self.urlopener.version), ('Accept', '*')]
        #urllib.robotparser.URLopener.version = self.urlopener.version
        #urllib.robotparser.URLopener.addheaders = self.urlopener.addheaders

    def _getrp(self, url):
        """Return (and cache) the RobotFileParser for *url*'s domain."""
        protocol, domain = urllib.parse.urlparse(url)[:2]
        if domain in self.rpcache:
            return self.rpcache[domain]
        baseurl = '%s://%s' % (protocol, domain)
        robotsurl = urllib.parse.urljoin(baseurl, 'robots.txt')
        _debuglog('fetching %s' % robotsurl)
        rp = urllib.robotparser.RobotFileParser(robotsurl)
        try:
            rp.read()
        except:
            # Unreachable/broken robots.txt: fall through with an empty parser.
            pass
        self.rpcache[domain] = rp
        return rp

    def can_fetch(self, url):
        """Return whether robots.txt allows fetching *url* with our UA."""
        rp = self._getrp(url)
        allow = rp.can_fetch(self.urlopener.version, url)
        _debuglog("gatekeeper of %s says %s" % (url, allow))
        return allow

    def get(self, url, check=False):
        """Fetch *url* and return its text, or '' on denial/failure.

        robots.txt is consulted only when check=True; any fetch error is
        swallowed and reported as an empty string.
        """
        if check and not self.can_fetch(url): return ''
        try:
            return requests.get(url, headers=dict(self.urlopener.addheaders)).text
        except:
            return ''
_gatekeeper = URLGatekeeper()
class BaseParser(sgmllib.SGMLParser):
    """Common SGML parser base: tracks <base href> and collects links.

    NOTE(review): sgmllib was removed from the Python 3 standard library;
    this module presumably relies on a vendored or back-ported copy.
    """

    def __init__(self, baseuri):
        sgmllib.SGMLParser.__init__(self)
        self.links = []        # collected absolute link URLs
        self.baseuri = baseuri # base for resolving relative hrefs

    def normalize_attrs(self, attrs):
        """Return attrs with names lowercased, entities decoded, and
        rel/type values lowercased; attrs that clean to empty are dropped."""
        def cleanattr(v):
            # Decode numeric character references first, then the five
            # predefined XML/HTML entities. Bug fix: the entity names in the
            # replace() calls had themselves been HTML-unescaped at some point
            # (e.g. replace('<', '<')), turning the decoding into no-ops and
            # the apostrophe case into broken quoting; restored to the
            # canonical &lt;/&gt;/&apos;/&quot;/&amp; forms.
            v = sgmllib.charref.sub(lambda m: chr(int(m.groups()[0])), v)
            if not v: return
            v = v.strip()
            v = (v.replace('&lt;', '<')
                  .replace('&gt;', '>')
                  .replace('&apos;', "'")
                  .replace('&quot;', '"')
                  .replace('&amp;', '&'))
            return v
        attrs = [(k.lower(), cleanattr(v)) for k, v in attrs if cleanattr(v)]
        # Second pass lowercases rel/type values (cleanattr re-run as a filter,
        # as in the original).
        attrs = [(k, k in ('rel', 'type') and v.lower() or v) for k, v in attrs if cleanattr(v)]
        return attrs

    def do_base(self, attrs):
        """Handle <base href=...>: later links resolve against it."""
        attrsD = dict(self.normalize_attrs(attrs))
        if 'href' not in attrsD: return
        self.baseuri = attrsD['href']

    def error(self, *a, **kw): pass # we're not picky: ignore parse errors
class LinkParser(BaseParser):
    """Collects feed URLs from <link rel="alternate" type=...> tags."""

    # MIME types accepted as feed alternates.
    FEED_TYPES = ('application/rss+xml',
                  'text/xml',
                  'application/atom+xml',
                  'application/x.atom+xml',
                  'application/x-atom+xml')

    def do_link(self, attrs):
        """Record the href of a feed-typed alternate link, resolved
        against the current base URI."""
        attrsD = dict(self.normalize_attrs(attrs))
        if 'rel' not in attrsD: return
        rels = attrsD['rel'].split()
        if 'alternate' not in rels: return
        if attrsD.get('type') not in self.FEED_TYPES: return
        if 'href' not in attrsD: return
        self.links.append(urllib.parse.urljoin(self.baseuri, attrsD['href']))
class ALinkParser(BaseParser):
    """Collects every <a href=...> target, resolved against the base URI."""

    def start_a(self, attrs):
        attrsD = dict(self.normalize_attrs(attrs))
        if 'href' not in attrsD: return
        self.links.append(urllib.parse.urljoin(self.baseuri, attrsD['href']))
def makeFullURI(uri):
    """Normalize *uri*: map feed:// to http:// and default a missing scheme
    to http://. Returns None for an empty/falsy input."""
    if not uri:
        return
    uri = uri.strip()
    if uri.startswith('feed://'):
        uri = 'http://' + uri[len('feed://'):]
    if uri.startswith(('http://', 'https://')):
        return uri
    return 'http://%s' % uri
def getLinks(data, baseuri):
    """Parse *data* with LinkParser and return feed <link> URLs."""
    p = LinkParser(baseuri)
    p.feed(data)
    return p.links
def getLinksLXML(data, baseuri):
    """lxml-based fallback for getLinks() when SGML parsing fails.

    NOTE(review): unlike LinkParser this does not require rel="alternate",
    and it returns hrefs as-is without joining against *baseuri* (which is
    accepted but unused here) - confirm that is intended.
    """
    parser = etree.HTMLParser(recover=True)
    tree = etree.parse(StringIO(data), parser)
    links = []
    for link in tree.findall('.//link'):
        if link.attrib.get('type') in LinkParser.FEED_TYPES:
            href = link.attrib['href']
            if href: links.append(href)
    return links
def getALinks(data, baseuri):
    """Parse *data* with ALinkParser and return all <a href> targets."""
    p = ALinkParser(baseuri)
    p.feed(data)
    return p.links
def getLocalLinks(links, baseuri):
    """Return the subset of *links* that live under *baseuri*
    (case-insensitive prefix match)."""
    if not baseuri:
        return []
    prefix = baseuri.lower()
    local = []
    for candidate in links:
        try:
            if candidate.lower().startswith(prefix):
                local.append(candidate)
        except (AttributeError, UnicodeDecodeError):
            # Non-string entries are skipped rather than aborting the scan.
            pass
    return local
def isFeedLink(link):
    """Return True if *link*'s extension marks it as a feed URL.

    Bug fix: the original compared the fixed 4-character slice link[-4:]
    against the suffix list, so the 5-character '.atom' suffix could never
    match. str.endswith with a tuple of suffixes handles all lengths.
    """
    return link.lower().endswith(('.rss', '.rdf', '.xml', '.atom'))
def isXMLRelatedLink(link):
    """Count feed-ish substrings anywhere in *link* (truthy when any occur)."""
    lowered = link.lower()
    return sum(lowered.count(token) for token in ('rss', 'rdf', 'xml', 'atom'))
# Matches Radio-style "broken redirect" stubs: <newLocation>target</newLocation>.
r_brokenRedirect = re.compile('<newLocation[^>]*>(.*?)</newLocation>', re.S)

def tryBrokenRedirect(data):
    """Return the stripped target of a <newLocation> stub in *data*, else None."""
    if '<newLocation' not in data:
        return None
    candidates = r_brokenRedirect.findall(data)
    if candidates and candidates[0]:
        return candidates[0].strip()
    return None
def couldBeFeedData(data):
    """Heuristic feed sniff: 0 for anything HTML-looking, otherwise the
    number of feed root tags (<rss/<rdf/<feed) found."""
    lowered = data.lower()
    if '<html' in lowered:
        return 0
    return sum(lowered.count(tag) for tag in ('<rss', '<rdf', '<feed'))
def isFeed(uri):
    """Fetch *uri* and return a truthy count of feed markers found in it.

    Returns 0 for non-http(s) URIs, False on a fetch error, and the
    couldBeFeedData() count otherwise (0 for HTML-looking pages).
    """
    _debuglog('seeing if %s is a feed' % uri)
    # Despite the name, this holds the full urlparse result; [0] is the scheme.
    protocol = urllib.parse.urlparse(uri)
    if protocol[0] not in ('http', 'https'): return 0
    try:
        data = _gatekeeper.get(uri, check=False)
    except (KeyError, UnicodeDecodeError):
        return False
    count = couldBeFeedData(data)
    return count
def cmp_(a, b):
    """Python-2-style three-way comparison: 1, 0, or -1."""
    greater = 1 if a > b else 0
    less = 1 if a < b else 0
    return greater - less
def sortFeeds(feed1Info, feed2Info):
    """cmp-style comparator ordering feed-info dicts by descending
    'headlines_rank'.

    NOTE(review): Python 3's list.sort() no longer accepts a comparator
    directly; it must be wrapped with functools.cmp_to_key.
    """
    return cmp_(feed2Info['headlines_rank'], feed1Info['headlines_rank'])
def getFeedsFromSyndic8(uri):
    """Query the Syndic8 XML-RPC service for feeds matching *uri*.

    Returns the data URLs of syndicated matches, best-ranked first, or []
    when the lookup fails for any reason (this is a best-effort fallback).
    """
    feeds = []
    try:
        server = xmlrpc.client.Server('http://www.syndic8.com/xmlrpc.php')
        feedids = server.syndic8.FindFeeds(uri)
        infolist = server.syndic8.GetFeedInfo(feedids, ['headlines_rank','status','dataurl'])
        # Bug fix: infolist.sort(sortFeeds) is the Python 2 cmp-style call;
        # on Python 3 it raises TypeError (which the except below used to
        # swallow, silently disabling this fallback). Wrap the legacy
        # comparator with functools.cmp_to_key instead.
        from functools import cmp_to_key
        infolist.sort(key=cmp_to_key(sortFeeds))
        feeds = [f['dataurl'] for f in infolist if f['status'] == 'Syndicated']
        _debuglog('found %s feeds through Syndic8' % len(feeds))
    except Exception:
        # Best effort: any network/XML-RPC failure simply yields no feeds.
        pass
    return feeds
def feeds(uri, all=False, querySyndic8=False, _recurs=None):
    """Discover feed URLs for *uri* using progressively looser strategies.

    Args:
      uri: page or feed address; a missing scheme is added by makeFullURI.
      all: collect results from every strategy instead of stopping at the
        first that finds feeds. (Shadows the builtin all() inside this body.)
      querySyndic8: also search Syndic8 as a last resort.
      _recurs: internal list of already-visited URIs guarding the
        broken-redirect recursion.
    """
    if _recurs is None: _recurs = [uri]
    fulluri = makeFullURI(uri)
    try:
        data = _gatekeeper.get(fulluri, check=False)
    except:
        return []
    # is this already a feed?
    if couldBeFeedData(data):
        return [fulluri]
    newuri = tryBrokenRedirect(data)
    if newuri and newuri not in _recurs:
        _recurs.append(newuri)
        return feeds(newuri, all=all, querySyndic8=querySyndic8, _recurs=_recurs)
    # nope, it's a page, try LINK tags first
    _debuglog('looking for LINK tags')
    try:
        outfeeds = getLinks(data, fulluri)
    except:
        outfeeds = []
    if not outfeeds:
        _debuglog('using lxml to look for LINK tags')
        try:
            outfeeds = getLinksLXML(data, fulluri)
        except:
            outfeeds = []
    _debuglog('found %s feeds through LINK tags' % len(outfeeds))
    # Keep only candidates that actually fetch as feeds.
    outfeeds = list(filter(isFeed, outfeeds))
    if all or not outfeeds:
        # no LINK tags, look for regular <A> links that point to feeds
        _debuglog('no LINK tags, looking at A tags')
        try:
            links = getALinks(data, fulluri)
        except:
            links = []
        _debuglog('no LINK tags, looking at local links')
        locallinks = getLocalLinks(links, fulluri)
        # look for obvious feed links on the same server
        outfeeds.extend(list(filter(isFeed, list(filter(isFeedLink, locallinks)))))
        if all or not outfeeds:
            # look harder for feed links on the same server
            outfeeds.extend(list(filter(isFeed, list(filter(isXMLRelatedLink, locallinks)))))
        if all or not outfeeds:
            # look for obvious feed links on another server
            outfeeds.extend(list(filter(isFeed, list(filter(isFeedLink, links)))))
        if all or not outfeeds:
            # look harder for feed links on another server
            outfeeds.extend(list(filter(isFeed, list(filter(isXMLRelatedLink, links)))))
    if all or not outfeeds:
        # Guess well-known feed locations relative to the page.
        _debuglog('no A tags, guessing')
        suffixes = [ # filenames used by popular software:
            'feed/', # obvious
            'atom.xml', # blogger, TypePad
            'index.atom', # MT, apparently
            'index.rdf', # MT
            'rss.xml', # Dave Winer/Manila
            'index.xml', # MT
            'index.rss' # Slash
        ]
        outfeeds.extend(list(filter(isFeed, [urllib.parse.urljoin(fulluri, x) for x in suffixes])))
    if (all or not outfeeds) and querySyndic8:
        # still no luck, search Syndic8 for feeds (requires xmlrpclib)
        _debuglog('still no luck, searching Syndic8')
        outfeeds.extend(getFeedsFromSyndic8(uri))
    # Ancient compat shim: dedupe when set() exists (always true today).
    if hasattr(__builtins__, 'set') or 'set' in __builtins__:
        outfeeds = list(set(outfeeds))
    return outfeeds
getFeeds = feeds # backwards-compatibility
def feed(uri):
    """Return a single "best" feed URL for *uri*, or None if none is found.

    Preference: a feed whose URL does not mention 'comments' (to skip
    per-post comment feeds); otherwise the first discovered feed.
    """
    # TODO: give preference to certain feed formats
    candidates = feeds(uri)
    if not candidates:
        return None
    preferred = [url for url in candidates if 'comments' not in url.lower()]
    return preferred[0] if preferred else candidates[0]
##### test harness ######
def test():
    """Crawl the autodiscovery test suite and report failures.

    Each test page is expected to expose exactly one LINK tag pointing at a
    feed, and that feed should link back to the page.  Pages are chained via
    ``<link rel="next" href="...">`` until a non-test page is reached.
    """
    uri = 'http://diveintomark.org/tests/client/autodiscovery/html4-001.html'
    failed = []
    count = 0
    while 1:
        # _gatekeeper.get is assumed to return str (decoded HTML) — as the
        # substring searches below require.
        data = _gatekeeper.get(uri)
        if data.find('Atom autodiscovery test') == -1:
            break
        sys.stdout.write('.')
        sys.stdout.flush()
        count += 1
        links = getLinks(data, uri)
        if not links:
            print('\n*** FAILED ***', uri, 'could not find link')
            failed.append(uri)
        elif len(links) > 1:
            print('\n*** FAILED ***', uri, 'found too many links')
            failed.append(uri)
        else:
            # urlopen().read() returns bytes under Python 3; decode before
            # searching for str substrings (the old code raised TypeError).
            atomdata = urllib.request.urlopen(links[0]).read().decode(
                'utf-8', 'replace')
            if atomdata.find('<link rel="alternate"') == -1:
                print('\n*** FAILED ***', uri,
                      'retrieved something that is not a feed')
                failed.append(uri)
            else:
                backlink = atomdata.split('href="').pop().split('"')[0]
                if backlink != uri:
                    print('\n*** FAILED ***', uri, 'retrieved wrong feed')
                    failed.append(uri)
        if data.find('<link rel="next" href="') == -1:
            break
        uri = urllib.parse.urljoin(
            uri, data.split('<link rel="next" href="').pop().split('"')[0])
    print()
    # Plain space-separated summary instead of the 2to3 tuple-print artifact.
    print(count, 'tests executed,', len(failed), 'failed')
if __name__ == '__main__':
    # Command line: [--debug] [uri | "test"]
    args = sys.argv[1:]
    if args and args[0] == '--debug':
        _debug = 1
        del args[0]
    # Default target when no URI is supplied on the command line.
    uri = args[0] if args else 'http://diveintomark.org/'
    if uri == 'test':
        test()
    else:
        print("\n".join(getFeeds(uri)))
| |
#
# Copyright (c) 2010-2014, MIT Probabilistic Computing Project
#
# Lead Developers: Dan Lovell and Jay Baxter
# Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
#
import crosscat.utils.file_utils as fu
import crosscat.utils.general_utils as gu
import crosscat.utils.xnet_utils as xu
import crosscat.utils.hadoop_utils as hu
from crosscat.settings import Hadoop as hs
class HadoopEngine(object):

    """A class to dispatch jobs to a Hadoop cluster

    Requires that a binary, to be run by Hadoop streaming, already exists on the
    cluster.

    Requires specification of write-able file locations where intermediate Hadoop
    output will be stored before being parsed and returned as X_L and X_D

    """

    def __init__(self, seed=0,
                 which_engine_binary=hs.default_engine_binary,
                 hdfs_dir=hs.default_hdfs_dir,
                 jobtracker_uri=hs.default_jobtracker_uri,
                 hdfs_uri=hs.default_hdfs_uri,
                 which_hadoop_jar=hs.default_hadoop_jar,
                 which_hadoop_binary=hs.default_hadoop_binary,
                 output_path=hs.default_output_path,
                 input_filename=hs.default_input_filename,
                 table_data_filename=hs.default_table_data_filename,
                 command_dict_filename=hs.default_command_dict_filename,
                 one_map_task_per_line=True,
                 ):
        # Fail fast if the cluster is unreachable (VPN connectivity check).
        xu.assert_vpn_is_connected()
        #
        self.which_hadoop_binary = which_hadoop_binary
        #
        # Deterministic stream of per-task seeds derived from the input seed.
        self.seed_generator = gu.int_generator(seed)
        self.which_engine_binary = which_engine_binary
        self.hdfs_dir = hdfs_dir
        self.jobtracker_uri = jobtracker_uri
        self.hdfs_uri = hdfs_uri
        self.which_hadoop_jar = which_hadoop_jar
        self.output_path = output_path
        self.input_filename = input_filename
        self.table_data_filename = table_data_filename
        self.one_map_task_per_line = one_map_task_per_line
        self.command_dict_filename = command_dict_filename
        return

    def send_hadoop_command(self, n_tasks=1):
        # Launch a Hadoop streaming job with ``n_tasks`` map tasks, using the
        # binaries, URIs and filenames configured on this engine instance.
        hu.send_hadoop_command(
            self.hdfs_uri, self.hdfs_dir, self.jobtracker_uri,
            self.which_engine_binary, self.which_hadoop_binary, self.which_hadoop_jar,
            self.input_filename, self.table_data_filename,
            self.command_dict_filename, self.output_path,
            n_tasks, self.one_map_task_per_line)
        return

    def get_hadoop_results(self):
        # Pull job output back from HDFS into the local output path.
        # Returns the success indicator from hadoop_utils.
        was_successful = hu.get_hadoop_results(self.hdfs_uri, self.output_path, self.hdfs_dir)
        print 'was_successful: %s' % was_successful
        return was_successful

    def initialize(self, M_c, M_r, T, initialization='from_the_prior',
                   n_chains=1):
        """Sample a latent state from prior

        :param M_c: The column metadata
        :type M_c: dict
        :param M_r: The row metadata
        :type M_r: dict
        :param T: The data table in mapped representation (all floats, generated
                  by data_utils.read_data_objects)
        :type T: list of lists
        :param initialization: initialization strategy passed to the engine binary
        :type initialization: str
        :param n_chains: number of independent chains (one Hadoop task each)
        :type n_chains: int
        :returns: X_L, X_D -- the latent state
        """
        output_path = self.output_path
        input_filename = self.input_filename
        table_data_filename = self.table_data_filename
        # NOTE(review): 'intialize' (sic) is a misspelled local name; it is
        # used consistently below, so behavior is unaffected.
        intialize_args_dict_filename = self.command_dict_filename
        xu.assert_vpn_is_connected()
        #
        table_data = dict(M_c=M_c, M_r=M_r, T=T)
        initialize_args_dict = dict(command='initialize',
                                    initialization=initialization)
        xu.write_initialization_files(input_filename,
                                      table_data, table_data_filename,
                                      initialize_args_dict,
                                      intialize_args_dict_filename,
                                      n_chains)
        # Keep a local copy of the job input for debugging/inspection.
        os.system('cp %s initialize_input' % input_filename)
        self.send_hadoop_command(n_tasks=n_chains)
        was_successful = self.get_hadoop_results()
        hadoop_output = None
        if was_successful:
            hu.copy_hadoop_output(output_path, 'initialize_output')
            X_L_list, X_D_list = hu.read_hadoop_output(output_path)
            hadoop_output = X_L_list, X_D_list
        # None signals failure to the caller.
        return hadoop_output

    def analyze(self, M_c, T, X_L, X_D, kernel_list=(), n_steps=1, c=(), r=(),
                max_iterations=-1, max_time=-1, **kwargs):
        """Evolve the latent state by running MCMC transition kernels

        :param M_c: The column metadata
        :type M_c: dict
        :param T: The data table in mapped representation (all floats, generated
                  by data_utils.read_data_objects)
        :type T: list of lists
        :param X_L: the latent variables associated with the latent state
        :type X_L: dict
        :param X_D: the particular cluster assignments of each row in each view
        :type X_D: list of lists
        :param kernel_list: names of the MCMC transition kernels to run
        :type kernel_list: list of strings
        :param n_steps: the number of times to run each MCMC transition kernel
        :type n_steps: int
        :param c: the (global) column indices to run MCMC transition kernels on
        :type c: list of ints
        :param r: the (global) row indices to run MCMC transition kernels on
        :type r: list of ints
        :param max_iterations: the maximum number of times ot run each MCMC
                               transition kernel. Applicable only if
                               max_time != -1.
        :type max_iterations: int
        :param max_time: the maximum amount of time (seconds) to run MCMC
                         transition kernels for before stopping to return
                         progress
        :type max_time: float
        :param kwargs: optional arguments to pass to hadoop_line_processor.jar.
                       Currently, presence of a 'chunk_size' kwarg causes
                       different behavior.
        :returns: X_L, X_D -- the evolved latent state
        """
        output_path = self.output_path
        input_filename = self.input_filename
        table_data_filename = self.table_data_filename
        analyze_args_dict_filename = self.command_dict_filename
        xu.assert_vpn_is_connected()
        #
        table_data = dict(M_c=M_c, T=T)
        analyze_args_dict = dict(command='analyze', kernel_list=kernel_list,
                                 n_steps=n_steps, c=c, r=r, max_time=max_time)
        # chunk_analyze is a special case of analyze
        if 'chunk_size' in kwargs:
            chunk_size = kwargs['chunk_size']
            chunk_filename_prefix = kwargs['chunk_filename_prefix']
            chunk_dest_dir = kwargs['chunk_dest_dir']
            analyze_args_dict['command'] = 'chunk_analyze'
            analyze_args_dict['chunk_size'] = chunk_size
            analyze_args_dict['chunk_filename_prefix'] = chunk_filename_prefix
            # WARNING: chunk_dest_dir MUST be writeable by hadoop user mapred
            analyze_args_dict['chunk_dest_dir'] = chunk_dest_dir
        # NOTE(review): ``su`` is never imported in this module (only fu, gu,
        # xu, hu are) -- as written this line raises NameError; presumably
        # crosscat.utils.sample_utils should be imported as ``su``. Verify.
        if not su.get_is_multistate(X_L, X_D):
            # Promote a single latent state to the multi-state representation.
            X_L = [X_L]
            X_D = [X_D]
        #
        SEEDS = kwargs.get('SEEDS', None)
        xu.write_analyze_files(input_filename, X_L, X_D,
                               table_data, table_data_filename,
                               analyze_args_dict, analyze_args_dict_filename,
                               SEEDS)
        # Keep a local copy of the job input for debugging/inspection.
        os.system('cp %s analyze_input' % input_filename)
        n_tasks = len(X_L)
        self.send_hadoop_command(n_tasks)
        was_successful = self.get_hadoop_results()
        hadoop_output = None
        if was_successful:
            hu.copy_hadoop_output(output_path, 'analyze_output')
            X_L_list, X_D_list = hu.read_hadoop_output(output_path)
            hadoop_output = X_L_list, X_D_list
        # None signals failure to the caller.
        return hadoop_output

    def simple_predictive_sample(self, M_c, X_L, X_D, Y, Q, n=1):
        # Not implemented on the Hadoop backend.
        pass

    def impute(self, M_c, X_L, X_D, Y, Q, n):
        # Not implemented on the Hadoop backend.
        pass

    def impute_and_confidence(self, M_c, X_L, X_D, Y, Q, n):
        # Not implemented on the Hadoop backend.
        pass
if __name__ == '__main__':
    import argparse
    #
    import crosscat.utils.data_utils as du
    #
    # Command-line driver: read a table from CSV, then dispatch
    # 'initialize', 'analyze', or 'chunk_analyze' to the Hadoop cluster,
    # optionally pickling the resulting latent state.
    parser = argparse.ArgumentParser()
    parser.add_argument('command', type=str)
    parser.add_argument('--base_uri', type=str, default=None)
    parser.add_argument('--hdfs_uri', type=str, default=hs.default_hdfs_uri)
    parser.add_argument('--jobtracker_uri', type=str,
                        default=hs.default_jobtracker_uri)
    parser.add_argument('--hdfs_dir', type=str, default=hs.default_hdfs_dir)
    parser.add_argument('-DEBUG', action='store_true')
    parser.add_argument('--which_engine_binary', type=str, default=hs.default_engine_binary)
    parser.add_argument('--which_hadoop_binary', type=str, default=hs.default_hadoop_binary)
    parser.add_argument('--which_hadoop_jar', type=str, default=hs.default_hadoop_jar)
    parser.add_argument('--n_chains', type=int, default=4)
    parser.add_argument('--n_steps', type=int, default=1)
    parser.add_argument('--chunk_size', type=int, default=1)
    parser.add_argument('--chunk_filename_prefix', type=str, default='chunk')
    parser.add_argument('--chunk_dest_dir', type=str, default='/user/bigdata/SSCI/chunk_dir')
    parser.add_argument('--max_time', type=float, default=-1)
    parser.add_argument('--table_filename', type=str, default='../www/data/dha_small.csv')
    parser.add_argument('--resume_filename', type=str, default=None)
    parser.add_argument('--pkl_filename', type=str, default=None)
    parser.add_argument('--cctypes_filename', type=str, default=None)
    #
    args = parser.parse_args()
    base_uri = args.base_uri
    hdfs_uri = args.hdfs_uri
    jobtracker_uri = args.jobtracker_uri
    hdfs_dir = args.hdfs_dir
    DEBUG = args.DEBUG
    which_engine_binary = args.which_engine_binary
    which_hadoop_binary = args.which_hadoop_binary
    which_hadoop_jar= args.which_hadoop_jar
    n_chains = args.n_chains
    n_steps = args.n_steps
    chunk_size = args.chunk_size
    chunk_filename_prefix = args.chunk_filename_prefix
    chunk_dest_dir = args.chunk_dest_dir
    max_time = args.max_time
    table_filename = args.table_filename
    resume_filename = args.resume_filename
    pkl_filename = args.pkl_filename
    #
    command = args.command
    # assert command in set(gu.get_method_names(HadoopEngine))
    #
    # Optional per-column type overrides for the CSV reader (pickled list).
    cctypes_filename = args.cctypes_filename
    cctypes = None
    if cctypes_filename is not None:
        cctypes = fu.unpickle(cctypes_filename)
    # Resolve cluster URIs, then build the engine and the model inputs.
    hdfs_uri, jobtracker_uri = hu.get_uris(base_uri, hdfs_uri, jobtracker_uri)
    T, M_r, M_c = du.read_model_data_from_csv(table_filename, gen_seed=0,
                                              cctypes=cctypes)
    he = HadoopEngine(which_engine_binary=which_engine_binary,
                      which_hadoop_binary=which_hadoop_binary,
                      which_hadoop_jar=which_hadoop_jar,
                      hdfs_dir=hdfs_dir, hdfs_uri=hdfs_uri,
                      jobtracker_uri=jobtracker_uri)
    X_L_list, X_D_list = None, None
    if command == 'initialize':
        hadoop_output = he.initialize(M_c, M_r, T,
                                      initialization='from_the_prior',
                                      n_chains=n_chains)
        if hadoop_output is not None:
            X_L_list, X_D_list = hadoop_output
    elif command == 'analyze':
        # Resume from a previous run: either a pickle or raw Hadoop output.
        assert resume_filename is not None
        if fu.is_pkl(resume_filename):
            resume_dict = fu.unpickle(resume_filename)
        else:
            resume_dict = hu.read_hadoop_output_file(resume_filename)
        X_L_list = resume_dict['X_L_list']
        X_D_list = resume_dict['X_D_list']
        hadoop_output = he.analyze(M_c, T, X_L_list, X_D_list,
                                   n_steps=n_steps, max_time=max_time)
        if hadoop_output is not None:
            X_L_list, X_D_list = hadoop_output
    elif command == 'chunk_analyze':
        # Same as 'analyze' but with chunked output handling.
        assert resume_filename is not None
        if fu.is_pkl(resume_filename):
            resume_dict = fu.unpickle(resume_filename)
            X_L_list = resume_dict['X_L_list']
            X_D_list = resume_dict['X_D_list']
        else:
            X_L_list, X_D_list = hu.read_hadoop_output(resume_filename)
        hadoop_output = he.analyze(M_c, T, X_L_list, X_D_list,
                                   n_steps=n_steps, max_time=max_time,
                                   chunk_size=chunk_size,
                                   chunk_filename_prefix=chunk_filename_prefix,
                                   chunk_dest_dir=chunk_dest_dir)
        if hadoop_output is not None:
            X_L_list, X_D_list = hadoop_output
    else:
        print 'Unknown command: %s' % command
        import sys
        sys.exit()
    # Optionally persist the table and the (possibly evolved) latent state.
    if pkl_filename is not None:
        to_pkl_dict = dict(
            T=T,
            M_c=M_c,
            M_r=M_r,
            X_L_list=X_L_list,
            X_D_list=X_D_list,
        )
        fu.pickle(to_pkl_dict, filename=pkl_filename)
| |
# coding: utf-8
from sqlalchemy.testing import eq_, assert_raises_message, expect_warnings
from sqlalchemy import sql, exc, schema, types as sqltypes
from sqlalchemy import Table, MetaData, Column, select, String, \
Index, Integer, ForeignKey, PrimaryKeyConstraint, extract, \
VARCHAR, NVARCHAR, Unicode, UnicodeText, \
NUMERIC, DECIMAL, Numeric, Float, FLOAT, TIMESTAMP, DATE, \
DATETIME, TIME, \
DateTime, Time, Date, Interval, NCHAR, CHAR, CLOB, TEXT, Boolean, \
BOOLEAN, LargeBinary, BLOB, SmallInteger, INT, func, cast
from sqlalchemy.dialects.mysql import base as mysql
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy.sql import table, column
import re
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
    """String-compilation checks for MySQL-specific DDL and DML."""

    __dialect__ = mysql.dialect()

    def test_reserved_words(self):
        """Reserved words used as column names are backtick-quoted."""
        table = Table("mysql_table", MetaData(),
                      Column("col1", Integer),
                      Column("master_ssl_verify_server_cert", Integer))
        x = select([table.c.col1, table.c.master_ssl_verify_server_cert])
        self.assert_compile(
            x,
            "SELECT mysql_table.col1, "
            "mysql_table.`master_ssl_verify_server_cert` FROM mysql_table")

    def test_create_index_simple(self):
        """Basic CREATE INDEX rendering with no MySQL extensions."""
        m = MetaData()
        tbl = Table('testtbl', m, Column('data', String(255)))
        idx = Index('test_idx1', tbl.c.data)
        self.assert_compile(schema.CreateIndex(idx),
                            'CREATE INDEX test_idx1 ON testtbl (data)')

    def test_create_index_with_length(self):
        """mysql_length renders a prefix length after the column name."""
        m = MetaData()
        tbl = Table('testtbl', m, Column('data', String(255)))
        idx1 = Index('test_idx1', tbl.c.data, mysql_length=10)
        idx2 = Index('test_idx2', tbl.c.data, mysql_length=5)
        self.assert_compile(schema.CreateIndex(idx1),
                            'CREATE INDEX test_idx1 ON testtbl (data(10))')
        self.assert_compile(schema.CreateIndex(idx2),
                            'CREATE INDEX test_idx2 ON testtbl (data(5))')

    def test_create_index_with_length_quoted(self):
        """Prefix length combines with a quoted column name."""
        m = MetaData()
        tbl = Table('testtbl', m, Column('some quoted data',
                                         String(255), key='s'))
        idx1 = Index('test_idx1', tbl.c.s, mysql_length=10)
        self.assert_compile(
            schema.CreateIndex(idx1),
            'CREATE INDEX test_idx1 ON testtbl (`some quoted data`(10))')

    def test_create_composite_index_with_length_quoted(self):
        """Per-column lengths keyed by the unquoted column name."""
        m = MetaData()
        tbl = Table('testtbl', m,
                    Column('some Quoted a', String(255), key='a'),
                    Column('some Quoted b', String(255), key='b'))
        idx1 = Index('test_idx1', tbl.c.a, tbl.c.b,
                     mysql_length={'some Quoted a': 10, 'some Quoted b': 20})
        self.assert_compile(schema.CreateIndex(idx1),
                            'CREATE INDEX test_idx1 ON testtbl '
                            '(`some Quoted a`(10), `some Quoted b`(20))')

    def test_create_composite_index_with_length_quoted_3085_workaround(self):
        """Already-backtick-quoted keys in mysql_length also work."""
        m = MetaData()
        tbl = Table('testtbl', m,
                    Column('some quoted a', String(255), key='a'),
                    Column('some quoted b', String(255), key='b'))
        idx1 = Index(
            'test_idx1', tbl.c.a, tbl.c.b,
            mysql_length={'`some quoted a`': 10, '`some quoted b`': 20}
        )
        self.assert_compile(schema.CreateIndex(idx1),
                            'CREATE INDEX test_idx1 ON testtbl '
                            '(`some quoted a`(10), `some quoted b`(20))')

    def test_create_composite_index_with_length(self):
        """Dict and scalar forms of mysql_length on composite indexes."""
        m = MetaData()
        tbl = Table('testtbl', m,
                    Column('a', String(255)),
                    Column('b', String(255)))
        idx1 = Index('test_idx1', tbl.c.a, tbl.c.b,
                     mysql_length={'a': 10, 'b': 20})
        idx2 = Index('test_idx2', tbl.c.a, tbl.c.b,
                     mysql_length={'a': 15})
        idx3 = Index('test_idx3', tbl.c.a, tbl.c.b,
                     mysql_length=30)
        self.assert_compile(
            schema.CreateIndex(idx1),
            'CREATE INDEX test_idx1 ON testtbl (a(10), b(20))'
        )
        self.assert_compile(
            schema.CreateIndex(idx2),
            'CREATE INDEX test_idx2 ON testtbl (a(15), b)'
        )
        self.assert_compile(
            schema.CreateIndex(idx3),
            'CREATE INDEX test_idx3 ON testtbl (a(30), b(30))'
        )

    def test_create_index_with_using(self):
        """mysql_using appends a USING clause to CREATE INDEX."""
        m = MetaData()
        tbl = Table('testtbl', m, Column('data', String(255)))
        idx1 = Index('test_idx1', tbl.c.data, mysql_using='btree')
        idx2 = Index('test_idx2', tbl.c.data, mysql_using='hash')
        self.assert_compile(
            schema.CreateIndex(idx1),
            'CREATE INDEX test_idx1 ON testtbl (data) USING btree')
        self.assert_compile(
            schema.CreateIndex(idx2),
            'CREATE INDEX test_idx2 ON testtbl (data) USING hash')

    def test_create_pk_plain(self):
        """Plain PRIMARY KEY constraint inside CREATE TABLE."""
        m = MetaData()
        tbl = Table('testtbl', m, Column('data', String(255)),
                    PrimaryKeyConstraint('data'))
        self.assert_compile(
            schema.CreateTable(tbl),
            "CREATE TABLE testtbl (data VARCHAR(255) NOT NULL, "
            "PRIMARY KEY (data))")

    def test_create_pk_with_using(self):
        """PRIMARY KEY with mysql_using renders USING after the key."""
        m = MetaData()
        tbl = Table('testtbl', m, Column('data', String(255)),
                    PrimaryKeyConstraint('data', mysql_using='btree'))
        self.assert_compile(
            schema.CreateTable(tbl),
            "CREATE TABLE testtbl (data VARCHAR(255) NOT NULL, "
            "PRIMARY KEY (data) USING btree)")

    def test_create_index_expr(self):
        """Expression (functional) indexes compile the raw expression."""
        m = MetaData()
        t1 = Table('foo', m,
                   Column('x', Integer)
                   )
        self.assert_compile(
            schema.CreateIndex(Index("bar", t1.c.x > 5)),
            "CREATE INDEX bar ON foo (x > 5)"
        )

    def test_deferrable_initially_kw_not_ignored(self):
        """DEFERRABLE / INITIALLY keywords are emitted on foreign keys."""
        m = MetaData()
        Table('t1', m, Column('id', Integer, primary_key=True))
        t2 = Table(
            't2', m, Column(
                'id', Integer,
                ForeignKey('t1.id', deferrable=True, initially="XYZ"),
                primary_key=True))
        self.assert_compile(
            schema.CreateTable(t2),
            "CREATE TABLE t2 (id INTEGER NOT NULL, "
            "PRIMARY KEY (id), FOREIGN KEY(id) REFERENCES t1 (id) "
            "DEFERRABLE INITIALLY XYZ)"
        )

    def test_match_kw_raises(self):
        """The MATCH foreign-key keyword raises CompileError on MySQL."""
        m = MetaData()
        Table('t1', m, Column('id', Integer, primary_key=True))
        t2 = Table('t2', m, Column('id', Integer,
                                   ForeignKey('t1.id', match="XYZ"),
                                   primary_key=True))
        assert_raises_message(
            exc.CompileError,
            "MySQL ignores the 'MATCH' keyword while at the same time causes "
            "ON UPDATE/ON DELETE clauses to be ignored.",
            schema.CreateTable(t2).compile, dialect=mysql.dialect()
        )

    def test_for_update(self):
        """FOR UPDATE and LOCK IN SHARE MODE locking clauses."""
        table1 = table('mytable',
                       column('myid'), column('name'), column('description'))
        self.assert_compile(
            table1.select(table1.c.myid == 7).with_for_update(),
            "SELECT mytable.myid, mytable.name, mytable.description "
            "FROM mytable WHERE mytable.myid = %s FOR UPDATE")
        self.assert_compile(
            table1.select(table1.c.myid == 7).with_for_update(read=True),
            "SELECT mytable.myid, mytable.name, mytable.description "
            "FROM mytable WHERE mytable.myid = %s LOCK IN SHARE MODE")
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
    """Tests MySQL-dialect specific compilation."""

    __dialect__ = mysql.dialect()

    def test_precolumns(self):
        """DISTINCT and MySQL SELECT prefix keywords render before columns."""
        dialect = self.__dialect__

        def gen(distinct=None, prefixes=None):
            # Build a one-column SELECT with the given options and return
            # the compiled SQL string.
            kw = {}
            if distinct is not None:
                kw['distinct'] = distinct
            if prefixes is not None:
                kw['prefixes'] = prefixes
            return str(select([column('q')], **kw).compile(dialect=dialect))

        eq_(gen(None), 'SELECT q')
        eq_(gen(True), 'SELECT DISTINCT q')
        eq_(gen(prefixes=['ALL']), 'SELECT ALL q')
        eq_(gen(prefixes=['DISTINCTROW']),
            'SELECT DISTINCTROW q')

        # Interaction with MySQL prefix extensions
        eq_(
            gen(None, ['straight_join']),
            'SELECT straight_join q')
        eq_(
            gen(False, ['HIGH_PRIORITY', 'SQL_SMALL_RESULT', 'ALL']),
            'SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL q')
        eq_(
            gen(True, ['high_priority', sql.text('sql_cache')]),
            'SELECT high_priority sql_cache DISTINCT q')

    def test_backslash_escaping(self):
        """ESCAPE backslash doubling depends on _backslash_escapes."""
        self.assert_compile(
            sql.column('foo').like('bar', escape='\\'),
            "foo LIKE %s ESCAPE '\\\\'"
        )

        dialect = mysql.dialect()
        dialect._backslash_escapes = False
        self.assert_compile(
            sql.column('foo').like('bar', escape='\\'),
            "foo LIKE %s ESCAPE '\\'",
            dialect=dialect
        )

    def test_limit(self):
        """MySQL LIMIT/OFFSET forms, including offset-only sentinel."""
        t = sql.table('t', sql.column('col1'), sql.column('col2'))

        self.assert_compile(
            select([t]).limit(10).offset(20),
            "SELECT t.col1, t.col2 FROM t LIMIT %s, %s",
            {'param_1': 20, 'param_2': 10}
        )
        self.assert_compile(
            select([t]).limit(10),
            "SELECT t.col1, t.col2 FROM t LIMIT %s",
            {'param_1': 10})

        # OFFSET without LIMIT requires MySQL's "huge number" idiom.
        self.assert_compile(
            select([t]).offset(10),
            "SELECT t.col1, t.col2 FROM t LIMIT %s, 18446744073709551615",
            {'param_1': 10}
        )

    def test_varchar_raise(self):
        """Length-less VARCHAR-family types refuse to compile on MySQL."""
        for type_ in (
            String,
            VARCHAR,
            String(),
            VARCHAR(),
            NVARCHAR(),
            Unicode,
            Unicode(),
        ):
            type_ = sqltypes.to_instance(type_)
            assert_raises_message(
                exc.CompileError,
                "VARCHAR requires a length on dialect mysql",
                type_.compile,
                dialect=mysql.dialect()
            )

            t1 = Table('sometable', MetaData(),
                       Column('somecolumn', type_)
                       )
            assert_raises_message(
                exc.CompileError,
                r"\(in table 'sometable', column 'somecolumn'\)\: "
                r"(?:N)?VARCHAR requires a length on dialect mysql",
                schema.CreateTable(t1).compile,
                dialect=mysql.dialect()
            )

    def test_update_limit(self):
        """MySQL's UPDATE ... LIMIT extension via mysql_limit."""
        t = sql.table('t', sql.column('col1'), sql.column('col2'))

        self.assert_compile(
            t.update(values={'col1': 123}),
            "UPDATE t SET col1=%s"
        )
        self.assert_compile(
            t.update(values={'col1': 123}, mysql_limit=5),
            "UPDATE t SET col1=%s LIMIT 5"
        )
        self.assert_compile(
            t.update(values={'col1': 123}, mysql_limit=None),
            "UPDATE t SET col1=%s"
        )
        self.assert_compile(
            t.update(t.c.col2 == 456, values={'col1': 123}, mysql_limit=1),
            "UPDATE t SET col1=%s WHERE t.col2 = %s LIMIT 1"
        )

    def test_utc_timestamp(self):
        """UTC_TIMESTAMP renders without parentheses."""
        self.assert_compile(func.utc_timestamp(), "UTC_TIMESTAMP")

    def test_sysdate(self):
        """SYSDATE renders with parentheses."""
        self.assert_compile(func.sysdate(), "SYSDATE()")

    def test_cast(self):
        """CAST target types map onto MySQL's restricted CAST vocabulary."""
        t = sql.table('t', sql.column('col'))
        m = mysql

        specs = [
            (Integer, "CAST(t.col AS SIGNED INTEGER)"),
            (INT, "CAST(t.col AS SIGNED INTEGER)"),
            (m.MSInteger, "CAST(t.col AS SIGNED INTEGER)"),
            (m.MSInteger(unsigned=True), "CAST(t.col AS UNSIGNED INTEGER)"),
            (SmallInteger, "CAST(t.col AS SIGNED INTEGER)"),
            (m.MSSmallInteger, "CAST(t.col AS SIGNED INTEGER)"),
            (m.MSTinyInteger, "CAST(t.col AS SIGNED INTEGER)"),
            # 'SIGNED INTEGER' is a bigint, so this is ok.
            (m.MSBigInteger, "CAST(t.col AS SIGNED INTEGER)"),
            (m.MSBigInteger(unsigned=False), "CAST(t.col AS SIGNED INTEGER)"),
            (m.MSBigInteger(unsigned=True),
             "CAST(t.col AS UNSIGNED INTEGER)"),

            # this is kind of sucky. thank you default arguments!
            (NUMERIC, "CAST(t.col AS DECIMAL)"),
            (DECIMAL, "CAST(t.col AS DECIMAL)"),
            (Numeric, "CAST(t.col AS DECIMAL)"),
            (m.MSNumeric, "CAST(t.col AS DECIMAL)"),
            (m.MSDecimal, "CAST(t.col AS DECIMAL)"),

            (TIMESTAMP, "CAST(t.col AS DATETIME)"),
            (DATETIME, "CAST(t.col AS DATETIME)"),
            (DATE, "CAST(t.col AS DATE)"),
            (TIME, "CAST(t.col AS TIME)"),
            (DateTime, "CAST(t.col AS DATETIME)"),
            (Date, "CAST(t.col AS DATE)"),
            (Time, "CAST(t.col AS TIME)"),
            (DateTime, "CAST(t.col AS DATETIME)"),
            (Date, "CAST(t.col AS DATE)"),

            (m.MSTime, "CAST(t.col AS TIME)"),
            (m.MSTimeStamp, "CAST(t.col AS DATETIME)"),

            (String, "CAST(t.col AS CHAR)"),
            (Unicode, "CAST(t.col AS CHAR)"),
            (UnicodeText, "CAST(t.col AS CHAR)"),
            (VARCHAR, "CAST(t.col AS CHAR)"),
            (NCHAR, "CAST(t.col AS CHAR)"),
            (CHAR, "CAST(t.col AS CHAR)"),
            (m.CHAR(charset='utf8'), "CAST(t.col AS CHAR CHARACTER SET utf8)"),
            (CLOB, "CAST(t.col AS CHAR)"),
            (TEXT, "CAST(t.col AS CHAR)"),
            (m.TEXT(charset='utf8'), "CAST(t.col AS CHAR CHARACTER SET utf8)"),
            (String(32), "CAST(t.col AS CHAR(32))"),
            (Unicode(32), "CAST(t.col AS CHAR(32))"),
            (CHAR(32), "CAST(t.col AS CHAR(32))"),
            (m.MSString, "CAST(t.col AS CHAR)"),
            (m.MSText, "CAST(t.col AS CHAR)"),
            (m.MSTinyText, "CAST(t.col AS CHAR)"),
            (m.MSMediumText, "CAST(t.col AS CHAR)"),
            (m.MSLongText, "CAST(t.col AS CHAR)"),
            (m.MSNChar, "CAST(t.col AS CHAR)"),
            (m.MSNVarChar, "CAST(t.col AS CHAR)"),

            (LargeBinary, "CAST(t.col AS BINARY)"),
            (BLOB, "CAST(t.col AS BINARY)"),
            (m.MSBlob, "CAST(t.col AS BINARY)"),
            (m.MSBlob(32), "CAST(t.col AS BINARY)"),
            (m.MSTinyBlob, "CAST(t.col AS BINARY)"),
            (m.MSMediumBlob, "CAST(t.col AS BINARY)"),
            (m.MSLongBlob, "CAST(t.col AS BINARY)"),
            (m.MSBinary, "CAST(t.col AS BINARY)"),
            (m.MSBinary(32), "CAST(t.col AS BINARY)"),
            (m.MSVarBinary, "CAST(t.col AS BINARY)"),
            (m.MSVarBinary(32), "CAST(t.col AS BINARY)"),

            (Interval, "CAST(t.col AS DATETIME)"),
        ]

        for type_, expected in specs:
            self.assert_compile(cast(t.c.col, type_), expected)

    def test_cast_type_decorator(self):
        """TypeDecorator casts use the implementation type's CAST target."""
        class MyInteger(sqltypes.TypeDecorator):
            impl = Integer

        type_ = MyInteger()
        t = sql.table('t', sql.column('col'))
        self.assert_compile(
            cast(t.c.col, type_), "CAST(t.col AS SIGNED INTEGER)")

    def test_unsupported_casts(self):
        """Un-castable types compile to the bare expression with a warning."""
        t = sql.table('t', sql.column('col'))
        m = mysql

        specs = [
            (m.MSBit, "t.col"),

            (FLOAT, "t.col"),
            (Float, "t.col"),
            (m.MSFloat, "t.col"),
            (m.MSDouble, "t.col"),
            (m.MSReal, "t.col"),

            (m.MSYear, "t.col"),
            (m.MSYear(2), "t.col"),

            (Boolean, "t.col"),
            (BOOLEAN, "t.col"),

            (m.MSEnum, "t.col"),
            (m.MSEnum("1", "2"), "t.col"),
            (m.MSSet, "t.col"),
            (m.MSSet("1", "2"), "t.col"),
        ]

        for type_, expected in specs:
            with expect_warnings(
                "Datatype .* does not support CAST on MySQL;"
            ):
                self.assert_compile(cast(t.c.col, type_), expected)

    def test_no_cast_pre_4(self):
        """Servers older than MySQL 4 skip CAST entirely."""
        self.assert_compile(
            cast(Column('foo', Integer), String),
            "CAST(foo AS CHAR)",
        )
        dialect = mysql.dialect()
        dialect.server_version_info = (3, 2, 3)
        with expect_warnings("Current MySQL version does not support CAST;"):
            self.assert_compile(
                cast(Column('foo', Integer), String),
                "foo",
                dialect=dialect
            )

    def test_cast_grouped_expression_non_castable(self):
        """Grouped expressions keep their parens when CAST is dropped."""
        with expect_warnings("Datatype FLOAT does not support CAST on MySQL;"):
            self.assert_compile(
                cast(sql.column('x') + sql.column('y'), Float),
                "(x + y)"
            )

    def test_cast_grouped_expression_pre_4(self):
        """Pre-4 servers drop CAST but keep expression grouping."""
        dialect = mysql.dialect()
        dialect.server_version_info = (3, 2, 3)
        with expect_warnings("Current MySQL version does not support CAST;"):
            self.assert_compile(
                cast(sql.column('x') + sql.column('y'), Integer),
                "(x + y)",
                dialect=dialect
            )

    def test_extract(self):
        """EXTRACT fields pass through; 'milliseconds' is normalized."""
        t = sql.table('t', sql.column('col1'))

        for field in 'year', 'month', 'day':
            self.assert_compile(
                select([extract(field, t.c.col1)]),
                "SELECT EXTRACT(%s FROM t.col1) AS anon_1 FROM t" % field)

        # plural 'milliseconds' is normalized to MySQL's 'millisecond' field
        self.assert_compile(
            select([extract('milliseconds', t.c.col1)]),
            "SELECT EXTRACT(millisecond FROM t.col1) AS anon_1 FROM t")

    def test_too_long_index(self):
        """Over-length index names are truncated with a hash suffix."""
        exp = 'ix_zyrenian_zyme_zyzzogeton_zyzzogeton_zyrenian_zyme_zyz_5cd2'
        tname = 'zyrenian_zyme_zyzzogeton_zyzzogeton'
        cname = 'zyrenian_zyme_zyzzogeton_zo'

        t1 = Table(tname, MetaData(),
                   Column(cname, Integer, index=True),
                   )
        ix1 = list(t1.indexes)[0]
        self.assert_compile(
            schema.CreateIndex(ix1),
            "CREATE INDEX %s "
            "ON %s (%s)" % (exp, tname, cname)
        )

    def test_innodb_autoincrement(self):
        """InnoDB composite PKs list the AUTO_INCREMENT column first."""
        t1 = Table(
            'sometable', MetaData(),
            Column(
                'assigned_id', Integer(), primary_key=True,
                autoincrement=False),
            Column('id', Integer(), primary_key=True, autoincrement=True),
            mysql_engine='InnoDB')
        self.assert_compile(schema.CreateTable(t1),
                            'CREATE TABLE sometable (assigned_id '
                            'INTEGER NOT NULL, id INTEGER NOT NULL '
                            'AUTO_INCREMENT, PRIMARY KEY (id, assigned_id)'
                            ')ENGINE=InnoDB')

        t1 = Table('sometable', MetaData(),
                   Column('assigned_id', Integer(), primary_key=True,
                          autoincrement=True),
                   Column('id', Integer(), primary_key=True,
                          autoincrement=False), mysql_engine='InnoDB')
        self.assert_compile(schema.CreateTable(t1),
                            'CREATE TABLE sometable (assigned_id '
                            'INTEGER NOT NULL AUTO_INCREMENT, id '
                            'INTEGER NOT NULL, PRIMARY KEY '
                            '(assigned_id, id))ENGINE=InnoDB')

    def test_innodb_autoincrement_reserved_word_column_name(self):
        """AUTO_INCREMENT reordering also quotes reserved-word columns."""
        t1 = Table(
            'sometable', MetaData(),
            Column('id', Integer(), primary_key=True, autoincrement=False),
            Column('order', Integer(), primary_key=True, autoincrement=True),
            mysql_engine='InnoDB')
        self.assert_compile(
            schema.CreateTable(t1),
            'CREATE TABLE sometable ('
            'id INTEGER NOT NULL, '
            '`order` INTEGER NOT NULL AUTO_INCREMENT, '
            'PRIMARY KEY (`order`, id)'
            ')ENGINE=InnoDB')

    def test_create_table_with_partition(self):
        """mysql_partition_by / mysql_partitions render PARTITION BY KEY."""
        t1 = Table(
            'testtable', MetaData(),
            Column('id', Integer(), primary_key=True, autoincrement=True),
            Column('other_id', Integer(), primary_key=True,
                   autoincrement=False),
            mysql_partitions='2', mysql_partition_by='KEY(other_id)')
        self.assert_compile(
            schema.CreateTable(t1),
            'CREATE TABLE testtable ('
            'id INTEGER NOT NULL AUTO_INCREMENT, '
            'other_id INTEGER NOT NULL, '
            'PRIMARY KEY (id, other_id)'
            ')PARTITION BY KEY(other_id) PARTITIONS 2'
        )

    def test_create_table_with_partition_hash(self):
        """PARTITION BY HASH variant of the partition options."""
        t1 = Table(
            'testtable', MetaData(),
            Column('id', Integer(), primary_key=True, autoincrement=True),
            Column('other_id', Integer(), primary_key=True,
                   autoincrement=False),
            mysql_partitions='2', mysql_partition_by='HASH(other_id)')
        self.assert_compile(
            schema.CreateTable(t1),
            'CREATE TABLE testtable ('
            'id INTEGER NOT NULL AUTO_INCREMENT, '
            'other_id INTEGER NOT NULL, '
            'PRIMARY KEY (id, other_id)'
            ')PARTITION BY HASH(other_id) PARTITIONS 2'
        )
| |
# -*- coding: utf-8 -*-
"""
hyperframe/frame
~~~~~~~~~~~~~~~~
Defines framing logic for HTTP/2. Provides both classes to represent framed
data and logic for aiding the connection when it comes to reading from the
socket.
"""
import struct
import binascii
from .exceptions import (
UnknownFrameError, InvalidPaddingError, InvalidFrameError, InvalidDataError
)
from .flags import Flag, Flags
from typing import Optional, Tuple, List, Iterable, Any, Dict, Type
# The maximum initial length of a frame. Some frames have shorter maximum
# lengths.
FRAME_MAX_LEN = (2 ** 14)  # 16,384 octets

# The maximum allowed length of a frame (largest value a 24-bit length
# field can carry).
FRAME_MAX_ALLOWED_LEN = (2 ** 24) - 1

# Stream association enumerations: whether a frame type requires a non-zero
# stream id, forbids one, or accepts either (enforced in Frame.__init__).
_STREAM_ASSOC_HAS_STREAM = "has-stream"
_STREAM_ASSOC_NO_STREAM = "no-stream"
_STREAM_ASSOC_EITHER = "either"

# Structs for packing and unpacking frame fields (all network byte order).
_STRUCT_HBBBL = struct.Struct(">HBBBL")  # the 9-byte common frame header
_STRUCT_LL = struct.Struct(">LL")
_STRUCT_HL = struct.Struct(">HL")
_STRUCT_LB = struct.Struct(">LB")
_STRUCT_L = struct.Struct(">L")
_STRUCT_H = struct.Struct(">H")
_STRUCT_B = struct.Struct(">B")
class Frame:
"""
The base class for all HTTP/2 frames.
"""
#: The flags defined on this type of frame.
defined_flags: List[Flag] = []
#: The byte used to define the type of the frame.
type: Optional[int] = None
# If 'has-stream', the frame's stream_id must be non-zero. If 'no-stream',
# it must be zero. If 'either', it's not checked.
stream_association: Optional[str] = None
def __init__(self, stream_id: int, flags: Iterable[str] = ()) -> None:
#: The stream identifier for the stream this frame was received on.
#: Set to 0 for frames sent on the connection (stream-id 0).
self.stream_id = stream_id
#: The flags set for this frame.
self.flags = Flags(self.defined_flags)
#: The frame length, excluding the nine-byte header.
self.body_len = 0
for flag in flags:
self.flags.add(flag)
if (not self.stream_id and
self.stream_association == _STREAM_ASSOC_HAS_STREAM):
raise InvalidDataError(
'Stream ID must be non-zero for {}'.format(
type(self).__name__,
)
)
if (self.stream_id and
self.stream_association == _STREAM_ASSOC_NO_STREAM):
raise InvalidDataError(
'Stream ID must be zero for {} with stream_id={}'.format(
type(self).__name__,
self.stream_id,
)
)
def __repr__(self) -> str:
return (
"{}(stream_id={}, flags={}): {}"
).format(
type(self).__name__,
self.stream_id,
repr(self.flags),
self._body_repr(),
)
    def _body_repr(self) -> str:
        # More specific implementation may be provided by subclasses of Frame.
        # This fallback shows the serialized (and truncated) body content,
        # formatted by the module-level _raw_data_repr helper.
        return _raw_data_repr(self.serialize_body())
@staticmethod
def explain(data: memoryview) -> Tuple["Frame", int]:
"""
Takes a bytestring and tries to parse a single frame and print it.
This function is only provided for debugging purposes.
:param data: A memoryview object containing the raw data of at least
one complete frame (header and body).
.. versionadded:: 6.0.0
"""
frame, length = Frame.parse_frame_header(data[:9])
frame.parse_body(data[9:9 + length])
print(frame)
return frame, length
@staticmethod
def parse_frame_header(header: memoryview, strict: bool = False) -> Tuple["Frame", int]:
"""
Takes a 9-byte frame header and returns a tuple of the appropriate
Frame object and the length that needs to be read from the socket.
This populates the flags field, and determines how long the body is.
:param header: A memoryview object containing the 9-byte frame header
data of a frame. Must not contain more or less.
:param strict: Whether to raise an exception when encountering a frame
not defined by spec and implemented by hyperframe.
:raises hyperframe.exceptions.UnknownFrameError: If a frame of unknown
type is received.
.. versionchanged:: 5.0.0
Added ``strict`` parameter to accommodate :class:`ExtensionFrame`
"""
try:
fields = _STRUCT_HBBBL.unpack(header)
except struct.error:
raise InvalidFrameError("Invalid frame header")
# First 24 bits are frame length.
length = (fields[0] << 8) + fields[1]
type = fields[2]
flags = fields[3]
stream_id = fields[4] & 0x7FFFFFFF
try:
frame = FRAMES[type](stream_id)
except KeyError:
if strict:
raise UnknownFrameError(type, length)
frame = ExtensionFrame(type=type, stream_id=stream_id)
frame.parse_flags(flags)
return (frame, length)
def parse_flags(self, flag_byte: int) -> Flags:
for flag, flag_bit in self.defined_flags:
if flag_byte & flag_bit:
self.flags.add(flag)
return self.flags
def serialize(self) -> bytes:
"""
Convert a frame into a bytestring, representing the serialized form of
the frame.
"""
body = self.serialize_body()
self.body_len = len(body)
# Build the common frame header.
# First, get the flags.
flags = 0
for flag, flag_bit in self.defined_flags:
if flag in self.flags:
flags |= flag_bit
header = _STRUCT_HBBBL.pack(
(self.body_len >> 8) & 0xFFFF, # Length spread over top 24 bits
self.body_len & 0xFF,
self.type,
flags,
self.stream_id & 0x7FFFFFFF # Stream ID is 32 bits.
)
return header + body
def serialize_body(self) -> bytes:
raise NotImplementedError()
def parse_body(self, data: memoryview) -> None:
"""
Given the body of a frame, parses it into frame data. This populates
the non-header parts of the frame: that is, it does not populate the
stream ID or flags.
:param data: A memoryview object containing the body data of the frame.
Must not contain *more* data than the length returned by
:meth:`parse_frame_header
<hyperframe.frame.Frame.parse_frame_header>`.
"""
raise NotImplementedError()
class Padding:
    """
    Mixin for frames that carry padding.

    Adds a ``pad_length`` attribute plus helpers that serialize and parse the
    one-byte padding-length field preceding a padded frame body.
    """
    def __init__(self, stream_id: int, pad_length: int = 0, **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)  # type: ignore
        #: Number of zero bytes of padding appended to the frame body.
        self.pad_length = pad_length
    def serialize_padding_data(self) -> bytes:
        """Return the 1-byte pad-length field, or b'' when not padded."""
        if 'PADDED' not in self.flags:  # type: ignore
            return b''
        return _STRUCT_B.pack(self.pad_length)
    def parse_padding_data(self, data: memoryview) -> int:
        """Consume the pad-length field; return the number of bytes read."""
        if 'PADDED' not in self.flags:  # type: ignore
            return 0
        try:
            (self.pad_length,) = struct.unpack('!B', data[:1])
        except struct.error:
            raise InvalidFrameError("Invalid Padding data")
        return 1
    #: .. deprecated:: 5.2.1
    #:    Use self.pad_length instead.
    @property
    def total_padding(self) -> int:  # pragma: no cover
        import warnings
        warnings.warn(
            "total_padding contains the same information as pad_length.",
            DeprecationWarning
        )
        return self.pad_length
class Priority:
    """
    Mixin for frames carrying stream priority data: an exclusive bit, a
    31-bit stream dependency, and a one-byte stream weight.
    """
    def __init__(self,
                 stream_id: int,
                 depends_on: int = 0x0,
                 stream_weight: int = 0x0,
                 exclusive: bool = False,
                 **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)  # type: ignore
        #: The stream ID of the stream on which this stream depends.
        self.depends_on = depends_on
        #: The weight of the stream (one byte on the wire).
        self.stream_weight = stream_weight
        #: Whether the exclusive bit was set.
        self.exclusive = exclusive
    def serialize_priority_data(self) -> bytes:
        """Pack the 5-byte priority segment (exclusive bit in the top bit)."""
        dependency = self.depends_on
        if self.exclusive:
            dependency += 0x80000000
        return _STRUCT_LB.pack(dependency, self.stream_weight)
    def parse_priority_data(self, data: memoryview) -> int:
        """Unpack the 5-byte priority segment; return bytes consumed (5)."""
        try:
            self.depends_on, self.stream_weight = _STRUCT_LB.unpack(data[:5])
        except struct.error:
            raise InvalidFrameError("Invalid Priority data")
        self.exclusive = bool(self.depends_on >> 31)
        self.depends_on &= 0x7FFFFFFF
        return 5
class DataFrame(Padding, Frame):
    """
    DATA frames convey arbitrary, variable-length sequences of octets
    associated with a stream. One or more DATA frames are used, for instance,
    to carry HTTP request or response payloads.
    """
    #: The flags defined for DATA frames.
    defined_flags = [
        Flag('END_STREAM', 0x01),
        Flag('PADDED', 0x08),
    ]
    #: The type byte for data frames.
    type = 0x0
    stream_association = _STREAM_ASSOC_HAS_STREAM
    def __init__(self, stream_id: int, data: bytes = b'', **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        #: The data contained on this frame.
        self.data = data
    def serialize_body(self) -> bytes:
        """Serialize pad-length field, payload, and trailing zero padding."""
        if isinstance(self.data, memoryview):
            self.data = self.data.tobytes()
        pieces = [
            self.serialize_padding_data(),
            self.data,
            b'\0' * self.pad_length,
        ]
        return b''.join(pieces)
    def parse_body(self, data: memoryview) -> None:
        """Parse the body, stripping any padding and validating its length."""
        consumed = self.parse_padding_data(data)
        payload_end = len(data) - self.pad_length
        self.data = data[consumed:payload_end].tobytes()
        self.body_len = len(data)
        if self.pad_length and self.pad_length >= self.body_len:
            raise InvalidPaddingError("Padding is too long.")
    @property
    def flow_controlled_length(self) -> int:
        """
        The length of the frame that needs to be accounted for when considering
        flow control.
        """
        if 'PADDED' in self.flags:
            # The 1-byte padding length field also counts toward flow
            # control, even when the padding itself is zero bytes.
            return len(self.data) + self.pad_length + 1
        return len(self.data)
class PriorityFrame(Priority, Frame):
    """
    The PRIORITY frame carries the sender-advised priority of a stream. It
    may be sent at any time for an existing stream, which enables
    reprioritisation of existing streams.
    """
    #: The flags defined for PRIORITY frames.
    defined_flags: List[Flag] = []
    #: The type byte defined for PRIORITY frames.
    type = 0x02
    stream_association = _STREAM_ASSOC_HAS_STREAM
    def _body_repr(self) -> str:
        return "exclusive={}, depends_on={}, stream_weight={}".format(
            self.exclusive, self.depends_on, self.stream_weight
        )
    def serialize_body(self) -> bytes:
        """The body is exactly the 5-byte priority segment."""
        return self.serialize_priority_data()
    def parse_body(self, data: memoryview) -> None:
        """Parse the 5-byte priority body, rejecting longer bodies."""
        if len(data) > 5:
            raise InvalidFrameError(
                "PRIORITY must have 5 byte body: actual length %s." %
                len(data)
            )
        self.parse_priority_data(data)
        self.body_len = 5
class RstStreamFrame(Frame):
    """
    The RST_STREAM frame allows for abnormal termination of a stream. Sent by
    the initiator of a stream, it indicates a wish to cancel the stream or
    that an error condition has occurred. Sent by the receiver, it indicates
    the receiver is rejecting the stream, requesting cancellation, or that an
    error condition has occurred.
    """
    #: The flags defined for RST_STREAM frames.
    defined_flags: List[Flag] = []
    #: The type byte defined for RST_STREAM frames.
    type = 0x03
    stream_association = _STREAM_ASSOC_HAS_STREAM
    def __init__(self, stream_id: int, error_code: int = 0, **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        #: The error code used when resetting the stream.
        self.error_code = error_code
    def _body_repr(self) -> str:
        return "error_code={}".format(self.error_code)
    def serialize_body(self) -> bytes:
        """The body is the 4-byte error code."""
        return _STRUCT_L.pack(self.error_code)
    def parse_body(self, data: memoryview) -> None:
        """Parse the 4-byte error code, rejecting any other body length."""
        if len(data) != 4:
            raise InvalidFrameError(
                "RST_STREAM must have 4 byte body: actual length %s." %
                len(data)
            )
        try:
            (self.error_code,) = _STRUCT_L.unpack(data)
        except struct.error:  # pragma: no cover
            raise InvalidFrameError("Invalid RST_STREAM body")
        self.body_len = 4
class SettingsFrame(Frame):
    """
    The SETTINGS frame conveys configuration parameters that affect how
    endpoints communicate: either constraints on peer behavior or
    preferences.
    Settings are not negotiated. They describe characteristics of the sending
    peer, used by the receiving peer, and each peer may advertise different
    values — e.g. a client may set a high initial flow control window while a
    server sets a lower one to conserve resources.
    """
    #: The flags defined for SETTINGS frames.
    defined_flags = [Flag('ACK', 0x01)]
    #: The type byte defined for SETTINGS frames.
    type = 0x04
    stream_association = _STREAM_ASSOC_NO_STREAM
    # Known setting identifiers, kept here as class attributes.
    #: The byte that signals the SETTINGS_HEADER_TABLE_SIZE setting.
    HEADER_TABLE_SIZE = 0x01
    #: The byte that signals the SETTINGS_ENABLE_PUSH setting.
    ENABLE_PUSH = 0x02
    #: The byte that signals the SETTINGS_MAX_CONCURRENT_STREAMS setting.
    MAX_CONCURRENT_STREAMS = 0x03
    #: The byte that signals the SETTINGS_INITIAL_WINDOW_SIZE setting.
    INITIAL_WINDOW_SIZE = 0x04
    #: The byte that signals the SETTINGS_MAX_FRAME_SIZE setting.
    MAX_FRAME_SIZE = 0x05
    #: The byte that signals the SETTINGS_MAX_HEADER_LIST_SIZE setting.
    MAX_HEADER_LIST_SIZE = 0x06
    #: The byte that signals SETTINGS_ENABLE_CONNECT_PROTOCOL setting.
    ENABLE_CONNECT_PROTOCOL = 0x08
    def __init__(self, stream_id: int = 0, settings: Optional[Dict[int, int]] = None, **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        if settings and "ACK" in kwargs.get("flags", ()):
            raise InvalidDataError(
                "Settings must be empty if ACK flag is set."
            )
        #: A dictionary of the setting type byte to the value of the setting.
        self.settings = settings or {}
    def _body_repr(self) -> str:
        return "settings={}".format(self.settings)
    def serialize_body(self) -> bytes:
        """Serialize each setting as a 2-byte identifier plus 4-byte value."""
        records = []
        for setting, value in self.settings.items():
            records.append(_STRUCT_HL.pack(setting & 0xFF, value))
        return b''.join(records)
    def parse_body(self, data: memoryview) -> None:
        """Parse 6-byte setting records; an ACK frame must carry no payload."""
        if 'ACK' in self.flags and len(data) > 0:
            raise InvalidDataError(
                "SETTINGS ack frame must not have payload: got %s bytes" %
                len(data)
            )
        consumed = 0
        while consumed < len(data):
            try:
                name, value = _STRUCT_HL.unpack(data[consumed:consumed + 6])
            except struct.error:
                raise InvalidFrameError("Invalid SETTINGS body")
            self.settings[name] = value
            consumed += 6
        self.body_len = consumed
class PushPromiseFrame(Padding, Frame):
    """
    The PUSH_PROMISE frame is used to notify the peer endpoint in advance of
    streams the sender intends to initiate.
    """
    #: The flags defined for PUSH_PROMISE frames.
    defined_flags = [
        Flag('END_HEADERS', 0x04),
        Flag('PADDED', 0x08)
    ]
    #: The type byte defined for PUSH_PROMISE frames.
    type = 0x05
    stream_association = _STREAM_ASSOC_HAS_STREAM
    def __init__(self, stream_id: int, promised_stream_id: int = 0, data: bytes = b'', **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        #: The stream ID that is promised by this frame.
        self.promised_stream_id = promised_stream_id
        #: The HPACK-encoded header block for the simulated request on the new
        #: stream.
        self.data = data
    def _body_repr(self) -> str:
        return "promised_stream_id={}, data={}".format(
            self.promised_stream_id, _raw_data_repr(self.data)
        )
    def serialize_body(self) -> bytes:
        """Serialize pad-length, promised stream ID, header block, padding."""
        pieces = [
            self.serialize_padding_data(),
            _STRUCT_L.pack(self.promised_stream_id),
            self.data,
            b'\0' * self.pad_length,
        ]
        return b''.join(pieces)
    def parse_body(self, data: memoryview) -> None:
        """Parse and validate the promised stream ID and header block."""
        offset = self.parse_padding_data(data)
        try:
            (self.promised_stream_id,) = _STRUCT_L.unpack(
                data[offset:offset + 4]
            )
        except struct.error:
            raise InvalidFrameError("Invalid PUSH_PROMISE body")
        self.data = data[offset + 4:len(data) - self.pad_length].tobytes()
        self.body_len = len(data)
        # A promised stream must have a non-zero, even (server-initiated) ID.
        if self.promised_stream_id == 0 or self.promised_stream_id % 2 != 0:
            raise InvalidDataError(
                "Invalid PUSH_PROMISE promised stream id: %s" %
                self.promised_stream_id
            )
        if self.pad_length and self.pad_length >= self.body_len:
            raise InvalidPaddingError("Padding is too long.")
class PingFrame(Frame):
    """
    The PING frame is a mechanism for measuring a minimal round-trip time
    from the sender, as well as determining whether an idle connection is
    still functional. PING frames can be sent from any endpoint.
    """
    #: The flags defined for PING frames.
    defined_flags = [Flag('ACK', 0x01)]
    #: The type byte defined for PING frames.
    type = 0x06
    stream_association = _STREAM_ASSOC_NO_STREAM
    def __init__(self, stream_id: int = 0, opaque_data: bytes = b'', **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        #: The opaque data sent in this PING frame, as a bytestring.
        self.opaque_data = opaque_data
    def _body_repr(self) -> str:
        return "opaque_data={!r}".format(self.opaque_data)
    def serialize_body(self) -> bytes:
        """Serialize the opaque data, zero-padded to exactly 8 bytes."""
        if len(self.opaque_data) > 8:
            raise InvalidFrameError(
                "PING frame may not have more than 8 bytes of data, got %r" %
                self.opaque_data
            )
        return self.opaque_data.ljust(8, b'\x00')
    def parse_body(self, data: memoryview) -> None:
        """Parse the 8-byte opaque payload, rejecting any other length."""
        if len(data) != 8:
            raise InvalidFrameError(
                "PING frame must have 8 byte length: got %s" % len(data)
            )
        self.opaque_data = data.tobytes()
        self.body_len = 8
class GoAwayFrame(Frame):
    """
    The GOAWAY frame informs the remote peer to stop creating streams on
    this connection. It can be sent from the client or the server. Once
    sent, the sender will ignore frames sent on new streams for the
    remainder of the connection.
    """
    #: The flags defined for GOAWAY frames.
    defined_flags: List[Flag] = []
    #: The type byte defined for GOAWAY frames.
    type = 0x07
    stream_association = _STREAM_ASSOC_NO_STREAM
    def __init__(self,
                 stream_id: int = 0,
                 last_stream_id: int = 0,
                 error_code: int = 0,
                 additional_data: bytes = b'',
                 **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        #: The last stream ID definitely seen by the remote peer.
        self.last_stream_id = last_stream_id
        #: The error code for connection teardown.
        self.error_code = error_code
        #: Any additional data sent in the GOAWAY.
        self.additional_data = additional_data
    def _body_repr(self) -> str:
        return "last_stream_id={}, error_code={}, additional_data={!r}".format(
            self.last_stream_id, self.error_code, self.additional_data
        )
    def serialize_body(self) -> bytes:
        """Serialize last-stream-id, error code, and optional debug data."""
        fixed_part = _STRUCT_LL.pack(
            self.last_stream_id & 0x7FFFFFFF,
            self.error_code
        )
        return fixed_part + self.additional_data
    def parse_body(self, data: memoryview) -> None:
        """Parse the fixed 8-byte prefix plus any trailing debug data."""
        try:
            self.last_stream_id, self.error_code = _STRUCT_LL.unpack(
                data[:8]
            )
        except struct.error:
            raise InvalidFrameError("Invalid GOAWAY body.")
        self.body_len = len(data)
        if len(data) > 8:
            self.additional_data = data[8:].tobytes()
class WindowUpdateFrame(Frame):
    """
    The WINDOW_UPDATE frame is used to implement flow control, which
    operates at two levels: on each individual stream and on the entire
    connection.
    Both types of flow control are hop by hop, i.e. only between the two
    endpoints. Intermediaries do not forward WINDOW_UPDATE frames between
    dependent connections, though throttling of data transfer by any
    receiver can indirectly cause the propagation of flow control
    information toward the original sender.
    """
    #: The flags defined for WINDOW_UPDATE frames.
    defined_flags: List[Flag] = []
    #: The type byte defined for WINDOW_UPDATE frames.
    type = 0x08
    stream_association = _STREAM_ASSOC_EITHER
    def __init__(self, stream_id: int, window_increment: int = 0, **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        #: The amount the flow control window is to be incremented.
        self.window_increment = window_increment
    def _body_repr(self) -> str:
        return "window_increment={}".format(self.window_increment)
    def serialize_body(self) -> bytes:
        """Serialize the 31-bit window increment (top bit masked off)."""
        return _STRUCT_L.pack(self.window_increment & 0x7FFFFFFF)
    def parse_body(self, data: memoryview) -> None:
        """Parse and validate the 4-byte window increment."""
        if len(data) > 4:
            raise InvalidFrameError(
                "WINDOW_UPDATE frame must have 4 byte length: got %s" %
                len(data)
            )
        try:
            (self.window_increment,) = _STRUCT_L.unpack(data)
        except struct.error:
            raise InvalidFrameError("Invalid WINDOW_UPDATE body")
        if not 1 <= self.window_increment <= 2**31-1:
            raise InvalidDataError(
                "WINDOW_UPDATE increment must be between 1 to 2^31-1"
            )
        self.body_len = 4
class HeadersFrame(Padding, Priority, Frame):
    """
    The HEADERS frame carries name-value pairs and is used to open a stream.
    HEADERS frames can be sent on a stream in the "open" or "half closed
    (remote)" states.
    In this implementation the class is essentially a data frame, because of
    the requirement to control frame sizes: a header block fragment that
    does not fit in one HEADERS frame is continued in CONTINUATION frames,
    so the frame-building code treats the header block as an opaque data
    segment.
    """
    #: The flags defined for HEADERS frames.
    defined_flags = [
        Flag('END_STREAM', 0x01),
        Flag('END_HEADERS', 0x04),
        Flag('PADDED', 0x08),
        Flag('PRIORITY', 0x20),
    ]
    #: The type byte defined for HEADERS frames.
    type = 0x01
    stream_association = _STREAM_ASSOC_HAS_STREAM
    def __init__(self, stream_id: int, data: bytes = b'', **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        #: The HPACK-encoded header block.
        self.data = data
    def _body_repr(self) -> str:
        return "exclusive={}, depends_on={}, stream_weight={}, data={}".format(
            self.exclusive,
            self.depends_on,
            self.stream_weight,
            _raw_data_repr(self.data),
        )
    def serialize_body(self) -> bytes:
        """Serialize pad-length, optional priority, header block, padding."""
        priority_segment = (
            self.serialize_priority_data() if 'PRIORITY' in self.flags
            else b''
        )
        return b''.join([
            self.serialize_padding_data(),
            priority_segment,
            self.data,
            b'\0' * self.pad_length,
        ])
    def parse_body(self, data: memoryview) -> None:
        """Parse optional padding/priority segments and the header block."""
        pad_field_len = self.parse_padding_data(data)
        data = data[pad_field_len:]
        priority_len = 0
        if 'PRIORITY' in self.flags:
            priority_len = self.parse_priority_data(data)
        # NOTE: body_len here excludes the pad-length field, matching the
        # pre-existing behaviour of this frame type.
        self.body_len = len(data)
        self.data = data[priority_len:len(data) - self.pad_length].tobytes()
        if self.pad_length and self.pad_length >= self.body_len:
            raise InvalidPaddingError("Padding is too long.")
class ContinuationFrame(Frame):
    """
    The CONTINUATION frame is used to continue a sequence of header block
    fragments. Any number of CONTINUATION frames can be sent on an existing
    stream, as long as the preceding frame on the same stream is one of
    HEADERS, PUSH_PROMISE or CONTINUATION without the END_HEADERS flag set.
    Much like the HEADERS frame, this is treated as an opaque data frame
    with different flags and a different type.
    """
    #: The flags defined for CONTINUATION frames.
    defined_flags = [Flag('END_HEADERS', 0x04)]
    #: The type byte defined for CONTINUATION frames.
    type = 0x09
    stream_association = _STREAM_ASSOC_HAS_STREAM
    def __init__(self, stream_id: int, data: bytes = b'', **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        #: The HPACK-encoded header block.
        self.data = data
    def _body_repr(self) -> str:
        return "data={}".format(_raw_data_repr(self.data))
    def serialize_body(self) -> bytes:
        """The body is the raw header block fragment."""
        return self.data
    def parse_body(self, data: memoryview) -> None:
        """Store the entire body as the header block fragment."""
        self.data = data.tobytes()
        self.body_len = len(data)
class AltSvcFrame(Frame):
    """
    The ALTSVC frame is used to advertise alternate services that the
    current host, or a different one, can understand. This frame is
    standardised as part of RFC 7838. No validation of the ALTSVC field
    parameter against the rules of RFC 7838 is performed here.
    .. note:: A valid ALTSVC frame has ``stream_id != 0`` XOR
              ``len(origin) != 0``: a nonzero ``stream_id`` requires a
              zero-length origin, and a zero ``stream_id`` requires a
              nonzero-length one.
    """
    type = 0xA
    stream_association = _STREAM_ASSOC_EITHER
    def __init__(self, stream_id: int, origin: bytes = b'', field: bytes = b'', **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        if not isinstance(origin, bytes):
            raise InvalidDataError("AltSvc origin must be bytestring.")
        if not isinstance(field, bytes):
            raise InvalidDataError("AltSvc field must be a bytestring.")
        #: The origin the alternative service applies to.
        self.origin = origin
        #: The Alt-Svc field value.
        self.field = field
    def _body_repr(self) -> str:
        return "origin={!r}, field={!r}".format(self.origin, self.field)
    def serialize_body(self) -> bytes:
        """Serialize the 2-byte origin length, the origin, and the field."""
        return b''.join([
            _STRUCT_H.pack(len(self.origin)),
            self.origin,
            self.field,
        ])
    def parse_body(self, data: memoryview) -> None:
        """Parse the origin-length prefix, origin, and trailing field."""
        try:
            origin_len = _STRUCT_H.unpack(data[0:2])[0]
            self.origin = data[2:2 + origin_len].tobytes()
            if len(self.origin) != origin_len:
                raise InvalidFrameError("Invalid ALTSVC frame body.")
            self.field = data[2 + origin_len:].tobytes()
        except (struct.error, ValueError):
            raise InvalidFrameError("Invalid ALTSVC frame body.")
        self.body_len = len(data)
class ExtensionFrame(Frame):
    """
    ExtensionFrame wraps frames which are not natively interpretable by
    hyperframe.
    Although certain byte prefixes are ordained by specification to have
    certain contextual meanings, frames with other prefixes are not
    prohibited, and may carry arbitrary meaning between HTTP/2 peers. So
    rather than raising an exception on such a frame, hyperframe wraps it in
    a generic frame to be acted upon by upstream consumers which might have
    additional context on how to use it.
    .. versionadded:: 5.0.0
    """
    stream_association = _STREAM_ASSOC_EITHER
    def __init__(self, type: int, stream_id: int, flag_byte: int = 0x0, body: bytes = b'', **kwargs: Any) -> None:
        super().__init__(stream_id, **kwargs)
        #: The (unknown) type byte of the wrapped frame.
        self.type = type
        #: The raw flag byte, stored uninterpreted.
        self.flag_byte = flag_byte
        #: The raw frame body.
        self.body = body
    def _body_repr(self) -> str:
        return "type={}, flag_byte={}, body={}".format(
            self.type, self.flag_byte, _raw_data_repr(self.body)
        )
    def parse_flags(self, flag_byte: int) -> None:  # type: ignore
        """
        For extension frames, we parse the flags by just storing a flag byte.
        """
        self.flag_byte = flag_byte
    def parse_body(self, data: memoryview) -> None:
        """Store the raw body bytes without interpretation."""
        self.body = data.tobytes()
        self.body_len = len(data)
    def serialize(self) -> bytes:
        """
        A broad override of the serialize method that ensures that the data
        comes back out exactly as it came in. This should not be used in most
        user code: it exists only as a helper method if frames need to be
        reconstituted.
        """
        header = _STRUCT_HBBBL.pack(
            (self.body_len >> 8) & 0xFFFF,  # Length spread over top 24 bits
            self.body_len & 0xFF,
            self.type,
            self.flag_byte,  # Flags are stored raw, not recomputed.
            self.stream_id & 0x7FFFFFFF  # Stream ID is 32 bits.
        )
        return header + self.body
def _raw_data_repr(data: Optional[bytes]) -> str:
if not data:
return "None"
r = binascii.hexlify(data).decode('ascii')
if len(r) > 20:
r = r[:20] + "..."
return "<hex:" + r + ">"
# All natively-supported frame classes, in no particular order. Unknown types
# fall back to ExtensionFrame in Frame.parse_frame_header.
_FRAME_CLASSES: List[Type[Frame]] = [
    DataFrame,
    HeadersFrame,
    PriorityFrame,
    RstStreamFrame,
    SettingsFrame,
    PushPromiseFrame,
    PingFrame,
    GoAwayFrame,
    WindowUpdateFrame,
    ContinuationFrame,
    AltSvcFrame,
]
#: FRAMES maps the type byte for each frame to the class used to represent that
#: frame.
FRAMES = {cls.type: cls for cls in _FRAME_CLASSES}
| |
#!/usr/bin/python
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
"""Element Software volume clone"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_elementsw_volume_clone
short_description: NetApp Element Software Create Volume Clone
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Create volume clones on Element OS
options:
name:
description:
- The name of the clone.
required: true
src_volume_id:
description:
- The id of the src volume to clone. id may be a numeric identifier or a volume name.
required: true
src_snapshot_id:
description:
- The id of the snapshot to clone. id may be a numeric identifier or a snapshot name.
account_id:
description:
- Account ID for the owner of this cloned volume. id may be a numeric identifier or an account name.
required: true
attributes:
description: A YAML dictionary of attributes that you would like to apply on this cloned volume.
size:
description:
- The size of the cloned volume in (size_unit).
size_unit:
description:
- The unit used to interpret the size parameter.
choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb']
default: 'gb'
access:
choices: ['readOnly', 'readWrite', 'locked', 'replicationTarget']
description:
- Access allowed for the volume.
- If unspecified, the access settings of the clone will be the same as the source.
- readOnly - Only read operations are allowed.
- readWrite - Reads and writes are allowed.
- locked - No reads or writes are allowed.
- replicationTarget - Identify a volume as the target volume for a paired set of volumes. If the volume is not paired, the access status is locked.
'''
EXAMPLES = """
- name: Clone Volume
na_elementsw_volume_clone:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
name: CloneAnsibleVol
src_volume_id: 123
src_snapshot_id: 41
account_id: 3
size: 1
size_unit: gb
access: readWrite
attributes: {"virtual_network_id": 12345}
"""
RETURN = """
msg:
description: Success message
returned: success
type: str
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_elementsw_module import NaElementSWModule
# Probe once at import time whether the SolidFire Python SDK is installed.
HAS_SF_SDK = netapp_utils.has_sf_sdk()
class ElementOSVolumeClone(object):
    """
    Contains methods to parse arguments,
    derive details of Element Software objects
    and send requests to Element OS via
    the Solidfire SDK
    """
    def __init__(self):
        """
        Parse arguments, setup state variables,
        check parameters and ensure SDK is installed
        """
        self._size_unit_map = netapp_utils.SF_BYTE_MAP
        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            name=dict(required=True),
            src_volume_id=dict(required=True),
            src_snapshot_id=dict(),
            account_id=dict(required=True),
            attributes=dict(type='dict', default=None),
            size=dict(type='int'),
            size_unit=dict(default='gb',
                           choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb',
                                    'pb', 'eb', 'zb', 'yb'], type='str'),
            access=dict(type='str',
                        default=None, choices=['readOnly', 'readWrite',
                                               'locked', 'replicationTarget']),
        ))
        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True
        )
        parameters = self.module.params
        # set up state variables
        self.name = parameters['name']
        self.src_volume_id = parameters['src_volume_id']
        self.src_snapshot_id = parameters['src_snapshot_id']
        self.account_id = parameters['account_id']
        self.attributes = parameters['attributes']
        self.size_unit = parameters['size_unit']
        # Convert 'size' into bytes using the requested unit; None means
        # "inherit the source volume's size" downstream.
        if parameters['size'] is not None:
            self.size = parameters['size'] * \
                self._size_unit_map[self.size_unit]
        else:
            self.size = None
        self.access = parameters['access']
        # fail_json terminates the module run (AnsibleModule behaviour).
        if HAS_SF_SDK is False:
            self.module.fail_json(
                msg="Unable to import the SolidFire Python SDK")
        else:
            self.sfe = netapp_utils.create_sf_connection(module=self.module)
            self.elementsw_helper = NaElementSWModule(self.sfe)
        # add telemetry attributes
        if self.attributes is not None:
            self.attributes.update(self.elementsw_helper.set_element_attributes(source='na_elementsw_volume_clone'))
        else:
            self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_volume_clone')
    def get_account_id(self):
        """
        Return account id if found
        """
        # Per DOCUMENTATION, account_id may be a numeric id or an account
        # name; the helper resolves either form to the numeric id.
        try:
            # Update and return self.account_id
            self.account_id = self.elementsw_helper.account_exists(self.account_id)
            return self.account_id
        except Exception as err:
            self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err))
    def get_snapshot_id(self):
        """
        Return snapshot details if found
        """
        src_snapshot = self.elementsw_helper.get_snapshot(self.src_snapshot_id, self.src_volume_id)
        # Update and return self.src_snapshot_id
        if src_snapshot is not None:
            self.src_snapshot_id = src_snapshot.snapshot_id
            # Return src_snapshot
            return self.src_snapshot_id
        # Snapshot not found on the source volume.
        return None
    def get_src_volume_id(self):
        """
        Return volume id if found
        """
        src_vol_id = self.elementsw_helper.volume_exists(self.src_volume_id, self.account_id)
        if src_vol_id is not None:
            # Update and return self.volume_id
            self.src_volume_id = src_vol_id
            # Return src_volume_id
            return self.src_volume_id
        # Source volume not found for this account.
        return None
    def clone_volume(self):
        """Clone Volume from source"""
        try:
            self.sfe.clone_volume(volume_id=self.src_volume_id,
                                  name=self.name,
                                  new_account_id=self.account_id,
                                  new_size=self.size,
                                  access=self.access,
                                  snapshot_id=self.src_snapshot_id,
                                  attributes=self.attributes)
        except Exception as err:
            self.module.fail_json(msg="Error creating clone %s of size %s" % (self.name, self.size), exception=to_native(err))
    def apply(self):
        """Perform pre-checks, call functions and exit"""
        changed = False
        result_message = ""
        if self.get_account_id() is None:
            self.module.fail_json(msg="Account id not found: %s" % (self.account_id))
        # there is only one state. other operations
        # are part of the volume module
        # ensure that a volume with the clone name
        # isn't already present
        if self.elementsw_helper.volume_exists(self.name, self.account_id) is None:
            # check for the source volume
            if self.get_src_volume_id() is not None:
                # check for a valid snapshot
                if self.src_snapshot_id and not self.get_snapshot_id():
                    self.module.fail_json(msg="Snapshot id not found: %s" % (self.src_snapshot_id))
                # change required
                changed = True
            else:
                self.module.fail_json(msg="Volume id not found %s" % (self.src_volume_id))
        # If the clone already exists, fall through with changed=False
        # (idempotent no-op).
        if changed:
            if self.module.check_mode:
                result_message = "Check mode, skipping changes"
            else:
                self.clone_volume()
                result_message = "Volume cloned"
        self.module.exit_json(changed=changed, msg=result_message)
def main():
    """Instantiate the module object and run it."""
    ElementOSVolumeClone().apply()
if __name__ == '__main__':
    main()
| |
# encoding: utf-8
"""
lxml custom element classes for shape-related XML elements.
"""
from __future__ import absolute_import
from pptx.enum.shapes import MSO_AUTO_SHAPE_TYPE, PP_PLACEHOLDER
from pptx.oxml import parse_xml
from pptx.oxml.ns import nsdecls
from pptx.oxml.shapes.shared import BaseShapeElement
from pptx.oxml.simpletypes import (
ST_Coordinate,
ST_PositiveCoordinate,
XsdBoolean,
XsdString,
)
from pptx.oxml.text import CT_TextBody
from pptx.oxml.xmlchemy import (
BaseOxmlElement,
OneAndOnlyOne,
OptionalAttribute,
RequiredAttribute,
ZeroOrOne,
ZeroOrMore,
)
class CT_AdjPoint2D(BaseOxmlElement):
    """`a:pt` custom element class."""
    # Required "x"/"y" attributes, validated as ST_Coordinate values.
    x = RequiredAttribute("x", ST_Coordinate)
    y = RequiredAttribute("y", ST_Coordinate)
class CT_CustomGeometry2D(BaseOxmlElement):
    """`a:custGeom` custom element class."""
    # Full schema child sequence; only `a:pathLst` is mapped here.
    _tag_seq = ("a:avLst", "a:gdLst", "a:ahLst", "a:cxnLst", "a:rect", "a:pathLst")
    # `a:pathLst` is the last member of _tag_seq, so its successor list
    # (_tag_seq[6:]) is empty.
    pathLst = ZeroOrOne("a:pathLst", successors=_tag_seq[6:])
class CT_GeomGuide(BaseOxmlElement):
    """
    ``<a:gd>`` custom element class, defining a "guide", corresponding to
    a yellow diamond-shaped handle on an autoshape.
    """
    # Guide name and its formula string (e.g. "val 50000").
    name = RequiredAttribute("name", XsdString)
    fmla = RequiredAttribute("fmla", XsdString)
class CT_GeomGuideList(BaseOxmlElement):
    """
    ``<a:avLst>`` custom element class
    """
    # Zero or more `a:gd` (adjustment guide) children.
    gd = ZeroOrMore("a:gd")
class CT_NonVisualDrawingShapeProps(BaseShapeElement):
    """
    ``<p:cNvSpPr>`` custom element class
    """
    # Optional shape-lock child and text-box marker attribute.
    spLocks = ZeroOrOne("a:spLocks")
    txBox = OptionalAttribute("txBox", XsdBoolean)
class CT_Path2D(BaseOxmlElement):
    """`a:path` custom element class."""
    close = ZeroOrMore("a:close", successors=())
    lnTo = ZeroOrMore("a:lnTo", successors=())
    moveTo = ZeroOrMore("a:moveTo", successors=())
    w = OptionalAttribute("w", ST_PositiveCoordinate)
    h = OptionalAttribute("h", ST_PositiveCoordinate)
    def add_close(self):
        """Append a new `a:close` element to this path and return it."""
        return self._add_close()
    def add_lnTo(self, x, y):
        """Append and return an `a:lnTo` subtree ending at *(x, y)*."""
        line_to = self._add_lnTo()
        point = line_to._add_pt()
        point.x = x
        point.y = y
        return line_to
    def add_moveTo(self, x, y):
        """Append and return an `a:moveTo` subtree positioned at *(x, y)*."""
        move_to = self._add_moveTo()
        point = move_to._add_pt()
        point.x = x
        point.y = y
        return move_to
class CT_Path2DClose(BaseOxmlElement):
    """`a:close` custom element class.

    Marker element that closes the current sub-path; it carries no
    attributes or children.
    """
class CT_Path2DLineTo(BaseOxmlElement):
    """`a:lnTo` custom element class."""

    # single `a:pt` child holding the line segment's end point
    pt = ZeroOrOne("a:pt", successors=())
class CT_Path2DList(BaseOxmlElement):
    """`a:pathLst` custom element class."""

    path = ZeroOrMore("a:path", successors=())

    def add_path(self, w, h):
        """Append a newly created `a:path` child of extent *w* x *h*; return it."""
        new_path = self._add_path()
        new_path.w = w
        new_path.h = h
        return new_path
class CT_Path2DMoveTo(BaseOxmlElement):
    """`a:moveTo` custom element class."""

    # single `a:pt` child holding the point the pen moves to
    pt = ZeroOrOne("a:pt", successors=())
class CT_PresetGeometry2D(BaseOxmlElement):
    """``<a:prstGeom>`` custom element class."""

    avLst = ZeroOrOne("a:avLst")
    prst = RequiredAttribute("prst", MSO_AUTO_SHAPE_TYPE)

    @property
    def gd_lst(self):
        """Sequence of ``gd`` children of the ``<a:avLst>`` child element.

        Empty when no ``<a:avLst>`` element is present.
        """
        adjustment_list = self.avLst
        return [] if adjustment_list is None else adjustment_list.gd_lst

    def rewrite_guides(self, guides):
        """Replace all ``<a:gd>`` children of ``<a:avLst>``.

        The existing ``<a:avLst>`` (if any) is removed and a fresh one is
        populated with one guide per (name, val) pair in *guides*.
        """
        self._remove_avLst()
        adjustment_list = self._add_avLst()
        for name, val in guides:
            guide = adjustment_list._add_gd()
            guide.name = name
            guide.fmla = "val %d" % val
class CT_Shape(BaseShapeElement):
    """
    ``<p:sp>`` custom element class
    """

    nvSpPr = OneAndOnlyOne("p:nvSpPr")
    spPr = OneAndOnlyOne("p:spPr")
    txBody = ZeroOrOne("p:txBody", successors=("p:extLst",))

    def add_path(self, w, h):
        """Append a new `a:path` of extent *w* x *h* to this shape's custom geometry.

        Returns the newly created `a:path` element. Raises |ValueError| if
        this shape has no `a:custGeom` descendant, i.e. is not a freeform
        shape.
        """
        custGeom = self.spPr.custGeom
        if custGeom is None:
            raise ValueError("shape must be freeform")
        pathLst = custGeom.get_or_add_pathLst()
        return pathLst.add_path(w=w, h=h)

    def get_or_add_ln(self):
        """
        Return the <a:ln> grandchild element, newly added if not present.
        """
        return self.spPr.get_or_add_ln()

    @property
    def has_custom_geometry(self):
        """True if this shape has custom geometry, i.e. is a freeform shape.

        A shape has custom geometry if it has a `p:spPr/a:custGeom`
        descendant (instead of `p:spPr/a:prstGeom`).
        """
        return self.spPr.custGeom is not None

    @property
    def is_autoshape(self):
        """
        True if this shape is an auto shape. A shape is an auto shape if it
        has a ``<a:prstGeom>`` element and does not have a txBox="1" attribute
        on cNvSpPr.
        """
        prstGeom = self.prstGeom
        if prstGeom is None:
            return False
        # txBox is an optional attribute; a missing attribute reads as None,
        # which is not True, so only an explicit txBox="1" disqualifies.
        if self.nvSpPr.cNvSpPr.txBox is True:
            return False
        return True

    @property
    def is_textbox(self):
        """
        True if this shape is a text box. A shape is a text box if it has a
        ``txBox`` attribute on cNvSpPr that resolves to |True|. The default
        when the txBox attribute is missing is |False|.
        """
        if self.nvSpPr.cNvSpPr.txBox is True:
            return True
        return False

    @property
    def ln(self):
        """
        ``<a:ln>`` grand-child element or |None| if not present
        """
        return self.spPr.ln

    @staticmethod
    def new_autoshape_sp(id_, name, prst, left, top, width, height):
        """
        Return a new ``<p:sp>`` element tree configured as a base auto shape.
        """
        tmpl = CT_Shape._autoshape_sp_tmpl()
        xml = tmpl % (id_, name, left, top, width, height, prst)
        sp = parse_xml(xml)
        return sp

    @staticmethod
    def new_freeform_sp(shape_id, name, x, y, cx, cy):
        """Return new `p:sp` element tree configured as freeform shape.

        The returned shape has a `a:custGeom` subtree but no paths in its
        path list.
        """
        tmpl = CT_Shape._freeform_sp_tmpl()
        xml = tmpl % (shape_id, name, x, y, cx, cy)
        sp = parse_xml(xml)
        return sp

    @staticmethod
    def new_placeholder_sp(id_, name, ph_type, orient, sz, idx):
        """
        Return a new ``<p:sp>`` element tree configured as a placeholder
        shape.
        """
        tmpl = CT_Shape._ph_sp_tmpl()
        xml = tmpl % (id_, name)
        sp = parse_xml(xml)

        # configure the `p:ph` element with the placeholder particulars
        ph = sp.nvSpPr.nvPr.get_or_add_ph()
        ph.type = ph_type
        ph.idx = idx
        ph.orient = orient
        ph.sz = sz

        # only these placeholder types get an (empty) text frame by default
        placeholder_types_that_have_a_text_frame = (
            PP_PLACEHOLDER.TITLE,
            PP_PLACEHOLDER.CENTER_TITLE,
            PP_PLACEHOLDER.SUBTITLE,
            PP_PLACEHOLDER.BODY,
            PP_PLACEHOLDER.OBJECT,
        )
        if ph_type in placeholder_types_that_have_a_text_frame:
            sp.append(CT_TextBody.new())
        return sp

    @staticmethod
    def new_textbox_sp(id_, name, left, top, width, height):
        """
        Return a new ``<p:sp>`` element tree configured as a base textbox
        shape.
        """
        tmpl = CT_Shape._textbox_sp_tmpl()
        xml = tmpl % (id_, name, left, top, width, height)
        sp = parse_xml(xml)
        return sp

    @property
    def prst(self):
        """
        Value of ``prst`` attribute of ``<a:prstGeom>`` element or |None| if
        not present.
        """
        prstGeom = self.prstGeom
        if prstGeom is None:
            return None
        return prstGeom.prst

    @property
    def prstGeom(self):
        """
        Reference to ``<a:prstGeom>`` child element or |None| if this shape
        doesn't have one, for example, if it's a placeholder shape.
        """
        return self.spPr.prstGeom

    @staticmethod
    def _autoshape_sp_tmpl():
        # Two-round %-substitution: this call fills the namespace
        # declarations and re-emits literal "%d"/"%s" placeholders for the
        # caller (new_autoshape_sp) to fill with the shape particulars.
        return (
            "<p:sp %s>\n"
            " <p:nvSpPr>\n"
            ' <p:cNvPr id="%s" name="%s"/>\n'
            " <p:cNvSpPr/>\n"
            " <p:nvPr/>\n"
            " </p:nvSpPr>\n"
            " <p:spPr>\n"
            " <a:xfrm>\n"
            ' <a:off x="%s" y="%s"/>\n'
            ' <a:ext cx="%s" cy="%s"/>\n'
            " </a:xfrm>\n"
            ' <a:prstGeom prst="%s">\n'
            " <a:avLst/>\n"
            " </a:prstGeom>\n"
            " </p:spPr>\n"
            " <p:style>\n"
            ' <a:lnRef idx="1">\n'
            ' <a:schemeClr val="accent1"/>\n'
            " </a:lnRef>\n"
            ' <a:fillRef idx="3">\n'
            ' <a:schemeClr val="accent1"/>\n'
            " </a:fillRef>\n"
            ' <a:effectRef idx="2">\n'
            ' <a:schemeClr val="accent1"/>\n'
            " </a:effectRef>\n"
            ' <a:fontRef idx="minor">\n'
            ' <a:schemeClr val="lt1"/>\n'
            " </a:fontRef>\n"
            " </p:style>\n"
            " <p:txBody>\n"
            ' <a:bodyPr rtlCol="0" anchor="ctr"/>\n'
            " <a:lstStyle/>\n"
            " <a:p>\n"
            ' <a:pPr algn="ctr"/>\n'
            " </a:p>\n"
            " </p:txBody>\n"
            "</p:sp>" % (nsdecls("a", "p"), "%d", "%s", "%d", "%d", "%d", "%d", "%s")
        )

    @staticmethod
    def _freeform_sp_tmpl():
        # Same two-round %-substitution scheme as _autoshape_sp_tmpl, but
        # with an empty `a:custGeom` subtree instead of `a:prstGeom`.
        return (
            "<p:sp %s>\n"
            " <p:nvSpPr>\n"
            ' <p:cNvPr id="%s" name="%s"/>\n'
            " <p:cNvSpPr/>\n"
            " <p:nvPr/>\n"
            " </p:nvSpPr>\n"
            " <p:spPr>\n"
            " <a:xfrm>\n"
            ' <a:off x="%s" y="%s"/>\n'
            ' <a:ext cx="%s" cy="%s"/>\n'
            " </a:xfrm>\n"
            " <a:custGeom>\n"
            " <a:avLst/>\n"
            " <a:gdLst/>\n"
            " <a:ahLst/>\n"
            " <a:cxnLst/>\n"
            ' <a:rect l="l" t="t" r="r" b="b"/>\n'
            " <a:pathLst/>\n"
            " </a:custGeom>\n"
            " </p:spPr>\n"
            " <p:style>\n"
            ' <a:lnRef idx="1">\n'
            ' <a:schemeClr val="accent1"/>\n'
            " </a:lnRef>\n"
            ' <a:fillRef idx="3">\n'
            ' <a:schemeClr val="accent1"/>\n'
            " </a:fillRef>\n"
            ' <a:effectRef idx="2">\n'
            ' <a:schemeClr val="accent1"/>\n'
            " </a:effectRef>\n"
            ' <a:fontRef idx="minor">\n'
            ' <a:schemeClr val="lt1"/>\n'
            " </a:fontRef>\n"
            " </p:style>\n"
            " <p:txBody>\n"
            ' <a:bodyPr rtlCol="0" anchor="ctr"/>\n'
            " <a:lstStyle/>\n"
            " <a:p>\n"
            ' <a:pPr algn="ctr"/>\n'
            " </a:p>\n"
            " </p:txBody>\n"
            "</p:sp>" % (nsdecls("a", "p"), "%d", "%s", "%d", "%d", "%d", "%d")
        )

    def _new_txBody(self):
        # hook used by the generated get_or_add_txBody() accessor
        return CT_TextBody.new_p_txBody()

    @staticmethod
    def _ph_sp_tmpl():
        # Placeholder template; position/size are inherited from the layout,
        # so `p:spPr` is left empty here.
        return (
            "<p:sp %s>\n"
            " <p:nvSpPr>\n"
            ' <p:cNvPr id="%s" name="%s"/>\n'
            " <p:cNvSpPr>\n"
            ' <a:spLocks noGrp="1"/>\n'
            " </p:cNvSpPr>\n"
            " <p:nvPr/>\n"
            " </p:nvSpPr>\n"
            " <p:spPr/>\n"
            "</p:sp>" % (nsdecls("a", "p"), "%d", "%s")
        )

    @staticmethod
    def _textbox_sp_tmpl():
        # Textbox template; note txBox="1" on `p:cNvSpPr`, which is what
        # makes `is_textbox` report True for shapes built from it.
        return (
            "<p:sp %s>\n"
            " <p:nvSpPr>\n"
            ' <p:cNvPr id="%s" name="%s"/>\n'
            ' <p:cNvSpPr txBox="1"/>\n'
            " <p:nvPr/>\n"
            " </p:nvSpPr>\n"
            " <p:spPr>\n"
            " <a:xfrm>\n"
            ' <a:off x="%s" y="%s"/>\n'
            ' <a:ext cx="%s" cy="%s"/>\n'
            " </a:xfrm>\n"
            ' <a:prstGeom prst="rect">\n'
            " <a:avLst/>\n"
            " </a:prstGeom>\n"
            " <a:noFill/>\n"
            " </p:spPr>\n"
            " <p:txBody>\n"
            ' <a:bodyPr wrap="none">\n'
            " <a:spAutoFit/>\n"
            " </a:bodyPr>\n"
            " <a:lstStyle/>\n"
            " <a:p/>\n"
            " </p:txBody>\n"
            "</p:sp>" % (nsdecls("a", "p"), "%d", "%s", "%d", "%d", "%d", "%d")
        )
class CT_ShapeNonVisual(BaseShapeElement):
    """
    ``<p:nvSpPr>`` custom element class
    """

    # all three children are required by the schema
    cNvPr = OneAndOnlyOne("p:cNvPr")
    cNvSpPr = OneAndOnlyOne("p:cNvSpPr")
    nvPr = OneAndOnlyOne("p:nvPr")
| |
# Copyright 2012 Josh Durgin
# Copyright 2013 Canonical Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
import ddt
from cinder import context
from cinder import exception
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_volume
from cinder.tests.unit import test
from cinder.tests.unit.volume.drivers.ceph \
import fake_rbd_iscsi_client as fake_client
import cinder.volume.drivers.ceph.rbd_iscsi as driver
# Collects exceptions raised during a test so assertions can inspect what
# was raised.
# NOTE: this must be re-initialised (reset to []) in test setUp().
RAISED_EXCEPTIONS = []
@ddt.ddt
class RBDISCSITestCase(test.TestCase):
    """Unit tests for the RBD iSCSI volume driver."""

    def setUp(self):
        """Reset shared state and build fake volumes/responses for each test."""
        global RAISED_EXCEPTIONS
        RAISED_EXCEPTIONS = []
        super(RBDISCSITestCase, self).setUp()
        self.context = context.get_admin_context()

        # bogus access to prevent pep8 violation
        # from the import of fake_client.
        # fake_client must be imported to create the fake
        # rbd_iscsi_client system module
        fake_client.rbdclient

        self.fake_target_iqn = 'iqn.2019-01.com.suse.iscsi-gw:iscsi-igw'
        self.fake_valid_response = {'status': '200'}
        # Canned REST response/body pair; tests patch 'status' and replace
        # the XX_REPLACE_ME token in 'content-location' with a target IQN.
        self.fake_clients = \
            {'response':
             {'Content-Type': 'application/json',
              'Content-Length': '55',
              'Server': 'Werkzeug/0.14.1 Python/2.7.15rc1',
              'Date': 'Wed, 19 Jun 2019 20:13:18 GMT',
              'status': '200',
              'content-location': 'http://192.168.121.11:5001/api/clients/'
                                  'XX_REPLACE_ME'},
             'body':
             {'clients': ['iqn.1993-08.org.debian:01:5d3b9abba13d']}}
        self.volume_a = fake_volume.fake_volume_obj(
            self.context,
            **{'name': u'volume-0000000a',
               'id': '4c39c3c7-168f-4b32-b585-77f1b3bf0a38',
               'size': 10})
        self.volume_b = fake_volume.fake_volume_obj(
            self.context,
            **{'name': u'volume-0000000b',
               'id': '0c7d1f44-5a06-403f-bb82-ae7ad0d693a6',
               'size': 10})
        # volume_c carries an encryption key id to exercise encrypted-volume
        # paths.
        self.volume_c = fake_volume.fake_volume_obj(
            self.context,
            **{'name': u'volume-0000000a',
               'id': '55555555-222f-4b32-b585-9991b3bf0a99',
               'size': 12,
               'encryption_key_id': fake.ENCRYPTION_KEY_ID})

    def setup_configuration(self):
        """Return a mock driver configuration with sane default values."""
        config = mock.MagicMock()
        config.rbd_cluster_name = 'nondefault'
        config.rbd_pool = 'rbd'
        config.rbd_ceph_conf = '/etc/ceph/my_ceph.conf'
        config.rbd_secret_uuid = None
        config.rbd_user = 'cinder'
        config.volume_backend_name = None
        config.rbd_iscsi_api_user = 'fake_user'
        config.rbd_iscsi_api_password = 'fake_password'
        config.rbd_iscsi_api_url = 'http://fake.com:5000'
        return config

    @mock.patch(
        'rbd_iscsi_client.client.RBDISCSIClient',
        spec=True,
    )
    def setup_mock_client(self, _m_client, config=None, mock_conf=None):
        """Build the driver under test backed by a mocked REST client.

        Returns the mock client instance so tests can attach behavior via
        *mock_conf* ('method.return_value' style keys).
        """
        _m_client = _m_client.return_value

        # Configure the base constants, defaults etc...
        if mock_conf:
            _m_client.configure_mock(**mock_conf)

        if config is None:
            config = self.setup_configuration()
        self.driver = driver.RBDISCSIDriver(configuration=config)
        self.driver.set_initialized()
        return _m_client

    @mock.patch('rbd_iscsi_client.version', '0.1.0')
    def test_unsupported_client_version(self):
        """do_setup must reject an rbd_iscsi_client that is too old."""
        self.setup_mock_client()
        with mock.patch('cinder.volume.drivers.rbd.RBDDriver.do_setup'):
            self.assertRaises(exception.InvalidInput,
                              self.driver.do_setup, None)

    @ddt.data({'user': None, 'password': 'foo',
               'url': 'http://fake.com:5000', 'iqn': None},
              {'user': None, 'password': None,
               'url': 'http://fake', 'iqn': None},
              {'user': None, 'password': None,
               'url': None, 'iqn': None},
              {'user': 'fake', 'password': 'fake',
               'url': None, 'iqn': None},
              {'user': 'fake', 'password': 'fake',
               'url': 'fake', 'iqn': None},
              )
    @ddt.unpack
    def test_min_config(self, user, password, url, iqn):
        """Any missing required API setting must fail check_for_setup_error."""
        config = self.setup_configuration()
        config.rbd_iscsi_api_user = user
        config.rbd_iscsi_api_password = password
        config.rbd_iscsi_api_url = url
        config.rbd_iscsi_target_iqn = iqn
        self.setup_mock_client(config=config)

        with mock.patch('cinder.volume.drivers.rbd.RBDDriver'
                        '.check_for_setup_error'):
            self.assertRaises(exception.InvalidConfigurationValue,
                              self.driver.check_for_setup_error)

    @ddt.data({'response': None},
              {'response': {'nothing': 'nothing'}},
              {'response': {'status': '300'}})
    @ddt.unpack
    def test_do_setup(self, response):
        """do_setup must fail when the API liveness check returns a bad response."""
        mock_conf = {
            'get_api.return_value': (response, None)}

        mock_client = self.setup_mock_client(mock_conf=mock_conf)
        with mock.patch('cinder.volume.drivers.rbd.RBDDriver.do_setup'), \
            mock.patch.object(driver.RBDISCSIDriver,
                              '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(exception.InvalidConfigurationValue,
                              self.driver.do_setup, None)

    @mock.patch('rbd_iscsi_client.version', "0.1.4")
    def test_unsupported_version(self):
        """_create_client itself must reject an unsupported client version."""
        self.setup_mock_client()
        self.assertRaises(exception.InvalidInput,
                          self.driver._create_client)

    @ddt.data({'status': '200',
               'target_iqn': 'iqn.2019-01.com.suse.iscsi-gw:iscsi-igw',
               'clients': ['foo']},
              {'status': '300',
               'target_iqn': 'iqn.2019-01.com.suse.iscsi-gw:iscsi-igw',
               'clients': None}
              )
    @ddt.unpack
    def test__get_clients(self, status, target_iqn, clients):
        """_get_clients returns the body on HTTP 200, raises otherwise."""
        config = self.setup_configuration()
        config.rbd_iscsi_target_iqn = target_iqn
        # patch the canned response for this scenario's status/target
        response = self.fake_clients['response']
        response['status'] = status
        response['content-location'] = (
            response['content-location'].replace('XX_REPLACE_ME', target_iqn))
        body = self.fake_clients['body']
        mock_conf = {
            'get_clients.return_value': (response, body),
            'get_api.return_value': (self.fake_valid_response, None)
        }

        mock_client = self.setup_mock_client(mock_conf=mock_conf,
                                             config=config)
        with mock.patch('cinder.volume.drivers.rbd.RBDDriver.do_setup'), \
            mock.patch.object(driver.RBDISCSIDriver,
                              '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.do_setup(None)

            if status == '200':
                actual_response = self.driver._get_clients()
                self.assertEqual(actual_response, body)
            else:
                # we expect an exception
                self.assertRaises(exception.VolumeBackendAPIException,
                                  self.driver._get_clients)

    @ddt.data({'status': '200',
               'body': {'created': 'someday',
                        'discovery_auth': 'somecrap',
                        'disks': 'fakedisks',
                        'gateways': 'fakegws',
                        'targets': 'faketargets'}},
              {'status': '300',
               'body': None})
    @ddt.unpack
    def test__get_config(self, status, body):
        """_get_config returns the body on HTTP 200, raises otherwise."""
        config = self.setup_configuration()
        config.rbd_iscsi_target_iqn = self.fake_target_iqn
        # patch the canned response for this scenario's status/target
        response = self.fake_clients['response']
        response['status'] = status
        response['content-location'] = (
            response['content-location'].replace('XX_REPLACE_ME',
                                                 self.fake_target_iqn))
        mock_conf = {
            'get_config.return_value': (response, body),
            'get_api.return_value': (self.fake_valid_response, None)
        }

        mock_client = self.setup_mock_client(mock_conf=mock_conf,
                                             config=config)
        with mock.patch('cinder.volume.drivers.rbd.RBDDriver.do_setup'), \
            mock.patch.object(driver.RBDISCSIDriver,
                              '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.do_setup(None)

            if status == '200':
                actual_response = self.driver._get_config()
                self.assertEqual(body, actual_response)
            else:
                # we expect an exception
                self.assertRaises(exception.VolumeBackendAPIException,
                                  self.driver._get_config)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.