gt stringclasses 1 value | context stringlengths 2.49k 119k |
|---|---|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import json
import os.path
import re
import signal
import time
from ducktape.services.service import Service
from ducktape.utils.util import wait_until
from ducktape.cluster.remoteaccount import RemoteCommandError
from config import KafkaConfig
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.kafka import config_property
from kafkatest.services.monitor.jmx import JmxMixin
from kafkatest.services.security.minikdc import MiniKdc
from kafkatest.services.security.security_config import SecurityConfig
from kafkatest.version import DEV_BRANCH
# A broker listener: security-protocol name, TCP port number, and whether the
# listener is currently enabled ("open") on the brokers.
Port = collections.namedtuple('Port', ['name', 'number', 'open'])
class KafkaService(KafkaPathResolverMixin, JmxMixin, Service):
    """Ducktape service that runs a cluster of Kafka brokers, one per node.

    ZooKeeper is provided by the ``zk`` service given to the constructor.
    Administrative operations (topic creation, partition reassignment,
    consumer-group queries, data-file inspection, ...) are performed by
    running the Kafka command-line tools on a node over SSH.
    """

    PERSISTENT_ROOT = "/mnt"
    STDOUT_STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "server-start-stdout-stderr.log")
    LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j.properties")
    # Logs such as controller.log, server.log, etc all go here
    OPERATIONAL_LOG_DIR = os.path.join(PERSISTENT_ROOT, "kafka-operational-logs")
    OPERATIONAL_LOG_INFO_DIR = os.path.join(OPERATIONAL_LOG_DIR, "info")
    OPERATIONAL_LOG_DEBUG_DIR = os.path.join(OPERATIONAL_LOG_DIR, "debug")
    # Kafka log segments etc go here
    DATA_LOG_DIR_PREFIX = os.path.join(PERSISTENT_ROOT, "kafka-data-logs")
    DATA_LOG_DIR_1 = "%s-1" % (DATA_LOG_DIR_PREFIX)
    DATA_LOG_DIR_2 = "%s-2" % (DATA_LOG_DIR_PREFIX)
    CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "kafka.properties")

    # Kafka Authorizer
    SIMPLE_AUTHORIZER = "kafka.security.auth.SimpleAclAuthorizer"

    # Log files that ducktape may collect from the nodes after a test run.
    logs = {
        "kafka_server_start_stdout_stderr": {
            "path": STDOUT_STDERR_CAPTURE,
            "collect_default": True},
        "kafka_operational_logs_info": {
            "path": OPERATIONAL_LOG_INFO_DIR,
            "collect_default": True},
        "kafka_operational_logs_debug": {
            "path": OPERATIONAL_LOG_DEBUG_DIR,
            "collect_default": False},
        "kafka_data_1": {
            "path": DATA_LOG_DIR_1,
            "collect_default": False},
        "kafka_data_2": {
            "path": DATA_LOG_DIR_2,
            "collect_default": False}
    }

    def __init__(self, context, num_nodes, zk, security_protocol=SecurityConfig.PLAINTEXT,
                 interbroker_security_protocol=SecurityConfig.PLAINTEXT,
                 client_sasl_mechanism=SecurityConfig.SASL_MECHANISM_GSSAPI,
                 interbroker_sasl_mechanism=SecurityConfig.SASL_MECHANISM_GSSAPI,
                 authorizer_class_name=None, topics=None, version=DEV_BRANCH, jmx_object_names=None,
                 jmx_attributes=None, zk_connect_timeout=5000, zk_session_timeout=6000,
                 server_prop_overides=None):
        """
        :type context
        :type zk: ZookeeperService
        :type topics: dict

        ``server_prop_overides`` is a list of ``[property_name, value]``
        pairs applied on top of the rendered broker config.  It defaults to
        ``None`` (meaning no overrides) rather than ``[]`` so that a shared
        mutable default list is never accumulated across instances.
        """
        Service.__init__(self, context, num_nodes)
        JmxMixin.__init__(self, num_nodes, jmx_object_names, jmx_attributes or [])
        self.zk = zk

        self.security_protocol = security_protocol
        self.interbroker_security_protocol = interbroker_security_protocol
        self.client_sasl_mechanism = client_sasl_mechanism
        self.interbroker_sasl_mechanism = interbroker_sasl_mechanism
        self.topics = topics
        self.minikdc = None
        self.authorizer_class_name = authorizer_class_name
        self.zk_set_acl = False
        # Normalize the (formerly mutable) default to a fresh list per instance.
        self.server_prop_overides = [] if server_prop_overides is None else server_prop_overides
        self.log_level = "DEBUG"
        self.num_nodes = num_nodes

        #
        # In a heavily loaded and not very fast machine, it is
        # sometimes necessary to give more time for the zk client
        # to have its session established, especially if the client
        # is authenticating and waiting for the SaslAuthenticated
        # in addition to the SyncConnected event.
        #
        # The default value for zookeeper.connect.timeout.ms is
        # 2 seconds and here we increase it to 5 seconds, but
        # it can be overridden by setting the corresponding parameter
        # for this constructor.
        self.zk_connect_timeout = zk_connect_timeout

        # Also allow the session timeout to be provided explicitly,
        # primarily so that test cases can depend on it when waiting
        # e.g. brokers to deregister after a hard kill.
        self.zk_session_timeout = zk_session_timeout

        self.port_mappings = {
            'PLAINTEXT': Port('PLAINTEXT', 9092, False),
            'SSL': Port('SSL', 9093, False),
            'SASL_PLAINTEXT': Port('SASL_PLAINTEXT', 9094, False),
            'SASL_SSL': Port('SASL_SSL', 9095, False)
        }

        for node in self.nodes:
            node.version = version
            node.config = KafkaConfig(**{config_property.BROKER_ID: self.idx(node)})

    def set_version(self, version):
        """Set the Kafka version to run on every node."""
        for node in self.nodes:
            node.version = version

    @property
    def security_config(self):
        """Build a SecurityConfig reflecting the currently open listeners."""
        config = SecurityConfig(self.context, self.security_protocol, self.interbroker_security_protocol,
                                zk_sasl=self.zk.zk_sasl,
                                client_sasl_mechanism=self.client_sasl_mechanism,
                                interbroker_sasl_mechanism=self.interbroker_sasl_mechanism)
        for protocol in self.port_mappings:
            port = self.port_mappings[protocol]
            if port.open:
                config.enable_security_protocol(port.name)
        return config

    def open_port(self, protocol):
        """Mark the listener for `protocol` as enabled."""
        self.port_mappings[protocol] = self.port_mappings[protocol]._replace(open=True)

    def close_port(self, protocol):
        """Mark the listener for `protocol` as disabled."""
        self.port_mappings[protocol] = self.port_mappings[protocol]._replace(open=False)

    def start_minikdc(self, add_principals=""):
        """Start a MiniKdc if any SASL mechanism is in use; otherwise clear it."""
        if self.security_config.has_sasl:
            if self.minikdc is None:
                self.minikdc = MiniKdc(self.context, self.nodes, extra_principals=add_principals)
                self.minikdc.start()
        else:
            self.minikdc = None

    def alive(self, node):
        """Return True if at least one Kafka process is running on `node`."""
        return len(self.pids(node)) > 0

    def start(self, add_principals=""):
        """Start the cluster and create any topics requested at construction."""
        self.open_port(self.security_protocol)
        self.open_port(self.interbroker_security_protocol)

        self.start_minikdc(add_principals)
        Service.start(self)

        # Create topics if necessary
        if self.topics is not None:
            for topic, topic_cfg in self.topics.items():
                if topic_cfg is None:
                    topic_cfg = {}
                topic_cfg["topic"] = topic
                self.create_topic(topic_cfg)

    def set_protocol_and_port(self, node):
        """Compute listeners/advertised.listeners strings for `node` from the open ports."""
        listeners = []
        advertised_listeners = []

        for protocol in self.port_mappings:
            port = self.port_mappings[protocol]
            if port.open:
                listeners.append(port.name + "://:" + str(port.number))
                advertised_listeners.append(port.name + "://" + node.account.hostname + ":" + str(port.number))

        self.listeners = ','.join(listeners)
        self.advertised_listeners = ','.join(advertised_listeners)

    def prop_file(self, node):
        """Render the server.properties content for `node`."""
        cfg = KafkaConfig(**node.config)
        cfg[config_property.ADVERTISED_HOSTNAME] = node.account.hostname
        cfg[config_property.ZOOKEEPER_CONNECT] = self.zk.connect_setting()

        for prop in self.server_prop_overides:
            cfg[prop[0]] = prop[1]

        self.set_protocol_and_port(node)

        # TODO - clean up duplicate configuration logic
        prop_file = cfg.render()
        prop_file += self.render('kafka.properties', node=node, broker_id=self.idx(node),
                                 security_config=self.security_config)
        return prop_file

    def start_cmd(self, node):
        """Build the shell command that launches the broker in the background."""
        cmd = "export JMX_PORT=%d; " % self.jmx_port
        cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG
        cmd += "export KAFKA_OPTS=%s; " % self.security_config.kafka_opts
        cmd += "%s %s 1>> %s 2>> %s &" % \
               (self.path.script("kafka-server-start.sh", node),
                KafkaService.CONFIG_FILE,
                KafkaService.STDOUT_STDERR_CAPTURE,
                KafkaService.STDOUT_STDERR_CAPTURE)
        return cmd

    def start_node(self, node):
        """Write config files, start the broker on `node`, and wait for startup."""
        prop_file = self.prop_file(node)
        self.logger.info("kafka.properties:")
        self.logger.info(prop_file)
        node.account.create_file(KafkaService.CONFIG_FILE, prop_file)
        node.account.create_file(self.LOG4J_CONFIG, self.render('log4j.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR))

        self.security_config.setup_node(node)
        self.security_config.setup_credentials(node, self.path, self.zk.connect_setting(), broker=True)

        cmd = self.start_cmd(node)
        self.logger.debug("Attempting to start KafkaService on %s with command: %s" % (str(node.account), cmd))
        with node.account.monitor_log(KafkaService.STDOUT_STDERR_CAPTURE) as monitor:
            node.account.ssh(cmd)
            monitor.wait_until("Kafka Server.*started", timeout_sec=30, backoff_sec=.25,
                               err_msg="Kafka server didn't finish startup")

        # Credentials for inter-broker communication are created before starting Kafka.
        # Client credentials are created after starting Kafka so that both loading of
        # existing credentials from ZK and dynamic update of credentials in Kafka are tested.
        self.security_config.setup_credentials(node, self.path, self.zk.connect_setting(), broker=False)

        self.start_jmx_tool(self.idx(node), node)
        if len(self.pids(node)) == 0:
            raise Exception("No process ids recorded on node %s" % str(node))

    def pids(self, node):
        """Return process ids associated with running processes on the given node."""
        try:
            cmd = "ps ax | grep -i kafka | grep java | grep -v grep | awk '{print $1}'"
            return [pid for pid in node.account.ssh_capture(cmd, allow_fail=True, callback=int)]
        except (RemoteCommandError, ValueError):
            # Best effort: treat command/parse failure as "no processes found".
            return []

    def signal_node(self, node, sig=signal.SIGTERM):
        """Send `sig` to every Kafka process on `node`."""
        pids = self.pids(node)
        for pid in pids:
            node.account.signal(pid, sig)

    def signal_leader(self, topic, partition=0, sig=signal.SIGTERM):
        """Send `sig` to the broker that currently leads the given partition."""
        leader = self.leader(topic, partition)
        self.signal_node(leader, sig)

    def stop_node(self, node, clean_shutdown=True):
        """Stop the broker on `node` (SIGTERM if clean, SIGKILL otherwise) and wait."""
        pids = self.pids(node)
        sig = signal.SIGTERM if clean_shutdown else signal.SIGKILL

        for pid in pids:
            node.account.signal(pid, sig, allow_fail=False)

        wait_until(lambda: len(self.pids(node)) == 0, timeout_sec=60, err_msg="Kafka node failed to stop")

    def clean_node(self, node):
        """Kill any leftover processes and wipe all service state from `node`."""
        JmxMixin.clean_node(self, node)
        self.security_config.clean_node(node)
        node.account.kill_process("kafka", clean_shutdown=False, allow_fail=True)
        node.account.ssh("sudo rm -rf /mnt/*", allow_fail=False)

    def create_topic(self, topic_cfg, node=None):
        """Run the admin tool create topic command.
        Specifying node is optional, and may be done if for different kafka nodes have different versions,
        and we care where command gets run.

        If the node is not specified, run the command from self.nodes[0]
        """
        if node is None:
            node = self.nodes[0]
        self.logger.info("Creating topic %s with settings %s",
                         topic_cfg["topic"], topic_cfg)
        kafka_topic_script = self.path.script("kafka-topics.sh", node)

        cmd = kafka_topic_script + " "
        cmd += "--zookeeper %(zk_connect)s --create --topic %(topic)s " % {
            'zk_connect': self.zk.connect_setting(),
            'topic': topic_cfg.get("topic"),
        }
        if 'replica-assignment' in topic_cfg:
            cmd += " --replica-assignment %(replica-assignment)s" % {
                'replica-assignment': topic_cfg.get('replica-assignment')
            }
        else:
            cmd += " --partitions %(partitions)d --replication-factor %(replication-factor)d" % {
                'partitions': topic_cfg.get('partitions', 1),
                'replication-factor': topic_cfg.get('replication-factor', 1)
            }

        if "configs" in topic_cfg.keys() and topic_cfg["configs"] is not None:
            for config_name, config_value in topic_cfg["configs"].items():
                cmd += " --config %s=%s" % (config_name, str(config_value))

        self.logger.info("Running topic creation command...\n%s" % cmd)
        node.account.ssh(cmd)

        time.sleep(1)
        self.logger.info("Checking to see if topic was properly created...\n%s" % cmd)
        for line in self.describe_topic(topic_cfg["topic"]).split("\n"):
            self.logger.info(line)

    def describe_topic(self, topic, node=None):
        """Return the raw output of kafka-topics.sh --describe for `topic`."""
        if node is None:
            node = self.nodes[0]
        cmd = "%s --zookeeper %s --topic %s --describe" % \
              (self.path.script("kafka-topics.sh", node), self.zk.connect_setting(), topic)
        output = ""
        for line in node.account.ssh_capture(cmd):
            output += line
        return output

    def list_topics(self, topic, node=None):
        """Yield topic names from kafka-topics.sh --list, skipping SLF4J noise."""
        if node is None:
            node = self.nodes[0]
        cmd = "%s --zookeeper %s --list" % \
              (self.path.script("kafka-topics.sh", node), self.zk.connect_setting())
        for line in node.account.ssh_capture(cmd):
            if not line.startswith("SLF4J"):
                yield line.rstrip()

    def alter_message_format(self, topic, msg_format_version, node=None):
        """Set message.format.version for `topic` via kafka-configs.sh."""
        if node is None:
            node = self.nodes[0]
        self.logger.info("Altering message format version for topic %s with format %s", topic, msg_format_version)
        cmd = "%s --zookeeper %s --entity-name %s --entity-type topics --alter --add-config message.format.version=%s" % \
              (self.path.script("kafka-configs.sh", node), self.zk.connect_setting(), topic, msg_format_version)
        self.logger.info("Running alter message format command...\n%s" % cmd)
        node.account.ssh(cmd)

    def parse_describe_topic(self, topic_description):
        """Parse output of kafka-topics.sh --describe (or describe_topic() method above), which is a string of form
        PartitionCount:2\tReplicationFactor:2\tConfigs:
        Topic: test_topic\ttPartition: 0\tLeader: 3\tReplicas: 3,1\tIsr: 3,1
        Topic: test_topic\tPartition: 1\tLeader: 1\tReplicas: 1,2\tIsr: 1,2
        into a dictionary structure appropriate for use with reassign-partitions tool:
        {
            "partitions": [
                {"topic": "test_topic", "partition": 0, "replicas": [3, 1]},
                {"topic": "test_topic", "partition": 1, "replicas": [1, 2]}
            ]
        }
        """
        # Use list comprehensions rather than map(): on Python 3, map() returns
        # a one-shot iterator that cannot be indexed (fields[0] below would
        # raise TypeError), and "replicas" must be a real list for json.dumps.
        lines = [raw_line.strip() for raw_line in topic_description.split("\n")]
        partitions = []
        for line in lines:
            m = re.match(".*Leader:.*", line)
            if m is None:
                continue
            fields = line.split("\t")
            # ["Partition: 4", "Leader: 0"] -> ["4", "0"]
            fields = [field.split(" ")[1] for field in fields]
            partitions.append(
                {"topic": fields[0],
                 "partition": int(fields[1]),
                 "replicas": [int(r) for r in fields[3].split(',')]})
        return {"partitions": partitions}

    def verify_reassign_partitions(self, reassignment, node=None):
        """Run the reassign partitions admin tool in "verify" mode
        """
        if node is None:
            node = self.nodes[0]
        json_file = "/tmp/%s_reassign.json" % str(time.time())

        # reassignment to json
        json_str = json.dumps(reassignment)
        json_str = json.dumps(json_str)

        # create command
        cmd = "echo %s > %s && " % (json_str, json_file)
        cmd += "%s " % self.path.script("kafka-reassign-partitions.sh", node)
        cmd += "--zookeeper %s " % self.zk.connect_setting()
        cmd += "--reassignment-json-file %s " % json_file
        cmd += "--verify "
        cmd += "&& sleep 1 && rm -f %s" % json_file

        # send command
        self.logger.info("Verifying partition reassignment...")
        self.logger.debug(cmd)
        output = ""
        for line in node.account.ssh_capture(cmd):
            output += line

        self.logger.debug(output)

        if re.match(".*Reassignment of partition.*failed.*",
                    output.replace('\n', '')) is not None:
            return False

        if re.match(".*is still in progress.*",
                    output.replace('\n', '')) is not None:
            return False

        return True

    def execute_reassign_partitions(self, reassignment, node=None,
                                    throttle=None):
        """Run the reassign partitions admin tool in "execute" mode
        """
        if node is None:
            node = self.nodes[0]
        json_file = "/tmp/%s_reassign.json" % str(time.time())

        # reassignment to json
        json_str = json.dumps(reassignment)
        json_str = json.dumps(json_str)

        # create command
        cmd = "echo %s > %s && " % (json_str, json_file)
        cmd += "%s " % self.path.script("kafka-reassign-partitions.sh", node)
        cmd += "--zookeeper %s " % self.zk.connect_setting()
        cmd += "--reassignment-json-file %s " % json_file
        cmd += "--execute"
        if throttle is not None:
            cmd += " --throttle %d" % throttle
        cmd += " && sleep 1 && rm -f %s" % json_file

        # send command
        self.logger.info("Executing partition reassignment...")
        self.logger.debug(cmd)
        output = ""
        for line in node.account.ssh_capture(cmd):
            output += line

        self.logger.debug("Execute partition reassignment:")
        self.logger.debug(output)

    def search_data_files(self, topic, messages):
        """Check if a set of messages made it into the Kafka data files. Note that
        this method takes no account of replication. It simply looks for the
        payload in all the partition files of the specified topic. 'messages' should be
        an array of numbers. The list of missing messages is returned.
        """
        payload_match = "payload: " + "$|payload: ".join(str(x) for x in messages) + "$"
        found = set([])
        self.logger.debug("number of unique missing messages we will search for: %d",
                          len(messages))
        for node in self.nodes:
            # Grab all .log files in directories prefixed with this topic
            files = node.account.ssh_capture("find %s* -regex '.*/%s-.*/[^/]*.log'" % (KafkaService.DATA_LOG_DIR_PREFIX, topic))

            # Check each data file to see if it contains the messages we want
            for log in files:
                cmd = "%s kafka.tools.DumpLogSegments --print-data-log --files %s | grep -E \"%s\"" % \
                      (self.path.script("kafka-run-class.sh", node), log.strip(), payload_match)

                for line in node.account.ssh_capture(cmd, allow_fail=True):
                    for val in messages:
                        if line.strip().endswith("payload: " + str(val)):
                            self.logger.debug("Found %s in data-file [%s] in line: [%s]" % (val, log.strip(), line.strip()))
                            found.add(val)

        self.logger.debug("Number of unique messages found in the log: %d",
                          len(found))
        missing = list(set(messages) - found)

        if len(missing) > 0:
            self.logger.warning("The following values were not found in the data files: " + str(missing))

        return missing

    def restart_node(self, node, clean_shutdown=True):
        """Restart the given node."""
        self.stop_node(node, clean_shutdown)
        self.start_node(node)

    def isr_idx_list(self, topic, partition=0):
        """ Get in-sync replica list the given topic and partition.
        """
        self.logger.debug("Querying zookeeper to find in-sync replicas for topic %s and partition %d" % (topic, partition))
        zk_path = "/brokers/topics/%s/partitions/%d/state" % (topic, partition)
        partition_state = self.zk.query(zk_path)

        if partition_state is None:
            raise Exception("Error finding partition state for topic %s and partition %d." % (topic, partition))

        partition_state = json.loads(partition_state)
        self.logger.info(partition_state)

        isr_idx_list = partition_state["isr"]
        self.logger.info("Isr for topic %s and partition %d is now: %s" % (topic, partition, isr_idx_list))
        return isr_idx_list

    def replicas(self, topic, partition=0):
        """ Get the assigned replicas for the given topic and partition.
        """
        self.logger.debug("Querying zookeeper to find assigned replicas for topic %s and partition %d" % (topic, partition))
        zk_path = "/brokers/topics/%s" % (topic)
        assignment = self.zk.query(zk_path)

        if assignment is None:
            raise Exception("Error finding partition state for topic %s and partition %d." % (topic, partition))

        assignment = json.loads(assignment)
        self.logger.info(assignment)

        replicas = assignment["partitions"][str(partition)]

        self.logger.info("Assigned replicas for topic %s and partition %d is now: %s" % (topic, partition, replicas))
        return [self.get_node(replica) for replica in replicas]

    def leader(self, topic, partition=0):
        """ Get the leader replica for the given topic and partition.
        """
        self.logger.debug("Querying zookeeper to find leader replica for topic %s and partition %d" % (topic, partition))
        zk_path = "/brokers/topics/%s/partitions/%d/state" % (topic, partition)
        partition_state = self.zk.query(zk_path)

        if partition_state is None:
            raise Exception("Error finding partition state for topic %s and partition %d." % (topic, partition))

        partition_state = json.loads(partition_state)
        self.logger.info(partition_state)

        leader_idx = int(partition_state["leader"])
        self.logger.info("Leader for topic %s and partition %d is now: %d" % (topic, partition, leader_idx))
        return self.get_node(leader_idx)

    def cluster_id(self):
        """ Get the current cluster id
        """
        self.logger.debug("Querying ZooKeeper to retrieve cluster id")
        cluster = json.loads(self.zk.query("/cluster/id"))

        if cluster is None:
            raise Exception("Error querying ZK for cluster id.")

        return cluster['id']

    def list_consumer_groups(self, node=None, new_consumer=True, command_config=None):
        """ Get list of consumer groups.
        """
        if node is None:
            node = self.nodes[0]
        consumer_group_script = self.path.script("kafka-consumer-groups.sh", node)

        if command_config is None:
            command_config = ""
        else:
            command_config = "--command-config " + command_config

        if new_consumer:
            cmd = "%s --new-consumer --bootstrap-server %s %s --list" % \
                  (consumer_group_script,
                   self.bootstrap_servers(self.security_protocol),
                   command_config)
        else:
            cmd = "%s --zookeeper %s %s --list" % (consumer_group_script, self.zk.connect_setting(), command_config)
        output = ""
        self.logger.debug(cmd)
        for line in node.account.ssh_capture(cmd):
            if not line.startswith("SLF4J"):
                output += line
        self.logger.debug(output)
        return output

    def describe_consumer_group(self, group, node=None, new_consumer=True, command_config=None):
        """ Describe a consumer group.
        """
        if node is None:
            node = self.nodes[0]
        consumer_group_script = self.path.script("kafka-consumer-groups.sh", node)

        if command_config is None:
            command_config = ""
        else:
            command_config = "--command-config " + command_config

        if new_consumer:
            cmd = "%s --new-consumer --bootstrap-server %s %s --group %s --describe" % \
                  (consumer_group_script, self.bootstrap_servers(self.security_protocol), command_config, group)
        else:
            cmd = "%s --zookeeper %s %s --group %s --describe" % \
                  (consumer_group_script, self.zk.connect_setting(), command_config, group)
        output = ""
        self.logger.debug(cmd)
        for line in node.account.ssh_capture(cmd):
            if not (line.startswith("SLF4J") or line.startswith("TOPIC") or line.startswith("Could not fetch offset")):
                output += line
        self.logger.debug(output)
        return output

    def bootstrap_servers(self, protocol='PLAINTEXT', validate=True, offline_nodes=None):
        """Return comma-delimited list of brokers in this cluster formatted as HOSTNAME1:PORT1,HOSTNAME:PORT2,...

        This is the format expected by many config files.

        ``offline_nodes`` defaults to ``None`` (treated as an empty list) to
        avoid a shared mutable default argument.
        """
        if offline_nodes is None:
            offline_nodes = []
        port_mapping = self.port_mappings[protocol]
        self.logger.info("Bootstrap client port is: " + str(port_mapping.number))

        if validate and not port_mapping.open:
            raise ValueError("We are retrieving bootstrap servers for the port: %s which is not currently open. - " % str(port_mapping))

        return ','.join([node.account.hostname + ":" + str(port_mapping.number) for node in self.nodes if node not in offline_nodes])

    def controller(self):
        """ Get the controller node
        """
        self.logger.debug("Querying zookeeper to find controller broker")
        controller_info = self.zk.query("/controller")

        if controller_info is None:
            raise Exception("Error finding controller info")

        controller_info = json.loads(controller_info)
        self.logger.debug(controller_info)

        controller_idx = int(controller_info["brokerid"])
        self.logger.info("Controller's ID: %d" % (controller_idx))
        return self.get_node(controller_idx)

    def is_registered(self, node):
        """
        Check whether a broker is registered in Zookeeper
        """
        self.logger.debug("Querying zookeeper to see if broker %s is registered", node)
        broker_info = self.zk.query("/brokers/ids/%s" % self.idx(node))
        self.logger.debug("Broker info: %s", broker_info)
        return broker_info is not None

    def get_offset_shell(self, topic, partitions, max_wait_ms, offsets, time):
        """Run kafka.tools.GetOffsetShell and return its captured output."""
        node = self.nodes[0]

        cmd = self.path.script("kafka-run-class.sh", node)
        cmd += " kafka.tools.GetOffsetShell"
        cmd += " --topic %s --broker-list %s --max-wait-ms %s --offsets %s --time %s" % (topic, self.bootstrap_servers(self.security_protocol), max_wait_ms, offsets, time)

        if partitions:
            cmd += ' --partitions %s' % partitions

        cmd += " 2>> /mnt/get_offset_shell.log | tee -a /mnt/get_offset_shell.log &"
        output = ""
        self.logger.debug(cmd)
        for line in node.account.ssh_capture(cmd):
            output += line
        self.logger.debug(output)
        return output
| |
"""
The preprocessor mini-languages.
Doxhooks `preprocessor directives <preprocessor directive>`:term: are
written in an extensible `preprocessor`:term: `mini-language`:term:.
Each mini-language is a context that defines the meanings of the
*keywords* in that mini-language (`BasePreprocessorContext`). The
context is also where the `preprocessor variables <preprocessor
node>`:term: are defined.
Exports
-------
PreprocessorContext
The keywords and variables of a basic preprocessor mini-language.
BasePreprocessorContext
Base class of a preprocessor mini-language.
lowercase_booleans
Modify a context to allow lowercase boolean representations.
startcase_booleans
Modify a context to allow start-case boolean representations.
"""
import ast
import doxhooks.console as console
from doxhooks.errors import (
DoxhooksDataError, DoxhooksLookupError, DoxhooksTypeError)
from doxhooks.functions import findvalue
__all__ = [
"BasePreprocessorContext",
"PreprocessorContext",
"lowercase_booleans",
"startcase_booleans",
]
class BasePreprocessorContext:
    """
    Base class of a preprocessor mini-language.

    A subclass of `BasePreprocessorContext` defines a preprocessor
    mini-language:

    * An instance of the subclass is a runtime context of the
      mini-language.
    * The public instance variables and class variables are the
      variables in the mini-language.
    * The public methods are the *keywords* of the mini-language.
    * The private methods and variables provide implementation details
      of the mini-language.

    Private methods and variables are those with names that start with
    an underscore.

    Class Interface
    ---------------
    get
        Return the output representation of a variable.
    interpret
        Interpret tokens within the context of the mini-language.
    """

    def __init__(self, **variables):
        r"""
        Initialise the context with the names and values of variables.

        Parameters
        ----------
        \**variables
            These keyword parameters and their arguments define the
            names and values of variables in the mini-language. These
            variables are implemented as instance variables of the
            context.
        """
        # Install each keyword argument directly as an instance variable.
        vars(self).update(variables)

    def _get_token_value(self, object_token):
        # Apply the 'member' operator ('.') within a token.
        # Walks a dotted path (e.g. "a.b.c") one identifier at a time,
        # starting from the context itself.
        value = self
        identifiers = object_token.split(".")
        for i, identifier in enumerate(identifiers):
            try:
                value = findvalue(value, identifier)
            except DoxhooksLookupError as error:
                # Rewrite the error description so it names the object on
                # which the lookup failed: the context itself for the first
                # identifier, otherwise the dotted prefix already resolved.
                if i == 0:
                    description = "preprocessor context"
                else:
                    description = "`{}`".format(".".join(identifiers[:i]))
                error.description = description
                raise
        return value

    def _convert_output_type_to_str(self, value, identifier):
        # Return a str, float or int (and perhaps bool) value as str.
        if isinstance(value, str):
            return value
        # bool is a subclass of int, so exclude it here: bool output is only
        # allowed when a subclass/decorator provides _convert_bool_to_str.
        if isinstance(value, (float, int)) and not isinstance(value, bool):
            return str(value)
        if isinstance(value, bool):
            try:
                return self._convert_bool_to_str(value)
            except AttributeError:
                # No bool converter installed; fall through to the type error.
                pass
        # The error message notes whether bool would have been acceptable.
        output_types = "str, float or int ({}including bool)".format(
            "" if hasattr(self, "_convert_bool_to_str") else "not ")
        raise DoxhooksTypeError(value, identifier, output_types)

    def get(self, output_token, *, preprocessor=None):
        """
        Return the output representation of a variable.

        Parameters
        ----------
        output_token : str
            The name of the variable.
        preprocessor
            Unused. Defaults to ``None``.

        Returns
        -------
        str
            The output representation.

        Raises
        ------
        ~doxhooks.errors.DoxhooksLookupError
            If a value with that name is not found.
        ~doxhooks.errors.DoxhooksTypeError
            If the type of the variable is not `str`, `float` or `int`.
            (`bool` can also be an output type: See `startcase_booleans`
            and `lowercase_booleans`.)
        """
        value = self._get_token_value(output_token)
        return self._convert_output_type_to_str(value, output_token)

    def _get_local_value(self, identifier):
        # Look up an identifier on the context itself (no dotted-path walk).
        try:
            return findvalue(self, identifier)
        except DoxhooksLookupError as error:
            error.description = "preprocessor context"
            raise

    def interpret(self, keyword_token, *tokens, preprocessor=None):
        r"""
        Interpret tokens within the context of the mini-language.

        Parameters
        ----------
        keyword_token : str
            The name of a public method that defines a keyword in the
            mini-language.
        \*tokens : str
            Tokens to be passed to the keyword method.
        preprocessor : ~doxhooks.preprocessors.Preprocessor or None, optional
            Keyword-only. A preprocessor that may be required by the
            keyword method. Defaults to ``None``.

        Raises
        ------
        ~doxhooks.errors.DoxhooksLookupError
            If the keyword is not found in this context.
        ~doxhooks.errors.DoxhooksTypeError
            If the type of the keyword is not a `callable` type.
        ~doxhooks.errors.DoxhooksDataError
            If the tokens do not conform to the mini-language syntax.
        """
        keyword = self._get_local_value(keyword_token)
        if not callable(keyword):
            raise DoxhooksTypeError(keyword, keyword_token, "callable")
        try:
            keyword(*tokens, preprocessor=preprocessor)
        except TypeError as error:
            # Distinguish a call-signature mismatch (bad directive syntax)
            # from a TypeError raised inside the keyword itself: CPython
            # prefixes signature errors with "<function-name>()".
            error_message = str(error)
            if error_message.startswith(keyword.__name__ + "()"):
                raise DoxhooksDataError("Bad syntax:", keyword_token, tokens) \
                    from error
            raise
class PreprocessorContext(BasePreprocessorContext):
"""
The keywords and variables of a basic preprocessor mini-language.
`PreprocessorContext` extends `BasePreprocessorContext`.
Class Interface
---------------
set
Set the value of a variable.
if_
Interpret additional tokens if a condition is true.
insert
Preprocess the contents of one or more files.
include
Preprocess a file if it has not already been preprocessed.
write
Preprocess a line of text.
error
Raise a `~doxhooks.errors.DoxhooksDataError`.
warning
Write a warning message to stderr.
"""
def set(self, identifier, value_token, *, preprocessor=None):
"""
Set the value of a variable.
The variable will be defined in the runtime context of the
preprocessor mini-language.
The value is either the result of safely evaluating the value
token with `ast.literal_eval`, or the token itself if it cannot
be evaluated.
Parameters
----------
identifier : str
The name of the variable.
value_token : str
A token representing the value to be set.
preprocessor
Unused. Defaults to ``None``.
"""
try:
value = ast.literal_eval(value_token)
except (SyntaxError, ValueError):
value = value_token
setattr(self, identifier, value)
def if_(self, condition_token, keyword_token, *tokens, preprocessor=None):
r"""
Interpret additional tokens if a condition is true.
Note
----
This keyword can be written as ``if`` (instead of ``if_``)
in the preprocessor directives.
Parameters
----------
condition_token : str
The name of a variable that defines the condition.
keyword_token : str
A token to be interpreted (within the context of the
mini-language) if the condition is true.
\*tokens : str
More tokens to be interpreted if the condition is true.
preprocessor : ~doxhooks.preprocessors.Preprocessor or None, optional
Keyword-only. A preprocessor that may be required if the
additional tokens are interpreted. Defaults to ``None``.
Raises
------
~doxhooks.errors.DoxhooksLookupError
If the condition is not found.
~doxhooks.errors.DoxhooksTypeError
If the type of the condition is not `bool`.
"""
condition = self._get_token_value(condition_token)
if not isinstance(condition, bool):
raise DoxhooksTypeError(condition, condition_token, "bool")
if condition:
self.interpret(keyword_token, *tokens, preprocessor=preprocessor)
def _get_filename(self, file_token):
# Evaluate a filename token and return the value.
try:
value = self._get_token_value(file_token)
except DoxhooksDataError:
value = file_token
if not (isinstance(value, str) or value is None):
raise DoxhooksTypeError(value, file_token, "str or None")
return value
def insert(self, file_token, *file_tokens, preprocessor):
r"""
Preprocess the contents of one or more files.
Parameters
----------
file_token : str
A filename or the name of a variable that defines a
filename.
\*file_tokens : str
More filenames or variable names. The order of the arguments
determines the order that the files are inserted.
preprocessor : ~doxhooks.preprocessors.Preprocessor
The preprocessor that will preprocess the files.
Raises
------
~doxhooks.errors.DoxhooksTypeError
If the type of the filename is not `str` or ``None``.
~doxhooks.errors.DoxhooksFileError
If the file cannot be read.
"""
tokens = (file_token,) + file_tokens
for token in tokens:
preprocessor.insert_file(self._get_filename(token))
def include(self, file_token, *file_tokens, preprocessor):
r"""
Preprocess a file if it has not already been preprocessed.
Parameters
----------
file_token : str
A filename or the name of a variable that defines a
filename.
\*file_tokens : str
More filenames or variable names. The order of the arguments
determines the order that the files are included.
preprocessor : ~doxhooks.preprocessors.Preprocessor
The preprocessor that will preprocess the files.
Raises
------
~doxhooks.errors.DoxhooksTypeError
If the type of the filename is not `str` or ``None``.
~doxhooks.errors.DoxhooksFileError
If the file cannot be read.
"""
tokens = (file_token,) + file_tokens
for token in tokens:
filename = self._get_filename(token)
preprocessor.insert_file(filename, idempotent=True)
def write(self, line, *, preprocessor):
    """
    Preprocess a single line of text.

    Parameters
    ----------
    line : str
        The line of text (without a trailing newline).
    preprocessor : ~doxhooks.preprocessors.Preprocessor
        The preprocessor that preprocesses the line.
    """
    terminated_line = line + "\n"
    preprocessor.insert_lines((terminated_line,))
def error(self, message, *, preprocessor=None):
    """
    Fail unconditionally with a `~doxhooks.errors.DoxhooksDataError`.

    Parameters
    ----------
    message : str
        The error message.
    preprocessor
        Unused. Defaults to ``None``.

    Raises
    ------
    ~doxhooks.errors.DoxhooksDataError
        Always.
    """
    raise DoxhooksDataError(message)
def warning(self, message, *, preprocessor=None):
    """
    Print a warning message on the standard error stream.

    Parameters
    ----------
    message : str
        The warning message.
    preprocessor
        Unused. Defaults to ``None``.
    """
    console.warning(message)
def _convert_to_lowercase_str(self, value):
return str(value).lower()
def lowercase_booleans(context_class):
    """
    Class decorator: render boolean values as ``true``/``false``.

    The decorated subclass of `BasePreprocessorContext` gains the
    ability to return string representations of `bool` values from
    `BasePreprocessorContext.get`, spelt in lowercase, which is the
    correct spelling of boolean literals in most programming languages.

    Parameters
    ----------
    context_class : type
        The subclass of `BasePreprocessorContext` to modify.

    Returns
    -------
    type
        The same class, modified in place.
    """
    context_class._convert_bool_to_str = _convert_to_lowercase_str
    return context_class
def _convert_to_str(self, value):
return str(value)
def startcase_booleans(context_class):
    """
    Class decorator: render boolean values as ``True``/``False``.

    The decorated subclass of `BasePreprocessorContext` gains the
    ability to return string representations of `bool` values from
    `BasePreprocessorContext.get`, spelt in start case, which is the
    correct spelling of boolean literals in the Python language.

    Parameters
    ----------
    context_class : type
        The subclass of `BasePreprocessorContext` to modify.

    Returns
    -------
    type
        The same class, modified in place.
    """
    context_class._convert_bool_to_str = _convert_to_str
    return context_class
| |
#!/usr/bin/env python
#
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import bisect
import collections
import ctypes
import disasm
import mmap
import optparse
import os
import re
import subprocess
import sys
import time
# Command-line help text; optparse substitutes %prog with the script name.
USAGE="""usage: %prog [OPTION]...
Analyses V8 and perf logs to produce profiles.
Perf logs can be collected using a command like:
$ perf record -R -e cycles -c 10000 -f -i ./d8 bench.js --ll-prof
# -R: collect all data
# -e cycles: use cpu-cycles event (run "perf list" for details)
# -c 10000: write a sample after each 10000 events
# -f: force output file overwrite
# -i: limit profiling to our process and the kernel
# --ll-prof shell flag enables the right V8 logs
This will produce a binary trace file (perf.data) that %prog can analyse.
IMPORTANT:
The kernel has an internal maximum for events per second, it is 100K by
default. That's not enough for "-c 10000". Set it to some higher value:
$ echo 10000000 | sudo tee /proc/sys/kernel/perf_event_max_sample_rate
You can also make the warning about kernel address maps go away:
$ echo 0 | sudo tee /proc/sys/kernel/kptr_restrict
We have a convenience script that handles all of the above for you:
$ tools/run-llprof.sh ./d8 bench.js
Examples:
# Print flat profile with annotated disassembly for the 10 top
# symbols. Use default log names.
$ %prog --disasm-top=10
# Print flat profile with annotated disassembly for all used symbols.
# Use default log names and include kernel symbols into analysis.
$ %prog --disasm-all --kernel
# Print flat profile. Use custom log names.
$ %prog --log=foo.log --trace=foo.data
"""

# Origin tag for code objects read from the V8 low-level log; native
# symbols use their library file path as origin instead.
JS_ORIGIN = "js"
class Code(object):
    """A code object (generated or native) occupying a fixed address range."""

    # Class-wide counter used to hand out unique ids.
    _id = 0

    # Code types, derived from the name prefix written by V8's logger.
    UNKNOWN = 0
    V8INTERNAL = 1    # v8::internal::* C++ symbols.
    FULL_CODEGEN = 2  # "LazyCompile:" entries (unoptimized JS code).
    OPTIMIZED = 3     # "LazyCompile:*" entries (optimized JS code).

    def __init__(self, name, start_address, end_address, origin, origin_offset):
        self.id = Code._id
        Code._id += 1
        self.name = name
        # Aliases for this code object (lazily allocated; see AddName).
        self.other_names = None
        self.start_address = start_address
        self.end_address = end_address
        # JS_ORIGIN for code from the V8 log, else a library file path.
        self.origin = origin
        # Offset at which the machine code can be found for disassembly
        # (log offset for JS code, symbol address for library code).
        self.origin_offset = origin_offset
        # Samples whose pc fell inside this object, plus a per-offset
        # breakdown of those samples (lazily allocated in Tick).
        self.self_ticks = 0
        self.self_ticks_map = None
        # Map of callee Code -> sample count (lazily allocated).
        self.callee_ticks = None
        if name.startswith("LazyCompile:*"):
            self.codetype = Code.OPTIMIZED
        elif name.startswith("LazyCompile:"):
            self.codetype = Code.FULL_CODEGEN
        elif name.startswith("v8::internal::"):
            self.codetype = Code.V8INTERNAL
        else:
            self.codetype = Code.UNKNOWN

    def AddName(self, name):
        # Record an alias (code objects can be shared by several functions).
        assert self.name != name
        if self.other_names is None:
            self.other_names = [name]
            return
        if not name in self.other_names:
            self.other_names.append(name)

    def FullName(self):
        # Primary name plus any aliases, sorted for stable output.
        if self.other_names is None:
            return self.name
        self.other_names.sort()
        return "%s (aka %s)" % (self.name, ", ".join(self.other_names))

    def IsUsed(self):
        # True if any sample hit this code directly or via a callee.
        return self.self_ticks > 0 or self.callee_ticks is not None

    def Tick(self, pc):
        # Record one sample at pc, keyed by the offset from the start.
        self.self_ticks += 1
        if self.self_ticks_map is None:
            self.self_ticks_map = collections.defaultdict(lambda: 0)
        offset = pc - self.start_address
        self.self_ticks_map[offset] += 1

    def CalleeTick(self, callee):
        # Record one sample attributed to a call into callee.
        if self.callee_ticks is None:
            self.callee_ticks = collections.defaultdict(lambda: 0)
        self.callee_ticks[callee] += 1

    def PrintAnnotated(self, arch, options):
        """Print this object's disassembly, annotated with tick counts."""
        if self.self_ticks_map is None:
            ticks_map = []
        else:
            ticks_map = self.self_ticks_map.items()
        # Convert the ticks map to offsets and counts arrays so that later
        # we can do binary search in the offsets array.
        ticks_map.sort(key=lambda t: t[0])
        ticks_offsets = [t[0] for t in ticks_map]
        ticks_counts = [t[1] for t in ticks_map]
        # Get a list of disassembled lines and their addresses.
        lines = self._GetDisasmLines(arch, options)
        if len(lines) == 0:
            return
        # Print annotated lines.
        address = lines[0][0]
        total_count = 0
        for i in xrange(len(lines)):
            start_offset = lines[i][0] - address
            if i == len(lines) - 1:
                end_offset = self.end_address - self.start_address
            else:
                end_offset = lines[i + 1][0] - address
            # Ticks (reported pc values) are not always precise, i.e. not
            # necessarily point at instruction starts. So we have to search
            # for ticks that touch the current instruction line.
            j = bisect.bisect_left(ticks_offsets, end_offset)
            count = 0
            for offset, cnt in reversed(zip(ticks_offsets[:j], ticks_counts[:j])):
                if offset < start_offset:
                    break
                count += cnt
            total_count += count
            percent = 100.0 * count / self.self_ticks
            offset = lines[i][0]
            if percent >= 0.01:
                # 5 spaces for tick count
                # 1 space following
                # 1 for '|'
                # 1 space following
                # 6 for the percentage number, incl. the '.'
                # 1 for the '%' sign
                # => 15
                print "%5d | %6.2f%% %x(%d): %s" % (count, percent, offset, offset, lines[i][1])
            else:
                print "%s %x(%d): %s" % (" " * 15, offset, offset, lines[i][1])
        print
        # Sanity check: every recorded tick must land on some line.
        assert total_count == self.self_ticks, \
            "Lost ticks (%d != %d) in %s" % (total_count, self.self_ticks, self)

    def __str__(self):
        return "%s [0x%x, 0x%x) size: %d origin: %s" % (
            self.name,
            self.start_address,
            self.end_address,
            self.end_address - self.start_address,
            self.origin)

    def _GetDisasmLines(self, arch, options):
        # JS code is disassembled out of the ".ll" log copy; native code
        # is disassembled in place from the library file.
        if self.origin == JS_ORIGIN:
            inplace = False
            filename = options.log + ".ll"
        else:
            inplace = True
            filename = self.origin
        return disasm.GetDisasmLines(filename,
                                     self.origin_offset,
                                     self.end_address - self.start_address,
                                     arch,
                                     inplace)
class CodePage(object):
    """A bucket of code objects that overlap one aligned 1MB address page."""

    SHIFT = 20  # log2 of the page size: 1M pages.
    SIZE = (1 << SHIFT)
    MASK = ~(SIZE - 1)

    @staticmethod
    def PageAddress(address):
        # Round an address down to the start of its page.
        return address & CodePage.MASK

    @staticmethod
    def PageId(address):
        return address >> CodePage.SHIFT

    @staticmethod
    def PageAddressFromId(id):
        return id << CodePage.SHIFT

    def __init__(self, address):
        self.address = address
        self.code_objects = []

    def Add(self, code):
        self.code_objects.append(code)

    def Remove(self, code):
        self.code_objects.remove(code)

    def Find(self, pc):
        """Return the code object whose range covers pc, or None.

        A hit is swapped to the front of the list so that repeated
        lookups of hot code objects stay cheap.
        """
        entries = self.code_objects
        for index, candidate in enumerate(entries):
            if candidate.start_address <= pc < candidate.end_address:
                entries[0], entries[index] = candidate, entries[0]
                return candidate
        return None

    def __iter__(self):
        return iter(self.code_objects)
class CodeMap(object):
    """Map from addresses to code objects, bucketed into 1MB CodePages."""

    def __init__(self):
        # page id -> CodePage; only pages that contain code exist.
        self.pages = {}
        # Overall address range covered, used for a fast reject in Find.
        self.min_address = 1 << 64
        self.max_address = -1

    def Add(self, code, max_pages=-1):
        """Register code on every page its address range touches.

        max_pages >= 0 caps how many pages one object may occupy (used
        for huge kernel symbols); extra pages are dropped with a warning.
        """
        page_id = CodePage.PageId(code.start_address)
        limit_id = CodePage.PageId(code.end_address + CodePage.SIZE - 1)
        pages = 0
        while page_id < limit_id:
            if max_pages >= 0 and pages > max_pages:
                print >>sys.stderr, \
                    "Warning: page limit (%d) reached for %s [%s]" % (
                        max_pages, code.name, code.origin)
                break
            if page_id in self.pages:
                page = self.pages[page_id]
            else:
                page = CodePage(CodePage.PageAddressFromId(page_id))
                self.pages[page_id] = page
            page.Add(code)
            page_id += 1
            pages += 1
        self.min_address = min(self.min_address, code.start_address)
        self.max_address = max(self.max_address, code.end_address)

    def Remove(self, code):
        # Unregister code from every existing page in its range.
        # Returns True if it was removed from at least one page.
        page_id = CodePage.PageId(code.start_address)
        limit_id = CodePage.PageId(code.end_address + CodePage.SIZE - 1)
        removed = False
        while page_id < limit_id:
            if page_id not in self.pages:
                page_id += 1
                continue
            page = self.pages[page_id]
            page.Remove(code)
            removed = True
            page_id += 1
        return removed

    def AllCode(self):
        # Yield each code object exactly once: only from the page that
        # contains its start address.
        for page in self.pages.itervalues():
            for code in page:
                if CodePage.PageAddress(code.start_address) == page.address:
                    yield code

    def UsedCode(self):
        # Only code objects that received ticks (directly or via callees).
        for code in self.AllCode():
            if code.IsUsed():
                yield code

    def Print(self):
        for code in self.AllCode():
            print code

    def Find(self, pc):
        # Locate the code object containing pc, or None.
        if pc < self.min_address or pc >= self.max_address:
            return None
        page_id = CodePage.PageId(pc)
        if page_id not in self.pages:
            return None
        return self.pages[page_id].Find(pc)
class CodeInfo(object):
    """Generic info about generated code objects: target architecture
    name and the code object header size."""

    def __init__(self, arch, header_size):
        self.header_size = header_size
        self.arch = arch
class LogReader(object):
    """V8 low-level (binary) log reader."""

    # Pointer width depends on the architecture named in the log header.
    _ARCH_TO_POINTER_TYPE_MAP = {
        "ia32": ctypes.c_uint32,
        "arm": ctypes.c_uint32,
        "mips": ctypes.c_uint32,
        "x64": ctypes.c_uint64,
        "arm64": ctypes.c_uint64
    }

    # One-byte event tags written by V8's low-level logger.
    _CODE_CREATE_TAG = "C"
    _CODE_MOVE_TAG = "M"
    _CODE_MOVING_GC_TAG = "G"

    def __init__(self, log_name, code_map):
        self.log_file = open(log_name, "r")
        # Map the whole log privately; records are consumed by offset.
        self.log = mmap.mmap(self.log_file.fileno(), 0, mmap.MAP_PRIVATE)
        self.log_pos = 0
        self.code_map = code_map

        # The log starts with a NUL-terminated architecture name.
        self.arch = self.log[:self.log.find("\0")]
        self.log_pos += len(self.arch) + 1
        assert self.arch in LogReader._ARCH_TO_POINTER_TYPE_MAP, \
            "Unsupported architecture %s" % self.arch
        pointer_type = LogReader._ARCH_TO_POINTER_TYPE_MAP[self.arch]

        # Fixed-size binary record layouts used by ReadUpToGC.
        self.code_create_struct = LogReader._DefineStruct([
            ("name_size", ctypes.c_int32),
            ("code_address", pointer_type),
            ("code_size", ctypes.c_int32)])

        self.code_move_struct = LogReader._DefineStruct([
            ("from_address", pointer_type),
            ("to_address", pointer_type)])

        self.code_delete_struct = LogReader._DefineStruct([
            ("address", pointer_type)])

    def ReadUpToGC(self):
        """Replay code create/move events until a moving-GC tag (or EOF)."""
        while self.log_pos < self.log.size():
            tag = self.log[self.log_pos]
            self.log_pos += 1

            if tag == LogReader._CODE_MOVING_GC_TAG:
                return

            if tag == LogReader._CODE_CREATE_TAG:
                event = self.code_create_struct.from_buffer(self.log, self.log_pos)
                self.log_pos += ctypes.sizeof(event)
                start_address = event.code_address
                end_address = start_address + event.code_size
                name = self.log[self.log_pos:self.log_pos + event.name_size]
                origin = JS_ORIGIN
                self.log_pos += event.name_size
                # The machine code itself follows the name; remember its
                # log offset so it can be disassembled later.
                origin_offset = self.log_pos
                self.log_pos += event.code_size
                code = Code(name, start_address, end_address, origin, origin_offset)
                conficting_code = self.code_map.Find(start_address)
                if conficting_code:
                    # A different address range means stale data from a
                    # previous object: evict it. The same range means a
                    # duplicate entry: merge the names and skip.
                    if not (conficting_code.start_address == code.start_address and
                        conficting_code.end_address == code.end_address):
                        self.code_map.Remove(conficting_code)
                    else:
                        LogReader._HandleCodeConflict(conficting_code, code)
                        # TODO(vitalyr): this warning is too noisy because of our
                        # attempts to reconstruct code log from the snapshot.
                        # print >>sys.stderr, \
                        #     "Warning: Skipping duplicate code log entry %s" % code
                        continue
                self.code_map.Add(code)
                continue

            if tag == LogReader._CODE_MOVE_TAG:
                event = self.code_move_struct.from_buffer(self.log, self.log_pos)
                self.log_pos += ctypes.sizeof(event)
                old_start_address = event.from_address
                new_start_address = event.to_address
                if old_start_address == new_start_address:
                    # Skip useless code move entries.
                    continue
                code = self.code_map.Find(old_start_address)
                if not code:
                    print >>sys.stderr, "Warning: Not found %x" % old_start_address
                    continue
                assert code.start_address == old_start_address, \
                    "Inexact move address %x for %s" % (old_start_address, code)
                # Re-register the code object at its new address range.
                self.code_map.Remove(code)
                size = code.end_address - code.start_address
                code.start_address = new_start_address
                code.end_address = new_start_address + size
                self.code_map.Add(code)
                continue

            assert False, "Unknown tag %s" % tag

    def Dispose(self):
        self.log.close()
        self.log_file.close()

    @staticmethod
    def _DefineStruct(fields):
        # Create an ad-hoc ctypes structure with the given fields.
        class Struct(ctypes.Structure):
            _fields_ = fields
        return Struct

    @staticmethod
    def _HandleCodeConflict(old_code, new_code):
        # Duplicate create entry for the same range: collect the alias.
        assert (old_code.start_address == new_code.start_address and
                old_code.end_address == new_code.end_address), \
            "Conficting code log entries %s and %s" % (old_code, new_code)
        if old_code.name == new_code.name:
            return
        # Code object may be shared by a few functions. Collect the full
        # set of names.
        old_code.AddName(new_code.name)
class Descriptor(object):
    """Describes one fixed-layout record in the binary trace log."""

    # Short format codes used in field specs -> ctypes integer types.
    CTYPE_MAP = {
        "u16": ctypes.c_uint16,
        "u32": ctypes.c_uint32,
        "u64": ctypes.c_uint64
    }

    def __init__(self, fields):
        # Build a ctypes Structure subclass matching the field spec.
        class TraceItem(ctypes.Structure):
            _fields_ = Descriptor.CtypesFields(fields)

            def __str__(self):
                rendered = ("%s: %s" % (name, self.__getattribute__(name))
                            for name, _ in TraceItem._fields_)
                return ", ".join(rendered)

        self.ctype = TraceItem

    def Read(self, trace, offset):
        """Overlay the structure on *trace* at *offset* (zero-copy)."""
        return self.ctype.from_buffer(trace, offset)

    @staticmethod
    def CtypesFields(fields):
        # Translate (name, format-code) pairs into ctypes field specs.
        return [(name, Descriptor.CTYPE_MAP[fmt]) for (name, fmt) in fields]
# Record layouts of the perf trace file, mirrored from the kernel's perf
# tool sources.
# Please see http://git.kernel.org/?p=linux/kernel/git/torvalds/linux-2.6.git;a=tree;f=tools/perf
# for the gory details.
# Reference: struct perf_file_header in kernel/tools/perf/util/header.h
TRACE_HEADER_DESC = Descriptor([
    ("magic", "u64"),
    ("size", "u64"),
    ("attr_size", "u64"),
    ("attrs_offset", "u64"),
    ("attrs_size", "u64"),
    ("data_offset", "u64"),
    ("data_size", "u64"),
    ("event_types_offset", "u64"),
    ("event_types_size", "u64")
])

# Reference: /usr/include/linux/perf_event.h
PERF_EVENT_ATTR_DESC = Descriptor([
    ("type", "u32"),
    ("size", "u32"),
    ("config", "u64"),
    ("sample_period_or_freq", "u64"),
    ("sample_type", "u64"),
    ("read_format", "u64"),
    ("flags", "u64"),
    ("wakeup_events_or_watermark", "u32"),
    ("bp_type", "u32"),
    ("bp_addr", "u64"),
    ("bp_len", "u64")
])

# Reference: /usr/include/linux/perf_event.h
PERF_EVENT_HEADER_DESC = Descriptor([
    ("type", "u32"),
    ("misc", "u16"),
    ("size", "u16")
])

# Reference: kernel/tools/perf/util/event.h
PERF_MMAP_EVENT_BODY_DESC = Descriptor([
    ("pid", "u32"),
    ("tid", "u32"),
    ("addr", "u64"),
    ("len", "u64"),
    ("pgoff", "u64")
])

# Reference: kernel/tools/perf/util/event.h
PERF_MMAP2_EVENT_BODY_DESC = Descriptor([
    ("pid", "u32"),
    ("tid", "u32"),
    ("addr", "u64"),
    ("len", "u64"),
    ("pgoff", "u64"),
    ("maj", "u32"),
    ("min", "u32"),
    ("ino", "u64"),
    ("ino_generation", "u64"),
    ("prot", "u32"),
    ("flags","u32")
])

# perf_event_attr.sample_type bits control the set of
# perf_sample_event fields.
PERF_SAMPLE_IP = 1 << 0
PERF_SAMPLE_TID = 1 << 1
PERF_SAMPLE_TIME = 1 << 2
PERF_SAMPLE_ADDR = 1 << 3
PERF_SAMPLE_READ = 1 << 4
PERF_SAMPLE_CALLCHAIN = 1 << 5
PERF_SAMPLE_ID = 1 << 6
PERF_SAMPLE_CPU = 1 << 7
PERF_SAMPLE_PERIOD = 1 << 8
PERF_SAMPLE_STREAM_ID = 1 << 9
PERF_SAMPLE_RAW = 1 << 10

# Reference: /usr/include/perf_event.h, the comment for PERF_RECORD_SAMPLE.
# A field is present in a sample only when its sample_type bit is set
# (see TraceReader._SampleEventBodyDesc).
PERF_SAMPLE_EVENT_BODY_FIELDS = [
    ("ip", "u64", PERF_SAMPLE_IP),
    ("pid", "u32", PERF_SAMPLE_TID),
    ("tid", "u32", PERF_SAMPLE_TID),
    ("time", "u64", PERF_SAMPLE_TIME),
    ("addr", "u64", PERF_SAMPLE_ADDR),
    ("id", "u64", PERF_SAMPLE_ID),
    ("stream_id", "u64", PERF_SAMPLE_STREAM_ID),
    ("cpu", "u32", PERF_SAMPLE_CPU),
    ("res", "u32", PERF_SAMPLE_CPU),
    ("period", "u64", PERF_SAMPLE_PERIOD),
    # Don't want to handle read format that comes after the period and
    # before the callchain and has variable size.
    ("nr", "u64", PERF_SAMPLE_CALLCHAIN)
    # Raw data follows the callchain and is ignored.
]

# Format of each callchain entry that follows the sample body.
PERF_SAMPLE_EVENT_IP_FORMAT = "u64"

# perf_event_header.type values handled by the main loop (PERF_RECORD_*).
PERF_RECORD_MMAP = 1
PERF_RECORD_MMAP2 = 10
PERF_RECORD_SAMPLE = 9
class TraceReader(object):
    """Perf (linux-2.6/tools/perf) trace file reader."""

    # Expected value of perf_file_header.magic.
    _TRACE_HEADER_MAGIC = 4993446653023372624

    def __init__(self, trace_name):
        self.trace_file = open(trace_name, "r")
        # Map the whole trace privately; records are read by offset.
        self.trace = mmap.mmap(self.trace_file.fileno(), 0, mmap.MAP_PRIVATE)
        self.trace_header = TRACE_HEADER_DESC.Read(self.trace, 0)
        if self.trace_header.magic != TraceReader._TRACE_HEADER_MAGIC:
            print >>sys.stderr, "Warning: unsupported trace header magic"
        # Event records live in [data_offset, data_offset + data_size).
        self.offset = self.trace_header.data_offset
        self.limit = self.trace_header.data_offset + self.trace_header.data_size
        assert self.limit <= self.trace.size(), \
            "Trace data limit exceeds trace file size"
        self.header_size = ctypes.sizeof(PERF_EVENT_HEADER_DESC.ctype)
        assert self.trace_header.attrs_size != 0, \
            "No perf event attributes found in the trace"
        # The event attributes determine which optional sample fields
        # are present in each sample record.
        perf_event_attr = PERF_EVENT_ATTR_DESC.Read(self.trace,
                                                    self.trace_header.attrs_offset)
        self.sample_event_body_desc = self._SampleEventBodyDesc(
            perf_event_attr.sample_type)
        self.callchain_supported = \
            (perf_event_attr.sample_type & PERF_SAMPLE_CALLCHAIN) != 0
        if self.callchain_supported:
            self.ip_struct = Descriptor.CTYPE_MAP[PERF_SAMPLE_EVENT_IP_FORMAT]
            self.ip_size = ctypes.sizeof(self.ip_struct)

    def ReadEventHeader(self):
        # Return (header, offset) for the next event, or (None, 0) at
        # the end of the data section.
        if self.offset >= self.limit:
            return None, 0
        offset = self.offset
        header = PERF_EVENT_HEADER_DESC.Read(self.trace, self.offset)
        self.offset += header.size
        return header, offset

    def ReadMmap(self, header, offset):
        mmap_info = PERF_MMAP_EVENT_BODY_DESC.Read(self.trace,
                                                   offset + self.header_size)
        # Read null-terminated filename.
        filename = self.trace[offset + self.header_size + ctypes.sizeof(mmap_info):
                              offset + header.size]
        mmap_info.filename = HOST_ROOT + filename[:filename.find(chr(0))]
        return mmap_info

    def ReadMmap2(self, header, offset):
        mmap_info = PERF_MMAP2_EVENT_BODY_DESC.Read(self.trace,
                                                    offset + self.header_size)
        # Read null-terminated filename.
        filename = self.trace[offset + self.header_size + ctypes.sizeof(mmap_info):
                              offset + header.size]
        mmap_info.filename = HOST_ROOT + filename[:filename.find(chr(0))]
        return mmap_info

    def ReadSample(self, header, offset):
        sample = self.sample_event_body_desc.Read(self.trace,
                                                  offset + self.header_size)
        if not self.callchain_supported:
            return sample
        # The variable-length callchain (sample.nr entries) follows the
        # fixed-size sample body.
        sample.ips = []
        offset += self.header_size + ctypes.sizeof(sample)
        for _ in xrange(sample.nr):
            sample.ips.append(
                self.ip_struct.from_buffer(self.trace, offset).value)
            offset += self.ip_size
        return sample

    def Dispose(self):
        self.trace.close()
        self.trace_file.close()

    def _SampleEventBodyDesc(self, sample_type):
        # Build a descriptor containing only the fields selected by
        # sample_type.
        assert (sample_type & PERF_SAMPLE_READ) == 0, \
            "Can't hande read format in samples"
        fields = [(field, format)
                  for (field, format, bit) in PERF_SAMPLE_EVENT_BODY_FIELDS
                  if (bit & sample_type) != 0]
        return Descriptor(fields)
# Section header lines in "objdump -h" output; group(1) is the section
# name including the leading dot (e.g. ".text").
OBJDUMP_SECTION_HEADER_RE = re.compile(
    r"^\s*\d+\s(\.\S+)\s+[a-f0-9]")
# Symbol table lines in "objdump -t/-T" output:
# address, flags, section, size, optional ".hidden", name.
OBJDUMP_SYMBOL_LINE_RE = re.compile(
    r"^([a-f0-9]+)\s(.{7})\s(\S+)\s+([a-f0-9]+)\s+(?:\.hidden\s+)?(.*)$")
OBJDUMP_DYNAMIC_SYMBOLS_START_RE = re.compile(
    r"^DYNAMIC SYMBOL TABLE")
# Libraries that should not be objdump'ed.
OBJDUMP_SKIP_RE = re.compile(
    r"^.*ld\.so\.cache$")
KERNEL_ALLSYMS_FILE = "/proc/kallsyms"
# Perf mmap entries that stand for the kernel symbol table.
PERF_KERNEL_ALLSYMS_RE = re.compile(
    r".*kallsyms.*")
# Lines of /proc/kallsyms: "address t|T name" (text symbols only).
KERNEL_ALLSYMS_LINE_RE = re.compile(
    r"^([a-f0-9]+)\s(?:t|T)\s(\S+)$")
class LibraryRepo(object):
    """Set of native libraries (perf mmap entries) and their symbols."""

    def __init__(self):
        # All non-skipped mmap infos, most recently hit first (see Tick).
        self.infos = []
        # Unique display names handed out so far (see _UniqueMmapName).
        self.names = set()
        self.ticks = {}

    def HasDynamicSymbols(self, filename):
        """Return True if the file has a .dynsym section (objdump -h)."""
        # Kernel modules are handled via kallsyms, not dynamic symbols.
        if filename.endswith(".ko"): return False
        process = subprocess.Popen(
            "%s -h %s" % (OBJDUMP_BIN, filename),
            shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        pipe = process.stdout
        try:
            for line in pipe:
                match = OBJDUMP_SECTION_HEADER_RE.match(line)
                # Fix: the regex group includes the leading dot
                # (".dynsym"), so the old comparison against 'dynsym'
                # could never succeed and -T was never requested.
                if match and match.group(1) == '.dynsym': return True
        finally:
            pipe.close()
        assert process.wait() == 0, "Failed to objdump -h %s" % filename
        return False

    def Load(self, mmap_info, code_map, options):
        """Register one mmap entry and add its symbols to code_map."""
        # Skip kernel mmaps when requested using the fact that their tid
        # is 0.
        if mmap_info.tid == 0 and not options.kernel:
            return True
        if OBJDUMP_SKIP_RE.match(mmap_info.filename):
            return True
        if PERF_KERNEL_ALLSYMS_RE.match(mmap_info.filename):
            return self._LoadKernelSymbols(code_map)
        self.infos.append(mmap_info)
        mmap_info.ticks = 0
        mmap_info.unique_name = self._UniqueMmapName(mmap_info)
        if not os.path.exists(mmap_info.filename):
            return True
        # Request section headers (-h), symbols (-t), and dynamic symbols
        # (-T) from objdump.
        # Unfortunately, section headers span two lines, so we have to
        # keep the just seen section name (from the first line in each
        # section header) in the after_section variable.
        if self.HasDynamicSymbols(mmap_info.filename):
            dynamic_symbols = "-T"
        else:
            dynamic_symbols = ""
        process = subprocess.Popen(
            "%s -h -t %s -C %s" % (OBJDUMP_BIN, dynamic_symbols, mmap_info.filename),
            shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        pipe = process.stdout
        after_section = None
        code_sections = set()
        reloc_sections = set()
        dynamic = False
        try:
            for line in pipe:
                if after_section:
                    # Second line of a section header: flags such as
                    # CODE and RELOC.
                    if line.find("CODE") != -1:
                        code_sections.add(after_section)
                    if line.find("RELOC") != -1:
                        reloc_sections.add(after_section)
                    after_section = None
                    continue

                match = OBJDUMP_SECTION_HEADER_RE.match(line)
                if match:
                    after_section = match.group(1)
                    continue

                if OBJDUMP_DYNAMIC_SYMBOLS_START_RE.match(line):
                    dynamic = True
                    continue

                match = OBJDUMP_SYMBOL_LINE_RE.match(line)
                if match:
                    start_address = int(match.group(1), 16)
                    origin_offset = start_address
                    flags = match.group(2)
                    section = match.group(3)
                    if section in code_sections:
                        # Dynamic/relocatable symbols are relative to
                        # the mmap base address.
                        if dynamic or section in reloc_sections:
                            start_address += mmap_info.addr
                        size = int(match.group(4), 16)
                        name = match.group(5)
                        origin = mmap_info.filename
                        code_map.Add(Code(name, start_address, start_address + size,
                                          origin, origin_offset))
        finally:
            pipe.close()
        assert process.wait() == 0, "Failed to objdump %s" % mmap_info.filename

    def Tick(self, pc):
        """Attribute one sample to the library containing pc.

        A hit is moved to the front of the list to keep lookups of hot
        libraries cheap. Returns True if some library covered pc.
        """
        for i, mmap_info in enumerate(self.infos):
            if mmap_info.addr <= pc < (mmap_info.addr + mmap_info.len):
                mmap_info.ticks += 1
                self.infos[0], self.infos[i] = mmap_info, self.infos[0]
                return True
        return False

    def _UniqueMmapName(self, mmap_info):
        # Disambiguate repeated mmaps of the same file with a suffix.
        name = mmap_info.filename
        index = 1
        while name in self.names:
            name = "%s-%d" % (mmap_info.filename, index)
            index += 1
        self.names.add(name)
        return name

    def _LoadKernelSymbols(self, code_map):
        """Add kernel text symbols from /proc/kallsyms to code_map."""
        if not os.path.exists(KERNEL_ALLSYMS_FILE):
            print >>sys.stderr, "Warning: %s not found" % KERNEL_ALLSYMS_FILE
            return False
        kallsyms = open(KERNEL_ALLSYMS_FILE, "r")
        code = None
        for line in kallsyms:
            match = KERNEL_ALLSYMS_LINE_RE.match(line)
            if match:
                start_address = int(match.group(1), 16)
                end_address = start_address
                name = match.group(2)
                if code:
                    # A symbol's end is the next symbol's start; cap at
                    # 16 pages to keep huge ranges from flooding the map.
                    code.end_address = start_address
                    code_map.Add(code, 16)
                # NOTE(review): the final symbol in the file is never
                # added to code_map (its end address stays unknown).
                code = Code(name, start_address, end_address, "kernel", 0)
        return True
def PrintReport(code_map, library_repo, arch, ticks, options):
    """Print the flat tick profile per symbol, then per library."""
    print "Ticks per symbol:"
    used_code = [code for code in code_map.UsedCode()]
    used_code.sort(key=lambda x: x.self_ticks, reverse=True)
    for i, code in enumerate(used_code):
        code_ticks = code.self_ticks
        print "%10d %5.1f%% %s [%s]" % (code_ticks, 100. * code_ticks / ticks,
                                        code.FullName(), code.origin)
        # Annotated disassembly for the top symbols (--disasm-top /
        # --disasm-all).
        if options.disasm_all or i < options.disasm_top:
            code.PrintAnnotated(arch, options)
    print
    print "Ticks per library:"
    mmap_infos = [m for m in library_repo.infos if m.ticks > 0]
    mmap_infos.sort(key=lambda m: m.ticks, reverse=True)
    for mmap_info in mmap_infos:
        mmap_ticks = mmap_info.ticks
        print "%10d %5.1f%% %s" % (mmap_ticks, 100. * mmap_ticks / ticks,
                                   mmap_info.unique_name)
def PrintDot(code_map, options):
    """Emit a Graphviz (dot) call graph of the used code objects."""
    print "digraph G {"
    for code in code_map.UsedCode():
        # Prune rarely sampled nodes to keep the graph readable.
        if code.self_ticks < 10:
            continue
        print "n%d [shape=box,label=\"%s\"];" % (code.id, code.name)
        # Edges are weighted by the number of callee ticks.
        if code.callee_ticks:
            for callee, ticks in code.callee_ticks.iteritems():
                print "n%d -> n%d [label=\"%d\"];" % (code.id, callee.id, ticks)
    print "}"
if __name__ == "__main__":
    # Command-line interface.
    parser = optparse.OptionParser(USAGE)
    parser.add_option("--log",
                      default="v8.log",
                      help="V8 log file name [default: %default]")
    parser.add_option("--trace",
                      default="perf.data",
                      help="perf trace file name [default: %default]")
    parser.add_option("--kernel",
                      default=False,
                      action="store_true",
                      help="process kernel entries [default: %default]")
    parser.add_option("--disasm-top",
                      default=0,
                      type="int",
                      help=("number of top symbols to disassemble and annotate "
                            "[default: %default]"))
    parser.add_option("--disasm-all",
                      default=False,
                      action="store_true",
                      help=("disassemble and annotate all used symbols "
                            "[default: %default]"))
    parser.add_option("--dot",
                      default=False,
                      action="store_true",
                      help="produce dot output (WIP) [default: %default]")
    parser.add_option("--quiet", "-q",
                      default=False,
                      action="store_true",
                      help="no auxiliary messages [default: %default]")
    parser.add_option("--gc-fake-mmap",
                      default="/tmp/__v8_gc__",
                      help="gc fake mmap file [default: %default]")
    parser.add_option("--objdump",
                      default="/usr/bin/objdump",
                      help="objdump tool to use [default: %default]")
    parser.add_option("--host-root",
                      default="",
                      help="Path to the host root [default: %default]")
    options, args = parser.parse_args()

    if not options.quiet:
        print "V8 log: %s, %s.ll" % (options.log, options.log)
        print "Perf trace file: %s" % options.trace

    # Module-level globals consumed by TraceReader.ReadMmap*/LibraryRepo.
    V8_GC_FAKE_MMAP = options.gc_fake_mmap
    HOST_ROOT = options.host_root
    if os.path.exists(options.objdump):
        disasm.OBJDUMP_BIN = options.objdump
        OBJDUMP_BIN = options.objdump
    else:
        print "Cannot find %s, falling back to default objdump" % options.objdump

    # Stats.
    events = 0
    ticks = 0
    missed_ticks = 0
    really_missed_ticks = 0
    optimized_ticks = 0
    generated_ticks = 0
    v8_internal_ticks = 0
    mmap_time = 0
    sample_time = 0

    # Initialize the log reader.
    code_map = CodeMap()
    log_reader = LogReader(log_name=options.log + ".ll",
                           code_map=code_map)
    if not options.quiet:
        print "Generated code architecture: %s" % log_reader.arch
        print
        sys.stdout.flush()

    # Process the code and trace logs.
    library_repo = LibraryRepo()
    # Load the code objects that exist before the first moving GC.
    log_reader.ReadUpToGC()
    trace_reader = TraceReader(options.trace)
    while True:
        header, offset = trace_reader.ReadEventHeader()
        if not header:
            break
        events += 1
        if header.type == PERF_RECORD_MMAP:
            start = time.time()
            mmap_info = trace_reader.ReadMmap(header, offset)
            # An mmap of the gc fake file marks a moving GC: re-sync the
            # code log up to the next GC tag.
            if mmap_info.filename == HOST_ROOT + V8_GC_FAKE_MMAP:
                log_reader.ReadUpToGC()
            else:
                library_repo.Load(mmap_info, code_map, options)
            mmap_time += time.time() - start
        elif header.type == PERF_RECORD_MMAP2:
            start = time.time()
            mmap_info = trace_reader.ReadMmap2(header, offset)
            if mmap_info.filename == HOST_ROOT + V8_GC_FAKE_MMAP:
                log_reader.ReadUpToGC()
            else:
                library_repo.Load(mmap_info, code_map, options)
            mmap_time += time.time() - start
        elif header.type == PERF_RECORD_SAMPLE:
            ticks += 1
            start = time.time()
            sample = trace_reader.ReadSample(header, offset)
            # Attribute the tick to a code object by its pc.
            code = code_map.Find(sample.ip)
            if code:
                code.Tick(sample.ip)
                if code.codetype == Code.OPTIMIZED:
                    optimized_ticks += 1
                elif code.codetype == Code.FULL_CODEGEN:
                    generated_ticks += 1
                elif code.codetype == Code.V8INTERNAL:
                    v8_internal_ticks += 1
            else:
                missed_ticks += 1
            # A tick covered by neither a symbol nor a library is
            # counted as really missed.
            if not library_repo.Tick(sample.ip) and not code:
                really_missed_ticks += 1
            if trace_reader.callchain_supported:
                # Walk the callchain, crediting each caller with a
                # callee tick for the frame below it.
                for ip in sample.ips:
                    caller_code = code_map.Find(ip)
                    if caller_code:
                        if code:
                            caller_code.CalleeTick(code)
                        code = caller_code
            sample_time += time.time() - start

    if options.dot:
        PrintDot(code_map, options)
    else:
        PrintReport(code_map, library_repo, log_reader.arch, ticks, options)

    if not options.quiet:
        def PrintTicks(number, total, description):
            print("%10d %5.1f%% ticks in %s" %
                  (number, 100.0*number/total, description))
        print
        print "Stats:"
        print "%10d total trace events" % events
        print "%10d total ticks" % ticks
        print "%10d ticks not in symbols" % missed_ticks
        unaccounted = "unaccounted ticks"
        if really_missed_ticks > 0:
            unaccounted += " (probably in the kernel, try --kernel)"
        PrintTicks(really_missed_ticks, ticks, unaccounted)
        PrintTicks(optimized_ticks, ticks, "ticks in optimized code")
        PrintTicks(generated_ticks, ticks, "ticks in other lazily compiled code")
        PrintTicks(v8_internal_ticks, ticks, "ticks in v8::internal::*")
        print "%10d total symbols" % len([c for c in code_map.AllCode()])
        print "%10d used symbols" % len([c for c in code_map.UsedCode()])
        print "%9.2fs library processing time" % mmap_time
        print "%9.2fs tick processing time" % sample_time

    log_reader.Dispose()
    trace_reader.Dispose()
| |
from mangopaysdk.entities.card import Card
from mangopaysdk.entities.user import User
from mangopaysdk.entities.bankaccount import BankAccount
from mangopaysdk.entities.refund import Refund
from mangopaysdk.entities.kycdocument import KycDocument
from mangopaysdk.entities.kycpage import KycPage
from mangopaysdk.entities.wallet import Wallet
from mangopaysdk.entities.cardregistration import CardRegistration
from mangopaysdk.entities.payout import PayOut
from mangopaysdk.entities.payin import PayIn
from mangopaysdk.entities.transfer import Transfer
from mangopaysdk.types.money import Money
from mangopaysdk.types.payinexecutiondetailsdirect import (
PayInExecutionDetailsDirect)
from mangopaysdk.types.payinpaymentdetailsbankwire import (
PayInPaymentDetailsBankWire)
class MockMangoPayApi():
    """Stand-in for the MangoPay API client used in tests.

    Each sub-API is replaced by a mock that hands back the ids supplied
    here instead of talking to the real service.
    """

    def __init__(self, user_id=None, bank_account_id=None,
                 card_registration_id=None, card_id=None,
                 document_id=None, wallet_id=None, refund_id=None,
                 pay_out_id=None, pay_in_id=None, transfer_id=None):
        self.users = MockUserApi(user_id, bank_account_id, document_id)
        self.cards = MockCardApi(card_id)
        self.cardRegistrations = MockCardRegistrationApi(
            card_registration_id, card_id)
        self.wallets = MockWalletApi(wallet_id)
        self.payIns = MockPayInApi(refund_id, pay_in_id)
        self.payOuts = MockPayOutApi(pay_out_id)
        self.transfers = MockTransferApi(transfer_id)
class MockUserApi():
    """Mock of the MangoPay users API.

    Returns the entities it is given, stamped with the ids configured
    at construction time, instead of performing network calls.
    """

    def __init__(self, user_id, bank_account_id, document_id):
        self.user_id = user_id
        self.bank_account_id = bank_account_id
        self.document_id = document_id

    def Create(self, user):
        """Assign the configured id to *user* and return it."""
        if not isinstance(user, User):
            raise TypeError("User must be a User Entity")
        user.Id = self.user_id
        return user

    def CreateBankAccount(self, user_id, bank_account):
        """Assign the configured id to *bank_account* and return it."""
        # Fixed typo in the error message ("Arguements").
        if not (isinstance(bank_account, BankAccount)
                and isinstance(user_id, str)):
            raise TypeError("Arguments are the wrong types")
        bank_account.Id = self.bank_account_id
        return bank_account

    def Update(self, user):
        """Return *user* unchanged; it must already carry an Id."""
        if not (isinstance(user, User) and user.Id):
            raise TypeError("User must be a User Entity with an Id")
        return user

    def CreateUserKycDocument(self, document, user_id):
        """Stamp *document* with the configured id and CREATED status."""
        if not isinstance(document, KycDocument):
            raise TypeError("Document must be a KycDocument entity")
        document.Id = self.document_id
        document.Status = "CREATED"
        return document

    def GetUserKycDocument(self, document_id, user_id):
        """Return a fresh KycDocument in the VALIDATED state."""
        document = KycDocument()
        document.Id = document_id
        document.Status = "VALIDATED"
        return document

    def UpdateUserKycDocument(self, document, user_id, document_id):
        """Return *document* if it matches *document_id* and has been
        submitted for validation."""
        # Fix: raise TypeError instead of BaseException (consistent with
        # the other methods, and still caught by handlers expecting
        # BaseException); also fixed the "Arguements" typo.
        if not (isinstance(document, KycDocument)
                and document.Id == document_id
                and document.Status == "VALIDATION_ASKED"):
            raise TypeError("Arguments are of the wrong types")
        return document

    def CreateUserKycPage(self, page, user_id, document_id):
        """Accept a KycPage; the mock stores nothing and returns None."""
        if not isinstance(page, KycPage):
            raise TypeError("Page must be a KycPage")
class MockCardApi():
    """Mock of the MangoPay cards API."""

    def __init__(self, card_id):
        self.card_id = card_id

    def Get(self, card_id):
        """Return a canned, valid, active card carrying *card_id*."""
        mock_card = Card(id=card_id)
        mock_card.Validity = "VALID"
        mock_card.Active = True
        mock_card.ExpirationDate = "1018"
        mock_card.Alias = "497010XXXXXX4414"
        return mock_card
class MockCardRegistrationApi():
    """Mock of the MangoPay card-registration API."""

    def __init__(self, card_registration_id, card_id=None):
        self.card_registration_id = card_registration_id
        self.card_id = card_id

    def Create(self, card_registration):
        """Stamp the configured registration id onto the entity."""
        if not isinstance(card_registration, CardRegistration):
            raise TypeError(
                "Card Registration must be a CardRegistration Entity")
        card_registration.Id = self.card_registration_id
        return card_registration

    def Update(self, card_registration):
        """Attach the configured card id, as the live API would."""
        if not isinstance(card_registration, CardRegistration):
            raise TypeError(
                "Card Registration must be a CardRegistration Entity")
        card_registration.CardId = self.card_id
        return card_registration

    def Get(self, card_registration_id):
        """Return a canned registration populated with dummy data."""
        registration = CardRegistration(card_registration_id)
        registration.CardRegistrationURL = "CardRegistrationURL"
        registration.AccessKey = "AccessKey"
        registration.PreregistrationData = "PreregistrationData"
        registration.RegistrationData = "data=RegistrationData"
        return registration
class MockWalletApi():
    """Mock of the MangoPay wallets API."""

    def __init__(self, wallet_id):
        self.wallet_id = wallet_id

    def Create(self, wallet):
        """Assign the configured id to a not-yet-persisted wallet."""
        if isinstance(wallet, Wallet) and not wallet.Id:
            wallet.Id = self.wallet_id
            return wallet
        else:
            raise TypeError("Wallet must be a Wallet Entity")

    def Get(self, wallet_id):
        """Return a wallet with a 100.00 EUR balance."""
        wallet = Wallet()
        # Fixed: the original set lowercase ``wallet.id``; MangoPay SDK
        # entities (and Create() above) use the capitalised ``Id``.
        wallet.Id = wallet_id
        wallet.Balance = Money(10000, currency="EUR")
        return wallet
class MockPayOutApi():
    """Mock of the MangoPay pay-outs API."""

    def __init__(self, pay_out_id):
        self.pay_out_id = pay_out_id

    def Create(self, pay_out):
        """Stamp the configured id and a dummy execution date."""
        if not (isinstance(pay_out, PayOut) and not pay_out.Id):
            raise TypeError("PayOut must be a PayOut Entity")
        pay_out.Id = self.pay_out_id
        pay_out.ExecutionDate = 12312312
        return pay_out

    def Get(self, pay_out_id):
        """Return a freshly-created pay-out carrying *pay_out_id*."""
        result = PayOut()
        result.Id = pay_out_id
        result.Status = "CREATED"
        result.ExecutionDate = 12312312
        return result
class MockPayInApi():
    """Mock of the MangoPay pay-ins API (including refunds)."""

    def __init__(self, refund_id, pay_in_id):
        self.refund_id = refund_id
        self.pay_in_id = pay_in_id

    def Create(self, pay_in):
        """Stamp the configured id plus canned bank-wire payment details."""
        if not (isinstance(pay_in, PayIn) and not pay_in.Id):
            raise TypeError("PayIn must be a PayIn entity")
        pay_in.Id = self.pay_in_id
        pay_in.ExecutionDate = 12312312
        details = PayInPaymentDetailsBankWire()
        details.WireReference = '4a57980154'
        details.BankAccount = BankAccount()
        details.BankAccount.IBAN = "FR7618829754160173622224251"
        details.BankAccount.BIC = "CMBRFR2BCME"
        pay_in.PaymentDetails = details
        return pay_in

    def Get(self, pay_in_id):
        """Return a succeeded pay-in with direct execution details."""
        pay_in = PayIn()
        pay_in.Id = pay_in_id
        pay_in.Status = "SUCCEEDED"
        pay_in.ExecutionDate = 12312312
        details = PayInExecutionDetailsDirect()
        details.SecureModeRedirectURL = "https://test.com"
        pay_in.ExecutionDetails = details
        return pay_in

    def CreateRefund(self, pay_in_id, refund):
        """Return *refund* marked as succeeded, stamped with the refund id."""
        if not (isinstance(refund, Refund) and pay_in_id):
            raise TypeError("Refund must be a refund entity")
        refund.Id = self.refund_id
        refund.ExecutionDate = 12312312
        refund.Status = "SUCCEEDED"
        return refund
class MockTransferApi():
    """Mock of the MangoPay transfers API."""

    def __init__(self, transfer_id):
        self.transfer_id = transfer_id

    def Create(self, transfer):
        """Assign the configured id to a not-yet-persisted transfer."""
        if isinstance(transfer, Transfer) and not transfer.Id:
            transfer.Id = self.transfer_id
            return transfer
        else:
            # Fixed: the original silently fell through and returned None on
            # invalid input; raise TypeError like every other mock here.
            raise TypeError("Transfer must be a Transfer Entity")

    def Get(self, transfer_id):
        """Return a succeeded transfer carrying *transfer_id*."""
        transfer = Transfer()
        transfer.Id = transfer_id
        transfer.ExecutionDate = 12312312
        transfer.Status = "SUCCEEDED"
        return transfer
| |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from lxml import etree as ET
except ImportError:
from xml.etree import ElementTree as ET
import sys
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.dimensiondata import DimensionDataAPIException, NetworkDomainServicePlan
from libcloud.common.dimensiondata import DimensionDataServerCpuSpecification, DimensionDataServerDisk
from libcloud.compute.drivers.dimensiondata import DimensionDataNodeDriver as DimensionData
from libcloud.compute.base import Node, NodeAuthPassword, NodeLocation
from libcloud.test import MockHttp, unittest
from libcloud.test.compute import TestCaseMixin
from libcloud.test.file_fixtures import ComputeFileFixtures
from libcloud.test.secrets import DIMENSIONDATA_PARAMS
class DimensionDataTests(unittest.TestCase, TestCaseMixin):
    """Tests for the DimensionData compute driver, served entirely from the
    canned XML fixtures behind ``DimensionDataMockHttp``.

    Fixes applied: three ``assertTrue(x, y)`` calls were replaced with
    ``assertEqual(x, y)`` — ``assertTrue``'s second positional argument is
    the failure *message*, so those assertions could never fail.
    """

    def setUp(self):
        DimensionData.connectionCls.conn_classes = (None, DimensionDataMockHttp)
        DimensionDataMockHttp.type = None
        self.driver = DimensionData(*DIMENSIONDATA_PARAMS)

    def test_invalid_region(self):
        with self.assertRaises(ValueError):
            DimensionData(*DIMENSIONDATA_PARAMS, region='blah')

    def test_invalid_creds(self):
        DimensionDataMockHttp.type = 'UNAUTHORIZED'
        with self.assertRaises(InvalidCredsError):
            self.driver.list_nodes()

    def test_list_locations_response(self):
        DimensionDataMockHttp.type = None
        ret = self.driver.list_locations()
        self.assertEqual(len(ret), 5)
        first_loc = ret[0]
        self.assertEqual(first_loc.id, 'NA3')
        self.assertEqual(first_loc.name, 'US - West')
        self.assertEqual(first_loc.country, 'US')

    def test_list_nodes_response(self):
        DimensionDataMockHttp.type = None
        ret = self.driver.list_nodes()
        self.assertEqual(len(ret), 7)

    def test_server_states(self):
        DimensionDataMockHttp.type = None
        ret = self.driver.list_nodes()
        self.assertTrue(ret[0].state == 'running')
        self.assertTrue(ret[1].state == 'starting')
        self.assertTrue(ret[2].state == 'stopping')
        self.assertTrue(ret[3].state == 'reconfiguring')
        self.assertTrue(ret[4].state == 'running')
        self.assertTrue(ret[5].state == 'terminated')
        self.assertTrue(ret[6].state == 'stopped')
        self.assertEqual(len(ret), 7)
        node = ret[0]
        self.assertTrue(isinstance(node.extra['disks'], list))
        self.assertTrue(isinstance(node.extra['disks'][0], DimensionDataServerDisk))
        self.assertTrue(isinstance(ret[0].extra['disks'], list))
        self.assertTrue(isinstance(ret[0].extra['disks'][0], DimensionDataServerDisk))
        self.assertEqual(ret[0].extra['disks'][0].size_gb, 10)
        self.assertTrue(isinstance(ret[1].extra['disks'], list))
        self.assertTrue(isinstance(ret[1].extra['disks'][0], DimensionDataServerDisk))
        self.assertEqual(ret[1].extra['disks'][0].size_gb, 10)

    def test_list_nodes_response_PAGINATED(self):
        DimensionDataMockHttp.type = 'PAGINATED'
        ret = self.driver.list_nodes()
        self.assertEqual(len(ret), 9)

    # We're making sure here the filters make it to the URL
    # See _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_ALLFILTERS for asserts
    def test_list_nodes_response_strings_ALLFILTERS(self):
        DimensionDataMockHttp.type = 'ALLFILTERS'
        ret = self.driver.list_nodes(ex_location='fake_loc', ex_name='fake_name',
                                     ex_ipv6='fake_ipv6', ex_ipv4='fake_ipv4', ex_vlan='fake_vlan',
                                     ex_image='fake_image', ex_deployed=True,
                                     ex_started=True, ex_state='fake_state',
                                     ex_network='fake_network', ex_network_domain='fake_network_domain')
        self.assertTrue(isinstance(ret, list))

    def test_list_nodes_response_LOCATION(self):
        DimensionDataMockHttp.type = None
        ret = self.driver.list_locations()
        first_loc = ret[0]
        ret = self.driver.list_nodes(ex_location=first_loc)
        for node in ret:
            self.assertEqual(node.extra['datacenterId'], 'NA3')

    def test_list_nodes_response_LOCATION_STR(self):
        DimensionDataMockHttp.type = None
        ret = self.driver.list_nodes(ex_location='NA3')
        for node in ret:
            self.assertEqual(node.extra['datacenterId'], 'NA3')

    def test_list_sizes_response(self):
        DimensionDataMockHttp.type = None
        ret = self.driver.list_sizes()
        self.assertEqual(len(ret), 1)
        size = ret[0]
        self.assertEqual(size.name, 'default')

    def test_reboot_node_response(self):
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        ret = node.reboot()
        self.assertTrue(ret is True)

    def test_reboot_node_response_INPROGRESS(self):
        DimensionDataMockHttp.type = 'INPROGRESS'
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        with self.assertRaises(DimensionDataAPIException):
            node.reboot()

    def test_destroy_node_response(self):
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        ret = node.destroy()
        self.assertTrue(ret is True)

    def test_destroy_node_response_RESOURCE_BUSY(self):
        DimensionDataMockHttp.type = 'INPROGRESS'
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        with self.assertRaises(DimensionDataAPIException):
            node.destroy()

    def test_list_images(self):
        images = self.driver.list_images()
        self.assertEqual(len(images), 3)
        self.assertEqual(images[0].name, 'RedHat 6 64-bit 2 CPU')
        self.assertEqual(images[0].id, 'c14b1a46-2428-44c1-9c1a-b20e6418d08c')
        self.assertEqual(images[0].extra['location'].id, 'NA9')
        self.assertEqual(images[0].extra['cpu'].cpu_count, 2)
        self.assertEqual(images[0].extra['OS_displayName'], 'REDHAT6/64')

    def test_ex_list_customer_images(self):
        images = self.driver.ex_list_customer_images()
        self.assertEqual(len(images), 3)
        self.assertEqual(images[0].name, 'ImportedCustomerImage')
        self.assertEqual(images[0].id, '5234e5c7-01de-4411-8b6e-baeb8d91cf5d')
        self.assertEqual(images[0].extra['location'].id, 'NA9')
        self.assertEqual(images[0].extra['cpu'].cpu_count, 4)
        self.assertEqual(images[0].extra['OS_displayName'], 'REDHAT6/64')

    def test_create_node_response(self):
        rootPw = NodeAuthPassword('pass123')
        image = self.driver.list_images()[0]
        network = self.driver.ex_list_networks()[0]
        node = self.driver.create_node(name='test2', image=image, auth=rootPw,
                                       ex_description='test2 node', ex_network=network,
                                       ex_is_started=False)
        self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
        self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')

    def test_create_node_response_STR(self):
        rootPw = 'pass123'
        image = self.driver.list_images()[0].id
        network = self.driver.ex_list_networks()[0].id
        node = self.driver.create_node(name='test2', image=image, auth=rootPw,
                                       ex_description='test2 node', ex_network=network,
                                       ex_is_started=False)
        self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
        self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')

    def test_create_node_response_network_domain(self):
        rootPw = NodeAuthPassword('pass123')
        location = self.driver.ex_get_location_by_id('NA9')
        image = self.driver.list_images(location=location)[0]
        network_domain = self.driver.ex_list_network_domains(location=location)[0]
        vlan = self.driver.ex_list_vlans(location=location)[0]
        cpu = DimensionDataServerCpuSpecification(
            cpu_count=4,
            cores_per_socket=1,
            performance='HIGHPERFORMANCE'
        )
        node = self.driver.create_node(name='test2', image=image, auth=rootPw,
                                       ex_description='test2 node',
                                       ex_network_domain=network_domain,
                                       ex_vlan=vlan,
                                       ex_is_started=False, ex_cpu_specification=cpu,
                                       ex_memory_gb=4)
        self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
        self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')

    def test_create_node_response_network_domain_STR(self):
        rootPw = NodeAuthPassword('pass123')
        location = self.driver.ex_get_location_by_id('NA9')
        image = self.driver.list_images(location=location)[0]
        network_domain = self.driver.ex_list_network_domains(location=location)[0].id
        vlan = self.driver.ex_list_vlans(location=location)[0].id
        cpu = DimensionDataServerCpuSpecification(
            cpu_count=4,
            cores_per_socket=1,
            performance='HIGHPERFORMANCE'
        )
        node = self.driver.create_node(name='test2', image=image, auth=rootPw,
                                       ex_description='test2 node',
                                       ex_network_domain=network_domain,
                                       ex_vlan=vlan,
                                       ex_is_started=False, ex_cpu_specification=cpu,
                                       ex_memory_gb=4)
        self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
        self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')

    def test_create_node_no_network(self):
        rootPw = NodeAuthPassword('pass123')
        image = self.driver.list_images()[0]
        with self.assertRaises(ValueError):
            self.driver.create_node(name='test2',
                                    image=image,
                                    auth=rootPw,
                                    ex_description='test2 node',
                                    ex_network=None,
                                    # NOTE(review): sibling tests spell this
                                    # kwarg ex_is_started; the camelCase name
                                    # here looks like a typo — confirm against
                                    # the driver signature before changing.
                                    ex_isStarted=False)

    def test_ex_shutdown_graceful(self):
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        ret = self.driver.ex_shutdown_graceful(node)
        self.assertTrue(ret is True)

    def test_ex_shutdown_graceful_INPROGRESS(self):
        DimensionDataMockHttp.type = 'INPROGRESS'
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        with self.assertRaises(DimensionDataAPIException):
            self.driver.ex_shutdown_graceful(node)

    def test_ex_start_node(self):
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        ret = self.driver.ex_start_node(node)
        self.assertTrue(ret is True)

    def test_ex_start_node_INPROGRESS(self):
        DimensionDataMockHttp.type = 'INPROGRESS'
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        with self.assertRaises(DimensionDataAPIException):
            self.driver.ex_start_node(node)

    def test_ex_power_off(self):
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        ret = self.driver.ex_power_off(node)
        self.assertTrue(ret is True)

    def test_ex_update_vm_tools(self):
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        ret = self.driver.ex_update_vm_tools(node)
        self.assertTrue(ret is True)

    def test_ex_power_off_INPROGRESS(self):
        DimensionDataMockHttp.type = 'INPROGRESS'
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        with self.assertRaises(DimensionDataAPIException):
            self.driver.ex_power_off(node)

    def test_ex_reset(self):
        node = Node(id='11', name=None, state=None,
                    public_ips=None, private_ips=None, driver=self.driver)
        ret = self.driver.ex_reset(node)
        self.assertTrue(ret is True)

    def test_ex_attach_node_to_vlan(self):
        node = self.driver.ex_get_node_by_id('e75ead52-692f-4314-8725-c8a4f4d13a87')
        vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
        ret = self.driver.ex_attach_node_to_vlan(node, vlan)
        self.assertTrue(ret is True)

    def test_ex_destroy_nic(self):
        node = self.driver.ex_destroy_nic('a202e51b-41c0-4cfc-add0-b1c62fc0ecf6')
        self.assertTrue(node)

    def test_list_networks(self):
        nets = self.driver.list_networks()
        self.assertEqual(nets[0].name, 'test-net1')
        self.assertTrue(isinstance(nets[0].location, NodeLocation))

    def test_ex_create_network(self):
        location = self.driver.ex_get_location_by_id('NA9')
        net = self.driver.ex_create_network(location, "Test Network", "test")
        self.assertEqual(net.id, "208e3a8e-9d2f-11e2-b29c-001517c4643e")
        self.assertEqual(net.name, "Test Network")

    def test_ex_create_network_NO_DESCRIPTION(self):
        location = self.driver.ex_get_location_by_id('NA9')
        net = self.driver.ex_create_network(location, "Test Network")
        self.assertEqual(net.id, "208e3a8e-9d2f-11e2-b29c-001517c4643e")
        self.assertEqual(net.name, "Test Network")

    def test_ex_delete_network(self):
        net = self.driver.ex_list_networks()[0]
        result = self.driver.ex_delete_network(net)
        self.assertTrue(result)

    def test_ex_rename_network(self):
        net = self.driver.ex_list_networks()[0]
        result = self.driver.ex_rename_network(net, "barry")
        self.assertTrue(result)

    def test_ex_create_network_domain(self):
        location = self.driver.ex_get_location_by_id('NA9')
        plan = NetworkDomainServicePlan.ADVANCED
        net = self.driver.ex_create_network_domain(location=location,
                                                   name='test',
                                                   description='test',
                                                   service_plan=plan)
        self.assertEqual(net.name, 'test')
        # Fixed: was assertTrue(net.id, '...') — second arg is just the
        # failure message, so the id was never actually compared.
        self.assertEqual(net.id, 'f14a871f-9a25-470c-aef8-51e13202e1aa')

    def test_ex_create_network_domain_NO_DESCRIPTION(self):
        location = self.driver.ex_get_location_by_id('NA9')
        plan = NetworkDomainServicePlan.ADVANCED
        net = self.driver.ex_create_network_domain(location=location,
                                                   name='test',
                                                   service_plan=plan)
        self.assertEqual(net.name, 'test')
        # Fixed: was assertTrue(net.id, '...') — see above.
        self.assertEqual(net.id, 'f14a871f-9a25-470c-aef8-51e13202e1aa')

    def test_ex_get_network_domain(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        self.assertEqual(net.id, '8cdfd607-f429-4df6-9352-162cfc0891be')
        self.assertEqual(net.description, 'test2')
        self.assertEqual(net.name, 'test')

    def test_ex_update_network_domain(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        net.name = 'new name'
        net2 = self.driver.ex_update_network_domain(net)
        self.assertEqual(net2.name, 'new name')

    def test_ex_delete_network_domain(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        result = self.driver.ex_delete_network_domain(net)
        self.assertTrue(result)

    def test_ex_list_networks(self):
        nets = self.driver.ex_list_networks()
        self.assertEqual(nets[0].name, 'test-net1')
        self.assertTrue(isinstance(nets[0].location, NodeLocation))

    def test_ex_list_network_domains(self):
        nets = self.driver.ex_list_network_domains()
        self.assertEqual(nets[0].name, 'Aurora')
        self.assertTrue(isinstance(nets[0].location, NodeLocation))

    def test_ex_list_vlans(self):
        vlans = self.driver.ex_list_vlans()
        self.assertEqual(vlans[0].name, "Primary")

    def test_ex_create_vlan(self,):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        vlan = self.driver.ex_create_vlan(network_domain=net,
                                          name='test',
                                          private_ipv4_base_address='10.3.4.0',
                                          private_ipv4_prefix_size='24',
                                          description='test vlan')
        self.assertEqual(vlan.id, '0e56433f-d808-4669-821d-812769517ff8')

    def test_ex_create_vlan_NO_DESCRIPTION(self,):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        vlan = self.driver.ex_create_vlan(network_domain=net,
                                          name='test',
                                          private_ipv4_base_address='10.3.4.0',
                                          private_ipv4_prefix_size='24')
        self.assertEqual(vlan.id, '0e56433f-d808-4669-821d-812769517ff8')

    def test_ex_get_vlan(self):
        vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
        self.assertEqual(vlan.id, '0e56433f-d808-4669-821d-812769517ff8')
        self.assertEqual(vlan.description, 'test2')
        self.assertEqual(vlan.status, 'NORMAL')
        self.assertEqual(vlan.name, 'Production VLAN')
        self.assertEqual(vlan.private_ipv4_range_address, '10.0.3.0')
        self.assertEqual(vlan.private_ipv4_range_size, 24)
        self.assertEqual(vlan.ipv6_range_size, 64)
        self.assertEqual(vlan.ipv6_range_address, '2607:f480:1111:1153:0:0:0:0')
        self.assertEqual(vlan.ipv4_gateway, '10.0.3.1')
        self.assertEqual(vlan.ipv6_gateway, '2607:f480:1111:1153:0:0:0:1')

    def test_ex_wait_for_state(self):
        self.driver.ex_wait_for_state('NORMAL',
                                      self.driver.ex_get_vlan,
                                      vlan_id='0e56433f-d808-4669-821d-812769517ff8')

    def test_ex_wait_for_state_NODE(self):
        self.driver.ex_wait_for_state('running',
                                      self.driver.ex_get_node_by_id,
                                      id='e75ead52-692f-4314-8725-c8a4f4d13a87')

    def test_ex_wait_for_state_FAIL(self):
        with self.assertRaises(DimensionDataAPIException) as context:
            self.driver.ex_wait_for_state('starting',
                                          self.driver.ex_get_node_by_id,
                                          id='e75ead52-692f-4314-8725-c8a4f4d13a87',
                                          timeout=2
                                          )
        self.assertEqual(context.exception.code, 'running')
        self.assertTrue('timed out' in context.exception.msg)

    def test_ex_update_vlan(self):
        vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
        vlan.name = 'new name'
        vlan2 = self.driver.ex_update_vlan(vlan)
        self.assertEqual(vlan2.name, 'new name')

    def test_ex_delete_vlan(self):
        vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
        result = self.driver.ex_delete_vlan(vlan)
        self.assertTrue(result)

    def test_ex_expand_vlan(self):
        vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
        vlan.private_ipv4_range_size = '23'
        vlan = self.driver.ex_expand_vlan(vlan)
        self.assertEqual(vlan.private_ipv4_range_size, '23')

    def test_ex_add_public_ip_block_to_network_domain(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        block = self.driver.ex_add_public_ip_block_to_network_domain(net)
        self.assertEqual(block.id, '9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')

    def test_ex_list_public_ip_blocks(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        blocks = self.driver.ex_list_public_ip_blocks(net)
        self.assertEqual(blocks[0].base_ip, '168.128.4.18')
        self.assertEqual(blocks[0].size, '2')
        self.assertEqual(blocks[0].id, '9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
        self.assertEqual(blocks[0].location.id, 'NA9')
        self.assertEqual(blocks[0].network_domain.id, net.id)

    def test_ex_get_public_ip_block(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        block = self.driver.ex_get_public_ip_block('9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
        self.assertEqual(block.base_ip, '168.128.4.18')
        self.assertEqual(block.size, '2')
        self.assertEqual(block.id, '9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
        self.assertEqual(block.location.id, 'NA9')
        self.assertEqual(block.network_domain.id, net.id)

    def test_ex_delete_public_ip_block(self):
        block = self.driver.ex_get_public_ip_block('9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
        result = self.driver.ex_delete_public_ip_block(block)
        self.assertTrue(result)

    def test_ex_list_firewall_rules(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rules = self.driver.ex_list_firewall_rules(net)
        self.assertEqual(rules[0].id, '756cba02-b0bc-48f4-aea5-9445870b6148')
        self.assertEqual(rules[0].network_domain.id, '8cdfd607-f429-4df6-9352-162cfc0891be')
        self.assertEqual(rules[0].name, 'CCDEFAULT.BlockOutboundMailIPv4')
        self.assertEqual(rules[0].action, 'DROP')
        self.assertEqual(rules[0].ip_version, 'IPV4')
        self.assertEqual(rules[0].protocol, 'TCP')
        self.assertEqual(rules[0].source.ip_address, 'ANY')
        self.assertTrue(rules[0].source.any_ip)
        self.assertTrue(rules[0].destination.any_ip)

    def test_ex_create_firewall_rule(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rules = self.driver.ex_list_firewall_rules(net)
        rule = self.driver.ex_create_firewall_rule(net, rules[0], 'FIRST')
        self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')

    def test_ex_create_firewall_rule_with_specific_source_ip(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rules = self.driver.ex_list_firewall_rules(net)
        specific_source_ip_rule = list(filter(lambda x: x.name == 'SpecificSourceIP',
                                              rules))[0]
        rule = self.driver.ex_create_firewall_rule(net, specific_source_ip_rule, 'FIRST')
        self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')

    def test_ex_get_firewall_rule(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rule = self.driver.ex_get_firewall_rule(net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
        self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')

    def test_ex_set_firewall_rule_state(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rule = self.driver.ex_get_firewall_rule(net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
        result = self.driver.ex_set_firewall_rule_state(rule, False)
        self.assertTrue(result)

    def test_ex_delete_firewall_rule(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rule = self.driver.ex_get_firewall_rule(net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
        result = self.driver.ex_delete_firewall_rule(rule)
        self.assertTrue(result)

    def test_ex_create_nat_rule(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rule = self.driver.ex_create_nat_rule(net, '1.2.3.4', '4.3.2.1')
        self.assertEqual(rule.id, 'd31c2db0-be6b-4d50-8744-9a7a534b5fba')

    def test_ex_list_nat_rules(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rules = self.driver.ex_list_nat_rules(net)
        self.assertEqual(rules[0].id, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
        self.assertEqual(rules[0].internal_ip, '10.0.0.15')
        self.assertEqual(rules[0].external_ip, '165.180.12.18')

    def test_ex_get_nat_rule(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rule = self.driver.ex_get_nat_rule(net, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
        self.assertEqual(rule.id, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
        self.assertEqual(rule.internal_ip, '10.0.0.16')
        self.assertEqual(rule.external_ip, '165.180.12.19')

    def test_ex_delete_nat_rule(self):
        net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
        rule = self.driver.ex_get_nat_rule(net, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
        result = self.driver.ex_delete_nat_rule(rule)
        self.assertTrue(result)

    def test_ex_enable_monitoring(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_enable_monitoring(node, "ADVANCED")
        self.assertTrue(result)

    def test_ex_disable_monitoring(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_disable_monitoring(node)
        self.assertTrue(result)

    def test_ex_change_monitoring_plan(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_update_monitoring_plan(node, "ESSENTIALS")
        self.assertTrue(result)

    def test_ex_add_storage_to_node(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_add_storage_to_node(node, 30, 'PERFORMANCE')
        self.assertTrue(result)

    def test_ex_remove_storage_from_node(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_remove_storage_from_node(node, 1)
        self.assertTrue(result)

    def test_ex_change_storage_speed(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_change_storage_speed(node, 1, 'PERFORMANCE')
        self.assertTrue(result)

    def test_ex_change_storage_size(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_change_storage_size(node, 1, 100)
        self.assertTrue(result)

    def test_ex_clone_node_to_image(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_clone_node_to_image(node, 'my image', 'a description')
        self.assertTrue(result)

    def test_ex_update_node(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_update_node(node, 'my new name', 'a description', 2, 4048)
        self.assertTrue(result)

    def test_ex_reconfigure_node(self):
        node = self.driver.list_nodes()[0]
        result = self.driver.ex_reconfigure_node(node, 4, 4, 1, 'HIGHPERFORMANCE')
        self.assertTrue(result)

    def test_ex_get_location_by_id(self):
        location = self.driver.ex_get_location_by_id('NA9')
        # Fixed: was assertTrue(location.id, 'NA9') — the id was never
        # actually compared against 'NA9'.
        self.assertEqual(location.id, 'NA9')

    def test_ex_get_location_by_id_NO_LOCATION(self):
        location = self.driver.ex_get_location_by_id(None)
        self.assertIsNone(location)

    def test_priv_location_to_location_id(self):
        location = self.driver.ex_get_location_by_id('NA9')
        self.assertEqual(
            self.driver._location_to_location_id(location),
            'NA9'
        )

    def test_priv_location_to_location_id_STR(self):
        self.assertEqual(
            self.driver._location_to_location_id('NA9'),
            'NA9'
        )

    def test_priv_location_to_location_id_TYPEERROR(self):
        with self.assertRaises(TypeError):
            self.driver._location_to_location_id([1, 2, 3])
class InvalidRequestError(Exception):
    """Raised by the mock HTTP layer when a POSTed body has the wrong tag."""

    def __init__(self, tag):
        message = "Invalid Request - %s" % tag
        super(InvalidRequestError, self).__init__(message)
class DimensionDataMockHttp(MockHttp):
fixtures = ComputeFileFixtures('dimensiondata')
def _oec_0_9_myaccount_UNAUTHORIZED(self, method, url, body, headers):
return (httplib.UNAUTHORIZED, "", {}, httplib.responses[httplib.UNAUTHORIZED])
def _oec_0_9_myaccount(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_INPROGRESS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_PAGINATED(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_ALLFILTERS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_base_image(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_base_image.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_base_imageWithDiskSpeed(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_base_imageWithDiskSpeed.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployed(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployed.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_pendingDeploy(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_pendingDeploy.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_datacenter(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_datacenter.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11(self, method, url, body, headers):
body = None
action = url.split('?')[-1]
if action == 'restart':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_restart.xml')
elif action == 'shutdown':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_shutdown.xml')
elif action == 'delete':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_delete.xml')
elif action == 'start':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_start.xml')
elif action == 'poweroff':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_poweroff.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_INPROGRESS(self, method, url, body, headers):
    # Busy variant of the server-11 action endpoint: same action dispatch as
    # above, but each fixture describes an in-progress operation and the
    # response status is BAD_REQUEST to simulate a resource-busy failure.
    body = None
    action = url.split('?')[-1]
    if action == 'restart':
        body = self.fixtures.load(
            'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_restart_INPROGRESS.xml')
    elif action == 'shutdown':
        body = self.fixtures.load(
            'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_shutdown_INPROGRESS.xml')
    elif action == 'delete':
        body = self.fixtures.load(
            'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_delete_INPROGRESS.xml')
    elif action == 'start':
        body = self.fixtures.load(
            'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_start_INPROGRESS.xml')
    elif action == 'poweroff':
        body = self.fixtures.load(
            'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_poweroff_INPROGRESS.xml')
    return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])

def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server(self, method, url, body, headers):
    # Mock endpoint: server listing.
    # NOTE(review): this fixture name begins with an underscore, unlike every
    # other fixture in this class -- verify the file on disk really carries
    # the leading underscore.
    body = self.fixtures.load(
        '_oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation(self, method, url, body, headers):
    # Mock endpoint for network creation.  For POST requests the request body
    # must be a NewNetworkWithLocation element; other methods go straight to
    # the listing fixture.
    #
    # Bug fix: the original used `method is "POST"`, which tests object
    # identity against a string literal.  CPython may or may not intern the
    # two strings, so the check could silently skip request validation even
    # when method == "POST".  String comparison must use `==`.
    if method == "POST":
        request = ET.fromstring(body)
        if request.tag != "{http://oec.api.opsource.net/schemas/network}NewNetworkWithLocation":
            raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation_NA9(self, method, url, body, headers):
    # Mock endpoint: NA9-scoped network listing (reuses the generic fixture).
    body = self.fixtures.load(
        'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_4bba37be_506f_11e3_b29c_001517c4643e(self, method,
                                                                                               url, body, headers):
    # Mock endpoint: detail view for one specific network id.
    body = self.fixtures.load(
        'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_4bba37be_506f_11e3_b29c_001517c4643e.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSize(self, method, url, body, headers):
    # Mock endpoint: disk resize for server e75ead52.
    body = self.fixtures.load(
        'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSize.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSpeed(self, method, url, body, headers):
    # Mock endpoint: disk speed change for server e75ead52.
    body = self.fixtures.load(
        'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSpeed.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1(self, method, url, body, headers):
    # Mock endpoint for the disk itself: only the 'delete' action substitutes
    # a fixture; any other action echoes the incoming request body back.
    action = url.split('?')[-1]
    if action == 'delete':
        body = self.fixtures.load(
            'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87(self, method, url, body, headers):
    # Mock endpoint: per-server detail (GET) vs. action (POST), each with its
    # own fixture.
    # NOTE(review): any method other than GET/POST falls off the end and
    # returns None -- confirm only GET/POST are ever routed here.
    if method == 'GET':
        body = self.fixtures.load(
            'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    if method == 'POST':
        body = self.fixtures.load(
            'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_POST.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server(self, method, url, body, headers):
    # Mock endpoint: CaaS 2.1 server listing.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deleteServer(self, method, url, body, headers):
    # Validates the <deleteServer> request payload, then returns its
    # success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}deleteServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deleteServer.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deleteServer_INPROGRESS(self, method, url, body, headers):
    # Busy variant of deleteServer: RESOURCE_BUSY fixture + BAD_REQUEST status.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}deleteServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deleteServer_RESOURCEBUSY.xml')
    return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_rebootServer(self, method, url, body, headers):
    # Validates the <rebootServer> request payload, then returns its
    # success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}rebootServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_rebootServer.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_rebootServer_INPROGRESS(self, method, url, body, headers):
    # Busy variant of rebootServer: RESOURCE_BUSY fixture + BAD_REQUEST status.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}rebootServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_rebootServer_RESOURCEBUSY.xml')
    return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server(self, method, url, body, headers):
    # Server listing: a datacenterId=NA3 filter gets its own fixture,
    # everything else gets the generic listing.
    if url.endswith('datacenterId=NA3'):
        body = self.fixtures.load(
            'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_NA3.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_PAGINATED(self, method, url, body, headers):
    # Paginated listing: page 2 is the final (plain) page, any other request
    # returns the paginated fixture that points at page 2.
    if url.endswith('pageNumber=2'):
        body = self.fixtures.load(
            'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    else:
        body = self.fixtures.load(
            'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_paginated.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'datacenterId':
assert value == 'fake_loc'
elif key == 'networkId':
assert value == 'fake_network'
elif key == 'networkDomainId':
assert value == 'fake_network_domain'
elif key == 'vlanId':
assert value == 'fake_vlan'
elif key == 'ipv6':
assert value == 'fake_ipv6'
elif key == 'privateIpv4':
assert value == 'fake_ipv4'
elif key == 'name':
assert value == 'fake_name'
elif key == 'state':
assert value == 'fake_state'
elif key == 'started':
assert value == 'True'
elif key == 'deployed':
assert value == 'True'
elif key == 'sourceImageId':
assert value == 'fake_image'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_infrastructure_datacenter(self, method, url, body, headers):
    # Datacenter listing: an id=NA9 filter gets a dedicated fixture.
    if url.endswith('id=NA9'):
        body = self.fixtures.load(
            'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_infrastructure_datacenter_NA9.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_infrastructure_datacenter.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_updateVmwareTools(self, method, url, body, headers):
    # Validates the <updateVmwareTools> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}updateVmwareTools":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_updateVmwareTools.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_startServer(self, method, url, body, headers):
    # Validates the <startServer> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}startServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_startServer.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_startServer_INPROGRESS(self, method, url, body, headers):
    # Busy variant of startServer: in-progress fixture + BAD_REQUEST status.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}startServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_startServer_INPROGRESS.xml')
    return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_shutdownServer(self, method, url, body, headers):
    # Validates the <shutdownServer> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}shutdownServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_shutdownServer.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_shutdownServer_INPROGRESS(self, method, url, body, headers):
    # Busy variant of shutdownServer: in-progress fixture + BAD_REQUEST status.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}shutdownServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_shutdownServer_INPROGRESS.xml')
    return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_resetServer(self, method, url, body, headers):
    # Validates the <resetServer> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}resetServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_resetServer.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_powerOffServer(self, method, url, body, headers):
    # Validates the <powerOffServer> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}powerOffServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_powerOffServer.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_powerOffServer_INPROGRESS(self, method, url, body, headers):
    # Busy variant of powerOffServer: in-progress fixture + BAD_REQUEST status.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}powerOffServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_powerOffServer_INPROGRESS.xml')
    return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain(self, method, url, body, headers):
    # Mock endpoint: network-domain listing.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan(self, method, url, body, headers):
    # Mock endpoint: VLAN listing.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployServer(self, method, url, body, headers):
    # Validates the <deployServer> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}deployServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployServer.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_e75ead52_692f_4314_8725_c8a4f4d13a87(self, method, url, body, headers):
    # Mock endpoint: detail view of one specific server id.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_e75ead52_692f_4314_8725_c8a4f4d13a87.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deployNetworkDomain(self, method, url, body, headers):
    # Validates the <deployNetworkDomain> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}deployNetworkDomain":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deployNetworkDomain.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be(self, method, url, body, headers):
    # Mock endpoint: detail view of one specific network domain id.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editNetworkDomain(self, method, url, body, headers):
    # Validates the <editNetworkDomain> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}editNetworkDomain":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editNetworkDomain.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteNetworkDomain(self, method, url, body, headers):
    # Validates the <deleteNetworkDomain> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}deleteNetworkDomain":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteNetworkDomain.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deployVlan(self, method, url, body, headers):
    # Validates the <deployVlan> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}deployVlan":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deployVlan.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan_0e56433f_d808_4669_821d_812769517ff8(self, method, url, body, headers):
    # Mock endpoint: detail view of one specific VLAN id.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan_0e56433f_d808_4669_821d_812769517ff8.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editVlan(self, method, url, body, headers):
    # Validates the <editVlan> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}editVlan":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editVlan.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteVlan(self, method, url, body, headers):
    # Validates the <deleteVlan> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}deleteVlan":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteVlan.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_expandVlan(self, method, url, body, headers):
    # Validates the <expandVlan> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}expandVlan":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_expandVlan.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_addPublicIpBlock(self, method, url, body, headers):
    # Validates the <addPublicIpBlock> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}addPublicIpBlock":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_addPublicIpBlock.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock_4487241a_f0ca_11e3_9315_d4bed9b167ba(self, method, url, body, headers):
    # Mock endpoint: detail view of one specific public IP block id.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock_4487241a_f0ca_11e3_9315_d4bed9b167ba.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock(self, method, url, body, headers):
    # Mock endpoint: public IP block listing.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock_9945dc4a_bdce_11e4_8c14_b8ca3a5d9ef8(self, method, url, body, headers):
    # Mock endpoint: detail view of one specific public IP block id.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock_9945dc4a_bdce_11e4_8c14_b8ca3a5d9ef8.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_removePublicIpBlock(self, method, url, body, headers):
    # Validates the <removePublicIpBlock> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}removePublicIpBlock":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_removePublicIpBlock.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_firewallRule(self, method, url, body, headers):
    # Mock endpoint: firewall rule listing.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_firewallRule.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createFirewallRule(self, method, url, body, headers):
    # Validates the <createFirewallRule> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}createFirewallRule":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createFirewallRule.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_firewallRule_d0a20f59_77b9_4f28_a63b_e58496b73a6c(self, method, url, body, headers):
    # Mock endpoint: detail view of one specific firewall rule id.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_firewallRule_d0a20f59_77b9_4f28_a63b_e58496b73a6c.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editFirewallRule(self, method, url, body, headers):
    # Validates the <editFirewallRule> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}editFirewallRule":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editFirewallRule.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteFirewallRule(self, method, url, body, headers):
    # Validates the <deleteFirewallRule> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}deleteFirewallRule":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteFirewallRule.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createNatRule(self, method, url, body, headers):
    # Validates the <createNatRule> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}createNatRule":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createNatRule.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_natRule(self, method, url, body, headers):
    # Mock endpoint: NAT rule listing.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_natRule.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_natRule_2187a636_7ebb_49a1_a2ff_5d617f496dce(self, method, url, body, headers):
    # Mock endpoint: detail view of one specific NAT rule id.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_natRule_2187a636_7ebb_49a1_a2ff_5d617f496dce.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteNatRule(self, method, url, body, headers):
    # Validates the <deleteNatRule> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}deleteNatRule":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteNatRule.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_addNic(self, method, url, body, headers):
    # Validates the <addNic> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}addNic":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_addNic.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_removeNic(self, method, url, body, headers):
    # Validates the <removeNic> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}removeNic":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_removeNic.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_disableServerMonitoring(self, method, url, body, headers):
    # Validates the <disableServerMonitoring> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}disableServerMonitoring":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_disableServerMonitoring.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_enableServerMonitoring(self, method, url, body, headers):
    # Validates the <enableServerMonitoring> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}enableServerMonitoring":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_enableServerMonitoring.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_changeServerMonitoringPlan(self, method, url, body, headers):
    # Validates the <changeServerMonitoringPlan> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}changeServerMonitoringPlan":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_changeServerMonitoringPlan.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage(self, method, url, body, headers):
    # Mock endpoint: OS image listing.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage(self, method, url, body, headers):
    # Mock endpoint: customer image listing.
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])

def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_reconfigureServer(self, method, url, body, headers):
    # Validates the <reconfigureServer> payload, then returns its success fixture.
    request = ET.fromstring(body)
    if request.tag != "{urn:didata.com:api:cloud:types}reconfigureServer":
        raise InvalidRequestError(request.tag)
    body = self.fixtures.load(
        'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_reconfigureServer.xml')
    return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == '__main__':
    # Run the test suite and propagate its result as the process exit status.
    sys.exit(unittest.main())
| |
import boto3
import moto
from flask import json
from mock import call
from app.models import Notification
from datetime import datetime
from app import db, sms_wrapper
from app.job.sms_jobs import send_sms, fetch_sms_status
from app.connectors.sms.clients import ClientException
@moto.mock_sqs
def test_should_send_sms_all_notifications(notify_api, notify_db, notify_db_session, notify_config, mocker):
    """Every notification on the queue is sent and stamped with the sender details."""
    mocker.patch('app.sms_wrapper.send', return_value=("1234", "twilio"))
    extra_notification = Notification(
        id=1000,
        to="to",
        message="message",
        created_at=datetime.utcnow(),
        status='created',
        method='sms',
        job_id=1234
    )
    db.session.add(extra_notification)
    db.session.commit()
    session_notification = Notification.query.get(1234)
    queue = set_up_mock_queue()
    send_message_to_mock_queue(queue, extra_notification)
    send_message_to_mock_queue(queue, session_notification)
    send_sms()
    # The notification created inside this test.
    fetched_extra = Notification.query.get(1000)
    assert fetched_extra.status == 'sent'
    assert fetched_extra.sent_at >= fetched_extra.created_at
    assert fetched_extra.sender_id == "1234"
    assert fetched_extra.sender == "twilio"
    # The notification seeded by the shared test session.
    fetched_session = Notification.query.get(1234)
    assert fetched_session.status == 'sent'
    assert fetched_session.sent_at >= fetched_session.created_at
    assert fetched_session.sender_id == "1234"
    assert fetched_session.sender == "twilio"
    # Both sends went through the sms wrapper, in either order.
    sms_wrapper.send.assert_has_calls([call('phone-number', 'this is a message', 1234), call("to", "message", 1000)],
                                      any_order=True)
@moto.mock_sqs
def test_should_send_sms_notification(notify_api, notify_db, notify_db_session, notify_config, mocker):
    """A single queued notification is delivered through the sms wrapper."""
    mocker.patch('app.sms_wrapper.send', return_value=("1234", "twilio"))
    queue = set_up_mock_queue()
    seeded = Notification.query.get(1234)
    send_message_to_mock_queue(queue, seeded)
    send_sms()
    fetched = Notification.query.get(1234)
    assert fetched.status == 'sent'
    assert fetched.sender_id == "1234"
    assert fetched.sender == "twilio"
    assert fetched.sent_at >= fetched.created_at
    sms_wrapper.send.assert_called_once_with('phone-number', 'this is a message', 1234)
@moto.mock_sqs
def test_only_send_notifications_in_created_state(notify_api, notify_db, notify_db_session, notify_config, mocker):
    """A notification already in 'sent' state must not be re-sent.

    Seeds an already-sent notification (id 1000), queues only the
    session-seeded one (id 1234), and checks that the already-sent record
    keeps its original sender details while the queued one is sent once.
    """
    mocker.patch('app.sms_wrapper.send', return_value=("1234", "twilio"))
    sent_at = datetime.utcnow()
    create_notification = Notification(
        id=1000,
        to="to",
        message="message",
        created_at=datetime.utcnow(),
        sent_at=sent_at,
        status='sent',
        sender='twilio',
        method='sms',
        job_id=1234,
        sender_id="999"
    )
    db.session.add(create_notification)
    db.session.commit()
    q = set_up_mock_queue()
    send_message_to_mock_queue(q, Notification.query.get(1234))
    send_sms()
    # The already-sent notification is untouched.
    # (Bug fix: removed a leftover debug print of read_notification_1.sender.)
    read_notification_1 = Notification.query.get(1000)
    assert read_notification_1.status == 'sent'
    assert read_notification_1.sender_id == '999'
    assert read_notification_1.sender == 'twilio'
    assert read_notification_1.sent_at == sent_at
    # The session-seeded notification was sent normally.
    read_notification_2 = Notification.query.get(1234)
    assert read_notification_2.status == 'sent'
    assert read_notification_2.sender_id == '1234'
    assert read_notification_2.sender == 'twilio'
    assert read_notification_2.sent_at >= read_notification_2.created_at
    # Exactly one send went through the sms wrapper.
    sms_wrapper.send.assert_called_once_with('phone-number', 'this is a message', 1234)
@moto.mock_sqs
def test_should_put_notification_into_error_if_failed(notify_api, notify_db, notify_db_session, notify_config, mocker):
    """A client failure marks the notification as errored and leaves sent_at unset."""
    mocker.patch('app.sms_wrapper.send', side_effect=ClientException('twilio'))
    queue = set_up_mock_queue()
    send_message_to_mock_queue(queue, Notification.query.get(1234))
    send_sms()
    fetched = Notification.query.get(1234)
    assert fetched.status == 'error'
    assert fetched.sender == 'twilio'
    assert fetched.sent_at is None
    # The failing send was still attempted exactly once.
    sms_wrapper.send.assert_called_once_with('phone-number', 'this is a message', 1234)
def test_should_set_status_for_all_send_notifications(notify_api, notify_db, notify_db_session, notify_config, mocker):
    """Status is polled for every notification in 'sent' state."""
    mocker.patch('app.sms_wrapper.status', return_value="delivered")
    dispatched_at = datetime.utcnow()
    new_sent_notification = Notification(
        id=1000,
        to="to",
        message="message",
        created_at=datetime.utcnow(),
        sent_at=dispatched_at,
        status='sent',
        method='sms',
        job_id=1234,
        sender_id="1",
        sender="twilio"
    )
    db.session.add(new_sent_notification)
    # Promote the session-seeded notification to 'sent' as well.
    seeded = Notification.query.get(1234)
    seeded.status = 'sent'
    seeded.sender_id = '2'
    seeded.sender = 'twilio'
    db.session.add(seeded)
    db.session.commit()
    fetch_sms_status()
    fetched = Notification.query.get(1234)
    assert fetched.status == 'delivered'
    assert fetched.delivered_at >= fetched.created_at
    # Both sent notifications had their status looked up.
    sms_wrapper.status.assert_has_calls([call("1", "twilio"), call("2", "twilio")])
def test_should_set_status_for_send_notifications(notify_api, notify_db, notify_db_session, notify_config, mocker):
    """A 'sent' notification is marked delivered when the provider reports delivery."""
    mocker.patch('app.sms_wrapper.status', return_value="delivered")
    seeded = Notification.query.get(1234)
    seeded.status = 'sent'
    seeded.sender_id = '1234'
    seeded.sender = 'twilio'
    db.session.add(seeded)
    db.session.commit()
    fetch_sms_status()
    fetched = Notification.query.get(1234)
    assert fetched.status == 'delivered'
    assert fetched.delivered_at >= fetched.created_at
    sms_wrapper.status.assert_called_once_with("1234", 'twilio')
def test_should_not_set_delivered_at_if_not_delivered(notify_api, notify_db, notify_db_session, notify_config, mocker):
    """A failed delivery updates status but never sets delivered_at."""
    mocker.patch('app.sms_wrapper.status', return_value="failed")
    seeded = Notification.query.get(1234)
    seeded.status = 'sent'
    seeded.sender_id = '1234'
    seeded.sender = 'twilio'
    db.session.add(seeded)
    db.session.commit()
    fetch_sms_status()
    fetched = Notification.query.get(1234)
    assert fetched.status == 'failed'
    assert not fetched.delivered_at
    sms_wrapper.status.assert_called_once_with("1234", 'twilio')
def test_should_not_check_status_unless_sent(notify_api, notify_db, notify_db_session, notify_config, mocker):
    """Notifications still in 'created' state must not have their status polled."""
    mocker.patch('app.sms_wrapper.status')
    fetch_sms_status()
    read_notification = Notification.query.get(1234)
    assert read_notification.status == 'created'
    assert not read_notification.delivered_at
    # Bug fix: the original referenced `assert_not_called` without calling it,
    # which evaluates the bound method and discards it -- the assertion never
    # actually ran, so a status lookup would have gone undetected.
    sms_wrapper.status.assert_not_called()
def set_up_mock_queue():
    """Create (via moto) the SQS queue the sms jobs consume, and return it."""
    boto3.setup_default_session(region_name='eu-west-1')
    sqs = boto3.resource('sqs')
    return sqs.create_queue(QueueName='gov_uk_notify_sms_queue')
def send_message_to_mock_queue(queue, notification):
    """Serialize *notification* and push it onto *queue* tagged as an sms message."""
    payload = json.dumps(notification.serialize())
    attributes = {'type': {'StringValue': 'sms', 'DataType': 'String'}}
    queue.send_message(MessageBody=payload, MessageAttributes=attributes)
| |
from __future__ import print_function
import sys
from catkin_pkg.package import parse_package_string
from rosdistro import get_distribution_cache
from rosdistro import get_index
from ros_buildfarm.common import get_devel_job_name
from ros_buildfarm.common import get_devel_view_name
from ros_buildfarm.common import git_github_orgunit
from ros_buildfarm.common import get_github_project_url
from ros_buildfarm.common \
import get_repositories_and_script_generating_key_files
from ros_buildfarm.common import JobValidationError
from ros_buildfarm.config import get_distribution_file
from ros_buildfarm.config import get_index as get_config_index
from ros_buildfarm.config import get_source_build_files
from ros_buildfarm.git import get_repository
from ros_buildfarm.templates import expand_template
def configure_devel_jobs(
        config_url, rosdistro_name, source_build_name, groovy_script=None):
    """
    Configure all Jenkins devel jobs.

    L{configure_devel_job} will be invoked for every source repository and
    target which matches the build file criteria.

    :param config_url: URL of the buildfarm configuration index
    :param rosdistro_name: name of the ROS distribution
    :param source_build_name: name of the source build file to use
    :param groovy_script: if not None, instead of configuring jobs directly
      via the Jenkins API, write a groovy reconfiguration script to this path
    """
    config = get_config_index(config_url)
    build_files = get_source_build_files(config, rosdistro_name)
    build_file = build_files[source_build_name]

    index = get_index(config.rosdistro_index_url)

    # the distribution cache is only needed to resolve maintainer emails
    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets: flatten the nested os_name/os_code_name/arch mapping
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print(' -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return

    devel_view_name = get_devel_view_name(
        rosdistro_name, source_build_name, pull_request=False)
    pull_request_view_name = get_devel_view_name(
        rosdistro_name, source_build_name, pull_request=True)

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    # only create views for job types which are not globally disabled
    views = []
    if build_file.test_commits_force is not False:
        views.append(configure_devel_view(jenkins, devel_view_name))
    if build_file.test_pull_requests_force is not False:
        views.append(configure_devel_view(jenkins, pull_request_view_name))

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        jenkins = False

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    devel_job_names = []
    pull_request_job_names = []
    job_configs = {}
    for repo_name in sorted(repo_names):
        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            # NOTE(review): relies on `sys` being imported at module level;
            # it is not among the imports visible in this chunk — confirm.
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            print("Skipping repository '%s': no source section" % repo_name)
            continue
        if not repo.source_repository.version:
            print("Skipping repository '%s': no source version" % repo_name)
            continue

        job_types = []
        # check for testing commits: forced/per-repo/default settings,
        # in that order of precedence
        if build_file.test_commits_force is False:
            print(("Skipping repository '%s': 'test_commits' is forced to " +
                   "false in the build file") % repo_name)
        elif repo.source_repository.test_commits is False:
            print(("Skipping repository '%s': 'test_commits' of the " +
                   "repository set to false") % repo_name)
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            print(("Skipping repository '%s': 'test_commits' defaults to " +
                   "false in the build file") % repo_name)
        else:
            job_types.append('commit')

        if not is_disabled:
            # check for testing pull requests (same precedence as above)
            if build_file.test_pull_requests_force is False:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "is forced to false in the build file") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is False:
                # print(("Skipping repository '%s': 'test_pull_requests' of " +
                #        "the repository set to false") % repo_name)
                pass
            elif repo.source_repository.test_pull_requests is None and \
                    not build_file.test_pull_requests_default:
                # print(("Skipping repository '%s': 'test_pull_requests' " +
                #        "defaults to false in the build file") % repo_name)
                pass
            else:
                print("Pull request job for repository '%s'" % repo_name)
                job_types.append('pull_request')

        for job_type in job_types:
            pull_request = job_type == 'pull_request'
            for os_name, os_code_name, arch in targets:
                try:
                    job_name, job_config = configure_devel_job(
                        config_url, rosdistro_name, source_build_name,
                        repo_name, os_name, os_code_name, arch, pull_request,
                        config=config, build_file=build_file,
                        index=index, dist_file=dist_file,
                        dist_cache=dist_cache, jenkins=jenkins, views=views,
                        is_disabled=is_disabled,
                        groovy_script=groovy_script)
                    if not pull_request:
                        devel_job_names.append(job_name)
                    else:
                        pull_request_job_names.append(job_name)
                    if groovy_script is not None:
                        print("Configuration for job '%s'" % job_name)
                        job_configs[job_name] = job_config
                except JobValidationError as e:
                    print(e.message, file=sys.stderr)

    devel_job_prefix = '%s__' % devel_view_name
    pull_request_job_prefix = '%s__' % pull_request_view_name
    if groovy_script is None:
        # delete obsolete jobs in these views
        from ros_buildfarm.jenkins import remove_jobs
        print('Removing obsolete devel jobs')
        remove_jobs(jenkins, devel_job_prefix, devel_job_names)
        print('Removing obsolete pull request jobs')
        remove_jobs(
            jenkins, pull_request_job_prefix, pull_request_job_names)
    else:
        print("Writing groovy script '%s' to reconfigure %d jobs" %
              (groovy_script, len(job_configs)))
        data = {
            'job_configs': job_configs,
            'job_prefixes_and_names': {
                'devel': (devel_job_prefix, devel_job_names),
                'pull_request': (
                    pull_request_job_prefix, pull_request_job_names),
            }
        }
        content = expand_template('snippet/reconfigure_jobs.groovy.em', data)
        with open(groovy_script, 'w') as h:
            h.write(content)
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        pull_request=False,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        is_disabled=False,
        groovy_script=None,
        source_repository=None,
        build_targets=None):
    """
    Configure a single Jenkins devel job.

    This includes the following steps:
    - clone the source repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the release/run_devel_job.py script

    The keyword arguments ``config``, ``build_file``, ``index``,
    ``dist_file``, ``dist_cache``, ``jenkins`` and ``views`` act as caches so
    that ``configure_devel_jobs`` can reuse already-fetched data; any of them
    left as None is (re)loaded here.

    :raises JobValidationError: if the repository or the requested
      os_name/os_code_name/arch target is not covered by the build file
    :returns: tuple of the job name and the expanded job configuration XML
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]

    # Overwrite build_file.targets if build_targets is specified
    if build_targets is not None:
        build_file.targets = build_targets

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    repo_names = dist_file.repositories.keys()

    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError(
                "Invalid repository name '%s' " % repo_name +
                'choose one of the following: %s' %
                ', '.join(sorted(repo_names)))

        repo = dist_file.repositories[repo_name]

        if not repo.source_repository:
            raise JobValidationError(
                "Repository '%s' has no source section" % repo_name)
        if not repo.source_repository.version:
            raise JobValidationError(
                "Repository '%s' has no source version" % repo_name)
        source_repository = repo.source_repository

    # validate the requested target against the build file
    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        view_name = get_devel_view_name(
            rosdistro_name, source_build_name, pull_request=pull_request)
        configure_devel_view(jenkins, view_name)

    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch, pull_request)

    job_config = _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, source_repository,
        repo_name, pull_request, job_name, dist_cache=dist_cache,
        is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero,
    # hence the explicit identity check against False instead of truthiness.
    # (The previous `isinstance(jenkins, object)` conjunct was removed: every
    # Python value is an instance of object, so it was always True.)
    if jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)

    return job_name, job_config
def configure_devel_view(jenkins, view_name):
    """Create or update the Jenkins dashboard view listing the devel jobs.

    The view includes every job whose name starts with ``<view_name>__``.
    """
    from ros_buildfarm.jenkins import configure_view
    job_name_pattern = '%s__.+' % view_name
    return configure_view(
        jenkins, view_name,
        include_regex=job_name_pattern,
        template_name='dashboard_view_devel_jobs.xml.em')
def _get_devel_job_config(
        config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, source_repo_spec,
        repo_name, pull_request, job_name, dist_cache=None,
        is_disabled=False):
    """Expand the devel job XML template with the data for one repo/target.

    :param source_repo_spec: source repository entry providing the git URL
    :param dist_cache: optional distribution cache used to collect
      maintainer emails for notifications
    :returns: the expanded Jenkins job configuration XML as a string
    """
    template_name = 'devel/devel_job.xml.em'
    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(build_file=build_file)

    maintainer_emails = set([])
    if build_file.notify_maintainers and dist_cache and repo_name:
        # add maintainers listed in latest release to recipients
        repo = dist_cache.distribution_file.repositories[repo_name]
        if repo.release_repository:
            for pkg_name in repo.release_repository.package_names:
                if pkg_name not in dist_cache.release_package_xmls:
                    continue
                pkg_xml = dist_cache.release_package_xmls[pkg_name]
                pkg = parse_package_string(pkg_xml)
                for m in pkg.maintainers:
                    maintainer_emails.add(m.email)

    # commit and pull request jobs can be prioritized differently
    job_priority = \
        build_file.jenkins_commit_job_priority \
        if not pull_request \
        else build_file.jenkins_pull_request_job_priority

    job_data = {
        'github_url': get_github_project_url(source_repo_spec.url),

        'job_priority': job_priority,
        'node_label': build_file.jenkins_job_label,

        'pull_request': pull_request,

        'source_repo_spec': source_repo_spec,

        'disabled': is_disabled,

        # this should not be necessary
        'job_name': job_name,

        'github_orgunit': git_github_orgunit(source_repo_spec.url),

        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,
        'rosdistro_name': rosdistro_name,

        'source_build_name': source_build_name,
        'os_name': os_name,
        'os_code_name': os_code_name,
        'arch': arch,

        'repository_args': repository_args,

        'notify_emails': build_file.notify_emails,
        'maintainer_emails': maintainer_emails,
        'notify_maintainers': build_file.notify_maintainers,
        'notify_committers': build_file.notify_committers,

        'timeout_minutes': build_file.jenkins_job_timeout,
    }
    job_config = expand_template(template_name, job_data)
    return job_config
| |
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import absolute_import, unicode_literals, print_function
import os
import re
import pytest
import inspect
import logging
from osbs.core import Openshift
from osbs.http import HttpResponse
from osbs.conf import Configuration
from osbs.api import OSBS
from tests.constants import (TEST_BUILD, TEST_COMPONENT, TEST_GIT_REF,
TEST_GIT_BRANCH, TEST_BUILD_CONFIG)
from tempfile import NamedTemporaryFile
try:
# py2
import httplib
import urlparse
except ImportError:
# py3
import http.client as httplib
import urllib.parse as urlparse
# Shared logger for the test helpers in this module.
logger = logging.getLogger("osbs.tests")
# URL prefixes for the OpenShift origin ("oapi") and Kubernetes ("api") REST
# endpoints, derived from the configured API version.
API_VER = Configuration.get_openshift_api_version()
OAPI_PREFIX = "/oapi/{v}/".format(v=API_VER)
API_PREFIX = "/api/{v}/".format(v=API_VER)
class StreamingResponse(object):
    """Minimal stand-in for a streamed HTTP response.

    Supports the context-manager protocol and ``iter_lines`` the way the
    client code consumes streamed build logs: the whole payload is yielded
    as a single decoded line.
    """

    def __init__(self, status_code=200, content=b'', headers=None):
        self.status_code = status_code
        self.content = content
        self.headers = headers or {}

    def iter_lines(self):
        """Yield the entire payload decoded as one UTF-8 string."""
        decoded = self.content.decode("utf-8")
        yield decoded

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # nothing to release; present only for `with` support
        return None
class Connection(object):
    """Fake osbs HTTP connection that serves canned OpenShift API responses.

    Request URL paths (optionally several spellings per endpoint, grouped in
    tuples) are mapped to fixture files for the given mock API version.
    """

    def __init__(self, version="0.5.4"):
        self.version = version
        self.response_mapping = ResponseMapping(version,
                                                lookup=self.get_definition_for)

        # mapping of urls or tuples of urls to responses; use get_definition_for
        # to get values from this dict
        #
        # The files are captured using the command line tool's
        # --capture-dir parameter, and edited as needed.
        self.DEFINITION = {
            (OAPI_PREFIX + "namespaces/default/builds",
             OAPI_PREFIX + "namespaces/default/builds/"): {
                "get": {
                    # Contains a list of builds
                    "file": "builds_list.json",
                },
                "post": {
                    # Contains a single build named test-build-123
                    "file": "build_test-build-123.json",
                },
            },

            # Some 'builds' requests are with a trailing slash, some without:
            (OAPI_PREFIX + "namespaces/default/builds/%s" % TEST_BUILD,
             OAPI_PREFIX + "namespaces/default/builds/%s/" % TEST_BUILD): {
                "get": {
                    # Contains a single build in Completed phase
                    # named test-build-123
                    "file": "build_test-build-123.json",
                },
                "put": {
                    "file": "build_test-build-123.json",
                }
            },

            (OAPI_PREFIX + "namespaces/default/builds/%s/log/" % TEST_BUILD,
             OAPI_PREFIX + "namespaces/default/builds/%s/log/?follow=0" % TEST_BUILD,
             OAPI_PREFIX + "namespaces/default/builds/%s/log/?follow=1" % TEST_BUILD): {
                "get": {
                    # Lines of text
                    "file": "build_test-build-123_logs.txt",
                },
            },

            ("/oauth/authorize",
             "/oauth/authorize?client_id=openshift-challenging-client&response_type=token",
             "/oauth/authorize?response_type=token&client_id=openshift-challenging-client"): {
                "get": {
                    "file": "authorize.txt",
                    "custom_callback": self.process_authorize,
                }
            },

            OAPI_PREFIX + "users/~/": {
                "get": {
                    "file": "get_user.json",
                }
            },

            OAPI_PREFIX + "watch/namespaces/default/builds/%s/" % TEST_BUILD: {
                "get": {
                    # Single MODIFIED item, with a Build object in
                    # Completed phase named test-build-123
                    "file": "watch_build_test-build-123.json",
                }
            },

            OAPI_PREFIX + "namespaces/default/buildconfigs/": {
                "post": {
                    # Contains a BuildConfig named test-build-config-123
                    "file": "created_build_config_test-build-config-123.json",
                }
            },

            OAPI_PREFIX + "namespaces/default/buildconfigs/%s/instantiate" % TEST_BUILD_CONFIG: {
                "post": {
                    # A Build named test-build-123 instantiated from a
                    # BuildConfig named test-build-config-123
                    "file": "instantiated_test-build-config-123.json",
                }
            },

            # use both version with ending slash and without it
            (OAPI_PREFIX + "namespaces/default/buildconfigs/%s" % TEST_BUILD_CONFIG,
             OAPI_PREFIX + "namespaces/default/buildconfigs/%s/" % TEST_BUILD_CONFIG): {
                "get": {
                    "custom_callback":
                        self.with_status_code(httplib.NOT_FOUND),
                    # Empty file (no response content as the status is 404)
                    "file": None,
                }
            },

            OAPI_PREFIX + "namespaces/default/builds/?labelSelector=buildconfig%%3D%s" %
            TEST_BUILD_CONFIG: {
                "get": {
                    # Contains a BuildList with Builds labeled with
                    # buildconfig=fedora23-something, none of which
                    # are running
                    "file": "builds_list.json"
                }
            },

            API_PREFIX + "namespaces/default/pods/?labelSelector=openshift.io%%2Fbuild.name%%3D%s" %
            TEST_BUILD: {
                "get": {
                    # Contains a list of build pods, just needs not to
                    # be empty
                    "file": "pods.json",
                },
            },

            API_PREFIX + "namespaces/default/resourcequotas/": {
                # Make the POST fail so we can test PUT
                "post": {
                    "custom_callback": self.with_status_code(httplib.CONFLICT),
                    # Response is not really empty but it isn't relevant to
                    # the testing
                    "file": None,
                },
            },

            API_PREFIX + "namespaces/default/resourcequotas/pause": {
                "put": {
                    "file": None,
                },

                "delete": {
                    "file": None,  # not really empty but not relevant
                },
            },
        }

    @staticmethod
    def process_authorize(key, content):
        """Extract the Location header from a captured authorize response."""
        match = re.findall("[Ll]ocation: (.+)", content.decode("utf-8"))
        headers = {
            "location": match[0],
        }
        logger.debug("headers: %s", headers)
        return {
            "headers": headers
        }

    @staticmethod
    def with_status_code(status_code):
        """Return a callback forcing a response to the given status code."""
        def custom_func(key, content):
            return {
                "content": content,
                "status_code": status_code,
            }

        return custom_func

    def get_definition_for(self, key):
        """
        Returns key and value associated with given key in DEFINITION dict.

        This means that either key is an actual dict key in DEFINITION or it
        is member of a tuple that serves as a dict key in DEFINITION.
        """
        try:
            # Try a direct look-up
            return key, self.DEFINITION[key]
        except KeyError:
            # Try all the tuples
            for k, v in self.DEFINITION.items():
                if isinstance(k, tuple) and key in k:
                    return k, v

            raise ValueError("Can't find '%s' in url mapping definition" % key)

    @staticmethod
    def response(status_code=200, content=b'', headers=None):
        # non-streamed responses are decoded up front
        return HttpResponse(status_code, headers or {}, content.decode("utf-8"))

    def request(self, url, method, stream=None, *args, **kwargs):
        """Dispatch a fake request: resolve path+query to a canned response."""
        parsed_url = urlparse.urlparse(url)
        # fragment = parsed_url.fragment
        # parsed_fragment = urlparse.parse_qs(fragment)
        url_path = parsed_url.path
        if parsed_url.query:
            url_path += '?' + parsed_url.query
        logger.info("URL path is '%s'", url_path)
        kwargs = self.response_mapping.response_mapping(url_path, method)

        if stream:
            return StreamingResponse(**kwargs)
        else:
            return self.response(**kwargs)

    def get(self, url, *args, **kwargs):
        return self.request(url, "get", *args, **kwargs)

    def post(self, url, *args, **kwargs):
        return self.request(url, "post", *args, **kwargs)

    def put(self, url, *args, **kwargs):
        return self.request(url, "put", *args, **kwargs)

    def delete(self, url, *args, **kwargs):
        return self.request(url, "delete", *args, **kwargs)
# Parametrized so each test runs against every supported mock API version.
@pytest.fixture(params=["0.5.4", "1.0.4"])
def openshift(request):
    """Openshift client whose HTTP connection serves canned responses."""
    os_inst = Openshift(OAPI_PREFIX, API_VER, "/oauth/authorize",
                        k8s_api_url=API_PREFIX)
    # swap in the in-memory fake connection for the selected version
    os_inst._con = Connection(request.param)
    return os_inst
@pytest.fixture
def osbs(openshift):
    """OSBS API object configured from a throw-away config file and wired to
    the mocked openshift connection."""
    config_text = """
[general]
build_json_dir = {build_json_dir}
[default]
openshift_url = /
registry_uri = registry.example.com
sources_command = fedpkg sources
vendor = Example, Inc.
build_host = localhost
authoritative_registry = registry.example.com
distribution_scope = authoritative-source-only
koji_root = http://koji.example.com/kojiroot
koji_hub = http://koji.example.com/kojihub
build_type = simple
use_auth = false
""".format(build_json_dir="inputs")
    with NamedTemporaryFile(mode="wt") as fp:
        fp.write(config_text)
        fp.flush()
        dummy_config = Configuration(fp.name)
        instance = OSBS(dummy_config, dummy_config)
        instance.os = openshift
        return instance
@pytest.fixture
def osbs106(openshift):
    """Like the ``osbs`` fixture but requiring OpenShift 1.0.6."""
    config_text = """
[general]
build_json_dir = {build_json_dir}
openshift_required_version = 1.0.6
[default]
openshift_url = /
registry_uri = registry.example.com
sources_command = fedpkg sources
vendor = Example, Inc.
build_host = localhost
authoritative_registry = registry.example.com
distribution_scope = authoritative-source-only
koji_root = http://koji.example.com/kojiroot
koji_hub = http://koji.example.com/kojihub
build_type = simple
use_auth = false
""".format(build_json_dir="inputs")
    with NamedTemporaryFile(mode="wt") as fp:
        fp.write(config_text)
        fp.flush()
        dummy_config = Configuration(fp.name)
        instance = OSBS(dummy_config, dummy_config)
        instance.os = openshift
        return instance
class ResponseMapping(object):
    """Resolves a (url_path, method) pair to canned-response keyword args.

    ``lookup`` is a callable returning ``(key, definition)`` for a URL path;
    fixture payloads are read from ``mock_jsons/<version>/``.
    """

    def __init__(self, version, lookup):
        self.version = version
        self.lookup = lookup

    def get_response_content(self, file_name):
        """Read a fixture file for this version and return it as bytes."""
        base_dir = os.path.dirname(inspect.getfile(ResponseMapping))
        json_path = os.path.join(base_dir, "mock_jsons", self.version, file_name)
        logger.debug("File: %s", json_path)
        with open(json_path, "r") as fd:
            return fd.read().encode("utf-8")

    def response_mapping(self, url_path, method):
        """Return kwargs describing the canned response for a request."""
        key, entry = self.lookup(url_path)
        spec = entry[method]
        file_name = spec["file"]
        logger.debug("API response content: %s", file_name)
        content = b'' if file_name is None else self.get_response_content(file_name)
        callback = spec.get("custom_callback", None)
        if callback:
            logger.debug("Custom API callback: %s", callback)
            return callback(key, content)
        return {"content": content}
| |
#
# Copyright (C) 2001-2017 greg Landrum and Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
from collections import abc # this won't work in python2, but we don't support that any more
from rdkit import Chem
from rdkit.Chem import rdMolDescriptors as _rdMolDescriptors
from rdkit.Chem import rdPartialCharges, rdMolDescriptors
import rdkit.Chem.ChemUtils.DescriptorUtilities as _du
from rdkit.Chem.EState.EState import (MaxEStateIndex, MinEStateIndex, MaxAbsEStateIndex,
MinAbsEStateIndex)
from rdkit.Chem.QED import qed
def _isCallable(thing):
return isinstance(thing, abc.Callable) or \
hasattr(thing, '__call__')
# Module-level registry of (name, function) descriptor pairs; rebuilt by
# _setupDescriptors().
_descList = []


def _setupDescriptors(namespace):
    """Populate *namespace* and the global descriptor list.

    Collects every public callable already present in *namespace*, then
    injects the descriptor callables from the standard descriptor modules
    into *namespace* (skipping names that shadow rdkit.Chem callables and
    the ``py``-prefixed python reference implementations).
    """
    global _descList, descList
    from rdkit.Chem import GraphDescriptors, MolSurf, Lipinski, Fragments, Crippen, Descriptors3D
    from rdkit.Chem.EState import EState_VSA
    _descList.clear()

    mods = [GraphDescriptors, MolSurf, EState_VSA, Lipinski, Crippen, Fragments]

    otherMods = [Chem]

    # public callables already defined in this module
    for nm, thing in tuple(namespace.items()):
        if nm[0] != '_' and _isCallable(thing):
            _descList.append((nm, thing))

    # names from rdkit.Chem that descriptor names must not shadow
    others = []
    for mod in otherMods:
        tmp = dir(mod)
        for name in tmp:
            if name[0] != '_':
                thing = getattr(mod, name)
                if _isCallable(thing):
                    others.append(name)

    for mod in mods:
        tmp = dir(mod)

        for name in tmp:
            if name[0] != '_' and name[-1] != '_' and name not in others:
                # filter out python reference implementations:
                if name[:2] == 'py' and name[2:] in tmp:
                    continue
                if name == 'print_function':
                    continue
                thing = getattr(mod, name)
                if _isCallable(thing):
                    namespace[name] = thing
                    _descList.append((name, thing))
    descList = _descList
MolWt = lambda *x, **y: _rdMolDescriptors._CalcMolWt(*x, **y)
MolWt.version = _rdMolDescriptors._CalcMolWt_version
MolWt.__doc__ = """The average molecular weight of the molecule
>>> MolWt(Chem.MolFromSmiles('CC'))
30.07
>>> MolWt(Chem.MolFromSmiles('[NH4+].[Cl-]'))
53.49...
"""
def HeavyAtomMolWt(x):
    # delegates to MolWt with heavyAtomsOnly enabled
    return MolWt(x, True)


HeavyAtomMolWt.__doc__ = """The average molecular weight of the molecule ignoring hydrogens

>>> HeavyAtomMolWt(Chem.MolFromSmiles('CC'))
24.02...
>>> HeavyAtomMolWt(Chem.MolFromSmiles('[NH4+].[Cl-]'))
49.46

"""
HeavyAtomMolWt.version = "1.0.0"
ExactMolWt = lambda *x, **y: _rdMolDescriptors.CalcExactMolWt(*x, **y)
ExactMolWt.version = _rdMolDescriptors._CalcExactMolWt_version
ExactMolWt.__doc__ = """The exact molecular weight of the molecule
>>> ExactMolWt(Chem.MolFromSmiles('CC'))
30.04...
>>> ExactMolWt(Chem.MolFromSmiles('[13CH3]C'))
31.05...
"""
def NumValenceElectrons(mol):
    """ The number of valence electrons the molecule has

    >>> NumValenceElectrons(Chem.MolFromSmiles('CC'))
    14
    >>> NumValenceElectrons(Chem.MolFromSmiles('C(=O)O'))
    18
    >>> NumValenceElectrons(Chem.MolFromSmiles('C(=O)[O-]'))
    18
    >>> NumValenceElectrons(Chem.MolFromSmiles('C(=O)'))
    12
    """
    tbl = Chem.GetPeriodicTable()
    total = 0
    # outer-shell electrons, corrected for formal charge, plus implicit Hs
    for atom in mol.GetAtoms():
        total += (tbl.GetNOuterElecs(atom.GetAtomicNum())
                  - atom.GetFormalCharge() + atom.GetTotalNumHs())
    return total


NumValenceElectrons.version = "1.1.0"
def NumRadicalElectrons(mol):
    """ The number of radical electrons the molecule has
      (says nothing about spin state)

    >>> NumRadicalElectrons(Chem.MolFromSmiles('CC'))
    0
    >>> NumRadicalElectrons(Chem.MolFromSmiles('C[CH3]'))
    0
    >>> NumRadicalElectrons(Chem.MolFromSmiles('C[CH2]'))
    1
    >>> NumRadicalElectrons(Chem.MolFromSmiles('C[CH]'))
    2
    >>> NumRadicalElectrons(Chem.MolFromSmiles('C[C]'))
    3
    """
    total = 0
    for atom in mol.GetAtoms():
        total += atom.GetNumRadicalElectrons()
    return total


NumRadicalElectrons.version = "1.1.0"
def _ChargeDescriptors(mol, force=False):
if not force and hasattr(mol, '_chargeDescriptors'):
return mol._chargeDescriptors
chgs = rdPartialCharges.ComputeGasteigerCharges(mol)
minChg = 500.
maxChg = -500.
for at in mol.GetAtoms():
chg = float(at.GetProp('_GasteigerCharge'))
minChg = min(chg, minChg)
maxChg = max(chg, maxChg)
res = (minChg, maxChg)
mol._chargeDescriptors = res
return res
def MaxPartialCharge(mol, force=False):
    """Largest Gasteiger partial charge over all atoms of *mol*."""
    return _ChargeDescriptors(mol, force)[1]


MaxPartialCharge.version = "1.0.0"
def MinPartialCharge(mol, force=False):
    """Smallest Gasteiger partial charge over all atoms of *mol*."""
    return _ChargeDescriptors(mol, force)[0]


MinPartialCharge.version = "1.0.0"
def MaxAbsPartialCharge(mol, force=False):
    """Largest absolute Gasteiger partial charge of *mol*."""
    return max(abs(v) for v in _ChargeDescriptors(mol, force))


MaxAbsPartialCharge.version = "1.0.0"
def MinAbsPartialCharge(mol, force=False):
    """Smallest absolute Gasteiger partial charge of *mol*."""
    return min(abs(v) for v in _ChargeDescriptors(mol, force))


MinAbsPartialCharge.version = "1.0.0"
def _FingerprintDensity(mol, func, *args, **kwargs):
fp = func(*((mol, ) + args), **kwargs)
if hasattr(fp, 'GetNumOnBits'):
val = fp.GetNumOnBits()
else:
val = len(fp.GetNonzeroElements())
num_heavy_atoms = mol.GetNumHeavyAtoms()
if num_heavy_atoms == 0:
return 0
return float(val) / num_heavy_atoms
def FpDensityMorgan1(x):
    """Morgan fingerprint (radius 1) bit density per heavy atom."""
    return _FingerprintDensity(x, _rdMolDescriptors.GetMorganFingerprint, 1)


def FpDensityMorgan2(x):
    """Morgan fingerprint (radius 2) bit density per heavy atom."""
    return _FingerprintDensity(x, _rdMolDescriptors.GetMorganFingerprint, 2)


def FpDensityMorgan3(x):
    """Morgan fingerprint (radius 3) bit density per heavy atom."""
    return _FingerprintDensity(x, _rdMolDescriptors.GetMorganFingerprint, 3)


# attach version metadata to the three density descriptors
_du.setDescriptorVersion('1.0.0')(FpDensityMorgan1)
_du.setDescriptorVersion('1.0.0')(FpDensityMorgan2)
_du.setDescriptorVersion('1.0.0')(FpDensityMorgan3)
# BCUT2D is only present in builds of the RDKit that include it; register the
# eight BCUT descriptors only when available.
if hasattr(rdMolDescriptors, 'BCUT2D'):
    names = [
        "BCUT2D_%s" % s
        for s in ('MWHI', "MWLOW", "CHGHI", "CHGLO", "LOGPHI", "LOGPLOW", "MRHI", "MRLOW")
    ]
    _du.VectorDescriptorWrapper(_rdMolDescriptors.BCUT2D, names=names, version="1.0.0",
                                namespace=locals())

# build the module-level descriptor registry from everything defined above
_setupDescriptors(locals())
# AUTOCORR2D descriptors are opt-in: they are wrapped here but only added to
# the default descriptor lists when setupAUTOCorrDescriptors() is called.
if hasattr(rdMolDescriptors, 'CalcAUTOCORR2D'):
    names = ["AUTOCORR2D_%s" % str(i + 1) for i in range(192)]
    autocorr = _du.VectorDescriptorWrapper(_rdMolDescriptors.CalcAUTOCORR2D, names=names,
                                           version="1.0.0", namespace=locals())

    def setupAUTOCorrDescriptors():
        """Adds AUTOCORR descriptors to the default descriptor lists"""
        _setupDescriptors(namespace=autocorr.namespace)
class PropertyFunctor(rdMolDescriptors.PythonPropertyFunctor):
    """Creates a python based property function that can be added to the
    global property list.  To use, subclass this class and override the
    __call__ method.  Then create an instance and add it to the
    registry.  The __call__ method should return a numeric value.

    Example:

      class NumAtoms(Descriptors.PropertyFunctor):
        def __init__(self):
          Descriptors.PropertyFunctor.__init__(self, "NumAtoms", "1.0.0")
        def __call__(self, mol):
          return mol.GetNumAtoms()

      numAtoms = NumAtoms()
      rdMolDescriptors.Properties.RegisterProperty(numAtoms)
    """

    def __init__(self, name, version):
        # NOTE(review): the base class receives the functor instance itself as
        # its first argument — confirm against the PythonPropertyFunctor docs.
        rdMolDescriptors.PythonPropertyFunctor.__init__(self, self, name, version)

    def __call__(self, mol):
        """Subclasses must override this and return a numeric value."""
        raise NotImplementedError("Please implement the __call__ method")
# ------------------------------------
#
#  doctest boilerplate
#
def _runDoctests(verbose=None):  # pragma: nocover
    """Run this module's doctests and exit with the number of failures."""
    import sys
    import doctest
    failed, _ = doctest.testmod(optionflags=doctest.ELLIPSIS, verbose=verbose)
    sys.exit(failed)


if __name__ == '__main__':  # pragma: nocover
    _runDoctests()
| |
#!/usr/bin/python
import sys
import pickle
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
from tester import dump_classifier_and_data
### Task 1: Select what features you'll use.
### features_list is a list of strings, each of which is a feature name.
### The first feature must be "poi".
### Include all quantitative features. In addition, 'std_from_poi' and
### 'std_to_poi' are standardized features (see details below).
features_list = ['poi', 'salary',
                 'bonus',
                 'expenses',
                 'exercised_stock_options', 'other',
                 'restricted_stock', 'shared_receipt_with_poi',
                 'std_from_poi', 'std_to_poi']
### Load the dictionary containing the dataset
# NOTE(review): text-mode "r" works for this pickle under Python 2 (which
# this script targets, given its print statements); Python 3 needs "rb".
with open("final_project_dataset.pkl", "r") as data_file:
    data_dict = pickle.load(data_file)
### Task 2: Remove outliers
### Task 3: Create new feature(s)
### Store to my_dataset for easy export below.
# Add new features: std_from_poi and std_to_poi by dividing the message
# to/from poi by the total sent or received messages, respectively.
# Drop the spreadsheet summary row ('TOTAL') and two entries that are not
# usable person records.
data_dict.pop('TOTAL')
data_dict.pop('THE TRAVEL AGENCY IN THE PARK')
data_dict.pop('LOCKHART EUGENE E')
for key in data_dict:
    # Missing values in this dataset are the string 'NaN', so the int type
    # checks double as missing-value filters.
    if (type(data_dict[key]['from_poi_to_this_person']) == int and
            type(data_dict[key]['from_messages']) == int):
        # float() keeps this a true ratio: under Python 2, int/int floors
        # and would zero out nearly every standardized value.
        data_dict[key]['std_from_poi'] = \
            (float(data_dict[key]['from_poi_to_this_person']) /
             data_dict[key]['from_messages'])
    else:
        data_dict[key]['std_from_poi'] = 0
    if (type(data_dict[key]['from_this_person_to_poi']) == int and
            type(data_dict[key]['to_messages']) == int):
        data_dict[key]['std_to_poi'] = \
            (float(data_dict[key]['from_this_person_to_poi']) /
             data_dict[key]['to_messages'])
    else:
        data_dict[key]['std_to_poi'] = 0
# keep the cleaned dict under the name expected by the export helpers
my_dataset = data_dict

### Extract features and labels from dataset for local testing
data = featureFormat(my_dataset, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)
### Task 4: Try a variety of classifiers
### Please name your classifier clf for easy export below.
### Note that if you want to do PCA or other multi-stage operations,
### you'll need to use Pipelines. For more info:
### http://scikit-learn.org/stable/modules/pipeline.html
# Provided to give you a starting point. Try a variety of classifiers.
# The followings are the major steps in the analysis:
# A. Visualize the data using dimensionality reduction PCA and LDA to gain
# further insight into the data
# B. Algorithm selection using repeated nested cross validation to choose
# the algorithm that has highest accuracy
# C. Model selection using repeated cross validation to identify the best
# hyperparameter values
# The following classification algorithms are used:
# 1. Logistic Regression
# 2. Random Forest Classifier
# 3. KNN Classifier
# 4. Support Vector Classifier
# 5. Neural Network: Multi-layer Perceptron Classifier
# NOTE(review): the two mid-file `from __future__ import division` lines were
# removed — __future__ imports are only legal at the very top of a module and
# raise a SyntaxError anywhere else.  If true division is wanted under
# Python 2, add that import at the top of this file instead.
from IPython.core.display import display
import numpy as np
import seaborn as sns
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.model_selection import StratifiedKFold
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import cross_val_score
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import MinMaxScaler
from sklearn.decomposition import PCA
from time import time
# For simplicity, rename features as X and labels as y
X = features
y = labels
### First, explore the dataset.
### Identify the total number of data points.
print 'Total number of data points:',np.shape(X)[0]
print 'Total number of features:', np.shape(X)[1]
# Scale each feature to [0, 1] before PCA so that no feature dominates
# purely because of its units.
X_std = MinMaxScaler().fit_transform(X)
pca = PCA()
X_pca = pca.fit_transform(X_std)
print 'PCA explained_variance_ratio_', pca.explained_variance_ratio_
#This section is adapted from Udacity Forum 'What are the testing
#features when using SelectKBest?'
# Univariate feature selection: keep the 9 features with the best chi2
# scores against the poi label.
K_best = SelectKBest(chi2,k=9)
features_kbest = K_best.fit_transform(X_std,y)
print features_kbest.shape
feature_scores = ['%.3f' %elem for elem in K_best.scores_]
feature_scores_pvalues = ['%.3f' %elem for elem in K_best.pvalues_]
# features_list[i+1]: offset by one to skip the leading 'poi' label column.
features_selected_tuple = [(features_list[i+1],feature_scores[i],feature_scores_pvalues[i])
                           for i in K_best.get_support(indices=True)]
# sort by chi2 score, best first
features_selected_tuple = sorted(features_selected_tuple,key=lambda feature: float(feature[1]), reverse=True)
sorted_scores = []
sorted_p_value = []
sorted_feature = []
for feature_tuple in features_selected_tuple:
    sorted_feature.append(feature_tuple[0])
    sorted_scores.append(feature_tuple[1])
    sorted_p_value.append(feature_tuple[2])
    print(feature_tuple)
df = pd.DataFrame(features_selected_tuple).set_index(0)
# seaborn, pandas and pyplot are already imported earlier in this script, so
# the duplicate imports that were here have been removed.
# Pair plot of the scaled features: histograms on the diagonal, pairwise
# scatter plots elsewhere, for a quick visual scan of the data.
df = pd.DataFrame(X_std)
pg = sns.PairGrid(df)
pg.map_diag(plt.hist)
pg.map_offdiag(plt.scatter)
plt.show()
### Task 5: Tune your classifier to achieve better than .3 precision and recall
### using our testing script. Check the tester.py script in the final project
### folder for details on the evaluation method, especially the test_classifier
### function. Because of the small size of the dataset, the script uses
### stratified shuffle split cross validation. For more info:
### http://scikit-learn.org/stable/modules/generated/sklearn.cross_validation.StratifiedShuffleSplit.html
# display labels for the classifiers compared below
clf_labels = \
    ['Logistic Regression','KNN','Random Forest','SVC','Kernel SVC','MLP']
#Set the number of repeats of the cross validation
N_outer = 5
N_inner = 5
#Logistic Regression
scores=[]
clf_lr = LogisticRegression(penalty='l2')
pipe_lr = Pipeline([['sc',MinMaxScaler()],
['kbest',SelectKBest(chi2,k=2)],
['clf',clf_lr]])
params_lr = {'clf__C':10.0**np.arange(-4,4)}
t0 = time()
for i in range(N_outer):
k_fold_outer = StratifiedKFold(n_splits=5,shuffle=True,random_state=i)
for j in range(N_inner):
k_fold_inner = StratifiedKFold(n_splits=5,shuffle=True,random_state=j)
gs_lr = GridSearchCV(estimator=pipe_lr,param_grid=params_lr,
cv=k_fold_inner,scoring='f1')
scores.append(cross_val_score(gs_lr,X,y,cv=k_fold_outer,
scoring='f1'))
print 'CV F1 Score of Logistic Regression: %.3f +/- %.3f' %(np.mean(scores),np.std(scores))
print 'Complete in %.1f sec' %(time()-t0)
t0 = time()
for i in range(N_outer):
k_fold_outer = StratifiedKFold(n_splits=5,shuffle=True,random_state=i)
for j in range(N_inner):
k_fold_inner = StratifiedKFold(n_splits=5,shuffle=True,random_state=j)
gs_lr = GridSearchCV(estimator=pipe_lr,param_grid=params_lr,
cv=k_fold_inner,scoring='precision')
scores.append(cross_val_score(gs_lr,X,y,cv=k_fold_outer,
scoring='precision'))
print 'CV Precision Score of Logistic Regression: %.3f +/- %.3f' %(np.mean(scores),np.std(scores))
print 'Complete in %.1f sec' %(time()-t0)
t0 = time()
for i in range(N_outer):
k_fold_outer = StratifiedKFold(n_splits=5,shuffle=True,random_state=i)
for j in range(N_inner):
k_fold_inner = StratifiedKFold(n_splits=5,shuffle=True,random_state=j)
gs_lr = GridSearchCV(estimator=pipe_lr,param_grid=params_lr,
cv=k_fold_inner,scoring='recall')
scores.append(cross_val_score(gs_lr,X,y,cv=k_fold_outer,
scoring='recall'))
print 'CV Recall Score of Logistic Regression: %.3f +/- %.3f' %(np.mean(scores),np.std(scores))
print 'Complete in %.1f sec' %(time()-t0)
#Set the number of repeats of the cross validation
N_outer = 5
N_inner = 5
#Random Forest Classifier
clf_rf = RandomForestClassifier(random_state=42)
pipe_rf = Pipeline([['sc', MinMaxScaler()],
                    ['kbest', SelectKBest(chi2, k=2)],
                    ['clf', clf_rf]])
params_rf = {'clf__n_estimators': np.arange(1, 11)}
# Nested cross-validation over n_estimators.
# BUGFIX: `scores` is reset per metric (results previously accumulated
# across f1/precision/recall) and `t0` is re-armed per metric (the
# precision and recall loops previously reused the f1 start time, so the
# reported durations were cumulative).
for metric, label in [('f1', 'F1'), ('precision', 'Precision'),
                      ('recall', 'Recall')]:
    scores = []
    t0 = time()
    for i in range(N_outer):
        fold_outer = StratifiedKFold(n_splits=5, shuffle=True, random_state=i)
        for j in range(N_inner):
            fold_inner = StratifiedKFold(n_splits=5, shuffle=True,
                                         random_state=j)
            gs_rf = GridSearchCV(estimator=pipe_rf, param_grid=params_rf,
                                 cv=fold_inner, scoring=metric)
            scores.append(cross_val_score(gs_rf, X, y, cv=fold_outer,
                                          scoring=metric))
    print ('CV %s Score of Random Forest Classifier: %.3f +/- %.3f'
           % (label, np.mean(scores), np.std(scores)))
    print ('Complete in %.1f sec' % (time() - t0))
#Set the number of repeats of the cross validation
N_outer = 5
N_inner = 5
#KNN Classifier
clf_knn = KNeighborsClassifier()
pipe_knn = Pipeline([['sc', MinMaxScaler()],
                     ['kbest', SelectKBest(chi2, k=2)],
                     ['clf', clf_knn]])
params_knn = {'clf__n_neighbors': np.arange(1, 6)}
# Nested cross-validation over n_neighbors.
# BUGFIX: `scores` is now reset for every metric; previously the f1
# results leaked into the precision and recall summaries.
for metric, label in [('f1', 'F1'), ('precision', 'Precision'),
                      ('recall', 'Recall')]:
    scores = []
    t0 = time()
    for i in range(N_outer):
        fold_outer = StratifiedKFold(n_splits=5, shuffle=True, random_state=i)
        for j in range(N_inner):
            fold_inner = StratifiedKFold(n_splits=5, shuffle=True,
                                         random_state=j)
            gs_knn = GridSearchCV(estimator=pipe_knn, param_grid=params_knn,
                                  cv=fold_inner, scoring=metric)
            scores.append(cross_val_score(gs_knn, X, y, cv=fold_outer,
                                          scoring=metric))
    print ('CV %s Score of KNN Classifier: %.3f +/- %.3f'
           % (label, np.mean(scores), np.std(scores)))
    print ('Complete in %.1f sec' % (time() - t0))
#Set the number of repeats of the cross validation
N_outer = 5
N_inner = 5
# NOTE(review): SVC() defaults to an RBF kernel, so the "Linear SVC" labels
# below are misleading — confirm whether SVC(kernel='linear') was intended.
# The printed text is left unchanged to keep the report comparable.
clf_svc = SVC()
pipe_svc = Pipeline([['sc', MinMaxScaler()],
                     ['kbest', SelectKBest(chi2, k=2)],
                     ['clf', clf_svc]])
params_svc = {'clf__C': 10.0**np.arange(-4, 4)}
# Nested cross-validation over C.
# BUGFIX: `scores` is now reset for every metric; previously the f1
# results leaked into the precision and recall summaries.
for metric, label in [('f1', 'F1'), ('precision', 'Precision'),
                      ('recall', 'Recall')]:
    scores = []
    t0 = time()
    for i in range(N_outer):
        fold_outer = StratifiedKFold(n_splits=5, shuffle=True, random_state=i)
        for j in range(N_inner):
            fold_inner = StratifiedKFold(n_splits=5, shuffle=True,
                                         random_state=j)
            gs_svc = GridSearchCV(estimator=pipe_svc, param_grid=params_svc,
                                  cv=fold_inner, scoring=metric)
            scores.append(cross_val_score(gs_svc, X, y, cv=fold_outer,
                                          scoring=metric))
    print ('CV %s Score of Linear SVC: %.3f +/- %.3f'
           % (label, np.mean(scores), np.std(scores)))
    print ('Complete in %.1f sec' % (time() - t0))
#Set the number of repeats of the cross validation
N_outer = 5
N_inner = 5
#Kernel SVC
clf_ksvc = SVC(kernel='rbf')
pipe_ksvc = Pipeline([['sc', MinMaxScaler()],
                      ['kbest', SelectKBest(chi2, k=2)],
                      ['clf', clf_ksvc]])
params_ksvc = {'clf__C': 10.0**np.arange(-4, 4),
               'clf__gamma': 10.0**np.arange(-4, 4)}
# Nested cross-validation over (C, gamma).
# BUGFIX: `scores` is reset per metric (results previously accumulated
# across f1/precision/recall) and `t0` is re-armed per metric (the
# precision and recall loops previously reported cumulative durations).
for metric, label in [('f1', 'F1'), ('precision', 'Precision'),
                      ('recall', 'Recall')]:
    scores = []
    t0 = time()
    for i in range(N_outer):
        fold_outer = StratifiedKFold(n_splits=5, shuffle=True, random_state=i)
        for j in range(N_inner):
            fold_inner = StratifiedKFold(n_splits=5, shuffle=True,
                                         random_state=j)
            gs_ksvc = GridSearchCV(estimator=pipe_ksvc, param_grid=params_ksvc,
                                   cv=fold_inner, scoring=metric)
            scores.append(cross_val_score(gs_ksvc, X, y, cv=fold_outer,
                                          scoring=metric))
    print ('CV %s Score of Kernel SVC: %.3f +/- %.3f'
           % (label, np.mean(scores), np.std(scores)))
    print ('Complete in %.1f sec' % (time() - t0))
#Set the number of repeats of the cross validation
N_outer = 5
#Naive Bayes (no hyper-parameters, so no inner grid-search loop).
clf_nb = GaussianNB()
pipe_nb = Pipeline([['sc', MinMaxScaler()],
                    ['kbest', SelectKBest(chi2, k=2)],
                    ['clf', clf_nb]])
# BUGFIX: `scores` is now reset for every metric; previously the f1
# results leaked into the precision and recall summaries.
for metric, label in [('f1', 'F1'), ('precision', 'Precision'),
                      ('recall', 'Recall')]:
    scores = []
    t0 = time()
    for i in range(N_outer):
        fold_outer = StratifiedKFold(n_splits=5, shuffle=True, random_state=i)
        scores.append(cross_val_score(pipe_nb, X, y, cv=fold_outer,
                                      scoring=metric))
    print ('CV %s Score of Naive Bayes: %.3f +/- %.3f'
           % (label, np.mean(scores), np.std(scores)))
    print ('Complete in %.1f sec' % (time() - t0))
#Set the number of repeats of the cross validation
N_outer = 5
N_inner = 5
#MLP (the original comment wrongly said "Kernel SVC")
clf_mlp = MLPClassifier(solver='lbfgs')
pipe_mlp = Pipeline([['sc', MinMaxScaler()],
                     ['kbest', SelectKBest(chi2, k=2)],
                     ['clf', clf_mlp]])
params_mlp = {'clf__activation': ['logistic', 'relu'],
              'clf__alpha': 10.0**np.arange(-4, 4)}
# Nested cross-validation over (activation, alpha).
# BUGFIX: `scores` is now reset for every metric; previously the f1
# results leaked into the precision and recall summaries. The precision
# line also read "CV Precision of MLP" — normalized to match the others.
for metric, label in [('f1', 'F1'), ('precision', 'Precision'),
                      ('recall', 'Recall')]:
    scores = []
    t0 = time()
    for i in range(N_outer):
        fold_outer = StratifiedKFold(n_splits=5, shuffle=True, random_state=i)
        for j in range(N_inner):
            fold_inner = StratifiedKFold(n_splits=5, shuffle=True,
                                         random_state=j)
            gs_mlp = GridSearchCV(estimator=pipe_mlp, param_grid=params_mlp,
                                  cv=fold_inner, scoring=metric)
            scores.append(cross_val_score(gs_mlp, X, y, cv=fold_outer,
                                          scoring=metric))
    print ('CV %s Score of MLP: %.3f +/- %.3f'
           % (label, np.mean(scores), np.std(scores)))
    print ('Complete in %.1f sec' % (time() - t0))
#Set the number of repeats of the cross validation
N_outer = 5
N_inner = 5
#AdaBoost
clf_ada = AdaBoostClassifier(random_state=42)
pipe_ada = Pipeline([['sc', MinMaxScaler()],
                     ['kbest', SelectKBest(chi2, k=2)],
                     ['clf', clf_ada]])
params_ada = {'clf__n_estimators': np.arange(1, 11)*10}
# Nested cross-validation over n_estimators.
# BUGFIX: `scores` is now reset for every metric (results previously
# accumulated across metrics), and the precision/recall summaries were
# both mislabeled "CV F1 Score of AdaBoost" — labels corrected.
for metric, label in [('f1', 'F1'), ('precision', 'Precision'),
                      ('recall', 'Recall')]:
    scores = []
    t0 = time()
    for i in range(N_outer):
        fold_outer = StratifiedKFold(n_splits=5, shuffle=True, random_state=i)
        for j in range(N_inner):
            fold_inner = StratifiedKFold(n_splits=5, shuffle=True,
                                         random_state=j)
            gs_ada = GridSearchCV(estimator=pipe_ada, param_grid=params_ada,
                                  cv=fold_inner, scoring=metric)
            scores.append(cross_val_score(gs_ada, X, y, cv=fold_outer,
                                          scoring=metric))
    print ('CV %s Score of AdaBoost: %.3f +/- %.3f'
           % (label, np.mean(scores), np.std(scores)))
    print ('Complete in %.1f sec' % (time() - t0))
# Model selection for KNN based on F1 Score
from IPython.core.display import display
n_reps = 1000
best_params = []
clf_knn = KNeighborsClassifier()
pipe_knn = Pipeline([['sc', MinMaxScaler()],
                     ['kbest', SelectKBest(chi2, k=2)],
                     ['clf', clf_knn]])
params_knn = {'clf__n_neighbors': np.arange(1, 6)}
t0 = time()
# Repeat the grid search with a differently-seeded 5-fold split each time,
# recording the winning n_neighbors and its F1 score.
for rep in np.arange(n_reps):
    splitter = StratifiedKFold(n_splits=5, shuffle=True, random_state=rep)
    search = GridSearchCV(estimator=pipe_knn, param_grid=params_knn,
                          cv=splitter, scoring='f1')
    search = search.fit(X, y)
    record = search.best_params_
    record.update({'Best Score': search.best_score_})
    best_params.append(record)
#DataFrame summarizing average of best scores, frequency for each best parameter value
best_params_df = (pd.DataFrame(best_params)
                    .rename(columns={'clf__n_neighbors': 'N Neighbors'})
                    .groupby('N Neighbors')['Best Score']
                    .describe())
best_params_df = np.round(best_params_df, decimals=2).sort_values(
    ['mean', 'count'], axis=0, ascending=[False, False])
display(best_params_df)
print(time() - t0)
#Model selection for KNN based on precision
from IPython.core.display import display
n_reps = 1000
best_params = []
clf_knn = KNeighborsClassifier()
pipe_knn = Pipeline([['sc', MinMaxScaler()],
                     ['kbest', SelectKBest(chi2, k=2)],
                     ['clf', clf_knn]])
params_knn = {'clf__n_neighbors': np.arange(1, 6)}
t0 = time()
# Repeat the grid search with a differently-seeded 5-fold split each time,
# recording the winning n_neighbors and its precision score.
for rep in np.arange(n_reps):
    splitter = StratifiedKFold(n_splits=5, shuffle=True, random_state=rep)
    search = GridSearchCV(estimator=pipe_knn, param_grid=params_knn,
                          cv=splitter, scoring='precision')
    search = search.fit(X, y)
    record = search.best_params_
    record.update({'Best Score': search.best_score_})
    best_params.append(record)
#DataFrame summarizing average of best scores, frequency for each best parameter value
best_params_df = (pd.DataFrame(best_params)
                    .rename(columns={'clf__n_neighbors': 'N Neighbors'})
                    .groupby('N Neighbors')['Best Score']
                    .describe())
best_params_df = np.round(best_params_df, decimals=2).sort_values(
    ['mean', 'count'], axis=0, ascending=[False, False])
display(best_params_df)
print(time() - t0)
#Model selection for KNN based on recall
from IPython.core.display import display
n_reps = 1000
best_params = []
clf_knn = KNeighborsClassifier()
pipe_knn = Pipeline([['sc', MinMaxScaler()],
                     ['kbest', SelectKBest(chi2, k=2)],
                     ['clf', clf_knn]])
params_knn = {'clf__n_neighbors': np.arange(1, 6)}
t0 = time()
# Repeat the grid search with a differently-seeded 5-fold split each time,
# recording the winning n_neighbors and its recall score.
for rep in np.arange(n_reps):
    splitter = StratifiedKFold(n_splits=5, shuffle=True, random_state=rep)
    search = GridSearchCV(estimator=pipe_knn, param_grid=params_knn,
                          cv=splitter, scoring='recall')
    search = search.fit(X, y)
    record = search.best_params_
    record.update({'Best Score': search.best_score_})
    best_params.append(record)
#DataFrame summarizing average of best scores, frequency for each best parameter value
best_params_df = (pd.DataFrame(best_params)
                    .rename(columns={'clf__n_neighbors': 'N Neighbors'})
                    .groupby('N Neighbors')['Best Score']
                    .describe())
best_params_df = np.round(best_params_df, decimals=2).sort_values(
    ['mean', 'count'], axis=0, ascending=[False, False])
display(best_params_df)
print(time() - t0)
#Dummy classifier: uniform-random baseline to compare the real models against.
from sklearn.dummy import DummyClassifier
#Set the number of repeats of the cross validation
N_outer = 5
#Dummy Classifier
clf_dm = DummyClassifier(strategy='uniform')
# BUGFIX: `scores` is now reset for every metric; previously the f1
# results leaked into the precision and recall summaries.
for metric, label in [('f1', 'F1'), ('precision', 'Precision'),
                      ('recall', 'Recall')]:
    scores = []
    t0 = time()
    for i in range(N_outer):
        fold_outer = StratifiedKFold(n_splits=5, shuffle=True, random_state=i)
        scores.append(cross_val_score(clf_dm, X, y, cv=fold_outer,
                                      scoring=metric))
    print ('CV %s Score of Dummy Classifier: %.3f +/- %.3f'
           % (label, np.mean(scores), np.std(scores)))
    print ('Complete in %.1f sec' % (time() - t0))
### Task 6: Dump your classifier, dataset, and features_list so anyone can
### check your results. You do not need to change anything below, but make sure
### that the version of poi_id.py that you submit can be run on its own and
### generates the necessary .pkl files for validating your results.
# Serializes clf, my_dataset and features_list (all defined earlier in this
# script) to pickle files — presumably the ones tester.py reads back; TODO
# confirm the exact filenames against dump_classifier_and_data's source.
dump_classifier_and_data(clf, my_dataset, features_list)
| |
"""
Copyright (c) 2014, Enrique Fernandez
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Willow Garage, Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import rospy
from sensor_msgs.msg import PointCloud2
import sensor_msgs.point_cloud2 as pc2
import numpy as np
class LaserProjection:
    """
    A class to project laser scans.

    This class will project laser scans into point clouds. It caches
    unit vectors between runs (provided the angular resolution of
    your scanner is not changing) to avoid excess computation.

    By default all range values less than the scanner min_range or
    greater than the scanner max_range are removed from the generated
    point cloud, as these are assumed to be invalid.

    If it is important to preserve a mapping between the index of
    range values and points in the cloud, the recommended approach is to
    pre-filter your laser scan message to meet the requirement that all
    ranges are between min and max_range.

    The generated PointClouds have a number of channels which can be enabled
    through the use of ChannelOption.
    - ChannelOption.INTENSITY - Create a channel named "intensities" with the
    intensity of the return for each point.
    - ChannelOption.INDEX - Create a channel named "index" containing the
    index from the original array for each point.
    - ChannelOption.DISTANCE - Create a channel named "distance" containing
    the distance from the laser to each point.
    - ChannelOption.TIMESTAMP - Create a channel named "stamps" containing the
    specific timestamp at which each point was measured.
    """

    # Sentinel values for special range readings (declared for API parity;
    # not referenced inside this class).
    LASER_SCAN_INVALID = -1.0
    LASER_SCAN_MIN_RANGE = -2.0
    LASER_SCAN_MAX_RANGE = -3.0

    class ChannelOption:
        # Bit flags selecting which extra per-point channels to emit;
        # OR them together and pass as `channel_options`.
        NONE = 0x00       # Enable no channels
        INTENSITY = 0x01  # Enable "intensities" channel
        INDEX = 0x02      # Enable "index" channel
        DISTANCE = 0x04   # Enable "distances" channel
        TIMESTAMP = 0x08  # Enable "stamps" channel
        VIEWPOINT = 0x10  # Enable "viewpoint" channel
        DEFAULT = (INTENSITY | INDEX)

    def __init__(self):
        # Angular extents of the last scan seen; used to detect when the
        # cached cos/sin map below must be recomputed.
        self.__angle_min = 0.0
        self.__angle_max = 0.0
        # 2xN array of [cos(angle); sin(angle)] unit vectors, one column
        # per beam. Starts empty so the first scan always computes it.
        self.__cos_sin_map = np.array([[]])

    def projectLaser(self, scan_in,
                     range_cutoff=-1.0, channel_options=ChannelOption.DEFAULT):
        """
        Project a sensor_msgs::LaserScan into a sensor_msgs::PointCloud2.

        Project a single laser scan from a linear array into a 3D
        point cloud. The generated cloud will be in the same frame
        as the original laser scan.

        Keyword arguments:
        scan_in -- The input laser scan.
        range_cutoff -- An additional range cutoff which can be
        applied which is more limiting than max_range in the scan
        (default -1.0, meaning use the scan's own range_max).
        channel_options -- An OR'd set of channels to include.
        """
        return self.__projectLaser(scan_in, range_cutoff, channel_options)

    def __projectLaser(self, scan_in, range_cutoff, channel_options):
        N = len(scan_in.ranges)
        ranges = np.array(scan_in.ranges)

        # Recompute the unit-vector cache only when the scan geometry
        # (beam count or angular extents) has changed.
        if (self.__cos_sin_map.shape[1] != N or
            self.__angle_min != scan_in.angle_min or
            self.__angle_max != scan_in.angle_max):
            rospy.logdebug("No precomputed map given. Computing one.")
            self.__angle_min = scan_in.angle_min
            self.__angle_max = scan_in.angle_max
            angles = scan_in.angle_min + np.arange(N) * scan_in.angle_increment
            self.__cos_sin_map = np.array([np.cos(angles), np.sin(angles)])

        # 2xN array of (x, y) coordinates, one column per beam.
        output = ranges * self.__cos_sin_map

        # Build the field layout: x/y/z first (FLOAT32, 4 bytes each).
        # (BUGFIX: a dead `cloud_out = PointCloud2()` assignment was removed;
        # the cloud is created by pc2.create_cloud at the end.)
        fields = [pc2.PointField() for _ in range(3)]

        fields[0].name = "x"
        fields[0].offset = 0
        fields[0].datatype = pc2.PointField.FLOAT32
        fields[0].count = 1

        fields[1].name = "y"
        fields[1].offset = 4
        fields[1].datatype = pc2.PointField.FLOAT32
        fields[1].count = 1

        fields[2].name = "z"
        fields[2].offset = 8
        fields[2].datatype = pc2.PointField.FLOAT32
        fields[2].count = 1

        # Field indices of the optional channels; -1 means "not enabled".
        idx_intensity = idx_index = idx_distance = idx_timestamp = -1
        idx_vpx = idx_vpy = idx_vpz = -1

        offset = 12  # byte offset after x/y/z

        if (channel_options & self.ChannelOption.INTENSITY and
            len(scan_in.intensities) > 0):
            field_size = len(fields)
            fields.append(pc2.PointField())
            fields[field_size].name = "intensity"
            fields[field_size].datatype = pc2.PointField.FLOAT32
            fields[field_size].offset = offset
            fields[field_size].count = 1
            offset += 4
            idx_intensity = field_size

        if channel_options & self.ChannelOption.INDEX:
            field_size = len(fields)
            fields.append(pc2.PointField())
            fields[field_size].name = "index"
            fields[field_size].datatype = pc2.PointField.INT32
            fields[field_size].offset = offset
            fields[field_size].count = 1
            offset += 4
            idx_index = field_size

        if channel_options & self.ChannelOption.DISTANCE:
            field_size = len(fields)
            fields.append(pc2.PointField())
            fields[field_size].name = "distances"
            fields[field_size].datatype = pc2.PointField.FLOAT32
            fields[field_size].offset = offset
            fields[field_size].count = 1
            offset += 4
            idx_distance = field_size

        if channel_options & self.ChannelOption.TIMESTAMP:
            field_size = len(fields)
            fields.append(pc2.PointField())
            fields[field_size].name = "stamps"
            fields[field_size].datatype = pc2.PointField.FLOAT32
            fields[field_size].offset = offset
            fields[field_size].count = 1
            offset += 4
            idx_timestamp = field_size

        if channel_options & self.ChannelOption.VIEWPOINT:
            field_size = len(fields)
            fields.extend([pc2.PointField() for _ in range(3)])
            fields[field_size].name = "vp_x"
            fields[field_size].datatype = pc2.PointField.FLOAT32
            fields[field_size].offset = offset
            fields[field_size].count = 1
            offset += 4
            idx_vpx = field_size

            field_size += 1
            fields[field_size].name = "vp_y"
            fields[field_size].datatype = pc2.PointField.FLOAT32
            fields[field_size].offset = offset
            fields[field_size].count = 1
            offset += 4
            idx_vpy = field_size

            field_size += 1
            fields[field_size].name = "vp_z"
            fields[field_size].datatype = pc2.PointField.FLOAT32
            fields[field_size].offset = offset
            fields[field_size].count = 1
            offset += 4
            idx_vpz = field_size

        # A negative cutoff means "no extra cutoff": fall back to the
        # scan's own maximum; otherwise never exceed it.
        if range_cutoff < 0:
            range_cutoff = scan_in.range_max
        else:
            range_cutoff = min(range_cutoff, scan_in.range_max)

        points = []
        for i in range(N):
            ri = scan_in.ranges[i]
            # Keep only returns inside [range_min, range_cutoff).
            if ri < range_cutoff and ri >= scan_in.range_min:
                point = output[:, i].tolist()
                point.append(0)  # z is always 0 for a planar scan

                if idx_intensity != -1:
                    point.append(scan_in.intensities[i])

                if idx_index != -1:
                    point.append(i)

                if idx_distance != -1:
                    point.append(scan_in.ranges[i])

                if idx_timestamp != -1:
                    point.append(i * scan_in.time_increment)

                if idx_vpx != -1 and idx_vpy != -1 and idx_vpz != -1:
                    # Viewpoint is the sensor origin.
                    point.extend([0 for _ in range(3)])

                points.append(point)

        cloud_out = pc2.create_cloud(scan_in.header, fields, points)

        return cloud_out
| |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Exports a SavedModel from a Checkpointable Python object."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import os
from tensorflow.core.framework import versions_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import saved_model_pb2
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function as defun
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import meta_graph
from tensorflow.python.framework import ops
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.saved_model import builder_impl
from tensorflow.python.saved_model import constants
from tensorflow.python.saved_model import function_serialization
from tensorflow.python.saved_model import nested_structure_coder
from tensorflow.python.saved_model import revived_types
from tensorflow.python.saved_model import saved_object_graph_pb2
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import signature_def_utils
from tensorflow.python.saved_model import signature_serialization
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.saved_model import utils_impl
from tensorflow.python.training.checkpointable import base
from tensorflow.python.training.checkpointable import graph_view
from tensorflow.python.training.checkpointable import object_identity
from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util
from tensorflow.python.training.saving import functional_saver
from tensorflow.python.util import compat
from tensorflow.python.util.tf_export import tf_export
# dtypes excluded when copying eager tensors into the exported graph —
# presumably because resource/variant tensors have no serializable constant
# value; the consuming code lies outside this chunk (TODO confirm).
_UNCOPIABLE_DTYPES = frozenset((dtypes.resource, dtypes.variant))
# A container for an EagerTensor constant which has been copied to the exported
# Graph.
_CapturedConstant = collections.namedtuple(
    "_CapturedConstant", ["eager_tensor", "graph_tensor"])
class _AugmentedGraphView(graph_view.ObjectGraphView):
  """An extendable graph which also tracks functions attached to objects.

  Extensions through `add_object` appear in the object graph and any
  checkpoints generated from it, even if they are not dependencies of the
  node they were attached to in the saving program. For example a
  `.signatures` attribute is added to exported SavedModel root objects
  without modifying the root object itself.

  Also tracks functions attached to objects in the graph, through the
  caching `list_functions` method. Enumerating functions only through this
  method ensures that we get a consistent view of functions, even if object
  attributes create new functions every time they are accessed.
  """

  def __init__(self, root):
    super(_AugmentedGraphView, self).__init__(root)
    # Object -> (name -> dep)
    self._extra_dependencies = object_identity.ObjectIdentityDictionary()
    self._functions = object_identity.ObjectIdentityDictionary()

  def add_object(self, parent_node, name_in_parent, subgraph_root):
    """Attach an object to `parent_node`, overriding any existing dependency."""
    children = self._extra_dependencies.setdefault(parent_node, {})
    children[name_in_parent] = subgraph_root

  def list_dependencies(self, obj):
    """Overrides a parent method to include `add_object` objects."""
    extras = self._extra_dependencies.get(obj, {})
    emitted = set()
    # Walk the real dependencies first, substituting any extra that shadows
    # an existing name so each name is yielded exactly once.
    for name, dep in super(_AugmentedGraphView, self).list_dependencies(obj):
      emitted.add(name)
      target = extras[name] if name in extras else dep
      yield base.CheckpointableReference(name, target)
    # Then append the extras that did not shadow anything.
    for name, dep in extras.items():
      if name not in emitted:
        yield base.CheckpointableReference(name, dep)

  def list_functions(self, obj):
    """Return (and memoize) the functions `obj` exposes for serialization."""
    funcs = self._functions.get(obj, None)
    if funcs is None:
      funcs = obj._list_functions_for_serialization()  # pylint: disable=protected-access
      self._functions[obj] = funcs
    return funcs
class _SaveableView(object):
  """Provides a frozen view over a checkpointable root.

  This class helps creating a single stable view over an object to save. The
  saving code should access properties and functions via this class and not
  via the original object, as there are cases where an object constructs its
  checkpointable attributes and functions dynamically per call and will yield
  different objects if invoked more than once.

  Changes to the graph, for example adding objects, must happen in
  `checkpoint_view` (an `_AugmentedGraphView`) before the `_SaveableView` is
  constructed. Changes after the `_SaveableView` has been constructed will be
  ignored.
  """

  def __init__(self, checkpoint_view):
    self.checkpoint_view = checkpoint_view
    checkpointable_objects, node_ids, slot_variables = (
        self.checkpoint_view.objects_ids_and_slot_variables())
    # `nodes[i]` is the object with id `i`; `node_ids` is the reverse map.
    self.nodes = checkpointable_objects
    self.node_ids = node_ids
    # Filled in by `map_resources`: captured eager tensor -> node id.
    self.captured_tensor_node_ids = object_identity.ObjectIdentityDictionary()
    self.slot_variables = slot_variables
    self.concrete_functions = []
    # Also add `Function`s as nodes.
    nodes_without_functions = list(self.nodes)
    seen_function_names = set()
    for node in nodes_without_functions:
      for function in checkpoint_view.list_functions(node).values():
        if function not in self.node_ids:
          self.node_ids[function] = len(self.nodes)
          self.nodes.append(function)
        if isinstance(function, def_function.Function):
          # Force listing the concrete functions for the side effects:
          # - populate the cache for functions that have an input_signature
          # and have not been called.
          # - force side effects of creation of concrete functions, e.g. create
          # variables on first run.
          concrete_functions = (
              function._list_all_concrete_functions_for_serialization())  # pylint: disable=protected-access
        else:
          concrete_functions = [function]
        for concrete_function in concrete_functions:
          # Deduplicate by name so each concrete function is serialized once.
          if concrete_function.name not in seen_function_names:
            seen_function_names.add(concrete_function.name)
            self.concrete_functions.append(concrete_function)

  @property
  def root(self):
    # Node 0 is always the root object the view was built from.
    return self.nodes[0]

  def fill_object_graph_proto(self, proto):
    """Populate the nodes, children and slot_variables of a SavedObjectGraph."""
    for node_id, node in enumerate(self.nodes):
      assert self.node_ids[node] == node_id
      object_proto = proto.nodes.add()
      object_proto.slot_variables.extend(self.slot_variables.get(node, ()))
      if isinstance(node, (def_function.Function, defun.ConcreteFunction,
                           _CapturedConstant)):
        # Function and captured-constant nodes carry no children of their own.
        continue
      for child in self.checkpoint_view.list_dependencies(node):
        child_proto = object_proto.children.add()
        child_proto.node_id = self.node_ids[child.ref]
        child_proto.local_name = child.name
      for local_name, ref_function in (
          self.checkpoint_view.list_functions(node).items()):
        child_proto = object_proto.children.add()
        child_proto.node_id = self.node_ids[ref_function]
        child_proto.local_name = local_name

  def map_resources(self):
    """Makes new resource handle ops corresponding to existing resource tensors.

    Creates resource handle ops in the current default graph, whereas
    `accessible_objects` will be from an eager context. Resource mapping adds
    resource handle ops to the main GraphDef of a SavedModel, which allows the
    C++ loader API to interact with variables.

    Returns:
      A tuple of (object_map, resource_map, asset_info):
        object_map: A dictionary mapping from object in `accessible_objects` to
          replacement objects created to hold the new resource tensors.
        resource_map: A dictionary mapping from resource tensors extracted from
          `accessible_objects` to newly created resource tensors.
        asset_info: An _AssetInfo tuple describing external assets referenced
          from accessible_objects.
    """
    # Only makes sense when adding to the export Graph
    assert not context.executing_eagerly()
    # TODO(allenl): Handle MirroredVariables and other types of variables which
    # may need special casing.
    object_map = object_identity.ObjectIdentityDictionary()
    resource_map = {}
    asset_info = _AssetInfo(
        asset_defs=[],
        asset_initializers_by_resource={},
        asset_filename_map={},
        asset_index={})
    for node_id, obj in enumerate(self.nodes):
      if isinstance(obj, tracking.TrackableResource):
        new_resource = obj.create_resource()
        resource_map[obj.resource_handle] = new_resource
        self.captured_tensor_node_ids[obj.resource_handle] = node_id
      elif resource_variable_ops.is_resource_variable(obj):
        new_variable = resource_variable_ops.copy_to_graph_uninitialized(obj)
        object_map[obj] = new_variable
        resource_map[obj.handle] = new_variable.handle
        self.captured_tensor_node_ids[obj.handle] = node_id
      elif isinstance(obj, tracking.TrackableAsset):
        _process_asset(obj, asset_info, resource_map)
        self.captured_tensor_node_ids[obj.asset_path] = node_id
    for concrete_function in self.concrete_functions:
      for capture in concrete_function.captured_inputs:
        if (isinstance(capture, ops.EagerTensor)
            and capture.dtype not in _UNCOPIABLE_DTYPES
            and capture not in self.captured_tensor_node_ids):
          # Copy copiable eager constants into the exported graph and record
          # them as `_CapturedConstant` nodes so they round-trip through the
          # object graph proto.
          copied_tensor = constant_op.constant(capture.numpy())
          node_id = len(self.nodes)
          node = _CapturedConstant(
              eager_tensor=capture, graph_tensor=copied_tensor)
          self.nodes.append(node)
          # Both the eager capture and the wrapper node map to the same id.
          self.node_ids[capture] = node_id
          self.node_ids[node] = node_id
          self.captured_tensor_node_ids[capture] = node_id
          resource_map[capture] = copied_tensor
    return object_map, resource_map, asset_info
def _tensor_dict_to_tensorinfo(tensor_dict):
  """Converts a dict of Tensors to a dict of TensorInfo protos, key-wise."""
  tensor_info_dict = {}
  for key, value in tensor_dict.items():
    tensor_info_dict[key] = utils_impl.build_tensor_info_internal(value)
  return tensor_info_dict
def _map_captures_to_created_tensors(
    original_captures, resource_map):
  """Maps eager tensors captured by a function to Graph resources for export.

  Args:
    original_captures: A dictionary mapping from tensors captured by the
      function to interior placeholders for those tensors (inside the function
      body).
    resource_map: A dictionary mapping from resource tensors owned by the eager
      context to resource tensors in the exported graph.

  Returns:
    A list of stand-in tensors which belong to the exported graph,
    corresponding to the function's captures.

  Raises:
    AssertionError: If the function references a resource which is not part of
      `resource_map`.
  """
  export_captures = []
  for exterior, interior in original_captures.items():
    mapped_resource = resource_map.get(exterior, None)
    if mapped_resource is not None:
      export_captures.append(mapped_resource)
      continue
    # No mapping: a resource-dtype capture here means an untracked stateful
    # object, which cannot be exported.
    if exterior.dtype == dtypes.resource:
      raise AssertionError(
          ("Tried to export a function which references untracked stateful "
           "object {}. Stateful TensorFlow objects (e.g. tf.Variable) must "
           "be tracked by the main object. Objects may be tracked by "
           "assigning them to an attribute of another tracked object, or to "
           "an attribute of the main object directly.")
          .format(interior))
    export_captures.append(mapped_resource)
  return export_captures
def _map_function_arguments_to_created_inputs(
    function_arguments, signature_key, function_name):
  """Creates exterior placeholders in the exported graph for function arguments.

  Functions have two kinds of inputs: tensors captured from the outside
  (eager) context, handled by `_map_captures_to_created_tensors`, and explicit
  user arguments. For every explicit argument this creates a twin placeholder
  which belongs to the exported graph rather than the function body, so that
  Session-based APIs can call the function via feeds and fetches naming
  Tensors in the MetaGraph.

  Args:
    function_arguments: A list of argument placeholders in the function body.
    signature_key: The name of the signature being exported, for error
      messages.
    function_name: The name of the function, for error messages.

  Returns:
    A tuple of (mapped_inputs, exterior_argument_placeholders)
    mapped_inputs: A list with entries corresponding to `function_arguments`
      containing all of the inputs of the function gathered from the exported
      graph (both captured resources and arguments).
    exterior_argument_placeholders: A dictionary mapping from argument names
      to placeholders in the exported graph, containing the explicit arguments
      to the function which a user is expected to provide.

  Raises:
    ValueError: If argument names are not unique.
  """
  exterior_argument_placeholders = {}
  mapped_inputs = []
  for body_placeholder in function_arguments:
    # The user-specified argument name is recorded on the placeholder op when
    # the function is traced.
    arg_name = compat.as_str_any(
        body_placeholder.op.get_attr("_user_specified_name"))
    if arg_name != body_placeholder.op.name:
      # TensorFlow uniquified the placeholder's name, meaning one
      # user-specified argument name covered multiple Tensors. Signatures need
      # one Tensor per named input, so reject this with an explanation.
      # (Should be unreachable: concrete functions may not be generated with
      # non-unique argument names.)
      raise ValueError(
          ("Got non-flat/non-unique argument names for SavedModel "
           "signature '{}': more than one argument to '{}' was named '{}'. "
           "Signatures have one Tensor per named input, so to have "
           "predictable names Python functions used to generate these "
           "signatures should avoid *args and Tensors in nested "
           "structures unless unique names are specified for each. Use "
           "tf.TensorSpec(..., name=...) to provide a name for a Tensor "
           "input.")
          .format(signature_key, compat.as_str_any(function_name),
                  arg_name))
    exterior_placeholder = array_ops.placeholder(
        shape=body_placeholder.shape,
        dtype=body_placeholder.dtype,
        name="{}_{}".format(signature_key, arg_name))
    exterior_argument_placeholders[arg_name] = exterior_placeholder
    mapped_inputs.append(exterior_placeholder)
  return mapped_inputs, exterior_argument_placeholders
def _call_function_with_mapped_captures(function, args, resource_map):
  """Calls `function` in the exported graph, using mapped resource captures."""
  export_captures = _map_captures_to_created_tensors(
      function.graph.captures, resource_map)
  all_inputs = args + export_captures
  # Invoke the inference function directly rather than through the normal
  # calling convention: the captured resource tensors are new stand-ins that
  # were not part of the original function definition, so they must be fed
  # explicitly.
  # pylint: disable=protected-access
  flat_outputs = function._inference_function.call(
      context.context(), all_inputs)
  return function._build_call_outputs(flat_outputs)
def _generate_signatures(signature_functions, resource_map):
  """Validates and calls `signature_functions` in the default graph.

  Each concrete function is called with freshly created placeholder Tensors
  (its captures re-bound through `resource_map`), so the placeholders, the
  function call op and the output Tensors all become part of the current
  default graph. The caller is expected to export that graph as a MetaGraph,
  including the returned SignatureDefs.

  Args:
    signature_functions: A dictionary mapping string keys to concrete
      TensorFlow functions (e.g. from
      `signature_serialization.canonicalize_signatures`) which will be used to
      generate SignatureDefs.
    resource_map: A dictionary mapping from resource tensors in the eager
      context to resource tensors in the Graph being exported. This dictionary
      is used to re-bind resources captured by functions to tensors which will
      exist in the SavedModel.

  Returns:
    A dictionary with the same structure as `signature_functions`, with the
    concrete functions replaced by SignatureDefs describing how to call each
    function from a TensorFlow 1.x Session / the C++ Loader API. The
    SignatureDefs reference the generated placeholders and output Tensors by
    name.
  """
  signatures = {}
  # Sort keys for deterministic graph construction across runs.
  for signature_key, function in sorted(signature_functions.items()):
    # The tail of `graph.inputs` holds the captures; everything before that
    # is a user-supplied argument.
    num_captures = len(function.graph.captures)
    if num_captures:
      argument_inputs = function.graph.inputs[:-num_captures]
    else:
      argument_inputs = function.graph.inputs
    mapped_inputs, exterior_argument_placeholders = (
        _map_function_arguments_to_created_inputs(
            argument_inputs, signature_key, function.name))
    outputs = _call_function_with_mapped_captures(
        function, mapped_inputs, resource_map)
    signatures[signature_key] = signature_def_utils.build_signature_def(
        _tensor_dict_to_tensorinfo(exterior_argument_placeholders),
        _tensor_dict_to_tensorinfo(outputs),
        method_name=signature_constants.PREDICT_METHOD_NAME)
  return signatures
def _trace_resource_initializers(accessible_objects):
  """Create concrete functions from `TrackableResource` objects."""

  def _wrap_initializer(obj):
    obj.initialize()
    return constant_op.constant(1.)  # Dummy control output

  return [
      def_function.function(
          functools.partial(_wrap_initializer, obj),
          # All inputs are captures.
          input_signature=[]).get_concrete_function()
      for obj in accessible_objects
      if isinstance(obj, tracking.TrackableResource)
  ]
# Bookkeeping accumulated while exporting assets; built by `map_resources`
# and filled in by `_process_asset`.
_AssetInfo = collections.namedtuple(
    "_AssetInfo", [
        # List of AssetFileDef protocol buffers
        "asset_defs",
        # Map from asset variable resource Tensors to their init ops
        "asset_initializers_by_resource",
        # Map from base asset filenames to full paths
        "asset_filename_map",
        # Map from TrackableAsset to index of corresponding AssetFileDef
        "asset_index"])
def _process_asset(trackable_asset, asset_info, resource_map):
  """Add `trackable_asset` to `asset_info` and `resource_map`.

  Args:
    trackable_asset: The `TrackableAsset` object being exported.
    asset_info: An `_AssetInfo` accumulator; updated in place.
    resource_map: Maps the asset's original path tensor to the new asset
      variable created here; updated in place.
  """
  original_variable = trackable_asset.asset_path
  # Read the asset's original file path eagerly.
  with context.eager_mode():
    original_path = original_variable.numpy()
  path = builder_impl.get_asset_filename_to_add(
      asset_filepath=original_path,
      asset_filename_map=asset_info.asset_filename_map)
  # TODO(andresp): Instead of mapping 1-1 between trackable asset
  # and asset in the graph def consider deduping the assets that
  # point to the same file.
  # The asset path is fed at load time through this placeholder, which
  # initializes a variable holding the (possibly relocated) path.
  asset_path_initializer = array_ops.placeholder(
      shape=original_variable.shape,
      dtype=dtypes.string,
      name="asset_path_initializer")
  asset_variable = resource_variable_ops.ResourceVariable(
      asset_path_initializer)
  asset_info.asset_filename_map[path] = original_path
  asset_def = meta_graph_pb2.AssetFileDef()
  asset_def.filename = path
  asset_def.tensor_info.name = asset_path_initializer.name
  asset_info.asset_defs.append(asset_def)
  asset_info.asset_initializers_by_resource[original_variable] = (
      asset_variable.initializer)
  asset_info.asset_index[trackable_asset] = len(asset_info.asset_defs) - 1
  resource_map[original_variable] = asset_variable
def _fill_meta_graph_def(meta_graph_def, saveable_view, signature_functions):
  """Generates a MetaGraph which calls `signature_functions`.

  Args:
    meta_graph_def: The MetaGraphDef proto to fill.
    saveable_view: The _SaveableView being exported.
    signature_functions: A dictionary mapping signature keys to concrete
      functions containing signatures to add to the MetaGraph.

  Returns:
    A tuple of (asset_info, exported_graph): an _AssetInfo, which contains
    information to help creating the SavedModel, and the new `ops.Graph` all
    export ops were added to.
  """
  # List objects from the eager context to make sure Optimizers give us the
  # right Graph-dependent variables.
  accessible_objects = saveable_view.nodes
  resource_initializer_functions = _trace_resource_initializers(
      accessible_objects)
  exported_graph = ops.Graph()
  resource_initializer_ops = []
  with exported_graph.as_default():
    object_map, resource_map, asset_info = saveable_view.map_resources()
    for resource_initializer_function in resource_initializer_functions:
      asset_dependencies = []
      # Any asset variable captured by a resource initializer must itself be
      # initialized first; collect those init ops as control dependencies.
      for capture in resource_initializer_function.graph.external_captures:
        asset_initializer = asset_info.asset_initializers_by_resource.get(
            capture, None)
        if asset_initializer is not None:
          asset_dependencies.append(asset_initializer)
      with ops.control_dependencies(asset_dependencies):
        resource_initializer_ops.append(
            _call_function_with_mapped_captures(
                resource_initializer_function, [], resource_map))
    resource_initializer_ops.extend(
        asset_info.asset_initializers_by_resource.values())
    # A single no-op gated on every initializer serves as the init op.
    with ops.control_dependencies(resource_initializer_ops):
      init_op = control_flow_ops.no_op()
    # Add the same op to the main_op collection and to the init_op
    # signature. The collection is for compatibility with older loader APIs;
    # only one will be executed.
    meta_graph_def.collection_def[constants.MAIN_OP_KEY].node_list.value.append(
        init_op.name)
    meta_graph_def.signature_def[constants.INIT_OP_SIGNATURE_KEY].CopyFrom(
        signature_def_utils.op_signature_def(
            init_op, constants.INIT_OP_SIGNATURE_KEY))
  # Saving an object-based checkpoint again gathers variables. We need to do the
  # gathering from the eager context so Optimizers save the right set of
  # variables, but want any operations associated with the save/restore to be in
  # the exported graph (thus the `to_graph` argument).
  saver = functional_saver.Saver(
      saveable_view.checkpoint_view.frozen_saveable_objects(
          object_map=object_map, to_graph=exported_graph))
  with exported_graph.as_default():
    signatures = _generate_signatures(signature_functions, resource_map)
    for concrete_function in saveable_view.concrete_functions:
      concrete_function.add_to_graph()
    saver_def = saver.to_proto()
    meta_graph_def.saver_def.CopyFrom(saver_def)
  graph_def = exported_graph.as_graph_def(add_shapes=True)
  meta_graph_def.graph_def.CopyFrom(graph_def)
  meta_graph_def.meta_info_def.tags.append(tag_constants.SERVING)
  meta_graph_def.asset_file_def.extend(asset_info.asset_defs)
  for signature_key, signature in signatures.items():
    meta_graph_def.signature_def[signature_key].CopyFrom(signature)
  # Strip default-valued attrs so older consumers can still parse the graph.
  meta_graph.strip_graph_default_valued_attrs(meta_graph_def)
  return asset_info, exported_graph
def _write_object_graph(saveable_view, export_dir, asset_file_def_index):
  """Save a SavedObjectGraph proto for `root`."""
  # SavedObjectGraph is similar to the CheckpointableObjectGraph proto in the
  # checkpoint. It will eventually go into the SavedModel.
  graph_proto = saved_object_graph_pb2.SavedObjectGraph()
  saveable_view.fill_object_graph_proto(graph_proto)

  structure_coder = nested_structure_coder.StructureCoder()
  for concrete_function in saveable_view.concrete_functions:
    function_proto = function_serialization.serialize_concrete_function(
        concrete_function, saveable_view.captured_tensor_node_ids,
        structure_coder)
    if function_proto is not None:
      graph_proto.concrete_functions[concrete_function.name].CopyFrom(
          function_proto)

  for obj, obj_proto in zip(saveable_view.nodes, graph_proto.nodes):
    _write_object_proto(obj, obj_proto, asset_file_def_index)

  # The proto lives under assets.extra; create the directory and write it.
  extra_asset_dir = os.path.join(
      compat.as_bytes(export_dir),
      compat.as_bytes(constants.EXTRA_ASSETS_DIRECTORY))
  file_io.recursive_create_dir(extra_asset_dir)
  object_graph_filename = os.path.join(
      extra_asset_dir, compat.as_bytes("object_graph.pb"))
  file_io.write_string_to_file(object_graph_filename,
                               graph_proto.SerializeToString())
def _write_object_proto(obj, proto, asset_file_def_index):
  """Saves an object into SavedObject proto.

  Args:
    obj: A node from the object graph: an asset, resource variable, function,
      captured constant, or generic user object.
    proto: The SavedObject proto to fill for `obj`.
    asset_file_def_index: Map from TrackableAsset to the index of its
      AssetFileDef in the MetaGraph's asset_file_def list.
  """
  if isinstance(obj, tracking.TrackableAsset):
    proto.asset.SetInParent()
    proto.asset.asset_file_def_index = asset_file_def_index[obj]
  elif resource_variable_ops.is_resource_variable(obj):
    proto.variable.SetInParent()
    proto.variable.trainable = obj.trainable
    proto.variable.dtype = obj.dtype.as_datatype_enum
    proto.variable.shape.CopyFrom(obj.shape.as_proto())
  elif isinstance(obj, def_function.Function):
    proto.function.CopyFrom(
        function_serialization.serialize_function(obj))
  elif isinstance(obj, defun.ConcreteFunction):
    proto.bare_concrete_function.CopyFrom(
        function_serialization.serialize_bare_concrete_function(obj))
  elif isinstance(obj, _CapturedConstant):
    # Constants are recovered by name from the op in the exported graph.
    proto.constant.operation = obj.graph_tensor.op.name
  else:
    registered_type_proto = revived_types.serialize(obj)
    if registered_type_proto is None:
      # Fallback for types with no matching registration
      registered_type_proto = saved_object_graph_pb2.SavedUserObject(
          identifier="_generic_user_object",
          version=versions_pb2.VersionDef(
              producer=1, min_consumer=1, bad_consumers=[]))
    proto.user_object.CopyFrom(registered_type_proto)
@tf_export("saved_model.save", v1=["saved_model.experimental.save"])
def save(obj, export_dir, signatures=None):
  # pylint: disable=line-too-long
  """Exports the Checkpointable object `obj` to [SavedModel format](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md).

  Example usage:

  ```python
  class Adder(tf.train.Checkpoint):

    @tf.function(input_signature=[tf.TensorSpec(shape=None, dtype=tf.float32)])
    def add(self, x):
      return x + x + 1.

  to_export = Adder()
  tf.saved_model.save(to_export, '/tmp/adder')
  ```

  The resulting SavedModel is then servable with an input named "x", its value
  having any shape and dtype float32.

  The optional `signatures` argument controls which methods in `obj` will be
  available to programs which consume `SavedModel`s, for example serving
  APIs. Python functions may be decorated with
  `@tf.function(input_signature=...)` and passed as signatures directly, or
  lazily with a call to `get_concrete_function` on the method decorated with
  `@tf.function`.

  If the `signatures` argument is omitted, `obj` will be searched for
  `@tf.function`-decorated methods. If exactly one `@tf.function` is found,
  that method will be used as the default signature for the SavedModel. This
  behavior is expected to change in the future, when a corresponding
  `tf.saved_model.load` symbol is added. At that point signatures will be
  completely optional, and any `@tf.function` attached to `obj` or its
  dependencies will be exported for use with `load`.

  When invoking a signature in an exported SavedModel, `Tensor` arguments are
  identified by name. These names will come from the Python function's argument
  names by default. They may be overridden by specifying a `name=...` argument
  in the corresponding `tf.TensorSpec` object. Explicit naming is required if
  multiple `Tensor`s are passed through a single argument to the Python
  function.

  The outputs of functions used as `signatures` must either be flat lists, in
  which case outputs will be numbered, or a dictionary mapping string keys to
  `Tensor`, in which case the keys will be used to name outputs.

  Signatures are available in objects returned by `tf.saved_model.load` as a
  `.signatures` attribute. This is a reserved attribute: `tf.saved_model.save`
  on an object with a custom `.signatures` attribute will raise an exception.

  Since `tf.keras.Model` objects are also Checkpointable, this function can be
  used to export Keras models. For example, exporting with a signature
  specified:

  ```python
  class Model(tf.keras.Model):

    @tf.function(input_signature=[tf.TensorSpec(shape=[None], dtype=tf.string)])
    def serve(self, serialized):
      ...

  m = Model()
  tf.saved_model.save(m, '/tmp/saved_model/')
  ```

  Exporting from a function without a fixed signature:

  ```python
  class Model(tf.keras.Model):

    @tf.function
    def call(self, x):
      ...

  m = Model()
  tf.saved_model.save(
      m, '/tmp/saved_model/',
      signatures=m.call.get_concrete_function(
          tf.TensorSpec(shape=[None, 3], dtype=tf.float32, name="inp")))
  ```

  `tf.keras.Model` instances constructed from inputs and outputs already have
  a signature and so do not require a `@tf.function` decorator or a
  `signatures` argument. If neither are specified, the model's forward pass is
  exported.

  ```python
  x = input_layer.Input((4,), name="x")
  y = core.Dense(5, name="out")(x)
  model = training.Model(x, y)
  tf.saved_model.save(model, '/tmp/saved_model/')
  # The exported SavedModel takes "x" with shape [None, 4] and returns "out"
  # with shape [None, 5]
  ```

  Variables must be tracked by assigning them to an attribute of a tracked
  object or to an attribute of `obj` directly. TensorFlow objects (e.g. layers
  from `tf.keras.layers`, optimizers from `tf.train`) track their variables
  automatically. This is the same tracking scheme that `tf.train.Checkpoint`
  uses, and an exported `Checkpoint` object may be restored as a training
  checkpoint by pointing `tf.train.Checkpoint.restore` to the SavedModel's
  "variables/" subdirectory. Currently variables are the only stateful objects
  supported by `tf.saved_model.save`, but others (e.g. tables) will be
  supported in the future.

  `tf.function` does not hard-code device annotations from outside the
  function body, instead using the calling context's device. This means for
  example that exporting a model which runs on a GPU and serving it on a CPU
  will generally work, with some exceptions. `tf.device` annotations inside
  the body of the function will be hard-coded in the exported model; this type
  of annotation is discouraged. Device-specific operations, e.g. with "cuDNN"
  in the name or with device-specific layouts, may cause issues. Currently a
  `DistributionStrategy` is another exception: active distribution strategies
  will cause device placements to be hard-coded in a function. Exporting a
  single-device computation and importing under a `DistributionStrategy` is
  not currently supported, but may be in the future.

  SavedModels exported with `tf.saved_model.save` [strip default-valued
  attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes)
  automatically, which removes one source of incompatibilities when the
  consumer of a SavedModel is running an older TensorFlow version than the
  producer. There are however other sources of incompatibilities which are not
  handled automatically, such as when the exported model contains operations
  which the consumer does not have definitions for.

  The current implementation of `tf.saved_model.save` targets serving
  use-cases, but omits information which will be necessary for the planned
  future implementation of `tf.saved_model.load`. Exported models using the
  current `save` implementation, and other existing SavedModels, will not be
  compatible with `tf.saved_model.load` when it is implemented. Further,
  `save` will in the future attempt to export `@tf.function`-decorated methods
  which it does not currently inspect, so some objects which are exportable
  today will raise exceptions on export in the future (e.g. due to
  complex/non-serializable default arguments). Such backwards-incompatible API
  changes are expected only prior to the TensorFlow 2.0 release.

  Args:
    obj: A checkpointable object to export.
    export_dir: A directory in which to write the SavedModel.
    signatures: Optional, either a `tf.function` with an input signature
      specified or the result of `f.get_concrete_function` on a
      `@tf.function`-decorated function `f`, in which case `f` will be used to
      generate a signature for the SavedModel under the default serving
      signature key. `signatures` may also be a dictionary, in which case it
      maps from signature keys to either `tf.function` instances with input
      signatures or concrete functions. The keys of such a dictionary may be
      arbitrary strings, but will typically be from the
      `tf.saved_model.signature_constants` module.

  Raises:
    ValueError: If `obj` is not checkpointable.

  @compatibility(eager)
  Not supported when graph building. From TensorFlow 1.x,
  `tf.enable_eager_execution()` must run first. May not be called from within
  a function body.
  @end_compatibility
  """
  if not context.executing_eagerly():
    with ops.init_scope():
      if context.executing_eagerly():
        raise AssertionError(
            "tf.saved_model.save is not supported inside a traced "
            "@tf.function. Move the call to the outer eagerly-executed "
            "context.")
      else:
        raise AssertionError(
            "tf.saved_model.save is not supported when graph building. "
            "tf.enable_eager_execution() must run first when calling it from "
            "TensorFlow 1.x.")
  # pylint: enable=line-too-long
  if not isinstance(obj, base.Checkpointable):
    raise ValueError(
        "Expected a Checkpointable object for export, got {}.".format(obj))

  checkpoint_graph_view = _AugmentedGraphView(obj)
  if signatures is None:
    signatures = signature_serialization.find_function_to_export(
        checkpoint_graph_view)

  signatures = signature_serialization.canonicalize_signatures(signatures)
  signature_serialization.validate_saveable_view(checkpoint_graph_view)
  # Expose the signatures under the reserved `.signatures` attribute of the
  # root object so they round-trip through the object graph.
  signature_map = signature_serialization.create_signature_map(signatures)
  checkpoint_graph_view.add_object(
      parent_node=checkpoint_graph_view.root,
      name_in_parent=signature_serialization.SIGNATURE_ATTRIBUTE_NAME,
      subgraph_root=signature_map)

  # Use _SaveableView to provide a frozen listing of properties and functions.
  # Note we run this twice since, while constructing the view the first time
  # there can be side effects of creating variables.
  _ = _SaveableView(checkpoint_graph_view)
  saveable_view = _SaveableView(checkpoint_graph_view)

  # TODO(allenl): Factor out some subset of SavedModelBuilder which is 2.x
  # compatible (no sessions) and share it with this export API rather than
  # making a SavedModel proto and writing it directly.
  saved_model = saved_model_pb2.SavedModel()
  meta_graph_def = saved_model.meta_graphs.add()
  object_saver = util.CheckpointableSaver(checkpoint_graph_view)
  asset_info, exported_graph = _fill_meta_graph_def(
      meta_graph_def, saveable_view, signatures)
  saved_model.saved_model_schema_version = (
      constants.SAVED_MODEL_SCHEMA_VERSION)
  # So far we've just been generating protocol buffers with no I/O. Now we write
  # the checkpoint, copy assets into the assets directory, and write out the
  # SavedModel proto itself.
  utils_impl.get_or_create_variables_dir(export_dir)
  object_saver.save(utils_impl.get_variables_path(export_dir))
  builder_impl.copy_assets_to_destination_dir(asset_info.asset_filename_map,
                                              export_dir)
  path = os.path.join(
      compat.as_bytes(export_dir),
      compat.as_bytes(constants.SAVED_MODEL_FILENAME_PB))
  file_io.write_string_to_file(path, saved_model.SerializeToString())
  _write_object_graph(saveable_view, export_dir, asset_info.asset_index)
  # Clean reference cycles so repeated export()s don't make work for the garbage
  # collector. Before this point we need to keep references to captured
  # constants in the saved graph.
  ops.dismantle_graph(exported_graph)
# ---------------------------------------------------------------------------
# Copyright (c) 2014 Artem Rozumenko (artyom.rozumenko@gmail.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Process tools commands."""
from socket import gethostbyname, gethostname, gaierror
from uuid import getnode
from time import time, sleep
from operator import itemgetter
from os import getpid
from fnmatch import fnmatch
import psutil
from locust.common import parse_pids, parse_args_list, message_wrapper
def get_process(pids=None, names=None):
    """
    Return a list of specified local processes.

    Arguments:
        pids - list of process PIDs to get;
        names - list of process names (fnmatch patterns, matched against the
                process name and its full command line) to get.

    Return: a list of process dicts with keys pid/name/status/cmd/node/
    endpoint, plus cpu/ram when specific processes were requested.
    """
    if not pids and not names:
        processes = list(psutil.process_iter())
    else:
        pids = parse_pids(pids)
        names = parse_args_list(names)
        processes = [psutil.Process(pid) for pid in pids
                     if psutil.pid_exists(pid)]
        if names and not pids:
            # Do not add current python process to result list.
            cur_pid = getpid()
            local_processes = [proc for proc in psutil.process_iter()
                               if proc.pid != cur_pid]
            for name in names:
                for process in local_processes:
                    try:
                        if fnmatch(process.name(), name) or fnmatch(
                                ' '.join(process.cmdline()), name):
                            processes.append(process)
                    except psutil.AccessDenied:
                        pass
    # The endpoint and node are identical for every process; resolve them once
    # instead of doing a DNS lookup per process inside the loop.
    try:
        hostname = gethostbyname(gethostname())
    except gaierror:
        hostname = gethostbyname('localhost')
    node = str(getnode())
    result = []
    for process in processes:
        try:
            temp = {
                'pid': process.pid,
                'name': process.name(),
                'status': str(process.status()),
                'cmd': ' '.join(process.cmdline()),
                'node': node,
                'endpoint': hostname
            }
            if pids or names:
                temp['cpu'] = process.cpu_percent() / psutil.cpu_count()
                # RSS is reported in bytes; expose it in KiB. `int` + `//`
                # replace the Python-2-only `long(...) / 1024` with the same
                # integer semantics on both Python 2 and 3.
                temp['ram'] = int(process.memory_info()[0]) // 1024
            if temp not in result:
                result.append(temp)
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            # Process died (or became inaccessible) between listing and
            # inspection; skip it.
            print('NoSuchProcess or AccessDenied exception occurred')
    return result
def list_process():
    """
    Return a list of all local processes.

    Arguments: None.

    Return: a list of process dicts.
    """
    # With no pid/name filters, get_process falls back to every local process.
    return get_process()
def kill_process(names=None, pids=None):
    """
    Kill local processes selected by name patterns and/or PIDs.

    Arguments:
        names - list of processes names to kill;
        pids - list of process PIDs to kill;
    Return:
        [{process1_dict}, {process2_dict}, ..., {processN_dict}]
    """
    if not (names or pids):
        return message_wrapper('Please, provide processes PIDs or names.',
                               status='error')
    target_pids = parse_pids(pids)
    target_names = parse_args_list(names)
    return _kill_process_list(
        _list_presented_processes(target_names, target_pids))
def _kill_process_list(processes):
    """
    Kill the 'present' entries of a prepared process list.

    Arguments:
        processes - list of dictionaries to kill.
                    The dictionaries should contain name, pid,
                    status of process.
    Return: [{process1_dict}, {process2_dict}, ..., {processN_dict}]
    """
    for proc in processes:
        if proc['status'] != 'present':
            continue
        try:
            _kill_process_by_pid(proc['pid'])
            deadline = time() + 60
            while True:
                try:
                    psutil.Process(proc['pid'])
                except psutil.NoSuchProcess:
                    # The PID is gone - the kill went through.
                    proc['status'] = 'killed'
                    break
                sleep(3)
                if time() > deadline:
                    proc['status'] = 'not_killed'
                    break
        except psutil.NoSuchProcess:
            # Terminated between listing and killing.
            proc['status'] = 'killed_by_another_process'
    return processes
def _kill_process_by_pid(pid):
    """
    Kill the local process with the given PID.

    Arguments:
        pid - PID of process to kill
    """
    psutil.Process(pid).kill()
def _list_presented_processes(names, pids):
    """
    Create list of dictionaries with processes names, PIDs and statuses.

    Arguments:
        names - list of proc names to check availability in the local node;
        pids - list of proc PIDs to check availability in the local node;
    Return: [{process1_dict}, {process2_dict}, ..., {processN_dict}]
    """
    cur_pid = getpid()
    local_processes = [process for process in psutil.process_iter() if
                       process.pid != cur_pid]
    process_to_kill = []
    for name in set(names):
        name_flag = True
        for process in local_processes:
            try:
                # name()/cmdline() can raise for privileged or vanished
                # processes; treat those as "no match" (same guard that
                # get_process uses).
                matched = fnmatch(process.name(), name) or fnmatch(
                    ' '.join(process.cmdline()), name)
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                continue
            if matched:
                pids.append(process.pid)
                name_flag = False
        if name_flag:
            process_to_kill.append(
                {'pid': None, 'name': name, 'status': 'not_found'})
    for pid in set(pids):
        pid_flag = True
        for process in local_processes:
            if pid == process.pid:
                try:
                    proc_name = process.name()
                except (psutil.NoSuchProcess, psutil.AccessDenied):
                    proc_name = None
                process_to_kill.append(
                    {'pid': process.pid, 'name': proc_name,
                     'status': 'present'})
                pid_flag = False
        if pid_flag:
            process_to_kill.append(
                {'pid': pid, 'name': None, 'status': 'not_found'})
    # 'not_found' name entries carry pid=None; comparing None with ints
    # raises TypeError on Python 3, so sort with a key that orders them
    # first (matching the old Python 2 None-sorts-first behavior).
    return sorted(process_to_kill,
                  key=lambda proc: (proc['pid'] is not None,
                                    proc['pid'] or 0))
def suspend_process(names=None, pids=None):
    """
    Suspend local processes selected by name patterns and/or PIDs.

    Arguments:
        names - list of processes names to suspend;
        pids - list of process PIDs to suspend;
    Return: [{process1_dict}, {process2_dict}, ..., {processN_dict}]
    """
    if not (names or pids):
        return message_wrapper('Please, provide processes PIDs or names.',
                               status='error')
    target_pids = parse_pids(pids)
    target_names = parse_args_list(names)
    return _suspend_process_list(
        _list_presented_processes(target_names, target_pids))
def _suspend_process_list(processes):
    """
    Suspend local processes by list of processes.

    Arguments:
        processes - list of dictionaries to suspend.
                    The dictionaries should contain name, pid,
                    status of process.
    Return: [{process1_dict}, {process2_dict}, ..., {processN_dict}]
    """
    # Mark processes that were already stopped before we act on them.
    # Only inspect entries with a real, live PID: 'not_found' entries may
    # carry pid=None, and psutil.Process(None) silently refers to the
    # *current* process; a pid that died meanwhile raises NoSuchProcess.
    for process in processes:
        if process['status'] != 'present':
            continue
        try:
            if psutil.Process(process['pid']).status() == 'stopped':
                process['status'] = 'was_stopped'
        except psutil.NoSuchProcess:
            process['status'] = 'killed_by_another_process'
    for process in processes:
        if process['status'] == 'present':
            try:
                if psutil.Process(process['pid']).status() == 'stopped':
                    # Stopped between the pre-check above and now.
                    process['status'] = 'stopped_by_another_process'
                else:
                    _suspend_process_by_pid(process['pid'])
                    end_time = time() + 60
                    while True:
                        if psutil.Process(
                                process['pid']).status() == 'stopped':
                            process['status'] = 'stopped'
                            break
                        sleep(3)
                        if time() > end_time:
                            process['status'] = 'not_stopped'
                            break
            except psutil.NoSuchProcess:
                process['status'] = 'killed_by_another_process'
    return processes
def _suspend_process_by_pid(pid):
    """
    Suspend the local process with the given PID.

    Arguments:
        pid - PID of process to suspend
    """
    psutil.Process(pid).suspend()
def resume_process(names=None, pids=None):
    """
    Resume local processes selected by name patterns and/or PIDs.

    Arguments:
        names - list of processes names to resume;
        pids - list of process PIDs to resume;
    Return: [{process1_dict}, {process2_dict}, ..., {processN_dict}]
    """
    if not (names or pids):
        return message_wrapper('Please, provide processes PIDs or names.',
                               status='error')
    target_pids = parse_pids(pids)
    target_names = parse_args_list(names)
    return _resume_process_list(
        _list_presented_processes(target_names, target_pids))
def _resume_process_list(processes):
    """
    Resume local processes by list of processes.

    Arguments:
        processes - list of dictionaries to resume.
                    The dictionaries should contain name, pid,
                    status of process.
    Return: [{process1_dict}, {process2_dict}, ..., {processN_dict}]
    """
    def _is_active(pid):
        # A process counts as resumed once it is runnable again.
        return psutil.Process(pid).status() in ('running', 'sleeping')

    # Mark processes that were already running before we act on them.
    # Only inspect entries with a real, live PID: 'not_found' entries may
    # carry pid=None, and psutil.Process(None) silently refers to the
    # *current* (always-running) process, which previously mislabeled
    # those entries as 'was_resumed'; dead PIDs raise NoSuchProcess.
    for process in processes:
        if process['status'] != 'present':
            continue
        try:
            if _is_active(process['pid']):
                process['status'] = 'was_resumed'
        except psutil.NoSuchProcess:
            process['status'] = 'killed_by_another_process'
    for process in processes:
        if process['status'] == 'present':
            try:
                if _is_active(process['pid']):
                    # Resumed between the pre-check above and now.
                    process['status'] = 'resumed_by_another_process'
                else:
                    _resume_process_by_pid(process['pid'])
                    end_time = time() + 10
                    while True:
                        if _is_active(process['pid']):
                            process['status'] = 'resumed'
                            break
                        sleep(3)
                        if time() > end_time:
                            process['status'] = 'not_resumed'
                            break
            except psutil.NoSuchProcess:
                process['status'] = 'killed_by_another_process'
    return processes
def _resume_process_by_pid(pid):
    """
    Resume the local process with the given PID.

    Arguments:
        pid - PID of process to resume.
    """
    psutil.Process(pid).resume()
| |
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Escaping/unescaping methods for HTML, JSON, URLs, and others.
Also includes a few other miscellaneous string manipulation functions that
have crept in over time.
"""
from __future__ import absolute_import, division, print_function, with_statement
import json
import re
import sys
from tornado.util import PY3, unicode_type, basestring_type
if PY3:
from urllib.parse import parse_qs as _parse_qs
import html.entities as htmlentitydefs
import urllib.parse as urllib_parse
unichr = chr
else:
from urlparse import parse_qs as _parse_qs
import htmlentitydefs
import urllib as urllib_parse
_XHTML_ESCAPE_RE = re.compile('[&<>"\']')
_XHTML_ESCAPE_DICT = {'&': '&', '<': '<', '>': '>', '"': '"',
'\'': '''}
def xhtml_escape(value):
    """Escapes a string so it is valid within HTML or XML.

    Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``.
    When used in attribute values the escaped strings must be enclosed
    in quotes.

    .. versionchanged:: 3.2

       Added the single quote to the list of escaped characters.
    """
    def _replace(match):
        return _XHTML_ESCAPE_DICT[match.group(0)]

    return _XHTML_ESCAPE_RE.sub(_replace, to_basestring(value))
def xhtml_unescape(value):
    """Un-escapes an XML-escaped string."""
    text = _unicode(value)
    return re.sub(r"&(#?)(\w+?);", _convert_entity, text)
# The fact that json_encode wraps json.dumps is an implementation detail.
# Please see https://github.com/tornadoweb/tornado/pull/706
# before sending a pull request that adds **kwargs to this function.
def json_encode(value):
    """JSON-encodes the given Python object."""
    # JSON permits but does not require forward slashes to be escaped.
    # This is useful when json data is emitted in a <script> tag
    # in HTML, as it prevents </script> tags from prematurely terminating
    # the javascript. Some json libraries do this escaping by default,
    # although python's standard library does not, so we do it here.
    # http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped
    encoded = json.dumps(value)
    return encoded.replace("</", "<\\/")
def json_decode(value):
    """Returns Python objects for the given JSON string."""
    text = to_basestring(value)
    return json.loads(text)
def squeeze(value):
    """Replace all sequences of whitespace chars with a single space."""
    collapsed = re.sub(r"[\x00-\x20]+", " ", value)
    return collapsed.strip()
def url_escape(value, plus=True):
    """Returns a URL-encoded version of the given value.

    If ``plus`` is true (the default), spaces will be represented
    as "+" instead of "%20".  This is appropriate for query strings
    but not for the path component of a URL.  Note that this default
    is the reverse of Python's urllib module.

    .. versionadded:: 3.1
       The ``plus`` argument
    """
    if plus:
        return urllib_parse.quote_plus(utf8(value))
    return urllib_parse.quote(utf8(value))
# python 3 changed things around enough that we need two separate
# implementations of url_unescape. We also need our own implementation
# of parse_qs since python 3's version insists on decoding everything.
if not PY3:
    def url_unescape(value, encoding='utf-8', plus=True):
        """Decodes the given value from a URL.

        The argument may be either a byte or unicode string.

        If encoding is None, the result will be a byte string. Otherwise,
        the result is a unicode string in the specified encoding.

        If ``plus`` is true (the default), plus signs will be interpreted
        as spaces (literal plus signs must be represented as "%2B"). This
        is appropriate for query strings and form-encoded values but not
        for the path component of a URL. Note that this default is the
        reverse of Python's urllib module.

        .. versionadded:: 3.1
           The ``plus`` argument
        """
        unquote = (urllib_parse.unquote_plus if plus else urllib_parse.unquote)
        if encoding is None:
            return unquote(utf8(value))
        else:
            return unicode_type(unquote(utf8(value)), encoding)

    # Python 2's parse_qs already operates on (and returns) byte strings,
    # so it can be re-exported unchanged.
    parse_qs_bytes = _parse_qs
else:
    def url_unescape(value, encoding='utf-8', plus=True):
        """Decodes the given value from a URL.

        The argument may be either a byte or unicode string.

        If encoding is None, the result will be a byte string. Otherwise,
        the result is a unicode string in the specified encoding.

        If ``plus`` is true (the default), plus signs will be interpreted
        as spaces (literal plus signs must be represented as "%2B"). This
        is appropriate for query strings and form-encoded values but not
        for the path component of a URL. Note that this default is the
        reverse of Python's urllib module.

        .. versionadded:: 3.1
           The ``plus`` argument
        """
        if encoding is None:
            if plus:
                # unquote_to_bytes doesn't have a _plus variant
                value = to_basestring(value).replace('+', ' ')
            return urllib_parse.unquote_to_bytes(value)
        else:
            unquote = (urllib_parse.unquote_plus if plus
                       else urllib_parse.unquote)
            return unquote(to_basestring(value), encoding=encoding)

    def parse_qs_bytes(qs, keep_blank_values=False, strict_parsing=False):
        """Parses a query string like urlparse.parse_qs, but returns the
        values as byte strings.

        Keys still become type str (interpreted as latin1 in python3!)
        because it's too painful to keep them as byte strings in
        python3 and in practice they're nearly always ascii anyway.
        """
        # This is gross, but python3 doesn't give us another way.
        # Latin1 is the universal donor of character encodings.
        result = _parse_qs(qs, keep_blank_values, strict_parsing,
                           encoding='latin1', errors='strict')
        encoded = {}
        for k, v in result.items():
            # Re-encode each decoded value back to its original bytes.
            encoded[k] = [i.encode('latin1') for i in v]
        return encoded
_UTF8_TYPES = (bytes, type(None))


def utf8(value):
    """Converts a string argument to a byte string.

    If the argument is already a byte string or None, it is returned
    unchanged.  Otherwise it must be a unicode string and is encoded
    as utf8.
    """
    if isinstance(value, _UTF8_TYPES):
        return value
    if isinstance(value, unicode_type):
        return value.encode("utf-8")
    raise TypeError(
        "Expected bytes, unicode, or None; got %r" % type(value)
    )
_TO_UNICODE_TYPES = (unicode_type, type(None))


def to_unicode(value):
    """Converts a string argument to a unicode string.

    If the argument is already a unicode string or None, it is returned
    unchanged.  Otherwise it must be a byte string and is decoded as
    utf8.
    """
    if isinstance(value, _TO_UNICODE_TYPES):
        return value
    if isinstance(value, bytes):
        return value.decode("utf-8")
    raise TypeError(
        "Expected bytes, unicode, or None; got %r" % type(value)
    )
# to_unicode was previously named _unicode not because it was private,
# but to avoid conflicts with the built-in unicode() function/type
_unicode = to_unicode

# When dealing with the standard library across python 2 and 3 it is
# sometimes useful to have a direct conversion to the native string type
# (``str`` is the unicode type on Python 3 and the byte type on Python 2,
# so the check below picks the matching converter).
if str is unicode_type:
    native_str = to_unicode
else:
    native_str = utf8
_BASESTRING_TYPES = (basestring_type, type(None))


def to_basestring(value):
    """Converts a string argument to a subclass of basestring.

    In python2, byte and unicode strings are mostly interchangeable,
    so functions that deal with a user-supplied argument in combination
    with ascii string constants can use either and should return the
    type the user supplied.  In python3, the two types are not
    interchangeable, so this method is needed to convert byte strings
    to unicode.
    """
    if isinstance(value, _BASESTRING_TYPES):
        return value
    if isinstance(value, bytes):
        return value.decode("utf-8")
    raise TypeError(
        "Expected bytes, unicode, or None; got %r" % type(value)
    )
def recursive_unicode(obj):
    """Walks a simple data structure, converting byte strings to unicode.

    Supports lists, tuples, and dictionaries.
    """
    if isinstance(obj, bytes):
        return to_unicode(obj)
    if isinstance(obj, dict):
        return dict((recursive_unicode(k), recursive_unicode(v))
                    for (k, v) in obj.items())
    if isinstance(obj, list):
        return [recursive_unicode(i) for i in obj]
    if isinstance(obj, tuple):
        return tuple(recursive_unicode(i) for i in obj)
    return obj
# I originally used the regex from
# http://daringfireball.net/2010/07/improved_regex_for_matching_urls
# but it gets all exponential on certain patterns (such as too many trailing
# dots), causing the regex matcher to never return.
# This regex should avoid those problems.
# Use to_unicode instead of tornado.util.u - we don't want backslashes getting
# processed as escapes.
_URL_RE = re.compile(to_unicode(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&|")*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&|")*\)))+)"""))
def linkify(text, shorten=False, extra_params="",
            require_protocol=False, permitted_protocols=["http", "https"]):
    """Converts plain text into HTML with links.

    For example: ``linkify("Hello http://tornadoweb.org!")`` would return
    ``Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!``

    Parameters:

    * ``shorten``: Long urls will be shortened for display.
    * ``extra_params``: Extra text to include in the link tag, or a callable
      taking the link as an argument and returning the extra text
      e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``,
      or::

          def extra_params_cb(url):
              if url.startswith("http://example.com"):
                  return 'class="internal"'
              else:
                  return 'class="external" rel="nofollow"'
          linkify(text, extra_params=extra_params_cb)

    * ``require_protocol``: Only linkify urls which include a protocol. If
      this is False, urls such as www.facebook.com will also be linkified.
    * ``permitted_protocols``: List (or set) of protocols which should be
      linkified, e.g. ``linkify(text, permitted_protocols=["http", "ftp",
      "mailto"])``. It is very unsafe to include protocols such as
      ``javascript``.
    """
    # NOTE(review): the mutable default for ``permitted_protocols`` is
    # shared across calls; it is only read here, never mutated.
    if extra_params and not callable(extra_params):
        extra_params = " " + extra_params.strip()

    def make_link(m):
        # Regex groups: 1 = whole url, 2 = protocol (if any),
        # 3 = slashes following the protocol.
        url = m.group(1)
        proto = m.group(2)
        if require_protocol and not proto:
            return url  # not protocol, no linkify

        if proto and proto not in permitted_protocols:
            return url  # bad protocol, no linkify

        href = m.group(1)
        if not proto:
            href = "http://" + href  # no proto specified, use http

        if callable(extra_params):
            params = " " + extra_params(href).strip()
        else:
            params = extra_params

        # clip long urls. max_len is just an approximation
        max_len = 30
        if shorten and len(url) > max_len:
            before_clip = url
            if proto:
                proto_len = len(proto) + 1 + len(m.group(3) or "")  # +1 for :
            else:
                proto_len = 0

            parts = url[proto_len:].split("/")
            if len(parts) > 1:
                # Grab the whole host part plus the first bit of the path
                # The path is usually not that interesting once shortened
                # (no more slug, etc), so it really just provides a little
                # extra indication of shortening.
                url = url[:proto_len] + parts[0] + "/" + \
                    parts[1][:8].split('?')[0].split('.')[0]

            if len(url) > max_len * 1.5:  # still too long
                url = url[:max_len]

            if url != before_clip:
                amp = url.rfind('&')
                # avoid splitting html char entities
                if amp > max_len - 5:
                    url = url[:amp]
                url += "..."

                if len(url) >= len(before_clip):
                    url = before_clip
                else:
                    # full url is visible on mouse-over (for those who don't
                    # have a status bar, such as Safari by default)
                    params += ' title="%s"' % href

        return u'<a href="%s"%s>%s</a>' % (href, params, url)

    # First HTML-escape so that our strings are all safe.
    # The regex is modified to avoid character entites other than &amp; so
    # that we won't pick up &quot;, etc.
    text = _unicode(xhtml_escape(text))
    return _URL_RE.sub(make_link, text)
def _convert_entity(m):
    """Translate a single HTML-entity regex match into its character."""
    entity = m.group(2)
    if m.group(1) == "#":
        # Numeric entity: decimal, or hexadecimal when prefixed with 'x'.
        try:
            if entity[:1].lower() == 'x':
                return unichr(int(entity[1:], 16))
            return unichr(int(entity))
        except ValueError:
            return "&#%s;" % entity
    # Named entity; unknown names are passed through untouched.
    try:
        return _HTML_UNICODE_MAP[entity]
    except KeyError:
        return "&%s;" % entity
def _build_unicode_map():
    """Build the mapping of HTML entity names to unicode characters."""
    return dict((name, unichr(value))
                for name, value in htmlentitydefs.name2codepoint.items())


_HTML_UNICODE_MAP = _build_unicode_map()
| |
""" ENTITY_SENSOR_MIB
This module defines Entity MIB extensions for physical
sensors.
Copyright (C) The Internet Society (2002). This version
of this MIB module is part of RFC 3433; see the RFC
itself for full legal notices.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class EntitysensordatascaleEnum(Enum):
    """
    EntitysensordatascaleEnum

    An object using this data type represents a data scaling
    factor, represented with an International System of Units
    (SI) prefix. The actual data units are determined by
    examining an object of this type together with the
    associated EntitySensorDataType object.

    An object of this type SHOULD be defined together with
    objects of type EntitySensorDataType and
    EntitySensorPrecision. Together, associated objects of
    these three types are used to identify the semantics of an
    object of type EntitySensorValue.

    .. data:: yocto = 1

    .. data:: zepto = 2

    .. data:: atto = 3

    .. data:: femto = 4

    .. data:: pico = 5

    .. data:: nano = 6

    .. data:: micro = 7

    .. data:: milli = 8

    .. data:: units = 9

    .. data:: kilo = 10

    .. data:: mega = 11

    .. data:: giga = 12

    .. data:: tera = 13

    .. data:: exa = 14

    .. data:: peta = 15

    .. data:: zetta = 16

    .. data:: yotta = 17

    """

    # NOTE(review): looks like YDK-generated model code; the member
    # values mirror the EntitySensorDataScale textual convention of the
    # MIB this module models -- confirm before editing by hand.
    yocto = 1

    zepto = 2

    atto = 3

    femto = 4

    pico = 5

    nano = 6

    micro = 7

    milli = 8

    units = 9

    kilo = 10

    mega = 11

    giga = 12

    tera = 13

    exa = 14

    peta = 15

    zetta = 16

    yotta = 17

    # Runtime hook: resolves this enum's validation metadata from the
    # generated meta tables.
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xe._meta import _ENTITY_SENSOR_MIB as meta
        return meta._meta_table['EntitysensordatascaleEnum']
class EntitysensordatatypeEnum(Enum):
    """
    EntitysensordatatypeEnum

    An object using this data type represents the Entity Sensor
    measurement data type associated with a physical sensor
    value. The actual data units are determined by examining an
    object of this type together with the associated
    EntitySensorDataScale object.

    An object of this type SHOULD be defined together with
    objects of type EntitySensorDataScale and
    EntitySensorPrecision. Together, associated objects of
    these three types are used to identify the semantics of an
    object of type EntitySensorValue.

    Valid values are\:

    other(1)\:        a measure other than those listed below
    unknown(2)\:      unknown measurement, or arbitrary,
    relative numbers
    voltsAC(3)\:      electric potential
    voltsDC(4)\:      electric potential
    amperes(5)\:      electric current
    watts(6)\:        power
    hertz(7)\:        frequency
    celsius(8)\:      temperature
    percentRH(9)\:    percent relative humidity
    rpm(10)\:         shaft revolutions per minute
    cmm(11),\:        cubic meters per minute (airflow)
    truthvalue(12)\:  value takes { true(1), false(2) }

    .. data:: other = 1

    .. data:: unknown = 2

    .. data:: voltsAC = 3

    .. data:: voltsDC = 4

    .. data:: amperes = 5

    .. data:: watts = 6

    .. data:: hertz = 7

    .. data:: celsius = 8

    .. data:: percentRH = 9

    .. data:: rpm = 10

    .. data:: cmm = 11

    .. data:: truthvalue = 12

    """

    # NOTE(review): looks like YDK-generated model code; member values
    # mirror the EntitySensorDataType textual convention -- confirm
    # before editing by hand.
    other = 1

    unknown = 2

    voltsAC = 3

    voltsDC = 4

    amperes = 5

    watts = 6

    hertz = 7

    celsius = 8

    percentRH = 9

    rpm = 10

    cmm = 11

    truthvalue = 12

    # Runtime hook: resolves this enum's validation metadata from the
    # generated meta tables.
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xe._meta import _ENTITY_SENSOR_MIB as meta
        return meta._meta_table['EntitysensordatatypeEnum']
class EntitysensorstatusEnum(Enum):
    """
    EntitysensorstatusEnum

    An object using this data type represents the operational
    status of a physical sensor.

    The value 'ok(1)' indicates that the agent can obtain the
    sensor value.

    The value 'unavailable(2)' indicates that the agent
    presently cannot obtain the sensor value.

    The value 'nonoperational(3)' indicates that the agent
    believes the sensor is broken. The sensor could have a hard
    failure (disconnected wire), or a soft failure such as out\-
    of\-range, jittery, or wildly fluctuating readings.

    .. data:: ok = 1

    .. data:: unavailable = 2

    .. data:: nonoperational = 3

    """

    # NOTE(review): looks like YDK-generated model code; member values
    # mirror the EntitySensorStatus textual convention -- confirm
    # before editing by hand.
    ok = 1

    unavailable = 2

    nonoperational = 3

    # Runtime hook: resolves this enum's validation metadata from the
    # generated meta tables.
    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xe._meta import _ENTITY_SENSOR_MIB as meta
        return meta._meta_table['EntitysensorstatusEnum']
class EntitySensorMib(object):
    """
    .. attribute:: entphysensortable

     This table contains one row per physical sensor represented by an associated row in the entPhysicalTable

     **type**\: :py:class:`Entphysensortable <ydk.models.cisco_ios_xe.ENTITY_SENSOR_MIB.EntitySensorMib.Entphysensortable>`

    """

    # NOTE(review): looks like YDK-generated model code driven by the
    # generated meta tables -- confirm before editing by hand.
    _prefix = 'ENTITY-SENSOR-MIB'
    _revision = '2002-12-16'

    def __init__(self):
        # The single child table is created eagerly and back-linked
        # through ``parent``.
        self.entphysensortable = EntitySensorMib.Entphysensortable()
        self.entphysensortable.parent = self

    class Entphysensortable(object):
        """
        This table contains one row per physical sensor represented
        by an associated row in the entPhysicalTable.

        .. attribute:: entphysensorentry

         Information about a particular physical sensor. An entry in this table describes the present reading of a sensor, the measurement units and scale, and sensor operational status. Entries are created in this table by the agent. An entry for each physical sensor SHOULD be created at the same time as the associated entPhysicalEntry. An entry SHOULD be destroyed if the associated entPhysicalEntry is destroyed

         **type**\: list of :py:class:`Entphysensorentry <ydk.models.cisco_ios_xe.ENTITY_SENSOR_MIB.EntitySensorMib.Entphysensortable.Entphysensorentry>`

        """

        _prefix = 'ENTITY-SENSOR-MIB'
        _revision = '2002-12-16'

        def __init__(self):
            self.parent = None
            # List of sensor entries; YList keeps the parent/name links
            # the YDK runtime uses to build paths.
            self.entphysensorentry = YList()
            self.entphysensorentry.parent = self
            self.entphysensorentry.name = 'entphysensorentry'

        class Entphysensorentry(object):
            """
            Information about a particular physical sensor.

            An entry in this table describes the present reading of a
            sensor, the measurement units and scale, and sensor
            operational status.

            Entries are created in this table by the agent. An entry
            for each physical sensor SHOULD be created at the same time
            as the associated entPhysicalEntry. An entry SHOULD be
            destroyed if the associated entPhysicalEntry is destroyed.

            .. attribute:: entphysicalindex  <key>

             **type**\: int

             **range:** 1..2147483647

             **refers to**\: :py:class:`entphysicalindex <ydk.models.cisco_ios_xe.ENTITY_MIB.EntityMib.Entphysicaltable.Entphysicalentry>`

            .. attribute:: entphysensoroperstatus

             The operational status of the sensor

             **type**\: :py:class:`EntitysensorstatusEnum <ydk.models.cisco_ios_xe.ENTITY_SENSOR_MIB.EntitysensorstatusEnum>`

            .. attribute:: entphysensorprecision

             The number of decimal places of precision in fixed\-point sensor values returned by the associated entPhySensorValue object. This object SHOULD be set to '0' when the associated entPhySensorType value is not a fixed\-point type\: e.g., 'percentRH(9)', 'rpm(10)', 'cmm(11)', or 'truthvalue(12)'. This object SHOULD be set by the agent during entry creation, and the value SHOULD NOT change during operation

             **type**\: int

             **range:** \-8..9

            .. attribute:: entphysensorscale

             The exponent to apply to values returned by the associated entPhySensorValue object. This object SHOULD be set by the agent during entry creation, and the value SHOULD NOT change during operation

             **type**\: :py:class:`EntitysensordatascaleEnum <ydk.models.cisco_ios_xe.ENTITY_SENSOR_MIB.EntitysensordatascaleEnum>`

            .. attribute:: entphysensortype

             The type of data returned by the associated entPhySensorValue object. This object SHOULD be set by the agent during entry creation, and the value SHOULD NOT change during operation

             **type**\: :py:class:`EntitysensordatatypeEnum <ydk.models.cisco_ios_xe.ENTITY_SENSOR_MIB.EntitysensordatatypeEnum>`

            .. attribute:: entphysensorunitsdisplay

             A textual description of the data units that should be used in the display of entPhySensorValue

             **type**\: str

            .. attribute:: entphysensorvalue

             The most recent measurement obtained by the agent for this sensor. To correctly interpret the value of this object, the associated entPhySensorType, entPhySensorScale, and entPhySensorPrecision objects must also be examined

             **type**\: int

             **range:** \-1000000000..1073741823

            .. attribute:: entphysensorvaluetimestamp

             The value of sysUpTime at the time the status and/or value of this sensor was last obtained by the agent

             **type**\: int

             **range:** 0..4294967295

            .. attribute:: entphysensorvalueupdaterate

             An indication of the frequency that the agent updates the associated entPhySensorValue object, representing in milliseconds. The value zero indicates\: \- the sensor value is updated on demand (e.g., when polled by the agent for a get\-request), \- the sensor value is updated when the sensor value changes (event\-driven), \- the agent does not know the update rate

             **type**\: int

             **range:** 0..4294967295

             **units**\: milliseconds

            """

            _prefix = 'ENTITY-SENSOR-MIB'
            _revision = '2002-12-16'

            def __init__(self):
                # All leaves start unset (None); the runtime treats an
                # entry with only None leaves as empty (see _has_data).
                self.parent = None
                self.entphysicalindex = None
                self.entphysensoroperstatus = None
                self.entphysensorprecision = None
                self.entphysensorscale = None
                self.entphysensortype = None
                self.entphysensorunitsdisplay = None
                self.entphysensorvalue = None
                self.entphysensorvaluetimestamp = None
                self.entphysensorvalueupdaterate = None

            @property
            def _common_path(self):
                # Path of this list entry; the key leaf must be set
                # before a path can be formed.
                if self.entphysicalindex is None:
                    raise YPYModelError('Key property entphysicalindex is None')

                return '/ENTITY-SENSOR-MIB:ENTITY-SENSOR-MIB/ENTITY-SENSOR-MIB:entPhySensorTable/ENTITY-SENSOR-MIB:entPhySensorEntry[ENTITY-SENSOR-MIB:entPhysicalIndex = ' + str(self.entphysicalindex) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                # True when any leaf of this entry has been populated.
                if self.entphysicalindex is not None:
                    return True

                if self.entphysensoroperstatus is not None:
                    return True

                if self.entphysensorprecision is not None:
                    return True

                if self.entphysensorscale is not None:
                    return True

                if self.entphysensortype is not None:
                    return True

                if self.entphysensorunitsdisplay is not None:
                    return True

                if self.entphysensorvalue is not None:
                    return True

                if self.entphysensorvaluetimestamp is not None:
                    return True

                if self.entphysensorvalueupdaterate is not None:
                    return True

                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xe._meta import _ENTITY_SENSOR_MIB as meta
                return meta._meta_table['EntitySensorMib.Entphysensortable.Entphysensorentry']['meta_info']

        @property
        def _common_path(self):
            return '/ENTITY-SENSOR-MIB:ENTITY-SENSOR-MIB/ENTITY-SENSOR-MIB:entPhySensorTable'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # True when at least one child entry carries data.
            if self.entphysensorentry is not None:
                for child_ref in self.entphysensorentry:
                    if child_ref._has_data():
                        return True

            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xe._meta import _ENTITY_SENSOR_MIB as meta
            return meta._meta_table['EntitySensorMib.Entphysensortable']['meta_info']

    @property
    def _common_path(self):
        return '/ENTITY-SENSOR-MIB:ENTITY-SENSOR-MIB'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # Delegates to the child table.
        if self.entphysensortable is not None and self.entphysensortable._has_data():
            return True

        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xe._meta import _ENTITY_SENSOR_MIB as meta
        return meta._meta_table['EntitySensorMib']['meta_info']
| |
# This file is part of beets.
# Copyright 2012, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Fetches, embeds, and displays lyrics.
"""
from __future__ import print_function
import urllib
import re
import logging
from beets.plugins import BeetsPlugin
from beets import ui
from beets.ui import commands
# Global logger.
log = logging.getLogger('beets')
# Lyrics scrapers.
COMMENT_RE = re.compile(r'<!--.*-->', re.S)
DIV_RE = re.compile(r'<(/?)div>?')
TAG_RE = re.compile(r'<[^>]*>')
BREAK_RE = re.compile(r'<br\s*/?>')
def fetch_url(url):
    """Retrieve the content at a given URL, or return None if the source
    is unreachable.
    """
    try:
        handle = urllib.urlopen(url)
        return handle.read()
    except IOError as exc:
        log.debug('failed to fetch: {0} ({1})'.format(url, str(exc)))
        return None
def unescape(text):
    """Resolves &#xxx; HTML entities (and some others)."""
    # Replace non-breaking spaces (U+00A0) with plain spaces; with two
    # identical space characters the replace() was a no-op.
    out = text.replace(u'\xa0', ' ')

    def replchar(m):
        num = m.group(1)
        return unichr(int(num))
    # Raw string avoids the invalid-escape warning for "\d" on newer
    # Python versions.
    out = re.sub(r"&#(\d+);", replchar, out)
    return out
def extract_text(html, starttag):
    """Extract the text from a <DIV> tag in the HTML starting with
    ``starttag``. Returns None if parsing fails.
    """
    # Strip off the leading text before opening tag.
    try:
        _, html = html.split(starttag, 1)
    except ValueError:
        return

    # Walk through balanced DIV tags.
    level = 0
    parts = []
    pos = 0
    for match in DIV_RE.finditer(html):
        if match.group(1):  # Closing tag.
            level -= 1
            if level == 0:
                pos = match.end()
        else:  # Opening tag.
            if level == 0:
                parts.append(html[pos:match.start()])
            level += 1

        if level == -1:
            parts.append(html[pos:match.start()])
            break
    else:
        # Use the module logger instead of printing to stdout: this runs
        # inside a plugin and should not write to the console directly.
        log.debug('no closing tag found!')
        return
    lyrics = ''.join(parts)

    # Strip cruft.
    lyrics = COMMENT_RE.sub('', lyrics)
    lyrics = unescape(lyrics)
    lyrics = re.sub(r'\s+', ' ', lyrics)  # Whitespace collapse.
    lyrics = BREAK_RE.sub('\n', lyrics)  # <BR> newlines.
    lyrics = re.sub(r'\n +', '\n', lyrics)
    lyrics = re.sub(r' +\n', '\n', lyrics)
    lyrics = TAG_RE.sub('', lyrics)  # Strip remaining HTML tags.
    lyrics = lyrics.strip()
    return lyrics
LYRICSWIKI_URL_PATTERN = 'http://lyrics.wikia.com/%s:%s'


def _lw_encode(s):
    """Encode a string for use in a LyricsWiki URL."""
    s = re.sub(r'\s+', '_', s)
    s = s.replace("<", "Less_Than").replace(">", "Greater_Than")
    s = s.replace("#", "Number_")
    s = re.sub(r'[\[\{]', '(', s)
    s = re.sub(r'[\]\}]', ')', s)
    if isinstance(s, unicode):
        s = s.encode('utf8', 'ignore')
    return urllib.quote(s)
def fetch_lyricswiki(artist, title):
    """Fetch lyrics from LyricsWiki."""
    page = fetch_url(LYRICSWIKI_URL_PATTERN %
                     (_lw_encode(artist), _lw_encode(title)))
    if not page:
        return
    # Reject the placeholder page shown for unlicensed songs.
    lyrics = extract_text(page, "<div class='lyricbox'>")
    if lyrics and 'Unfortunately, we are not licensed' not in lyrics:
        return lyrics
LYRICSCOM_URL_PATTERN = 'http://www.lyrics.com/%s-lyrics-%s.html'

def _lc_encode(s):
    """Encode an artist or title for a Lyrics.com URL (spaces -> dashes)."""
    encoded = re.sub(r'\s+', '-', s)
    if isinstance(encoded, unicode):
        encoded = encoded.encode('utf8', 'ignore')
    return urllib.quote(encoded)
def fetch_lyricscom(artist, title):
    """Fetch lyrics from Lyrics.com.

    Returns the lyrics text with the site's "Lyrics powered by" footer
    removed, or None when the page is missing or is the "no lyric" stub.
    """
    url = LYRICSCOM_URL_PATTERN % (_lc_encode(title), _lc_encode(artist))
    html = fetch_url(url)
    if not html:
        return
    lyrics = extract_text(html, '<div id="lyric_space">')
    if lyrics and 'Sorry, we do not have the lyric' not in lyrics:
        # Fix: str.split always returns at least one element, so the old
        # `if parts:` guard was dead code -- take the first part directly.
        return lyrics.split('\n---\nLyrics powered by', 1)[0]
# Ordered list of scraper backends; the first hit wins.
BACKENDS = [fetch_lyricswiki, fetch_lyricscom]

def get_lyrics(artist, title):
    """Fetch lyrics, trying each source in turn."""
    for source in BACKENDS:
        result = source(artist, title)
        if not result:
            continue
        if isinstance(result, str):
            result = result.decode('utf8', 'ignore')
        return result
# Plugin logic.
def fetch_item_lyrics(lib, loglevel, item, write):
    """Fetch and store lyrics for a single item. If ``write``, then the
    lyrics will also be written to the file itself. The ``loglevel``
    parameter controls the visibility of the function's status log
    messages.
    """
    def status(prefix):
        # All status messages share the "<prefix>: artist - title" shape.
        log.log(loglevel, u'%s: %s - %s' % (prefix, item.artist, item.title))

    # Skip if the item already has lyrics.
    if item.lyrics:
        status(u'lyrics already present')
        return
    # Fetch lyrics.
    lyrics = get_lyrics(item.artist, item.title)
    if not lyrics:
        status(u'lyrics not found')
        return
    status(u'fetched lyrics')
    item.lyrics = lyrics
    if write:
        item.write()
    lib.store(item)
# Module-level toggle for import-time fetching, set by configure().
AUTOFETCH = True

class LyricsPlugin(BeetsPlugin):
    """Beets plugin exposing the ``lyrics`` command and an import stage
    that fetches lyrics automatically (when ``autofetch`` is enabled).
    """

    def __init__(self):
        super(LyricsPlugin, self).__init__()
        self.import_stages = [self.imported]

    def commands(self):
        cmd = ui.Subcommand('lyrics', help='fetch song lyrics')
        cmd.parser.add_option('-p', '--print', dest='printlyr',
                              action='store_true', default=False,
                              help='print lyrics to console')

        def lyrics_command(lib, config, opts, args):
            # The "write to files" option corresponds to the
            # import_write config value.
            write = ui.config_val(config, 'beets', 'import_write',
                                  commands.DEFAULT_IMPORT_WRITE, bool)
            for item in lib.items(ui.decargs(args)):
                fetch_item_lyrics(lib, logging.INFO, item, write)
                if opts.printlyr and item.lyrics:
                    ui.print_(item.lyrics)

        cmd.func = lyrics_command
        return [cmd]

    def configure(self, config):
        global AUTOFETCH
        AUTOFETCH = ui.config_val(config, 'lyrics', 'autofetch', True, bool)

    def imported(self, config, task):
        """Import-stage hook: auto-fetch lyrics for newly imported items."""
        if not AUTOFETCH:
            return
        for item in task.all_items():
            fetch_item_lyrics(config.lib, logging.DEBUG, item, False)
| |
# coding: utf-8
import os
import time
import json
from picklefield import PickledObjectField
from django.conf import settings
from django.contrib.gis.db import models
from django.utils.html import escape
from madrona.common.utils import asKml
from madrona.common.jsonutils import get_properties_json, get_feature_json
from madrona.features import register
from madrona.analysistools.models import Analysis
from general.utils import miles_to_meters, feet_to_meters, meters_to_feet, mph_to_mps, mps_to_mph, format_precision
from django.contrib.gis.geos import MultiPolygon
from django.contrib.gis.db.models.aggregates import Union
from django.forms.models import model_to_dict
from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
# Make sure all users are added to the public group
@receiver(post_save, sender=User, dispatch_uid='update_groups')
def userSaveCallback(sender, **kwargs):
    """post_save hook: ensure every user is in "Share with Public" and
    staff users are also in "Share with Staff".
    """
    user = kwargs['instance']
    # Each sharing group is used only if exactly one match exists.
    public_groups = Group.objects.filter(name="Share with Public")
    public_group = public_groups[0] if len(public_groups) == 1 else False
    staff_groups = Group.objects.filter(name="Share with Staff")
    staff_group = staff_groups[0] if len(staff_groups) == 1 else False
    new_changes = False
    try:
        if public_group and public_group not in user.groups.all():
            user.groups.add(public_group)
            new_changes = True
        if user.is_staff and staff_group and staff_group not in user.groups.all():
            user.groups.add(staff_group)
            new_changes = True
    except ValueError:
        # group assignment can fail (e.g. unsaved user); best-effort only
        pass
    if new_changes:
        user.save()
@register
class Scenario(Analysis):
    """Madrona Analysis feature holding a user's grid-cell filter design.

    Field pattern: each filterable attribute has a BooleanField toggle
    ("is this filter enabled?") plus companion ``*_input`` / ``*_min`` /
    ``*_max`` fields carrying the user's parameters.  Field names mirror
    the per-cell statistics on GridCell.
    NOTE(review): BooleanField(default=None) on the toggles is unusual;
    presumably these are always supplied by the form -- confirm.
    """
    LIFESTAGE_CHOICES = (
        ('adults', 'Adults'),
        ('juveniles', 'Juveniles'),
        ('eggs', 'Eggs'),
        ('larvae', 'Larvae')
    )
    # Step 1: species / lifestage presence.
    species = models.BooleanField(default=None)
    species_input = models.CharField(max_length=255, blank=True, null=True)
    lifestage = models.BooleanField(default=None)
    lifestage_input = models.CharField(max_length=30, blank=True, null=True, choices=LIFESTAGE_CHOICES)
    # Depth filters (fathoms and meters).
    min_fthm = models.BooleanField(default=None)
    mean_fthm = models.BooleanField(default=None)
    mean_fthm_min = models.FloatField(null=True, blank=True)
    mean_fthm_max = models.FloatField(null=True, blank=True)
    max_fthm = models.BooleanField(default=None)
    min_meter = models.BooleanField(default=None)
    mean_meter = models.BooleanField(default=None)
    mean_meter_min = models.IntegerField(null=True, blank=True)
    mean_meter_max = models.IntegerField(null=True, blank=True)
    max_meter = models.BooleanField(default=None)
    # Step 2: substrate presence ('Y'/'N' stored in *_input).
    sft_sub_m2 = models.BooleanField(default=None)
    sft_sub_m2_input = models.TextField(null=True, blank=True)
    mix_sub_m2 = models.BooleanField(default=None)
    mix_sub_m2_input = models.TextField(null=True, blank=True)
    hrd_sub_m2 = models.BooleanField(default=None)
    hrd_sub_m2_input = models.TextField(null=True, blank=True)
    rck_sub_m2 = models.BooleanField(default=None)
    rck_sub_m2_input = models.TextField(null=True, blank=True)
    # Coral / sponge / pennatulid presence and observation filters.
    cnt_cs = models.BooleanField(default=None)
    cnt_cs_input = models.TextField(null=True, blank=True)
    cnt_penn = models.BooleanField(default=None)
    cnt_penn_input = models.TextField(null=True, blank=True)
    ra_cs = models.BooleanField(default=None)
    cs_obs = models.BooleanField(default=None)
    cs_obs_input = models.TextField(null=True, blank=True)
    cs_spm = models.BooleanField(default=None)
    cs_spm_input = models.TextField(null=True, blank=True)
    cs3500_obs = models.BooleanField(default=None)
    cs3500_obs_input = models.TextField(null=True, blank=True)
    cs3500_spm = models.BooleanField(default=None)
    cs3500_spm_input = models.TextField(null=True, blank=True)
    ra_penn = models.BooleanField(default=None)
    # Deep sea coral habitat suitability classes (per taxon and overall).
    hsalcy1_m2 = models.BooleanField(default=None)
    hsalcy2_m2 = models.BooleanField(default=None)
    hsalcy3_m2 = models.BooleanField(default=None)
    hsalcy4_m2 = models.BooleanField(default=None)
    hsall_m2 = models.BooleanField(default=False)
    hsall_m2_min = models.FloatField(null=True, blank=True)
    # Stored as a Python-literal list of class names (see serialize_attributes).
    hsall_m2_checkboxes = models.TextField(null=True, blank=True, default=None)
    hsall1_m2 = models.BooleanField(default=None)
    hsall1_m2_min = models.FloatField(null=True, blank=True)
    hsall1_m2_max = models.FloatField(null=True, blank=True)
    hsall2_m2 = models.BooleanField(default=None)
    hsall2_m2_min = models.FloatField(null=True, blank=True)
    hsall2_m2_max = models.FloatField(null=True, blank=True)
    hsall3_m2 = models.BooleanField(default=None)
    hsall3_m2_min = models.FloatField(null=True, blank=True)
    hsall3_m2_max = models.FloatField(null=True, blank=True)
    hsall4_m2 = models.BooleanField(default=None)
    hsall4_m2_min = models.FloatField(null=True, blank=True)
    hsall4_m2_max = models.FloatField(null=True, blank=True)
    hsanti1_m2 = models.BooleanField(default=None)
    hsanti2_m2 = models.BooleanField(default=None)
    hsanti3_m2 = models.BooleanField(default=None)
    hsanti4_m2 = models.BooleanField(default=None)
    hscalc1_m2 = models.BooleanField(default=None)
    hscalc2_m2 = models.BooleanField(default=None)
    hscalc3_m2 = models.BooleanField(default=None)
    hscalc4_m2 = models.BooleanField(default=None)
    hshola1_m2 = models.BooleanField(default=None)
    hshola2_m2 = models.BooleanField(default=None)
    hshola3_m2 = models.BooleanField(default=None)
    hshola4_m2 = models.BooleanField(default=None)
    hssclr1_m2 = models.BooleanField(default=None)
    hssclr2_m2 = models.BooleanField(default=None)
    hssclr3_m2 = models.BooleanField(default=None)
    hssclr4_m2 = models.BooleanField(default=None)
    hssclx1_m2 = models.BooleanField(default=None)
    hssclx2_m2 = models.BooleanField(default=None)
    hssclx3_m2 = models.BooleanField(default=None)
    hssclx4_m2 = models.BooleanField(default=None)
    # Priority coastal habitats.  NOTE: despite the _m2 suffix these four
    # minimums hold square MILES (see comment in serialize_attributes).
    hpc_est_m2 = models.BooleanField(default=None)
    hpc_est_m2_min = models.FloatField(null=True, blank=True)
    hpc_est_m2_max = models.FloatField(null=True, blank=True)
    hpc_klp_m2 = models.BooleanField(default=None)
    hpc_klp_m2_min = models.FloatField(null=True, blank=True)
    hpc_klp_m2_max = models.FloatField(null=True, blank=True)
    hpc_rck_m2 = models.BooleanField(default=None)
    hpc_rck_m2_min = models.FloatField(null=True, blank=True)
    hpc_rck_m2_max = models.FloatField(null=True, blank=True)
    hpc_sgr_m2 = models.BooleanField(default=None)
    hpc_sgr_m2_min = models.FloatField(null=True, blank=True)
    hpc_sgr_m2_max = models.FloatField(null=True, blank=True)
    # Results cached by run().
    description = models.TextField(null=True, blank=True)
    satisfied = models.BooleanField(default=True, help_text="Am I satisfied?")
    active = models.BooleanField(default=True)
    # Comma-separated GridCell ids matching the filters; nullable.
    grid_cells = models.TextField(verbose_name='Grid Cell IDs', null=True, blank=True)
    geometry_final_area = models.FloatField(verbose_name='Total Area', null=True, blank=True)
    geometry_dissolved = models.MultiPolygonField(srid=settings.GEOMETRY_DB_SRID, null=True, blank=True, verbose_name="Filter result dissolved")
# format attribute JSON and handle None values
def get_min_max_attributes(self, title, min_val, max_val, units):
if isinstance(min_val, (int, long)):
min_val_str = str(int(min_val))
else:
min_val_str = str(min_val)
if isinstance(max_val, (int, long)):
max_val_str = str(int(max_val))
else:
max_val_str = str(max_val)
attribute = {
'title': title,
'data': min_val_str + ' to ' + max_val_str + ' ' + units
}
return attribute
# format attribute JSON and handle None values
def get_min_attributes(self, title, min_val, units):
if isinstance(min_val, (int, long)):
min_val_str = str(int(min_val))
else:
min_val_str = str(min_val)
attribute = {
'title': title,
'data': 'At least ' + min_val_str + ' ' + units
}
return attribute
@property
def serialize_attributes(self):
"""
Return attributes in text format. Used to display information on click in the planner.
"""
attributes = []
#generic
if self.description:
attributes.append({
'title': 'Description',
'data': self.description
})
# Step 1
if self.species:
if self.lifestage:
if self.species_input is not None and self.lifestage_input is not None:
title = '%s %ss present' % (self.species_input.capitalize(), self.lifestage_input.capitalize())
attributes.append({
'title': title,
'data': ''
})
elif self.species_input is not None:
title = '%s present' % self.species_input.capitalize()
attributes.append({
'title': title,
'data': ''
})
if self.mean_fthm:
attributes.append(
self.get_min_max_attributes(
'Depth Range',
self.mean_fthm_min,
self.mean_fthm_max,
'fathoms'
)
)
if self.hsall_m2 and self.hsall_m2_checkboxes:
attributes.append({
'title': 'Predicted Presence of Deep Sea Coral Habitat Classes',
'data': ', '.join(eval(self.hsall_m2_checkboxes))
})
if self.hsall1_m2:
attributes.append(
self.get_min_attributes(
'Predicted Class 1 Deep Sea Coral Habitat',
float(format_precision(self.hsall1_m2_min / 2590000.0, 2)),
'mi<sup>2</sup>'
)
)
# float(format_precision(self.hsall1_m2_min / 1000000, 2)),
# 'km<sup>2</sup>'
if self.hsall2_m2:
attributes.append(
self.get_min_attributes(
'Predicted Class 2 Deep Sea Coral Habitat',
float(format_precision(self.hsall2_m2_min / 2590000.0, 2)),
'mi<sup>2</sup>'
)
)
if self.hsall3_m2:
attributes.append(
self.get_min_attributes(
'Predicted Class 3 Deep Sea Coral Habitat',
float(format_precision(self.hsall3_m2_min / 2590000.0, 2)),
'mi<sup>2</sup>'
)
)
if self.hsall4_m2:
attributes.append(
self.get_min_attributes(
'Predicted Class 4 Deep Sea Coral Habitat',
float(format_precision(self.hsall4_m2_min / 2590000.0, 2)),
'mi<sup>2</sup>'
)
)
# NOTE: The following 4 'm2' fields actually have sq miles for units,
# not sq meters. This is just easier since it's what the user inputs
# so we don't need to run a separate filter or convert user input
# before the filter.
if self.hpc_est_m2:
attributes.append(
self.get_min_attributes(
'Estuary Habitat',
self.hpc_est_m2_min,
'mi<sup>2</sup>'
)
)
if self.hpc_klp_m2:
attributes.append(
self.get_min_attributes(
'Kelp Habitat',
self.hpc_klp_m2_min,
'mi<sup>2</sup>'
)
)
if self.hpc_rck_m2:
attributes.append(
self.get_min_attributes(
'Rocky Reef Habitat',
self.hpc_rck_m2_min,
'mi<sup>2</sup>'
)
)
if self.hpc_sgr_m2:
attributes.append(
self.get_min_attributes(
'Seagrass Habitat',
self.hpc_sgr_m2_min,
'mi<sup>2</sup>'
)
)
# Step 2
if self.sft_sub_m2:
if self.sft_sub_m2_input == 'Y':
title = 'Contains soft substrate'
else:
title = 'Does not contain any soft substrate'
attributes.append({
'title': title,
'data': ''
})
if self.mix_sub_m2:
if self.mix_sub_m2_input == 'Y':
title = 'Contains mixed substrate'
else:
title = 'Does not contain any mixed substrate'
attributes.append({
'title': title,
'data': ''
})
if self.hrd_sub_m2:
if self.hrd_sub_m2_input == 'Y':
title = 'Contains hard substrate'
else:
title = 'Does not contain any hard substrate'
attributes.append({
'title': title,
'data': ''
})
if self.rck_sub_m2:
if self.rck_sub_m2_input == 'Y':
title = 'Contains inferred rock substrate'
else:
title = 'Does not contain any inferred rock substrate'
attributes.append({
'title': title,
'data': ''
})
if self.cnt_cs:
if self.cnt_cs_input == 'Y':
title = 'Contains coral and/or sponges'
else:
title = 'Not known to contain coral or sponges'
attributes.append({
'title': title,
'data': ''
})
if self.cnt_penn:
if self.cnt_penn_input == 'Y':
title = 'Contains pennatulids (sea pen/sea whip)'
else:
title = 'Not known to contain pennatulids'
attributes.append({
'title': title,
'data': ''
})
# if self.cs_obs:
# if self.cs_obs_input == 'Y':
# title = 'Contains observed corals and/or sponges'
# else:
# title = 'No corals and/or sponges observed'
# attributes.append({
# 'title': title,
# 'data': ''
# })
attributes.append({'title': 'Number of Grid Cells',
'data': '{:,}'.format(self.grid_cells.count(',')+1)})
return { 'event': 'click', 'attributes': attributes }
def geojson(self, srid):
props = get_properties_json(self)
props['absolute_url'] = self.get_absolute_url()
json_geom = self.geometry_dissolved.transform(srid, clone=True).json
return get_feature_json(json_geom, json.dumps(props))
def run(self):
# placing this import here to avoid circular dependency with views.py
from views import run_filter_query
(query, notes) = run_filter_query(model_to_dict(self))
if len(query) == 0:
self.satisfied = False;
# raise Exception("No lease blocks available with the current filters.")
dissolved_geom = query.aggregate(Union('geometry'))
if dissolved_geom['geometry__union']:
dissolved_geom = dissolved_geom['geometry__union']
else:
raise Exception("No lease blocks available with the current filters.")
if type(dissolved_geom) == MultiPolygon:
self.geometry_dissolved = dissolved_geom
else:
self.geometry_dissolved = MultiPolygon(dissolved_geom, srid=dissolved_geom.srid)
self.active = True # ??
# import datetime
# start=datetime.datetime.now()
self.geometry_final_area = self.geometry_dissolved.area
self.grid_cells = ','.join(str(i)
for i in query.values_list('id', flat=True))
# print("Elapsed:", datetime.datetime.now() - start)
if self.grid_cells == '':
self.satisfied = False
else:
self.satisfied = True
return True
    def save(self, rerun=None, *args, **kwargs):
        """Persist the scenario, deciding whether Analysis.save should
        re-execute run().

        rerun=None means "decide for me": new objects always rerun;
        edited objects compare input fields/m2m sets against the stored
        copy.  NOTE: the TODO below currently forces rerun=True for every
        edit, so the comparison branches are effectively dead code.
        """
        if rerun is None and self.pk is None:
            rerun = True
        if rerun is None and self.pk is not None: #if editing a scenario and no value for rerun is given
            rerun = False
            if not rerun:
                orig = Scenario.objects.get(pk=self.pk)
                #TODO: keeping this in here til I figure out why self.grid_cells and self.geometry_final_area are emptied when run() is not called
                rerun = True
            if not rerun:
                # Rerun when any scalar input field changed.
                for f in Scenario.input_fields():
                    # Is original value different from form value?
                    if getattr(orig, f.name) != getattr(self, f.name):
                        #print 'input_field, %s, has changed' %f.name
                        rerun = True
                        break
            if not rerun:
                '''
                the substrates need to be grabbed, then saved, then grabbed again because
                both getattr calls (orig and self) return the same original list until the model has been saved
                (perhaps because form.save_m2m has to be called), after which calls to getattr will
                return the same list (regardless of whether we use orig or self)
                '''
                orig_weas = set(getattr(self, 'input_wea').all())
                orig_substrates = set(getattr(self, 'input_substrate').all())
                orig_sediments = set(getattr(self, 'input_sediment').all())
                super(Scenario, self).save(rerun=False, *args, **kwargs)
                new_weas = set(getattr(self, 'input_wea').all())
                new_substrates = set(getattr(self, 'input_substrate').all())
                new_sediments = set(getattr(self, 'input_sediment').all())
                if orig_substrates != new_substrates or orig_sediments != new_sediments or orig_weas != new_weas:
                    rerun = True
            super(Scenario, self).save(rerun=rerun, *args, **kwargs)
        else: #editing a scenario and rerun is provided
            super(Scenario, self).save(rerun=rerun, *args, **kwargs)
    def __unicode__(self):
        # Display name used by the admin and templates (Python 2).
        return u'%s' % self.name
def support_filename(self):
return os.path.basename(self.support_file.name)
@classmethod
def mapnik_geomfield(self):
return "output_geom"
@classmethod
def mapnik_style(self):
import mapnik
polygon_style = mapnik.Style()
ps = mapnik.PolygonSymbolizer(mapnik.Color('#ffffff'))
ps.fill_opacity = 0.5
ls = mapnik.LineSymbolizer(mapnik.Color('#555555'),0.75)
ls.stroke_opacity = 0.5
r = mapnik.Rule()
r.symbols.append(ps)
r.symbols.append(ls)
polygon_style.rules.append(r)
return polygon_style
@classmethod
def input_parameter_fields(klass):
return [f for f in klass._meta.fields if f.attname.startswith('input_parameter_')]
@classmethod
def input_filter_fields(klass):
return [f for f in klass._meta.fields if f.attname.startswith('input_filter_')]
@property
def grid_cells_set(self):
if len(self.grid_cells) == 0: #empty result
gridcell_ids = []
else:
gridcell_ids = [int(id) for id in self.grid_cells.split(',')]
gridcells = GridCell.objects.filter(pk__in=gridcell_ids)
return gridcells
@property
def num_lease_blocks(self):
if self.grid_cells == '':
return 0
return len(self.grid_cells.split(','))
@property
def geometry_is_empty(self):
return len(self.grid_cells) == 0
@property
def input_wea_names(self):
return [wea.wea_name for wea in self.input_wea.all()]
@property
def input_substrate_names(self):
return [substrate.substrate_name for substrate in self.input_substrate.all()]
@property
def input_sediment_names(self):
return [sediment.sediment_name for sediment in self.input_sediment.all()]
#TODO: is this being used...? Yes, see show.html
@property
def has_wind_energy_criteria(self):
wind_parameters = Scenario.input_parameter_fields()
for wp in wind_parameters:
if getattr(self, wp.name):
return True
return False
@property
def has_shipping_filters(self):
shipping_filters = Scenario.input_filter_fields()
for sf in shipping_filters:
if getattr(self, sf.name):
return True
return False
    @property
    def has_military_filters(self):
        # Military filters are not implemented for this model; templates
        # can still query the flag uniformly.
        return False
@property
def color(self):
try:
return Objective.objects.get(pk=self.input_objectives.values_list()[0][0]).color
except:
return '778B1A55'
    @property
    def get_id(self):
        # Convenience alias for the primary key (used by templates).
        return self.id
    class Options:
        # madrona feature-registration metadata (templates, form, icon).
        verbose_name = 'Spatial Design for Wind Energy'
        icon_url = 'marco/img/multi.png'
        form = 'scenarios.forms.ScenarioForm'
        form_template = 'scenario/form.html'
        show_template = 'scenario/show.html'
class GridCell(models.Model):
    """One cell of the planning grid with its per-cell statistics.

    Field names mirror the filter fields on Scenario: *_fthm/*_meter are
    depth statistics, cnt_*/ra_* are counts/ratings, and *_m2 values are
    areas (nominally square meters -- but see the note in Scenario that
    the hpc_*_m2 values are actually square miles).
    """
    # Depth statistics.
    min_fthm = models.IntegerField(null=True, blank=True)
    mean_fthm = models.IntegerField(null=True, blank=True)
    max_fthm = models.IntegerField(null=True, blank=True)
    min_meter = models.IntegerField(null=True, blank=True)
    mean_meter = models.IntegerField(null=True, blank=True)
    max_meter = models.IntegerField(null=True, blank=True)
    # Substrate areas.
    sft_sub_m2 = models.IntegerField(null=True, blank=True)
    mix_sub_m2 = models.IntegerField(null=True, blank=True)
    hrd_sub_m2 = models.IntegerField(null=True, blank=True)
    rck_sub_m2 = models.IntegerField(null=True, blank=True)
    # Coral/sponge/pennatulid counts and ratings.
    cnt_cs = models.IntegerField(null=True, blank=True)
    cnt_penn = models.IntegerField(null=True, blank=True)
    ra_cs = models.IntegerField(null=True, blank=True)
    ra_penn = models.IntegerField(null=True, blank=True)
    # Habitat suitability class areas (per taxon group, classes 1-4).
    hsalcy1_m2 = models.IntegerField(null=True, blank=True)
    hsalcy2_m2 = models.IntegerField(null=True, blank=True)
    hsalcy3_m2 = models.IntegerField(null=True, blank=True)
    hsalcy4_m2 = models.IntegerField(null=True, blank=True)
    hsall1_m2 = models.IntegerField(null=True, blank=True)
    hsall2_m2 = models.IntegerField(null=True, blank=True)
    hsall3_m2 = models.IntegerField(null=True, blank=True)
    hsall4_m2 = models.IntegerField(null=True, blank=True)
    hsanti1_m2 = models.IntegerField(null=True, blank=True)
    hsanti2_m2 = models.IntegerField(null=True, blank=True)
    hsanti3_m2 = models.IntegerField(null=True, blank=True)
    hsanti4_m2 = models.IntegerField(null=True, blank=True)
    hscalc1_m2 = models.IntegerField(null=True, blank=True)
    hscalc2_m2 = models.IntegerField(null=True, blank=True)
    hscalc3_m2 = models.IntegerField(null=True, blank=True)
    hscalc4_m2 = models.IntegerField(null=True, blank=True)
    hshola1_m2 = models.IntegerField(null=True, blank=True)
    hshola2_m2 = models.IntegerField(null=True, blank=True)
    hshola3_m2 = models.IntegerField(null=True, blank=True)
    hshola4_m2 = models.IntegerField(null=True, blank=True)
    hssclr1_m2 = models.IntegerField(null=True, blank=True)
    hssclr2_m2 = models.IntegerField(null=True, blank=True)
    hssclr3_m2 = models.IntegerField(null=True, blank=True)
    hssclr4_m2 = models.IntegerField(null=True, blank=True)
    hssclx1_m2 = models.IntegerField(null=True, blank=True)
    hssclx2_m2 = models.IntegerField(null=True, blank=True)
    hssclx3_m2 = models.IntegerField(null=True, blank=True)
    hssclx4_m2 = models.IntegerField(null=True, blank=True)
    # Priority coastal habitat areas.
    hpc_est_m2 = models.IntegerField(null=True, blank=True)
    hpc_klp_m2 = models.IntegerField(null=True, blank=True)
    hpc_rck_m2 = models.IntegerField(null=True, blank=True)
    hpc_sgr_m2 = models.IntegerField(null=True, blank=True)
    # Coral/sponge observation and species-per-meter statistics.
    cs_obs = models.IntegerField(null=True, blank=True)
    cs_spm = models.IntegerField(null=True, blank=True)
    cs3500_obs = models.IntegerField(null=True, blank=True)
    cs3500_spm = models.IntegerField(null=True, blank=True)
    unique_id = models.IntegerField(null=True, blank=True)
    centroid = models.PointField(
        srid=settings.GEOMETRY_DB_SRID,
        null=True,
        blank=True
    )
    geometry = models.MultiPolygonField(
        srid=settings.GEOMETRY_DB_SRID,
        null=True, blank=True,
        verbose_name="Grid Cell Geometry"
    )
    # GeoManager enables spatial lookups on the geometry fields.
    objects = models.GeoManager()
class SpeciesHabitatOccurence(models.Model):
    """One species/lifestage/habitat occurrence record (lookup data).

    Rows map a species + lifestage + season to a 4-level habitat
    classification and an SGH lookup code (see PlanningUnitHabitatLookup).
    """
    LIFESTAGE_CHOICES = (
        ('adults', 'Adults'),
        ('juveniles', 'Juveniles'),
        ('eggs', 'Eggs'),
        ('larvae', 'Larvae')
    )
    SEX_CHOICES = (
        ('Both', 'Both'),
        ('Male', 'Male'),
        ('Female', 'Female'),
        ('Unknown', 'Unknown')
    )
    # NOTE(review): the ('null', None) pair stores the string 'null' with a
    # None display label -- looks like a data-load artifact; confirm.
    ASSOCIATION_CHOICES = (
        ('Strong', 'Strong'),
        ('Medium', 'Medium'),
        ('Weak', 'Weak'),
        ('Unknown', 'Unknown'),
        ('null', None)
    )
    SEASON_CHOICES = (
        ('Unknown', 'Unknown'),
        ('All Year', 'All Year'),
        ('Winter', 'Winter'),
        ('Spring', 'Spring'),
        ('Summer', 'Summer'),
        ('Autumn', 'Autumn')
    )
    LEVEL_1_HABITAT_CHOICES = (
        ('Estuarine', 'Estuarine'),
        ('Nearshore', 'Nearshore'),
        ('Shelf', 'Shelf'),
        ('Slope/Rise', 'Slope/Rise')
    )
    LEVEL_2_HABITAT_CHOICES = (
        ('Benthos', 'Benthos'),
        ('Unknown', 'Unknown'),
        ('Submarine Canyon', 'Submarine Canyon'),
        ('Intertidal Benthos', 'Intertidal Benthos'),
        ('Basin', 'Basin')
    )
    LEVEL_3_HABITAT_CHOICES = (
        ('Hard Bottom', 'Hard Bottom'),
        ('Unconsolidated', 'Unconsolidated'),
        ('Mixed Bottom', 'Mixed Bottom'),
        ('Unknown', 'Unknown'),
        ('Vegetated Bottom', 'Vegetated Bottom')
    )
    LEVEL_4_HABITAT_CHOICES = (
        ('Algal Beds/Macro', 'Algal Beds/Macro'),
        ('Bedrock', 'Bedrock'),
        ('Cobble', 'Cobble'),
        ('Gravel', 'Gravel'),
        ('Gravel/Cobble', 'Gravel/Cobble'),
        ('Mixed mud/sand', 'Mixed mud/sand'),
        ('Mud', 'Mud'),
        ('Mud/Boulders', 'Mud/Boulders'),
        ('Mud/Cobble', 'Mud/Cobble'),
        ('Mud/gravel', 'Mud/gravel'),
        ('Mud/Rock', 'Mud/Rock'),
        ('Rooted Vascular', 'Rooted Vascular'),
        ('Sand', 'Sand'),
        ('Sand/Boulders', 'Sand/Boulders'),
        ('Sand/Rock', 'Sand/Rock'),
        ('Silt', 'Silt'),
        ('Soft Bottom/Boulder', 'Soft Bottom/Boulder'),
        ('Soft Bottom/rock', 'Soft Bottom/rock'),
        ('Unknown', 'Unknown')
    )
    ACTIVITY_CHOICES = (
        ('All', 'All'),
        ('Feeding', 'Feeding'),
        ('Growth to Maturity', 'Growth to Maturity'),
        ('Unknown', 'Unknown')
    )
    # Primary key comes from the source dataset, not auto-generated.
    object_id = models.IntegerField(primary_key=True)
    species_common = models.CharField(max_length=255, blank=False, null=False)
    species_sci = models.CharField(max_length=255, blank=False, null=False)
    lifestage = models.CharField(max_length=30, blank=False, null=False, choices=LIFESTAGE_CHOICES)
    sex = models.CharField(max_length=50, blank=False, null=False, choices=SEX_CHOICES)
    habitat_association = models.CharField(max_length=30, null=True, choices=ASSOCIATION_CHOICES)
    season = models.CharField(max_length=20, blank=False, null=False, choices=SEASON_CHOICES)
    level_1_habitat = models.CharField(max_length=30, blank=False, null=False, choices=LEVEL_1_HABITAT_CHOICES)
    level_2_habitat = models.CharField(max_length=30, blank=False, null=False, choices=LEVEL_2_HABITAT_CHOICES)
    level_3_habitat = models.CharField(max_length=30, blank=False, null=False, choices=LEVEL_3_HABITAT_CHOICES)
    level_4_habitat = models.CharField(max_length=30, blank=False, null=False, choices=LEVEL_4_HABITAT_CHOICES)
    xwalk_sgh = models.CharField(max_length=10, blank=False, null=False)
    sgh_lookup_code = models.CharField(max_length=30, blank=False, null=False)
    activity = models.CharField(max_length=30, blank=False, null=False, choices=ACTIVITY_CHOICES)
    activity_association = models.CharField(max_length=30, null=True, choices=ASSOCIATION_CHOICES)
    # Depth ranges in the source dataset's units -- not stated here.
    preferred_min_depth = models.IntegerField(blank=True, null=True, default=None)
    preferred_max_depth = models.IntegerField(blank=True, null=True, default=None)
    absolute_min_depth = models.IntegerField(blank=True, null=True, default=None)
    absolute_max_depth = models.IntegerField(blank=True, null=True, default=None)
class PlanningUnitHabitatLookup(models.Model):
    """Maps an SGH habitat lookup code to the planning-unit grid ids that
    contain that habitat."""
    sgh_lookup_code = models.CharField(primary_key=True, max_length=30, blank=False, null=False)
    pug_ids = models.TextField(blank=False, null=False, default="[]", help_text="string list of Planning Unit Grid IDs")
class Species(models.Model):
    """Simple species lookup: common and scientific names."""
    common_name = models.CharField(max_length=255, blank=False, null=False)
    scientific_name = models.CharField(max_length=255, blank=False, null=False)
| |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo.config import cfg
from heat.common import exception
from heat.common import short_id
from heat.common import template_format
from heat.db import api as db_api
from heat.engine import resource
from heat.engine.resources import user
from heat.engine import scheduler
from heat.tests.common import HeatTestCase
from heat.tests import fakes
from heat.tests import utils
user_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Just a User",
"Parameters" : {},
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User"
}
}
}
'''
user_template_password = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Just a User",
"Parameters" : {},
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User",
"Properties": {
"LoginProfile": { "Password": "myP@ssW0rd" }
}
}
}
}
'''
user_accesskey_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Just a User",
"Parameters" : {},
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User"
},
"HostKeys" : {
"Type" : "AWS::IAM::AccessKey",
"Properties" : {
"UserName" : {"Ref": "CfnUser"}
}
}
}
}
'''
user_policy_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Just a User",
"Parameters" : {},
"Resources" : {
"CfnUser" : {
"Type" : "AWS::IAM::User",
"Properties" : {
"Policies" : [ { "Ref": "WebServerAccessPolicy"} ]
}
},
"WebServerAccessPolicy" : {
"Type" : "OS::Heat::AccessPolicy",
"Properties" : {
"AllowedResources" : [ "WikiDatabase" ]
}
},
"WikiDatabase" : {
"Type" : "AWS::EC2::Instance",
}
}
}
'''
class UserTest(HeatTestCase):
    def setUp(self):
        """Common fixtures: fake keystone client, stack-user role config,
        dummy DB, and a fresh resource uuid."""
        super(UserTest, self).setUp()
        # Username matches what short_id 'aabbcc' stubbing will produce.
        self.username = 'test_stack-CfnUser-aabbcc'
        self.fc = fakes.FakeKeystoneClient(username=self.username)
        cfg.CONF.set_default('heat_stack_user_role', 'stack_user_role')
        utils.setup_dummy_db()
        self.resource_id = str(uuid.uuid4())
    def create_user(self, t, stack, resource_name,
                    project_id='stackproject', user_id='dummy_user',
                    password=None):
        """Create and return a COMPLETE User resource from template ``t``.

        Records mox expectations for the keystone interactions (stack
        domain project + user creation); the stub order below is part of
        the contract and must match the resource's call sequence.
        """
        self.m.StubOutWithMock(user.User, 'keystone')
        user.User.keystone().MultipleTimes().AndReturn(self.fc)
        self.m.StubOutWithMock(fakes.FakeKeystoneClient,
                               'create_stack_domain_project')
        fakes.FakeKeystoneClient.create_stack_domain_project(
            stack.id).AndReturn(project_id)
        # Pin short_id so the generated username is deterministic.
        self.m.StubOutWithMock(short_id, 'get_id')
        short_id.get_id(self.resource_id).MultipleTimes().AndReturn('aabbcc')
        self.m.StubOutWithMock(fakes.FakeKeystoneClient,
                               'create_stack_domain_user')
        fakes.FakeKeystoneClient.create_stack_domain_user(
            username=self.username, password=password,
            project_id=project_id).AndReturn(user_id)
        self.m.ReplayAll()

        rsrc = user.User(resource_name,
                         t['Resources'][resource_name],
                         stack)
        self.assertIsNone(rsrc.validate())
        # Force the resource's physical id to the uuid we stubbed above.
        with utils.UUIDStub(self.resource_id):
            scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        return rsrc
    def test_user(self):
        """Basic lifecycle: create, attribute errors, update-replace,
        suspend/resume no-ops, and delete under several resource_id states."""
        t = template_format.parse(user_template)
        stack = utils.parse_stack(t)

        rsrc = self.create_user(t, stack, 'CfnUser')
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())
        # Unknown attributes raise rather than returning None.
        self.assertRaises(exception.InvalidTemplateAttribute,
                          rsrc.FnGetAtt, 'Foo')
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        # Users cannot be updated in place.
        self.assertRaises(resource.UpdateReplace,
                          rsrc.handle_update, {}, {}, {})
        self.assertIsNone(rsrc.handle_suspend())
        self.assertIsNone(rsrc.handle_resume())

        # Delete with no resource_id succeeds.
        rsrc.resource_id = None
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)

        # Delete with a stale (access-key) resource_id also succeeds.
        rsrc.resource_id = self.fc.access
        rsrc.state_set(rsrc.CREATE, rsrc.COMPLETE)
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)

        # Delete is idempotent after a state reset.
        rsrc.state_set(rsrc.CREATE, rsrc.COMPLETE)
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_user_password(self):
        """A LoginProfile password is passed through to user creation."""
        t = template_format.parse(user_template_password)
        stack = utils.parse_stack(t)
        rsrc = self.create_user(t, stack, 'CfnUser', password=u'myP@ssW0rd')
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
    def test_user_validate_policies(self):
        """Check _validate_policies against valid and invalid policy refs."""
        t = template_format.parse(user_policy_template)
        stack = utils.parse_stack(t)
        rsrc = self.create_user(t, stack, 'CfnUser')
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.assertEqual([u'WebServerAccessPolicy'],
                         rsrc.properties['Policies'])
        # OK: policy resource exists in the stack
        self.assertTrue(rsrc._validate_policies([u'WebServerAccessPolicy']))
        # Resource name doesn't exist in the stack
        self.assertFalse(rsrc._validate_policies([u'NoExistAccessPolicy']))
        # Resource name is wrong Resource type
        self.assertFalse(rsrc._validate_policies([u'NoExistAccessPolicy',
                                                  u'WikiDatabase']))
        # Wrong type (AWS embedded policy format, not yet supported)
        dict_policy = {"PolicyName": "AccessForCFNInit",
                       "PolicyDocument":
                       {"Statement": [{"Effect": "Allow",
                                       "Action":
                                       "cloudformation:DescribeStackResource",
                                       "Resource": "*"}]}}
        # However we should just ignore it to avoid breaking existing templates
        self.assertTrue(rsrc._validate_policies([dict_policy]))
        self.m.VerifyAll()
def test_user_create_bad_policies(self):
t = template_format.parse(user_policy_template)
t['Resources']['CfnUser']['Properties']['Policies'] = ['NoExistBad']
stack = utils.parse_stack(t)
resource_name = 'CfnUser'
rsrc = user.User(resource_name,
t['Resources'][resource_name],
stack)
self.assertRaises(exception.InvalidTemplateAttribute,
rsrc.handle_create)
    def test_user_access_allowed(self):
        """User.access_allowed delegates to its policies' access_allowed."""
        # Stub the policy check so only 'a_resource' is allowed.
        self.m.StubOutWithMock(user.AccessPolicy, 'access_allowed')
        user.AccessPolicy.access_allowed('a_resource').AndReturn(True)
        user.AccessPolicy.access_allowed('b_resource').AndReturn(False)
        self.m.ReplayAll()
        t = template_format.parse(user_policy_template)
        stack = utils.parse_stack(t)
        rsrc = self.create_user(t, stack, 'CfnUser')
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.assertTrue(rsrc.access_allowed('a_resource'))
        self.assertFalse(rsrc.access_allowed('b_resource'))
        self.m.VerifyAll()
    def test_user_access_allowed_ignorepolicy(self):
        """Non-string (dict) policies are ignored by access_allowed."""
        # Stub the policy check so only 'a_resource' is allowed.
        self.m.StubOutWithMock(user.AccessPolicy, 'access_allowed')
        user.AccessPolicy.access_allowed('a_resource').AndReturn(True)
        user.AccessPolicy.access_allowed('b_resource').AndReturn(False)
        self.m.ReplayAll()
        t = template_format.parse(user_policy_template)
        # Mix a dict-format policy in with a valid policy resource name;
        # the dict entry must be skipped rather than breaking the check.
        t['Resources']['CfnUser']['Properties']['Policies'] = [
            'WebServerAccessPolicy', {'an_ignored': 'policy'}]
        stack = utils.parse_stack(t)
        rsrc = self.create_user(t, stack, 'CfnUser')
        self.assertEqual('dummy_user', rsrc.resource_id)
        self.assertEqual(self.username, rsrc.FnGetRefId())
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.assertTrue(rsrc.access_allowed('a_resource'))
        self.assertFalse(rsrc.access_allowed('b_resource'))
        self.m.VerifyAll()
class AccessKeyTest(HeatTestCase):
    """Tests for the AWS::IAM::AccessKey resource implementation."""

    def setUp(self):
        """Set up a dummy DB and a fake keystone client fixture."""
        super(AccessKeyTest, self).setUp()
        utils.setup_dummy_db()
        self.username = utils.PhysName('test_stack', 'CfnUser')
        self.credential_id = 'acredential123'
        self.fc = fakes.FakeKeystoneClient(username=self.username,
                                           user_id='dummy_user',
                                           credential_id=self.credential_id)
        cfg.CONF.set_default('heat_stack_user_role', 'stack_user_role')

    def create_user(self, t, stack, resource_name,
                    project_id='stackproject', user_id='dummy_user',
                    password=None):
        """Create and return the named user resource from the stack."""
        # Route all keystone calls through the fake client.
        self.m.StubOutWithMock(user.User, 'keystone')
        user.User.keystone().MultipleTimes().AndReturn(self.fc)
        self.m.ReplayAll()
        rsrc = stack[resource_name]
        self.assertIsNone(rsrc.validate())
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        return rsrc

    def create_access_key(self, t, stack, resource_name):
        """Create and return the named AccessKey resource."""
        rsrc = user.AccessKey(resource_name,
                              t['Resources'][resource_name],
                              stack)
        self.assertIsNone(rsrc.validate())
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        return rsrc

    def test_access_key(self):
        """Full lifecycle: create, attributes, resource data, delete."""
        t = template_format.parse(user_accesskey_template)
        stack = utils.parse_stack(t)
        self.create_user(t, stack, 'CfnUser')
        rsrc = self.create_access_key(t, stack, 'HostKeys')
        self.m.VerifyAll()
        # Updates force replacement.
        self.assertRaises(resource.UpdateReplace,
                          rsrc.handle_update, {}, {}, {})
        self.assertEqual(self.fc.access,
                         rsrc.resource_id)
        self.assertEqual(self.fc.secret,
                         rsrc._secret)
        # Ensure the resource data has been stored correctly
        rs_data = db_api.resource_data_get_all(rsrc)
        self.assertEqual(self.fc.secret, rs_data.get('secret_key'))
        self.assertEqual(self.fc.credential_id, rs_data.get('credential_id'))
        self.assertEqual(2, len(rs_data.keys()))
        self.assertEqual(utils.PhysName(stack.name, 'CfnUser'),
                         rsrc.FnGetAtt('UserName'))
        # The secret must be recoverable via FnGetAtt after the cached
        # copy is cleared (it is re-read from resource data).
        rsrc._secret = None
        self.assertEqual(self.fc.secret,
                         rsrc.FnGetAtt('SecretAccessKey'))
        self.assertRaises(exception.InvalidTemplateAttribute,
                          rsrc.FnGetAtt, 'Foo')
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()

    def test_access_key_get_from_keystone(self):
        """Secret retrieval falls back to keystone without resource data."""
        self.m.StubOutWithMock(user.AccessKey, 'keystone')
        user.AccessKey.keystone().MultipleTimes().AndReturn(self.fc)
        self.m.ReplayAll()
        t = template_format.parse(user_accesskey_template)
        stack = utils.parse_stack(t)
        self.create_user(t, stack, 'CfnUser')
        rsrc = self.create_access_key(t, stack, 'HostKeys')
        # Delete the resource data for secret_key, to test that existing
        # stacks which don't have the resource_data stored will continue
        # working via retrieving the keypair from keystone
        db_api.resource_data_delete(rsrc, 'credential_id')
        db_api.resource_data_delete(rsrc, 'secret_key')
        rs_data = db_api.resource_data_get_all(rsrc)
        self.assertEqual(0, len(rs_data.keys()))
        rsrc._secret = None
        self.assertEqual(self.fc.secret,
                         rsrc.FnGetAtt('SecretAccessKey'))
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()

    def test_access_key_no_user(self):
        """Creating a key for an unknown UserName fails; delete still works."""
        self.m.ReplayAll()
        t = template_format.parse(user_accesskey_template)
        # Set the resource properties UserName to an unknown user
        t['Resources']['HostKeys']['Properties']['UserName'] = 'NonExistent'
        stack = utils.parse_stack(t)
        stack['CfnUser'].resource_id = self.fc.user_id
        rsrc = user.AccessKey('HostKeys',
                              t['Resources']['HostKeys'],
                              stack)
        create = scheduler.TaskRunner(rsrc.create)
        self.assertRaises(exception.ResourceFailure, create)
        self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()
class AccessPolicyTest(HeatTestCase):
    """Tests for the OS::Heat::AccessPolicy resource implementation."""

    def _make_policy(self, parsed, name):
        # Helper: parse a stack and build the named AccessPolicy resource.
        stack = utils.parse_stack(parsed)
        return user.AccessPolicy(name, parsed['Resources'][name], stack)

    def test_accesspolicy_create_ok(self):
        """Creating a policy whose AllowedResources exist succeeds."""
        parsed = template_format.parse(user_policy_template)
        rsrc = self._make_policy(parsed, 'WebServerAccessPolicy')
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

    def test_accesspolicy_create_ok_empty(self):
        """An empty AllowedResources list is valid."""
        parsed = template_format.parse(user_policy_template)
        props = parsed['Resources']['WebServerAccessPolicy']['Properties']
        props['AllowedResources'] = []
        rsrc = self._make_policy(parsed, 'WebServerAccessPolicy')
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

    def test_accesspolicy_create_err_notfound(self):
        """Stack validation fails when AllowedResources names are unknown."""
        parsed = template_format.parse(user_policy_template)
        props = parsed['Resources']['WebServerAccessPolicy']['Properties']
        props['AllowedResources'] = ['NoExistResource']
        stack = utils.parse_stack(parsed)
        self.assertRaises(exception.StackValidationFailed, stack.validate)

    def test_accesspolicy_update(self):
        """Updating any property forces replacement."""
        parsed = template_format.parse(user_policy_template)
        rsrc = self._make_policy(parsed, 'WebServerAccessPolicy')
        self.assertRaises(resource.UpdateReplace,
                          rsrc.handle_update, {}, {}, {})

    def test_accesspolicy_access_allowed(self):
        """access_allowed is True only for resources in AllowedResources."""
        parsed = template_format.parse(user_policy_template)
        rsrc = self._make_policy(parsed, 'WebServerAccessPolicy')
        self.assertTrue(rsrc.access_allowed('WikiDatabase'))
        self.assertFalse(rsrc.access_allowed('NotWikiDatabase'))
        self.assertFalse(rsrc.access_allowed(None))
| |
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at: http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distrib-
# uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# specific language governing permissions and limitations under the License.
"""Utilities used throughout crisismap."""
import base64
import calendar
import datetime
from HTMLParser import HTMLParseError
from HTMLParser import HTMLParser
import os
import random
import re
import time
# Regions in the world that use miles (as opposed to kilometers) for measuring
# distance. List is compiled based on http://en.wikipedia.org/wiki/Mile
COUNTRIES_USING_MILES = [
'AS', # American Samoa
'BS', # Bahamas
'BZ', # Belize
'DM', # Dominica
'FK', # Falkland Islands
'GD', # Grenada
'GU', # Guam
'KN', # St. Kitts & Nevis
'KY', # Cayman Islands
'LC', # St. Lucia
'LR', # Liberia
'MM', # Myanmar
'MP', # The N. Mariana Islands
'SH', # St. Helena
'TC', # the Turks & Caicos Islands
'UK', # United Kingdom
'US', # United States
'VC', # St. Vincent & The Grenadines
'VG', # British Virgin Islands,
'VI', # the U.S. Virgin Islands
'WS', # Samoa
]
def GetDistanceUnitsForCountry(country_code):
  """Returns the distance unit used by a given region.

  Args:
    country_code: Two-letter country code in all capitals (ISO standard).

  Returns:
    'mi' for regions that measure distance in miles, 'km' for all others.
  """
  if country_code in COUNTRIES_USING_MILES:
    return 'mi'
  return 'km'
def IsDevelopmentServer():
  """Returns True if the app is running in development (or under testutil)."""
  software = os.environ.get('SERVER_SOFTWARE', '')
  return any(marker in software for marker in ('Development', 'testutil'))
class Struct(object):
  """An immutable bag of attributes.

  Attributes are supplied once at construction time; any later assignment
  raises TypeError.  Iterating a Struct yields its attribute names.
  """

  def __init__(self, **kwargs):
    # Write straight into __dict__ to bypass the disabled __setattr__.
    self.__dict__.update(kwargs)

  def __iter__(self):
    return iter(self.__dict__)

  def __setattr__(self, name, value):
    raise TypeError('%r has read-only attributes' % self)

  @classmethod
  def FromModel(cls, model):
    """Populates a new Struct from an ndb.Model (doesn't take a db.Model)."""
    def GetValue(prop_name):
      # Work around an ndb bug: repeated properties sometimes return lists
      # of _BaseValue objects; copying the list fixes up these objects. See:
      # https://code.google.com/p/appengine-ndb-experiment/issues/detail?id=208
      raw = getattr(model, prop_name, None)
      return raw[:] if type(raw) is list else raw
    if model:
      # ._properties is actually a public API; it's just named with "_" in
      # order to avoid collision with property names (see http://goo.gl/xAcU4).
      # getattr's None default tolerates datastore entities carrying extra
      # properties that aren't defined in the Python model class.
      names = model._properties  # pylint: disable=protected-access
      attrs = {n: GetValue(n) for n in names}
      return cls(id=model.key.id(), key=model.key, **attrs)
def StructFromModel(model):
  """Copies the properties of the given db.Model into a Struct.

  Note that we use Property.get_value_for_datastore to prevent fetching
  of referenced objects into the Struct.  The other effect of using
  get_value_for_datastore is that all date/time methods return
  datetime.datetime values.

  Args:
    model: A db.Model entity, or None.

  Returns:
    A Struct containing the properties of the given db.Model, with additional
    'key', 'name', and 'id' properties for the entity's key(), key().name(),
    and key().id().  Returns None if 'model' is None.
  """
  if not model:
    return None
  key = model.key()
  values = {}
  for name, prop in model.properties().iteritems():
    values[name] = prop.get_value_for_datastore(model)
  return Struct(key=key, id=key.id(), name=key.name(), **values)
def ResultIterator(query):
  """Returns a generator that yields Struct objects."""
  # A generator expression gives the same lazy, one-pass iteration as the
  # original generator function.
  return (StructFromModel(result) for result in query)
def SetAndTest(set_func, test_func, sleep_delta=0.05, num_tries=20):
  """Calls set_func, then polls test_func until it passes or tries run out.

  Sometimes we need to be able to see changes to the datastore immediately
  and are willing to accept a small latency for that.  This function calls
  set_func (which presumably makes a small change to the datastore), then
  repeatedly calls test_func, sleeping between attempts, until test_func
  passes or the maximum number of tries has been reached.

  Args:
    set_func: A function that sets some state in an AppEngine Entity.
    test_func: A function that returns true when the change made by set_func
        is now visible.
    sleep_delta: (defaults to 0.05) seconds to sleep between calls to
        test_func, or None to not sleep.
    num_tries: (defaults to 20) number of times to try test_func before
        giving up.

  Returns:
    True if test_func eventually returned true; False otherwise.
  """
  set_func()
  attempts_left = num_tries
  while attempts_left > 0:
    if test_func():
      return True
    attempts_left -= 1
    if sleep_delta:
      time.sleep(sleep_delta)
  return False
def IsValidEmail(email):
  """Returns a truthy match object if 'email' looks like a valid address."""
  pattern = r'^[^@]+@([\w-]+\.)+[\w-]+$'
  return re.match(pattern, email)
class HtmlStripper(HTMLParser):
  """Helper class for StripHtmlTags.

  Collects the text content of fed HTML, dropping every tag not in the
  whitelist and stripping attributes from the tags that are kept.
  """

  def __init__(self, tag_sub=None, tag_whitelist=None):
    HTMLParser.__init__(self)
    self.reset()
    self.fed = []  # collected output fragments, joined by GetData
    self.tag_sub = tag_sub or ''
    self.tag_whitelist = tag_whitelist or []

  def handle_starttag(self, tag, attrs):
    # Whitelisted opening tags survive, but their attributes are dropped.
    if tag in self.tag_whitelist:
      self.fed.append('<%s>' % tag)

  def handle_endtag(self, tag):
    # Whitelisted closing tags survive unchanged.
    if tag in self.tag_whitelist:
      self.fed.append('</%s>' % tag)

  def handle_data(self, d):
    self.fed.append(d)

  def handle_entityref(self, name):
    # Re-emit entity references (e.g. &amp;) untouched.
    self.fed.append('&%s;' % name)

  def handle_charref(self, name):
    # Re-emit numeric character references (e.g. &#39;) untouched.
    self.fed.append('&#%s;' % name)

  def GetData(self):
    """Returns the stripped output accumulated so far."""
    return self.tag_sub.join(self.fed)
def StripHtmlTags(value, tag_sub=None, tag_whitelist=None):
  """Returns the given HTML with tags stripped (minus those in tag_whitelist).

  Example usage:
    StripHtmlTags('<b onclick="xss()">Shelter</b> 120 E Street<br>' +
                  '<script>SomeHack</script>',
                  ['b', 'br'], ' ')
  returns
    '<b>Shelter</b> 120 E Street<br> SomeHack'

  Note that all attributes on whitelisted tags are removed, even though the
  tags themselves are returned in the result string.

  Args:
    value: String to process.
    tag_sub: String to replace tags with (by default uses an empty string).
    tag_whitelist: A list of strings that specify which html tags should not
        be stripped (e.g. ['b', 'u', 'br']).

  Returns:
    The original string with all html tags stripped besides those in
    tag_whitelist, or the input unchanged if it could not be parsed.
  """
  stripper = HtmlStripper(tag_sub, tag_whitelist)
  try:
    stripper.feed(value)
    stripper.close()
  except HTMLParseError:
    # Malformed HTML: fall back to returning the input untouched.
    return value
  return stripper.GetData()
def UtcToTimestamp(dt):
  """Converts a UTC datetime object to a scalar POSIX timestamp.

  Args:
    dt: A timezone-naive datetime.datetime interpreted as UTC.

  Returns:
    Seconds since the epoch as a float, preserving microseconds.
  """
  whole_seconds = calendar.timegm(dt.utctimetuple())
  return whole_seconds + dt.microsecond / 1e6
def TimestampToUtc(timestamp):
  """Converts a scalar POSIX timestamp to a UTC datetime object.

  Args:
    timestamp: Seconds since the epoch (int or float).

  Returns:
    The corresponding timezone-naive datetime.datetime in UTC.
  """
  return datetime.datetime.utcfromtimestamp(timestamp)
def MakeRandomId():
  """Generates a random identifier made of 16 URL-safe characters.

  urlsafe_b64encode encodes 12 random bytes as exactly 16 characters,
  which can include digits, letters, hyphens, and underscores.  Because
  the input length is a multiple of 3, there are no trailing "=" signs.

  Returns:
    A 16-character URL-safe base64 identifier.
  """
  # os.urandom is a cryptographically strong randomness source, unlike
  # random.randrange, so identifiers cannot be predicted from PRNG state.
  return base64.urlsafe_b64encode(os.urandom(12))
def ShortAge(dt):
  """Returns a short string describing a relative time in the past.

  Args:
    dt: A datetime.

  Returns:
    A short string like "5d ago" (5 days) or "32m ago" (32 minutes).
  """
  # TODO(kpy): This is English-specific and needs localization.
  elapsed = time.time() - UtcToTimestamp(dt)
  if elapsed < 60:
    return 'just now'
  minutes = int(elapsed / 60 + 0.5)
  if minutes < 100:
    return '%dm ago' % minutes
  hours = int(elapsed / 3600 + 0.5)
  if hours < 48:
    return '%dh ago' % hours
  days = int(elapsed / 86400 + 0.5)
  return '%dd ago' % days
| |
# Copyright (c) 2016, NECST Laboratory, Politecnico di Milano
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__author__ = "Marco Rabozzi, Luca Cerina, Giuseppe Natale"
__copyright__ = "Copyright 2016, NECST Laboratory, Politecnico di Milano"
import time
import struct
from pynq import MMIO
from pynq.iop import request_iop
from pynq.iop import iop_const
from pynq.iop import Pmod_IO
from pynq.iop import Arduino_IO
from pynq.iop import PMODA
from pynq.iop import PMODB
from pynq.iop import ARDUINO
from pynq.iop import PMOD_GROVE_G3
from pynq.iop import PMOD_GROVE_G4
from pynq.iop import ARDUINO_GROVE_I2C
PMOD_GROVE_FINGER_HR_PROGRAM = "pmod_grove_finger_hr.bin"
ARDUINO_GROVE_FINGER_HR_PROGRAM = "arduino_grove_finger_hr.bin"
GROVE_FINGER_HR_LOG_START = iop_const.MAILBOX_OFFSET+16
GROVE_FINGER_HR_LOG_END = GROVE_FINGER_HR_LOG_START+(1000*4)
class Grove_FingerHR(object):
    """This class controls the Grove finger clip heart rate sensor.

    Hardware version: v1.3.

    Attributes
    ----------
    iop : _IOP
        I/O processor instance used by Grove_FingerHR.
    mmio : MMIO
        Memory-mapped I/O instance to read and write instructions and data.
    log_running : int
        The state of the log (0: stopped, 1: started).
    log_interval_ms : int
        Time in milliseconds between two sampled readings.

    """
    def __init__(self, if_id, gr_pin):
        """Return a new instance of a Grove_FingerHR object.

        Note: the parameters were previously named `pmod_id`/`gr_id` while
        the body referenced `if_id`/`gr_pin`, so every call raised
        NameError; the signature now matches the documented names.

        Parameters
        ----------
        if_id : int
            IOP ID (1, 2, 3) corresponding to (PMODA, PMODB, ARDUINO).
        gr_pin : list
            A group of pins on stickit connector or arduino shield.

        """
        # Select the IOP binary matching the interface the sensor is on.
        if if_id in [PMODA, PMODB]:
            if gr_pin not in [PMOD_GROVE_G3, PMOD_GROVE_G4]:
                raise ValueError("FingerHR group number can only be G3 - G4.")
            finger_hr_program = PMOD_GROVE_FINGER_HR_PROGRAM
        elif if_id in [ARDUINO]:
            if gr_pin not in [ARDUINO_GROVE_I2C]:
                raise ValueError("FingerHR group number can only be I2C.")
            # Bug fix: this branch previously assigned an undefined
            # ARDUINO_GROVE_EAR_HR name to a variable that was never read,
            # so the Arduino path crashed with NameError.
            finger_hr_program = ARDUINO_GROVE_FINGER_HR_PROGRAM
        else:
            raise ValueError("No such IOP for grove device.")

        self.iop = request_iop(if_id, finger_hr_program)
        self.mmio = self.iop.mmio
        self.log_interval_ms = 1000
        self.log_running = 0
        self.iop.start()

        if if_id in [PMODA, PMODB]:
            # Write SCL and SDA pin configuration for the Pmod interface.
            self.mmio.write(iop_const.MAILBOX_OFFSET, gr_pin[0])
            self.mmio.write(iop_const.MAILBOX_OFFSET + 4, gr_pin[1])
            # Write configuration command and busy-wait for the IOP ACK
            # (the IOP clears the command word when done).
            self.mmio.write(iop_const.MAILBOX_OFFSET +
                            iop_const.MAILBOX_PY2IOP_CMD_OFFSET, 1)
            while (self.mmio.read(iop_const.MAILBOX_OFFSET +
                                  iop_const.MAILBOX_PY2IOP_CMD_OFFSET) == 1):
                pass

    def read(self):
        """Read the heart rate value from the Grove Finger HR peripheral.

        Issues command 2 and busy-waits until the IOP clears it, then reads
        the result from the mailbox.

        Returns
        -------
        int
            An integer representing the heart rate frequency.

        """
        self.mmio.write(iop_const.MAILBOX_OFFSET +
                        iop_const.MAILBOX_PY2IOP_CMD_OFFSET, 2)
        while (self.mmio.read(iop_const.MAILBOX_OFFSET +
                              iop_const.MAILBOX_PY2IOP_CMD_OFFSET) == 2):
            pass
        return self.mmio.read(iop_const.MAILBOX_OFFSET)

    def start_log(self, log_interval_ms=100):
        """Start recording multiple heart rate values in a log.

        This method will first set the log interval before writing the
        start-log command (3) to the MMIO.

        Parameters
        ----------
        log_interval_ms : int
            The time between two samples in milliseconds.

        Returns
        -------
        None

        """
        if log_interval_ms < 0:
            raise ValueError("Time between samples cannot be less than zero.")
        self.log_running = 1
        self.log_interval_ms = log_interval_ms
        self.mmio.write(iop_const.MAILBOX_OFFSET + 4, self.log_interval_ms)
        self.mmio.write(iop_const.MAILBOX_OFFSET +
                        iop_const.MAILBOX_PY2IOP_CMD_OFFSET, 3)

    def stop_log(self):
        """Stop recording the values in the log.

        Simply write the stop command (13, i.e. 0xD) to the MMIO.

        Returns
        -------
        None

        """
        if self.log_running == 1:
            self.mmio.write(iop_const.MAILBOX_OFFSET +
                            iop_const.MAILBOX_PY2IOP_CMD_OFFSET, 13)
            self.log_running = 0
        else:
            raise RuntimeError("No grove finger HR log running.")

    def get_log(self):
        """Return list of logged samples.

        Stops any running log, then drains the circular buffer between the
        head and tail pointers stored in the mailbox.

        Returns
        -------
        list
            List of integers containing the heart rate, or None if the
            buffer is empty.

        """
        #: Stop logging
        self.stop_log()

        #: Prep iterators and results list
        head_ptr = self.mmio.read(iop_const.MAILBOX_OFFSET + 0x8)
        tail_ptr = self.mmio.read(iop_const.MAILBOX_OFFSET + 0xC)
        readings = list()

        #: Sweep circular buffer for samples (entries are 4 bytes wide)
        if head_ptr == tail_ptr:
            return None
        elif head_ptr < tail_ptr:
            for i in range(head_ptr, tail_ptr, 4):
                readings.append(self.mmio.read(i))
        else:
            # Wrapped buffer: read to the end, then from the start to tail.
            for i in range(head_ptr, GROVE_FINGER_HR_LOG_END, 4):
                readings.append(self.mmio.read(i))
            for i in range(GROVE_FINGER_HR_LOG_START, tail_ptr, 4):
                readings.append(self.mmio.read(i))
        return readings
| |
import json
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render
from django.utils.translation.trans_real import to_language
import commonware.log
from rest_framework import mixins
from rest_framework.exceptions import MethodNotAllowed
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.status import (HTTP_201_CREATED, HTTP_202_ACCEPTED,
HTTP_400_BAD_REQUEST)
from rest_framework.viewsets import GenericViewSet
import mkt
from lib.metrics import record_action
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.api.forms import NewPackagedForm, PreviewJSONForm
from mkt.api.permissions import (AllowAppOwner, AllowRelatedAppOwner, AnyOf,
GroupPermission)
from mkt.constants import PLATFORMS_NAMES
from mkt.developers import tasks
from mkt.developers.decorators import dev_required
from mkt.developers.forms import (AppFormMedia, CategoryForm, NewManifestForm,
PreviewForm, PreviewFormSet)
from mkt.developers.utils import escalate_prerelease_permissions
from mkt.files.models import FileUpload
from mkt.site.decorators import login_required, use_master
from mkt.submit.forms import AppDetailsBasicForm
from mkt.submit.models import AppSubmissionChecklist
from mkt.submit.serializers import (AppStatusSerializer, FileUploadSerializer,
PreviewSerializer)
from mkt.users.models import UserProfile
from mkt.webapps.models import AddonUser, Preview, Webapp
from . import forms
from .decorators import read_dev_agreement_required, submit_step
log = commonware.log.getLogger('z.submit')
def submit(request):
    """Entry point for app submission: route the user to the right step."""
    if not request.user.is_authenticated():
        # Anonymous users see the overlaid terms/login page first.
        return proceed(request)
    profile = UserProfile.objects.get(pk=request.user.id)
    if profile.read_dev_agreement:
        # Dev agreement already accepted; continue to the manifest step.
        return manifest(request)
    return redirect('submit.app.terms')
def proceed(request):
    """Fake "Terms" view that we overlay the login on.

    We link here from the Developer Hub landing page; authenticated users
    are sent straight into the real submission flow.
    """
    if request.user.is_authenticated():
        return submit(request)
    form = forms.DevAgreementForm({'read_dev_agreement': True},
                                  instance=None, request=request)
    ctx = {'step': 'terms', 'agreement_form': form, 'proceed': True}
    return render(request, 'submit/terms.html', ctx)
@login_required
@submit_step('terms')
def terms(request):
    """Display and process the developer agreement step of submission."""
    # Devs who have already agreed skip straight to the manifest step.
    already_agreed = (request.user.is_authenticated() and
                      request.user.read_dev_agreement)
    if already_agreed:
        return manifest(request)
    form = forms.DevAgreementForm(
        request.POST or {'read_dev_agreement': True},
        instance=request.user,
        request=request)
    if request.POST and form.is_valid():
        form.save()
        return redirect('submit.app')
    ctx = {'step': 'terms', 'agreement_form': form}
    return render(request, 'submit/terms.html', ctx)
@login_required
@read_dev_agreement_required
@submit_step('manifest')
def manifest(request):
    """Handle the upload step: create a Webapp from a validated upload.

    On a valid POST this creates the Webapp, records device types, premium
    type, icon state, ownership and the submission checklist, then redirects
    to the details step.
    """
    form = forms.NewWebappForm(request.POST or None, request=request)
    features_form = forms.AppFeaturesForm(request.POST or None)
    # Validated unconditionally so errors are populated for re-render.
    features_form_valid = features_form.is_valid()
    if (request.method == 'POST' and form.is_valid() and
        features_form_valid):
        upload = form.cleaned_data['upload']
        addon = Webapp.from_upload(upload, is_packaged=form.is_packaged())
        file_obj = addon.latest_version.all_files[0]
        if form.is_packaged():
            # Packaged apps may need prerelease permission escalation
            # based on the stored validation results.
            validation = json.loads(upload.validation)
            escalate_prerelease_permissions(
                addon, validation, addon.latest_version)
        # Set the device type.
        for device in form.get_devices():
            addon.addondevicetype_set.get_or_create(
                device_type=device.id)
        # Set the premium type, only bother if it's not free.
        premium = form.get_paid()
        if premium:
            addon.update(premium_type=premium)
        if addon.has_icon_in_manifest(file_obj):
            # Fetch the icon, do polling.
            addon.update(icon_type='image/png')
        else:
            # In this case there is no need to do any polling.
            addon.update(icon_type='')
        AddonUser(addon=addon, user=request.user).save()
        # Checking it once. Checking it twice.
        AppSubmissionChecklist.objects.create(addon=addon, terms=True,
                                              manifest=True, details=False)
        # Create feature profile.
        addon.latest_version.features.update(**features_form.cleaned_data)
        tasks.fetch_icon.delay(addon.pk, file_obj.pk)
        return redirect('submit.app.details', addon.app_slug)
    return render(request, 'submit/manifest.html',
                  {'step': 'manifest', 'features_form': features_form,
                   'form': form, 'PLATFORMS_NAMES': PLATFORMS_NAMES})
@dev_required
@submit_step('details')
def details(request, addon_id, addon):
    """Collect app details: basics, categories, icon and previews."""
    # Name, Slug, Description, Privacy Policy, Homepage URL, Support URL,
    # Support Email.
    form_basic = AppDetailsBasicForm(request.POST or None, instance=addon,
                                     request=request)
    form_cats = CategoryForm(request.POST or None, product=addon,
                             request=request)
    form_icon = AppFormMedia(request.POST or None, request.FILES or None,
                             instance=addon, request=request)
    form_previews = PreviewFormSet(request.POST or None, prefix='files',
                                   queryset=addon.get_previews())
    # For empty webapp-locale (or no-locale) fields that have
    # form-locale values, duplicate them to satisfy the requirement.
    # NOTE: this mutates request.POST in place, so the forms above (bound
    # to the same data) see the duplicated locale values.
    form_locale = request.COOKIES.get('current_locale', '')
    app_locale = to_language(addon.default_locale)
    for name, value in request.POST.items():
        if value:
            if name.endswith(form_locale):
                basename = name[:-len(form_locale)]
            else:
                basename = name + '_'
            othername = basename + app_locale
            if not request.POST.get(othername, None):
                request.POST[othername] = value
    forms = {
        'form_basic': form_basic,
        'form_cats': form_cats,
        'form_icon': form_icon,
        'form_previews': form_previews,
    }
    if request.POST and all(f.is_valid() for f in forms.itervalues()):
        addon = form_basic.save(addon)
        form_cats.save()
        form_icon.save(addon)
        for preview in form_previews.forms:
            preview.save(addon)
        # If this is an incomplete app from the legacy submission flow, it may
        # not have device types set yet - so assume it works everywhere.
        if not addon.device_types:
            for device in mkt.DEVICE_TYPES:
                addon.addondevicetype_set.create(device_type=device)
        AppSubmissionChecklist.objects.get(addon=addon).update(details=True)
        if addon.needs_payment():
            # Paid apps get STATUS_NULL until payment information and content
            # ratings entered.
            addon.update(status=mkt.STATUS_NULL,
                         highest_status=mkt.STATUS_PENDING)
        # Mark as pending in special regions (i.e., China).
        # By default, the column is set to pending when the row is inserted.
        # But we need to set a nomination date so we know to list the app
        # in the China Review Queue now (and sort it by that date too).
        for region in mkt.regions.SPECIAL_REGIONS:
            addon.geodata.set_nominated_date(region, save=True)
            log.info(u'[Webapp:%s] Setting nomination date to '
                     u'now for region (%s).' % (addon, region.slug))
        record_action('app-submitted', request, {'app-id': addon.pk})
        return redirect('submit.app.done', addon.app_slug)
    ctx = {
        'step': 'details',
        'addon': addon,
    }
    ctx.update(forms)
    return render(request, 'submit/details.html', ctx)
@dev_required
def done(request, addon_id, addon):
    """Final submission screen; no submit step is enforced here."""
    ctx = {'step': 'next_steps', 'addon': addon}
    return render(request, 'submit/next_steps.html', ctx)
@dev_required
def resume(request, addon_id, addon):
    """Resume a partially-completed submission at its next checklist step."""
    try:
        step = addon.appsubmissionchecklist.get_next()
    except ObjectDoesNotExist:
        # Apps created outside the submission flow (useful for creating
        # apps with an API later) have no checklist; don't die.
        step = None
    return _resume(addon, step)
def _resume(addon, step):
    """Redirect to the view for the given submission step (or app edit)."""
    if not step:
        # Checklist complete or absent: send the dev to the edit page.
        return redirect(addon.get_dev_url('edit'))
    if step in ('terms', 'manifest'):
        # These steps are app-agnostic and take no URL arguments.
        return redirect('submit.app.%s' % step)
    return redirect(reverse('submit.app.%s' % step, args=[addon.app_slug]))
class ValidationViewSet(CORSMixin, mixins.CreateModelMixin,
                        mixins.RetrieveModelMixin, GenericViewSet):
    """API endpoint that validates hosted/packaged app submissions."""

    cors_allowed_methods = ['get', 'post']
    # Anonymous validation is allowed; authenticated uploads get an owner.
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    permission_classes = [AllowAny]
    model = FileUpload
    queryset = FileUpload.objects.all()
    serializer_class = FileUploadSerializer

    @use_master
    def create(self, request, *args, **kwargs):
        """
        Custom create method allowing us to re-use form logic and distinguish
        packaged app from hosted apps, applying delays to the validation task
        if necessary.

        Doesn't rely on any serializer, just forms.
        """
        data = self.request.data
        # A packaged submission carries an 'upload'; a hosted one a manifest.
        packaged = 'upload' in data
        form = (NewPackagedForm(data) if packaged
                else NewManifestForm(data))
        if not form.is_valid():
            return Response(form.errors, status=HTTP_400_BAD_REQUEST)
        if not packaged:
            upload = FileUpload.objects.create(
                user=request.user if request.user.is_authenticated() else None)
            # The hosted app validator is pretty fast (run synchronously).
            tasks.fetch_manifest(form.cleaned_data['manifest'], upload.pk)
        else:
            upload = form.file_upload
            # The packaged app validator is much heavier (run via task queue).
            tasks.validator.delay(upload.pk)
        log.info('Validation created: %s' % upload.pk)
        self.kwargs = {'pk': upload.pk}
        # Re-fetch the object, fetch_manifest() might have altered it.
        upload = self.get_object()
        serializer = self.get_serializer(upload)
        # 201 when processing already finished, 202 while still pending.
        status = HTTP_201_CREATED if upload.processed else HTTP_202_ACCEPTED
        return Response(serializer.data, status=status)
class StatusViewSet(mixins.RetrieveModelMixin, mixins.UpdateModelMixin,
                    GenericViewSet):
    """API endpoint exposing (and letting owners PATCH) an app's status."""

    queryset = Webapp.objects.all()
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    # App owners or members of the Admin group only.
    permission_classes = [AnyOf(AllowAppOwner,
                                GroupPermission('Admin', '%s'))]
    serializer_class = AppStatusSerializer

    def update(self, request, *args, **kwargs):
        # PUT is disallowed, only PATCH is accepted for this endpoint.
        if request.method == 'PUT':
            raise MethodNotAllowed('PUT')
        return super(StatusViewSet, self).update(request, *args, **kwargs)
class PreviewViewSet(CORSMixin, MarketplaceView, mixins.RetrieveModelMixin,
                     mixins.DestroyModelMixin, GenericViewSet):
    """
    Retrieve/delete app previews, plus a creation entry point (_create)
    that AppViewSet dispatches to from its own URL.
    """
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    permission_classes = [AllowRelatedAppOwner]
    queryset = Preview.objects.all()
    cors_allowed_methods = ['get', 'post', 'delete']
    serializer_class = PreviewSerializer

    def _create(self, request, *args, **kwargs):
        """
        Handle creation. This is directly called by the @action on AppViewSet,
        allowing the URL to depend on the app id. AppViewSet passes this method
        a Webapp instance in kwargs['app'] (optionally raising a 404 if the
        app in the URL doesn't exist, or a 403 if the app belongs to someone
        else).

        Note: this method is called '_create' and not 'create' because DRF
        would automatically make an 'app-preview-list' url name if this
        method was called 'create', which we don't want - the app-preview-list
        url name needs to be generated by AppViewSet's @action to include the
        app pk.
        """
        app = kwargs['app']
        data_form = PreviewJSONForm(request.data)
        if not data_form.is_valid():
            return Response(data_form.errors, status=HTTP_400_BAD_REQUEST)
        form = PreviewForm(data_form.cleaned_data)
        if not form.is_valid():
            # Bug fix: report the errors of the form that actually failed.
            # Previously this returned data_form.errors, which is empty at
            # this point since data_form already validated successfully.
            return Response(form.errors, status=HTTP_400_BAD_REQUEST)
        form.save(app)
        log.info('Preview created: %s' % form.instance)
        serializer = self.get_serializer(form.instance)
        return Response(serializer.data, status=HTTP_201_CREATED)
| |
from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:8332")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:8332")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Snorcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Snorcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
| |
from kombu.tests.utils import unittest
from kombu import BrokerConnection, Exchange
from kombu import compat
from kombu.tests.mocks import Transport, Channel
class test_misc(unittest.TestCase):
    """Tests for module-level helpers in kombu.compat."""

    def test_iterconsume(self):
        # Minimal stand-ins: drain_events() returns a monotonically
        # increasing counter so we can observe how many times the
        # iterator drained the connection.
        class Connection(object):
            drained = 0

            def drain_events(self, *args, **kwargs):
                self.drained += 1
                return self.drained

        class Consumer(object):
            active = False

            def consume(self, *args, **kwargs):
                self.active = True

        conn = Connection()
        consumer = Consumer()
        it = compat._iterconsume(conn, consumer)
        # First iteration activates the consumer and yields drain result 1.
        self.assertEqual(it.next(), 1)
        self.assertTrue(consumer.active)

        # The counter is shared, so a limited iterator continues from 2.
        it2 = compat._iterconsume(conn, consumer, limit=10)
        self.assertEqual(list(it2), [2, 3, 4, 5, 6, 7, 8, 9, 10, 11])

    def test_entry_to_queue(self):
        # Baseline carrot-style queue declaration.
        defs = {"binding_key": "foo.#",
                "exchange": "fooex",
                "exchange_type": "topic",
                "durable": True,
                "auto_delete": False}

        q1 = compat.entry_to_queue("foo", **dict(defs))
        self.assertEqual(q1.name, "foo")
        self.assertEqual(q1.routing_key, "foo.#")
        self.assertEqual(q1.exchange.name, "fooex")
        self.assertEqual(q1.exchange.type, "topic")
        # Without overrides, durable/auto_delete apply to both queue
        # and exchange.
        self.assertTrue(q1.durable)
        self.assertTrue(q1.exchange.durable)
        self.assertFalse(q1.auto_delete)
        self.assertFalse(q1.exchange.auto_delete)

        # exchange_* overrides affect only the exchange...
        q2 = compat.entry_to_queue("foo", **dict(defs,
                                                 exchange_durable=False))
        self.assertTrue(q2.durable)
        self.assertFalse(q2.exchange.durable)

        q3 = compat.entry_to_queue("foo", **dict(defs,
                                                 exchange_auto_delete=True))
        self.assertFalse(q3.auto_delete)
        self.assertTrue(q3.exchange.auto_delete)

        # ...while queue_* overrides affect only the queue.
        q4 = compat.entry_to_queue("foo", **dict(defs,
                                                 queue_durable=False))
        self.assertFalse(q4.durable)
        self.assertTrue(q4.exchange.durable)

        q5 = compat.entry_to_queue("foo", **dict(defs,
                                                 queue_auto_delete=True))
        self.assertTrue(q5.auto_delete)
        self.assertFalse(q5.exchange.auto_delete)

        # Identical declarations compare equal.
        self.assertEqual(compat.entry_to_queue("foo", **dict(defs)),
                         compat.entry_to_queue("foo", **dict(defs)))
class test_Publisher(unittest.TestCase):
    """Tests for the carrot-compatible Publisher wrapper."""

    def setUp(self):
        # Each test gets a fresh mock-transport connection.
        self.connection = BrokerConnection(transport=Transport)

    def test_constructor(self):
        # Defaults: a durable, non-auto-delete direct exchange.
        publisher = compat.Publisher(self.connection,
                                     exchange="test_Publisher_constructor",
                                     routing_key="rkey")
        self.assertIsInstance(publisher.backend, Channel)
        exchange = publisher.exchange
        self.assertEqual(exchange.name, "test_Publisher_constructor")
        self.assertTrue(exchange.durable)
        self.assertFalse(exchange.auto_delete)
        self.assertEqual(exchange.type, "direct")

        # Exchange flags can be overridden via keyword arguments.
        publisher2 = compat.Publisher(self.connection,
                                      exchange="test_Publisher_constructor2",
                                      routing_key="rkey",
                                      auto_delete=True,
                                      durable=False)
        self.assertTrue(publisher2.exchange.auto_delete)
        self.assertFalse(publisher2.exchange.durable)

        # An explicit Exchange instance is used verbatim.
        explicit = Exchange("test_Publisher_constructor_explicit",
                            type="topic")
        publisher3 = compat.Publisher(self.connection,
                                      exchange=explicit)
        self.assertEqual(publisher3.exchange, explicit)

    def test_send(self):
        # send() should translate into a basic_publish on the channel.
        publisher = compat.Publisher(self.connection,
                                     exchange="test_Publisher_send",
                                     routing_key="rkey")
        publisher.send({"foo": "bar"})
        self.assertIn("basic_publish", publisher.backend)
        publisher.close()

    def test__enter__exit__(self):
        # Context-manager protocol returns self and closes on exit.
        publisher = compat.Publisher(self.connection,
                                     exchange="test_Publisher_send",
                                     routing_key="rkey")
        entered = publisher.__enter__()
        self.assertIs(entered, publisher)
        entered.__exit__()
        self.assertTrue(publisher._closed)
class test_Consumer(unittest.TestCase):
    """Tests for the carrot-compatible Consumer wrapper."""

    def setUp(self):
        self.connection = BrokerConnection(transport=Transport)

    def test_constructor(self, n="test_Consumer_constructor"):
        # Defaults: durable queue/exchange, no auto-delete.
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        self.assertIsInstance(c.backend, Channel)
        q = c.queues[0]
        self.assertTrue(q.durable)
        self.assertTrue(q.exchange.durable)
        self.assertFalse(q.auto_delete)
        self.assertFalse(q.exchange.auto_delete)
        self.assertEqual(q.name, n)
        self.assertEqual(q.exchange.name, n)

        # Flags apply to both the queue and its exchange.
        c2 = compat.Consumer(self.connection, queue=n + "2",
                             exchange=n + "2",
                             routing_key="rkey", durable=False,
                             auto_delete=True, exclusive=True)
        q2 = c2.queues[0]
        self.assertFalse(q2.durable)
        self.assertFalse(q2.exchange.durable)
        self.assertTrue(q2.auto_delete)
        self.assertTrue(q2.exchange.auto_delete)

    def test__enter__exit__(self, n="test__enter__exit__"):
        # Context-manager protocol returns self and closes the channel.
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        x = c.__enter__()
        self.assertIs(x, c)
        x.__exit__()
        self.assertIn("close", c.backend)
        self.assertTrue(c._closed)

    def test_iter(self, n="test_iterqueue"):
        # Smoke test: constructing and closing must not raise.
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        c.close()

    def test_process_next(self, n="test_process_next"):
        # process_next was never implemented in the compat layer.
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        self.assertRaises(NotImplementedError, c.process_next)
        c.close()

    def test_iterconsume(self, n="test_iterconsume"):
        # Smoke test: constructing and closing must not raise.
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        c.close()

    def test_discard_all(self, n="test_discard_all"):
        # discard_all maps to queue_purge on the channel.
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        c.discard_all()
        self.assertIn("queue_purge", c.backend)

    def test_fetch(self, n="test_fetch"):
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        # Empty queue: fetch returns None via basic_get.
        self.assertIsNone(c.fetch())
        self.assertIsNone(c.fetch(no_ack=True))
        self.assertIn("basic_get", c.backend)

        callback_called = [False]

        def receive(payload, message):
            callback_called[0] = True

        # Without enable_callbacks the registered callback is not invoked.
        c.backend.to_deliver.append("42")
        self.assertEqual(c.fetch().payload, "42")
        c.backend.to_deliver.append("46")
        c.register_callback(receive)
        self.assertEqual(c.fetch(enable_callbacks=True).payload, "46")
        self.assertTrue(callback_called[0])

    def test_discard_all_filterfunc_not_supported(self, n="xjf21j21"):
        # The carrot filterfunc argument was dropped in kombu.
        c = compat.Consumer(self.connection, queue=n, exchange=n,
                            routing_key="rkey")
        self.assertRaises(NotImplementedError, c.discard_all,
                          filterfunc=lambda x: x)
        c.close()

    def test_wait(self, n="test_wait"):
        # wait(limit) collects the values produced by iterconsume.
        class C(compat.Consumer):

            def iterconsume(self, limit=None):
                for i in range(limit):
                    yield i

        c = C(self.connection, queue=n, exchange=n,
              routing_key="rkey")
        self.assertEqual(c.wait(10), range(10))
        c.close()

    def test_iterqueue(self, n="test_iterqueue"):
        # iterqueue(limit) fetches repeatedly until the limit is reached.
        i = [0]

        class C(compat.Consumer):

            def fetch(self, limit=None):
                z = i[0]
                i[0] += 1
                return z

        c = C(self.connection, queue=n, exchange=n,
              routing_key="rkey")
        self.assertEqual(list(c.iterqueue(limit=10)), range(10))
        c.close()
class test_ConsumerSet(unittest.TestCase):
    """Tests for the carrot-compatible ConsumerSet wrapper."""

    def setUp(self):
        self.connection = BrokerConnection(transport=Transport)

    def test_constructor(self, prefix="0daf8h21"):
        # Carrot-style dict declaration of two queues.
        dcon = {"%s.xyx" % prefix: {"exchange": "%s.xyx" % prefix,
                                    "routing_key": "xyx"},
                "%s.xyz" % prefix: {"exchange": "%s.xyz" % prefix,
                                    "routing_key": "xyz"}}
        consumers = [compat.Consumer(self.connection, queue=prefix + str(i),
                                     exchange=prefix + str(i))
                     for i in range(3)]
        c = compat.ConsumerSet(self.connection, consumers=consumers)
        c2 = compat.ConsumerSet(self.connection, from_dict=dcon)

        self.assertEqual(len(c.queues), 3)
        self.assertEqual(len(c2.queues), 2)

        # Consumers added later share the set's channel.
        c.add_consumer(compat.Consumer(self.connection,
                                       queue=prefix + "xaxxxa",
                                       exchange=prefix + "xaxxxa"))
        self.assertEqual(len(c.queues), 4)
        for cq in c.queues:
            self.assertIs(cq.channel, c.channel)

        c2.add_consumer_from_dict({"%s.xxx" % prefix: {
            "exchange": "%s.xxx" % prefix,
            "routing_key": "xxx"}})
        self.assertEqual(len(c2.queues), 3)
        for c2q in c2.queues:
            self.assertIs(c2q.channel, c2.channel)

        # discard_all purges every queue in the set.
        c.discard_all()
        self.assertEqual(c.channel.called.count("queue_purge"), 4)
        c.consume()

        # close() cancels consumers and closes the underlying channel.
        c.close()
        c2.close()
        self.assertIn("basic_cancel", c.channel)
        self.assertIn("close", c.channel)
        self.assertIn("close", c2.channel)
| |
# Copyright (C) 2012-2013 Claudio Guarnieri.
# Copyright (C) 2014-2018 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import logging
import os
import re
import time
import xml.etree.ElementTree as ET
from cuckoo.common.config import config
from cuckoo.common.exceptions import CuckooCriticalError
from cuckoo.common.exceptions import CuckooMachineError
from cuckoo.common.exceptions import CuckooOperationalError
from cuckoo.common.exceptions import CuckooReportError
from cuckoo.common.exceptions import CuckooDependencyError
from cuckoo.common.files import Folders
from cuckoo.common.objects import Dictionary
from cuckoo.core.database import Database
from cuckoo.misc import cwd, make_list
# libvirt is an optional dependency: only the libvirt-based machinery
# modules need it, so degrade gracefully and record availability instead of
# failing at import time.
try:
    import libvirt
    HAVE_LIBVIRT = True
except ImportError:
    HAVE_LIBVIRT = False

log = logging.getLogger(__name__)
class Configuration(object):
    """Aggregate extracted malware configuration entries and merge them
    into a single record per malware family, normalizing key names and
    bucketing values by their expected cardinality."""

    # Keys handled explicitly by add(); never merged generically.
    skip = (
        "family", "extra",
    )

    # Single entry values.
    keywords1 = (
        "type", "version", "magic", "campaign",
    )

    # Multiple entry values.
    keywords2 = (
        "cnc", "url", "mutex", "user_agent", "referrer",
    )

    # Encryption key values.
    keywords3 = (
        "des3key", "rc4key", "xorkey", "pubkey", "privkey", "iv",
    )

    # Normalize keys.
    mapping = {
        "cncs": "cnc",
        "urls": "url",
        "user-agent": "user_agent",
    }

    def __init__(self):
        # entries: every raw entry as received, in order of addition.
        self.entries = []
        # order: family names in first-seen order (drives results()).
        self.order = []
        # families: family name -> merged configuration dict.
        self.families = {}

    def add(self, entry):
        """Merge one raw configuration entry (a dict with at least a
        'family' key) into the per-family aggregate."""
        self.entries.append(entry)

        # First time we see this family: create its aggregate record.
        if entry["family"] not in self.families:
            self.families[entry["family"]] = {
                "family": entry["family"],
            }
            self.order.append(entry["family"])
        family = self.families[entry["family"]]
        for key, value in entry.items():
            # Falsy values and special keys are not merged.
            if key in self.skip or not value:
                continue
            key = self.mapping.get(key, key)
            if key in self.keywords1:
                # Scalar value: conflicting duplicates are logged and the
                # first value wins.
                if family.get(key) and family[key] != value:
                    log.error(
                        "Duplicate value for %s => %r vs %r",
                        key, family[key], value
                    )
                    continue
                family[key] = value
            elif key in self.keywords2:
                # Multi-valued: accumulate unique, non-falsy values.
                if key not in family:
                    family[key] = []
                for value in make_list(value):
                    if value and value not in family[key]:
                        family[key].append(value)
            elif key in self.keywords3:
                # Crypto material is collected under a nested "key" dict.
                if "key" not in family:
                    family["key"] = {}
                if key not in family["key"]:
                    family["key"][key] = []
                if value not in family["key"][key]:
                    family["key"][key].append(value)
            elif key not in family.get("extra", {}):
                # Unknown keys land in "extra" as lists of unique values.
                if "extra" not in family:
                    family["extra"] = {}
                family["extra"][key] = [value]
            elif value not in family["extra"][key]:
                family["extra"][key].append(value)

    def get(self, family, *keys):
        """Walk the family's aggregate through nested *keys*; returns None
        when the path is missing or ends in an empty value."""
        r = self.families.get(family, {})
        for key in keys:
            r = r.get(key, {})
        return r or None

    def family(self, name):
        """Return the merged record for *name*, or an empty dict."""
        return self.families.get(name) or {}

    def results(self):
        """Return the merged family records in first-seen order."""
        ret = []
        for family in self.order:
            ret.append(self.families[family])
        return ret
class Auxiliary(object):
    """Base abstract class for auxiliary modules.

    Collaborators (task, machine, guest manager, options) are injected
    after construction via the set_* methods; concrete modules implement
    start() and stop().
    """

    def __init__(self):
        # All collaborators start out unset; they are injected later.
        self.task = self.machine = self.guest_manager = self.options = None

    @classmethod
    def init_once(cls):
        """One-time initialization hook; the default does nothing."""
        pass

    def set_task(self, task):
        """Attach the task this auxiliary module operates on."""
        self.task = task

    def set_machine(self, machine):
        """Attach the analysis machine."""
        self.machine = machine

    def set_guest_manager(self, guest_manager):
        """Attach the guest manager handling the analysis VM."""
        self.guest_manager = guest_manager

    def set_options(self, options):
        """Store module options wrapped for attribute-style access."""
        self.options = Dictionary(options)

    def start(self):
        """Start the module; concrete subclasses must implement this."""
        raise NotImplementedError

    def stop(self):
        """Stop the module; concrete subclasses must implement this."""
        raise NotImplementedError
class Machinery(object):
    """Base abstract class for machinery modules."""

    # Default label used in machinery configuration file to supply virtual
    # machine name/label/vmx path. Override it if you dubbed it in another
    # way.
    LABEL = "label"

    def __init__(self):
        self.options = None
        self.db = Database()
        self.remote_control = False

        # Machine table is cleaned to be filled from configuration file
        # at each start.
        self.db.clean_machines()

    @classmethod
    def init_once(cls):
        """One-time initialization hook; subclasses may override."""
        pass

    def pcap_path(self, task_id):
        """Returns the .pcap path for this task id."""
        return cwd("storage", "analyses", "%s" % task_id, "dump.pcap")

    def set_options(self, options):
        """Set machine manager options.
        @param options: machine manager options dict.
        """
        self.options = options

    def initialize(self, module_name):
        """Read, load, and verify machines configuration.
        @param module_name: module name.
        """
        # Load.
        self._initialize(module_name)

        # Run initialization checks.
        self._initialize_check()

    def _initialize(self, module_name):
        """Read configuration and register every configured machine in the
        database.
        @param module_name: module name.
        """
        machinery = self.options.get(module_name)
        for vmname in machinery["machines"]:
            options = self.options.get(vmname)

            # If configured, use specific network interface for this
            # machine, else use the default value.
            if options.get("interface"):
                interface = options["interface"]
            else:
                interface = machinery.get("interface")

            # Per-machine ResultServer IP overrides the global one.
            if options.get("resultserver_ip"):
                ip = options["resultserver_ip"]
            else:
                ip = config("cuckoo:resultserver:ip")

            if options.get("resultserver_port"):
                port = options["resultserver_port"]
            else:
                # The ResultServer port might have been dynamically changed,
                # get it from the ResultServer singleton. Also avoid import
                # recursion issues by importing ResultServer here.
                from cuckoo.core.resultserver import ResultServer
                port = ResultServer().port

            self.db.add_machine(
                name=vmname,
                label=options[self.LABEL],
                ip=options.ip,
                platform=options.platform,
                options=options.get("options", ""),
                tags=options.tags,
                interface=interface,
                snapshot=options.snapshot,
                resultserver_ip=ip,
                resultserver_port=port
            )

    def _initialize_check(self):
        """Runs checks against virtualization software when a machine manager
        is initialized.
        @note: in machine manager modules you may override or superclass
        this method.
        @raise CuckooMachineError: if a misconfiguration or an unknown vm
        state is found.
        """
        try:
            configured_vms = self._list()
        except NotImplementedError:
            # Subclass does not support listing; skip the state checks.
            return

        for machine in self.machines():
            # If this machine is already in the "correct" state, then we
            # go on to the next machine.
            if machine.label in configured_vms and \
                    self._status(machine.label) in [self.POWEROFF, self.ABORTED]:
                continue

            # This machine is currently not in its correct state, we're going
            # to try to shut it down. If that works, then the machine is fine.
            try:
                self.stop(machine.label)
            except CuckooMachineError as e:
                raise CuckooCriticalError(
                    "Please update your configuration. Unable to shut '%s' "
                    "down or find the machine in its proper state: %s" %
                    (machine.label, e)
                )

        if not config("cuckoo:timeouts:vm_state"):
            raise CuckooCriticalError(
                "Virtual machine state change timeout has not been set "
                "properly, please update it to be non-null."
            )

    def machines(self):
        """List virtual machines.
        @return: virtual machines list
        """
        return self.db.list_machines()

    def availables(self):
        """How many machines are free.
        @return: free machines count.
        """
        return self.db.count_machines_available()

    def acquire(self, machine_id=None, platform=None, tags=None):
        """Acquire a machine to start analysis.
        @param machine_id: machine ID.
        @param platform: machine platform.
        @param tags: machine tags
        @return: machine or None.
        """
        # machine_id takes precedence over platform, which takes precedence
        # over a tags-only lookup.
        if machine_id:
            return self.db.lock_machine(label=machine_id)
        elif platform:
            return self.db.lock_machine(platform=platform, tags=tags)
        else:
            return self.db.lock_machine(tags=tags)

    def release(self, label=None):
        """Release a machine.
        @param label: machine name.
        """
        self.db.unlock_machine(label)

    def running(self):
        """Returns running virtual machines.
        @return: running virtual machines list.
        """
        return self.db.list_machines(locked=True)

    def shutdown(self):
        """Shutdown the machine manager. Kills all alive machines.
        @raise CuckooMachineError: if unable to stop machine.
        """
        if len(self.running()) > 0:
            log.info("Still %s guests alive. Shutting down...",
                     len(self.running()))
            for machine in self.running():
                try:
                    self.stop(machine.label)
                except CuckooMachineError as e:
                    # Best-effort shutdown: log and continue with the rest.
                    log.warning("Unable to shutdown machine %s, please check "
                                "manually. Error: %s", machine.label, e)

    def set_status(self, label, status):
        """Set status for a virtual machine.
        @param label: virtual machine label
        @param status: new virtual machine status
        """
        self.db.set_machine_status(label, status)

    def start(self, label, task):
        """Start a machine.
        @param label: machine name.
        @param task: task object.
        @raise NotImplementedError: this method is abstract.
        """
        raise NotImplementedError

    def stop(self, label=None):
        """Stop a machine.
        @param label: machine name.
        @raise NotImplementedError: this method is abstract.
        """
        raise NotImplementedError

    def _list(self):
        """Lists virtual machines configured.
        @raise NotImplementedError: this method is abstract.
        """
        raise NotImplementedError

    def dump_memory(self, label, path):
        """Takes a memory dump of a machine.
        @param path: path to where to store the memory dump.
        """
        raise NotImplementedError

    def enable_remote_control(self, label):
        """Enable remote control interface (RDP/VNC/SSH).
        @param label: machine name.
        @return: None
        """
        raise NotImplementedError

    def disable_remote_control(self, label):
        """Disable remote control interface (RDP/VNC/SSH).
        @param label: machine name.
        @return: None
        """
        raise NotImplementedError

    def get_remote_control_params(self, label):
        """Return connection details for remote control.
        @param label: machine name.
        @return: dict with keys: protocol, host, port
        """
        raise NotImplementedError

    def _wait_status(self, label, *states):
        """Waits for a vm status.
        @param label: virtual machine name.
        @param states: accepted virtual machine statuses.
        @raise CuckooMachineError: if default waiting timeout expire.
        """
        # This block was originally suggested by Loic Jaquemet.
        waitme = 0
        try:
            current = self._status(label)
        except NameError:
            return

        while current not in states:
            log.debug("Waiting %i cuckooseconds for machine %s to switch "
                      "to status %s", waitme, label, states)
            if waitme > config("cuckoo:timeouts:vm_state"):
                raise CuckooMachineError(
                    "Timeout hit while for machine %s to change status" % label
                )

            # Poll once per second until the timeout is exceeded.
            time.sleep(1)
            waitme += 1
            current = self._status(label)
class LibVirtMachinery(Machinery):
"""Libvirt based machine manager.
If you want to write a custom module for a virtualization software
supported by libvirt you have just to inherit this machine manager and
change the connection string.
"""
# VM states.
RUNNING = "running"
PAUSED = "paused"
POWEROFF = "poweroff"
ERROR = "machete"
ABORTED = "abort"
    def __init__(self):
        # Fail fast with an actionable message if the optional libvirt
        # binding is missing: every method of this class depends on it.
        if not HAVE_LIBVIRT:
            raise CuckooDependencyError(
                "The libvirt package has not been installed "
                "(`pip install libvirt-python`)"
            )

        super(LibVirtMachinery, self).__init__()
    def initialize(self, module):
        """Initialize machine manager module. Override default to set proper
        connection string.
        @param module: machine manager module
        """
        super(LibVirtMachinery, self).initialize(module)
    def _initialize_check(self):
        """Runs all checks when a machine manager is initialized.
        @raise CuckooMachineError: if libvirt version is not supported.
        """
        # Version checks.
        if not self._version_check():
            raise CuckooMachineError("Libvirt version is not supported, "
                                     "please get an updated version")

        # Preload VMs: cache a libvirt domain handle per machine label.
        self.vms = self._fetch_machines()

        # Base checks. Also attempts to shutdown any machines which are
        # currently still active.
        super(LibVirtMachinery, self)._initialize_check()
    def start(self, label, task):
        """Starts a virtual machine by reverting it to a snapshot.
        @param label: virtual machine name.
        @param task: task object.
        @raise CuckooMachineError: if unable to start virtual machine.
        """
        log.debug("Starting machine %s", label)

        # Refuse to start a VM that is not powered off.
        if self._status(label) != self.POWEROFF:
            msg = "Trying to start a virtual machine that has not " \
                  "been turned off {0}".format(label)
            raise CuckooMachineError(msg)

        conn = self._connect()

        vm_info = self.db.view_machine_by_label(label)

        snapshot_list = self.vms[label].snapshotListNames(flags=0)

        # If a snapshot is configured try to use it.
        if vm_info.snapshot and vm_info.snapshot in snapshot_list:
            # Revert to desired snapshot, if it exists.
            log.debug("Using snapshot {0} for virtual machine "
                      "{1}".format(vm_info.snapshot, label))
            try:
                vm = self.vms[label]
                snapshot = vm.snapshotLookupByName(vm_info.snapshot, flags=0)
                self.vms[label].revertToSnapshot(snapshot, flags=0)
            except libvirt.libvirtError:
                msg = "Unable to restore snapshot {0} on " \
                      "virtual machine {1}".format(vm_info.snapshot, label)
                raise CuckooMachineError(msg)
            finally:
                # The connection is always released, whether or not the
                # revert succeeded.
                self._disconnect(conn)
        elif self._get_snapshot(label):
            # No configured snapshot: fall back to the current/latest one.
            snapshot = self._get_snapshot(label)
            log.debug("Using snapshot {0} for virtual machine "
                      "{1}".format(snapshot.getName(), label))
            try:
                self.vms[label].revertToSnapshot(snapshot, flags=0)
            except libvirt.libvirtError:
                raise CuckooMachineError("Unable to restore snapshot on "
                                         "virtual machine {0}".format(label))
            finally:
                self._disconnect(conn)
        else:
            self._disconnect(conn)
            raise CuckooMachineError("No snapshot found for virtual machine "
                                     "{0}".format(label))

        # Check state: block until the VM reports running.
        self._wait_status(label, self.RUNNING)
    def stop(self, label):
        """Stops a virtual machine. Kill them all.
        @param label: virtual machine name.
        @raise CuckooMachineError: if unable to stop virtual machine.
        """
        log.debug("Stopping machine %s", label)

        if self._status(label) == self.POWEROFF:
            raise CuckooMachineError("Trying to stop an already stopped "
                                     "machine {0}".format(label))

        # Force virtual machine shutdown.
        conn = self._connect()
        try:
            if not self.vms[label].isActive():
                # Status said non-POWEROFF but libvirt reports inactive;
                # nothing to destroy.
                log.debug("Trying to stop an already stopped machine %s. "
                          "Skip", label)
            else:
                self.vms[label].destroy()  # Machete's way!
        except libvirt.libvirtError as e:
            raise CuckooMachineError("Error stopping virtual machine "
                                     "{0}: {1}".format(label, e))
        finally:
            self._disconnect(conn)

        # Check state: block until the VM reports powered off.
        self._wait_status(label, self.POWEROFF)
    def shutdown(self):
        """Override shutdown to free libvirt handlers - they print errors."""
        super(LibVirtMachinery, self).shutdown()

        # Free handlers.
        self.vms = None
    def dump_memory(self, label, path):
        """Takes a memory dump of the given machine.
        @param label: virtual machine name.
        @param path: path to where to store the memory dump.
        @raise CuckooMachineError: if the core dump fails.
        """
        log.debug("Dumping memory for machine %s", label)

        conn = self._connect()
        try:
            # Resolve permission issue as libvirt creates the file as
            # root/root in mode 0600, preventing us from reading it. This
            # supposedly still doesn't allow us to remove it, though..
            open(path, "wb").close()
            self.vms[label].coreDump(path, flags=libvirt.VIR_DUMP_MEMORY_ONLY)
        except libvirt.libvirtError as e:
            raise CuckooMachineError("Error dumping memory virtual machine "
                                     "{0}: {1}".format(label, e))
        finally:
            self._disconnect(conn)
    def _status(self, label):
        """Gets current status of a vm and records it in the database.
        @param label: virtual machine name.
        @return: status string.
        @raise CuckooMachineError: if the state cannot be retrieved.
        """
        log.debug("Getting status for %s", label)

        # States mapping of python-libvirt.
        # virDomainState
        # VIR_DOMAIN_NOSTATE = 0
        # VIR_DOMAIN_RUNNING = 1
        # VIR_DOMAIN_BLOCKED = 2
        # VIR_DOMAIN_PAUSED = 3
        # VIR_DOMAIN_SHUTDOWN = 4
        # VIR_DOMAIN_SHUTOFF = 5
        # VIR_DOMAIN_CRASHED = 6
        # VIR_DOMAIN_PMSUSPENDED = 7

        conn = self._connect()
        try:
            # state is a (state, reason) pair per the libvirt binding.
            state = self.vms[label].state(flags=0)
        except libvirt.libvirtError as e:
            raise CuckooMachineError("Error getting status for virtual "
                                     "machine {0}: {1}".format(label, e))
        finally:
            self._disconnect(conn)

        if state:
            if state[0] == 1:
                status = self.RUNNING
            elif state[0] == 3:
                status = self.PAUSED
            elif state[0] == 4 or state[0] == 5:
                status = self.POWEROFF
            else:
                # Any other state (nostate, blocked, crashed, suspended)
                # is treated as an error condition.
                status = self.ERROR

        # Report back status.
        if status:
            self.set_status(label, status)
            return status
        else:
            raise CuckooMachineError("Unable to get status for "
                                     "{0}".format(label))
    def _connect(self):
        """Connects to libvirt subsystem.
        @return: an open libvirt connection handle.
        @raise CuckooMachineError: when unable to connect to libvirt.
        """
        # Check if a connection string is available.
        # NOTE(review): self.dsn is expected to be set by the concrete
        # subclass — this base class never assigns it.
        if not self.dsn:
            raise CuckooMachineError("You must provide a proper "
                                     "connection string")

        try:
            return libvirt.open(self.dsn)
        except libvirt.libvirtError:
            raise CuckooMachineError("Cannot connect to libvirt")
    def _disconnect(self, conn):
        """Disconnects from the libvirt subsystem.
        @param conn: connection handle returned by _connect().
        @raise CuckooMachineError: if cannot disconnect from libvirt.
        """
        try:
            conn.close()
        except libvirt.libvirtError:
            raise CuckooMachineError("Cannot disconnect from libvirt")
def _fetch_machines(self):
"""Fetch machines handlers.
@return: dict with machine label as key and handle as value.
"""
vms = {}
for vm in self.machines():
vms[vm.label] = self._lookup(vm.label)
return vms
def _lookup(self, label):
"""Search for a virtual machine.
@param conn: libvirt connection handle.
@param label: virtual machine name.
@raise CuckooMachineError: if virtual machine is not found.
"""
conn = self._connect()
try:
vm = conn.lookupByName(label)
except libvirt.libvirtError:
raise CuckooMachineError("Cannot find machine "
"{0}".format(label))
finally:
self._disconnect(conn)
return vm
def _list(self):
"""List available virtual machines.
@raise CuckooMachineError: if unable to list virtual machines.
"""
conn = self._connect()
try:
names = conn.listDefinedDomains()
except libvirt.libvirtError:
raise CuckooMachineError("Cannot list domains")
finally:
self._disconnect(conn)
return names
def _version_check(self):
"""Check if libvirt release supports snapshots.
@return: True or false.
"""
if libvirt.getVersion() >= 8000:
return True
else:
return False
    def _get_snapshot(self, label):
        """Get current snapshot for virtual machine
        @param label: virtual machine name
        @return None or current snapshot
        @raise CuckooMachineError: if cannot find current snapshot or
                when there are too many snapshots available
        """
        def _extract_creation_time(node):
            """Extracts creation time from a KVM vm config file.
            @param node: config file node
            @return: extracted creation time
            """
            # Parse the snapshot XML and return its <creationTime> text.
            xml = ET.fromstring(node.getXMLDesc(flags=0))
            return xml.findtext("./creationTime")
        snapshot = None
        conn = self._connect()
        try:
            vm = self.vms[label]
            # Try to get the current snapshot, otherwise fallback on the latest
            # from config file.
            if vm.hasCurrentSnapshot(flags=0):
                snapshot = vm.snapshotCurrent(flags=0)
            else:
                log.debug("No current snapshot, using latest snapshot")
                # No current snapshot, try to get the last one from config file.
                # NOTE(review): creation times are compared as strings, which
                # only orders correctly for equal-length epoch timestamps, and
                # an empty snapshot list raises IndexError here rather than
                # CuckooMachineError — confirm both are acceptable.
                snapshot = sorted(vm.listAllSnapshots(flags=0),
                                  key=_extract_creation_time,
                                  reverse=True)[0]
        except libvirt.libvirtError:
            raise CuckooMachineError("Unable to get snapshot for "
                                     "virtual machine {0}".format(label))
        finally:
            self._disconnect(conn)
        return snapshot
def enable_remote_control(self, label):
# TODO: we can't dynamically enable/disable this right now
pass
def disable_remote_control(self, label):
pass
    def get_remote_control_params(self, label):
        """Return the VNC connection parameters for a virtual machine.
        @param label: virtual machine name.
        @return: dict with protocol/host/port keys, or {} when the VM
            does not exist or exposes no valid VNC port.
        """
        conn = self._connect()
        try:
            vm = conn.lookupByName(label)
            if not vm:
                log.warning("No such VM: %s", label)
                return {}
            port = 0
            # Inspect the domain XML for a VNC <graphics> device.
            desc = ET.fromstring(vm.XMLDesc())
            for elem in desc.findall("./devices/graphics"):
                if elem.attrib.get("type") == "vnc":
                    # Future work: passwd, listen, socket (addr:port)
                    port = elem.attrib.get("port")
                    # NOTE(review): an empty "port" attribute leaves port as a
                    # string here — confirm the domain XML always carries a
                    # numeric port for VNC graphics.
                    if port:
                        port = int(port)
                    break
        finally:
            self._disconnect(conn)
        if port <= 0:
            log.error("VM %s does not have a valid VNC port", label)
            return {}
        # TODO The Cuckoo Web Interface may be running at a different host
        # than the actual Cuckoo daemon (and as such, the VMs).
        return {
            "protocol": "vnc",
            "host": "127.0.0.1",
            "port": port,
        }
class Processing(object):
    """Base abstract class for processing module.
    Subclasses implement run() and read their input through the various
    *_path attributes populated by set_path().
    """
    # Relative execution order among processing modules (lower runs first).
    order = 1
    enabled = True
    def __init__(self):
        self.analysis_path = ""
        self.baseline_path = ""
        self.logs_path = ""
        self.task = None
        self.machine = None
        self.options = None
        self.results = {}
    @classmethod
    def init_once(cls):
        # Hook for one-time, per-class initialization; default is a no-op.
        pass
    def set_options(self, options):
        """Set processing options.
        @param options: processing options dict.
        """
        self.options = Dictionary(options)
    def set_task(self, task):
        """Add task information.
        @param task: task dictionary.
        """
        self.task = task
    def set_machine(self, machine):
        """Add machine information."""
        self.machine = machine
    def set_baseline(self, baseline_path):
        """Set the path to the baseline directory."""
        self.baseline_path = baseline_path
    def set_path(self, analysis_path):
        """Set paths.
        Pre-computes the location of every artifact produced by an
        analysis inside the given folder.
        @param analysis_path: analysis folder path.
        """
        self.analysis_path = analysis_path
        self.log_path = os.path.join(self.analysis_path, "analysis.log")
        self.cuckoolog_path = os.path.join(self.analysis_path, "cuckoo.log")
        self.file_path = os.path.realpath(os.path.join(self.analysis_path,
                                                       "binary"))
        self.dropped_path = os.path.join(self.analysis_path, "files")
        self.dropped_meta_path = os.path.join(self.analysis_path, "files.json")
        self.extracted_path = os.path.join(self.analysis_path, "extracted")
        self.package_files = os.path.join(self.analysis_path, "package_files")
        self.buffer_path = os.path.join(self.analysis_path, "buffer")
        self.logs_path = os.path.join(self.analysis_path, "logs")
        self.shots_path = os.path.join(self.analysis_path, "shots")
        self.pcap_path = os.path.join(self.analysis_path, "dump.pcap")
        self.pmemory_path = os.path.join(self.analysis_path, "memory")
        self.memory_path = os.path.join(self.analysis_path, "memory.dmp")
        self.mitmout_path = os.path.join(self.analysis_path, "mitm.log")
        self.mitmerr_path = os.path.join(self.analysis_path, "mitm.err")
        self.tlsmaster_path = os.path.join(self.analysis_path, "tlsmaster.txt")
        self.suricata_path = os.path.join(self.analysis_path, "suricata")
        self.network_path = os.path.join(self.analysis_path, "network")
        self.taskinfo_path = os.path.join(self.analysis_path, "task.json")
    def set_results(self, results):
        """Set the results - the fat dictionary."""
        self.results = results
    def run(self):
        """Start processing.
        @raise NotImplementedError: this method is abstract.
        """
        raise NotImplementedError
class Signature(object):
    """Base class for Cuckoo signatures.
    Subclasses describe one detection each; the matching engine feeds them
    events (on_call, on_signature, on_process, on_yara, on_extract) and
    collects the marks they record as evidence.
    """
    name = ""
    description = ""
    severity = 1
    order = 1
    # NOTE(review): the class-level lists below are shared across all
    # subclasses that do not override them; they must never be mutated
    # in place.
    categories = []
    families = []
    authors = []
    references = []
    platform = None
    alert = False
    enabled = True
    # Minimum/maximum Cuckoo version this signature supports.
    minimum = None
    maximum = None
    # Maximum amount of marks to record.
    markcount = 50
    # Basic filters to reduce the amount of events sent to this signature.
    filter_apinames = []
    filter_categories = []
    # If no on_call() handler is present and this field has been set, then
    # dispatch on a per-API basis to the accompanying API. That is, rather
    # than calling the generic on_call(), call, e.g., on_call_CreateFile().
    on_call_dispatch = False
    def __init__(self, caller):
        """
        @param caller: calling object. Stores results in caller.results
        """
        self.marks = []
        self.matched = False
        self._caller = caller
        # These are set by the caller, they represent the process identifier
        # and call index respectively.
        self.pid = None
        self.cid = None
        self.call = None
    @classmethod
    def init_once(cls):
        # Hook for one-time, per-class initialization; default is a no-op.
        pass
    def _check_value(self, pattern, subject, regex=False, all=False):
        """Checks a pattern against a given subject.
        @param pattern: string or expression to check for.
        @param subject: target of the check (single value or list).
        @param regex: boolean representing if the pattern is a regular
                      expression or not and therefore should be compiled.
        @param all: return every match as a list instead of just the first.
        @return: list of all matches when all=True; otherwise the first
                 match, or None when nothing matched.
        """
        ret = set()
        if regex:
            exp = re.compile(pattern, re.IGNORECASE)
            if isinstance(subject, list):
                for item in subject:
                    if exp.match(item):
                        ret.add(item)
            else:
                if exp.match(subject):
                    ret.add(subject)
        else:
            if isinstance(subject, list):
                for item in subject:
                    # Plain patterns are compared case-insensitively.
                    if item.lower() == pattern.lower():
                        ret.add(item)
            else:
                if subject == pattern:
                    ret.add(subject)
        # Return all elements.
        if all:
            return list(ret)
        # Return only the first element, if available. Otherwise return None.
        elif ret:
            return ret.pop()
    def get_results(self, key=None, default=None):
        """Return one section of the global results, or all of them."""
        if key:
            return self._caller.results.get(key, default)
        return self._caller.results
    def get_processes(self, name=None):
        """Get a list of processes.
        @param name: If set only return processes with that name.
        @return: List of processes or empty list
        """
        for item in self.get_results("behavior", {}).get("processes", []):
            if name is None or item["process_name"] == name:
                yield item
    def get_process_by_pid(self, pid=None):
        """Get a process by its process identifier.
        @param pid: pid to search for.
        @return: process, or None when no process matches.
        """
        for item in self.get_results("behavior", {}).get("processes", []):
            if item["pid"] == pid:
                return item
    # NOTE(review): default=[] is a mutable default argument; it is only
    # ever read here, so it is safe, but it must never be mutated.
    def get_summary(self, key=None, default=[]):
        """Get one or all values related to the global summary."""
        summary = self.get_results("behavior", {}).get("summary", {})
        return summary.get(key, default) if key else summary
    def get_summary_generic(self, pid, actions):
        """Get generic info from summary.
        @param pid: pid of the process. None for all
        @param actions: A list of actions to get
        """
        ret = []
        for process in self.get_results("behavior", {}).get("generic", []):
            if pid is not None and process["pid"] != pid:
                continue
            for action in actions:
                if action in process["summary"]:
                    ret += process["summary"][action]
        return ret
    def get_files(self, pid=None, actions=None):
        """Get files read, queried, or written to optionally by a
        specific process.
        @param pid: the process or None for all
        @param actions: actions to search for. None is all
        @return: list of matching file paths
        """
        if actions is None:
            actions = [
                "file_opened", "file_written",
                "file_read", "file_deleted",
                "file_exists", "file_failed",
            ]
        return self.get_summary_generic(pid, actions)
    def get_dll_loaded(self, pid=None):
        """Get DLLs loaded by a specific process.
        @param pid: the process or None for all
        @return: list of loaded DLLs
        """
        return self.get_summary_generic(pid, ["dll_loaded"])
    def get_keys(self, pid=None, actions=None):
        """Get registry keys.
        @param pid: The pid to look in or None for all.
        @param actions: the actions as a list.
        @return: list of registry keys
        """
        if actions is None:
            actions = [
                "regkey_opened", "regkey_written",
                "regkey_read", "regkey_deleted",
            ]
        return self.get_summary_generic(pid, actions)
    def check_file(self, pattern, regex=False, actions=None, pid=None,
                   all=False):
        """Checks for a file being opened.
        @param pattern: string or expression to check for.
        @param regex: boolean representing if the pattern is a regular
                      expression or not and therefore should be compiled.
        @param actions: a list of key actions to use.
        @param pid: The process id to check. If it is set to None, all
                    processes will be checked.
        @return: boolean with the result of the check.
        """
        if actions is None:
            actions = [
                "file_opened", "file_written",
                "file_read", "file_deleted",
                "file_exists", "file_failed",
            ]
        return self._check_value(pattern=pattern,
                                 subject=self.get_files(pid, actions),
                                 regex=regex,
                                 all=all)
    def check_dll_loaded(self, pattern, regex=False, actions=None, pid=None,
                         all=False):
        """Checks for DLLs being loaded.
        @param pattern: string or expression to check for.
        @param regex: boolean representing if the pattern is a regular
                      expression or not and therefore should be compiled.
        @param pid: The process id to check. If it is set to None, all
                    processes will be checked.
        @return: boolean with the result of the check.
        """
        return self._check_value(pattern=pattern,
                                 subject=self.get_dll_loaded(pid),
                                 regex=regex,
                                 all=all)
    def check_command_line(self, pattern, regex=False, all=False):
        """Checks for a command line being opened.
        @param pattern: string or expression to check for.
        @param regex: boolean representing if the pattern is a regular
                      expression or not and therefore should be compiled.
        @return: boolean with the result of the check.
        """
        return self._check_value(pattern=pattern,
                                 subject=self.get_summary("command_line"),
                                 regex=regex,
                                 all=all)
    def check_key(self, pattern, regex=False, actions=None, pid=None,
                  all=False):
        """Checks for a registry key being accessed.
        @param pattern: string or expression to check for.
        @param regex: boolean representing if the pattern is a regular
                      expression or not and therefore should be compiled.
        @param actions: a list of key actions to use.
        @param pid: The process id to check. If it is set to None, all
                    processes will be checked.
        @return: boolean with the result of the check.
        """
        if actions is None:
            actions = [
                "regkey_written", "regkey_opened",
                "regkey_read", "regkey_deleted",
            ]
        return self._check_value(pattern=pattern,
                                 subject=self.get_keys(pid, actions),
                                 regex=regex,
                                 all=all)
    def get_mutexes(self, pid=None):
        """
        @param pid: Pid to filter for
        @return:List of mutexes
        """
        return self.get_summary_generic(pid, ["mutex"])
    def check_mutex(self, pattern, regex=False, all=False):
        """Checks for a mutex being opened.
        @param pattern: string or expression to check for.
        @param regex: boolean representing if the pattern is a regular
                      expression or not and therefore should be compiled.
        @return: boolean with the result of the check.
        """
        return self._check_value(pattern=pattern,
                                 subject=self.get_mutexes(),
                                 regex=regex,
                                 all=all)
    def get_command_lines(self):
        """Retrieves all command lines used."""
        return self.get_summary("command_line")
    def get_wmi_queries(self):
        """Retrieves all executed WMI queries."""
        return self.get_summary("wmi_query")
    def get_net_generic(self, subtype):
        """Generic getting network data.
        @param subtype: subtype string to search for.
        """
        return self.get_results("network", {}).get(subtype, [])
    def get_net_hosts(self):
        """Returns a list of all hosts."""
        return self.get_net_generic("hosts")
    def get_net_domains(self):
        """Returns a list of all domains."""
        return self.get_net_generic("domains")
    def get_net_http(self):
        """Returns a list of all http data."""
        return self.get_net_generic("http")
    def get_net_http_ex(self):
        """Returns a list of all http and https data."""
        return \
            self.get_net_generic("http_ex") + self.get_net_generic("https_ex")
    def get_net_udp(self):
        """Returns a list of all udp data."""
        return self.get_net_generic("udp")
    def get_net_icmp(self):
        """Returns a list of all icmp data."""
        return self.get_net_generic("icmp")
    def get_net_irc(self):
        """Returns a list of all irc data."""
        return self.get_net_generic("irc")
    def get_net_smtp(self):
        """Returns a list of all smtp data."""
        return self.get_net_generic("smtp")
    def get_net_smtp_ex(self):
        """Returns a list of all smtp data."""
        return self.get_net_generic("smtp_ex")
    def get_virustotal(self):
        """Returns the information retrieved from virustotal."""
        return self.get_results("virustotal", {})
    def get_volatility(self, module=None):
        """Returns the data that belongs to the given module."""
        volatility = self.get_results("memory", {})
        return volatility if module is None else volatility.get(module, {})
    # NOTE(review): default={} below is a mutable default argument; it is
    # only read, never mutated, so it is safe as-is.
    def get_apkinfo(self, section=None, default={}):
        """Returns the apkinfo results for this analysis."""
        apkinfo = self.get_results("apkinfo", {})
        return apkinfo if section is None else apkinfo.get(section, default)
    def get_droidmon(self, section=None, default={}):
        """Returns the droidmon results for this analysis."""
        droidmon = self.get_results("droidmon", {})
        return droidmon if section is None else droidmon.get(section, default)
    def get_googleplay(self, section=None, default={}):
        """Returns the Google Play results for this analysis."""
        googleplay = self.get_results("googleplay", {})
        return googleplay if section is None else googleplay.get(section, default)
    def check_ip(self, pattern, regex=False, all=False):
        """Checks for an IP address being contacted.
        @param pattern: string or expression to check for.
        @param regex: boolean representing if the pattern is a regular
                      expression or not and therefore should be compiled.
        @return: boolean with the result of the check.
        """
        return self._check_value(pattern=pattern,
                                 subject=self.get_net_hosts(),
                                 regex=regex,
                                 all=all)
    def check_domain(self, pattern, regex=False, all=False):
        """Checks for a domain being contacted.
        @param pattern: string or expression to check for.
        @param regex: boolean representing if the pattern is a regular
                      expression or not and therefore should be compiled.
        @return: boolean with the result of the check.
        """
        # Deduplicate domains before matching.
        domains = set()
        for item in self.get_net_domains():
            domains.add(item["domain"])
        return self._check_value(pattern=pattern,
                                 subject=list(domains),
                                 regex=regex,
                                 all=all)
    def check_url(self, pattern, regex=False, all=False):
        """Checks for a URL being contacted.
        @param pattern: string or expression to check for.
        @param regex: boolean representing if the pattern is a regular
                      expression or not and therefore should be compiled.
        @return: boolean with the result of the check.
        """
        # Deduplicate URIs before matching.
        urls = set()
        for item in self.get_net_http():
            urls.add(item["uri"])
        return self._check_value(pattern=pattern,
                                 subject=list(urls),
                                 regex=regex,
                                 all=all)
    def check_suricata_alerts(self, pattern):
        """Check for pattern in Suricata alert signature
        @param pattern: string or expression to check for.
        @return: True/False
        """
        for alert in self.get_results("suricata", {}).get("alerts", []):
            if re.findall(pattern, alert.get("signature", ""), re.I):
                return True
        return False
    def init(self):
        """Allow signatures to initialize themselves."""
    def mark_call(self, *args, **kwargs):
        """Mark the current call as explanation as to why this signature
        matched."""
        mark = {
            "type": "call",
            "pid": self.pid,
            "cid": self.cid,
            "call": self.call,
        }
        if args or kwargs:
            log.warning(
                "You have provided extra arguments to the mark_call() method "
                "which no longer supports doing so. Please report explicit "
                "IOCs through mark_ioc()."
            )
        self.marks.append(mark)
    def mark_ioc(self, category, ioc, description=None):
        """Mark an IOC as explanation as to why the current signature
        matched."""
        mark = {
            "type": "ioc",
            "category": category,
            "ioc": ioc,
            "description": description,
        }
        # Prevent duplicates.
        if mark not in self.marks:
            self.marks.append(mark)
    def mark_vol(self, plugin, **kwargs):
        """Mark output of a Volatility plugin as explanation as to why the
        current signature matched."""
        mark = {
            "type": "volatility",
            "plugin": plugin,
        }
        mark.update(kwargs)
        self.marks.append(mark)
    def mark_config(self, config):
        """Mark configuration from this malware family."""
        if not isinstance(config, dict) or "family" not in config:
            raise CuckooCriticalError("Invalid call to mark_config().")
        self.marks.append({
            "type": "config",
            "config": config,
        })
    def mark(self, **kwargs):
        """Mark arbitrary data."""
        mark = {
            "type": "generic",
        }
        mark.update(kwargs)
        self.marks.append(mark)
    def has_marks(self, count=None):
        """Returns true if this signature has one or more marks."""
        if count is not None:
            return len(self.marks) >= count
        # Coerce the list to a boolean.
        return not not self.marks
    def on_call(self, call, process):
        """Notify signature about API call. Return value determines
        if this signature is done or could still match.
        Only called if signature is "active".
        @param call: logged API call.
        @param process: proc object.
        """
        # Dispatch this call to a per-API specific handler.
        if self.on_call_dispatch:
            return getattr(self, "on_call_%s" % call["api"])(call, process)
        raise NotImplementedError
    def on_signature(self, signature):
        """Event yielded when another signatures has matched. Some signatures
        only take effect when one or more other signatures have matched as
        well.
        @param signature: The signature that just matched
        """
    def on_process(self, process):
        """Called on process change.
        Can be used for cleanup of flags, re-activation of the signature, etc.
        @param process: dictionary describing this process
        """
    def on_yara(self, category, filepath, match):
        """Called on YARA match.
        @param category: yara match category
        @param filepath: path to the file that matched
        @param match: yara match information
        The Yara match category can be one of the following.
        extracted: an extracted PE image from a process memory dump
        procmem: a process memory dump
        dropped: a dropped file
        """
    def on_extract(self, match):
        """Called on an Extracted match.
        @param match: extracted match information
        """
    def on_complete(self):
        """Signature is notified when all API calls have been processed."""
    def results(self):
        """Turn this signature into actionable results."""
        # Marks are capped at markcount to bound report size.
        return dict(name=self.name,
                    description=self.description,
                    severity=self.severity,
                    families=self.families,
                    references=self.references,
                    marks=self.marks[:self.markcount],
                    markcount=len(self.marks))
    @property
    def cfgextr(self):
        # Shorthand for the caller's config-extraction helper — presumably
        # an ExtractManager-like object; confirm against the caller.
        return self._caller.c
class Report(object):
    """Base abstract class for reporting module.
    Subclasses implement run(results) and write their output below
    reports_path, which set_path() creates.
    """
    # Relative execution order among reporting modules (lower runs first).
    order = 1
    def __init__(self):
        self.analysis_path = ""
        self.reports_path = ""
        self.task = None
        self.options = None
    @classmethod
    def init_once(cls):
        # Hook for one-time, per-class initialization; default is a no-op.
        pass
    def _get_analysis_path(self, subpath):
        # Join a sub-path onto the analysis folder.
        return os.path.join(self.analysis_path, subpath)
    def set_path(self, analysis_path):
        """Set analysis folder path.
        @param analysis_path: analysis folder path.
        @raise CuckooReportError: if the reports folder cannot be created.
        """
        self.analysis_path = analysis_path
        self.file_path = os.path.realpath(self._get_analysis_path("binary"))
        self.reports_path = self._get_analysis_path("reports")
        self.shots_path = self._get_analysis_path("shots")
        self.pcap_path = self._get_analysis_path("dump.pcap")
        try:
            Folders.create(self.reports_path)
        except CuckooOperationalError as e:
            raise CuckooReportError(e)
    def set_options(self, options):
        """Set report options.
        @param options: report options dict.
        """
        self.options = Dictionary(options)
    def set_task(self, task):
        """Add task information.
        @param task: task dictionary.
        """
        self.task = task
    def run(self, results):
        """Start report processing.
        @raise NotImplementedError: this method is abstract.
        """
        raise NotImplementedError
class BehaviorHandler(object):
    """Base class for behavior handlers inside of BehaviorAnalysis.
    A handler claims log files via handles_path(), turns them into events
    via parse(), consumes events via handle_event(), and finally reports
    its structure via run().
    """

    # Key under which this handler's result structure is stored
    # (behavior[self.key]).
    key = "undefined"

    # Behavior event types this handler is interested in.
    event_types = []

    def __init__(self, behavior_analysis):
        # Back-reference to the owning BehaviorAnalysis instance.
        self.analysis = behavior_analysis

    def handles_path(self, logpath):
        """Return True for the log files this handler wants to process."""
        return False

    def parse(self, logpath):
        """Generate behavior events after handles_path() accepted logpath."""
        raise NotImplementedError

    def handle_event(self, event):
        """Handle a single event passed down the stack."""
        raise NotImplementedError

    def run(self):
        """Return the handler-specific structure for behavior[self.key]."""
        raise NotImplementedError
class ProtocolHandler(object):
    """Abstract class for protocol handlers coming out of the analysis."""

    def __init__(self, handler, version=None):
        # The underlying transport/stream handler and an optional
        # protocol version.
        self.handler = handler
        self.version = version

    def init(self):
        """Hook invoked before handling starts; no-op by default."""

    def close(self):
        """Hook invoked when handling is finished; no-op by default."""
class Extractor(object):
    """One piece in a series of recursive extractors & unpackers.
    Each push_*/enhance method simply forwards to the parent extraction
    manager, which drives the recursion.
    """

    # YARA rules that trigger this extractor.
    yara_rules = []

    # Minimum and maximum supported version in Cuckoo.
    minimum = None
    maximum = None

    @classmethod
    def init_once(cls):
        """Hook for one-time, per-class initialization; no-op by default."""

    def __init__(self, parent):
        self.parent = parent

    def handle_yara(self, filepath, match):
        """Process a YARA match; must be overridden by subclasses."""
        raise NotImplementedError

    def push_command_line(self, cmdline, process=None):
        """Forward a discovered command line to the parent."""
        self.parent.push_command_line(cmdline, process)

    def push_script(self, process, command):
        """Forward a discovered script to the parent."""
        self.parent.push_script(process, command)

    def push_script_recursive(self, command):
        """Forward a script for recursive extraction to the parent."""
        self.parent.push_script_recursive(command)

    def push_shellcode(self, sc):
        """Forward discovered shellcode to the parent."""
        self.parent.push_shellcode(sc)

    def push_blob(self, blob, category, externals, info=None):
        """Forward a binary blob (with YARA externals) to the parent."""
        self.parent.push_blob(blob, category, externals, info)

    def push_blob_noyara(self, blob, category, info=None):
        """Forward a binary blob, skipping YARA matching, to the parent."""
        self.parent.push_blob_noyara(blob, category, info)

    def push_config(self, config):
        """Forward an extracted malware configuration to the parent."""
        self.parent.push_config(config)

    def enhance(self, filepath, key, value):
        """Forward extra metadata about an extracted file to the parent."""
        self.parent.enhance(filepath, key, value)
| |
import os
from ..java import (
Annotation, Class as JavaClass, Code as JavaCode, ConstantElementValue,
Field as JavaField, Method as JavaMethod, RuntimeVisibleAnnotations,
SourceFile, opcodes as JavaOpcodes,
)
from .blocks import Block, IgnoreBlock
from .methods import (
InitMethod, ClosureInitMethod, Method
)
from .types import java, python
from .types.primitives import (
ALOAD_name, ASTORE_name, free_name,
)
# from .debug import DEBUG, DEBUG_value
class Class(Block):
    """A Java class being generated from a Python class definition.
    The block's own opcodes form the static initializer (<clinit>) that
    registers the class with the Python runtime; methods accumulated in
    self.methods are transpiled alongside it.
    """
    def __init__(
            self, module, name,
            namespace=None, bases=None, extends=None, implements=None,
            public=True, final=False, methods=None, fields=None, init=None,
            verbosity=0, include_default_constructor=True):
        super().__init__(parent=module, verbosity=verbosity)
        self.name = name
        # Default namespace nests the class inside its module's namespace.
        if namespace is None:
            self.namespace = '%s.%s' % (module.namespace, module.name)
        else:
            self.namespace = namespace
        self.bases = bases if bases else []
        self._extends = extends
        self.implements = implements if implements else []
        self.public = public
        self.final = final
        self.methods = methods if methods else []
        self.fields = fields if fields else {}
        self.init = init
        self.include_default_constructor = include_default_constructor
        # Track constructors when they are added
        self.init_method = None
        # Mark this class as being a VOC generated class.
        self.fields["__VOC__"] = "Lorg/python/Object;"
        self.methods.append(self.constructor())
    @property
    def descriptor(self):
        # JVM-internal descriptor, e.g. "com/example/Foo".
        return '/'.join([self.namespace.replace('.', '/'), self.name])
    @property
    def class_name(self):
        # Dotted Java class name, e.g. "com.example.Foo".
        return '.'.join(self.namespace.split('.') + [self.name])
    @property
    def module(self):
        # The parent block is always the enclosing module.
        return self._parent
    def visitor_setup(self):
        """Emit the prologue of the static initializer block."""
        self.add_opcodes(
            # DEBUG("STATIC BLOCK OF " + self.klass.descriptor),
            # Force the loading and instantiation of the module
            # that contains the class.
            JavaOpcodes.LDC_W(self.module.full_name),
            JavaOpcodes.ACONST_NULL(),
            JavaOpcodes.ACONST_NULL(),
            JavaOpcodes.ACONST_NULL(),
            JavaOpcodes.ICONST_0(),
            JavaOpcodes.INVOKESTATIC(
                'org/python/ImportLib',
                '__import__',
                args=[
                    'Ljava/lang/String;',
                    'Ljava/util/Map;',
                    'Ljava/util/Map;',
                    '[Ljava/lang/String;',
                    'I',
                ],
                returns='Lorg/python/types/Module;'
            ),
            JavaOpcodes.POP(),
            # Set __base__ on the type
            python.Type.for_name(self.descriptor),
            python.Type.for_name(self.extends_descriptor),
            # DEBUG_value("__base__ for %s should be %s; is" % (self.klass, self.extends_descriptor), dup=True),
            JavaOpcodes.PUTFIELD('org/python/types/Type', '__base__', 'Lorg/python/types/Type;'),
            # Set __bases__ on the type
            python.Type.for_name(self.descriptor),
            java.New('org/python/types/Tuple'),
            java.List(),
        )
        # An explicit Java superclass is included in __bases__ first.
        if self.extends:
            self.add_opcodes(
                JavaOpcodes.DUP(),
                python.Str(self.extends.replace('.', '/')),
                java.List.add(),
            )
        # Python base classes are resolved relative to this namespace
        # unless they already live in org/python/.
        for base in self.bases:
            base_namespace = self.namespace.replace('.', '/') + '/'
            self.add_opcodes(
                JavaOpcodes.DUP(),
                python.Str(base if base.startswith('org/python/') else base_namespace + base),
                java.List.add()
            )
        self.add_opcodes(
            java.Init('org/python/types/Tuple', 'Ljava/util/List;'),
            JavaOpcodes.PUTFIELD('org/python/types/Type', '__bases__', 'Lorg/python/types/Tuple;'),
            # Load the globals module
            JavaOpcodes.GETSTATIC('python/sys/__init__', 'modules', 'Lorg/python/types/Dict;'),
            python.Str(self.module.full_name),
            python.Object.get_item(),
        )
        self.store_name('__module__')
        self.add_opcodes(
            python.Str(self.name),
        )
        self.store_name('__qualname__')
        # self.add_opcodes(
        #     DEBUG("STATIC BLOCK OF " + self.klass.descriptor + " DONE"),
        # )
    def store_name(self, name):
        """Pop the top of stack and set it as attribute `name` on the type."""
        self.add_opcodes(
            ASTORE_name('#value'),
            python.Type.for_name(self.descriptor),
            ALOAD_name('#value'),
            python.Object.set_attr(name),
            free_name('#value')
        )
    def store_dynamic(self):
        """Pop a Map from the stack and merge it into the type's __dict__."""
        self.add_opcodes(
            ASTORE_name('#value'),
            python.Type.for_name(self.descriptor),
            JavaOpcodes.GETFIELD('org/python/types/Type', '__dict__', 'Ljava/util/Map;'),
            ALOAD_name('#value'),
            java.Map.putAll(),
            free_name('#value')
        )
    def load_name(self, name):
        """Push the value of attribute `name` of this type onto the stack."""
        self.add_opcodes(
            python.Type.for_name(self.descriptor),
            python.Object.get_attribute(name),
        )
    def load_globals(self):
        """Push the module's __dict__ (its globals Map) onto the stack."""
        self.add_opcodes(
            JavaOpcodes.GETSTATIC('python/sys/__init__', 'modules', 'Lorg/python/types/Dict;'),
            python.Str(self.module.full_name),
            python.Object.get_item(),
            JavaOpcodes.CHECKCAST('org/python/types/Module'),
            JavaOpcodes.GETFIELD('org/python/types/Module', '__dict__', 'Ljava/util/Map;'),
        )
    def load_locals(self):
        # At class-body level, locals are the module globals.
        self.load_globals()
    def load_vars(self):
        # At class-body level, visible vars are the module globals.
        self.load_globals()
    def delete_name(self, name):
        """Delete attribute `name` from this type."""
        self.add_opcodes(
            python.Type.for_name(self.descriptor),
            python.Object.del_attr(name),
        )
    def constructor(self):
        # Make sure there is a Java constructor
        return InitMethod(self)
    def add_function(self, name, code, parameter_signatures, return_signature):
        """Create a Method for a function defined in the class body and
        register it both as a Java method and as a Python callable."""
        if False:  # FIXME code.co_flags & CO_GENERATOR:
            raise Exception("Can't handle Generator instance methods (yet)")
        else:
            method = Method(
                self,
                name=name,
                code=code,
                parameters=parameter_signatures,
                returns=return_signature,
                static=True,
            )
        # Add the method to the list that need to be
        # transpiled into Java methods
        self.methods.append(method)
        # Add a definition of the callable object
        self.add_callable(method)
        # Remember __init__ so the constructor can delegate to it.
        if method.name == '__init__':
            self.init_method = method
        return method
    def visitor_teardown(self):
        """Terminate the static initializer block."""
        self.add_opcodes(
            JavaOpcodes.RETURN()
        )
    @property
    def extends(self):
        # Explicit Java superclass wins; otherwise exceptions inherit from
        # the Python Exception shim and everything else from Object.
        if self._extends:
            return self._extends
        else:
            if 'Exception' in self.bases:
                return 'org.python.exceptions.Exception'
            else:
                return 'org.python.types.Object'
    @property
    def extends_descriptor(self):
        # Slash-separated form of the superclass name.
        return self.extends.replace('.', '/')
    def transpile(self):
        """Build and return the JavaClass for this Python class.
        @return: (namespace, name, classfile) tuple.
        """
        classfile = JavaClass(
            self.descriptor,
            extends=self.extends_descriptor,
            implements=self.implements,
            public=self.public,
            final=self.final
        )
        classfile.attributes.append(
            SourceFile(os.path.basename(self.module.sourcefile))
        )
        classfile.attributes.append(
            RuntimeVisibleAnnotations([
                Annotation(
                    'Lorg/python/Method;',
                    {
                        '__doc__': ConstantElementValue("Python Class (insert docs here)")
                    }
                )
            ])
        )
        try:
            # If we have block content, add a static block to the class
            static_init = JavaMethod('<clinit>', '()V', public=False, static=True)
            static_init.attributes.append(super().transpile())
            classfile.methods.append(static_init)
        except IgnoreBlock:
            pass
        # Add any manually defined fields
        classfile.fields.extend([
            JavaField(name, descriptor)
            for name, descriptor in self.fields.items()
        ])
        # Add any methods
        for method in self.methods:
            classfile.methods.extend(method.transpile())
        # Ensure the class has a class protected, no-args init() so that
        # instances can be instantiated.
        if self.include_default_constructor:
            classfile.methods.append(
                JavaMethod(
                    '<init>',
                    '()V',
                    public=False,
                    static=False,
                    attributes=[
                        JavaCode(
                            max_stack=1,
                            max_locals=1,
                            code=[
                                JavaOpcodes.ALOAD_0(),
                                JavaOpcodes.INVOKESPECIAL(self.extends_descriptor, '<init>', args=[], returns='V'),
                                JavaOpcodes.RETURN(),
                            ]
                        )
                    ]
                )
            )
        return self.namespace, self.name, classfile
class ClosureClass(Class):
    """A synthetic class backing a Python closure.
    Extends org/python/types/Closure and implements org/python/Callable
    instead of the regular generated-class hierarchy, and uses a
    closure-specific constructor.
    """
    def __init__(self, module, name, closure_var_names, verbosity=0):
        super().__init__(
            module=module,
            name=name,
            extends='org/python/types/Closure',
            implements=['org/python/Callable'],
            verbosity=verbosity,
            include_default_constructor=False,
        )
        # NOTE(review): assigned after super().__init__(), which has already
        # invoked constructor() -> ClosureInitMethod(self); that method must
        # therefore not read closure_var_names eagerly at construction time.
        self.closure_var_names = closure_var_names
    def constructor(self):
        # Make sure there is a default constructor
        return ClosureInitMethod(self)
| |
"""
Convenience methods for executing common Ferret commands.
These methods generate appropriate Ferret command strings and then
execute them using the pyferret.run method.
"""
import numbers
import pyferret
def setwindow(num=1, plotasp=None, axisasp=None, color=None, pal=None,
              thick=None, logo=None, outline=None):
    """
    Assigns the plot window to use for subsequent plotting commands.
    Also provides assignment of common window plots.
    Note that plotasp and axisasp cannot both be given.
        num (int): window number 1-8 to use for plots.
        plotasp (float): aspect ratio (Y/X) for the plot window.
            If not given, the current ratio is unchanged.
            The default ratio on start-up is 0.86
        axisasp (float): aspect ratio (Y/X) for the plot axes.
            If not given, the current ratio is unchanged.
            The default ratio on start-up is 0.75
        color (string, tuple of int): background color for the plot;
            can be one of the color names 'black', 'blue', 'green',
            'lightblue', 'purple', or 'red', or a tuple
            of int values in [0,100] giving RGB or RGBA values.
            If not given, the current value is unchanged.
            The default background color on start-up is opaque white.
        pal (string): default color palette to use in plots.
            If not given, the current value is unchanged.
        thick (float): line thickness scaling factor for the plot.
            If not given, the current scaling factor is unchanged.
            The default line thickness scaling factor on start-up is 1.0
        logo (boolean): include the Ferret logo in the plot?
            If not given, the current value is unchanged.
            The default on start-up is to include the logo.
        outline (float): if positive, thickness of polygon outlines;
            used to fix the 'thin white line' issue in plots.
            If not given, the current value is unchanged.
            The default on start-up is zero (no outlines drawn).
    Raises a ValueError if a problem occurs.
    """
    # create and execute the SET WINDOW command
    cmdstr = 'SET WINDOW'
    if (plotasp is not None) and (axisasp is not None):
        raise ValueError('only one of plotasp and axisasp can be given')
    if plotasp is not None:
        if (not isinstance(plotasp, numbers.Real)) or (plotasp <= 0):
            raise ValueError('plotasp, if given, must be a positive number')
        cmdstr += '/ASPECT=' + str(plotasp)
    if axisasp is not None:
        if (not isinstance(axisasp, numbers.Real)) or (axisasp <= 0):
            raise ValueError('axisasp, if given, must be a positive number')
        cmdstr += '/ASPECT=' + str(axisasp) + ':AXIS'
    if thick is not None:
        if (not isinstance(thick, numbers.Real)) or (thick <= 0):
            raise ValueError('thick, if given, must be a positive number')
        cmdstr += '/THICK=' + str(thick)
    if outline is not None:
        if (not isinstance(outline, numbers.Real)) or (outline < 0):
            # BUG FIX: was misspelled "ValueErrror", which raised a NameError
            # instead of the intended ValueError at runtime.
            raise ValueError('outline, if given, must be a non-negative number')
        cmdstr += '/OUTLINE=' + str(outline)
    if color is not None:
        if isinstance(color, str):
            cmdstr += '/COLOR=' + color
        elif isinstance(color, tuple):
            if (len(color) < 3) or (len(color) > 4):
                raise ValueError('a color tuple must have three or four integer values')
            cmdstr += '/COLOR=' + str(color)
        else:
            raise ValueError('given color %s is not a string or tuple' % str(color))
    if (not isinstance(num, numbers.Integral)) or (num <= 0) or (num > 8):
        raise ValueError('window number %s is not a integer in [1,8]' % str(num))
    cmdstr += ' ' + str(num)
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        raise ValueError('Problems executing Ferret command %s: %s' % (cmdstr, errmsg))
    # assign the default palette, if given, with a separate PALETTE command
    if pal is not None:
        cmdstr = 'PALETTE ' + str(pal)
        (errval, errmsg) = pyferret.run(cmdstr)
        if errval != pyferret.FERR_OK:
            raise ValueError('Problems executing Ferret command %s: %s' % (cmdstr, errmsg))
    # create and execute the mode logo command if logo is given
    if logo is not None:
        if logo:
            cmdstr = 'SET MODE LOGO'
        else:
            cmdstr = 'CANCEL MODE LOGO'
        (errval, errmsg) = pyferret.run(cmdstr)
        if errval != pyferret.FERR_OK:
            raise ValueError('Problems executing Ferret command %s: %s' % (cmdstr, errmsg))
def settextstyle(font='', color='', bold=False, italic=False):
    """
    Sets the text style for any text in plots generated after this command
    using the Ferret SET TEXT command.
        font (string): name of the font to use; if empty, 'Arial' is used.
        color (string): color name, RGB tuple, or RGBA tuple describing the
            color of the text.  The R,G,B, and A components are integer
            percentages; thus values in [0,100]
        bold (bool): use bold font?
        italic (bool): use italic font?
    """
    # Reset to defaults first; this is the only way to clear /BOLD and /ITALIC.
    (errval, errmsg) = pyferret.run('CANCEL TEXT/ALL')
    if errval != pyferret.FERR_OK:
        raise ValueError('problems resetting text style to default: %s' % errmsg)
    # Assemble the SET TEXT qualifiers, then issue the command.
    qualifiers = '/FONT=' + (font if font else 'Arial')
    if color:
        qualifiers += '/COLOR=' + str(color)
    if bold:
        qualifiers += '/BOLD'
    if italic:
        qualifiers += '/ITALIC'
    cmdstr = 'SET TEXT' + qualifiers
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        raise ValueError('problems setting text style (%s): %s' % (cmdstr, errmsg))
def showdata(brief=True, qual=''):
    """
    Show the Ferret information about all datasets currently open in Ferret.
    This uses the Ferret SHOW DATA command to create and display the information.
        brief (boolean): if True (default), a brief report is shown;
            otherwise a full report is shown.
        qual (string): Ferret qualifiers to add to the SHOW DATA command
    """
    if not isinstance(qual, str):
        raise ValueError('qual (Ferret qualifiers) must be a string')
    # /FULL switches from the brief to the complete report.
    cmdstr = 'SHOW DATA' if brief else 'SHOW DATA/FULL'
    if qual:
        cmdstr += qual
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        raise ValueError('Ferret command "%s" failed: %s' % (cmdstr, errmsg))
def contourplot(fvar, region=None, over=False, pal=None, qual=''):
    """
    Create a contour plot of the specified Ferret variable using the Ferret
    CONTOUR command.  Use the fillplot method to generate a color-filled
    contour plot.
    The variable needs to be 2D (or qualifiers need to be added to specify a 2D slice).
        fvar (string or FerVar): Ferret variable to plot
        region (FerRegion): space-time region to plot;
            if None, the full extents of the data will be used
        over (bool): overlay on an existing plot?
        pal (string): color palette to use
        qual (string): qualifiers to add to the Ferret CONTOUR command
    Raises a ValueError if a problem occurs.
    """
    if not isinstance(qual, str):
        raise ValueError('qual (Ferret qualifiers) must be a string')
    if isinstance(fvar, str):
        plotvar = fvar
    elif isinstance(fvar, pyferret.FerVar):
        plotvar = fvar._definition
    else:
        raise ValueError('fvar (Ferret variable to plot) must be a string or FerVar')
    cmdstr = 'CONTOUR'
    if over:
        cmdstr += '/OVER'
    if region is not None:
        if not isinstance(region, pyferret.FerRegion):
            raise ValueError('region, if given, must be a FerRegion')
        # FerRegion renders itself as a set of Ferret qualifiers
        cmdstr += region._ferretqualifierstr()
    if pal is not None:
        cmdstr += '/PALETTE=' + str(pal)
    if qual:
        cmdstr += qual
    cmdstr += ' '
    cmdstr += plotvar
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        # BUG FIX: the failure message previously said "shade command",
        # a copy-paste remnant from shadeplot; this is the CONTOUR command.
        raise ValueError('Ferret contour command (%s) failed: %s' % (cmdstr, errmsg))
def fillplot(fvar, region=None, line=False, over=False, pal=None, qual=''):
    """
    Create a color-filled contour plot of the specified Ferret variable using the Ferret
    FILL command.  Drawing of the contour lines themselves is optional.
    The variable needs to be 2D (or qualifiers need to be added to specify a 2D slice).
        fvar (string or FerVar): Ferret variable to plot
        region (FerRegion): space-time region to plot;
            if None, the full extents of the data will be used
        line (bool): draw the contour lines?
        over (bool): overlay on an existing plot?
        pal (string): color palette to use
        qual (string): qualifiers to add to the Ferret FILL command
    Raises a ValueError if a problem occurs.
    """
    if not isinstance(qual, str):
        raise ValueError('qual (Ferret qualifiers) must be a string')
    if isinstance(fvar, str):
        plotvar = fvar
    elif isinstance(fvar, pyferret.FerVar):
        plotvar = fvar._definition
    else:
        raise ValueError('fvar (Ferret variable to plot) must be a string or FerVar')
    cmdstr = 'FILL'
    if line:
        cmdstr += '/LINE'
    if over:
        cmdstr += '/OVER'
    if region is not None:
        if not isinstance(region, pyferret.FerRegion):
            raise ValueError('region, if given, must be a FerRegion')
        # FerRegion renders itself as a set of Ferret qualifiers
        cmdstr += region._ferretqualifierstr()
    if pal is not None:
        cmdstr += '/PALETTE=' + str(pal)
    if qual:
        cmdstr += qual
    cmdstr += ' '
    cmdstr += plotvar
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        # BUG FIX: the failure message previously said "shade command",
        # a copy-paste remnant from shadeplot; this is the FILL command.
        raise ValueError('Ferret fill command (%s) failed: %s' % (cmdstr, errmsg))
def shadeplot(fvar, region=None, over=False, pal=None, qual=''):
    """
    Create a colored plot of the specified Ferret variable using the Ferret SHADE command.
    (Plot coloring grid cells based on the variable value in that cell.)
    The variable needs to be 2D (or qualifiers need to be added to specify a 2D slice).
        fvar (string or FerVar): Ferret variable to plot
        region (FerRegion): space-time region to plot;
            if None, the full extents of the data will be used
        over (bool): overlay on an existing plot?
        pal (string): color palette to use
        qual (string): qualifiers to add to the Ferret SHADE command
    """
    if not isinstance(qual, str):
        raise ValueError('qual (Ferret qualifiers) must be a string')
    if isinstance(fvar, str):
        plotvar = fvar
    elif isinstance(fvar, pyferret.FerVar):
        plotvar = fvar._definition
    else:
        raise ValueError('fvar (Ferret variable to plot) must be a string or FerVar')
    # Accumulate the SHADE qualifiers, then append the variable expression.
    qualifiers = ''
    if over:
        qualifiers += '/OVER'
    if region is not None:
        if not isinstance(region, pyferret.FerRegion):
            raise ValueError('region, if given, must be a FerRegion')
        qualifiers += region._ferretqualifierstr()
    if pal is not None:
        qualifiers += '/PALETTE=' + str(pal)
    if qual:
        qualifiers += qual
    cmdstr = 'SHADE' + qualifiers + ' ' + plotvar
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        raise ValueError('Ferret shade command (%s) failed: %s' % (cmdstr, errmsg))
def shadeland(res=20, color='gray', over=True, solid=True, X=None, Y=None):
    """
    Shades land masses for the current longitude-latitude plot or the specified X-Y region.
        res (int): ETOPO dataset resolution (in minutes of a degree) to use;
            the corresponding ETOPO dataset (eg, etopo20.cdf for 20) must be available.
            Typically 5, 10, 20, 40, 60, 120 are available from Ferret's standard datasets.
        color (str): name of the color or color palette to used for land.
        over (bool): if true, overlay onto the current longitude-latitude plot;
            if False, create a new plot of the given region
        solid (bool): if True, shade the land in a single solid color;
            if False, shade different elevations using the given color palette
        X (str): longitude limits for the region as low:high
            If not given and over is False, '0E:360E' is used.
            if not given and over is True, the full range of the given plot is used.
        Y (str): latitude limits for the region as low:high
            If not given and over is False, '90S:90N' is used.
            If not given and over is True, the full range of the given plot is used.
    """
    # Build the argument list for the fland script, then run it.
    args = ['GO fland', str(res), str(color)]
    args.append('OVERLAY' if over else 'BASEMAP')
    args.append('SOLID' if solid else 'DETAILED')
    if X is not None:
        args.append('X=' + str(X))
    elif not over:
        # assign the default here even though this matches the script
        args.append('X=0E:360E')
    elif Y is not None:
        # if Y is given, then have to have an X argument;
        # needs to be a double wrap for a complete overlay
        args.append('X=0E:720E')
    if Y is not None:
        args.append('Y=' + str(Y))
    elif not over:
        # assign the default here even though this matches the script
        args.append('Y=90S:90N')
    cmdstr = ' '.join(args)
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        raise ValueError('Ferret script command (%s) failed: %s' % (cmdstr, errmsg))
def shadewater(res=20, color='gray', over=True, solid=True, X=None, Y=None):
    """
    Shades water masses for the current longitude-latitude plot or the specified region.
        res (int): ETOPO dataset resolution (in minutes of a degree) to use;
            the corresponding ETOPO dataset (eg, etopo20.cdf for 20) must be available.
            Typically 5, 10, 20, 40, 60, 120 are available from Ferret's standard datasets.
        color (str): name of the color or color palette to used for water masses
        over (bool): if true, overlay onto the current longitude-latitude plot;
            if False, create a new plot of the given region
        solid (bool): if True, shade the water masses in a single solid color;
            if False, shade different depths using the given color palette
        X (str): longitude limits for the region as low:high;
            if not given and over is False, '0E:360E' is used
        Y (str): latitude limits for the region as low:high;
            if not given and over is False, '90S:90N'
    """
    # Build the argument list for the focean script, then run it.
    args = ['GO focean', str(res), str(color)]
    args.append('OVERLAY' if over else 'BASEMAP')
    args.append('SOLID' if solid else 'DETAILED')
    if X is not None:
        args.append('X=' + str(X))
    elif not over:
        # assign the default here even though this matches the script
        args.append('X=0E:360E')
    elif Y is not None:
        # if Y is given, then have to have an X argument;
        # needs to be a double wrap for a complete overlay
        args.append('X=0E:720E')
    if Y is not None:
        args.append('Y=' + str(Y))
    elif not over:
        # assign the default here even though this matches the script
        args.append('Y=90S:90N')
    cmdstr = ' '.join(args)
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        raise ValueError('Ferret script command (%s) failed: %s' % (cmdstr, errmsg))
def pointplot(fvar, vs=None, color=None, sym=None, symsize=None, thick=None,
              line=False, title=None, region=None, over=False, label=True, qual=''):
    """
    Create a point plot of the given value, or the given value versus another value
    (if vs is given), possibly colored by another value (if color is a FerVar).
    To create a line plot with symbols, use the pointplot command with the line
    option set to True.
        fvar (string or FerVar): Ferret variable to plot
        vs (string or FerVar): if given, plot the above variable versus this variables
        color: line color or variable used to determine line color; if
            None: Ferret default color used,
            color name (string): name of color to use,
            color tuple (3 or 4-tupe of [0,100] int values): RGB or RGBA of color to use,
            FerVar or variable name string: color according to the value of this variable
            Note: color name strings are limited to (case insensitive)
                'black', 'red', 'green', 'blue', 'lightblue', 'purple'
                other strings are assumed to be variable names
        sym (int): Ferret symbol number of the symbol to draw for the points.
            If not given, Ferret selects an appropriate symbol.
        symsize (float): size of the symbol in inches.
            If not given, Ferret select an appropriate size.
        thick (float): line thickness scaling factor when drawing symbols and lines
        line (bool): if True, draw a line between symbols/points
        title (string): title for the plot; if not given, Ferret's default title is used
        region (FerRegion): space-time region to plot;
            if None, the full extents of the data will be used
        over (bool): overlay onto an existing plot
        label (bool): if False, suppress all plot labels
        qual (string): qualifiers to add to the Ferret PLOT/LINE command
    Raises a ValueError if any argument is invalid or if the Ferret command fails.
    """
    if not isinstance(qual, str):
        raise ValueError('qual (Ferret qualifiers) must be a string')
    # Resolve fvar to the variable expression string appended to the command.
    if isinstance(fvar, str):
        plotvar = fvar
    elif isinstance(fvar, pyferret.FerVar):
        plotvar = fvar._definition
    else:
        raise ValueError('fvar (Ferret variable to plot) must be a string or FerVar')
    cmdstr = 'PLOT'
    # /VS plots fvar against vs; both expressions go in the argument list.
    if vs is not None:
        cmdstr += '/VS'
        plotvar += ','
        if isinstance(vs, str):
            plotvar += vs
        elif isinstance(vs, pyferret.FerVar):
            plotvar += vs._definition
        else:
            raise ValueError('vs (second Ferret variable to plot) must be a string or FerVar')
    # A fixed color becomes /COLOR=...; a data-driven color becomes /RIBBON
    # with the color variable appended as an extra plot argument.
    if color is not None:
        if isinstance(color, tuple):
            cmdstr += '/COLOR=' + str(color)
        elif isinstance(color, pyferret.FerVar):
            cmdstr += '/RIBBON'
            plotvar += ',' + color._definition
        elif isinstance(color, str):
            if color.upper() in ('BLACK','RED','GREEN','BLUE','LIGHTBLUE','PURPLE'):
                cmdstr += '/COLOR=' + color
            else:
                # not a recognized color name - treat it as a variable name
                cmdstr += '/RIBBON'
                plotvar += ',' + color
        else:
            raise ValueError('color must be a tuple, string, or FerVar')
    # always draw the symbols
    cmdstr += '/SYMBOL'
    if sym is not None:
        if (not isinstance(sym, numbers.Integral)) or (sym < 0) or (sym > 88):
            raise ValueError('sym is not a valid Ferret symbol number')
        # symbol number zero has the special name DOT in Ferret
        if sym == 0:
            cmdstr += '=DOT'
        else:
            cmdstr += '=' + str(sym)
    if symsize is not None:
        if (not isinstance(symsize, numbers.Real)) or (symsize <= 0):
            raise ValueError('symsize must be a positive number')
        cmdstr += '/SIZE=' + str(symsize)
    if thick is not None:
        if (not isinstance(thick, numbers.Real)) or (thick <= 0):
            raise ValueError('thick must be a positive number')
        cmdstr += '/THICK=' + str(thick)
    if line:
        cmdstr += '/LINE'
    if title is not None:
        if not isinstance(title, str):
            raise ValueError('title must be a string')
        cmdstr += '/TITLE="' + title + '"'
    if over:
        cmdstr += '/OVER'
    if region is not None:
        if not isinstance(region, pyferret.FerRegion):
            raise ValueError('region, if given, must be a FerRegion')
        # FerRegion renders itself as a set of Ferret qualifiers
        cmdstr += region._ferretqualifierstr();
    if not label:
        cmdstr += '/NOLABEL'
    if qual:
        cmdstr += qual
    cmdstr += ' '
    cmdstr += plotvar
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        raise ValueError('Ferret plot command (%s) failed: %s' % (cmdstr, errmsg))
def lineplot(fvar, vs=None, color=None, thick=None, dash=None, title=None,
             region=None, along=None, over=False, label=True, qual=''):
    """
    Create a line plot of the given value, or the given value versus another value
    (if vs is given), possibly colored by another value (if color is a FerVar).
    To create a line plot with symbols, use the pointplot command with the line
    option set to True.
        fvar (string or FerVar): Ferret variable to plot
        vs (string or FerVar): if given, plot the above variable versus this variables
        color: line color or variable used to determine line color; if
            None: Ferret default color used,
            color name (string): name of color to use,
            color tuple (3 or 4-tupe of [0,100] int values): RGB or RGBA of color to use,
            FerVar or variable name string: color according to the value of this variable
            Note: color name strings are limited to (case insensitive)
                'black', 'red', 'green', 'blue', 'lightblue', 'purple'
                other strings are assumed to be variable names
        thick (float): line thickness scaling factor
        dash (4-tuple of float): draws the line as a dashed line where the four values
            are the first drawn stroke length, first undrawn stroke length,
            second drawn stroke length, second undrawn stroke length of two dashes
        title (string): title for the plot; if not given, Ferret's default title is used
        region (FerRegion): space-time region to plot;
            if None, the full extents of the data will be used
        along (string; one of 'X','Y','Z','T','E','F', or lowercase): make a set of line
            plots from two-dimensional data with this axis as the horizontal axis.
        over (bool): overlay onto an existing plot
        label (bool): if False, suppress all plot labels
        qual (string): qualifiers to add to the Ferret PLOT/LINE command
    Raises a ValueError if any argument is invalid or if the Ferret command fails.
    """
    if not isinstance(qual, str):
        raise ValueError('qual (Ferret qualifiers) must be a string')
    # Resolve fvar to the variable expression string appended to the command.
    if isinstance(fvar, str):
        plotvar = fvar
    elif isinstance(fvar, pyferret.FerVar):
        plotvar = fvar._definition
    else:
        raise ValueError('fvar (Ferret variable to plot) must be a string or FerVar')
    cmdstr = 'PLOT/LINE'
    # /VS plots fvar against vs; both expressions go in the argument list.
    if vs is not None:
        cmdstr += '/VS'
        plotvar += ','
        if isinstance(vs, str):
            plotvar += vs
        elif isinstance(vs, pyferret.FerVar):
            plotvar += vs._definition
        else:
            raise ValueError('vs (second Ferret variable to plot) must be a string or FerVar')
    # A fixed color becomes /COLOR=...; a data-driven color becomes /RIBBON
    # with the color variable appended as an extra plot argument.
    if color is not None:
        if isinstance(color, tuple):
            cmdstr += '/COLOR=' + str(color)
        elif isinstance(color, pyferret.FerVar):
            cmdstr += '/RIBBON'
            plotvar += ',' + color._definition
        elif isinstance(color, str):
            if color.upper() in ('BLACK','RED','GREEN','BLUE','LIGHTBLUE','PURPLE'):
                cmdstr += '/COLOR=' + color
            else:
                # not a recognized color name - treat it as a variable name
                cmdstr += '/RIBBON'
                plotvar += ',' + color
        else:
            raise ValueError('color must be a tuple, string, or FerVar')
    if thick is not None:
        if (not isinstance(thick, numbers.Real)) or (thick <= 0):
            raise ValueError('thick must be a positive number')
        cmdstr += '/THICK=' + str(thick)
    if dash is not None:
        if (not isinstance(dash, tuple)) or (len(dash) != 4):
            raise ValueError('dash must be a tuple of four floats');
        cmdstr += '/DASH=' + str(dash)
    if title is not None:
        if not isinstance(title, str):
            raise ValueError('title must be a string')
        cmdstr += '/TITLE="' + title + '"'
    if along is not None:
        # Ferret accepts only upper-case axis names in /ALONG
        axisnames = ('X','Y','Z','T','E','F','x','y','z','t','e','f')
        if not along in axisnames:
            raise ValueError('along must be one of ' + str(axisnames))
        cmdstr += '/ALONG=' + along.upper()
    if over:
        cmdstr += '/OVER'
    if region is not None:
        if not isinstance(region, pyferret.FerRegion):
            raise ValueError('region must be a FerRegion')
        # FerRegion renders itself as a set of Ferret qualifiers
        cmdstr += region._ferretqualifierstr();
    if not label:
        cmdstr += '/NOLABEL'
    if qual:
        cmdstr += qual
    cmdstr += ' '
    cmdstr += plotvar
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        raise ValueError('Ferret plot command (%s) failed: %s' % (cmdstr, errmsg))
def saveplot(name, fmt='', xpix=None, ypix=None, xinch=None, yinch=None, qual=''):
    """
    Save the current plot.  If format is not given,
    the format is guessed from the filename extension.
        name (string): name of the file to contain the plot
        fmt (string): format of the plot file
        xpix (int): number of pixels in width of the saved raster (eg, PNG) plot
        ypix (int): number of pixels in the height of the saved raster (eg, PNG) plot
        xinch (float): inch width of the saved vector (eg, PDF) plot
        yinch (float): inch height of the save vector (eg, PDF) plot
        qual (string): qualifiers to add to the Ferret FRAME command
    """
    if not isinstance(name, str):
        raise ValueError('name (plot file name) must be a string')
    cmdstr = 'FRAME/FILE="%s"' % name
    if not isinstance(fmt, str):
        raise ValueError('fmt (plot file format) must be a string')
    if fmt:
        cmdstr += '/FORMAT=%s' % fmt
    # raster size in pixels - only one of the two dimensions may be given
    if xpix is not None:
        if not (isinstance(xpix, int) and (xpix > 0)):
            raise ValueError('xpix must be a positive integer')
        cmdstr += '/XPIX=' + str(xpix)
    if ypix is not None:
        if not (isinstance(ypix, int) and (ypix > 0)):
            raise ValueError('ypix must be a positive integer')
        cmdstr += '/YPIX=' + str(ypix)
    if (xpix is not None) and (ypix is not None):
        raise ValueError('xpix and ypix cannot both be given')
    # vector size in inches - only one of the two dimensions may be given
    if xinch is not None:
        if not (isinstance(xinch, numbers.Real) and (xinch > 0.0)):
            raise ValueError('xinch must be a positive number')
        cmdstr += '/XINCH=' + str(xinch)
    if yinch is not None:
        if not (isinstance(yinch, numbers.Real) and (yinch > 0.0)):
            raise ValueError('yinch must be a positive number')
        cmdstr += '/YINCH=' + str(yinch)
    if (xinch is not None) and (yinch is not None):
        raise ValueError('xinch and yinch cannot both be given')
    if not isinstance(qual, str):
        raise ValueError('qual (Ferret qualifiers) must be a string')
    if qual:
        cmdstr += qual
    (errval, errmsg) = pyferret.run(cmdstr)
    if errval != pyferret.FERR_OK:
        raise ValueError('Ferret frame command (%s) failed: %s' % (cmdstr, errmsg))
| |
"""Test structuring of collections and primitives."""
from typing import (
Any,
Dict,
FrozenSet,
List,
MutableSet,
Optional,
Set,
Tuple,
Union,
)
import attr
from hypothesis import assume, given
from hypothesis.strategies import (
binary,
booleans,
data,
floats,
frozensets,
integers,
just,
lists,
one_of,
sampled_from,
sets,
text,
tuples,
)
from pytest import raises
from cattr import Converter
from cattr._compat import is_bare, is_union_type
from cattr.converters import NoneType
from cattr.errors import StructureHandlerNotFoundError
from . import (
dicts_of_primitives,
enums_of_primitives,
lists_of_primitives,
primitive_strategies,
seqs_of_primitives,
)
from ._compat import change_type_param
# Strategies producing (value, type) pairs for each primitive kind.
ints_and_type = tuples(integers(), just(int))
floats_and_type = tuples(floats(allow_nan=False), just(float))
strs_and_type = tuples(text(), just(str))
bytes_and_type = tuples(binary(), just(bytes))
# Any primitive value together with its exact type.
primitives_and_type = one_of(
    ints_and_type, floats_and_type, strs_and_type, bytes_and_type
)
# Generic set type objects: the mutable ones, and all of them.
mut_set_types = sampled_from([Set, MutableSet])
set_types = one_of(mut_set_types, just(FrozenSet))
def create_generic_type(generic_types, param_type):
    """Create a strategy for generating parameterized generic types.

    Yields the bare generic, the generic parameterized with Any, or the
    generic parameterized with the given concrete type.
    """
    bare = generic_types
    with_any = generic_types.map(lambda t: t[Any])
    with_param = generic_types.map(lambda t: t[param_type])
    return one_of(bare, with_any, with_param)
# (set value, parameterized mutable set type) pairs.
mut_sets_of_primitives = primitive_strategies.flatmap(
    lambda e: tuples(sets(e[0]), create_generic_type(mut_set_types, e[1]))
)
# (frozenset value, parameterized FrozenSet type) pairs.
frozen_sets_of_primitives = primitive_strategies.flatmap(
    lambda e: tuples(
        frozensets(e[0]), create_generic_type(just(FrozenSet), e[1])
    )
)
# Either mutable or frozen set values paired with their generic type.
sets_of_primitives = one_of(mut_sets_of_primitives, frozen_sets_of_primitives)
@given(primitives_and_type)
def test_structuring_primitives(primitive_and_type):
    """Structuring a primitive to its own type or Any is the identity."""
    val, expected_type = primitive_and_type
    converter = Converter()
    assert converter.structure(val, expected_type) == val
    assert converter.structure(val, Any) == val
@given(seqs_of_primitives)
def test_structuring_seqs(seq_and_type):
    """Test structuring sequence generic types."""
    seq, seq_type = seq_and_type
    structured = Converter().structure(seq, seq_type)
    # Element order and values must be preserved.
    assert all(a == b for a, b in zip(seq, structured))
@given(sets_of_primitives, set_types)
def test_structuring_sets(set_and_type, set_type):
    """Test structuring generic sets."""
    converter = Converter()
    set_, input_set_type = set_and_type
    # If the input set type carries a parameter, copy it onto the target
    # set type so both sides speak about the same element type.
    if input_set_type not in (Set, FrozenSet, MutableSet):
        set_type = set_type[input_set_type.__args__[0]]
    converted = converter.structure(set_, set_type)
    assert converted == set_
    # Set[int] can't be used with isinstance any more.
    non_generic = (
        set_type.__origin__ if set_type.__origin__ is not None else set_type
    )
    assert isinstance(converted, non_generic)
    # Structuring to Any must return the value unchanged and same-typed.
    converted = converter.structure(set_, Any)
    assert converted == set_
    assert isinstance(converted, type(set_))
@given(sets_of_primitives)
def test_stringifying_sets(set_and_type):
    """Test structuring generic sets and converting the contents to str."""
    converter = Converter()
    set_, input_set_type = set_and_type
    # Force the element parameter to str (bare generics get subscripted).
    if is_bare(input_set_type):
        input_set_type = input_set_type[str]
    else:
        input_set_type.__args__ = (str,)
    stringified = converter.structure(set_, input_set_type)
    assert len(stringified) == len(set_)
    assert all(str(element) in stringified for element in set_)
@given(lists(primitives_and_type, min_size=1))
def test_structuring_hetero_tuples(list_of_vals_and_types):
    """Test structuring heterogenous tuples."""
    vals = []
    types = []
    for val, typ in list_of_vals_and_types:
        vals.append(val)
        types.append(typ)
    tuple_type = Tuple[tuple(types)]
    converted = Converter().structure(vals, tuple_type)
    assert isinstance(converted, tuple)
    # Each element keeps its value and lands on its declared type.
    for element, expected_val, expected_type in zip(converted, vals, types):
        assert element == expected_val
        assert isinstance(element, expected_type)
@given(lists(primitives_and_type))
def test_stringifying_tuples(list_of_vals_and_types):
    """Stringify all elements of a heterogeneous tuple."""
    converter = Converter()
    vals = [pair[0] for pair in list_of_vals_and_types]
    # A Tuple with one str slot per element stringifies everything.
    all_str_tuple = Tuple[(str,) * len(vals)]
    converted = converter.structure(vals, all_str_tuple)
    assert isinstance(converted, tuple)
    for original, stringified in zip(vals, converted):
        assert stringified == str(original)
    assert all(isinstance(element, str) for element in converted)
@given(dicts_of_primitives)
def test_structuring_dicts(dict_and_type):
    """Structuring a dict yields an equal but distinct copy."""
    d, t = dict_and_type
    structured = Converter().structure(d, t)
    assert structured == d
    # A copy, not the same object.
    assert structured is not d
@given(dicts_of_primitives, data())
def test_structuring_dicts_opts(dict_and_type, data):
    """Structure dicts, but with optional primitives."""
    converter = Converter()
    d, t = dict_and_type
    # Bare Dict has no __args__ to rewrite; skip those examples.
    assume(not is_bare(t))
    # Make the value parameter Optional so None values are legal.
    t.__args__ = (t.__args__[0], Optional[t.__args__[1]])
    # Randomly replace some values with None.
    d = {k: v if data.draw(booleans()) else None for k, v in d.items()}
    converted = converter.structure(d, t)
    assert converted == d
    assert converted is not d
@given(dicts_of_primitives)
def test_stringifying_dicts(dict_and_type):
    """Structuring into Dict[str, str] stringifies keys and values."""
    source, _ = dict_and_type
    stringified = Converter().structure(source, Dict[str, str])
    for key, value in source.items():
        assert stringified[str(key)] == str(value)
@given(primitives_and_type)
def test_structuring_optional_primitives(primitive_and_type):
    """Test structuring Optional primitive types."""
    val, val_type = primitive_and_type
    converter = Converter()
    optional_type = Optional[val_type]
    # The value passes through; None stays None.
    assert converter.structure(val, optional_type) == val
    assert converter.structure(None, optional_type) is None
@given(lists_of_primitives().filter(lambda lp: not is_bare(lp[1])))
def test_structuring_lists_of_opt(list_and_type):
    """Test structuring lists of Optional primitive types."""
    converter = Converter()
    l, t = list_and_type
    # Inject a None element; the non-optional type should then reject it.
    l.append(None)
    args = t.__args__
    # Detect whether the element type already admits None.
    is_optional = args[0] is Optional or (
        is_union_type(args[0])
        and len(args[0].__args__) == 2
        and args[0].__args__[1] is NoneType
    )
    if not is_bare(t) and (args[0] not in (Any, str) and not is_optional):
        # None is not a valid element; structuring must fail.
        with raises((TypeError, ValueError)):
            converter.structure(l, t)
    optional_t = Optional[args[0]]
    # We want to create a generic type annotation with an optional
    # type parameter.
    t = change_type_param(t, optional_t)
    converted = converter.structure(l, t)
    for x, y in zip(l, converted):
        assert x == y
    # Restore the original parameters on the (shared) type object.
    t.__args__ = args
@given(lists_of_primitives())
def test_stringifying_lists_of_opt(list_and_type):
    """Test structuring Optional primitive types into strings."""
    l, _ = list_and_type
    l.append(None)
    structured = Converter().structure(l, List[Optional[str]])
    for original, result in zip(l, structured):
        if original is None:
            # None survives the Optional; everything else is stringified.
            assert result is None
        else:
            assert result == str(original)
@given(lists(integers()))
def test_structuring_primitive_union_hook(ints):
    """Registering a union loading hook works."""
    converter = Converter()

    def structure_hook(val, cl):
        """Even ints are passed through, odd are stringified."""
        return val if val % 2 == 0 else str(val)

    converter.register_structure_hook(Union[str, int], structure_hook)
    structured = converter.structure(ints, List[Union[str, int]])
    for original, result in zip(ints, structured):
        expected = original if original % 2 == 0 else str(original)
        assert result == expected
def test_structure_hook_func():
    """testing the hook_func method"""
    converter = Converter()

    def _starts_with_f(cls):
        # The predicate matches any class whose name begins with "F".
        return cls.__name__.startswith("F")

    def _constant_hi(obj, cls):
        return "hi"

    class Foo(object):
        pass

    class Bar(object):
        pass

    converter.register_structure_hook_func(_starts_with_f, _constant_hi)
    # Foo matches the predicate; Bar does not and must fail to structure.
    assert converter.structure(10, Foo) == "hi"
    with raises(StructureHandlerNotFoundError) as exc:
        converter.structure(10, Bar)
    assert exc.value.type_ is Bar
@given(data(), enums_of_primitives())
def test_structuring_enums(data, enum):
    """Test structuring enums by their values."""
    member = data.draw(sampled_from(list(enum)))
    # Structuring the raw value must resolve to the enum member.
    assert Converter().structure(member.value, enum) == member
def test_structuring_unsupported():
    """Loading unsupported classes should throw."""
    converter = Converter()
    # Neither an arbitrary class nor an unregistered union has a handler.
    for unsupported in (Converter, Union[int, str]):
        with raises(StructureHandlerNotFoundError) as exc:
            converter.structure(1, unsupported)
        assert exc.value.type_ is unsupported
def test_subclass_registration_is_honored():
    """If a subclass is registered after a superclass,
    that subclass handler should be dispatched for
    structure
    """
    converter = Converter()

    class Foo(object):
        def __init__(self, value):
            self.value = value

    class Bar(Foo):
        pass

    def _check(cls_, expected):
        # Structure through the converter and compare the stored value.
        assert converter.structure(None, cls_).value == expected

    # With only the superclass registered, both classes use its hook.
    converter.register_structure_hook(Foo, lambda obj, cls: cls("foo"))
    _check(Foo, "foo")
    _check(Bar, "foo")
    # Registering the subclass afterwards overrides dispatch for it only.
    converter.register_structure_hook(Bar, lambda obj, cls: cls("bar"))
    _check(Foo, "foo")
    _check(Bar, "bar")
def test_structure_union_edge_case():
    """Disambiguating a union by required attribute names works."""
    converter = Converter()

    @attr.s(auto_attribs=True)
    class A:
        a1: Any
        a2: Optional[Any] = None

    @attr.s(auto_attribs=True)
    class B:
        b1: Any
        b2: Optional[Any] = None

    raw = [{"a1": "foo"}, {"b1": "bar"}]
    structured = converter.structure(raw, List[Union[A, B]])
    assert structured == [A("foo"), B("bar")]
| |
# Copyright 2014 Open vStorage NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module containing certain helper classes providing various logic
"""
import os
import imp
import copy
import inspect
import hashlib
from ovs.extensions.storage.volatilefactory import VolatileFactory
from ovs.extensions.storage.persistentfactory import PersistentFactory
from ovs.log.logHandler import LogHandler
logger = LogHandler.get('dal', name='helper')
class Descriptor(object):
    """
    The descriptor class contains metadata to instantiate objects that can be serialized.
    It points towards the sourcefile, class name and class type.
    """
    # Process-wide cache mapping descriptor identifiers to already-resolved classes,
    # so get_object() only imports a hybrid module once per process.
    object_cache = {}
    def __init__(self, object_type=None, guid=None, cached=True):
        """
        Initializes a descriptor for a given type. Optionally already providing a guid for the
        instantiator.

        :param object_type: hybrid class to describe; None yields an empty
                            descriptor that must be filled via load()
        :param guid: optional guid stored in the descriptor for later instantiation
        :param cached: when False, the volatile-store cache entry is rebuilt
        :raises TypeError: when object_type is not importable as an ovs.dal hybrid
        """
        # Initialize super class
        super(Descriptor, self).__init__()
        if object_type is None:
            # Empty descriptor; the caller is expected to call load() afterwards.
            self.initialized = False
        else:
            self.initialized = True
            self._volatile = VolatileFactory.get_client()
            type_name = object_type.__name__
            module_name = object_type.__module__.split('.')[-1]
            fqm_name = 'ovs.dal.hybrids.{0}'.format(module_name)
            try:
                # Validate that the type actually lives in the hybrids package;
                # anything else cannot be described/serialized here.
                module = __import__(fqm_name, level=0, fromlist=[type_name])
                _ = getattr(module, type_name)
            except (ImportError, AttributeError):
                logger.info('Received object type {0} is not a hybrid'.format(object_type))
                raise TypeError('Invalid type for Descriptor: {0}'.format(object_type))
            # NOTE(review): hashlib.sha1() on a str assumes Python 2 semantics
            # (the file also uses iteritems elsewhere) — confirm before porting.
            identifier = '{0}_{1}'.format(type_name, hashlib.sha1(fqm_name).hexdigest())
            key = 'ovs_descriptor_{0}'.format(identifier)
            self._descriptor = self._volatile.get(key)
            if self._descriptor is None or cached is False:
                if self._descriptor is None:
                    logger.debug('Object type {0} was translated to {1}.{2}'.format(
                        object_type, fqm_name, type_name
                    ))
                    Toolbox.log_cache_hit('descriptor', False)
                self._descriptor = {'fqmn': fqm_name,
                                    'type': type_name,
                                    'identifier': identifier,
                                    'version': 3}
                self._volatile.set(key, self._descriptor)
            else:
                Toolbox.log_cache_hit('descriptor', True)
            # The guid is per-descriptor-instance and never part of the cached entry
            # written above (it is set after the cache write).
            self._descriptor['guid'] = guid
    def load(self, descriptor):
        """
        Loads an instance from a descriptor dictionary representation.
        Returns self to allow chaining.
        """
        self._descriptor = copy.deepcopy(descriptor)
        self.initialized = True
        return self
    @property
    def descriptor(self):
        """
        Returns a dictionary representation of the descriptor class.
        A deep copy is returned so callers cannot mutate internal state.
        """
        if self.initialized:
            return copy.deepcopy(self._descriptor)
        else:
            raise RuntimeError('Descriptor not yet initialized')
    def get_object(self, instantiate=False):
        """
        This method will yield an instance or the class to which the descriptor points.
        With instantiate=True, returns cls(guid), or None when no guid is set.
        """
        if not self.initialized:
            raise RuntimeError('Descriptor not yet initialized')
        if self._descriptor['identifier'] not in Descriptor.object_cache:
            type_name = self._descriptor['type']
            module = __import__(self._descriptor['fqmn'], level=0, fromlist=[type_name])
            cls = getattr(module, type_name)
            Descriptor.object_cache[self._descriptor['identifier']] = cls
        else:
            cls = Descriptor.object_cache[self._descriptor['identifier']]
        if instantiate:
            if self._descriptor['guid'] is None:
                return None
            return cls(self._descriptor['guid'])
        else:
            return cls
    @staticmethod
    def isinstance(instance, object_type):
        """
        Checks (based on descriptors) whether a given instance is of a given type.
        """
        try:
            return Descriptor(instance.__class__) == Descriptor(object_type)
        except TypeError:
            # One of the two sides is not a hybrid: fall back to the builtin check.
            return isinstance(instance, object_type)
    def __eq__(self, other):
        """
        Checks the descriptor identifiers.
        """
        return self._descriptor['identifier'] == other.descriptor['identifier']
    def __ne__(self, other):
        """
        Checks the descriptor identifiers.
        """
        return not self.__eq__(other)
class HybridRunner(object):
    """
    The HybridRunner provides access to generic properties from the hybrid object by means
    of dynamic code reflection.
    """
    @staticmethod
    def get_hybrids():
        """
        Returns the hybrid structure: a dict mapping every hybrid identifier
        to the descriptor of the most-derived class in its inheritance chain.
        The result is cached in the volatile store.
        """
        key = 'ovs_hybrid_structure'
        volatile = VolatileFactory.get_client()
        hybrid_structure = volatile.get(key)
        if hybrid_structure is None:
            Toolbox.log_cache_hit('hybrid_structure', False)
            base_hybrids = []
            # inherit_table maps parent identifier -> child identifier
            inherit_table = {}
            # translation_table maps identifier -> descriptor dict
            translation_table = {}
            path = os.path.join(os.path.dirname(__file__), 'hybrids')
            for filename in os.listdir(path):
                if os.path.isfile(os.path.join(path, filename)) and filename.endswith('.py'):
                    name = filename.replace('.py', '')
                    module = imp.load_source(name, os.path.join(path, filename))
                    for member in inspect.getmembers(module):
                        # Only consider classes defined in this module (not imports)
                        if inspect.isclass(member[1]) \
                                and member[1].__module__ == name:
                            current_class = member[1]
                            try:
                                current_descriptor = Descriptor(current_class).descriptor
                            except TypeError:
                                # Not a hybrid; ignore.
                                continue
                            current_identifier = current_descriptor['identifier']
                            if current_identifier not in translation_table:
                                translation_table[current_identifier] = current_descriptor
                            if 'DataObject' in current_class.__base__.__name__:
                                # Directly derived from DataObject: a "base" hybrid
                                if current_identifier not in base_hybrids:
                                    base_hybrids.append(current_identifier)
                                else:
                                    raise RuntimeError('Duplicate base hybrid found: {0}'.format(current_identifier))
                            elif 'DataObject' not in current_class.__name__:
                                # A subclass of another hybrid: walk the MRO up to
                                # DataObject and record the inheritance chain.
                                structure = []
                                this_class = None
                                for this_class in current_class.__mro__:
                                    if 'DataObject' in this_class.__name__:
                                        break
                                    try:
                                        structure.append(Descriptor(this_class).descriptor['identifier'])
                                    except TypeError:
                                        break  # This means we reached one of the built-in classes.
                                if 'DataObject' in this_class.__name__:
                                    # Register each parent -> child link; only single
                                    # inheritance per hybrid is allowed.
                                    for index in reversed(range(1, len(structure))):
                                        if structure[index] in inherit_table:
                                            raise RuntimeError('Duplicate hybrid inheritance: {0}({1})'.format(structure[index - 1], structure[index]))
                                        inherit_table[structure[index]] = structure[index - 1]
            # Fixpoint loop: propagate replacements until every hybrid points to
            # the most-derived class in its chain.
            items_replaced = True
            hybrids = {hybrid: None for hybrid in base_hybrids[:]}
            while items_replaced is True:
                items_replaced = False
                for hybrid, replacement in inherit_table.iteritems():
                    if hybrid in hybrids.keys() and hybrids[hybrid] is None:
                        hybrids[hybrid] = replacement
                        items_replaced = True
                    if hybrid in hybrids.values():
                        for item in hybrids.keys():
                            if hybrids[item] == hybrid:
                                hybrids[item] = replacement
                                items_replaced = True
            hybrid_structure = {hybrid: translation_table[replacement] if replacement is not None else translation_table[hybrid]
                                for hybrid, replacement in hybrids.iteritems()}
            volatile.set(key, hybrid_structure)
        else:
            Toolbox.log_cache_hit('hybrid_structure', True)
        return hybrid_structure
class Toolbox(object):
    """
    Generic class for various methods
    """
    @staticmethod
    def try_get(key, fallback):
        """
        Returns a value linked to a certain key from the volatile store.
        If not found in the volatile store, it will try fetch it from the persistent
        store. If not found, it returns the fallback.

        :param key: key to look up
        :param fallback: value returned when the key exists in neither store
        """
        volatile = VolatileFactory.get_client()
        data = volatile.get(key)
        if data is None:
            try:
                persistent = PersistentFactory.get_client()
                data = persistent.get(key)
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
                # are not swallowed; any store failure means "use the fallback".
                data = fallback
            # Prime the volatile store so the next lookup is a cache hit.
            volatile.set(key, data)
        return data
    @staticmethod
    def check_type(value, required_type):
        """
        Validates whether a certain value is of a given type. Some types are treated as special
        case:
          - A 'str' type accepts 'str', 'unicode' and 'basestring'
          - A 'float' type accepts 'float', 'int'
          - A list instance acts like an enum

        :returns: tuple (correct, allowed_types, given_type)
        """
        given_type = type(value)
        if required_type is str:
            correct = isinstance(value, basestring)
            allowed_types = ['str', 'unicode', 'basestring']
        elif required_type is float:
            correct = isinstance(value, float) or isinstance(value, int)
            allowed_types = ['float', 'int']
        elif required_type is int:
            correct = isinstance(value, int) or isinstance(value, long)
            allowed_types = ['int', 'long']
        elif isinstance(required_type, list):
            # We're in an enum scenario. Field_type isn't a real type, but a list containing
            # all possible enum values. Here as well, we need to do some str/unicode/basestring
            # checking.
            # Guard against an empty enum list (previously raised IndexError);
            # no value can be a member of an empty enum.
            if len(required_type) > 0 and isinstance(required_type[0], basestring):
                value = str(value)
            correct = value in required_type
            allowed_types = required_type
            given_type = value
        else:
            correct = isinstance(value, required_type)
            allowed_types = [required_type.__name__]
        return correct, allowed_types, given_type
    @staticmethod
    def log_cache_hit(cache_type, hit):
        """
        Registers a cache hit or miss with a specific type.
        Best-effort only: statistics failures must never break the caller.
        """
        volatile = VolatileFactory.get_client()
        key = 'ovs_stats_cache_{0}_{1}'.format(cache_type, 'hit' if hit else 'miss')
        try:
            successful = volatile.incr(key)
            if not successful:
                # Key did not exist yet; initialize the counter.
                volatile.set(key, 1)
        except Exception:
            # Narrowed from a bare `except:`; stats are strictly best-effort.
            pass
class Migration(object):
    """
    Handles all migrations between versions.
    """
    @staticmethod
    def migrate():
        """
        Executes all migrations, tracking a per-migrator "migration version"
        (an always-increasing integer) in the persistent store.
        """
        def _run(migrate_method, from_version, minimum):
            """Runs a single migrator; returns the highest version reached."""
            reached = migrate_method(from_version)
            return reached if reached > minimum else minimum

        key = 'ovs_model_version'
        persistent = PersistentFactory.get_client()
        data = persistent.get(key) if persistent.exists(key) else {}
        # Collect (identifier, migrate) pairs from every plain-object class
        # defined in the modules of the 'migration' directory.
        migrators = []
        path = os.path.join(os.path.dirname(__file__), 'migration')
        for filename in os.listdir(path):
            full_path = os.path.join(path, filename)
            if not (os.path.isfile(full_path) and filename.endswith('.py')):
                continue
            module_name = filename.replace('.py', '')
            module = imp.load_source(module_name, full_path)
            for _, candidate in inspect.getmembers(module):
                if inspect.isclass(candidate) \
                        and candidate.__module__ == module_name \
                        and 'object' in [base.__name__ for base in candidate.__bases__]:
                    migrators.append((candidate.identifier, candidate.migrate))
        for identifier, method in migrators:
            data[identifier] = _run(method, data.get(identifier, 0), 0)
        persistent.set(key, data)
| |
"""
Lowering implementation for object mode.
"""
import builtins
import operator
import inspect
from llvmlite.llvmpy.core import Type, Constant
import llvmlite.llvmpy.core as lc
from numba.core import types, utils, ir, generators, cgutils
from numba.core.errors import ForbiddenConstruct, LoweringError
from numba.core.lowering import BaseLower
# Issue #475: locals() is unsupported as calling it naively would give
# out wrong results.
_unsupported_builtins = set([locals])
# Map operators to methods on the PythonAPI class.
# Each entry maps an operator function to a pair
# (PythonAPI method name, inplace flag) consumed by PyLower.lower_binop.
PYTHON_BINOPMAP = {
    operator.add: ("number_add", False),
    operator.sub: ("number_subtract", False),
    operator.mul: ("number_multiply", False),
    operator.truediv: ("number_truedivide", False),
    operator.floordiv: ("number_floordivide", False),
    operator.mod: ("number_remainder", False),
    operator.pow: ("number_power", False),
    operator.lshift: ("number_lshift", False),
    operator.rshift: ("number_rshift", False),
    operator.and_: ("number_and", False),
    operator.or_: ("number_or", False),
    operator.xor: ("number_xor", False),
    # inplace operators
    operator.iadd: ("number_add", True),
    operator.isub: ("number_subtract", True),
    operator.imul: ("number_multiply", True),
    operator.itruediv: ("number_truedivide", True),
    operator.ifloordiv: ("number_floordivide", True),
    operator.imod: ("number_remainder", True),
    operator.ipow: ("number_power", True),
    operator.ilshift: ("number_lshift", True),
    operator.irshift: ("number_rshift", True),
    operator.iand: ("number_and", True),
    operator.ior: ("number_or", True),
    operator.ixor: ("number_xor", True),
}
# Matrix-multiplication operators are registered separately from the literal.
PYTHON_BINOPMAP[operator.matmul] = ("number_matrix_multiply", False)
PYTHON_BINOPMAP[operator.imatmul] = ("number_matrix_multiply", True)
# Map rich-comparison operators to the comparison-op strings passed to
# pyapi.object_richcompare(); 'in' is special-cased in lower_binop
# (operand order is swapped there).
PYTHON_COMPAREOPMAP = {
    operator.eq: '==',
    operator.ne: '!=',
    operator.lt: '<',
    operator.le: '<=',
    operator.gt: '>',
    operator.ge: '>=',
    operator.is_: 'is',
    operator.is_not: 'is not',
    operator.contains: 'in'
}
class PyLower(BaseLower):
    """
    Object-mode lowering: every value is handled as a generic Python object
    and all operations are emitted as CPython C-API calls through
    ``self.pyapi``.  Reference counting is managed explicitly via
    incref()/decref() on every load/store/delete.
    """
    GeneratorLower = generators.PyGeneratorLower
    def init(self):
        """Per-function initialization of lowering state."""
        # Strings to be frozen into the Environment object
        self._frozen_strings = set()
        # Names of variables currently live (maintained per-block and on del)
        self._live_vars = set()
    def pre_lower(self):
        """Set up the Python C-API helper before lowering starts."""
        super(PyLower, self).pre_lower()
        self.init_pyapi()
    def post_lower(self):
        # Nothing to clean up in object mode.
        pass
    def pre_block(self, block):
        # Reset the live-variable set from the block-entry analysis.
        self.init_vars(block)
    def lower_inst(self, inst):
        """Lower a single IR instruction to C-API calls."""
        if isinstance(inst, ir.Assign):
            value = self.lower_assign(inst)
            self.storevar(value, inst.target.name)
        elif isinstance(inst, ir.SetItem):
            target = self.loadvar(inst.target.name)
            index = self.loadvar(inst.index.name)
            value = self.loadvar(inst.value.name)
            ok = self.pyapi.object_setitem(target, index, value)
            self.check_int_status(ok)
        elif isinstance(inst, ir.DelItem):
            target = self.loadvar(inst.target.name)
            index = self.loadvar(inst.index.name)
            ok = self.pyapi.object_delitem(target, index)
            self.check_int_status(ok)
        elif isinstance(inst, ir.SetAttr):
            target = self.loadvar(inst.target.name)
            value = self.loadvar(inst.value.name)
            ok = self.pyapi.object_setattr(target,
                                           self._freeze_string(inst.attr),
                                           value)
            self.check_int_status(ok)
        elif isinstance(inst, ir.DelAttr):
            target = self.loadvar(inst.target.name)
            ok = self.pyapi.object_delattr(target,
                                           self._freeze_string(inst.attr))
            self.check_int_status(ok)
        elif isinstance(inst, ir.StoreMap):
            dct = self.loadvar(inst.dct.name)
            key = self.loadvar(inst.key.name)
            value = self.loadvar(inst.value.name)
            ok = self.pyapi.dict_setitem(dct, key, value)
            self.check_int_status(ok)
        elif isinstance(inst, ir.Return):
            retval = self.loadvar(inst.value.name)
            if self.generator_info:
                # StopIteration
                # We own a reference to the "return value", but we
                # don't return it.
                self.pyapi.decref(retval)
                self.genlower.return_from_generator(self)
                return
            # No need to incref() as the reference is already owned.
            self.call_conv.return_value(self.builder, retval)
        elif isinstance(inst, ir.Branch):
            cond = self.loadvar(inst.cond.name)
            if cond.type == Type.int(1):
                # Already an i1: use directly as the branch predicate.
                istrue = cond
            else:
                # Generic object: ask CPython for its truthiness.
                istrue = self.pyapi.object_istrue(cond)
            zero = lc.Constant.null(istrue.type)
            pred = self.builder.icmp(lc.ICMP_NE, istrue, zero)
            tr = self.blkmap[inst.truebr]
            fl = self.blkmap[inst.falsebr]
            self.builder.cbranch(pred, tr, fl)
        elif isinstance(inst, ir.Jump):
            target = self.blkmap[inst.target]
            self.builder.branch(target)
        elif isinstance(inst, ir.Del):
            self.delvar(inst.value)
        elif isinstance(inst, ir.Raise):
            if inst.exception is not None:
                exc = self.loadvar(inst.exception.name)
                # A reference will be stolen by raise_object() and another
                # by return_exception_raised().
                self.incref(exc)
            else:
                exc = None
            self.pyapi.raise_object(exc)
            self.return_exception_raised()
        else:
            raise NotImplementedError(type(inst), inst)
    @utils.cached_property
    def _omitted_typobj(self):
        """Return a `OmittedArg` type instance as a LLVM value suitable for
        testing at runtime.
        """
        from numba.core.dispatcher import OmittedArg
        return self.pyapi.unserialize(
            self.pyapi.serialize_object(OmittedArg))
    def lower_assign(self, inst):
        """
        Lower the RHS of an assignment.
        The returned object must have a new reference.
        """
        value = inst.value
        if isinstance(value, (ir.Const, ir.FreeVar)):
            return self.lower_const(value.value)
        elif isinstance(value, ir.Var):
            val = self.loadvar(value.name)
            self.incref(val)
            return val
        elif isinstance(value, ir.Expr):
            return self.lower_expr(value)
        elif isinstance(value, ir.Global):
            return self.lower_global(value.name, value.value)
        elif isinstance(value, ir.Yield):
            return self.lower_yield(value)
        elif isinstance(value, ir.Arg):
            param = self.func_ir.func_id.pysig.parameters.get(value.name)
            obj = self.fnargs[value.index]
            slot = cgutils.alloca_once_value(self.builder, obj)
            # Don't check for OmittedArg unless the argument has a default
            if param is not None and param.default is inspect.Parameter.empty:
                self.incref(obj)
                self.builder.store(obj, slot)
            else:
                # When an argument is omitted, the dispatcher hands it as
                # _OmittedArg(<default value>)
                typobj = self.pyapi.get_type(obj)
                is_omitted = self.builder.icmp_unsigned('==', typobj,
                                                        self._omitted_typobj)
                with self.builder.if_else(is_omitted, likely=False) as (omitted, present):
                    with present:
                        self.incref(obj)
                        self.builder.store(obj, slot)
                    with omitted:
                        # The argument is omitted => get the default value
                        obj = self.pyapi.object_getattr_string(obj, 'value')
                        self.builder.store(obj, slot)
            return self.builder.load(slot)
        else:
            raise NotImplementedError(type(value), value)
    def lower_yield(self, inst):
        """Lower a yield: save live state, return the value, then resume."""
        yp = self.generator_info.yield_points[inst.index]
        assert yp.inst is inst
        self.genlower.init_generator_state(self)
        # Save live vars in state
        # We also need to save live vars that are del'ed afterwards.
        y = generators.LowerYield(self, yp, yp.live_vars | yp.weak_live_vars)
        y.lower_yield_suspend()
        # Yield to caller
        val = self.loadvar(inst.value.name)
        # Let caller own the reference
        self.pyapi.incref(val)
        self.call_conv.return_value(self.builder, val)
        # Resumption point
        y.lower_yield_resume()
        # None is returned by the yield expression
        return self.pyapi.make_none()
    def lower_binop(self, expr, op, inplace=False):
        """Lower a binary (or rich-comparison) operation via the C-API maps."""
        lhs = self.loadvar(expr.lhs.name)
        rhs = self.loadvar(expr.rhs.name)
        assert not isinstance(op, str)
        if op in PYTHON_BINOPMAP:
            fname, inplace = PYTHON_BINOPMAP[op]
            fn = getattr(self.pyapi, fname)
            res = fn(lhs, rhs, inplace=inplace)
        else:
            # Assumed to be rich comparison
            fn = PYTHON_COMPAREOPMAP.get(expr.fn, expr.fn)
            if fn == 'in':   # 'in' and operator.contains have args reversed
                lhs, rhs = rhs, lhs
            res = self.pyapi.object_richcompare(lhs, rhs, fn)
        self.check_error(res)
        return res
    def lower_expr(self, expr):
        """Lower an IR expression; dispatches on expr.op."""
        if expr.op == 'binop':
            return self.lower_binop(expr, expr.fn, inplace=False)
        elif expr.op == 'inplace_binop':
            return self.lower_binop(expr, expr.fn, inplace=True)
        elif expr.op == 'unary':
            value = self.loadvar(expr.value.name)
            if expr.fn == operator.neg:
                res = self.pyapi.number_negative(value)
            elif expr.fn == operator.pos:
                res = self.pyapi.number_positive(value)
            elif expr.fn == operator.not_:
                res = self.pyapi.object_not(value)
                self.check_int_status(res)
                res = self.pyapi.bool_from_bool(res)
            elif expr.fn == operator.invert:
                res = self.pyapi.number_invert(value)
            else:
                raise NotImplementedError(expr)
            self.check_error(res)
            return res
        elif expr.op == 'call':
            argvals = [self.loadvar(a.name) for a in expr.args]
            fn = self.loadvar(expr.func.name)
            args = self.pyapi.tuple_pack(argvals)
            if expr.vararg:
                # Expand *args
                new_args = self.pyapi.number_add(args,
                                                 self.loadvar(expr.vararg.name))
                self.decref(args)
                args = new_args
            if not expr.kws:
                # No named arguments
                ret = self.pyapi.call(fn, args, None)
            else:
                # Named arguments
                keyvalues = [(k, self.loadvar(v.name)) for k, v in expr.kws]
                kws = self.pyapi.dict_pack(keyvalues)
                ret = self.pyapi.call(fn, args, kws)
                self.decref(kws)
            self.decref(args)
            self.check_error(ret)
            return ret
        elif expr.op == 'getattr':
            obj = self.loadvar(expr.value.name)
            res = self.pyapi.object_getattr(obj, self._freeze_string(expr.attr))
            self.check_error(res)
            return res
        elif expr.op == 'build_tuple':
            items = [self.loadvar(it.name) for it in expr.items]
            res = self.pyapi.tuple_pack(items)
            self.check_error(res)
            return res
        elif expr.op == 'build_list':
            items = [self.loadvar(it.name) for it in expr.items]
            res = self.pyapi.list_pack(items)
            self.check_error(res)
            return res
        elif expr.op == 'build_map':
            res = self.pyapi.dict_new(expr.size)
            self.check_error(res)
            for k, v in expr.items:
                key = self.loadvar(k.name)
                value = self.loadvar(v.name)
                ok = self.pyapi.dict_setitem(res, key, value)
                self.check_int_status(ok)
            return res
        elif expr.op == 'build_set':
            items = [self.loadvar(it.name) for it in expr.items]
            res = self.pyapi.set_new()
            self.check_error(res)
            for it in items:
                ok = self.pyapi.set_add(res, it)
                self.check_int_status(ok)
            return res
        elif expr.op == 'getiter':
            obj = self.loadvar(expr.value.name)
            res = self.pyapi.object_getiter(obj)
            self.check_error(res)
            return res
        elif expr.op == 'iternext':
            iterobj = self.loadvar(expr.value.name)
            item = self.pyapi.iter_next(iterobj)
            is_valid = cgutils.is_not_null(self.builder, item)
            pair = self.pyapi.tuple_new(2)
            with self.builder.if_else(is_valid) as (then, otherwise):
                with then:
                    self.pyapi.tuple_setitem(pair, 0, item)
                with otherwise:
                    self.check_occurred()
                    # Make the tuple valid by inserting None as dummy
                    # iteration "result" (it will be ignored).
                    self.pyapi.tuple_setitem(pair, 0, self.pyapi.make_none())
            self.pyapi.tuple_setitem(pair, 1, self.pyapi.bool_from_bool(is_valid))
            return pair
        elif expr.op == 'pair_first':
            pair = self.loadvar(expr.value.name)
            first = self.pyapi.tuple_getitem(pair, 0)
            # tuple_getitem borrows; take our own reference.
            self.incref(first)
            return first
        elif expr.op == 'pair_second':
            pair = self.loadvar(expr.value.name)
            second = self.pyapi.tuple_getitem(pair, 1)
            self.incref(second)
            return second
        elif expr.op == 'exhaust_iter':
            iterobj = self.loadvar(expr.value.name)
            tup = self.pyapi.sequence_tuple(iterobj)
            self.check_error(tup)
            # Check tuple size is as expected
            tup_size = self.pyapi.tuple_size(tup)
            expected_size = self.context.get_constant(types.intp, expr.count)
            has_wrong_size = self.builder.icmp(lc.ICMP_NE,
                                               tup_size, expected_size)
            with cgutils.if_unlikely(self.builder, has_wrong_size):
                self.return_exception(ValueError)
            return tup
        elif expr.op == 'getitem':
            value = self.loadvar(expr.value.name)
            index = self.loadvar(expr.index.name)
            res = self.pyapi.object_getitem(value, index)
            self.check_error(res)
            return res
        elif expr.op == 'static_getitem':
            value = self.loadvar(expr.value.name)
            index = self.context.get_constant(types.intp, expr.index)
            indexobj = self.pyapi.long_from_ssize_t(index)
            self.check_error(indexobj)
            res = self.pyapi.object_getitem(value, indexobj)
            self.decref(indexobj)
            self.check_error(res)
            return res
        elif expr.op == 'getslice':
            target = self.loadvar(expr.target.name)
            start = self.loadvar(expr.start.name)
            stop = self.loadvar(expr.stop.name)
            slicefn = self.get_builtin_obj("slice")
            sliceobj = self.pyapi.call_function_objargs(slicefn, (start, stop))
            self.decref(slicefn)
            self.check_error(sliceobj)
            res = self.pyapi.object_getitem(target, sliceobj)
            self.check_error(res)
            return res
        elif expr.op == 'cast':
            # No conversion needed in object mode: everything is a PyObject.
            val = self.loadvar(expr.value.name)
            self.incref(val)
            return val
        elif expr.op == 'phi':
            raise LoweringError("PHI not stripped")
        elif expr.op == 'null':
            # Make null value
            return cgutils.get_null_value(self.pyapi.pyobj)
        else:
            raise NotImplementedError(expr)
    def lower_const(self, const):
        """Lower a constant by reading it back from the Environment."""
        # All constants are frozen inside the environment
        index = self.env_manager.add_const(const)
        ret = self.env_manager.read_const(index)
        self.check_error(ret)
        self.incref(ret)
        return ret
    def lower_global(self, name, value):
        """
        1) Check global scope dictionary.
        2) Check __builtins__.
            2a) is it a dictionary (for non __main__ module)
            2b) is it a module (for __main__ module)
        """
        moddict = self.get_module_dict()
        obj = self.pyapi.dict_getitem(moddict, self._freeze_string(name))
        self.incref(obj)  # obj is borrowed
        try:
            if value in _unsupported_builtins:
                raise ForbiddenConstruct("builtins %s() is not supported"
                                         % name, loc=self.loc)
        except TypeError:
            # `value` is unhashable, ignore
            pass
        if hasattr(builtins, name):
            # The name may resolve to a builtin: emit a runtime fallback
            # lookup for when the module-global slot is NULL.
            obj_is_null = self.is_null(obj)
            bbelse = self.builder.basic_block
            with self.builder.if_then(obj_is_null):
                mod = self.pyapi.dict_getitem(moddict,
                                              self._freeze_string("__builtins__"))
                builtin = self.builtin_lookup(mod, name)
                bbif = self.builder.basic_block
            retval = self.builder.phi(self.pyapi.pyobj)
            retval.add_incoming(obj, bbelse)
            retval.add_incoming(builtin, bbif)
        else:
            retval = obj
            with cgutils.if_unlikely(self.builder, self.is_null(retval)):
                self.pyapi.raise_missing_global_error(name)
                self.return_exception_raised()
        return retval
    # -------------------------------------------------------------------------
    def get_module_dict(self):
        """Return the function's module globals from the environment body."""
        return self.env_body.globals
    def get_builtin_obj(self, name):
        # XXX The builtins dict could be bound into the environment
        moddict = self.get_module_dict()
        mod = self.pyapi.dict_getitem(moddict,
                                      self._freeze_string("__builtins__"))
        return self.builtin_lookup(mod, name)
    def builtin_lookup(self, mod, name):
        """
        Args
        ----
        mod:
            The __builtins__ dictionary or module, as looked up in
            a module's globals.
        name: str
            The object to lookup
        """
        fromdict = self.pyapi.dict_getitem(mod, self._freeze_string(name))
        self.incref(fromdict)       # fromdict is borrowed
        bbifdict = self.builder.basic_block
        with cgutils.if_unlikely(self.builder, self.is_null(fromdict)):
            # This happen if we are using the __main__ module
            frommod = self.pyapi.object_getattr(mod, self._freeze_string(name))
            with cgutils.if_unlikely(self.builder, self.is_null(frommod)):
                self.pyapi.raise_missing_global_error(name)
                self.return_exception_raised()
            bbifmod = self.builder.basic_block
        builtin = self.builder.phi(self.pyapi.pyobj)
        builtin.add_incoming(fromdict, bbifdict)
        builtin.add_incoming(frommod, bbifmod)
        return builtin
    def check_occurred(self):
        """
        Return (from the lowered function) if an exception occurred.
        """
        err_occurred = cgutils.is_not_null(self.builder,
                                           self.pyapi.err_occurred())
        with cgutils.if_unlikely(self.builder, err_occurred):
            self.return_exception_raised()
    def check_error(self, obj):
        """
        Return (from the lowered function) if *obj* is NULL.
        """
        with cgutils.if_unlikely(self.builder, self.is_null(obj)):
            self.return_exception_raised()
        return obj
    def check_int_status(self, num, ok_value=0):
        """
        Raise an exception if *num* is smaller than *ok_value*.
        """
        ok = lc.Constant.int(num.type, ok_value)
        pred = self.builder.icmp(lc.ICMP_SLT, num, ok)
        with cgutils.if_unlikely(self.builder, pred):
            self.return_exception_raised()
    def is_null(self, obj):
        """Emit a NULL-pointer test for *obj*."""
        return cgutils.is_null(self.builder, obj)
    def return_exception_raised(self):
        """
        Return with the currently raised exception.
        """
        self.cleanup_vars()
        self.call_conv.return_exc(self.builder)
    def init_vars(self, block):
        """
        Initialize live variables for *block*.
        """
        self._live_vars = set(self.func_ir.get_block_entry_vars(block))
    def _getvar(self, name, ltype=None):
        # Get (allocating on first use) the stack slot for variable *name*.
        if name not in self.varmap:
            self.varmap[name] = self.alloca(name, ltype=ltype)
        return self.varmap[name]
    def loadvar(self, name):
        """
        Load the llvm value of the variable named *name*.
        """
        # If this raises then the live variables analysis is wrong
        assert name in self._live_vars, name
        ptr = self.varmap[name]
        val = self.builder.load(ptr)
        with cgutils.if_unlikely(self.builder, self.is_null(val)):
            self.pyapi.raise_missing_name_error(name)
            self.return_exception_raised()
        return val
    def delvar(self, name):
        """
        Delete the variable slot with the given name. This will decref
        the corresponding Python object.
        """
        # If this raises then the live variables analysis is wrong
        self._live_vars.remove(name)
        ptr = self._getvar(name)  # initializes `name` if not already
        self.decref(self.builder.load(ptr))
        # This is a safety guard against double decref's, but really
        # the IR should be correct and have only one Del per variable
        # and code path.
        self.builder.store(cgutils.get_null_value(ptr.type.pointee), ptr)
    def storevar(self, value, name, clobber=False):
        """
        Stores a llvm value and allocate stack slot if necessary.
        The llvm value can be of arbitrary type.
        """
        is_redefine = name in self._live_vars and not clobber
        ptr = self._getvar(name, ltype=value.type)
        if is_redefine:
            old = self.builder.load(ptr)
        else:
            self._live_vars.add(name)
        assert value.type == ptr.type.pointee, (str(value.type),
                                                str(ptr.type.pointee))
        self.builder.store(value, ptr)
        # Safe to call decref even on non python object
        if is_redefine:
            self.decref(old)
    def cleanup_vars(self):
        """
        Cleanup live variables.
        """
        for name in self._live_vars:
            ptr = self._getvar(name)
            self.decref(self.builder.load(ptr))
    def alloca(self, name, ltype=None):
        """
        Allocate a stack slot and initialize it to NULL.
        The default is to allocate a pyobject pointer.
        Use ``ltype`` to override.
        """
        if ltype is None:
            ltype = self.context.get_value_type(types.pyobject)
        with self.builder.goto_block(self.entry_block):
            ptr = self.builder.alloca(ltype, name=name)
            self.builder.store(cgutils.get_null_value(ltype), ptr)
        return ptr
    def incref(self, value):
        """Increment the refcount of *value*."""
        self.pyapi.incref(value)
    def decref(self, value):
        """
        This is allowed to be called on a non-pyobject pointer, in which case
        no code is inserted.
        """
        lpyobj = self.context.get_value_type(types.pyobject)
        if value.type == lpyobj:
            self.pyapi.decref(value)
    def _freeze_string(self, string):
        """
        Freeze a Python string object into the code.
        """
        return self.lower_const(string)
| |
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import copy
import datetime
import hashlib
import inspect
import os
import pprint
import mock
from oslo.serialization import jsonutils
from oslo.utils import timeutils
import six
from testtools import matchers
from nova.conductor import rpcapi as conductor_rpcapi
from nova import context
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
from nova.openstack.common import log
from nova import rpc
from nova import test
from nova.tests.unit import fake_notifier
from nova import utils
LOG = log.getLogger(__name__)
class MyOwnedObject(base.NovaPersistentObject, base.NovaObject):
    """Minimal owned object; used as the related-object fixture for MyObj."""
    VERSION = '1.0'
    fields = {'baz': fields.Field(fields.Integer())}
class MyObj(base.NovaPersistentObject, base.NovaObject,
            base.NovaObjectDictCompat):
    """Primary test fixture exercising fields, remotable methods and
    version-compatibility behaviour."""
    VERSION = '1.6'
    fields = {'foo': fields.Field(fields.Integer(), default=1),
              'bar': fields.Field(fields.String()),
              'missing': fields.Field(fields.String()),
              'readonly': fields.Field(fields.Integer(), read_only=True),
              'rel_object': fields.ObjectField('MyOwnedObject', nullable=True),
              'rel_objects': fields.ListOfObjectsField('MyOwnedObject',
                                                       nullable=True),
              }
    @staticmethod
    def _from_db_object(context, obj, db_obj):
        # Builds a fresh MyObj from a db-style dict; note the `obj` argument
        # is ignored and a new instance is returned.
        self = MyObj()
        self.foo = db_obj['foo']
        self.bar = db_obj['bar']
        self.missing = db_obj['missing']
        self.readonly = 1
        return self
    def obj_load_attr(self, attrname):
        # Lazy-load hook: any unset attribute resolves to the marker 'loaded!'.
        setattr(self, attrname, 'loaded!')
    @base.remotable_classmethod
    def query(cls, context):
        # Returns a clean (no pending changes) instance with fixed values.
        obj = cls(context=context, foo=1, bar='bar')
        obj.obj_reset_changes()
        return obj
    @base.remotable
    def marco(self, context):
        return 'polo'
    @base.remotable
    def _update_test(self, context):
        # Behaviour depends on the calling context's project_id, so tests can
        # verify which context a remoted call executed under.
        if context.project_id == 'alternate':
            self.bar = 'alternate-context'
        else:
            self.bar = 'updated'
    @base.remotable
    def save(self, context):
        self.obj_reset_changes()
    @base.remotable
    def refresh(self, context):
        self.foo = 321
        self.bar = 'refreshed'
        self.obj_reset_changes()
    @base.remotable
    def modify_save_modify(self, context):
        # Mutates, saves (clearing changes), then mutates again so tests can
        # inspect the post-save change set.
        self.bar = 'meow'
        self.save()
        self.foo = 42
        self.rel_object = MyOwnedObject(baz=42)
    def obj_make_compatible(self, primitive, target_version):
        super(MyObj, self).obj_make_compatible(primitive, target_version)
        # NOTE(danms): Simulate an older version that had a different
        # format for the 'bar' attribute
        if target_version == '1.1' and 'bar' in primitive:
            primitive['bar'] = 'old%s' % primitive['bar']
class MyObjDiffVers(MyObj):
    """Same object as MyObj but registered at an older version (1.5)."""
    VERSION = '1.5'
    @classmethod
    def obj_name(cls):
        # Report the canonical name so this registers under 'MyObj'.
        return 'MyObj'
class MyObj2(object):
    """Object that reuses MyObj's registered name but is not a NovaObject."""
    @classmethod
    def obj_name(cls):
        return 'MyObj'
    @base.remotable_classmethod
    def query(cls, *args, **kwargs):
        pass
class RandomMixInWithNoFields(object):
    """Used to test object inheritance using a mixin that has no fields."""
    # Intentionally empty: its only purpose is to sit in an MRO.
    pass
class TestSubclassedObject(RandomMixInWithNoFields, MyObj):
    # MyObj subclass (via a field-less mixin) contributing one extra field.
    fields = {'new_field': fields.Field(fields.String())}
class TestMetaclass(test.TestCase):
    """Tests for NovaObjectMetaclass registration behaviour."""
    def test_obj_tracking(self):
        """Classes sharing an obj_name() are tracked together, newest
        version first; definition order below is significant."""
        @six.add_metaclass(base.NovaObjectMetaclass)
        class NewBaseClass(object):
            VERSION = '1.0'
            fields = {}
            @classmethod
            def obj_name(cls):
                return cls.__name__
        class Fake1TestObj1(NewBaseClass):
            @classmethod
            def obj_name(cls):
                return 'fake1'
        class Fake1TestObj2(Fake1TestObj1):
            pass
        class Fake1TestObj3(Fake1TestObj1):
            VERSION = '1.1'
        class Fake2TestObj1(NewBaseClass):
            @classmethod
            def obj_name(cls):
                return 'fake2'
        class Fake1TestObj4(Fake1TestObj3):
            VERSION = '1.2'
        class Fake2TestObj2(Fake2TestObj1):
            VERSION = '1.1'
        class Fake1TestObj5(Fake1TestObj1):
            VERSION = '1.1'
        # Newest versions first in the list. Duplicate versions take the
        # newest object.
        expected = {'fake1': [Fake1TestObj4, Fake1TestObj5, Fake1TestObj2],
                    'fake2': [Fake2TestObj2, Fake2TestObj1]}
        self.assertEqual(expected, NewBaseClass._obj_classes)
        # The following should work, also.
        self.assertEqual(expected, Fake1TestObj1._obj_classes)
        self.assertEqual(expected, Fake1TestObj2._obj_classes)
        self.assertEqual(expected, Fake1TestObj3._obj_classes)
        self.assertEqual(expected, Fake1TestObj4._obj_classes)
        self.assertEqual(expected, Fake1TestObj5._obj_classes)
        self.assertEqual(expected, Fake2TestObj1._obj_classes)
        self.assertEqual(expected, Fake2TestObj2._obj_classes)
    def test_field_checking(self):
        """Fields must be field *instances*; classes and plain types fail."""
        def create_class(field):
            class TestField(base.NovaObject):
                VERSION = '1.5'
                fields = {'foo': field()}
            return TestField
        create_class(fields.IPV4AndV6AddressField)
        self.assertRaises(exception.ObjectFieldInvalid,
                          create_class, fields.IPV4AndV6Address)
        self.assertRaises(exception.ObjectFieldInvalid,
                          create_class, int)
class TestObjToPrimitive(test.TestCase):
    def test_obj_to_primitive_list(self):
        class MyObjElement(base.NovaObject):
            fields = {'foo': fields.IntegerField()}

            def __init__(self, foo):
                super(MyObjElement, self).__init__()
                self.foo = foo

        class MyList(base.ObjectListBase, base.NovaObject):
            fields = {'objects': fields.ListOfObjectsField('MyObjElement')}

        mylist = MyList()
        mylist.objects = [MyObjElement(1), MyObjElement(2), MyObjElement(3)]
        # A list object primitivizes to a plain list of per-element
        # primitive dicts.
        self.assertEqual([1, 2, 3],
                         [x['foo'] for x in base.obj_to_primitive(mylist)])

    def test_obj_to_primitive_dict(self):
        myobj = MyObj(foo=1, bar='foo')
        self.assertEqual({'foo': 1, 'bar': 'foo'},
                         base.obj_to_primitive(myobj))

    def test_obj_to_primitive_recursive(self):
        class MyList(base.ObjectListBase, base.NovaObject):
            fields = {'objects': fields.ListOfObjectsField('MyObj')}

        mylist = MyList(objects=[MyObj(), MyObj()])
        for i, value in enumerate(mylist):
            value.foo = i
        self.assertEqual([{'foo': 0}, {'foo': 1}],
                         base.obj_to_primitive(mylist))

    def test_obj_to_primitive_with_ip_addr(self):
        class TestObject(base.NovaObject):
            fields = {'addr': fields.IPAddressField(),
                      'cidr': fields.IPNetworkField()}

        # IP address/network fields primitivize back to their string
        # representations.
        obj = TestObject(addr='1.2.3.4', cidr='1.1.1.1/16')
        self.assertEqual({'addr': '1.2.3.4', 'cidr': '1.1.1.1/16'},
                         base.obj_to_primitive(obj))
class TestObjMakeList(test.TestCase):
    def test_obj_make_list(self):
        class MyList(base.ObjectListBase, base.NovaObject):
            pass

        # 'missing' is a regular MyObj field despite the name (see
        # test_object_inheritance); all three fields must be copied
        # from the db dicts onto each list item.
        db_objs = [{'foo': 1, 'bar': 'baz', 'missing': 'banana'},
                   {'foo': 2, 'bar': 'bat', 'missing': 'apple'},
                   ]
        mylist = base.obj_make_list('ctxt', MyList(), MyObj, db_objs)
        self.assertEqual(2, len(mylist))
        # The list inherits the context it was built with.
        self.assertEqual('ctxt', mylist._context)
        for index, item in enumerate(mylist):
            self.assertEqual(db_objs[index]['foo'], item.foo)
            self.assertEqual(db_objs[index]['bar'], item.bar)
            self.assertEqual(db_objs[index]['missing'], item.missing)
def compare_obj(test, obj, db_obj, subs=None, allow_missing=None,
                comparators=None):
    """Compare a NovaObject and a dict-like database object.

    This automatically converts TZ-aware datetimes and iterates over
    the fields of the object.

    :param:test: The TestCase doing the comparison
    :param:obj: The NovaObject to examine
    :param:db_obj: The dict-like database object to use as reference
    :param:subs: A dict of objkey=dbkey field substitutions
    :param:allow_missing: A list of fields that may not be in db_obj
    :param:comparators: Map of comparator functions to use for certain fields
    """
    subs = subs or {}
    allow_missing = allow_missing or []
    comparators = comparators or {}

    for field in obj.fields:
        # Fields that are allowed to be absent are only checked when
        # the object actually has them set.
        if field in allow_missing and not obj.obj_attr_is_set(field):
            continue
        actual = getattr(obj, field)
        expected = db_obj[subs.get(field, field)]
        # Strip tzinfo so aware object datetimes compare equal to the
        # naive datetimes stored in the db.
        if isinstance(actual, datetime.datetime):
            actual = actual.replace(tzinfo=None)
        # Fall back to plain equality unless a custom comparator was
        # supplied for this field.
        comparator = comparators.get(field, test.assertEqual)
        comparator(expected, actual)
class _BaseTestCase(test.TestCase):
    """Shared setup for the local and remote object test variants."""

    def setUp(self):
        super(_BaseTestCase, self).setUp()
        # Populated by _RemoteTest's conductor stubs; stays empty when
        # running in local mode.
        self.remote_object_calls = list()
        self.user_id = 'fake-user'
        self.project_id = 'fake-project'
        self.context = context.RequestContext(self.user_id, self.project_id)
        fake_notifier.stub_notifier(self.stubs)
        self.addCleanup(fake_notifier.reset)

    def compare_obj(self, obj, db_obj, subs=None, allow_missing=None,
                    comparators=None):
        # Thin wrapper over the module-level compare_obj() with self as
        # the asserting TestCase.
        compare_obj(self, obj, db_obj, subs=subs, allow_missing=allow_missing,
                    comparators=comparators)

    def json_comparator(self, expected, obj_val):
        # json-ify an object field for comparison with its db str
        # equivalent
        self.assertEqual(expected, jsonutils.dumps(obj_val))

    def str_comparator(self, expected, obj_val):
        """Compare an object field to a string in the db by performing
        a simple coercion on the object field value.
        """
        self.assertEqual(expected, str(obj_val))

    def assertNotIsInstance(self, obj, cls, msg=None):
        """Python < v2.7 compatibility. Assert 'not isinstance(obj, cls)."""
        try:
            # Prefer the native implementation when the running
            # unittest provides it.
            f = super(_BaseTestCase, self).assertNotIsInstance
        except AttributeError:
            self.assertThat(obj,
                            matchers.Not(matchers.IsInstance(cls)),
                            message=msg or '')
        else:
            f(obj, cls, msg=msg)
class _LocalTest(_BaseTestCase):
    def setUp(self):
        super(_LocalTest, self).setUp()
        # Just in case
        base.NovaObject.indirection_api = None

    def assertRemotes(self):
        # Local mode: nothing should have gone through the conductor.
        self.assertEqual(self.remote_object_calls, [])
@contextlib.contextmanager
def things_temporarily_local():
    """Temporarily disable remoting so the conductor handles the
    request directly.

    Saves and clears ``NovaObject.indirection_api`` around the managed
    block.  The restore happens in a ``finally`` so an exception raised
    inside the block cannot leave remoting permanently disabled (the
    original version skipped the restore on error).
    """
    _api = base.NovaObject.indirection_api
    base.NovaObject.indirection_api = None
    try:
        yield
    finally:
        base.NovaObject.indirection_api = _api
class _RemoteTest(_BaseTestCase):
    def _testable_conductor(self):
        self.conductor_service = self.start_service(
            'conductor', manager='nova.conductor.manager.ConductorManager')
        self.remote_object_calls = list()

        orig_object_class_action = \
            self.conductor_service.manager.object_class_action
        orig_object_action = \
            self.conductor_service.manager.object_action

        def fake_object_class_action(*args, **kwargs):
            # Record the call, then run the real handler with remoting
            # disabled so the conductor serves it directly instead of
            # recursing through the indirection API.
            self.remote_object_calls.append((kwargs.get('objname'),
                                             kwargs.get('objmethod')))
            with things_temporarily_local():
                result = orig_object_class_action(*args, **kwargs)
            return (base.NovaObject.obj_from_primitive(result, context=args[0])
                    if isinstance(result, base.NovaObject) else result)
        self.stubs.Set(self.conductor_service.manager, 'object_class_action',
                       fake_object_class_action)

        def fake_object_action(*args, **kwargs):
            self.remote_object_calls.append((kwargs.get('objinst'),
                                             kwargs.get('objmethod')))
            with things_temporarily_local():
                result = orig_object_action(*args, **kwargs)
            return result
        self.stubs.Set(self.conductor_service.manager, 'object_action',
                       fake_object_action)

        # Things are remoted by default in this session
        base.NovaObject.indirection_api = conductor_rpcapi.ConductorAPI()

        # To make sure local and remote contexts match
        self.stubs.Set(rpc.RequestContextSerializer,
                       'serialize_context',
                       lambda s, c: c)
        self.stubs.Set(rpc.RequestContextSerializer,
                       'deserialize_context',
                       lambda s, c: c)

    def setUp(self):
        super(_RemoteTest, self).setUp()
        self._testable_conductor()

    def assertRemotes(self):
        # Remote mode: at least one call must have hit the conductor.
        self.assertNotEqual(self.remote_object_calls, [])
class _TestObject(object):
def test_object_attrs_in_init(self):
# Spot check a few
objects.Instance
objects.InstanceInfoCache
objects.SecurityGroup
# Now check the test one in this file. Should be newest version
self.assertEqual('1.6', objects.MyObj.VERSION)
    def test_hydration_type_error(self):
        # 'foo' carries a string where an integer is expected (setting
        # foo='a' raises ValueError in test_object_property_type_error),
        # so hydration must fail with ValueError.
        primitive = {'nova_object.name': 'MyObj',
                     'nova_object.namespace': 'nova',
                     'nova_object.version': '1.5',
                     'nova_object.data': {'foo': 'a'}}
        self.assertRaises(ValueError, MyObj.obj_from_primitive, primitive)
def test_hydration(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.5',
'nova_object.data': {'foo': 1}}
real_method = MyObj._obj_from_primitive
def _obj_from_primitive(*args):
return real_method(*args)
with mock.patch.object(MyObj, '_obj_from_primitive') as ofp:
ofp.side_effect = _obj_from_primitive
obj = MyObj.obj_from_primitive(primitive)
ofp.assert_called_once_with(None, '1.5', primitive)
self.assertEqual(obj.foo, 1)
def test_hydration_version_different(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.2',
'nova_object.data': {'foo': 1}}
obj = MyObj.obj_from_primitive(primitive)
self.assertEqual(obj.foo, 1)
self.assertEqual('1.2', obj.VERSION)
def test_hydration_bad_ns(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'foo',
'nova_object.version': '1.5',
'nova_object.data': {'foo': 1}}
self.assertRaises(exception.UnsupportedObjectError,
MyObj.obj_from_primitive, primitive)
def test_hydration_additional_unexpected_stuff(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.5.1',
'nova_object.data': {
'foo': 1,
'unexpected_thing': 'foobar'}}
obj = MyObj.obj_from_primitive(primitive)
self.assertEqual(1, obj.foo)
self.assertFalse(hasattr(obj, 'unexpected_thing'))
# NOTE(danms): If we call obj_from_primitive() directly
# with a version containing .z, we'll get that version
# in the resulting object. In reality, when using the
# serializer, we'll get that snipped off (tested
# elsewhere)
self.assertEqual('1.5.1', obj.VERSION)
def test_dehydration(self):
expected = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.6',
'nova_object.data': {'foo': 1}}
obj = MyObj(foo=1)
obj.obj_reset_changes()
self.assertEqual(obj.obj_to_primitive(), expected)
def test_object_property(self):
obj = MyObj(foo=1)
self.assertEqual(obj.foo, 1)
def test_object_property_type_error(self):
obj = MyObj()
def fail():
obj.foo = 'a'
self.assertRaises(ValueError, fail)
def test_object_dict_syntax(self):
obj = MyObj(foo=123, bar='bar')
self.assertEqual(obj['foo'], 123)
self.assertEqual(sorted(obj.items(), key=lambda x: x[0]),
[('bar', 'bar'), ('foo', 123)])
self.assertEqual(sorted(list(obj.iteritems()), key=lambda x: x[0]),
[('bar', 'bar'), ('foo', 123)])
def test_load(self):
obj = MyObj()
self.assertEqual(obj.bar, 'loaded!')
def test_load_in_base(self):
class Foo(base.NovaObject):
fields = {'foobar': fields.Field(fields.Integer())}
obj = Foo()
with self.assertRaisesRegex(NotImplementedError, ".*foobar.*"):
obj.foobar
def test_loaded_in_primitive(self):
obj = MyObj(foo=1)
obj.obj_reset_changes()
self.assertEqual(obj.bar, 'loaded!')
expected = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.6',
'nova_object.changes': ['bar'],
'nova_object.data': {'foo': 1,
'bar': 'loaded!'}}
self.assertEqual(obj.obj_to_primitive(), expected)
def test_changes_in_primitive(self):
obj = MyObj(foo=123)
self.assertEqual(obj.obj_what_changed(), set(['foo']))
primitive = obj.obj_to_primitive()
self.assertIn('nova_object.changes', primitive)
obj2 = MyObj.obj_from_primitive(primitive)
self.assertEqual(obj2.obj_what_changed(), set(['foo']))
obj2.obj_reset_changes()
self.assertEqual(obj2.obj_what_changed(), set())
def test_obj_class_from_name(self):
obj = base.NovaObject.obj_class_from_name('MyObj', '1.5')
self.assertEqual('1.5', obj.VERSION)
def test_obj_class_from_name_latest_compatible(self):
obj = base.NovaObject.obj_class_from_name('MyObj', '1.1')
self.assertEqual('1.6', obj.VERSION)
def test_unknown_objtype(self):
self.assertRaises(exception.UnsupportedObjectError,
base.NovaObject.obj_class_from_name, 'foo', '1.0')
def test_obj_class_from_name_supported_version(self):
error = None
try:
base.NovaObject.obj_class_from_name('MyObj', '1.25')
except exception.IncompatibleObjectVersion as error:
pass
self.assertIsNotNone(error)
self.assertEqual('1.6', error.kwargs['supported'])
def test_with_alternate_context(self):
ctxt1 = context.RequestContext('foo', 'foo')
ctxt2 = context.RequestContext('bar', 'alternate')
obj = MyObj.query(ctxt1)
obj._update_test(ctxt2)
self.assertEqual(obj.bar, 'alternate-context')
self.assertRemotes()
def test_orphaned_object(self):
obj = MyObj.query(self.context)
obj._context = None
self.assertRaises(exception.OrphanedObjectError,
obj._update_test)
self.assertRemotes()
def test_changed_1(self):
obj = MyObj.query(self.context)
obj.foo = 123
self.assertEqual(obj.obj_what_changed(), set(['foo']))
obj._update_test(self.context)
self.assertEqual(obj.obj_what_changed(), set(['foo', 'bar']))
self.assertEqual(obj.foo, 123)
self.assertRemotes()
def test_changed_2(self):
obj = MyObj.query(self.context)
obj.foo = 123
self.assertEqual(obj.obj_what_changed(), set(['foo']))
obj.save()
self.assertEqual(obj.obj_what_changed(), set([]))
self.assertEqual(obj.foo, 123)
self.assertRemotes()
def test_changed_3(self):
obj = MyObj.query(self.context)
obj.foo = 123
self.assertEqual(obj.obj_what_changed(), set(['foo']))
obj.refresh()
self.assertEqual(obj.obj_what_changed(), set([]))
self.assertEqual(obj.foo, 321)
self.assertEqual(obj.bar, 'refreshed')
self.assertRemotes()
def test_changed_4(self):
obj = MyObj.query(self.context)
obj.bar = 'something'
self.assertEqual(obj.obj_what_changed(), set(['bar']))
obj.modify_save_modify(self.context)
self.assertEqual(obj.obj_what_changed(), set(['foo', 'rel_object']))
self.assertEqual(obj.foo, 42)
self.assertEqual(obj.bar, 'meow')
self.assertIsInstance(obj.rel_object, MyOwnedObject)
self.assertRemotes()
def test_changed_with_sub_object(self):
class ParentObject(base.NovaObject):
fields = {'foo': fields.IntegerField(),
'bar': fields.ObjectField('MyObj'),
}
obj = ParentObject()
self.assertEqual(set(), obj.obj_what_changed())
obj.foo = 1
self.assertEqual(set(['foo']), obj.obj_what_changed())
bar = MyObj()
obj.bar = bar
self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed())
obj.obj_reset_changes()
self.assertEqual(set(), obj.obj_what_changed())
bar.foo = 1
self.assertEqual(set(['bar']), obj.obj_what_changed())
def test_static_result(self):
obj = MyObj.query(self.context)
self.assertEqual(obj.bar, 'bar')
result = obj.marco()
self.assertEqual(result, 'polo')
self.assertRemotes()
def test_updates(self):
obj = MyObj.query(self.context)
self.assertEqual(obj.foo, 1)
obj._update_test()
self.assertEqual(obj.bar, 'updated')
self.assertRemotes()
def test_base_attributes(self):
dt = datetime.datetime(1955, 11, 5)
obj = MyObj(created_at=dt, updated_at=dt, deleted_at=None,
deleted=False)
expected = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.6',
'nova_object.changes':
['deleted', 'created_at', 'deleted_at', 'updated_at'],
'nova_object.data':
{'created_at': timeutils.isotime(dt),
'updated_at': timeutils.isotime(dt),
'deleted_at': None,
'deleted': False,
}
}
self.assertEqual(obj.obj_to_primitive(), expected)
def test_contains(self):
obj = MyObj()
self.assertNotIn('foo', obj)
obj.foo = 1
self.assertIn('foo', obj)
self.assertNotIn('does_not_exist', obj)
def test_obj_attr_is_set(self):
obj = MyObj(foo=1)
self.assertTrue(obj.obj_attr_is_set('foo'))
self.assertFalse(obj.obj_attr_is_set('bar'))
self.assertRaises(AttributeError, obj.obj_attr_is_set, 'bang')
def test_get(self):
obj = MyObj(foo=1)
# Foo has value, should not get the default
self.assertEqual(obj.get('foo', 2), 1)
# Foo has value, should return the value without error
self.assertEqual(obj.get('foo'), 1)
# Bar is not loaded, so we should get the default
self.assertEqual(obj.get('bar', 'not-loaded'), 'not-loaded')
# Bar without a default should lazy-load
self.assertEqual(obj.get('bar'), 'loaded!')
# Bar now has a default, but loaded value should be returned
self.assertEqual(obj.get('bar', 'not-loaded'), 'loaded!')
# Invalid attribute should raise AttributeError
self.assertRaises(AttributeError, obj.get, 'nothing')
# ...even with a default
self.assertRaises(AttributeError, obj.get, 'nothing', 3)
def test_object_inheritance(self):
base_fields = base.NovaPersistentObject.fields.keys()
myobj_fields = (['foo', 'bar', 'missing',
'readonly', 'rel_object', 'rel_objects'] +
base_fields)
myobj3_fields = ['new_field']
self.assertTrue(issubclass(TestSubclassedObject, MyObj))
self.assertEqual(len(myobj_fields), len(MyObj.fields))
self.assertEqual(set(myobj_fields), set(MyObj.fields.keys()))
self.assertEqual(len(myobj_fields) + len(myobj3_fields),
len(TestSubclassedObject.fields))
self.assertEqual(set(myobj_fields) | set(myobj3_fields),
set(TestSubclassedObject.fields.keys()))
def test_obj_as_admin(self):
obj = MyObj(context=self.context)
def fake(*args, **kwargs):
self.assertTrue(obj._context.is_admin)
with mock.patch.object(obj, 'obj_reset_changes') as mock_fn:
mock_fn.side_effect = fake
with obj.obj_as_admin():
obj.save()
self.assertTrue(mock_fn.called)
self.assertFalse(obj._context.is_admin)
def test_obj_as_admin_orphaned(self):
def testme():
obj = MyObj()
with obj.obj_as_admin():
pass
self.assertRaises(exception.OrphanedObjectError, testme)
def test_get_changes(self):
obj = MyObj()
self.assertEqual({}, obj.obj_get_changes())
obj.foo = 123
self.assertEqual({'foo': 123}, obj.obj_get_changes())
obj.bar = 'test'
self.assertEqual({'foo': 123, 'bar': 'test'}, obj.obj_get_changes())
obj.obj_reset_changes()
self.assertEqual({}, obj.obj_get_changes())
def test_obj_fields(self):
class TestObj(base.NovaObject):
fields = {'foo': fields.Field(fields.Integer())}
obj_extra_fields = ['bar']
@property
def bar(self):
return 'this is bar'
obj = TestObj()
self.assertEqual(['foo', 'bar'], obj.obj_fields)
def test_obj_constructor(self):
obj = MyObj(context=self.context, foo=123, bar='abc')
self.assertEqual(123, obj.foo)
self.assertEqual('abc', obj.bar)
self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed())
def test_obj_read_only(self):
obj = MyObj(context=self.context, foo=123, bar='abc')
obj.readonly = 1
self.assertRaises(exception.ReadOnlyFieldError, setattr,
obj, 'readonly', 2)
def test_obj_repr(self):
obj = MyObj(foo=123)
self.assertEqual('MyObj(bar=<?>,created_at=<?>,deleted=<?>,'
'deleted_at=<?>,foo=123,missing=<?>,readonly=<?>,'
'rel_object=<?>,rel_objects=<?>,updated_at=<?>)',
repr(obj))
def test_obj_make_obj_compatible(self):
subobj = MyOwnedObject(baz=1)
obj = MyObj(rel_object=subobj)
obj.obj_relationships = {
'rel_object': [('1.5', '1.1'), ('1.7', '1.2')],
}
primitive = obj.obj_to_primitive()['nova_object.data']
with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat:
obj._obj_make_obj_compatible(copy.copy(primitive), '1.8',
'rel_object')
self.assertFalse(mock_compat.called)
with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat:
obj._obj_make_obj_compatible(copy.copy(primitive),
'1.7', 'rel_object')
mock_compat.assert_called_once_with(
primitive['rel_object']['nova_object.data'], '1.2')
self.assertEqual('1.2',
primitive['rel_object']['nova_object.version'])
with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat:
obj._obj_make_obj_compatible(copy.copy(primitive),
'1.6', 'rel_object')
mock_compat.assert_called_once_with(
primitive['rel_object']['nova_object.data'], '1.1')
self.assertEqual('1.1',
primitive['rel_object']['nova_object.version'])
with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat:
obj._obj_make_obj_compatible(copy.copy(primitive), '1.5',
'rel_object')
mock_compat.assert_called_once_with(
primitive['rel_object']['nova_object.data'], '1.1')
self.assertEqual('1.1',
primitive['rel_object']['nova_object.version'])
with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat:
_prim = copy.copy(primitive)
obj._obj_make_obj_compatible(_prim, '1.4', 'rel_object')
self.assertFalse(mock_compat.called)
self.assertNotIn('rel_object', _prim)
def test_obj_make_compatible_hits_sub_objects(self):
subobj = MyOwnedObject(baz=1)
obj = MyObj(foo=123, rel_object=subobj)
obj.obj_relationships = {'rel_object': [('1.0', '1.0')]}
with mock.patch.object(obj, '_obj_make_obj_compatible') as mock_compat:
obj.obj_make_compatible({'rel_object': 'foo'}, '1.10')
mock_compat.assert_called_once_with({'rel_object': 'foo'}, '1.10',
'rel_object')
def test_obj_make_compatible_skips_unset_sub_objects(self):
obj = MyObj(foo=123)
obj.obj_relationships = {'rel_object': [('1.0', '1.0')]}
with mock.patch.object(obj, '_obj_make_obj_compatible') as mock_compat:
obj.obj_make_compatible({'rel_object': 'foo'}, '1.10')
self.assertFalse(mock_compat.called)
def test_obj_make_compatible_complains_about_missing_rules(self):
subobj = MyOwnedObject(baz=1)
obj = MyObj(foo=123, rel_object=subobj)
obj.obj_relationships = {}
self.assertRaises(exception.ObjectActionError,
obj.obj_make_compatible, {}, '1.0')
def test_obj_make_compatible_handles_list_of_objects(self):
subobj = MyOwnedObject(baz=1)
obj = MyObj(rel_objects=[subobj])
obj.obj_relationships = {'rel_objects': [('1.0', '1.123')]}
def fake_make_compat(primitive, version):
self.assertEqual('1.123', version)
self.assertIn('baz', primitive)
with mock.patch.object(subobj, 'obj_make_compatible') as mock_mc:
mock_mc.side_effect = fake_make_compat
obj.obj_to_primitive('1.0')
self.assertTrue(mock_mc.called)
class TestObject(_LocalTest, _TestObject):
    """Runs the shared _TestObject suite in local (non-remoted) mode."""

    def test_set_defaults(self):
        obj = MyObj()
        obj.obj_set_defaults('foo')
        self.assertTrue(obj.obj_attr_is_set('foo'))
        self.assertEqual(1, obj.foo)

    def test_set_defaults_no_default(self):
        # 'bar' has no declared default, so asking for one is an error.
        obj = MyObj()
        self.assertRaises(exception.ObjectActionError,
                          obj.obj_set_defaults, 'bar')

    def test_set_all_defaults(self):
        # With no argument every field that has a default gets set.
        obj = MyObj()
        obj.obj_set_defaults()
        self.assertEqual(set(['deleted', 'foo']), obj.obj_what_changed())
        self.assertEqual(1, obj.foo)
class TestRemoteObject(_RemoteTest, _TestObject):
    """Runs the shared _TestObject suite through a remoted conductor."""

    def _set_version(self, version):
        """Temporarily override MyObj2.VERSION for one test.

        VERSION is a class attribute shared by the whole process, so
        the previous in-place assignments leaked into later tests and
        made them order-dependent; register the restore before
        mutating.
        """
        self.addCleanup(setattr, MyObj2, 'VERSION', MyObj2.VERSION)
        MyObj2.VERSION = version

    def test_major_version_mismatch(self):
        self._set_version('2.0')
        self.assertRaises(exception.IncompatibleObjectVersion,
                          MyObj2.query, self.context)

    def test_minor_version_greater(self):
        self._set_version('1.7')
        self.assertRaises(exception.IncompatibleObjectVersion,
                          MyObj2.query, self.context)

    def test_minor_version_less(self):
        self._set_version('1.2')
        obj = MyObj2.query(self.context)
        self.assertEqual(obj.bar, 'bar')
        self.assertRemotes()

    def test_compat(self):
        # An older client version triggers obj_make_compatible(),
        # which rewrites 'bar' into the old format.
        self._set_version('1.1')
        obj = MyObj2.query(self.context)
        self.assertEqual('oldbar', obj.bar)

    def test_revision_ignored(self):
        # The .z revision component must not affect negotiation.
        self._set_version('1.1.456')
        obj = MyObj2.query(self.context)
        self.assertEqual('bar', obj.bar)
class TestObjectListBase(test.TestCase):
    def test_list_like_operations(self):
        class MyElement(base.NovaObject):
            fields = {'foo': fields.IntegerField()}

            def __init__(self, foo):
                super(MyElement, self).__init__()
                self.foo = foo

        class Foo(base.ObjectListBase, base.NovaObject):
            fields = {'objects': fields.ListOfObjectsField('MyElement')}

        objlist = Foo(context='foo',
                      objects=[MyElement(1), MyElement(2), MyElement(3)])
        # The list mixin provides the sequence protocol: iteration,
        # len, membership, slicing, indexing, count/index and sort.
        self.assertEqual(list(objlist), objlist.objects)
        self.assertEqual(len(objlist), 3)
        self.assertIn(objlist.objects[0], objlist)
        self.assertEqual(list(objlist[:1]), [objlist.objects[0]])
        # Slices keep the parent list's context.
        self.assertEqual(objlist[:1]._context, 'foo')
        self.assertEqual(objlist[2], objlist.objects[2])
        self.assertEqual(objlist.count(objlist.objects[0]), 1)
        self.assertEqual(objlist.index(objlist.objects[1]), 1)
        objlist.sort(key=lambda x: x.foo, reverse=True)
        self.assertEqual([3, 2, 1],
                         [x.foo for x in objlist])

    def test_serialization(self):
        class Foo(base.ObjectListBase, base.NovaObject):
            fields = {'objects': fields.ListOfObjectsField('Bar')}

        class Bar(base.NovaObject):
            fields = {'foo': fields.Field(fields.String())}

        obj = Foo(objects=[])
        for i in 'abc':
            bar = Bar(foo=i)
            obj.objects.append(bar)
        # Round-trip through primitive form must yield an equal but
        # distinct list object.
        obj2 = base.NovaObject.obj_from_primitive(obj.obj_to_primitive())
        self.assertFalse(obj is obj2)
        self.assertEqual([x.foo for x in obj],
                         [y.foo for y in obj2])

    def _test_object_list_version_mappings(self, list_obj_class):
        # Figure out what sort of object this list is for
        list_field = list_obj_class.fields['objects']
        item_obj_field = list_field._type._element_type
        item_obj_name = item_obj_field._type._obj_name

        # Look through all object classes of this type and make sure that
        # the versions we find are covered by the parent list class
        for item_class in base.NovaObject._obj_classes[item_obj_name]:
            self.assertIn(
                item_class.VERSION,
                list_obj_class.child_versions.values(),
                'Version mapping is incomplete for %s' % (
                    list_obj_class.__name__))

    def test_object_version_mappings(self):
        # Find all object list classes and make sure that they at least handle
        # all the current object versions
        for obj_classes in base.NovaObject._obj_classes.values():
            for obj_class in obj_classes:
                if issubclass(obj_class, base.ObjectListBase):
                    self._test_object_list_version_mappings(obj_class)

    def test_list_changes(self):
        class Foo(base.ObjectListBase, base.NovaObject):
            fields = {'objects': fields.ListOfObjectsField('Bar')}

        class Bar(base.NovaObject):
            fields = {'foo': fields.StringField()}

        obj = Foo(objects=[])
        self.assertEqual(set(['objects']), obj.obj_what_changed())
        obj.objects.append(Bar(foo='test'))
        self.assertEqual(set(['objects']), obj.obj_what_changed())
        obj.obj_reset_changes()
        # This should still look dirty because the child is dirty
        self.assertEqual(set(['objects']), obj.obj_what_changed())
        obj.objects[0].obj_reset_changes()
        # This should now look clean because the child is clean
        self.assertEqual(set(), obj.obj_what_changed())

    def test_initialize_objects(self):
        class Foo(base.ObjectListBase, base.NovaObject):
            fields = {'objects': fields.ListOfObjectsField('Bar')}

        class Bar(base.NovaObject):
            fields = {'foo': fields.StringField()}

        # A fresh list defaults to empty with no pending changes.
        obj = Foo()
        self.assertEqual([], obj.objects)
        self.assertEqual(set(), obj.obj_what_changed())

    def test_obj_repr(self):
        class Foo(base.ObjectListBase, base.NovaObject):
            fields = {'objects': fields.ListOfObjectsField('Bar')}

        class Bar(base.NovaObject):
            fields = {'uuid': fields.StringField()}

        obj = Foo(objects=[Bar(uuid='fake-uuid')])
        self.assertEqual('Foo(objects=[Bar(fake-uuid)])', repr(obj))
class TestObjectSerializer(_BaseTestCase):
def test_serialize_entity_primitive(self):
ser = base.NovaObjectSerializer()
for thing in (1, 'foo', [1, 2], {'foo': 'bar'}):
self.assertEqual(thing, ser.serialize_entity(None, thing))
def test_deserialize_entity_primitive(self):
ser = base.NovaObjectSerializer()
for thing in (1, 'foo', [1, 2], {'foo': 'bar'}):
self.assertEqual(thing, ser.deserialize_entity(None, thing))
    def _test_deserialize_entity_newer(self, obj_version, backported_to,
                                       my_version='1.6'):
        """Deserialize a primitive claiming obj_version while the
        local class is at my_version.

        When backported_to is None, no conductor backport must happen;
        otherwise the serializer must ask the conductor to backport to
        exactly that version and return its result.
        """
        ser = base.NovaObjectSerializer()
        ser._conductor = mock.Mock()
        ser._conductor.object_backport.return_value = 'backported'

        class MyTestObj(MyObj):
            VERSION = my_version

        obj = MyTestObj()
        # Force the advertised (wire) version independently of the
        # registered class version.
        obj.VERSION = obj_version
        primitive = obj.obj_to_primitive()
        result = ser.deserialize_entity(self.context, primitive)
        if backported_to is None:
            self.assertFalse(ser._conductor.object_backport.called)
        else:
            self.assertEqual('backported', result)
            ser._conductor.object_backport.assert_called_with(self.context,
                                                              primitive,
                                                              backported_to)
def test_deserialize_entity_newer_version_backports(self):
self._test_deserialize_entity_newer('1.25', '1.6')
def test_deserialize_entity_newer_revision_does_not_backport_zero(self):
self._test_deserialize_entity_newer('1.6.0', None)
def test_deserialize_entity_newer_revision_does_not_backport(self):
self._test_deserialize_entity_newer('1.6.1', None)
def test_deserialize_entity_newer_version_passes_revision(self):
self._test_deserialize_entity_newer('1.7', '1.6.1', '1.6.1')
def test_deserialize_dot_z_with_extra_stuff(self):
primitive = {'nova_object.name': 'MyObj',
'nova_object.namespace': 'nova',
'nova_object.version': '1.6.1',
'nova_object.data': {
'foo': 1,
'unexpected_thing': 'foobar'}}
ser = base.NovaObjectSerializer()
obj = ser.deserialize_entity(self.context, primitive)
self.assertEqual(1, obj.foo)
self.assertFalse(hasattr(obj, 'unexpected_thing'))
# NOTE(danms): The serializer is where the logic lives that
# avoids backports for cases where only a .z difference in
# the received object version is detected. As a result, we
# end up with a version of what we expected, effectively the
# .0 of the object.
self.assertEqual('1.6', obj.VERSION)
def test_object_serialization(self):
ser = base.NovaObjectSerializer()
obj = MyObj()
primitive = ser.serialize_entity(self.context, obj)
self.assertIn('nova_object.name', primitive)
obj2 = ser.deserialize_entity(self.context, primitive)
self.assertIsInstance(obj2, MyObj)
self.assertEqual(self.context, obj2._context)
def test_object_serialization_iterables(self):
ser = base.NovaObjectSerializer()
obj = MyObj()
for iterable in (list, tuple, set):
thing = iterable([obj])
primitive = ser.serialize_entity(self.context, thing)
self.assertEqual(1, len(primitive))
for item in primitive:
self.assertNotIsInstance(item, base.NovaObject)
thing2 = ser.deserialize_entity(self.context, primitive)
self.assertEqual(1, len(thing2))
for item in thing2:
self.assertIsInstance(item, MyObj)
# dict case
thing = {'key': obj}
primitive = ser.serialize_entity(self.context, thing)
self.assertEqual(1, len(primitive))
for item in primitive.itervalues():
self.assertNotIsInstance(item, base.NovaObject)
thing2 = ser.deserialize_entity(self.context, primitive)
self.assertEqual(1, len(thing2))
for item in thing2.itervalues():
self.assertIsInstance(item, MyObj)
# object-action updates dict case
thing = {'foo': obj.obj_to_primitive()}
primitive = ser.serialize_entity(self.context, thing)
self.assertEqual(thing, primitive)
thing2 = ser.deserialize_entity(self.context, thing)
self.assertIsInstance(thing2['foo'], base.NovaObject)
# NOTE(danms): The hashes in this list should only be changed if
# they come with a corresponding version bump in the affected
# objects
# Each entry is '<version>-<md5 of the mechanical fingerprint>' as
# computed by TestObjectVersions._get_fingerprint.  'Service' and
# 'ServiceList' used to appear twice (1.7/1.5 and 1.8/1.6); Python
# silently kept the later pair, so the dead duplicates are removed
# and only the effective values remain.
object_data = {
    'Agent': '1.0-c4ff8a833aee8ae44ab8aed1a171273d',
    'AgentList': '1.0-31f07426a729311a42ff7f6246e76e25',
    'Aggregate': '1.1-f5d477be06150529a9b2d27cc49030b5',
    'AggregateList': '1.2-4b02a285b8612bfb86a96ff80052fb0a',
    'BandwidthUsage': '1.2-a9d7c2ba54995e48ce38688c51c9416d',
    'BandwidthUsageList': '1.2-5b564cbfd5ae6e106443c086938e7602',
    'BlockDeviceMapping': '1.5-9968ffe513e7672484b0f528b034cd0f',
    'BlockDeviceMappingList': '1.6-ee2ed2eb3f3f2f54d573ccea0ff2eeaa',
    'ComputeNode': '1.9-d59bebd3176d86f0f7ea02086732a0d4',
    'ComputeNodeList': '1.9-4fdeaf7dce98f5736f0ed239c9265c65',
    'DNSDomain': '1.0-5bdc288d7c3b723ce86ede998fd5c9ba',
    'DNSDomainList': '1.0-cfb3e7e82be661501c31099523154db4',
    'EC2InstanceMapping': '1.0-627baaf4b12c9067200979bdc4558a99',
    'EC2SnapshotMapping': '1.0-26cf315be1f8abab4289d4147671c836',
    'EC2VolumeMapping': '1.0-2f8c3bf077c65a425294ec2b361c9143',
    'FixedIP': '1.7-2472964d39e50da67202109eb85cd173',
    'FixedIPList': '1.7-125de790b58cfb8c84ffc8c34db4a81e',
    'Flavor': '1.1-096cfd023c35d07542cf732fb29b45e4',
    'FlavorList': '1.1-a3d5551267cb8f62ff38ded125900721',
    'FloatingIP': '1.6-27eb68b7c9c620dd5f0561b5a3be0e82',
    'FloatingIPList': '1.7-f376f63ed99243f9d90841b7f6732bbf',
    'HVSpec': '1.0-c4d8377cc4fe519930e60c1d8265a142',
    'Instance': '1.17-972cae223db35e88bb184bdf8c197229',
    'InstanceAction': '1.1-6b1d0a6dbd522b5a83c20757ec659663',
    'InstanceActionEvent': '1.1-42dbdba74bd06e0619ca75cd3397cd1b',
    'InstanceActionEventList': '1.0-1d5cc958171d6ce07383c2ad6208318e',
    'InstanceActionList': '1.0-368410fdb8d69ae20c495308535d6266',
    'InstanceExternalEvent': '1.0-f1134523654407a875fd59b80f759ee7',
    'InstanceFault': '1.2-313438e37e9d358f3566c85f6ddb2d3e',
    'InstanceFaultList': '1.1-aeb598ffd0cd6aa61fca7adf0f5e900d',
    'InstanceGroup': '1.9-95ece99f092e8f4f88327cdbb44162c9',
    'InstanceGroupList': '1.6-c6b78f3c9d9080d33c08667e80589817',
    'InstanceInfoCache': '1.5-ef64b604498bfa505a8c93747a9d8b2f',
    'InstanceList': '1.13-179093360c48747a41694cc2f326d75d',
    'InstanceNUMACell': '1.2-5d2dfa36e9ecca9b63f24bf3bc958ea4',
    'InstanceNUMATopology': '1.1-86b95d263c4c68411d44c6741b8d2bb0',
    'InstancePCIRequest': '1.1-e082d174f4643e5756ba098c47c1510f',
    'InstancePCIRequests': '1.1-bc7c6684d8579ee49d6a3b8aef756918',
    'KeyPair': '1.1-3410f51950d052d861c11946a6ae621a',
    'KeyPairList': '1.0-71132a568cc5d078ba1748a9c02c87b8',
    'Migration': '1.1-67c47726c2c71422058cd9d149d6d3ed',
    'MigrationList': '1.1-8c5f678edc72a592d591a13b35e54353',
    'MyObj': '1.6-02b1e712b7ee334fa3fefe024c340977',
    'MyOwnedObject': '1.0-0f3d6c028543d7f3715d121db5b8e298',
    'Network': '1.2-2ea21ede5e45bb80e7b7ac7106915c4e',
    'NetworkList': '1.2-aa4ad23f035b97a41732ea8b3445fc5e',
    'NetworkRequest': '1.1-f31192f5a725017707f989585e12d7dc',
    'NetworkRequestList': '1.1-beeab521ac9450f1f5ef4eaa945a783c',
    'NUMACell': '1.2-cb9c3b08cc1c418d021492f788d04173',
    'NUMAPagesTopology': '1.0-97d93f70a68625b5f29ff63a40a4f612',
    'NUMATopology': '1.2-790f6bdff85bf6e5677f409f3a4f1c6a',
    'PciDevice': '1.3-e059641df10e85d464672c5183a9473b',
    'PciDeviceList': '1.1-38cbe2d3c23b9e46f7a74b486abcad85',
    'PciDevicePool': '1.0-d6ed1abe611c9947345a44155abe6f11',
    'PciDevicePoolList': '1.0-d31e08e0ff620a4df7cc2014b6c50da8',
    'Quotas': '1.2-36098cf2143e6535873c3fa3d6fe56f7',
    'QuotasNoOp': '1.2-164c628906b170fd946a7672e85e4935',
    'S3ImageMapping': '1.0-9225943a44a91ad0349b9fd8bd3f3ce2',
    'SecurityGroup': '1.1-bba0e72865e0953793e796571692453b',
    'SecurityGroupList': '1.0-528e6448adfeeb78921ebeda499ab72f',
    'SecurityGroupRule': '1.1-a9175baf7664439af1a16c2010b55576',
    'SecurityGroupRuleList': '1.1-667fca3a9928f23d2d10e61962c55f3c',
    'Service': '1.8-82bbfd46a744a9c89bc44b47a1b81683',
    'ServiceList': '1.6-f8bd332b71ff1c3a86b88b6070072fd4',
    'Tag': '1.0-a11531f4e4e3166eef6243d6d58a18bd',
    'TagList': '1.0-e89bf8c8055f1f1d654fb44f0abf1f53',
    'TestSubclassedObject': '1.6-87177ccbefd7a740a9e261f958e15b00',
    'VirtualInterface': '1.0-10fdac4c704102b6d57d6936d6d790d2',
    'VirtualInterfaceList': '1.0-accbf02628a8063c1d885077a2bf49b6',
    'VirtCPUTopology': '1.0-fc694de72e20298f7c6bab1083fd4563',
    }
# Maps each object name to the sub-objects it embeds and the child
# version each was last bumped against; presumably consumed by a
# relationship-consistency test later in the file -- TODO confirm.
object_relationships = {
    'BlockDeviceMapping': {'Instance': '1.17'},
    'ComputeNode': {'PciDevicePoolList': '1.0'},
    'FixedIP': {'Instance': '1.17', 'Network': '1.2',
                'VirtualInterface': '1.0',
                'FloatingIPList': '1.7'},
    'FloatingIP': {'FixedIP': '1.7'},
    'Instance': {'InstanceFault': '1.2',
                 'InstanceInfoCache': '1.5',
                 'InstanceNUMATopology': '1.1',
                 'PciDeviceList': '1.1',
                 'TagList': '1.0',
                 'SecurityGroupList': '1.0',
                 'InstancePCIRequests': '1.1'},
    'InstanceNUMACell': {'VirtCPUTopology': '1.0'},
    'MyObj': {'MyOwnedObject': '1.0'},
    'SecurityGroupRule': {'SecurityGroup': '1.1'},
    'Service': {'ComputeNode': '1.9'},
    'TestSubclassedObject': {'MyOwnedObject': '1.0'}
    }
class TestObjectVersions(test.TestCase):
    """Guard against unversioned changes to registered NovaObject classes.

    Computes a fingerprint (hash of fields + remotable method signatures)
    and a parent/child dependency tree for every registered object class,
    and compares them against the expected values stored at module level.
    Any mismatch means a version bump (and expected-data update) is needed.

    NOTE(review): this is Python 2 only code (``list.sort()`` on the list
    returned by ``dict.items()``, ``inspect.getargspec``, the ``file()``
    builtin) — it will not run unmodified on Python 3.
    """
    def _find_remotable_method(self, cls, thing, parent_was_remotable=False):
        """Follow a chain of remotable things down to the original function."""
        if isinstance(thing, classmethod):
            # Unwrap the classmethod to inspect the underlying callable.
            return self._find_remotable_method(cls, thing.__get__(None, cls))
        elif inspect.ismethod(thing) and hasattr(thing, 'remotable'):
            return self._find_remotable_method(cls, thing.original_fn,
                                               parent_was_remotable=True)
        elif parent_was_remotable:
            # We must be the first non-remotable thing underneath a stack of
            # remotable things (i.e. the actual implementation method)
            return thing
        else:
            # This means the top-level thing never hit a remotable layer
            return None
    def _get_fingerprint(self, obj_name):
        """Return a '<VERSION>-<md5>' fingerprint for the named object class."""
        obj_class = base.NovaObject._obj_classes[obj_name][0]
        fields = obj_class.fields.items()
        fields.sort()
        methods = []
        # Collect (name, argspec) for every remotable method so signature
        # changes alter the fingerprint.
        for name in dir(obj_class):
            thing = getattr(obj_class, name)
            if inspect.ismethod(thing) or isinstance(thing, classmethod):
                method = self._find_remotable_method(obj_class, thing)
                if method:
                    methods.append((name, inspect.getargspec(method)))
        methods.sort()
        # NOTE(danms): Things that need a version bump are any fields
        # and their types, or the signatures of any remotable methods.
        # Of course, these are just the mechanical changes we can detect,
        # but many other things may require a version bump (method behavior
        # and return value changes, for example).
        if hasattr(obj_class, 'child_versions'):
            relevant_data = (fields, methods, obj_class.child_versions)
        else:
            relevant_data = (fields, methods)
        fingerprint = '%s-%s' % (obj_class.VERSION,
                                 hashlib.md5(str(relevant_data)).hexdigest())
        return fingerprint
    def test_versions(self):
        """Compare computed fingerprints against the expected object_data."""
        fingerprints = {}
        for obj_name in base.NovaObject._obj_classes:
            fingerprints[obj_name] = self._get_fingerprint(obj_name)
        if os.getenv('GENERATE_HASHES'):
            # Developer convenience: dump the freshly computed hashes and
            # abort so they can be pasted into object_data.
            file('object_hashes.txt', 'w').write(
                pprint.pformat(fingerprints))
            raise test.TestingException(
                'Generated hashes in object_hashes.txt')
        stored = set(object_data.items())
        computed = set(fingerprints.items())
        changed = stored.symmetric_difference(computed)
        expected = {}
        actual = {}
        # Report only the objects whose fingerprints differ.
        for name, hash in changed:
            expected[name] = object_data.get(name)
            actual[name] = fingerprints.get(name)
        self.assertEqual(expected, actual,
                         'Some objects have changed; please make sure the '
                         'versions have been bumped, and then update their '
                         'hashes here.')
    def _build_tree(self, tree, obj_class):
        """Recursively record obj_class's object-typed fields in `tree`."""
        obj_name = obj_class.obj_name()
        if obj_name in tree:
            return
        for name, field in obj_class.fields.items():
            if isinstance(field._type, fields.Object):
                sub_obj_name = field._type._obj_name
                sub_obj_class = base.NovaObject._obj_classes[sub_obj_name][0]
                self._build_tree(tree, sub_obj_class)
                tree.setdefault(obj_name, {})
                tree[obj_name][sub_obj_name] = sub_obj_class.VERSION
    def test_relationships(self):
        """Compare the computed dependency tree against object_relationships."""
        tree = {}
        for obj_name in base.NovaObject._obj_classes.keys():
            self._build_tree(tree, base.NovaObject._obj_classes[obj_name][0])
        stored = set([(x, str(y)) for x, y in object_relationships.items()])
        computed = set([(x, str(y)) for x, y in tree.items()])
        changed = stored.symmetric_difference(computed)
        expected = {}
        actual = {}
        for name, deps in changed:
            expected[name] = object_relationships.get(name)
            actual[name] = tree.get(name)
        self.assertEqual(expected, actual,
                         'Some objects have changed dependencies. '
                         'Please make sure to bump the versions of '
                         'parent objects and provide a rule in their '
                         'obj_make_compatible() routines to backlevel '
                         'the child object.')
    def test_obj_make_compatible(self):
        # Iterate all object classes and verify that we can run
        # obj_make_compatible with every older version than current.
        # This doesn't actually test the data conversions, but it at least
        # makes sure the method doesn't blow up on something basic like
        # expecting the wrong version format.
        for obj_name in base.NovaObject._obj_classes:
            obj_class = base.NovaObject._obj_classes[obj_name][0]
            version = utils.convert_version_to_tuple(obj_class.VERSION)
            for n in range(version[1]):
                test_version = '%d.%d' % (version[0], n)
                LOG.info('testing obj: %s version: %s' %
                         (obj_name, test_version))
                obj_class().obj_to_primitive(target_version=test_version)
    def test_obj_relationships_in_order(self):
        # Iterate all object classes and verify that the entries in each
        # obj_relationships mapping are ordered: the parent version must be
        # strictly increasing and the child version non-decreasing.
        for obj_name in base.NovaObject._obj_classes:
            obj_class = base.NovaObject._obj_classes[obj_name][0]
            for field, versions in obj_class.obj_relationships.items():
                last_my_version = (0, 0)
                last_child_version = (0, 0)
                for my_version, child_version in versions:
                    _my_version = utils.convert_version_to_tuple(my_version)
                    _ch_version = utils.convert_version_to_tuple(child_version)
                    self.assertTrue((last_my_version < _my_version
                                     and last_child_version <= _ch_version),
                                    'Object %s relationship '
                                    '%s->%s for field %s is out of order' % (
                                        obj_name, my_version, child_version,
                                        field))
                    last_my_version = _my_version
                    last_child_version = _ch_version
| |
# -*- coding: utf-8 -*-
# Copyright (C) 2006 Joe Wreschnig
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
"""Utility classes for Mutagen.
You should not rely on the interfaces here being stable. They are
intended for internal use in Mutagen only.
"""
import struct
import codecs
from fnmatch import fnmatchcase
from ._compat import chr_, text_type, PY2, iteritems, iterbytes, \
integer_types, xrange
class MutagenError(Exception):
    """Root of the exception hierarchy for all custom mutagen errors.

    .. versionadded:: 1.25
    """
def total_ordering(cls):
    """Class decorator deriving __le__/__gt__/__ge__/__ne__ from
    __eq__ and __lt__, both of which the class must define itself.
    """
    assert "__eq__" in cls.__dict__
    assert "__lt__" in cls.__dict__

    def le(self, other):
        return self == other or self < other

    def gt(self, other):
        return not (self == other or self < other)

    def ge(self, other):
        return not self < other

    def ne(self, other):
        return not self.__eq__(other)

    cls.__le__ = le
    cls.__gt__ = gt
    cls.__ge__ = ge
    cls.__ne__ = ne
    return cls
def hashable(cls):
    """Class decorator asserting the class is hashable.

    Requires a working __eq__ and __hash__ (checked under both
    Python 2 and Python 3 semantics) and derives __ne__ from __eq__.
    """
    # Python 2: __hash__ must be defined on the class itself.
    assert "__hash__" in cls.__dict__
    # Python 3: defining __eq__ sets __hash__ to None unless overridden.
    assert cls.__dict__["__hash__"] is not None
    assert "__eq__" in cls.__dict__

    def ne(self, other):
        return not self.__eq__(other)

    cls.__ne__ = ne
    return cls
def enum(cls):
    """Class decorator turning a plain class of int constants into an
    int-derived enum type.

    Every ALL-CAPS integer attribute of *cls* becomes an instance of
    the new type, and __repr__ shows the symbolic name when one exists.
    """
    assert cls.__bases__ == (object,)

    namespace = dict(cls.__dict__)
    new_type = type(cls.__name__, (int,), namespace)
    new_type.__module__ = cls.__module__

    value_to_name = {}
    for key, value in iteritems(namespace):
        if key.upper() == key and isinstance(value, integer_types):
            setattr(new_type, key, new_type(value))
            value_to_name[value] = key

    def repr_(self):
        if self in value_to_name:
            return "%s.%s" % (type(self).__name__, value_to_name[self])
        return "%s(%s)" % (type(self).__name__, self)

    setattr(new_type, "__repr__", repr_)
    return new_type
@total_ordering
class DictMixin(object):
    """Implement the dict API using keys() and __*item__ methods.

    Similar to UserDict.DictMixin, this takes a class that defines
    __getitem__, __setitem__, __delitem__, and keys(), and turns it
    into a full dict-like object.

    UserDict.DictMixin is not suitable for this purpose because it's
    an old-style class.

    This class is not optimized for very large dictionaries; many
    functions have linear memory requirements. I recommend you
    override some of these functions if speed is required.
    """
    def __iter__(self):
        return iter(self.keys())
    def __has_key(self, key):
        # EAFP membership test implemented via __getitem__.
        try:
            self[key]
        except KeyError:
            return False
        else:
            return True
    if PY2:
        has_key = __has_key
        __contains__ = __has_key
    if PY2:
        iterkeys = lambda self: iter(self.keys())
    def values(self):
        # Linear: materializes one value per key.
        return [self[k] for k in self.keys()]
    if PY2:
        itervalues = lambda self: iter(self.values())
    def items(self):
        return list(zip(self.keys(), self.values()))
    if PY2:
        iteritems = lambda s: iter(s.items())
    def clear(self):
        # Copy the key list first: we mutate while deleting.
        for key in list(self.keys()):
            self.__delitem__(key)
    def pop(self, key, *args):
        # Extra positional argument (at most one) is the default value.
        if len(args) > 1:
            raise TypeError("pop takes at most two arguments")
        try:
            value = self[key]
        except KeyError:
            if args:
                return args[0]
            else:
                raise
        del(self[key])
        return value
    def popitem(self):
        # Grab an arbitrary key; the for/else raises on an empty mapping.
        for key in self.keys():
            break
        else:
            raise KeyError("dictionary is empty")
        return key, self.pop(key)
    def update(self, other=None, **kwargs):
        if other is None:
            self.update(kwargs)
            other = {}
        # Mapping-style input first; fall back to an iterable of pairs.
        try:
            for key, value in other.items():
                self.__setitem__(key, value)
        except AttributeError:
            for key, value in other:
                self[key] = value
    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default
    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default
    def __repr__(self):
        return repr(dict(self.items()))
    def __eq__(self, other):
        return dict(self.items()) == other
    def __lt__(self, other):
        return dict(self.items()) < other
    # Identity-based hashing; value-based __eq__ is intentionally not
    # paired with a value-based hash here.
    __hash__ = object.__hash__
    def __len__(self):
        return len(self.keys())
class DictProxy(DictMixin):
    """A DictMixin backed by an internal plain dict."""

    def __init__(self, *args, **kwargs):
        self.__data = {}
        super(DictProxy, self).__init__(*args, **kwargs)

    def __getitem__(self, key):
        return self.__data[key]

    def __setitem__(self, key, value):
        self.__data[key] = value

    def __delitem__(self, key):
        del self.__data[key]

    def keys(self):
        return self.__data.keys()
def _fill_cdata(cls):
"""Add struct pack/unpack functions"""
funcs = {}
for key, name in [("b", "char"), ("h", "short"),
("i", "int"), ("q", "longlong")]:
for echar, esuffix in [("<", "le"), (">", "be")]:
esuffix = "_" + esuffix
for unsigned in [True, False]:
s = struct.Struct(echar + (key.upper() if unsigned else key))
get_wrapper = lambda f: lambda *a, **k: f(*a, **k)[0]
unpack = get_wrapper(s.unpack)
unpack_from = get_wrapper(s.unpack_from)
def get_unpack_from(s):
def unpack_from(data, offset=0):
return s.unpack_from(data, offset)[0], offset + s.size
return unpack_from
unpack_from = get_unpack_from(s)
pack = s.pack
prefix = "u" if unsigned else ""
if s.size == 1:
esuffix = ""
bits = str(s.size * 8)
funcs["%s%s%s" % (prefix, name, esuffix)] = unpack
funcs["%sint%s%s" % (prefix, bits, esuffix)] = unpack
funcs["%s%s%s_from" % (prefix, name, esuffix)] = unpack_from
funcs["%sint%s%s_from" % (prefix, bits, esuffix)] = unpack_from
funcs["to_%s%s%s" % (prefix, name, esuffix)] = pack
funcs["to_%sint%s%s" % (prefix, bits, esuffix)] = pack
for key, func in iteritems(funcs):
setattr(cls, key, staticmethod(func))
class cdata(object):
    """C character buffer to Python numeric type conversions.

    For each size/sign/endianness combination (populated below by
    _fill_cdata):

        uint32_le(data)/to_uint32_le(num)/uint32_le_from(data, offset=0)
    """

    # Re-export struct.error so callers can catch cdata.error.
    from struct import error
    error = error

    # 256-entry table mapping each byte value to its bit-reversed form.
    bitswap = b''.join(
        chr_(sum(((val >> i) & 1) << (7 - i) for i in range(8)))
        for val in range(256))

    # True if bit `n` (counting from the LSB) of `value` is set.
    test_bit = staticmethod(lambda value, n: bool((value >> n) & 1))

# Populate cdata with the generated struct pack/unpack helpers.
_fill_cdata(cdata)
def lock(fileobj):
    """Lock a file object 'safely'.

    A failure to lock because the platform doesn't support fcntl or
    filesystem locks is not considered a failure. This call blocks.

    Returns True when the lock was acquired, False when locking is
    unsupported; may still raise in more extreme circumstances (full
    lock table, invalid file).
    """
    try:
        import fcntl
    except ImportError:
        # No fcntl on this platform (e.g. Windows) -- not an error.
        return False

    try:
        fcntl.lockf(fileobj, fcntl.LOCK_EX)
    except IOError:
        # FIXME: There's possibly a lot of complicated
        # logic that needs to go here in case the IOError
        # is EACCES or EAGAIN.
        return False
    return True
def unlock(fileobj):
    """Release a lock previously acquired via lock().

    Only call this when lock() returned True; an error here means a
    mismatched lock/unlock pair, so it is deliberately not swallowed.
    """
    from fcntl import LOCK_UN, lockf
    lockf(fileobj, LOCK_UN)
def insert_bytes(fobj, size, offset, BUFFER_SIZE=2 ** 16):
    """Insert size bytes of empty space starting at offset.

    fobj must be an open file object, open rb+ or
    equivalent. Mutagen tries to use mmap to resize the file, but
    falls back to a significantly slower method if mmap fails.

    :param fobj: writable, seekable binary file object
    :param size: number of zero bytes to insert (must be > 0)
    :param offset: position at which the gap starts (must be >= 0)
    :param BUFFER_SIZE: chunk size for the non-mmap fallback
    """
    assert 0 < size
    assert 0 <= offset
    locked = False
    fobj.seek(0, 2)
    filesize = fobj.tell()
    movesize = filesize - offset
    # Grow the file by `size` zero bytes at the end first.
    fobj.write(b'\x00' * size)
    fobj.flush()
    try:
        try:
            import mmap
            file_map = mmap.mmap(fobj.fileno(), filesize + size)
            try:
                # Shift everything after `offset` up by `size` in one move.
                file_map.move(offset + size, offset, movesize)
            finally:
                file_map.close()
        except (ValueError, EnvironmentError, ImportError):
            # handle broken mmap scenarios
            locked = lock(fobj)
            fobj.truncate(filesize)
            fobj.seek(0, 2)
            padsize = size
            # Don't generate an enormous string if we need to pad
            # the file out several megs.
            while padsize:
                addsize = min(BUFFER_SIZE, padsize)
                fobj.write(b"\x00" * addsize)
                padsize -= addsize
            # Move the tail in chunks, back to front, so chunks never
            # overwrite data that still needs to be read.
            fobj.seek(filesize, 0)
            while movesize:
                # At the start of this loop, fobj is pointing at the end
                # of the data we need to move, which is of movesize length.
                thismove = min(BUFFER_SIZE, movesize)
                # Seek back however much we're going to read this frame.
                fobj.seek(-thismove, 1)
                nextpos = fobj.tell()
                # Read it, so we're back at the end.
                data = fobj.read(thismove)
                # Seek back to where we need to write it.
                fobj.seek(-thismove + size, 1)
                # Write it.
                fobj.write(data)
                # And seek back to the end of the unmoved data.
                fobj.seek(nextpos)
                movesize -= thismove
            fobj.flush()
    finally:
        if locked:
            unlock(fobj)
def delete_bytes(fobj, size, offset, BUFFER_SIZE=2 ** 16):
    """Delete size bytes of empty space starting at offset.

    fobj must be an open file object, open rb+ or
    equivalent. Mutagen tries to use mmap to resize the file, but
    falls back to a significantly slower method if mmap fails.

    :param fobj: writable, seekable binary file object
    :param size: number of bytes to remove (must be > 0)
    :param offset: position at which the removed range starts (>= 0)
    :param BUFFER_SIZE: chunk size for the non-mmap fallback
    """
    locked = False
    assert 0 < size
    assert 0 <= offset
    fobj.seek(0, 2)
    filesize = fobj.tell()
    movesize = filesize - offset - size
    # The removed range must lie entirely within the file.
    assert 0 <= movesize
    try:
        if movesize > 0:
            fobj.flush()
            try:
                import mmap
                file_map = mmap.mmap(fobj.fileno(), filesize)
                try:
                    # Shift the tail down over the deleted range.
                    file_map.move(offset, offset + size, movesize)
                finally:
                    file_map.close()
            except (ValueError, EnvironmentError, ImportError):
                # handle broken mmap scenarios
                locked = lock(fobj)
                # Copy the tail down chunk by chunk, front to back.
                fobj.seek(offset + size)
                buf = fobj.read(BUFFER_SIZE)
                while buf:
                    fobj.seek(offset)
                    fobj.write(buf)
                    offset += len(buf)
                    fobj.seek(offset + size)
                    buf = fobj.read(BUFFER_SIZE)
        fobj.truncate(filesize - size)
        fobj.flush()
    finally:
        if locked:
            unlock(fobj)
def dict_match(d, key, default=None):
    """Dictionary lookup treating the stored keys as fnmatch patterns.

    An exact match is preferred (unless the key contains "[", which
    could be a pattern character class); otherwise the first pattern
    matching *key* wins. Returns *default* when nothing matches.
    """
    if "[" not in key and key in d:
        return d[key]
    for pattern, value in iteritems(d):
        if fnmatchcase(key, pattern):
            return value
    return default
def decode_terminated(data, encoding, strict=True):
    """Split *data* at the first NULL terminator and decode the prefix.

    Returns a (decoded_text, remaining_bytes_after_the_NULL) tuple.

    Raises UnicodeError if the data can't be decoded and LookupError if
    the encoding is unknown. If the data isn't NULL terminated (even if
    otherwise valid) raises ValueError, unless strict is False, in which
    case the decoded string is returned anyway (with b"" as remainder).
    """
    codec_info = codecs.lookup(encoding)
    # Compare by the codec's canonical name, not the caller's spelling.
    encoding = codec_info.name

    # Fast path for encodings where the terminator is a single 0x00 byte.
    if encoding in ("utf-8", "iso8859-1"):
        index = data.find(b"\x00")
        if index != -1:
            return data[:index].decode(encoding), data[index + 1:]
        # Decode first so UnicodeError takes precedence over ValueError,
        # matching the slow path's error ordering.
        decoded = data.decode(encoding)
        if strict:
            raise ValueError("not null terminated")
        return decoded, b""

    # Generic path: feed the decoder byte by byte until a NULL appears.
    decoder = codec_info.incrementaldecoder()
    chars = []
    for i, byte in enumerate(iterbytes(data)):
        char = decoder.decode(byte)
        if char == u"\x00":
            return u"".join(chars), data[i + 1:]
        chars.append(char)
    # No terminator found: flush the decoder, then decide on strictness.
    chars.append(decoder.decode(b"", True))
    if strict:
        raise ValueError("not null terminated")
    return u"".join(chars), b""
def split_escape(string, sep, maxsplit=None, escape_char="\\"):
    """Like unicode/str/bytes.split, but the separator may be escaped.

    Given unicode/str/bytes input, returns a list of the same type.
    *escape_char* before *sep* keeps the separator literal; an escape
    before anything else (other than another escape) is kept verbatim.
    """
    assert len(sep) == 1
    assert len(escape_char) == 1

    if isinstance(string, bytes):
        if isinstance(escape_char, text_type):
            escape_char = escape_char.encode("ascii")
        iter_ = iterbytes
    else:
        iter_ = iter

    if maxsplit is None:
        maxsplit = len(string)

    empty = string[:0]
    parts = []
    current = empty
    pending_escape = False
    for char in iter_(string):
        if pending_escape:
            # Unrecognized escapes keep the escape character itself.
            if char != escape_char and char != sep:
                current += escape_char
            current += char
            pending_escape = False
        elif char == escape_char:
            pending_escape = True
        elif char == sep and len(parts) < maxsplit:
            parts.append(current)
            current = empty
        else:
            current += char
    parts.append(current)
    return parts
class BitReaderError(Exception):
    # Raised by BitReader when the underlying file runs out of data.
    pass
class BitReader(object):
    """Read an MSB-first bit stream from a file-like object."""

    def __init__(self, fileobj):
        self._fileobj = fileobj
        self._buffer = 0
        self._bits = 0
        self._pos = fileobj.tell()

    def bits(self, count):
        """Read `count` bits and return them as an unsigned int, MSB first.

        May raise BitReaderError if not enough data could be read or
        IOError by the underlying file object.
        """
        if count < 0:
            raise ValueError
        missing = count - self._bits
        if missing > 0:
            # Pull in just enough whole bytes to satisfy the request.
            n_bytes = (missing + 7) // 8
            data = self._fileobj.read(n_bytes)
            if len(data) != n_bytes:
                raise BitReaderError("not enough data")
            for byte in bytearray(data):
                self._buffer = (self._buffer << 8) | byte
            self._bits += n_bytes * 8

        self._bits -= count
        value = self._buffer >> self._bits
        self._buffer &= (1 << self._bits) - 1
        assert self._bits < 8
        return value

    def bytes(self, count):
        """Return `count` bytes of data; works unaligned as well."""
        if count < 0:
            raise ValueError
        if self._bits == 0:
            # Aligned: read straight from the file.
            data = self._fileobj.read(count)
            if len(data) != count:
                raise BitReaderError("not enough data")
            return data
        return bytes(bytearray(self.bits(8) for _ in xrange(count)))

    def skip(self, count):
        """Skip `count` bits.

        Might raise BitReaderError if there wasn't enough data to skip,
        but might also fail on the next bits() instead.
        """
        if count < 0:
            raise ValueError
        if count <= self._bits:
            self.bits(count)
            return
        # Drop buffered bits, seek over whole bytes, consume the rest.
        count -= self.align()
        whole_bytes, count = divmod(count, 8)
        self._fileobj.seek(whole_bytes, 1)
        self.bits(count)

    def get_position(self):
        """Number of bits read or skipped so far."""
        return (self._fileobj.tell() - self._pos) * 8 - self._bits

    def align(self):
        """Advance to the next byte boundary; return the bits discarded."""
        skipped = self._bits
        self._buffer = 0
        self._bits = 0
        return skipped

    def is_aligned(self):
        """True if on a byte boundary with nothing buffered."""
        return self._bits == 0
def get_fileobj(ftype, fname_or_obj):
    """Return a file object opened in mode rb+.

    fname_or_obj must be either None (fall back to ftype.filename),
    a filename, or a file-like object already opened in mode rb+
    (returned unchanged).

    ftype must be an object of a class child of FileType (defined in
    _file.py); it is only consulted for its `filename` attribute when
    fname_or_obj is None.
    """
    if fname_or_obj is None:
        fname_or_obj = ftype.filename
    # isinstance instead of an exact type() comparison so str
    # subclasses are treated as filenames too.
    if isinstance(fname_or_obj, str):
        return open(fname_or_obj, 'rb+')
    return fname_or_obj
| |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for constrained_optimization.python.swap_regret_optimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.constrained_optimization.python import swap_regret_optimizer
from tensorflow.contrib.constrained_optimization.python import test_util
from tensorflow.python.ops import standard_ops
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
class AdditiveSwapRegretOptimizerWrapper(
    swap_regret_optimizer.AdditiveSwapRegretOptimizer):
  """AdditiveSwapRegretOptimizer that records its stochastic matrix.

  Behaves exactly like AdditiveSwapRegretOptimizer, but remembers the
  most recent result of _stochastic_matrix() so that tests can verify
  the stochastic matrices take on their expected values.
  """

  def __init__(self, optimizer, constraint_optimizer=None):
    """Same as AdditiveSwapRegretOptimizer.__init__()."""
    super(AdditiveSwapRegretOptimizerWrapper, self).__init__(
        optimizer=optimizer, constraint_optimizer=constraint_optimizer)
    self._cached_stochastic_matrix = None

  @property
  def stochastic_matrix(self):
    """The matrix cached by the last _stochastic_matrix() call."""
    return self._cached_stochastic_matrix

  def _stochastic_matrix(self, state):
    """Delegates to the parent class, caching the result for tests."""
    matrix = super(AdditiveSwapRegretOptimizerWrapper,
                   self)._stochastic_matrix(state)
    self._cached_stochastic_matrix = matrix
    return matrix
class MultiplicativeSwapRegretOptimizerWrapper(
    swap_regret_optimizer.MultiplicativeSwapRegretOptimizer):
  """Testing wrapper class around MultiplicativeSwapRegretOptimizer.

  This class is identical to MultiplicativeSwapRegretOptimizer, except that it
  caches the internal optimization state when _stochastic_matrix() is called,
  so that we can test that the stochastic matrices take on their expected
  values.
  """

  def __init__(self,
               optimizer,
               constraint_optimizer=None,
               minimum_multiplier_radius=None,
               initial_multiplier_radius=None):
    """Same as MultiplicativeSwapRegretOptimizer.__init__()."""
    # BUG FIX: minimum_multiplier_radius was previously ignored and
    # hard-coded to 1e-3 in the super() call. Honor the argument when
    # given, keeping 1e-3 as the default so existing callers (which
    # never pass it) see identical behavior.
    if minimum_multiplier_radius is None:
      minimum_multiplier_radius = 1e-3
    super(MultiplicativeSwapRegretOptimizerWrapper, self).__init__(
        optimizer=optimizer,
        constraint_optimizer=constraint_optimizer,
        minimum_multiplier_radius=minimum_multiplier_radius,
        initial_multiplier_radius=initial_multiplier_radius)
    self._cached_stochastic_matrix = None

  @property
  def stochastic_matrix(self):
    """The matrix cached by the last _stochastic_matrix() call."""
    return self._cached_stochastic_matrix

  def _stochastic_matrix(self, state):
    """Delegates to the parent class, caching the result for tests."""
    self._cached_stochastic_matrix = super(
        MultiplicativeSwapRegretOptimizerWrapper,
        self)._stochastic_matrix(state)
    return self._cached_stochastic_matrix
class SwapRegretOptimizerTest(test.TestCase):
  """Unit tests for the swap-regret constrained optimizers.

  Covers the power-method eigenvector routine, the two stochastic-matrix
  projection routines, and the per-step matrix updates of both the
  additive and multiplicative optimizers against values precomputed with
  an independent numpy/python implementation.
  """

  def test_maximum_eigenvector_power_method(self):
    """Tests power method routine on some known left-stochastic matrices."""
    matrix1 = np.matrix([[0.6, 0.1, 0.1], [0.0, 0.6, 0.9], [0.4, 0.3, 0.0]])
    matrix2 = np.matrix([[0.4, 0.4, 0.2], [0.2, 0.1, 0.5], [0.4, 0.5, 0.3]])
    with self.cached_session() as session:
      eigenvector1 = session.run(
          swap_regret_optimizer._maximal_eigenvector_power_method(
              standard_ops.constant(matrix1)))
      eigenvector2 = session.run(
          swap_regret_optimizer._maximal_eigenvector_power_method(
              standard_ops.constant(matrix2)))
    # Check that eigenvector1 and eigenvector2 are eigenvectors of matrix1 and
    # matrix2 (respectively) with associated eigenvalue 1.
    matrix_eigenvector1 = np.tensordot(matrix1, eigenvector1, axes=1)
    matrix_eigenvector2 = np.tensordot(matrix2, eigenvector2, axes=1)
    self.assertAllClose(eigenvector1, matrix_eigenvector1, rtol=0, atol=1e-6)
    self.assertAllClose(eigenvector2, matrix_eigenvector2, rtol=0, atol=1e-6)

  def test_project_stochastic_matrix_wrt_euclidean_norm(self):
    """Tests Euclidean projection routine on some known values."""
    matrix = standard_ops.constant([[-0.1, -0.1, 0.4], [-0.8, 0.4, 1.2],
                                    [-0.3, 0.1, 0.2]])
    expected_projected_matrix = np.array([[0.6, 0.1, 0.1], [0.0, 0.6, 0.9],
                                          [0.4, 0.3, 0.0]])
    with self.cached_session() as session:
      projected_matrix = session.run(
          swap_regret_optimizer._project_stochastic_matrix_wrt_euclidean_norm(
              matrix))
    self.assertAllClose(
        expected_projected_matrix, projected_matrix, rtol=0, atol=1e-6)

  def test_project_log_stochastic_matrix_wrt_kl_divergence(self):
    """Tests KL-divergence projection routine on some known values."""
    matrix = standard_ops.constant([[0.2, 0.8, 0.6], [0.1, 0.2, 1.5],
                                    [0.2, 1.0, 0.9]])
    expected_projected_matrix = np.array([[0.4, 0.4, 0.2], [0.2, 0.1, 0.5],
                                          [0.4, 0.5, 0.3]])
    with self.cached_session() as session:
      # The projection operates in log space; exponentiate to compare.
      projected_matrix = session.run(
          standard_ops.exp(
              swap_regret_optimizer.
              _project_log_stochastic_matrix_wrt_kl_divergence(
                  standard_ops.log(matrix))))
    self.assertAllClose(
        expected_projected_matrix, projected_matrix, rtol=0, atol=1e-6)

  def test_additive_swap_regret_optimizer(self):
    """Tests that the stochastic matrices update as expected."""
    minimization_problem = test_util.ConstantMinimizationProblem(
        np.array([0.6, -0.1, 0.4]))
    optimizer = AdditiveSwapRegretOptimizerWrapper(
        gradient_descent.GradientDescentOptimizer(1.0))
    train_op = optimizer.minimize_constrained(minimization_problem)
    # Calculated using a numpy+python implementation of the algorithm.
    expected_matrices = [
        np.array([[1.0, 1.0, 1.0, 1.0], [0.0, 0.0, 0.0, 0.0],
                  [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]]),
        np.array([[0.66666667, 1.0, 1.0, 1.0], [0.26666667, 0.0, 0.0, 0.0],
                  [0.0, 0.0, 0.0, 0.0], [0.06666667, 0.0, 0.0, 0.0]]),
        np.array([[0.41666667, 0.93333333, 1.0,
                   0.98333333], [0.46666667, 0.05333333, 0.0,
                                 0.01333333], [0.0, 0.0, 0.0, 0.0],
                  [0.11666667, 0.01333333, 0.0, 0.00333333]]),
    ]
    matrices = []
    with self.cached_session() as session:
      session.run(standard_ops.global_variables_initializer())
      # Snapshot the cached matrix before each training step.
      while len(matrices) < len(expected_matrices):
        matrices.append(session.run(optimizer.stochastic_matrix))
        session.run(train_op)
    for expected, actual in zip(expected_matrices, matrices):
      self.assertAllClose(expected, actual, rtol=0, atol=1e-6)

  def test_multiplicative_swap_regret_optimizer(self):
    """Tests that the stochastic matrices update as expected."""
    minimization_problem = test_util.ConstantMinimizationProblem(
        np.array([0.6, -0.1, 0.4]))
    optimizer = MultiplicativeSwapRegretOptimizerWrapper(
        gradient_descent.GradientDescentOptimizer(1.0),
        initial_multiplier_radius=0.8)
    train_op = optimizer.minimize_constrained(minimization_problem)
    # Calculated using a numpy+python implementation of the algorithm.
    expected_matrices = [
        np.array([[0.4, 0.4, 0.4, 0.4], [0.2, 0.2, 0.2, 0.2],
                  [0.2, 0.2, 0.2, 0.2], [0.2, 0.2, 0.2, 0.2]]),
        np.array([[0.36999014, 0.38528351, 0.38528351, 0.38528351], [
            0.23517483, 0.21720297, 0.21720297, 0.21720297
        ], [0.17774131, 0.18882719, 0.18882719, 0.18882719],
                  [0.21709373, 0.20868632, 0.20868632, 0.20868632]]),
        np.array([[0.33972109, 0.36811863, 0.37118462, 0.36906575], [
            0.27114826, 0.23738228, 0.23376693, 0.23626491
        ], [0.15712313, 0.17641793, 0.17858959, 0.17708679],
                  [0.23200752, 0.21808115, 0.21645886, 0.21758255]]),
    ]
    matrices = []
    with self.cached_session() as session:
      session.run(standard_ops.global_variables_initializer())
      # Snapshot the cached matrix before each training step.
      while len(matrices) < len(expected_matrices):
        matrices.append(session.run(optimizer.stochastic_matrix))
        session.run(train_op)
    for expected, actual in zip(expected_matrices, matrices):
      self.assertAllClose(expected, actual, rtol=0, atol=1e-6)
if __name__ == '__main__':
  # Run the test suite when this file is executed directly.
  test.main()
| |
# Copyright [2015] Hewlett-Packard Development Company, L.P.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from mock import Mock
from mock import patch
from novaclient import exceptions as nova_exceptions
from trove.cluster.models import Cluster
from trove.cluster.models import ClusterTasks
from trove.cluster.models import DBCluster
from trove.common import cfg
from trove.common import exception
from trove.common import remote
from trove.common.strategies.cluster.experimental.redis import api as redis_api
from trove.instance import models as inst_models
from trove.instance.models import DBInstance
from trove.instance.models import InstanceTasks
from trove.quota.quota import QUOTAS
from trove.taskmanager import api as task_api
from trove.tests.unittests import trove_testtools
# Module-level handle to the global trove configuration object.
CONF = cfg.CONF
class FakeOptGroup(object):
    """Stand-in for a datastore option group with cluster settings."""

    def __init__(self, cluster_member_count=3,
                 volume_support=True, device_path='/dev/vdb'):
        # Mirror the option-group attributes the Redis strategy reads.
        self.device_path = device_path
        self.volume_support = volume_support
        self.cluster_member_count = cluster_member_count
class ClusterTest(trove_testtools.TestCase):
    def setUp(self):
        """Build a RedisCluster with DB access and the task API mocked out."""
        super(ClusterTest, self).setUp()
        self.cluster_id = str(uuid.uuid4())
        self.cluster_name = "Cluster" + self.cluster_id
        self.tenant_id = "23423432"
        self.dv_id = "1"
        self.db_info = DBCluster(ClusterTasks.NONE,
                                 id=self.cluster_id,
                                 name=self.cluster_name,
                                 tenant_id=self.tenant_id,
                                 datastore_version_id=self.dv_id,
                                 task_id=ClusterTasks.NONE._code)
        # Avoid RPC: the task-manager client is always mocked.
        self.get_client_patch = patch.object(task_api.API, 'get_client')
        self.get_client_mock = self.get_client_patch.start()
        self.addCleanup(self.get_client_patch.stop)
        # DBCluster.create always returns the canned db_info record.
        self.dbcreate_patch = patch.object(DBCluster, 'create',
                                           return_value=self.db_info)
        self.dbcreate_mock = self.dbcreate_patch.start()
        self.addCleanup(self.dbcreate_patch.stop)
        self.context = trove_testtools.TroveTestContext(self)
        self.datastore = Mock()
        self.dv = Mock()
        self.dv.manager = "redis"
        self.datastore_version = self.dv
        self.cluster = redis_api.RedisCluster(self.context, self.db_info,
                                              self.datastore,
                                              self.datastore_version)
        # Canned three-node request payloads, with and without volumes.
        self.instances_w_volumes = [{'volume_size': 1,
                                     'flavor_id': '1234'}] * 3
        self.instances_no_volumes = [{'flavor_id': '1234'}] * 3
    def tearDown(self):
        """No extra cleanup; patches are undone via addCleanup in setUp."""
        super(ClusterTest, self).tearDown()
    @patch.object(remote, 'create_nova_client')
    def test_create_invalid_flavor_specified(self,
                                             mock_client):
        """Cluster.create raises FlavorNotFound when nova rejects the flavor."""
        (mock_client.return_value.flavors.get) = Mock(
            side_effect=nova_exceptions.NotFound(
                404, "Flavor id not found %s" % id))
        self.assertRaises(exception.FlavorNotFound,
                          Cluster.create,
                          Mock(),
                          self.cluster_name,
                          self.datastore,
                          self.datastore_version,
                          self.instances_w_volumes,
                          {})
    @patch.object(remote, 'create_nova_client')
    @patch.object(redis_api, 'CONF')
    def test_create_volume_no_specified(self, mock_conf, mock_client):
        """Volume support enabled + no volume sizes -> VolumeSizeNotSpecified."""
        mock_conf.get = Mock(
            return_value=FakeOptGroup(volume_support=True))
        self.assertRaises(exception.VolumeSizeNotSpecified,
                          Cluster.create,
                          Mock(),
                          self.cluster_name,
                          self.datastore,
                          self.datastore_version,
                          self.instances_no_volumes,
                          {})
    @patch.object(remote, 'create_nova_client')
    @patch.object(redis_api, 'CONF')
    def test_create_storage_specified_with_no_volume_support(self,
                                                             mock_conf,
                                                             mock_client):
        """Volume sizes given while volume support is off -> VolumeNotSupported."""
        mock_conf.get = Mock(
            return_value=FakeOptGroup(volume_support=False))
        mock_client.return_value.flavors = Mock()
        self.assertRaises(exception.VolumeNotSupported,
                          Cluster.create,
                          Mock(),
                          self.cluster_name,
                          self.datastore,
                          self.datastore_version,
                          self.instances_w_volumes,
                          {})
@patch.object(remote, 'create_nova_client')
@patch.object(redis_api, 'CONF')
def test_create_storage_not_specified_and_no_ephemeral_flavor(self,
mock_conf,
mock_client):
class FakeFlavor:
def __init__(self, flavor_id):
self.flavor_id = flavor_id
@property
def id(self):
return self.flavor.id
@property
def ephemeral(self):
return 0
mock_conf.get = Mock(
return_value=FakeOptGroup(volume_support=False))
(mock_client.return_value.
flavors.get.return_value) = FakeFlavor('1234')
self.assertRaises(exception.LocalStorageNotSpecified,
Cluster.create,
Mock(),
self.cluster_name,
self.datastore,
self.datastore_version,
self.instances_no_volumes,
{})
    @patch.object(redis_api, 'CONF')
    @patch.object(inst_models.Instance, 'create')
    @patch.object(task_api, 'load')
    @patch.object(QUOTAS, 'check_quotas')
    @patch.object(remote, 'create_nova_client')
    def test_create(self, mock_client, mock_check_quotas, mock_task_api,
                    mock_ins_create, mock_conf):
        """Happy path: three instances created and the cluster task kicked off."""
        mock_conf.get = Mock(
            return_value=FakeOptGroup(volume_support=True))
        mock_client.return_value.flavors = Mock()
        self.cluster.create(Mock(),
                            self.cluster_name,
                            self.datastore,
                            self.datastore_version,
                            self.instances_w_volumes, {})
        mock_task_api.return_value.create_cluster.assert_called_with(
            self.dbcreate_mock.return_value.id)
        self.assertEqual(3, mock_ins_create.call_count)
@patch.object(redis_api, 'CONF')
@patch.object(inst_models.Instance, 'create')
@patch.object(task_api, 'load')
@patch.object(QUOTAS, 'check_quotas')
@patch.object(remote, 'create_nova_client')
def test_create_with_ephemeral_flavor(self, mock_client, mock_check_quotas,
mock_task_api, mock_ins_create,
mock_conf):
class FakeFlavor:
def __init__(self, flavor_id):
self.flavor_id = flavor_id
@property
def id(self):
return self.flavor.id
@property
def ephemeral(self):
return 1
mock_conf.get = Mock(
return_value=FakeOptGroup(volume_support=False))
(mock_client.return_value.
flavors.get.return_value) = FakeFlavor('1234')
self.cluster.create(Mock(),
self.cluster_name,
self.datastore,
self.datastore_version,
self.instances_no_volumes, {})
mock_task_api.return_value.create_cluster.assert_called_with(
self.dbcreate_mock.return_value.id)
self.assertEqual(3, mock_ins_create.call_count)
    @patch.object(DBCluster, 'update')
    @patch.object(redis_api, 'CONF')
    @patch.object(inst_models.Instance, 'create')
    @patch.object(task_api, 'load')
    @patch.object(QUOTAS, 'check_quotas')
    @patch.object(remote, 'create_nova_client')
    def test_grow(self, mock_client, mock_check_quotas, mock_task_api,
                  mock_ins_create, mock_conf, mock_update):
        """Growing the cluster creates the new members and passes their ids
        to the task API.
        """
        mock_conf.get = Mock(
            return_value=FakeOptGroup(volume_support=True))
        mock_client.return_value.flavors = Mock()
        self.cluster.grow(self.instances_w_volumes)
        mock_task_api.return_value.grow_cluster.assert_called_with(
            self.dbcreate_mock.return_value.id,
            [mock_ins_create.return_value.id] * 3)
        self.assertEqual(3, mock_ins_create.call_count)
    @patch.object(DBInstance, 'find_all')
    @patch.object(Cluster, 'get_guest')
    @patch.object(DBCluster, 'update')
    @patch.object(inst_models.Instance, 'load')
    @patch.object(inst_models.Instance, 'delete')
    def test_shrink(self,
                    mock_ins_delete, mock_ins_load, mock_update,
                    mock_guest, mock_find_all):
        """Shrinking by one instance id deletes exactly one member."""
        # Pretend the cluster currently holds a single member instance.
        mock_find_all.return_value.all.return_value = [
            DBInstance(InstanceTasks.NONE, id="1", name="member1",
                       compute_instance_id="compute-1",
                       task_id=InstanceTasks.NONE._code,
                       task_description=InstanceTasks.NONE._db_text,
                       volume_id="volume-1",
                       datastore_version_id="1",
                       cluster_id=self.cluster_id,
                       type="member")]
        self.cluster.shrink(['id1'])
        self.assertEqual(1, mock_ins_delete.call_count)
@patch('trove.cluster.models.LOG')
def test_delete_bad_task_status(self, mock_logging):
self.cluster.db_info.task_status = ClusterTasks.BUILDING_INITIAL
self.assertRaises(exception.UnprocessableEntity,
self.cluster.delete)
    @patch.object(task_api.API, 'delete_cluster')
    @patch.object(Cluster, 'update_db')
    @patch.object(inst_models.DBInstance, 'find_all')
    def test_delete_task_status_none(self,
                                     mock_find_all,
                                     mock_update_db,
                                     mock_delete_cluster):
        """An idle (NONE) cluster can be deleted; it moves to DELETING."""
        self.cluster.db_info.task_status = ClusterTasks.NONE
        self.cluster.delete()
        mock_update_db.assert_called_with(task_status=ClusterTasks.DELETING)
    @patch.object(task_api.API, 'delete_cluster')
    @patch.object(Cluster, 'update_db')
    @patch.object(inst_models.DBInstance, 'find_all')
    def test_delete_task_status_deleting(self,
                                         mock_find_all,
                                         mock_update_db,
                                         mock_delete_cluster):
        """Deleting a cluster already in DELETING is allowed (idempotent)."""
        self.cluster.db_info.task_status = ClusterTasks.DELETING
        self.cluster.delete()
        mock_update_db.assert_called_with(task_status=ClusterTasks.DELETING)
| |
from common import app, available_resources, get_cluster_metadata, ensure_mom_version
from datetime import timedelta
from dcos import marathon
import itertools
import logging
import math
import shakedown
from utils import marathon_on_marathon
def setup_module(module):
    """Configure logging once for the whole test module."""
    log_format = '%(asctime)s %(levelname)-8s: %(message)s'
    logging.basicConfig(format=log_format)
def setup_function(function):
    """Print cluster metadata and free resources before each test."""
    for snapshot in (get_cluster_metadata(), available_resources()):
        print(snapshot)
def app_def(app_id):
    """Return a minimal Marathon app definition that sleeps forever."""
    definition = {
        "id": app_id,
        "instances": 1,
        "cmd": "for (( ; ; )); do sleep 100000000; done",
        "cpus": 0.01,
        "mem": 32,
        "disk": 0,
        "backoffFactor": 1.0,
        "backoffSeconds": 0,
    }
    return definition
def linear_step_function(step_size=1000):
    """
    Curried linear step function that gives next instances size based on a
    step.
    """
    def next_size(step):
        return step_size * step
    return next_size
def exponential_decay(start=1000, decay=0.5):
    """
    Return a function computing batch sizes with an exponential decay.

    With default parameters we have:
    0:1000, 1:606.53, 2:367.88, 3:223.13, 4:135.34, 5:82.08

    Increase decay for a faster slow down of batches. This function is useful
    to jump to a certain size quickly and to slow down growth then.
    Always returns the floored integer with a minimum of 1.

    :param start: First batch size.
    :param decay: Exponential decay constant.
    """
    def inner(step):
        # start * e^(-step * decay), floored, never below 1.
        exact = start * math.exp(-1 * step * decay)
        approx = math.floor(exact)
        return max(1, approx)
    return inner
def incremental_steps(step_func):
    """
    Generator that yields new instances size in steps until eternity.

    :param step_func The current step number is passed to this function. It
        should return the next size. See 'linear_step_function' for an
        example.
    :yield Next size
    """
    current_step = 1
    while True:
        yield step_func(current_step)
        current_step += 1
def test_incremental_scale():
    """
    Scale instances of app in steps until the first error, e.g. a timeout, is
    reached.

    NOTE(review): this loop is endless by design; the test ends only when a
    deployment fails or times out.
    """
    client = marathon.create_client()
    client.add_app(app_def("cap-app"))
    for new_size in incremental_steps(linear_step_function(step_size=1000)):
        shakedown.echo("Scaling to {}".format(new_size))
        # Wait for any in-flight deployment before scaling again.
        shakedown.deployment_wait(
            app_id='cap-app', timeout=timedelta(minutes=10).total_seconds())
        client.scale_app('/cap-app', new_size)
        shakedown.deployment_wait(
            app_id='cap-app', timeout=timedelta(minutes=10).total_seconds())
        shakedown.echo("done.")
def test_incremental_app_scale():
    """
    Scale number of app in steps until the first error, e.g. a timeout, is
    reached. The apps are created in root group.
    """
    client = marathon.create_client()
    # Start from a clean slate: remove everything under the root group.
    client.remove_group('/')
    for step in itertools.count(start=1):
        shakedown.echo("Add new apps")
        app_id = "app-{0:0>4}".format(step)
        client.add_app(app_def(app_id))
        shakedown.deployment_wait(
            timeout=timedelta(minutes=15).total_seconds())
        shakedown.echo("done.")
def test_incremental_apps_per_group_scale():
    """
    Try to reach the maximum number of apps. We start with batches of apps in a
    group and decay the batch size.
    """
    client = marathon.create_client()
    batch_size_for = exponential_decay(start=500, decay=0.3)
    for step in itertools.count(start=0):
        batch_size = batch_size_for(step)
        shakedown.echo("Add {} apps".format(batch_size))
        group_id = "/batch-{0:0>3}".format(step)
        apps = [app_def("app-{0:0>4}".format(index))
                for index in range(batch_size)]
        client.create_group({
            "apps": apps,
            "dependencies": [],
            "id": group_id
        })
        shakedown.deployment_wait(
            timeout=timedelta(minutes=15).total_seconds())
        shakedown.echo("done.")
def test_incremental_groups_scale():
    """
    Scale number of groups.

    Each batch adds new groups, one app per group, with globally unique ids.
    """
    client = marathon.create_client()
    batch_size_for = exponential_decay(start=40, decay=0.01)
    total = 0
    for step in itertools.count(start=0):
        batch_size = batch_size_for(step)
        total += batch_size
        shakedown.echo("Add {} groups totaling {}".format(batch_size, total))
        group_ids = ("/group-{0:0>4}".format(step * batch_size + i)
                     for i in range(batch_size))
        app_ids = ("{}/app-1".format(g) for g in group_ids)
        app_definitions = [app_def(app_id) for app_id in app_ids]
        # There is no app id. We simply PUT /v2/apps to create groups in
        # batches.
        client.update_app('', app_definitions)
        shakedown.deployment_wait(
            timeout=timedelta(minutes=15).total_seconds())
        shakedown.echo("done.")
def test_incremental_group_nesting():
    """
    Scale depth of nested groups. Again we grow fast at the beginning and then
    slow the growth.
    """
    client = marathon.create_client()
    batch_size_for = exponential_decay(start=5, decay=0.1)
    depth = 0
    for step in itertools.count(start=0):
        batch_size = batch_size_for(step)
        depth += batch_size
        shakedown.echo("Create a group with a nesting of {}".format(depth))
        group_ids = ("group-{0:0>3}".format(g) for g in range(depth))
        nested_groups = '/'.join(group_ids)
        # Note: We always deploy into the same nested groups.
        app_id = '/{0}/app-1'.format(nested_groups)
        client.add_app(app_def(app_id))
        shakedown.deployment_wait(
            timeout=timedelta(minutes=15).total_seconds())
        shakedown.echo("done.")
| |
import logging
import datetime
import hmac, hashlib, base64
from django.contrib.auth.models import User
from auth_mac.models import Credentials, Nonce
import re
from auth_mac.utils import to_utc, random_string
import random
# Extracts key="value pairs from a MAC Authorization header. Note the
# pattern deliberately matches up to, without consuming, the closing quote.
reHeader = re.compile(r"""(mac|nonce|id|ts|ext)="([^"]+)""")
# Dedicated logger for authorization decisions.
authlog = logging.getLogger("auth_mac.authorization")
def compare_string_fixedtime(string1, string2):
    """A fixed-time string comparison function"""
    # Different lengths can never match (length itself is not secret).
    if len(string1) != len(string2):
        return False
    # OR together the per-character XOR differences so the loop always
    # touches every character, keeping the comparison time constant.
    difference = 0
    for left, right in zip(string1, string2):
        difference |= ord(left) ^ ord(right)
    return difference == 0
def _build_authheader(method, data):
datastr = ", ".join(['{0}="{1}"'.format(x, y) for x, y in data.iteritems()])
return "{0} {1}".format(method, datastr)
class SignatureError(Exception):
    """Raised when a MAC signature cannot be built from the given data."""
    pass
class Signature(object):
    """A class to ease the creation of MAC signatures.

    Collects the request parts (method, uri, host, port, ext, timestamp,
    nonce), normalizes them into the MAC base string and signs it with
    HMAC-SHA1 using the supplied credentials.
    """
    # Credentials object providing .key (HMAC secret) and .identifier.
    MAC = None
    # Dict of signature inputs; see update_data_from_dictionary().
    data = None
    # Normalized request string from the last calculate_signature() call.
    base_string = None

    def __init__(self, credentials, **kwargs):
        self.MAC = credentials
        self.data = dict()
        self.update_data_from_dictionary(kwargs)

    def _add_data_item(self, from_dict, name, default=None):
        """Basic inline function to add a key to the self data"""
        # dict.has_key() is Python 2 only and deprecated; the 'in' operator
        # is equivalent and also works on Python 3.
        if name in from_dict:
            self.data[name] = from_dict[name]
        elif name not in self.data:
            self.data[name] = default

    def update_data_from_dictionary(self, from_dict):
        "Read all required information out of a dictionary"
        self._add_data_item(from_dict, "method", None)
        self._add_data_item(from_dict, "uri", None)
        self._add_data_item(from_dict, "host", None)
        self._add_data_item(from_dict, "port", None)
        self._add_data_item(from_dict, "ext", "")
        self._add_data_item(from_dict, "timestamp", None)
        self._add_data_item(from_dict, "nonce", None)
        # If we are changing, wipe out the signature and base string
        self.base_string = None
        self.signature = None

    def update(self, **kwargs):
        "Update the parameters from a dictionary"
        self.update_data_from_dictionary(kwargs)

    def _get_timestamp(self):
        # Seconds since the Unix epoch, computed from a naive UTC delta.
        timestamp = datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)
        return timestamp.days * 24 * 3600 + timestamp.seconds

    def _get_nonce(self):
        return random_string(8)

    def validate(self):
        "Validates that we have all the required information"
        if not self.MAC:
            raise SignatureError("Have not been given a MAC credential")
        required_values = {
            "method": "HTTP Request Method",
            "uri": "HTTP Request URI",
            "host": "Destination Host",
            "port": "Destination Port",
        }
        # Check all of these (missing key or falsy value are both errors)
        for key, errorstring in required_values.items():
            errorstring = "Missing information for signature: {0}".format(
                errorstring)
            if key not in self.data:
                raise SignatureError(errorstring)
            elif not self.data[key]:
                raise SignatureError(errorstring)
        # If the timestamp or nonce are blank, generate them
        if not self.data["nonce"]:
            self.data["nonce"] = self._get_nonce()
        if not self.data["timestamp"]:
            self.data["timestamp"] = self._get_timestamp()
        # Make sure the method is capitalised
        self.data["method"] = self.data["method"].upper()

    def sign_request(self, **kwargs):
        """Signs a request to a specified URI and returns the signature"""
        self.update_data_from_dictionary(kwargs)
        self.validate()
        return self.calculate_signature()

    def calculate_signature(self):
        "Calculates the signature given internal data"
        # Field order is fixed by the MAC draft specification.
        data_vars = ["timestamp", "nonce", "method", "uri", "host", "port",
                     "ext"]
        data = [str(self.data[x]) for x in data_vars]
        self.base_string = "\n".join(data) + "\n"
        hm = hmac.new(str(self.MAC.key), self.base_string, hashlib.sha1)
        self.signature = base64.b64encode(hm.digest())
        return self.signature

    def get_header(self, **kwargs):
        "Return the HTTP Authorization header for the set IDs"
        self.update_data_from_dictionary(kwargs)
        self.validate()
        data = {"id": self.MAC.identifier,
                "ts": self.data["timestamp"],
                "nonce": self.data["nonce"],
                "mac": self.sign_request()}
        # Include the optional ext field
        if self.data["ext"]:
            data["ext"] = self.data["ext"]
        return _build_authheader("MAC", data)
class Validator(object):
    """Validates the mac credentials passed in from an HTTP HEADER"""
    # Human-readable reason for the last validation failure, or None.
    error = None
    # Optional extra diagnostic payload (e.g. the base string).
    errorBody = None

    def __init__(self, Authorization, request):
        self.authstring = Authorization
        self.request = request

    def validate_header(self):
        "Validates that the header string is well formed"
        if not self.authstring.startswith("MAC "):
            # We have not tried to authenticate with MAC credentials
            return False
        # Split the string into key/value pairs
        results = reHeader.findall(self.authstring)
        # Verify that none are repeated
        for key, value in results:
            # Check they are all identified
            if not key in ("mac", "nonce", "ext", "id", "ts"):
                self.error = "Unidentified param"
                return False
            # Find all supplied keys with this keyname
            allkeys = [x for x, y in results if x == key]
            if len(allkeys) > 1:
                self.error = "Duplicate key '{0}'".format(key)
                return False
        # Verify that none are missing
        data = dict(results)
        # dict.has_key() is Python 2 only; 'in' is equivalent and py3-safe.
        if not all(x in data for x in ("mac", "nonce", "id", "ts")):
            self.error = "Missing authorisation information"
            return False
        self.data = data
        return True

    def validate_credentials(self):
        "Validates that the credentials are valid"
        try:
            credentials = Credentials.objects.get(identifier=self.data["id"])
        except Credentials.DoesNotExist:
            self.error = "Invalid MAC credentials"
            return False
        # Check that it hasn't expired
        if credentials.expired:
            self.error = "MAC credentials expired"
            return False
        self.credentials = credentials
        return True

    def validate_nonce(self):
        "Validates that the nonce is not a repeat"
        # Convert the timestamp to a datetime object
        timestamp = datetime.datetime(1970, 1, 1) + \
            datetime.timedelta(seconds=int(self.data["ts"]))
        # Convert this timestamp to UTC if we are timezone-aware
        timestamp = to_utc(timestamp)
        # Try and get a nonce object with these values
        try:
            Nonce.objects.get(nonce=self.data["nonce"], timestamp=timestamp,
                              credentials=self.credentials)
            self.error = "Duplicate nonce"
            return False
        except Nonce.DoesNotExist:
            # Create the nonce, then return true
            nonce = Nonce(nonce=self.data["nonce"], timestamp=timestamp,
                          credentials=self.credentials)
            nonce.save()
            return True
        # NOTE: an unreachable trailing 'return False' was removed here;
        # both branches above always return.

    def validate_signature(self):
        "Validates that the signature is good"
        s = Signature(self.credentials)
        if "HTTP_HOST" not in self.request.META:
            # We can't calculate a signature without the host
            self.error = "Missing Host header"
            return False
        hostname = self.request.META["HTTP_HOST"]
        port = self.request.META["SERVER_PORT"]
        # Strip out the port from hostname, if this has been passed to us
        if ":" in hostname:
            hostname = hostname.split(":")[0]
        # Form the rest of the signature
        s.update(host=hostname, port=port)
        s.update(timestamp=self.data["ts"], nonce=self.data["nonce"])
        s.update(uri=self.request.path)
        s.update(method=self.request.META["REQUEST_METHOD"])
        signature = s.calculate_signature()
        # Compare them in fixed time to avoid timing side channels
        if not compare_string_fixedtime(signature, self.data["mac"]):
            self.error = "Invalid Signature. Base string in body."
            self.errorBody = s.base_string
            return False
        return True

    def validate(self):
        "Validates that everything is well formed and signed correctly"
        # Validate the forming of the signature, this will fill _data
        if not self.validate_header():
            return False
        # Validate that the credentials are good and current
        if not self.validate_credentials():
            return False
        # Validate that this nonce is not out of date
        if not self.validate_nonce():
            return False
        # Now, validate the cryptographic signature..
        if not self.validate_signature():
            return False
        # Everything worked! Set our user property
        self.user = self.credentials.user
        return True
| |
"""Tests for mock server and api"""
import asyncio
import itertools
import json
import jsonschema
import pytest
import requests
from egtaonline import api
from egtaonline import mockserver
def validate_object(obj, obj_schema):
    """Validate a required object schema"""
    # Every property listed in the schema is mandatory.
    schema = {
        'type': 'object',
        'properties': obj_schema,
        'required': list(obj_schema),
    }
    jsonschema.validate(obj, schema)
def is_sorted(gen, *, reverse=False):
    """Test if a generator is sorted"""
    items = list(gen)
    if reverse:
        # Checking descending order is checking ascending on the reversal.
        items.reverse()
    return all(a <= b for a, b in zip(items, items[1:]))
# TODO in python3.6 we may be able to use async fixtures, but async_generator
# didn't work with teardowns
# TODO This is a cheap way around the lack of async generators in python3.5
async def agather(aiter):
    """Gather an async iterator into a list"""
    collected = []
    async for item in aiter:
        collected.append(item)
    return collected
async def create_simulator(server, egta, name, version):
    """Create a simulator that's semi configured"""
    sim_id = server.create_simulator(name, version, conf={'key': 'value'})
    simulator = await egta.get_simulator(sim_id)
    await simulator.add_strategies({
        'a': ['1', '2', '3', '4'],
        'b': ['5', '6', '7'],
    })
    return simulator
async def sched_complete(sched, sleep=0.001):
    """Wait for scheduler to complete.

    Polls until the scheduler is inactive or every scheduling requirement
    has reached its required observation count.

    :param sched: scheduler api object with get_info / get_requirements.
    :param sleep: seconds between polls.
    """
    while (await sched.get_info())['active'] and not all(  # pragma: no branch
            p['requirement'] <= p['current_count'] for p
            in (await sched.get_requirements())['scheduling_requirements']):
        await asyncio.sleep(sleep)  # pragma: no cover
@pytest.mark.asyncio
async def test_get_simulators():
    """Test getting simulators"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        # The mock server assigns sequential ids starting at 0.
        sim1 = server.create_simulator('foo', '1')
        sim2 = server.create_simulator('bar', '1')
        sim3 = server.create_simulator('bar', '2')
        assert sum(1 for _ in await egta.get_simulators()) == 3
        assert {0, 1, 2} == {s['id'] for s in await egta.get_simulators()}
        sim = await egta.get_simulator(0)
        assert sim['id'] == sim1
        # Fullname lookup uses the "name-version" convention.
        sim = await egta.get_simulator_fullname('foo-1')
        assert sim['id'] == sim1
        sim = await egta.get_simulator(2)
        assert sim['id'] == sim3
        sim = await egta.get_simulator_fullname('bar-1')
        assert sim['id'] == sim2
        sim = await egta.get_simulator_fullname('bar-2')
        assert sim['id'] == sim3
        # Unknown id / fullname lookups must fail loudly.
        with pytest.raises(requests.exceptions.HTTPError):
            await egta.get_simulator(3)
        with pytest.raises(AssertionError):
            await egta.get_simulator_fullname('baz')
@pytest.mark.asyncio
async def test_simulator():
    """Test simulator api"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        info = await sim.get_info()
        validate_object(info, {
            'configuration': {'type': 'object'},
            'created_at': {'type': 'string'},
            'email': {'type': 'string'},
            'id': {'type': 'integer'},
            'name': {'type': 'string'},
            'role_configuration': {'type': 'object'},
            'source': {'type': 'object'},
            'updated_at': {'type': 'string'},
            'url': {'type': 'string'},
            'version': {'type': 'string'},
        })
        role_conf = {'a': ['1', '2', '3', '4'], 'b': ['5', '6', '7']}
        assert info['role_configuration'] == role_conf
        # Sleep so the next modification produces a different updated_at.
        await asyncio.sleep(1)
        await sim.remove_strategy('a', '3')
        new_role_conf = {'a': ['1', '2', '4'], 'b': ['5', '6', '7']}
        new_info = await sim.get_info()
        assert new_info['role_configuration'] == new_role_conf
        assert new_info['updated_at'] != info['updated_at']
        assert info['role_configuration'] == role_conf
        await sim.remove_role('b')
        new_role_conf = {'a': ['1', '2', '4']}
        new_info = await sim.get_info()
        assert new_info['role_configuration'] == new_role_conf
        # Stale object didn't update
        assert info['role_configuration'] == role_conf
        # Add existing strategy
        await sim.add_strategy('a', '1')
        new_info = await sim.get_info()
        assert new_info['role_configuration'] == new_role_conf
        await sim.add_strategy('a', '2')
        await sim.add_strategy('a', '3')
        new_role_conf = {'a': ['1', '2', '3', '4']}
        new_info = await sim.get_info()
        assert new_info['role_configuration'] == new_role_conf
        # Removal accepts duplicates and unknown strategies silently.
        await sim.remove_strategies({'a': ['4', '5', '4']})
        new_role_conf = {'a': ['1', '2', '3']}
        new_info = await sim.get_info()
        assert new_info['role_configuration'] == new_role_conf
        await sim.remove_role('c')
        with pytest.raises(KeyError):
            await sim.add_strategy('c', '8')
        # Shouldn't raise exception, because removals never do
        await sim.remove_strategies({'c': ['8']})
@pytest.mark.asyncio
async def test_get_schedulers():
    """Test getting schedulers"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        await sim.create_generic_scheduler('1', False, 0, 10, 0, 0)
        sched2 = await egta.create_generic_scheduler(
            sim['id'], '2', False, 0, 10, 0, 0)
        sched3 = await sim.create_generic_scheduler('3', False, 0, 10, 0, 0)
        await sim.create_generic_scheduler('4', False, 0, 10, 0, 0)
        await sim.create_generic_scheduler('5', False, 0, 10, 0, 0)
        # Scheduler names must be unique per simulator.
        with pytest.raises(requests.exceptions.HTTPError):
            await sim.create_generic_scheduler('4', False, 0, 10, 0, 0)
        sched = await egta.get_scheduler(2)
        assert sched['id'] == sched3['id']
        sched = await egta.get_scheduler_name('3')
        assert sched['id'] == sched3['id']
        assert sum(1 for _ in await egta.get_generic_schedulers()) == 5
        assert {0, 1, 2, 3, 4} == {
            s['id'] for s in await egta.get_generic_schedulers()}
        await sched2.destroy_scheduler()
        await sched3.destroy_scheduler()
        # Destroyed schedulers disappear from listings and lookups.
        assert sum(1 for _ in await egta.get_generic_schedulers()) == 3
        assert {0, 3, 4} == {
            s['id'] for s in await egta.get_generic_schedulers()}
        with pytest.raises(requests.exceptions.HTTPError):
            await egta.get_scheduler(5)
        with pytest.raises(requests.exceptions.HTTPError):
            await egta.get_scheduler(2)
        with pytest.raises(AssertionError):
            await egta.get_scheduler_name('3')
@pytest.mark.asyncio
async def test_scheduler():
    """Test scheduler api"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        sched = await sim.create_generic_scheduler('sched', True, 0, 10, 0, 0)
        validate_object(sched, {
            'active': {'type': 'boolean'},
            'created_at': {'type': 'string'},
            'default_observation_requirement': {'type': 'integer'},
            'id': {'type': 'integer'},
            'name': {'type': 'string'},
            'nodes': {'type': 'integer'},
            'observations_per_simulation': {'type': 'integer'},
            'process_memory': {'type': 'integer'},
            'simulator_instance_id': {'type': 'integer'},
            'size': {'type': 'integer'},
            'time_per_observation': {'type': 'integer'},
            'updated_at': {'type': 'string'},
        })
        info = await sched.get_info()
        validate_object(info, {
            'active': {'type': 'boolean'},
            'created_at': {'type': 'string'},
            'default_observation_requirement': {'type': 'integer'},
            'id': {'type': 'integer'},
            'name': {'type': 'string'},
            'nodes': {'type': 'integer'},
            'observations_per_simulation': {'type': 'integer'},
            'process_memory': {'type': 'integer'},
            'simulator_instance_id': {'type': 'integer'},
            'size': {'type': 'integer'},
            'time_per_observation': {'type': 'integer'},
            'updated_at': {'type': 'string'},
        })
        validate_object((await sched.get_requirements()), {
            'active': {'type': 'boolean'},
            'configuration': {'type': 'array'},
            'default_observation_requirement': {'type': 'integer'},
            'id': {'type': 'integer'},
            'name': {'type': 'string'},
            'nodes': {'type': 'integer'},
            'observations_per_simulation': {'type': 'integer'},
            'process_memory': {'type': 'integer'},
            'scheduling_requirements': {'type': 'array'},
            'simulator_id': {'type': 'integer'},
            'size': {'type': 'integer'},
            'time_per_observation': {'type': 'integer'},
            'type': {'type': 'string'},
            'url': {'type': 'string'},
        })
        # Toggling activity is reflected in fresh info, not stale copies.
        await sched.deactivate()
        assert not (await sched.get_info())['active']
        # stale info invalid
        assert info['active']
        await sched.activate()
        assert (await sched.get_info())['active']
        await sched.update(process_memory=1)
        assert (await sched.get_info())['process_memory'] == 1
        await sched.add_roles({'a': 8})
        # Duplicate role, unknown role, and count overflow all fail.
        with pytest.raises(requests.exceptions.HTTPError):
            await sched.add_role('a', 1)
        with pytest.raises(requests.exceptions.HTTPError):
            await sched.add_role('c', 1)
        with pytest.raises(requests.exceptions.HTTPError):
            await sched.add_role('b', 3)
        await sched.add_role('b', 2)
        await sched.remove_role('b')
        # Removals are tolerant of roles that don't exist.
        await sched.remove_roles(['b', 'c'])
@pytest.mark.asyncio
async def test_profiles():  # pylint: disable=too-many-statements,too-many-locals
    """Test profile api"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        sched1 = await sim.create_generic_scheduler('sched', True, 0, 10, 0, 0)
        await sched1.add_roles({'a': 8, 'b': 2})
        # An assignment string and its equivalent symmetry-group form.
        assignment = 'a: 8 1; b: 1 5, 1 7'
        symgrp = [{'role': 'a', 'strategy': '1', 'count': 8},
                  {'role': 'b', 'strategy': '5', 'count': 1},
                  {'role': 'b', 'strategy': '7', 'count': 1}]
        assert assignment == mockserver.symgrps_to_assignment(symgrp)
        prof1 = await sched1.add_profile(assignment, 0)
        validate_object(prof1, {
            'assignment': {'type': 'string'},
            'created_at': {'type': 'string'},
            'id': {'type': 'integer'},
            'observations_count': {'type': 'integer'},
            'role_configuration': {'type': 'object'},
            'simulator_instance_id': {'type': 'integer'},
            'size': {'type': 'integer'},
            'updated_at': {'type': 'string'},
        })
        # With zero required observations, no payoff data exists yet.
        assert (await prof1.get_structure())['observations_count'] == 0
        for grp in (await prof1.get_summary())['symmetry_groups']:
            assert grp['payoff'] is None
            assert grp['payoff_sd'] is None
        assert not (await prof1.get_observations())['observations']
        assert not (await prof1.get_full_data())['observations']
        prof0 = await egta.get_profile(prof1['id'])
        assert prof1['id'] == prof0['id']
        # Adding the same assignment (in either form) reuses the profile.
        assert prof1['id'] == (await sched1.add_profile(symgrp, 0))['id']
        await sched1.remove_profile(prof1['id'])
        await sched1.add_profile(assignment, 3)
        await sched_complete(sched1)
        reqs = (await sched1.get_requirements())['scheduling_requirements']
        assert len(reqs) == 1
        assert reqs[0]['current_count'] == 3
        assert reqs[0]['requirement'] == 3
        assert reqs[0]['id'] == prof1['id']
        struct = await prof1.get_structure()
        validate_object(struct, {
            'assignment': {'type': 'string'},
            'created_at': {'type': 'string'},
            'id': {'type': 'integer'},
            'observations_count': {'type': 'integer'},
            'role_configuration': {'type': 'object'},
            'simulator_instance_id': {'type': 'integer'},
            'size': {'type': 'integer'},
            'updated_at': {'type': 'string'},
        })
        assert struct['assignment'] == assignment
        assert struct['observations_count'] == 3
        assert struct['size'] == 10
        summ = await prof1.get_summary()
        validate_object(summ, {
            'id': {'type': 'integer'},
            'observations_count': {'type': 'integer'},
            'simulator_instance_id': {'type': 'integer'},
            'symmetry_groups': {'type': 'array'},
        })
        assert summ['observations_count'] == 3
        assert len(summ['symmetry_groups']) == 3
        obs = await prof1.get_observations()
        validate_object(obs, {
            'id': {'type': 'integer'},
            'simulator_instance_id': {'type': 'integer'},
            'symmetry_groups': {'type': 'array'},
            'observations': {'type': 'array'},
        })
        assert len(obs['symmetry_groups']) == 3
        assert len(obs['observations']) == 3
        assert all(len(o['symmetry_groups']) == 3
                   for o in obs['observations'])
        full = await prof1.get_full_data()
        validate_object(full, {
            'id': {'type': 'integer'},
            'simulator_instance_id': {'type': 'integer'},
            'symmetry_groups': {'type': 'array'},
            'observations': {'type': 'array'},
        })
        assert len(full['symmetry_groups']) == 3
        assert len(full['observations']) == 3
        assert all(len(o['players']) == 10 for o in full['observations'])
        # A second scheduler for the same simulator shares profile data.
        sched2 = await sim.create_generic_scheduler(
            'sched2', True, 0, 10, 0, 0)
        await sched2.add_roles({'a': 8, 'b': 2})
        prof2 = await sched2.add_profile(assignment, 5)
        await sched_complete(sched2)
        assert prof2['id'] == prof1['id']
        assert (await prof2.get_structure())['observations_count'] == 5
        assert (await prof1.get_structure())['observations_count'] == 5
        reqs = (await sched2.get_requirements())['scheduling_requirements']
        assert len(reqs) == 1
        assert reqs[0]['current_count'] == 5
        assert reqs[0]['requirement'] == 5
        reqs = (await sched1.get_requirements())['scheduling_requirements']
        assert len(reqs) == 1
        assert reqs[0]['current_count'] == 5
        assert reqs[0]['requirement'] == 3
        await sched1.remove_profile(prof1['id'])
        assert not (await sched1.get_requirements())['scheduling_requirements']
        # Removing an already-removed profile must not touch updated_at.
        updated_time = (await sched1.get_info())['updated_at']
        await asyncio.sleep(1)
        await sched1.remove_profile(prof1['id'])
        assert (await sched1.get_info())['updated_at'] == updated_time
        assert prof1['id'] == (await sched1.add_profile(assignment, 1))['id']
        reqs = (await sched1.get_requirements())['scheduling_requirements']
        assert len(reqs) == 1
        assert reqs[0]['current_count'] == 5
        assert reqs[0]['requirement'] == 1
        # Test delayed scheduling
        await sched1.deactivate()
        await sched1.remove_profile(prof1['id'])
        await sched1.add_profile(assignment, 9)
        await sched_complete(sched1)
        assert (await prof1.get_structure())['observations_count'] == 5
        reqs = (await sched1.get_requirements())['scheduling_requirements']
        assert len(reqs) == 1
        assert reqs[0]['current_count'] == 5
        assert reqs[0]['requirement'] == 9
        await sched1.activate()
        await sched_complete(sched1)
        assert (await prof1.get_structure())['observations_count'] == 9
        await sched1.remove_all_profiles()
        assert not (await sched1.get_requirements())['scheduling_requirements']
        # Removing a nonexistent profile id is a silent no-op.
        await sched1.remove_profile(10**10)
        assert (await sched2.get_requirements())['scheduling_requirements']
        await sched2.remove_profile(prof2['id'])
        assert not (await sched2.get_requirements())['scheduling_requirements']
@pytest.mark.asyncio
async def test_delayed_profiles():
    """Test mock server with delayed profile scheduling"""
    async with mockserver.server() as server, api.api('') as egta:
        # Each simulation takes 0.5s, so counts start at 0 while running.
        sim = await egta.get_simulator(
            server.create_simulator('sim', '1', delay_dist=lambda: 0.5))
        await sim.add_strategies({'1': ['a'], '2': ['b', 'c']})
        sched = await sim.create_generic_scheduler('sched', True, 0, 10, 0, 0)
        await sched.add_roles({'1': 8, '2': 2})
        prof = await sched.add_profile('1: 8 a; 2: 1 b, 1 c', 3)
        reqs = (await sched.get_requirements())['scheduling_requirements']
        assert len(reqs) == 1
        assert reqs[0]['current_count'] == 0
        assert reqs[0]['requirement'] == 3
        assert reqs[0]['id'] == prof['id']
        count = 0
        async for sim in egta.get_simulations():
            assert sim['state'] == 'running'
            count += 1
        assert count == 3
        # After the delay elapses all three simulations complete.
        await asyncio.sleep(0.5)
        await sched_complete(sched)
        reqs = (await sched.get_requirements())['scheduling_requirements']
        assert len(reqs) == 1
        assert reqs[0]['current_count'] == 3
        assert reqs[0]['requirement'] == 3
        assert reqs[0]['id'] == prof['id']
        count = 0
        async for sim in egta.get_simulations():
            assert sim['state'] == 'complete'
            count += 1
        assert count == 3
    # Test that extra sims get killed
    async with mockserver.server() as server, api.api('') as egta:
        sim = await egta.get_simulator(
            server.create_simulator('sim', '1', delay_dist=lambda: 10))
        await sim.add_strategies({'1': ['a'], '2': ['b', 'c']})
        sched = await sim.create_generic_scheduler('sched', True, 0, 10, 0, 0)
        await sched.add_roles({'1': 8, '2': 2})
        # Need two profiles here because the other will be in the queue
        await sched.add_profile('1: 8 a; 2: 1 b, 1 c', 1)
        await sched.add_profile('1: 8 a; 2: 2 b', 1)
@pytest.mark.asyncio
async def test_missing_profile():
    """Test getting missing profile.

    Looking up a profile that was never created must raise an HTTP error.
    """
    async with mockserver.server(), \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        with pytest.raises(requests.exceptions.HTTPError):
            await egta.get_profile(0)
@pytest.mark.asyncio
async def test_get_games():
    """Test getting games"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        await sim.create_game('a', 5)
        game2 = await egta.create_game(sim['id'], 'b', 6)
        game3 = await sim.create_game('c', 3)
        # Game names must be unique.
        with pytest.raises(requests.exceptions.HTTPError):
            await sim.create_game('b', 3)
        assert (await egta.get_game(1))['id'] == game2['id']
        assert (await egta.get_game_name('c'))['id'] == game3['id']
        assert sum(1 for _ in await egta.get_games()) == 3
        assert {0, 1, 2} == {g['id'] for g in await egta.get_games()}
        # Destroyed games disappear from listings and lookups.
        await game2.destroy_game()
        assert sum(1 for _ in await egta.get_games()) == 2
        assert {0, 2} == {g['id'] for g in await egta.get_games()}
        with pytest.raises(requests.exceptions.HTTPError):
            await egta.get_game(3)
        with pytest.raises(requests.exceptions.HTTPError):
            await egta.get_game(1)
        with pytest.raises(AssertionError):
            await egta.get_game_name('b')
@pytest.mark.asyncio
async def test_game():
    """Test game mocks"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        sched = await sim.create_generic_scheduler(
            'sched', True, 0, 4, 0, 0, configuration={'k': 'v'})
        await sched.add_roles({'a': 2, 'b': 2})
        game = await sched.create_game()
        await game.add_symgroups([
            ('a', 2, ['1']), ('b', 2, ['5', '6'])])
        # A profile requested with zero observations yields no game data
        prof = await sched.add_profile('a: 2 1; b: 1 5, 1 6', 0)
        await sched_complete(sched)
        reqs = await sched.get_requirements()
        assert len(reqs['scheduling_requirements']) == 1
        assert not (await game.get_summary())['profiles']
        assert not (await game.get_observations())['profiles']
        assert not (await game.get_full_data())['profiles']
        # Reschedule the same assignment with one observation, plus a second
        # profile with two, so the game now contains data
        await sched.remove_profile(prof['id'])
        await sched.add_profile(prof['assignment'], 1)
        await sched.add_profile('a: 2 1; b: 2 5', 2)
        await sched_complete(sched)
        # Summary: one profile with 1 observation, one with 2
        size_counts = {}
        for prof in (await game.get_summary())['profiles']:
            counts = prof['observations_count']
            size_counts[counts] = size_counts.get(counts, 0) + 1
        assert size_counts == {1: 1, 2: 1}
        # Observations endpoint agrees with the summary counts
        size_counts = {}
        for prof in (await game.get_observations())['profiles']:
            counts = len(prof['observations'])
            size_counts[counts] = size_counts.get(counts, 0) + 1
        assert size_counts == {1: 1, 2: 1}
        # Full data additionally exposes the four players per observation
        size_counts = {}
        for prof in (await game.get_full_data())['profiles']:
            for obs in prof['observations']:
                assert len(obs['players']) == 4
            counts = len(prof['observations'])
            size_counts[counts] = size_counts.get(counts, 0) + 1
        assert size_counts == {1: 1, 2: 1}
        # Dropping a strategy prunes the profiles that used it
        await game.remove_strategy('b', '6')
        assert len((await game.get_summary())['profiles']) == 1
        assert len((await game.get_observations())['profiles']) == 1
        assert len((await game.get_full_data())['profiles']) == 1
        # Mutations involving absent strategies / roles act as no-ops
        await game.remove_strategy('a', '4')
        await game.add_strategies({'a': ['2', '3']})
        await game.remove_strategies({'a': ['1', '3']})
        await game.remove_roles(['b'])
        updated_time = (await game.get_structure())['updated_at']
        await asyncio.sleep(1)
        await game.remove_role('b')
        # Removing an already-removed role must not bump the update time
        assert (await game.get_structure())['updated_at'] == updated_time
        await game.add_roles({'b': 2})
        sched = await game.create_generic_scheduler('scheder', False, 1, 1, 1)
        assert sched['size'] == game['size']
@pytest.mark.asyncio
async def test_canon_game():  # pylint: disable=too-many-locals
    """Test that canon game creates proper games"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        sim2 = await create_simulator(server, egta, 'sim', '2')
        symgrps = [('a', 2, ['1']), ('b', 2, ['5', '6'])]
        conf = {'key': 'val'}

        game1 = await sim.get_canon_game(symgrps, conf)
        summ = await game1.get_summary()
        assert conf == dict(summ['configuration'])

        def unpack(name, count, strategies):
            """Helper to unpack dictionaries"""
            return name, count, strategies

        # Index the summary's roles so each requested group can be checked
        by_role = {}
        for role_info in summ['roles']:
            role, count, strategies = unpack(**role_info)
            by_role[role] = (role, count, set(strategies))
        for role, expected_count, expected_strats in symgrps:
            _, count, strats = by_role[role]
            assert count == expected_count
            assert strats == set(expected_strats)

        # Changing the configuration, strategies, counts, or simulator each
        # produces a distinct canonical game ...
        diff_conf = await egta.get_canon_game(
            sim['id'], symgrps, {'key': 'diff'})
        assert game1['id'] != diff_conf['id']
        diff_strats = await egta.get_canon_game(
            sim['id'], [('a', 2, ['1']), ('b', 2, ['5', '7'])], conf)
        assert game1['id'] != diff_strats['id']
        diff_counts = await egta.get_canon_game(
            sim['id'], [('a', 2, ['1']), ('b', 1, ['5', '6'])], conf)
        assert game1['id'] != diff_counts['id']
        diff_sim = await egta.get_canon_game(sim2['id'], symgrps, conf)
        assert game1['id'] != diff_sim['id']
        # ... while an identical request maps back to the same game
        same_game = await egta.get_canon_game(sim['id'], symgrps, conf)
        assert game1['id'] == same_game['id']
def _raise(ex):
"""Raise an exception"""
raise ex
@pytest.mark.asyncio
async def test_large_game_failsafes():
    """Test that large games fallback to gathering profile data"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        # Error the mock server raises to simulate an oversized game response
        error = requests.exceptions.HTTPError(
            '500 Server Error: Game too large!')
        sched = await sim.create_generic_scheduler(
            'sched', True, 0, 4, 0, 0, configuration={'k': 'v'})
        await sched.add_roles({'a': 2, 'b': 2})
        game = await sched.create_game()
        await game.add_symgroups([
            ('a', 2, ['1']), ('b', 2, ['5', '6'])])
        await sched.add_profile('a: 2 1; b: 1 5, 1 6', 1)
        await sched.add_profile('a: 2 1; b: 2 5', 2)
        await sched_complete(sched)
        # Fetch once normally, then again with the bulk endpoint erroring;
        # the per-profile fallback must return identical data
        base = await game.get_observations()
        server.custom_response(lambda: _raise(error))
        alternate = await game.get_observations()
        assert base == alternate
        size_counts = {}
        for prof in alternate['profiles']:
            counts = len(prof['observations'])
            size_counts[counts] = size_counts.get(counts, 0) + 1
        assert size_counts == {1: 1, 2: 1}
        # Same check for the full-data endpoint
        base = await game.get_full_data()
        server.custom_response(lambda: _raise(error))
        alternate = await game.get_full_data()
        assert base == alternate
        size_counts = {}
        for prof in alternate['profiles']:
            for obs in prof['observations']:
                assert len(obs['players']) == 4
            counts = len(prof['observations'])
            size_counts[counts] = size_counts.get(counts, 0) + 1
        assert size_counts == {1: 1, 2: 1}
@pytest.mark.asyncio
async def test_profile_json_error():
    """Test invalid profile json triggers retry"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        sched = await sim.create_generic_scheduler('sched', True, 0, 4, 0, 0)
        await sched.add_roles({'a': 2, 'b': 2})
        game = await sched.create_game()
        await game.add_symgroups([
            ('a', 2, ['1']), ('b', 2, ['5', '6'])])
        prof = await sched.add_profile('a: 2 1; b: 2 5', 2)
        await sched_complete(sched)
        # Two empty (unparsable) responses: with three tries configured the
        # request still succeeds on the final attempt
        server.custom_response(lambda: '', 2)
        summ = await prof.get_summary()
        assert 'id' in summ
        assert 'observations_count' in summ
        assert 'symmetry_groups' in summ
        # Three empty responses exhaust the retries and surface the error
        server.custom_response(lambda: '', 3)
        with pytest.raises(json.decoder.JSONDecodeError):
            await prof.get_summary()
        # Valid json of the wrong shape fails schema validation instead
        server.custom_response(lambda: '{}', 3)
        with pytest.raises(jsonschema.ValidationError):
            await prof.get_summary()
@pytest.mark.asyncio
async def test_game_json_error():
    """Test returning invalid json in games triggers retry"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        sched = await sim.create_generic_scheduler('sched', True, 0, 4, 0, 0)
        await sched.add_roles({'a': 2, 'b': 2})
        game = await sched.create_game()
        await game.add_symgroups([
            ('a', 2, ['1']), ('b', 2, ['5', '6'])])
        await sched.add_profile('a: 2 1; b: 1 5, 1 6', 1)
        await sched.add_profile('a: 2 1; b: 2 5', 2)
        await sched_complete(sched)
        # Two empty responses are tolerated with three tries configured
        server.custom_response(lambda: '', 2)
        summ = await game.get_summary()
        size_counts = {}
        for prof in summ['profiles']:
            counts = prof['observations_count']
            size_counts[counts] = size_counts.get(counts, 0) + 1
        assert size_counts == {1: 1, 2: 1}
        # Three empty responses exhaust the retries and raise the json error
        server.custom_response(lambda: '', 3)
        with pytest.raises(json.decoder.JSONDecodeError):
            await game.get_summary()
        # Valid but schema-violating json raises a validation error instead
        server.custom_response(lambda: '{}', 3)
        with pytest.raises(jsonschema.ValidationError):
            await game.get_summary()
@pytest.mark.asyncio
async def test_get_simulations():
    """Test getting simulations"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        assert not await agather(egta.get_simulations())
        sim1 = await create_simulator(server, egta, 'sim', '1')
        sched1 = await sim1.create_generic_scheduler(
            'sched1', True, 0, 4, 0, 0)
        await sched1.add_roles({'a': 2, 'b': 2})
        # Two requested observations -> two simulations listed
        await sched1.add_profile('a: 2 1; b: 1 6, 1 7', 2)
        assert len(await agather(egta.get_simulations())) == 2
        simul = next(iter(await agather(egta.get_simulations())))
        validate_object(simul, {
            'folder': {'type': 'integer'},
            'job': {'type': 'number'},
            'profile': {'type': 'string'},
            'simulator': {'type': 'string'},
            'state': {'type': 'string'},
        })
        # Individual simulation lookups use a different field naming scheme
        validate_object(await egta.get_simulation(simul['folder']), {
            'error_message': {'type': 'string'},
            'folder_number': {'type': 'integer'},
            'job': {'type': 'string'},
            'profile': {'type': 'string'},
            'simulator_fullname': {'type': 'string'},
            'size': {'type': 'integer'},
            'state': {'type': 'string'},
        })
        sim2 = await create_simulator(server, egta, 'sim', '2')
        sched2 = await sim2.create_generic_scheduler(
            'sched2', True, 0, 5, 0, 0)
        await sched2.add_roles({'a': 2, 'b': 3})
        await sched2.add_profile('a: 2 1; b: 1 5, 2 7', 3)
        assert len(await agather(egta.get_simulations())) == 5
        # Test simulations
        # Default ordering for every sortable column is descending ...
        assert is_sorted(  # pragma: no branch
            (f['simulator'] for f
             in await agather(egta.get_simulations(column='simulator'))),
            reverse=True)
        assert is_sorted(  # pragma: no branch
            (f['folder'] for f
             in await agather(egta.get_simulations(column='folder'))),
            reverse=True)
        assert is_sorted(  # pragma: no branch
            (f['profile'] for f
             in await agather(egta.get_simulations(column='profile'))),
            reverse=True)
        assert is_sorted(  # pragma: no branch
            (f['state'] for f
             in await agather(egta.get_simulations(column='state'))),
            reverse=True)
        # ... and asc=True flips each column to ascending order
        assert is_sorted(  # pragma: no branch
            f['simulator'] for f
            in await agather(egta.get_simulations(
                asc=True, column='simulator')))
        assert is_sorted(  # pragma: no branch
            f['folder'] for f
            in await agather(egta.get_simulations(asc=True, column='folder')))
        assert is_sorted(  # pragma: no branch
            f['profile'] for f
            in await agather(egta.get_simulations(asc=True, column='profile')))
        assert is_sorted(  # pragma: no branch
            f['state'] for f
            in await agather(egta.get_simulations(asc=True, column='state')))
        # Pagination: with 5 simulations page 2 is empty; pushing the total
        # to 26 leaves exactly one entry on page 2
        assert not await agather(egta.get_simulations(page_start=2))
        await sched2.add_profile('a: 2 1; b: 1 5, 2 6', 21)
        assert len(await agather(egta.get_simulations(page_start=2))) == 1
@pytest.mark.asyncio
async def test_exception_open():
    """Errors raised while opening the api context must propagate."""
    async with mockserver.server() as server:
        # Make the very first request (issued on context entry) fail
        server.custom_response(lambda: _raise(TimeoutError))
        with pytest.raises(TimeoutError):
            async with api.api(''):
                pass  # pragma: no cover
@pytest.mark.asyncio
async def test_exceptions():
    """Test that exceptions can be properly set"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        await sim.add_strategies({'role': ['strategy']})
        sched = await sim.create_generic_scheduler('sched', True, 0, 1, 0, 0)
        await sched.add_role('role', 1)
        prof = await sched.add_profile('role: 1 strategy', 1)
        game = await sched.create_game('game')
        await game.add_symgroup('role', 1, ['strategy'])
        # The next 11 responses raise, one for each operation checked below
        server.custom_response(lambda: _raise(TimeoutError), 11)
        # Creations fail
        with pytest.raises(TimeoutError):
            await sim.create_generic_scheduler('sched_2', False, 0, 0, 0, 0)
        with pytest.raises(TimeoutError):
            await sched.create_game()
        # Infos fail
        with pytest.raises(TimeoutError):
            await sim.get_info()
        with pytest.raises(TimeoutError):
            await sched.get_info()
        with pytest.raises(TimeoutError):
            await game.get_structure()
        with pytest.raises(TimeoutError):
            await prof.get_structure()
        # Mutates fail
        with pytest.raises(TimeoutError):
            await sim.add_role('r')
        with pytest.raises(TimeoutError):
            await sim.add_strategy('role', 's')
        with pytest.raises(TimeoutError):
            await sched.add_role('r', 1)
        with pytest.raises(TimeoutError):
            await game.add_role('r', 1)
        with pytest.raises(TimeoutError):
            await game.add_strategy('role', 's')
        # Succeed after done
        assert sim['id'] == (await sim.get_info())['id']
@pytest.mark.asyncio
async def test_threading():
    """Test that no errors arise when multi-threading"""
    async with mockserver.server() as server, \
            api.api('', num_tries=3, retry_delay=0.5) as egta:
        sim = await create_simulator(server, egta, 'sim', '1')
        # Fire ten concurrent strategy additions at the same simulator
        additions = (
            sim.add_strategies({'r{:d}'.format(num): ['s{:d}'.format(num)]})
            for num in range(10))
        await asyncio.gather(*additions)
| |
"""The tests for the Google Pub/Sub component."""
from collections import namedtuple
from datetime import datetime
import pytest
import homeassistant.components.google_pubsub as google_pubsub
from homeassistant.components.google_pubsub import DateTimeJSONEncoder as victim
from homeassistant.const import EVENT_STATE_CHANGED
from homeassistant.core import split_entity_id
from homeassistant.setup import async_setup_component
import tests.async_mock as mock
GOOGLE_PUBSUB_PATH = "homeassistant.components.google_pubsub"
async def test_datetime():
    """Test datetime encoding."""
    # Named `moment` to avoid shadowing the stdlib ``time`` module name
    moment = datetime(2019, 1, 13, 12, 30, 5)
    assert victim().encode(moment) == '"2019-01-13T12:30:05"'
async def test_no_datetime():
    """Test integer encoding."""
    # Non-datetime values fall through to the default JSON encoding.
    assert victim().encode(42) == "42"
async def test_nested():
    """Test dictionary encoding."""
    # Containers are encoded recursively, same as the stock JSONEncoder.
    assert victim().encode({"foo": "bar"}) == '{"foo": "bar"}'
@pytest.fixture(autouse=True, name="mock_client")
def mock_client_fixture():
    """Mock the pubsub client."""
    with mock.patch(f"{GOOGLE_PUBSUB_PATH}.pubsub_v1") as client:
        client.PublisherClient = mock.MagicMock()
        # The factory always hands back the same mocked publisher instance
        client.PublisherClient.from_service_account_json = mock.MagicMock(
            return_value=mock.MagicMock()
        )
        yield client
@pytest.fixture(autouse=True, name="mock_os")
def mock_os_fixture():
    """Mock the OS cli."""
    with mock.patch(f"{GOOGLE_PUBSUB_PATH}.os") as os_cli:
        os_cli.path = mock.MagicMock()
        # Every joined path collapses to the sentinel "path"
        os_cli.path.join = mock.MagicMock(return_value="path")
        yield os_cli
@pytest.fixture(autouse=True)
def mock_bus_and_json(hass, monkeypatch):
    """Mock the event bus listener and os component."""
    # Capture bus subscriptions so tests can inspect the registered handler.
    hass.bus.listen = mock.MagicMock()
    # Stub json.dumps so publishing never depends on real serialization.
    monkeypatch.setattr(
        f"{GOOGLE_PUBSUB_PATH}.json.dumps", mock.Mock(return_value=mock.MagicMock())
    )
async def test_minimal_config(hass, mock_client):
    """Test the minimal config and defaults of component."""
    config = {
        google_pubsub.DOMAIN: {
            "project_id": "proj",
            "topic_name": "topic",
            "credentials_json": "creds",
            "filter": {},
        }
    }
    assert await async_setup_component(hass, google_pubsub.DOMAIN, config)
    await hass.async_block_till_done()
    # Setup subscribes to state changes and loads credentials exactly once
    assert hass.bus.listen.called
    assert hass.bus.listen.call_args_list[0][0][0] == EVENT_STATE_CHANGED
    factory = mock_client.PublisherClient.from_service_account_json
    assert factory.call_count == 1
    assert factory.call_args[0][0] == "path"
async def test_full_config(hass, mock_client):
    """Test the full config of the component."""
    config = {
        google_pubsub.DOMAIN: {
            "project_id": "proj",
            "topic_name": "topic",
            "credentials_json": "creds",
            "filter": {
                "include_domains": ["light"],
                "include_entity_globs": ["sensor.included_*"],
                "include_entities": ["binary_sensor.included"],
                "exclude_domains": ["light"],
                "exclude_entity_globs": ["sensor.excluded_*"],
                "exclude_entities": ["binary_sensor.excluded"],
            },
        }
    }
    assert await async_setup_component(hass, google_pubsub.DOMAIN, config)
    await hass.async_block_till_done()
    # Component subscribes to state-changed events on the bus
    assert hass.bus.listen.called
    assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0]
    # Credentials are loaded exactly once from the (mocked) joined path
    assert mock_client.PublisherClient.from_service_account_json.call_count == 1
    assert (
        mock_client.PublisherClient.from_service_account_json.call_args[0][0] == "path"
    )
FilterTest = namedtuple("FilterTest", "id should_pass")
def make_event(entity_id):
    """Make a mock event for test."""
    # Build a fake state whose domain is derived from the entity id
    state = mock.MagicMock(
        state="not blank",
        domain=split_entity_id(entity_id)[0],
        entity_id=entity_id,
        object_id="entity",
        attributes={},
    )
    return mock.MagicMock(data={"new_state": state}, time_fired=12345)
async def _setup(hass, filter_config):
    """Shared set up for filtering tests."""
    config = {
        google_pubsub.DOMAIN: {
            "project_id": "proj",
            "topic_name": "topic",
            "credentials_json": "creds",
            "filter": filter_config,
        }
    }
    assert await async_setup_component(hass, google_pubsub.DOMAIN, config)
    await hass.async_block_till_done()
    # Return the state-changed handler the component registered on the bus.
    return hass.bus.listen.call_args_list[0][0][1]
async def test_allowlist(hass, mock_client):
    """Test an allowlist only config."""
    handler_method = await _setup(
        hass,
        {
            "include_domains": ["light"],
            "include_entity_globs": ["sensor.included_*"],
            "include_entities": ["binary_sensor.included"],
        },
    )
    publish_client = mock_client.PublisherClient.from_service_account_json("path")
    cases = [
        FilterTest("climate.excluded", False),
        FilterTest("light.included", True),
        FilterTest("sensor.excluded_test", False),
        FilterTest("sensor.included_test", True),
        FilterTest("binary_sensor.included", True),
        FilterTest("binary_sensor.excluded", False),
    ]
    for case in cases:
        handler_method(make_event(case.id))
        # Exactly one publish call means the event passed the filter
        assert case.should_pass == (publish_client.publish.call_count == 1)
        publish_client.publish.reset_mock()
async def test_denylist(hass, mock_client):
    """Test a denylist only config."""
    handler_method = await _setup(
        hass,
        {
            "exclude_domains": ["climate"],
            "exclude_entity_globs": ["sensor.excluded_*"],
            "exclude_entities": ["binary_sensor.excluded"],
        },
    )
    publish_client = mock_client.PublisherClient.from_service_account_json("path")
    cases = [
        FilterTest("climate.excluded", False),
        FilterTest("light.included", True),
        FilterTest("sensor.excluded_test", False),
        FilterTest("sensor.included_test", True),
        FilterTest("binary_sensor.included", True),
        FilterTest("binary_sensor.excluded", False),
    ]
    for case in cases:
        handler_method(make_event(case.id))
        # Exactly one publish call means the event passed the filter
        assert case.should_pass == (publish_client.publish.call_count == 1)
        publish_client.publish.reset_mock()
async def test_filtered_allowlist(hass, mock_client):
    """Test an allowlist config with a filtering denylist."""
    handler_method = await _setup(
        hass,
        {
            "include_domains": ["light"],
            "include_entity_globs": ["*.included_*"],
            "exclude_domains": ["climate"],
            "exclude_entity_globs": ["*.excluded_*"],
            "exclude_entities": ["light.excluded"],
        },
    )
    publish_client = mock_client.PublisherClient.from_service_account_json("path")
    cases = [
        FilterTest("light.included", True),
        FilterTest("light.excluded_test", False),
        FilterTest("light.excluded", False),
        FilterTest("sensor.included_test", True),
        FilterTest("climate.included_test", False),
    ]
    for case in cases:
        handler_method(make_event(case.id))
        # Exactly one publish call means the event passed the filter
        assert case.should_pass == (publish_client.publish.call_count == 1)
        publish_client.publish.reset_mock()
async def test_filtered_denylist(hass, mock_client):
    """Test a denylist config with a filtering allowlist."""
    handler_method = await _setup(
        hass,
        {
            "include_entities": ["climate.included", "sensor.excluded_test"],
            "exclude_domains": ["climate"],
            "exclude_entity_globs": ["*.excluded_*"],
            "exclude_entities": ["light.excluded"],
        },
    )
    publish_client = mock_client.PublisherClient.from_service_account_json("path")
    cases = [
        FilterTest("climate.excluded", False),
        FilterTest("climate.included", True),
        FilterTest("switch.excluded_test", False),
        FilterTest("sensor.excluded_test", True),
        FilterTest("light.excluded", False),
        FilterTest("light.included", True),
    ]
    for case in cases:
        handler_method(make_event(case.id))
        # Exactly one publish call means the event passed the filter
        assert case.should_pass == (publish_client.publish.call_count == 1)
        publish_client.publish.reset_mock()
| |
#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data model classes for the Email Settings API."""
__author__ = 'Claudio Cherubino <ccherubino@google.com>'
import atom.data
import gdata.apps
import gdata.apps_property
import gdata.data
# This is required to work around a naming conflict between the Google
# Spreadsheets API and Python's built-in property function
pyproperty = property

# The apps:property label of the label property
LABEL_NAME = 'label'

# The apps:property from of the filter property
FILTER_FROM_NAME = 'from'

# The apps:property to of the filter property
FILTER_TO_NAME = 'to'

# The apps:property subject of the filter property
FILTER_SUBJECT_NAME = 'subject'

# The apps:property hasTheWord of the filter property
FILTER_HAS_THE_WORD_NAME = 'hasTheWord'

# The apps:property doesNotHaveTheWord of the filter property
FILTER_DOES_NOT_HAVE_THE_WORD_NAME = 'doesNotHaveTheWord'

# The apps:property hasAttachment of the filter property
FILTER_HAS_ATTACHMENTS_NAME = 'hasAttachment'

# The apps:property label of the filter action property
FILTER_LABEL = 'label'

# The apps:property shouldMarkAsRead of the filter action property
FILTER_MARK_AS_READ = 'shouldMarkAsRead'

# The apps:property shouldArchive of the filter action property
FILTER_ARCHIVE = 'shouldArchive'

# The apps:property name of the send-as alias property
SENDAS_ALIAS_NAME = 'name'

# The apps:property address of the send-as alias property
SENDAS_ALIAS_ADDRESS = 'address'

# The apps:property replyTo of the send-as alias property
SENDAS_ALIAS_REPLY_TO = 'replyTo'

# The apps:property makeDefault of the send-as alias property
SENDAS_ALIAS_MAKE_DEFAULT = 'makeDefault'

# The apps:property enable of the webclip property
WEBCLIP_ENABLE = 'enable'

# The apps:property enable of the forwarding property
FORWARDING_ENABLE = 'enable'

# The apps:property forwardTo of the forwarding property
FORWARDING_TO = 'forwardTo'

# The apps:property action of the forwarding property
FORWARDING_ACTION = 'action'

# The apps:property enable of the POP property
POP_ENABLE = 'enable'

# The apps:property enableFor of the POP property
POP_ENABLE_FOR = 'enableFor'

# The apps:property action of the POP property
POP_ACTION = 'action'

# The apps:property enable of the IMAP property
IMAP_ENABLE = 'enable'

# The apps:property enable of the vacation responder property
VACATION_RESPONDER_ENABLE = 'enable'

# The apps:property subject of the vacation responder property
VACATION_RESPONDER_SUBJECT = 'subject'

# The apps:property message of the vacation responder property
VACATION_RESPONDER_MESSAGE = 'message'

# The apps:property contactsOnly of the vacation responder property
VACATION_RESPONDER_CONTACTS_ONLY = 'contactsOnly'

# The apps:property signature of the signature property
SIGNATURE_VALUE = 'signature'

# The apps:property language of the language property
LANGUAGE_TAG = 'language'

# The apps:property pageSize of the general settings property
GENERAL_PAGE_SIZE = 'pageSize'

# The apps:property shortcuts of the general settings property
GENERAL_SHORTCUTS = 'shortcuts'

# The apps:property arrows of the general settings property
GENERAL_ARROWS = 'arrows'

# The apps:property snippets of the general settings property
GENERAL_SNIPPETS = 'snippets'

# The apps:property unicode of the general settings property
GENERAL_UNICODE = 'unicode'
class EmailSettingsEntry(gdata.data.GDEntry):
    """Represents an Email Settings entry in object form."""

    property = [gdata.apps_property.AppsProperty]

    def _GetProperty(self, name):
        """Get the apps:property value with the given name.

        Args:
          name: string Name of the apps:property value to get.

        Returns:
          The apps:property value with the given name, or None if the name
          was invalid.
        """
        # Linear scan: entries carry only a handful of properties.
        for prop in self.property:
            if prop.name == name:
                return prop.value
        return None

    def _SetProperty(self, name, value):
        """Set the apps:property value with the given name to the given value.

        Args:
          name: string Name of the apps:property value to set.
          value: string Value to give the apps:property value with the
              given name.
        """
        # Update in place if a property with this name already exists.
        for prop in self.property:
            if prop.name == name:
                prop.value = value
                return
        # Otherwise append a new property element.
        self.property.append(
            gdata.apps_property.AppsProperty(name=name, value=value))

    def find_edit_link(self):
        """Return the URI used for edit (PUT/DELETE) requests on this entry."""
        return self.uri
class EmailSettingsLabel(EmailSettingsEntry):
    """Represents a Label in object form."""

    def GetName(self):
        """Return the label's name as a string, or None if unset."""
        return self._GetProperty(LABEL_NAME)

    def SetName(self, value):
        """Set the label's name.

        Args:
          value: string The new label name to give this object.
        """
        self._SetProperty(LABEL_NAME, value)

    name = pyproperty(GetName, SetName)

    def __init__(self, uri=None, name=None, *args, **kwargs):
        """Constructs a new EmailSettingsLabel object with the given arguments.

        Args:
          uri: string (optional) The uri of this object for HTTP requests.
          name: string (optional) The name to give this new object.
          args: The other parameters to pass to the gdata.entry.GDEntry
              constructor.
          kwargs: The other parameters to pass to the gdata.entry.GDEntry
              constructor.
        """
        super(EmailSettingsLabel, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        if name:
            self.name = name
class EmailSettingsFilter(EmailSettingsEntry):
    """Represents an Email Settings Filter in object form."""

    def GetFrom(self):
        """Return the filter's From criterion as a string, or None."""
        return self._GetProperty(FILTER_FROM_NAME)

    def SetFrom(self, value):
        """Set the filter's From criterion to the given string value."""
        self._SetProperty(FILTER_FROM_NAME, value)

    from_address = pyproperty(GetFrom, SetFrom)

    def GetTo(self):
        """Return the filter's To criterion as a string, or None."""
        return self._GetProperty(FILTER_TO_NAME)

    def SetTo(self, value):
        """Set the filter's To criterion to the given string value."""
        self._SetProperty(FILTER_TO_NAME, value)

    to_address = pyproperty(GetTo, SetTo)

    def GetSubject(self):
        """Return the filter's Subject criterion as a string, or None."""
        return self._GetProperty(FILTER_SUBJECT_NAME)

    def SetSubject(self, value):
        """Set the filter's Subject criterion to the given string value."""
        self._SetProperty(FILTER_SUBJECT_NAME, value)

    subject = pyproperty(GetSubject, SetSubject)

    def GetHasTheWord(self):
        """Return the filter's HasTheWord criterion as a string, or None."""
        return self._GetProperty(FILTER_HAS_THE_WORD_NAME)

    def SetHasTheWord(self, value):
        """Set the filter's HasTheWord criterion to the given string value."""
        self._SetProperty(FILTER_HAS_THE_WORD_NAME, value)

    has_the_word = pyproperty(GetHasTheWord, SetHasTheWord)

    def GetDoesNotHaveTheWord(self):
        """Return the filter's DoesNotHaveTheWord criterion, or None."""
        return self._GetProperty(FILTER_DOES_NOT_HAVE_THE_WORD_NAME)

    def SetDoesNotHaveTheWord(self, value):
        """Set the filter's DoesNotHaveTheWord criterion."""
        self._SetProperty(FILTER_DOES_NOT_HAVE_THE_WORD_NAME, value)

    does_not_have_the_word = pyproperty(GetDoesNotHaveTheWord,
                                        SetDoesNotHaveTheWord)

    def GetHasAttachments(self):
        """Return the filter's HasAttachments flag as a string, or None."""
        return self._GetProperty(FILTER_HAS_ATTACHMENTS_NAME)

    def SetHasAttachments(self, value):
        """Set the filter's HasAttachments flag."""
        self._SetProperty(FILTER_HAS_ATTACHMENTS_NAME, value)

    has_attachments = pyproperty(GetHasAttachments,
                                 SetHasAttachments)

    def GetLabel(self):
        """Return the label applied by the filter action, or None."""
        return self._GetProperty(FILTER_LABEL)

    def SetLabel(self, value):
        """Set the label applied by the filter action."""
        self._SetProperty(FILTER_LABEL, value)

    label = pyproperty(GetLabel, SetLabel)

    def GetMarkAsRead(self):
        """Return the filter action's MarkAsRead flag as a string, or None."""
        return self._GetProperty(FILTER_MARK_AS_READ)

    def SetMarkAsRead(self, value):
        """Set the filter action's MarkAsRead flag."""
        self._SetProperty(FILTER_MARK_AS_READ, value)

    mark_as_read = pyproperty(GetMarkAsRead, SetMarkAsRead)

    def GetArchive(self):
        """Return the filter action's Archive flag as a string, or None."""
        return self._GetProperty(FILTER_ARCHIVE)

    def SetArchive(self, value):
        """Set the filter action's Archive flag."""
        self._SetProperty(FILTER_ARCHIVE, value)

    archive = pyproperty(GetArchive, SetArchive)

    def __init__(self, uri=None, from_address=None, to_address=None,
                 subject=None, has_the_word=None, does_not_have_the_word=None,
                 has_attachments=None, label=None, mark_as_read=None,
                 archive=None, *args, **kwargs):
        """Constructs a new EmailSettingsFilter object with the given
        arguments.

        Args:
          uri: string (optional) The uri of this object for HTTP requests.
          from_address: string (optional) The source email address for the
              filter.
          to_address: string (optional) The destination email address for
              the filter.
          subject: string (optional) The value the email must have in its
              subject to be filtered.
          has_the_word: string (optional) The value the email must have in
              its subject or body to be filtered.
          does_not_have_the_word: string (optional) The value the email
              cannot have in its subject or body to be filtered.
          has_attachments: Boolean (optional) Whether or not the email must
              have an attachment to be filtered.
          label: string (optional) The name of the label to apply to
              messages matching the filter criteria.
          mark_as_read: Boolean (optional) Whether or not to mark messages
              matching the filter criteria as read.
          archive: Boolean (optional) Whether or not to move messages
              matching to Archived state.
          args: The other parameters to pass to the gdata.entry.GDEntry
              constructor.
          kwargs: The other parameters to pass to the gdata.entry.GDEntry
              constructor.
        """
        super(EmailSettingsFilter, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        # String criteria are stored via the property setters above when
        # truthy; boolean flags are stringified so False is still recorded.
        if from_address:
            self.from_address = from_address
        if to_address:
            self.to_address = to_address
        if subject:
            self.subject = subject
        if has_the_word:
            self.has_the_word = has_the_word
        if does_not_have_the_word:
            self.does_not_have_the_word = does_not_have_the_word
        if has_attachments is not None:
            self.has_attachments = str(has_attachments)
        if label:
            self.label = label
        if mark_as_read is not None:
            self.mark_as_read = str(mark_as_read)
        if archive is not None:
            self.archive = str(archive)
class EmailSettingsSendAsAlias(EmailSettingsEntry):
    """Represents an Email Settings send-as Alias in object form."""

    def GetName(self):
        """Return the alias's display Name as a string, or None."""
        return self._GetProperty(SENDAS_ALIAS_NAME)

    def SetName(self, value):
        """Set the alias's display Name to the given string value."""
        self._SetProperty(SENDAS_ALIAS_NAME, value)

    name = pyproperty(GetName, SetName)

    def GetAddress(self):
        """Return the alias's email Address as a string, or None."""
        return self._GetProperty(SENDAS_ALIAS_ADDRESS)

    def SetAddress(self, value):
        """Set the alias's email Address to the given string value."""
        self._SetProperty(SENDAS_ALIAS_ADDRESS, value)

    address = pyproperty(GetAddress, SetAddress)

    def GetReplyTo(self):
        """Return the alias's ReplyTo address as a string, or None."""
        return self._GetProperty(SENDAS_ALIAS_REPLY_TO)

    def SetReplyTo(self, value):
        """Set the alias's ReplyTo address to the given string value."""
        self._SetProperty(SENDAS_ALIAS_REPLY_TO, value)

    reply_to = pyproperty(GetReplyTo, SetReplyTo)

    def GetMakeDefault(self):
        """Return the alias's MakeDefault value as a string, or None."""
        return self._GetProperty(SENDAS_ALIAS_MAKE_DEFAULT)

    def SetMakeDefault(self, value):
        """Set the alias's MakeDefault value.

        Args:
          value: string The new MakeDefault value to give this object.
        """
        self._SetProperty(SENDAS_ALIAS_MAKE_DEFAULT, value)

    make_default = pyproperty(GetMakeDefault, SetMakeDefault)

    def __init__(self, uri=None, name=None, address=None, reply_to=None,
                 make_default=None, *args, **kwargs):
        """Constructs a new EmailSettingsSendAsAlias object with the given
        arguments.

        Args:
          uri: string (optional) The uri of this object for HTTP requests.
          name: string (optional) The name that will appear in the "From"
              field for this user.
          address: string (optional) The email address that appears as the
              origination address for emails sent by this user.
          reply_to: string (optional) The address to be used as the reply-to
              address in email sent using the alias.
          make_default: Boolean (optional) Whether or not this alias should
              become the default alias for this user.
          args: The other parameters to pass to the gdata.entry.GDEntry
              constructor.
          kwargs: The other parameters to pass to the gdata.entry.GDEntry
              constructor.
        """
        super(EmailSettingsSendAsAlias, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        if name:
            self.name = name
        if address:
            self.address = address
        if reply_to:
            self.reply_to = reply_to
        # Stringified so an explicit False is still recorded.
        if make_default is not None:
            self.make_default = str(make_default)
class EmailSettingsWebClip(EmailSettingsEntry):
    """The Gmail web-clips setting of a user, in object form."""

    def __init__(self, uri=None, enable=None, *args, **kwargs):
        """Create a new EmailSettingsWebClip entry.

        Args:
            uri: string (optional) The uri of this object for HTTP requests.
            enable: Boolean (optional) Whether to enable showing Web clips.
            args: The other parameters to pass to gdata.entry.GDEntry constructor.
            kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
        """
        super(EmailSettingsWebClip, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        # The API transports booleans as strings, so stringify explicitly.
        if enable is not None:
            self.enable = str(enable)

    def GetEnable(self):
        """Return the Enable value of this WebClip entry, or None."""
        return self._GetProperty(WEBCLIP_ENABLE)

    def SetEnable(self, value):
        """Set the Enable value of this WebClip entry.

        Args:
            value: string The new Enable value to give this object.
        """
        self._SetProperty(WEBCLIP_ENABLE, value)

    enable = pyproperty(GetEnable, SetEnable)
class EmailSettingsForwarding(EmailSettingsEntry):
    """A user's mail-forwarding settings, in object form."""

    def __init__(self, uri=None, enable=None, forward_to=None, action=None,
                 *args, **kwargs):
        """Create a new EmailSettingsForwarding entry.

        Args:
            uri: string (optional) The uri of this object for HTTP requests.
            enable: Boolean (optional) Whether to enable incoming email
                forwarding.
            forward_to: string (optional) The address email will be forwarded to.
            action: string (optional) The action to perform after forwarding
                an email ("KEEP", "ARCHIVE", "DELETE").
            args: The other parameters to pass to gdata.entry.GDEntry constructor.
            kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
        """
        super(EmailSettingsForwarding, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        # The API transports booleans as strings, so stringify explicitly.
        if enable is not None:
            self.enable = str(enable)
        if forward_to:
            self.forward_to = forward_to
        if action:
            self.action = action

    def GetEnable(self):
        """Return the Enable value of this Forwarding entry, or None."""
        return self._GetProperty(FORWARDING_ENABLE)

    def SetEnable(self, value):
        """Set the Enable value of this Forwarding entry.

        Args:
            value: string The new Enable value to give this object.
        """
        self._SetProperty(FORWARDING_ENABLE, value)

    enable = pyproperty(GetEnable, SetEnable)

    def GetForwardTo(self):
        """Return the ForwardTo value of this Forwarding entry, or None."""
        return self._GetProperty(FORWARDING_TO)

    def SetForwardTo(self, value):
        """Set the ForwardTo value of this Forwarding entry.

        Args:
            value: string The new ForwardTo value to give this object.
        """
        self._SetProperty(FORWARDING_TO, value)

    forward_to = pyproperty(GetForwardTo, SetForwardTo)

    def GetAction(self):
        """Return the Action value of this Forwarding entry, or None."""
        return self._GetProperty(FORWARDING_ACTION)

    def SetAction(self, value):
        """Set the Action value of this Forwarding entry.

        Args:
            value: string The new Action value to give this object.
        """
        self._SetProperty(FORWARDING_ACTION, value)

    action = pyproperty(GetAction, SetAction)
class EmailSettingsPop(EmailSettingsEntry):
    """A user's POP access settings, in object form."""

    def __init__(self, uri=None, enable=None, enable_for=None,
                 action=None, *args, **kwargs):
        """Create a new EmailSettingsPop entry.

        Args:
            uri: string (optional) The uri of this object for HTTP requests.
            enable: Boolean (optional) Whether to enable incoming POP3 access.
            enable_for: string (optional) Whether to enable POP3 for all mail
                ("ALL_MAIL"), or mail from now on ("MAIL_FROM_NOW_ON").
            action: string (optional) What Google Mail should do with its copy
                of the email after it is retrieved using POP
                ("KEEP", "ARCHIVE", or "DELETE").
            args: The other parameters to pass to gdata.entry.GDEntry constructor.
            kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
        """
        super(EmailSettingsPop, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        # The API transports booleans as strings, so stringify explicitly.
        if enable is not None:
            self.enable = str(enable)
        if enable_for:
            self.enable_for = enable_for
        if action:
            self.action = action

    def GetEnable(self):
        """Return the Enable value of this POP entry, or None."""
        return self._GetProperty(POP_ENABLE)

    def SetEnable(self, value):
        """Set the Enable value of this POP entry.

        Args:
            value: string The new Enable value to give this object.
        """
        self._SetProperty(POP_ENABLE, value)

    enable = pyproperty(GetEnable, SetEnable)

    def GetEnableFor(self):
        """Return the EnableFor value of this POP entry, or None."""
        return self._GetProperty(POP_ENABLE_FOR)

    def SetEnableFor(self, value):
        """Set the EnableFor value of this POP entry.

        Args:
            value: string The new EnableFor value to give this object.
        """
        self._SetProperty(POP_ENABLE_FOR, value)

    enable_for = pyproperty(GetEnableFor, SetEnableFor)

    def GetPopAction(self):
        """Return the Action value of this POP entry, or None."""
        return self._GetProperty(POP_ACTION)

    def SetPopAction(self, value):
        """Set the Action value of this POP entry.

        Args:
            value: string The new Action value to give this object.
        """
        self._SetProperty(POP_ACTION, value)

    action = pyproperty(GetPopAction, SetPopAction)
class EmailSettingsImap(EmailSettingsEntry):
    """A user's IMAP access settings, in object form."""

    def __init__(self, uri=None, enable=None, *args, **kwargs):
        """Create a new EmailSettingsImap entry.

        Args:
            uri: string (optional) The uri of this object for HTTP requests.
            enable: Boolean (optional) Whether to enable IMAP access.
            args: The other parameters to pass to gdata.entry.GDEntry constructor.
            kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
        """
        super(EmailSettingsImap, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        # The API transports booleans as strings, so stringify explicitly.
        if enable is not None:
            self.enable = str(enable)

    def GetEnable(self):
        """Return the Enable value of this IMAP entry, or None."""
        return self._GetProperty(IMAP_ENABLE)

    def SetEnable(self, value):
        """Set the Enable value of this IMAP entry.

        Args:
            value: string The new Enable value to give this object.
        """
        self._SetProperty(IMAP_ENABLE, value)

    enable = pyproperty(GetEnable, SetEnable)
class EmailSettingsVacationResponder(EmailSettingsEntry):
    """Represents Vacation Responder settings in object form."""

    def GetEnable(self):
        """Get the Enable value of the Vacation Responder object.

        Returns:
            The Enable value of this Vacation Responder object as a
            string or None.
        """
        return self._GetProperty(VACATION_RESPONDER_ENABLE)

    def SetEnable(self, value):
        """Set the Enable value of this Vacation Responder object.

        Args:
            value: string The new Enable value to give this object.
        """
        self._SetProperty(VACATION_RESPONDER_ENABLE, value)

    enable = pyproperty(GetEnable, SetEnable)

    def GetSubject(self):
        """Get the Subject value of the Vacation Responder object.

        Returns:
            The Subject value of this Vacation Responder object as a
            string or None.
        """
        return self._GetProperty(VACATION_RESPONDER_SUBJECT)

    def SetSubject(self, value):
        """Set the Subject value of this Vacation Responder object.

        Args:
            value: string The new Subject value to give this object.
        """
        self._SetProperty(VACATION_RESPONDER_SUBJECT, value)

    subject = pyproperty(GetSubject, SetSubject)

    def GetMessage(self):
        """Get the Message value of the Vacation Responder object.

        Returns:
            The Message value of this Vacation Responder object as a
            string or None.
        """
        return self._GetProperty(VACATION_RESPONDER_MESSAGE)

    def SetMessage(self, value):
        """Set the Message value of this Vacation Responder object.

        Args:
            value: string The new Message value to give this object.
        """
        self._SetProperty(VACATION_RESPONDER_MESSAGE, value)

    message = pyproperty(GetMessage, SetMessage)

    def GetContactsOnly(self):
        """Get the ContactsOnly value of the Vacation Responder object.

        Returns:
            The ContactsOnly value of this Vacation Responder object as a
            string or None.
        """
        # BUG FIX: this getter previously read VACATION_RESPONDER_ENABLE,
        # so it returned the Enable flag instead of the ContactsOnly flag.
        # It must use the same property name the setter writes.
        return self._GetProperty(VACATION_RESPONDER_CONTACTS_ONLY)

    def SetContactsOnly(self, value):
        """Set the ContactsOnly value of this Vacation Responder object.

        Args:
            value: string The new ContactsOnly value to give this object.
        """
        self._SetProperty(VACATION_RESPONDER_CONTACTS_ONLY, value)

    contacts_only = pyproperty(GetContactsOnly, SetContactsOnly)

    def __init__(self, uri=None, enable=None, subject=None,
                 message=None, contacts_only=None, *args, **kwargs):
        """Constructs a new EmailSettingsVacationResponder object with the
        given arguments.

        Args:
            uri: string (optional) The uri of this object for HTTP requests.
            enable: Boolean (optional) Whether to enable the vacation responder.
            subject: string (optional) The subject line of the vacation responder
                autoresponse.
            message: string (optional) The message body of the vacation responder
                autoresponse.
            contacts_only: Boolean (optional) Whether to only send autoresponses
                to known contacts.
            args: The other parameters to pass to gdata.entry.GDEntry constructor.
            kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
        """
        super(EmailSettingsVacationResponder, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        # Booleans are sent to the API as strings.
        if enable is not None:
            self.enable = str(enable)
        if subject:
            self.subject = subject
        if message:
            self.message = message
        if contacts_only is not None:
            self.contacts_only = str(contacts_only)
class EmailSettingsSignature(EmailSettingsEntry):
    """A user's email signature, in object form."""

    def __init__(self, uri=None, signature=None, *args, **kwargs):
        """Create a new EmailSettingsSignature entry.

        Args:
            uri: string (optional) The uri of this object for HTTP requests.
            signature: string (optional) The signature to be appended to
                outgoing messages.
            args: The other parameters to pass to gdata.entry.GDEntry constructor.
            kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
        """
        super(EmailSettingsSignature, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        if signature is not None:
            self.signature_value = signature

    def GetValue(self):
        """Return the signature text as a string, or None.

        A single space is the API's sentinel for an empty signature, so it
        is translated back to the empty string here.
        """
        value = self._GetProperty(SIGNATURE_VALUE)
        if value == ' ':  # hack to support empty signature
            return ''
        return value

    def SetValue(self, value):
        """Store the signature text.

        Args:
            value: string The new signature value; the empty string is stored
                as a single space (the API's empty-signature sentinel).
        """
        if value == '':  # hack to support empty signature
            value = ' '
        self._SetProperty(SIGNATURE_VALUE, value)

    signature_value = pyproperty(GetValue, SetValue)
class EmailSettingsLanguage(EmailSettingsEntry):
    """A user's Gmail display-language setting, in object form."""

    def __init__(self, uri=None, language=None, *args, **kwargs):
        """Create a new EmailSettingsLanguage entry.

        Args:
            uri: string (optional) The uri of this object for HTTP requests.
            language: string (optional) The language tag for Google Mail's
                display language.
            args: The other parameters to pass to gdata.entry.GDEntry constructor.
            kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
        """
        super(EmailSettingsLanguage, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        if language:
            self.language_tag = language

    def GetLanguage(self):
        """Return the language tag of this entry as a string, or None."""
        return self._GetProperty(LANGUAGE_TAG)

    def SetLanguage(self, value):
        """Set the language tag of this entry.

        Args:
            value: string The new tag value to give this object.
        """
        self._SetProperty(LANGUAGE_TAG, value)

    language_tag = pyproperty(GetLanguage, SetLanguage)
class EmailSettingsGeneral(EmailSettingsEntry):
    """A user's general Gmail settings, in object form."""

    def __init__(self, uri=None, page_size=None, shortcuts=None,
                 arrows=None, snippets=None, use_unicode=None, *args, **kwargs):
        """Create a new EmailSettingsGeneral entry.

        Args:
            uri: string (optional) The uri of this object for HTTP requests.
            page_size: int (optional) The number of conversations to be shown
                per page.
            shortcuts: Boolean (optional) Whether to enable keyboard shortcuts.
            arrows: Boolean (optional) Whether to display arrow-shaped personal
                indicators next to email sent specifically to the user.
            snippets: Boolean (optional) Whether to display snippets of the
                messages in the inbox and when searching.
            use_unicode: Boolean (optional) Whether to use UTF-8 (unicode)
                encoding for all outgoing messages.
            args: The other parameters to pass to gdata.entry.GDEntry constructor.
            kwargs: The other parameters to pass to gdata.entry.GDEntry constructor.
        """
        super(EmailSettingsGeneral, self).__init__(*args, **kwargs)
        if uri:
            self.uri = uri
        # All values are transported as strings by the API.
        if page_size is not None:
            self.page_size = str(page_size)
        if shortcuts is not None:
            self.shortcuts = str(shortcuts)
        if arrows is not None:
            self.arrows = str(arrows)
        if snippets is not None:
            self.snippets = str(snippets)
        if use_unicode is not None:
            self.use_unicode = str(use_unicode)

    def GetPageSize(self):
        """Return the Page Size value of this entry as a string, or None."""
        return self._GetProperty(GENERAL_PAGE_SIZE)

    def SetPageSize(self, value):
        """Set the Page Size value of this entry.

        Args:
            value: string The new Page Size value to give this object.
        """
        self._SetProperty(GENERAL_PAGE_SIZE, value)

    page_size = pyproperty(GetPageSize, SetPageSize)

    def GetShortcuts(self):
        """Return the Shortcuts value of this entry as a string, or None."""
        return self._GetProperty(GENERAL_SHORTCUTS)

    def SetShortcuts(self, value):
        """Set the Shortcuts value of this entry.

        Args:
            value: string The new Shortcuts value to give this object.
        """
        self._SetProperty(GENERAL_SHORTCUTS, value)

    shortcuts = pyproperty(GetShortcuts, SetShortcuts)

    def GetArrows(self):
        """Return the Arrows value of this entry as a string, or None."""
        return self._GetProperty(GENERAL_ARROWS)

    def SetArrows(self, value):
        """Set the Arrows value of this entry.

        Args:
            value: string The new Arrows value to give this object.
        """
        self._SetProperty(GENERAL_ARROWS, value)

    arrows = pyproperty(GetArrows, SetArrows)

    def GetSnippets(self):
        """Return the Snippets value of this entry as a string, or None."""
        return self._GetProperty(GENERAL_SNIPPETS)

    def SetSnippets(self, value):
        """Set the Snippets value of this entry.

        Args:
            value: string The new Snippets value to give this object.
        """
        self._SetProperty(GENERAL_SNIPPETS, value)

    snippets = pyproperty(GetSnippets, SetSnippets)

    def GetUnicode(self):
        """Return the Unicode value of this entry as a string, or None."""
        return self._GetProperty(GENERAL_UNICODE)

    def SetUnicode(self, value):
        """Set the Unicode value of this entry.

        Args:
            value: string The new Unicode value to give this object.
        """
        self._SetProperty(GENERAL_UNICODE, value)

    use_unicode = pyproperty(GetUnicode, SetUnicode)
| |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import pandas as pd
from pandas.api.types import CategoricalDtype
from pyspark import pandas as ps
from pyspark.pandas.tests.data_type_ops.testing_utils import TestCasesUtils
from pyspark.testing.pandasutils import PandasOnSparkTestCase
class DateOpsTest(PandasOnSparkTestCase, TestCasesUtils):
    """Tests operators on pandas-on-Spark Series of datetime.date values.

    Dates support subtraction (yielding a day count) and comparisons;
    every other arithmetic/bitwise operator must raise TypeError.
    """

    @property
    def pser(self):
        """A plain pandas Series of dates used as the reference."""
        return pd.Series(
            [datetime.date(1994, 1, 31), datetime.date(1994, 2, 1), datetime.date(1994, 2, 2)]
        )

    @property
    def psser(self):
        """The pandas-on-Spark counterpart of ``pser``."""
        return ps.from_pandas(self.pser)

    @property
    def date_pdf(self):
        """A two-column pandas DataFrame of dates for binary operator tests."""
        psers = {
            "this": self.pser,
            "that": pd.Series(
                [datetime.date(2000, 1, 31), datetime.date(1994, 3, 1), datetime.date(1990, 2, 2)]
            ),
        }
        return pd.concat(psers, axis=1)

    @property
    def date_psdf(self):
        """The pandas-on-Spark counterpart of ``date_pdf``."""
        return ps.from_pandas(self.date_pdf)

    @property
    def some_date(self):
        """A scalar date operand."""
        return datetime.date(1994, 1, 1)

    def test_add(self):
        self.assertRaises(TypeError, lambda: self.psser + "x")
        self.assertRaises(TypeError, lambda: self.psser + 1)
        self.assertRaises(TypeError, lambda: self.psser + self.some_date)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser + psser)

    def test_sub(self):
        self.assertRaises(TypeError, lambda: self.psser - "x")
        self.assertRaises(TypeError, lambda: self.psser - 1)
        # date - date yields a day count on pandas-on-Spark, matching
        # pandas' timedelta ``.dt.days``.
        self.assert_eq(
            (self.pser - self.some_date).dt.days,
            self.psser - self.some_date,
        )
        pdf, psdf = self.pdf, self.psdf
        for col in self.df_cols:
            if col == "date":
                self.assert_eq((pdf["date"] - pdf[col]).dt.days, psdf["date"] - psdf[col])
            else:
                self.assertRaises(TypeError, lambda: psdf["date"] - psdf[col])
        pdf, psdf = self.date_pdf, self.date_psdf
        self.assert_eq((pdf["this"] - pdf["that"]).dt.days, psdf["this"] - psdf["that"])

    def test_mul(self):
        self.assertRaises(TypeError, lambda: self.psser * "x")
        self.assertRaises(TypeError, lambda: self.psser * 1)
        self.assertRaises(TypeError, lambda: self.psser * self.some_date)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser * psser)

    def test_truediv(self):
        self.assertRaises(TypeError, lambda: self.psser / "x")
        self.assertRaises(TypeError, lambda: self.psser / 1)
        self.assertRaises(TypeError, lambda: self.psser / self.some_date)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser / psser)

    def test_floordiv(self):
        self.assertRaises(TypeError, lambda: self.psser // "x")
        self.assertRaises(TypeError, lambda: self.psser // 1)
        self.assertRaises(TypeError, lambda: self.psser // self.some_date)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser // psser)

    def test_mod(self):
        self.assertRaises(TypeError, lambda: self.psser % "x")
        self.assertRaises(TypeError, lambda: self.psser % 1)
        self.assertRaises(TypeError, lambda: self.psser % self.some_date)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser % psser)

    def test_pow(self):
        self.assertRaises(TypeError, lambda: self.psser ** "x")
        self.assertRaises(TypeError, lambda: self.psser ** 1)
        self.assertRaises(TypeError, lambda: self.psser ** self.some_date)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser ** psser)

    def test_radd(self):
        self.assertRaises(TypeError, lambda: "x" + self.psser)
        self.assertRaises(TypeError, lambda: 1 + self.psser)
        self.assertRaises(TypeError, lambda: self.some_date + self.psser)

    def test_rsub(self):
        self.assertRaises(TypeError, lambda: "x" - self.psser)
        self.assertRaises(TypeError, lambda: 1 - self.psser)
        self.assert_eq(
            (self.some_date - self.pser).dt.days,
            self.some_date - self.psser,
        )

    def test_rmul(self):
        self.assertRaises(TypeError, lambda: "x" * self.psser)
        self.assertRaises(TypeError, lambda: 1 * self.psser)
        self.assertRaises(TypeError, lambda: self.some_date * self.psser)

    def test_rtruediv(self):
        self.assertRaises(TypeError, lambda: "x" / self.psser)
        self.assertRaises(TypeError, lambda: 1 / self.psser)
        self.assertRaises(TypeError, lambda: self.some_date / self.psser)

    def test_rfloordiv(self):
        self.assertRaises(TypeError, lambda: "x" // self.psser)
        self.assertRaises(TypeError, lambda: 1 // self.psser)
        self.assertRaises(TypeError, lambda: self.some_date // self.psser)

    def test_rmod(self):
        self.assertRaises(TypeError, lambda: 1 % self.psser)
        self.assertRaises(TypeError, lambda: self.some_date % self.psser)

    def test_rpow(self):
        self.assertRaises(TypeError, lambda: "x" ** self.psser)
        self.assertRaises(TypeError, lambda: 1 ** self.psser)
        self.assertRaises(TypeError, lambda: self.some_date ** self.psser)

    def test_and(self):
        self.assertRaises(TypeError, lambda: self.psser & True)
        self.assertRaises(TypeError, lambda: self.psser & False)
        self.assertRaises(TypeError, lambda: self.psser & self.psser)

    def test_rand(self):
        self.assertRaises(TypeError, lambda: True & self.psser)
        self.assertRaises(TypeError, lambda: False & self.psser)

    def test_or(self):
        self.assertRaises(TypeError, lambda: self.psser | True)
        self.assertRaises(TypeError, lambda: self.psser | False)
        self.assertRaises(TypeError, lambda: self.psser | self.psser)

    def test_ror(self):
        self.assertRaises(TypeError, lambda: True | self.psser)
        self.assertRaises(TypeError, lambda: False | self.psser)

    def test_from_to_pandas(self):
        data = [datetime.date(1994, 1, 31), datetime.date(1994, 2, 1), datetime.date(1994, 2, 2)]
        pser = pd.Series(data)
        psser = ps.Series(data)
        self.assert_eq(pser, psser.to_pandas())
        self.assert_eq(ps.from_pandas(pser), psser)

    def test_isnull(self):
        self.assert_eq(self.pser.isnull(), self.psser.isnull())

    def test_astype(self):
        pser = self.pser
        psser = self.psser
        self.assert_eq(pser.astype(str), psser.astype(str))
        self.assert_eq(pser.astype(bool), psser.astype(bool))
        cat_type = CategoricalDtype(categories=["a", "b", "c"])
        self.assert_eq(pser.astype(cat_type), psser.astype(cat_type))

    def test_neg(self):
        self.assertRaises(TypeError, lambda: -self.psser)

    def test_abs(self):
        self.assertRaises(TypeError, lambda: abs(self.psser))

    def test_invert(self):
        self.assertRaises(TypeError, lambda: ~self.psser)

    def test_eq(self):
        pdf, psdf = self.date_pdf, self.date_psdf
        self.assert_eq(pdf["this"] == pdf["that"], psdf["this"] == psdf["that"])
        self.assert_eq(pdf["this"] == pdf["this"], psdf["this"] == psdf["this"])

    def test_ne(self):
        pdf, psdf = self.date_pdf, self.date_psdf
        self.assert_eq(pdf["this"] != pdf["that"], psdf["this"] != psdf["that"])
        self.assert_eq(pdf["this"] != pdf["this"], psdf["this"] != psdf["this"])

    def test_lt(self):
        pdf, psdf = self.date_pdf, self.date_psdf
        # BUG FIX: this test previously used `==` (a copy-paste of test_eq),
        # so the `<` operator on date Series was never exercised.
        self.assert_eq(pdf["this"] < pdf["that"], psdf["this"] < psdf["that"])
        self.assert_eq(pdf["this"] < pdf["this"], psdf["this"] < psdf["this"])

    def test_le(self):
        pdf, psdf = self.date_pdf, self.date_psdf
        self.assert_eq(pdf["this"] <= pdf["that"], psdf["this"] <= psdf["that"])
        self.assert_eq(pdf["this"] <= pdf["this"], psdf["this"] <= psdf["this"])

    def test_gt(self):
        pdf, psdf = self.date_pdf, self.date_psdf
        self.assert_eq(pdf["this"] > pdf["that"], psdf["this"] > psdf["that"])
        self.assert_eq(pdf["this"] > pdf["this"], psdf["this"] > psdf["this"])

    def test_ge(self):
        pdf, psdf = self.date_pdf, self.date_psdf
        self.assert_eq(pdf["this"] >= pdf["that"], psdf["this"] >= psdf["that"])
        self.assert_eq(pdf["this"] >= pdf["this"], psdf["this"] >= psdf["this"])
if __name__ == "__main__":
    import unittest
    from pyspark.pandas.tests.data_type_ops.test_date_ops import *  # noqa: F401

    # Emit XML reports when xmlrunner is installed; otherwise fall back to
    # the default text test runner (testRunner=None).
    try:
        import xmlrunner  # type: ignore[import]
    except ImportError:
        test_runner = None
    else:
        test_runner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2)
    unittest.main(testRunner=test_runner, verbosity=2)
| |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
'''This file generates shell code for the setup.SHELL scripts to set environment variables'''
from __future__ import print_function
import argparse
import copy
import errno
import os
import platform
import sys
# Name of the marker file whose presence identifies a catkin workspace root.
CATKIN_MARKER_FILE = '.catkin'

# Platform detection; drives both variable naming (DYLD vs LD) and the
# shell syntax emitted (export vs set) elsewhere in this script.
system = platform.system()
IS_DARWIN = (system == 'Darwin')
IS_WINDOWS = (system == 'Windows')

# subfolder of workspace prepended to CMAKE_PREFIX_PATH
ENV_VAR_SUBFOLDERS = {
    'CMAKE_PREFIX_PATH': '',
    'CPATH': 'include',
    # macOS uses DYLD_LIBRARY_PATH in place of LD_LIBRARY_PATH.
    'LD_LIBRARY_PATH' if not IS_DARWIN else 'DYLD_LIBRARY_PATH': ['lib', os.path.join('lib', 'x86_64-linux-gnu')],
    'PATH': 'bin',
    'PKG_CONFIG_PATH': [os.path.join('lib', 'pkgconfig'), os.path.join('lib', 'x86_64-linux-gnu', 'pkgconfig')],
    'PYTHONPATH': 'lib/python2.7/dist-packages',
}
def rollback_env_variables(environ, env_var_subfolders):
    '''
    Generate shell code that resets environment variables by unrolling the
    modifications made for every workspace found in CMAKE_PREFIX_PATH.

    Modifications performed by environment hooks are not covered.
    '''
    lines = []
    # Roll back against a snapshot so removals for one variable do not
    # affect the workspace lookup for the next one.
    snapshot = copy.copy(environ)
    for key, subfolders in sorted(env_var_subfolders.items()):
        if not isinstance(subfolders, list):
            subfolders = [subfolders]
        for subfolder in subfolders:
            rolled_back = _rollback_env_variable(snapshot, key, subfolder)
            if rolled_back is not None:
                environ[key] = rolled_back
                lines.append(assignment(key, rolled_back))
    if lines:
        lines.insert(0, comment('reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH'))
    return lines
def _rollback_env_variable(environ, name, subfolder):
    '''
    For each catkin workspace in CMAKE_PREFIX_PATH remove the first entry of
    env[NAME] matching workspace + subfolder.

    :param subfolder: str '' or a subfolder name that may start with '/'
    :returns: the updated value of the environment variable, or None when
        nothing was removed.
    '''
    items = [p for p in environ.get(name, '').split(os.pathsep) if p]
    removed_any = False
    if subfolder:
        # Normalize away a single leading/trailing (alternate) separator.
        if subfolder.startswith(os.path.sep) or (os.path.altsep and subfolder.startswith(os.path.altsep)):
            subfolder = subfolder[1:]
        if subfolder.endswith(os.path.sep) or (os.path.altsep and subfolder.endswith(os.path.altsep)):
            subfolder = subfolder[:-1]
    for ws_path in _get_workspaces(environ, include_fuerte=True, include_non_existing=True):
        target = os.path.join(ws_path, subfolder) if subfolder else ws_path
        for candidate in items:
            # Compare ignoring a single trailing separator on the env entry.
            trimmed = candidate[:-1] if candidate and candidate[-1] in [os.path.sep, os.path.altsep] else candidate
            if trimmed == target:
                items.remove(candidate)
                removed_any = True
                break
    return os.pathsep.join(items) if removed_any else None
def _get_workspaces(environ, include_fuerte=False, include_non_existing=False):
    '''
    Based on CMAKE_PREFIX_PATH return all catkin workspaces.

    :param include_fuerte: The flag if paths starting with '/opt/ros/fuerte'
        should be considered workspaces, ``bool``
    '''
    # All CMAKE_PREFIX_PATH entries, empty components dropped.
    paths = [p for p in environ.get('CMAKE_PREFIX_PATH', '').split(os.pathsep) if p]

    def _is_workspace(path):
        if os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE)):
            return True
        if include_fuerte and path.startswith('/opt/ros/fuerte'):
            return True
        return include_non_existing and not os.path.exists(path)

    return [p for p in paths if _is_workspace(p)]
def prepend_env_variables(environ, env_var_subfolders, workspaces):
    '''
    Generate shell code that prepends the folders of all workspaces to the
    relevant environment variables.
    '''
    lines = [comment('prepend folders of workspaces to environment variables')]
    paths = [p for p in workspaces.split(os.pathsep) if p]
    # CMAKE_PREFIX_PATH gets the workspace roots themselves (no subfolder).
    prefix = _prefix_env_variable(environ, 'CMAKE_PREFIX_PATH', paths, '')
    lines.append(prepend(environ, 'CMAKE_PREFIX_PATH', prefix))
    for key in sorted(k for k in env_var_subfolders.keys() if k != 'CMAKE_PREFIX_PATH'):
        prefix = _prefix_env_variable(environ, key, paths, env_var_subfolders[key])
        lines.append(prepend(environ, key, prefix))
    return lines
def _prefix_env_variable(environ, name, paths, subfolders):
'''
Return the prefix to prepend to the environment variable NAME, adding any path in NEW_PATHS_STR without creating duplicate or empty items.
'''
value = environ[name] if name in environ else ''
environ_paths = [path for path in value.split(os.pathsep) if path]
checked_paths = []
for path in paths:
if not isinstance(subfolders, list):
subfolders = [subfolders]
for subfolder in subfolders:
path_tmp = path
if subfolder:
path_tmp = os.path.join(path_tmp, subfolder)
# exclude any path already in env and any path we already added
if path_tmp not in environ_paths and path_tmp not in checked_paths:
checked_paths.append(path_tmp)
prefix_str = os.pathsep.join(checked_paths)
if prefix_str != '' and environ_paths:
prefix_str += os.pathsep
return prefix_str
def assignment(key, value):
    '''Return a shell statement that assigns *value* to the variable *key*.'''
    if IS_WINDOWS:
        return 'set %s=%s' % (key, value)
    return 'export %s="%s"' % (key, value)
def comment(msg):
    '''Return *msg* formatted as a comment line for the target shell.'''
    if IS_WINDOWS:
        return 'REM %s' % msg
    return '# %s' % msg
def prepend(environ, key, prefix):
    '''Return a shell statement that prepends *prefix* to the variable *key*.'''
    # when the variable is unset or empty a plain assignment is sufficient
    if not environ.get(key):
        return assignment(key, prefix)
    if IS_WINDOWS:
        return 'set %s=%s%%%s%%' % (key, prefix, key)
    return 'export %s="%s$%s"' % (key, prefix, key)
def find_env_hooks(environ, cmake_prefix_path):
    '''
    Generate shell code with found environment hooks
    for the all workspaces.

    :param environ: current environment mapping (CATKIN_SHELL selects
        shell-specific hooks on non-Windows platforms)
    :param cmake_prefix_path: ``os.pathsep``-joined string of prefix paths
    :returns: list of shell assignment statements exposing the hook files
        and their workspaces via _CATKIN_ENVIRONMENT_HOOKS_* variables
    '''
    lines = []
    lines.append(comment('found environment hooks in workspaces'))
    # hooks split into generic (.sh/.bat) and shell-specific (e.g. .bash)
    generic_env_hooks = []
    generic_env_hooks_workspace = []
    specific_env_hooks = []
    specific_env_hooks_workspace = []
    # *_by_filename maps let a later workspace override an earlier hook of
    # the same name (workspaces are visited in reversed, i.e. ascending
    # precedence, order)
    generic_env_hooks_by_filename = {}
    specific_env_hooks_by_filename = {}
    generic_env_hook_ext = 'bat' if IS_WINDOWS else 'sh'
    specific_env_hook_ext = environ['CATKIN_SHELL'] if not IS_WINDOWS and 'CATKIN_SHELL' in environ and environ['CATKIN_SHELL'] else None
    # remove non-workspace paths
    workspaces = [path for path in cmake_prefix_path.split(os.pathsep) if path and os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE))]
    for workspace in reversed(workspaces):
        env_hook_dir = os.path.join(workspace, 'etc', 'catkin', 'profile.d')
        if os.path.isdir(env_hook_dir):
            for filename in sorted(os.listdir(env_hook_dir)):
                if filename.endswith('.%s' % generic_env_hook_ext):
                    # remove previous env hook with same name if present
                    if filename in generic_env_hooks_by_filename:
                        i = generic_env_hooks.index(generic_env_hooks_by_filename[filename])
                        generic_env_hooks.pop(i)
                        generic_env_hooks_workspace.pop(i)
                    # append env hook
                    generic_env_hooks.append(os.path.join(env_hook_dir, filename))
                    generic_env_hooks_workspace.append(workspace)
                    generic_env_hooks_by_filename[filename] = generic_env_hooks[-1]
                elif specific_env_hook_ext is not None and filename.endswith('.%s' % specific_env_hook_ext):
                    # remove previous env hook with same name if present
                    if filename in specific_env_hooks_by_filename:
                        i = specific_env_hooks.index(specific_env_hooks_by_filename[filename])
                        specific_env_hooks.pop(i)
                        specific_env_hooks_workspace.pop(i)
                    # append env hook
                    specific_env_hooks.append(os.path.join(env_hook_dir, filename))
                    specific_env_hooks_workspace.append(workspace)
                    specific_env_hooks_by_filename[filename] = specific_env_hooks[-1]
    # generic hooks run before shell-specific ones
    env_hooks = generic_env_hooks + specific_env_hooks
    env_hooks_workspace = generic_env_hooks_workspace + specific_env_hooks_workspace
    count = len(env_hooks)
    lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_COUNT', count))
    for i in range(count):
        lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_%d' % i, env_hooks[i]))
        lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_%d_WORKSPACE' % i, env_hooks_workspace[i]))
    return lines
def _parse_arguments(args=None):
parser = argparse.ArgumentParser(description='Generates code blocks for the setup.SHELL script.')
parser.add_argument('--extend', action='store_true', help='Skip unsetting previous environment variables to extend context')
return parser.parse_known_args(args=args)[0]
if __name__ == '__main__':
    try:
        try:
            args = _parse_arguments()
        except Exception as e:
            print(e, file=sys.stderr)
            sys.exit(1)
        # environment at generation time (hard-coded by the generator)
        CMAKE_PREFIX_PATH = '/home/trevor/ROS/catkin_ws/devel;/opt/ros/indigo'.split(';')
        # prepend current workspace if not already part of CPP
        base_path = os.path.dirname(__file__)
        if base_path not in CMAKE_PREFIX_PATH:
            CMAKE_PREFIX_PATH.insert(0, base_path)
        CMAKE_PREFIX_PATH = os.pathsep.join(CMAKE_PREFIX_PATH)
        environ = dict(os.environ)
        lines = []
        # without --extend, first undo variables set by a previous setup file
        if not args.extend:
            lines += rollback_env_variables(environ, ENV_VAR_SUBFOLDERS)
        lines += prepend_env_variables(environ, ENV_VAR_SUBFOLDERS, CMAKE_PREFIX_PATH)
        lines += find_env_hooks(environ, CMAKE_PREFIX_PATH)
        print('\n'.join(lines))
        # need to explicitly flush the output
        sys.stdout.flush()
    except IOError as e:
        # and catch potential "broken pipe" if stdout is not writable
        # which can happen when piping the output to a file but the disk is full
        if e.errno == errno.EPIPE:
            print(e, file=sys.stderr)
            sys.exit(2)
        raise
    sys.exit(0)
| |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the extended Facebook Graph API profile
    fields (name parts, locale, contact data and several JSON blobs) to the
    ``facebook_users_user`` table."""

    def forwards(self, orm):
        """Apply the migration: add every new column with a non-null default."""
        # Adding field 'User.first_name'
        db.add_column('facebook_users_user', 'first_name',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=300),
                      keep_default=False)
        # Adding field 'User.last_name'
        db.add_column('facebook_users_user', 'last_name',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=300),
                      keep_default=False)
        # Adding field 'User.middle_name'
        db.add_column('facebook_users_user', 'middle_name',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=300),
                      keep_default=False)
        # Adding field 'User.gender'
        db.add_column('facebook_users_user', 'gender',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=10),
                      keep_default=False)
        # Adding field 'User.locale'
        db.add_column('facebook_users_user', 'locale',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=5),
                      keep_default=False)
        # Adding field 'User.link'
        db.add_column('facebook_users_user', 'link',
                      self.gf('django.db.models.fields.URLField')(default='', max_length=300),
                      keep_default=False)
        # Adding field 'User.cover'
        db.add_column('facebook_users_user', 'cover',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.username'
        db.add_column('facebook_users_user', 'username',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=300),
                      keep_default=False)
        # Adding field 'User.third_party_id'
        db.add_column('facebook_users_user', 'third_party_id',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=300),
                      keep_default=False)
        # Adding field 'User.updated_time'
        db.add_column('facebook_users_user', 'updated_time',
                      self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(1970, 1, 1, 0, 0)),
                      keep_default=False)
        # Adding field 'User.email'
        db.add_column('facebook_users_user', 'email',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=100),
                      keep_default=False)
        # Adding field 'User.timezone'
        db.add_column('facebook_users_user', 'timezone',
                      self.gf('django.db.models.fields.IntegerField')(null=True),
                      keep_default=False)
        # Adding field 'User.bio'
        db.add_column('facebook_users_user', 'bio',
                      self.gf('django.db.models.fields.TextField')(default=''),
                      keep_default=False)
        # Adding field 'User.birthday'
        db.add_column('facebook_users_user', 'birthday',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=300),
                      keep_default=False)
        # Adding field 'User.languages'
        db.add_column('facebook_users_user', 'languages',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.installed'
        db.add_column('facebook_users_user', 'installed',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.verified'
        db.add_column('facebook_users_user', 'verified',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)
        # Adding field 'User.currency'
        db.add_column('facebook_users_user', 'currency',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.devices'
        db.add_column('facebook_users_user', 'devices',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.education'
        db.add_column('facebook_users_user', 'education',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.hometown'
        db.add_column('facebook_users_user', 'hometown',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.interested_in'
        db.add_column('facebook_users_user', 'interested_in',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.location'
        db.add_column('facebook_users_user', 'location',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.payment_pricepoints'
        db.add_column('facebook_users_user', 'payment_pricepoints',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.favorite_athletes'
        db.add_column('facebook_users_user', 'favorite_athletes',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.favorite_teams'
        db.add_column('facebook_users_user', 'favorite_teams',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.political'
        db.add_column('facebook_users_user', 'political',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=100),
                      keep_default=False)
        # Adding field 'User.picture'
        db.add_column('facebook_users_user', 'picture',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=100),
                      keep_default=False)
        # Adding field 'User.quotes'
        db.add_column('facebook_users_user', 'quotes',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=100),
                      keep_default=False)
        # Adding field 'User.relationship_status'
        db.add_column('facebook_users_user', 'relationship_status',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=100),
                      keep_default=False)
        # Adding field 'User.religion'
        db.add_column('facebook_users_user', 'religion',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=100),
                      keep_default=False)
        # Adding field 'User.security_settings'
        db.add_column('facebook_users_user', 'security_settings',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.significant_other'
        db.add_column('facebook_users_user', 'significant_other',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.video_upload_limits'
        db.add_column('facebook_users_user', 'video_upload_limits',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)
        # Adding field 'User.website'
        db.add_column('facebook_users_user', 'website',
                      self.gf('django.db.models.fields.URLField')(default='', max_length=100),
                      keep_default=False)
        # Adding field 'User.work'
        db.add_column('facebook_users_user', 'work',
                      self.gf('annoying.fields.JSONField')(max_length=500, null=True),
                      keep_default=False)

    def backwards(self, orm):
        """Revert the migration: drop every column added in forwards()."""
        # Deleting field 'User.first_name'
        db.delete_column('facebook_users_user', 'first_name')
        # Deleting field 'User.last_name'
        db.delete_column('facebook_users_user', 'last_name')
        # Deleting field 'User.middle_name'
        db.delete_column('facebook_users_user', 'middle_name')
        # Deleting field 'User.gender'
        db.delete_column('facebook_users_user', 'gender')
        # Deleting field 'User.locale'
        db.delete_column('facebook_users_user', 'locale')
        # Deleting field 'User.link'
        db.delete_column('facebook_users_user', 'link')
        # Deleting field 'User.cover'
        db.delete_column('facebook_users_user', 'cover')
        # Deleting field 'User.username'
        db.delete_column('facebook_users_user', 'username')
        # Deleting field 'User.third_party_id'
        db.delete_column('facebook_users_user', 'third_party_id')
        # Deleting field 'User.updated_time'
        db.delete_column('facebook_users_user', 'updated_time')
        # Deleting field 'User.email'
        db.delete_column('facebook_users_user', 'email')
        # Deleting field 'User.timezone'
        db.delete_column('facebook_users_user', 'timezone')
        # Deleting field 'User.bio'
        db.delete_column('facebook_users_user', 'bio')
        # Deleting field 'User.birthday'
        db.delete_column('facebook_users_user', 'birthday')
        # Deleting field 'User.languages'
        db.delete_column('facebook_users_user', 'languages')
        # Deleting field 'User.installed'
        db.delete_column('facebook_users_user', 'installed')
        # Deleting field 'User.verified'
        db.delete_column('facebook_users_user', 'verified')
        # Deleting field 'User.currency'
        db.delete_column('facebook_users_user', 'currency')
        # Deleting field 'User.devices'
        db.delete_column('facebook_users_user', 'devices')
        # Deleting field 'User.education'
        db.delete_column('facebook_users_user', 'education')
        # Deleting field 'User.hometown'
        db.delete_column('facebook_users_user', 'hometown')
        # Deleting field 'User.interested_in'
        db.delete_column('facebook_users_user', 'interested_in')
        # Deleting field 'User.location'
        db.delete_column('facebook_users_user', 'location')
        # Deleting field 'User.payment_pricepoints'
        db.delete_column('facebook_users_user', 'payment_pricepoints')
        # Deleting field 'User.favorite_athletes'
        db.delete_column('facebook_users_user', 'favorite_athletes')
        # Deleting field 'User.favorite_teams'
        db.delete_column('facebook_users_user', 'favorite_teams')
        # Deleting field 'User.political'
        db.delete_column('facebook_users_user', 'political')
        # Deleting field 'User.picture'
        db.delete_column('facebook_users_user', 'picture')
        # Deleting field 'User.quotes'
        db.delete_column('facebook_users_user', 'quotes')
        # Deleting field 'User.relationship_status'
        db.delete_column('facebook_users_user', 'relationship_status')
        # Deleting field 'User.religion'
        db.delete_column('facebook_users_user', 'religion')
        # Deleting field 'User.security_settings'
        db.delete_column('facebook_users_user', 'security_settings')
        # Deleting field 'User.significant_other'
        db.delete_column('facebook_users_user', 'significant_other')
        # Deleting field 'User.video_upload_limits'
        db.delete_column('facebook_users_user', 'video_upload_limits')
        # Deleting field 'User.website'
        db.delete_column('facebook_users_user', 'website')
        # Deleting field 'User.work'
        db.delete_column('facebook_users_user', 'work')

    # South's frozen ORM snapshot of the app models at this migration point.
    models = {
        'facebook_users.user': {
            'Meta': {'ordering': "['name']", 'object_name': 'User'},
            'bio': ('django.db.models.fields.TextField', [], {}),
            'birthday': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'cover': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'currency': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'devices': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'education': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'email': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'favorite_athletes': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'favorite_teams': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'gender': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'graph_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'hometown': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'installed': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'interested_in': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'languages': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '300'}),
            'locale': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
            'location': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'middle_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'payment_pricepoints': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'picture': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'political': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'quotes': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'relationship_status': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'religion': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'security_settings': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'significant_other': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'third_party_id': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'timezone': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'updated_time': ('django.db.models.fields.DateTimeField', [], {}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'video_upload_limits': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '100'}),
            'work': ('annoying.fields.JSONField', [], {'max_length': '500', 'null': 'True'})
        }
    }
    complete_apps = ['facebook_users']
| |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
import unittest
import numpy as np
from op_test import OpTest
from test_softmax_op import stable_softmax
CUDA_BLOCK_SIZE = 512
class CTCForward(object):
    """Pure-python reference implementation of the CTC forward pass, used to
    compute expected losses for the warpctc operator test.

    All probabilities are handled in log space with clamping to avoid
    overflow/underflow; the recurrences follow Chapter 7.3 of "Alex Grave,
    Supervised Sequence Labelling with Recurrent Neural Networks".
    """
    def __init__(self, softmax, softmax_lod, labels, labels_lod, blank,
                 norm_by_times):
        # softmax: (total_time_steps, num_classes) probabilities, LoD-packed
        self.softmax = softmax
        self.softmax_lod = softmax_lod
        assert labels.shape[1] == 1
        # labels: (total_label_len, 1) int class ids, LoD-packed
        self.labels = labels
        self.labels_lod = labels_lod
        # index of the blank symbol
        self.blank = blank
        self.norm_by_times = norm_by_times
        # LoD level used for both inputs
        self.level = 0
        self.num_classes = softmax.shape[1]
        self.batch_size = len(softmax_lod[self.level])
        assert self.batch_size == len(labels_lod[self.level])
        self.loss = np.zeros([self.batch_size, 1], dtype="float32")
        self.gradient = np.zeros(self.softmax.shape, dtype="float32")
        # float64
        self.EXP_MAX = sys.float_info.max
        self.EXP_MIN = sys.float_info.min
        self.LOG_ZERO = np.log(self.EXP_MIN)
        self.LOG_INFINITY = np.log(self.EXP_MAX)
    def safe_exp(self, x):
        # exp with clamping: underflow -> 0, overflow -> float max
        if x <= self.LOG_ZERO:
            return 0.0
        if x >= self.LOG_INFINITY:
            return self.EXP_MAX
        return np.exp(x)
    def safe_log(self, x):
        # log with clamping: values at/below float min map to LOG_ZERO
        if x <= self.EXP_MIN:
            return self.LOG_ZERO
        return np.log(x)
    # x = lna and y = lnb are in log scale, ln(a / b) = lna - lnb
    def log_div(self, x, y):
        res = x - y
        if res <= self.LOG_ZERO:
            return self.LOG_ZERO
        if res >= self.LOG_INFINITY:
            return self.LOG_INFINITY
        return res
    # x = lna and y = lnb are in log scale, ln(a * b) = lna + lnb
    def log_mul(self, x, y):
        res = x + y
        if res <= self.LOG_ZERO:
            return self.LOG_ZERO
        if res >= self.LOG_INFINITY:
            return self.LOG_INFINITY
        return res
    # x = lna and y = lnb are in log scale,
    # ln(a + b) = lna + ln(1 + exp(lnb - lna)), where b > a
    def log_add(self, x, y):
        # swap so that x holds the larger value for numerical stability
        if x < y:
            t = y
            y = x
            x = t
        return x + self.safe_log(1 + self.safe_exp(y - x))
    def segment_range(self, time, total_times, total_segments):
        # valid [start, end) range of extended-label segments reachable at
        # this time step (CTC pruning of the DP table)
        start = max(0, total_segments - (2 * (total_times - time)))
        end = min(total_segments, 2 * (time + 1))
        return start, end
    def forward_a_sequence(self, softmax_a_sequence, labels_a_sequence):
        """Return the negative log-probability of one (sequence, label) pair."""
        total_times = softmax_a_sequence.shape[0]
        # extended label: blanks interleaved around every label symbol
        total_segments = labels_a_sequence.shape[0] * 2 + 1
        required_times = labels_a_sequence.shape[0]
        old_label = -1
        for i in range(labels_a_sequence.shape[0]):
            # two contingous labels with the same value
            if labels_a_sequence[i, 0] == old_label:
                required_times = required_times + 1
            old_label = labels_a_sequence[i, 0]
        # sequence too short to emit all labels: loss contribution is 0
        if total_times < required_times:
            return 0
        # calculate the forward and backward variables,
        # reference Chapter 7.3 of "Alex Grave, Supervised Sequence
        # Labelling with Recurrent Neural Networks"
        log_acts = np.zeros([total_times, self.num_classes], dtype="float32")
        for i in range(total_times):
            for j in range(self.num_classes):
                log_acts[i, j] = self.safe_log(softmax_a_sequence[i, j])
        # calculate the forward variables
        forward_vars = np.zeros([total_times, total_segments], dtype="float32")
        for i in range(total_times):
            for j in range(total_segments):
                forward_vars[i, j] = self.LOG_ZERO
        for i in range(total_times):
            # dp initialization at t0
            if i == 0:
                forward_vars[i, 0] = log_acts[0, self.blank]
                if total_segments > 1:
                    forward_vars[i, 1] = log_acts[0, labels_a_sequence[i, 0]]
                continue
            # dp from t1
            start, end = self.segment_range(i, total_times, total_segments)
            for k in range(end - start):
                j = k + start
                if j & 1 == 1:
                    # odd segment: a real label symbol
                    label_idx = j // 2
                    label_val = labels_a_sequence[label_idx, 0]
                    fv = self.log_add(forward_vars[i - 1, j],
                                      forward_vars[i - 1, j - 1])
                    # skip transition allowed only between distinct labels
                    if j > 1 and label_val != labels_a_sequence[label_idx - 1,
                                                                0]:
                        fv = self.log_add(fv, forward_vars[i - 1, j - 2])
                    fv = self.log_mul(fv, log_acts[i, label_val])
                else:
                    # even segment: a blank
                    fv = forward_vars[i - 1, j]
                    if j > 0:
                        fv = self.log_add(fv, forward_vars[i - 1, j - 1])
                    fv = self.log_mul(fv, log_acts[i, self.blank])
                forward_vars[i, j] = fv
        # sum the last two value as log_prob
        log_prob = forward_vars[total_times - 1, total_segments - 1]
        if total_segments > 1:
            log_prob = self.log_add(
                log_prob, forward_vars[total_times - 1, total_segments - 2])
        return -log_prob
    def forward(self):
        """Compute the per-sequence CTC loss for the whole LoD batch."""
        softmax_offset = 0
        labels_offset = 0
        for i in range(self.batch_size):
            # slice the i-th sequence out of the LoD-packed tensors
            softmax_start_i = softmax_offset
            softmax_end_i = softmax_offset + self.softmax_lod[self.level][i]
            labels_start_i = labels_offset
            labels_end_i = labels_offset + self.labels_lod[self.level][i]
            softmax_a_sequence = self.softmax[softmax_start_i:softmax_end_i, :]
            labels_a_sequence = self.labels[labels_start_i:labels_end_i, :]
            self.loss[i] = self.forward_a_sequence(softmax_a_sequence,
                                                   labels_a_sequence)
            softmax_offset += self.softmax_lod[self.level][i]
            labels_offset += self.labels_lod[self.level][i]
        return self.loss
class TestWarpCTCOp(OpTest):
    """Operator test for warpctc; expected losses come from CTCForward."""
    def config(self):
        # default test shape: small batch, few classes, blank = last class
        self.batch_size = 4
        self.num_classes = 8
        self.logits_lod = [[4, 1, 3, 3]]
        self.labels_lod = [[3, 1, 4, 4]]
        self.blank = self.num_classes - 1
        self.norm_by_times = False
    def setUp(self):
        self.op_type = "warpctc"
        self.config()
        logits = np.random.uniform(
            0.1, 1.0,
            [sum(self.logits_lod[0]), self.num_classes]).astype("float32")
        softmax = np.apply_along_axis(stable_softmax, 1, logits)
        # labels should not be blank
        labels = np.random.randint(
            0,
            self.num_classes - 1, [sum(self.labels_lod[0]), 1],
            dtype="int32")
        # reference implementation produces the expected loss values
        ctc = CTCForward(softmax, self.logits_lod, labels, self.labels_lod,
                         self.blank, self.norm_by_times)
        loss = ctc.forward()
        max_sequence_length = 0
        for i in range(self.batch_size):
            max_sequence_length = max(max_sequence_length,
                                      self.logits_lod[0][i])
        # placeholder gradient buffer used by the gradient check
        self.gradient = np.zeros(
            [max_sequence_length, self.batch_size, self.num_classes],
            dtype="float32")
        self.inputs = {
            "Logits": (logits, self.logits_lod),
            "Label": (labels, self.labels_lod)
        }
        self.outputs = {"Loss": loss}
        self.attrs = {"blank": self.blank, "norm_by_times": self.norm_by_times}
    def test_check_output(self):
        self.check_output()
    def test_check_grad(self):
        self.outputs['WarpCTCGrad'] = self.gradient
        self.check_grad(["Logits"], "Loss", max_relative_error=0.007)
class TestWarpCTCOpCase1(TestWarpCTCOp):
    """Variant exercising a class count larger than one CUDA block
    (CUDA_BLOCK_SIZE + 2) with blank = 0."""
    def config(self):
        self.batch_size = 4
        self.num_classes = CUDA_BLOCK_SIZE + 2
        self.logits_lod = [[4, 1, 3, 3]]
        self.labels_lod = [[3, 1, 4, 4]]
        self.blank = 0
        self.norm_by_times = False
# Run the operator tests when executed directly.
if __name__ == "__main__":
    unittest.main()
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
An `in-folder` reduction script for The Best and Brightest Metal-Poor Stars
observed through Gemini.
Note: You should really be running prepare.py, which will generate a `reduce.sh`
script that calls this file.
"""
__author__ = "Andy Casey <arc@ast.cam.ac.uk>"
__version__ = "June, 2015"
import logging
import os
import time
from glob import glob
import numpy as np
from astropy.io import fits
from astropy.table import Table
# I am ashamed:
try:
from pyraf import iraf
except ImportError:
raise ImportError("No pyraf module -- did you forget to do run `ur_setup`?")
# Set up logging: full DEBUG output goes to reduction.log, INFO and above
# is mirrored to the console.
logging.basicConfig(level=logging.DEBUG, filename="reduction.log",
    format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
    datefmt="%m-%d %H:%M", filemode="w")
console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
console.setFormatter(formatter)
logging.getLogger("reduction").addHandler(console)
logger = logging.getLogger("reduction")
def log_iraf_result(result, raise_errors=False):
    """Log the line-wise output of an IRAF task.

    :param result: list of output lines from an IRAF call (Stdout=1)
    :param raise_errors: if True, raise when the output mentions 'error'
    :raises RuntimeError: if *raise_errors* is set and the joined output
        contains the substring 'error'
    """
    message = "\n".join(result)
    logger.info("Result:\n{0}".format(message))
    # BUG FIX: the original raised `IrafError`, a name never imported or
    # defined in this script, so this path raised NameError instead of the
    # intended exception. Use RuntimeError to surface the IRAF failure.
    if raise_errors and "error" in message:
        raise RuntimeError(message)
# IRAF requires a login.cl in the working directory (created by mkiraf).
if not os.path.exists("login.cl"):
    raise IOError("no login.cl file -- have you run mkiraf? [select xgterm]")
logger.info("Learning..")
# Load the IRAF packages needed for GMOS reductions.
iraf.fitsutil()
iraf.gemini()
iraf.gmos()
logger.info("Unlearning..")
# Reset all task parameters to their defaults.
iraf.unlearn("gemini")
iraf.unlearn("gmos")
iraf.unlearn("gemtools")
iraf.set(stdimage="imtgmos2", homedir=os.path.expanduser("~/"))
logger.info("Cleaning up from any previous reductions..")
# Remove all intermediate products of a previous run of this script.
os.system("rm -Rf master_flat.fits mosaic_master_flat.fits stgscg*.fits "
    "cg*.fits tgs*.fits g*.fits estgsc*.fits database J??????.?????????.?.fits")
# Gemini North/South frames follow the [NS]20YYMMDD... naming scheme.
files = np.array(glob("[N|S]20*.fits"))
logger.info("Getting object types..")
# Read OBSTYPE/OBSCLASS from the primary header of every frame.
obstypes = np.array([iraf.hselect(filename + "[0]", fields="OBSTYPE",
    expr="1=1", Stdout=1)[0] for filename in files])
obsclass = np.array([iraf.hselect(filename + "[0]", fields="OBSCLASS",
    expr="1=1", Stdout=1)[0] for filename in files])
folder = os.path.dirname(files[0])
# Check that we have one of each
if "OBJECT" not in obstypes:
    raise IOError("no OBJECT image found in {0}".format(folder))
if "ARC" not in obstypes:
    raise IOError("no ARC image found in {0}".format(folder))
if "FLAT" not in obstypes:
    raise IOError("no FLAT image found in {0}".format(folder))
arc_filenames = files[obstypes == "ARC"]
# NOTE(review): only the first FLAT frame is used -- confirm intended.
flat_filename = files[obstypes == "FLAT"][0]
object_filenames = files[(obstypes == "OBJECT") * (obsclass == "science")]
# Prepare the data
logger.info("Preparing..")
prepare_result = iraf.gprepare("*.fits", fl_addmdf=True, Stdout=1)
log_iraf_result(prepare_result)
# Do cosmic ray rejection on the science frames
for filename in object_filenames:
    logger.info("Doing cosmic ray rejection on {}..".format(filename))
    # gprepare wrote 'g'-prefixed frames; gscrrej adds the 'c' prefix.
    cosmic_ray_result = iraf.gscrrej("g{0}".format(filename),
        "cg{0}".format(filename), Stdout=1)
    log_iraf_result(cosmic_ray_result)
# Create a master flat
logger.info("Creating master flat..")
flat_result = iraf.gsflat(inflats="g{0}".format(flat_filename),
    specflat="master_flat.fits", fl_over=False, fl_trim=True, fl_dark=False,
    fl_fixpix=False, fl_inter=False, function="chebyshev", order=15,
    fl_detec=True, ovs_flinter=False, fl_vardq=False, fl_bias=False, Stdout=1)
log_iraf_result(flat_result)
# Create a mosaic of the master flat
logger.info("Creating master flat mosaic..")
mosaic_result = iraf.gmosaic("master_flat.fits", outpref="mosaic_", Stdout=1)
log_iraf_result(mosaic_result)
# Reduce the science frame
logger.info("Reducing science frame(s)..")
for object_filename in object_filenames:
    logger.info("Reducing {}".format(object_filename))
    # flat-field, mosaic and cut the cosmic-ray-cleaned science frame
    reduce_science_result = iraf.gsreduce("cg{0}".format(object_filename),
        fl_inter=False, fl_over=False, fl_trim=True, fl_dark=False,
        fl_flat=True, flatim="mosaic_master_flat.fits", fl_gmosaic=True,
        fl_fixpix=True, fl_bias=False, fl_cut=True, fl_gsappwave=True,
        ovs_flinter=False, fl_vardq=False, yoffset=5.0, Stdout=1)
    log_iraf_result(reduce_science_result)
# Reduce the arc frames
logger.info("Reducing arc frames..")
for filename in arc_filenames:
    reduce_arc_result = iraf.gsreduce("g{0}".format(filename),
        fl_over=False, fl_trim=True, fl_bias=False, fl_dark=False,
        fl_flat=False, fl_cut=True, fl_gsappwave=True, yoffset=5.0, Stdout=1)
    log_iraf_result(reduce_arc_result)
logger.info("Running gswavelength..")
#gswavelength_result = iraf.gswavelength("gsg{0}".format(filename),
#    fl_inter="NO", nsum=5, step=5, function='chebyshev', order=6,
#    fitcxord=5, fitcyord=4, Stdout=1)
# NOTE(review): `filename` here is the loop variable leaking out of the
# arc loop above, so only the LAST arc frame is wavelength-calibrated and
# transformed -- confirm this is intended when multiple arcs exist.
gswavelength_result = iraf.gswavelength("gsg{0}".format(filename),
    fl_inter="NO", nsum=5, step=5, function="chebyshev", order="6",
    fitcxord=5, fitcyord=4, Stdout=1, niterate=10, low_reject=1.5,
    high_reject=1.5, fl_dbwrite="YES", fl_overwrite="yes")
log_iraf_result(gswavelength_result)
# Apply transformations
logger.info("Applying wavelength transformations to arc..")
gstransform_arc_result = iraf.gstransform("gsg{0}".format(filename),
    wavtraname="gsg{0}".format(filename), Stdout=1)
log_iraf_result(gstransform_arc_result)
logger.info("Doing wavelength transformations, sky and extraction:")
for object_filename in object_filenames:
    logger.info("Working on filename {}".format(object_filename))
    logger.info("Applying wavelength transformations to object..")
    # Get nearest arc by time.
    arc_times = []
    for arc_filename in arc_filenames:
        image = fits.open(arc_filename)
        # some headers already carry a combined ISO timestamp in DATE;
        # otherwise build one from DATE + UT
        time_string = "{0}T{1}".format(
            image[0].header["DATE"], image[0].header["UT"]) \
            if "T" not in image[0].header["DATE"] else image[0].header["DATE"]
        arc_times.append(time.mktime(time.strptime(time_string.split(".")[0],
            "%Y-%m-%dT%H:%M:%S")))
    image = fits.open(object_filename)
    time_string = "{0}T{1}".format(
        image[0].header["DATE"], image[0].header["UT"]) \
        if "T" not in image[0].header["DATE"] else image[0].header["DATE"]
    obs_time = time.mktime(time.strptime(time_string.split(".")[0],
        "%Y-%m-%dT%H:%M:%S"))
    # Get closest arc.
    index = np.argmin(np.abs(np.array(arc_times) - obs_time))
    arc_filename = arc_filenames[index]
    log_iraf_result(["ASSOCIATING ARC {0} WITH FILENAME {1}".format(
        arc_filename, object_filename)])
    log_iraf_result(["ASSOCIATING FLAT {0} WITH FILENAME {1}".format(
        flat_filename, object_filename)])
    # rectify the science frame with the chosen arc's wavelength solution
    gstransform_object_result = iraf.gstransform(
        "gscg{0}".format(object_filename),
        wavtraname="gsg{0}".format(arc_filename), Stdout=1)
    log_iraf_result(gstransform_object_result)
    logger.info("Subtracting sky..")
    sky_subtraction_result = iraf.gsskysub("tgscg{0}".format(object_filename),
        fl_inter=False, Stdout=1)
    log_iraf_result(sky_subtraction_result)
    logger.info("Extracting..")
    extract_result = iraf.gsextract("stgscg{0}".format(object_filename),
        fl_inter=False, find=True, back="fit", bfunc="chebyshev", border=1,
        tfunct="spline3", torder=5, tnsum=20, tstep=50, refimage="",
        apwidth=1.3, recent=True, trace=True, fl_vardq=False, Stdout=1)
    log_iraf_result(extract_result)
logger.info("Producing 1D spectrum..")
reduced_filenames = glob("estgscg*.fits")
image = fits.open(reduced_filenames[0])
# dispersion axis reconstructed from the WCS of the first reduced frame
dispersion = image[2].header["CRVAL1"] \
    + np.arange(image[2].header["NAXIS1"]) * image[2].header["CD1_1"]
# Stack the flux (this assumes sequential exposures)
flux = np.zeros_like(dispersion)
for filename in reduced_filenames:
    with fits.open(filename) as science_image:
        flux += science_image[2].data.flatten()
# And create an easy save file:
primary_hdu = fits.PrimaryHDU(header=image[0].header)
disp = image[2].header["CRVAL1"] \
    + np.arange(image[2].header["NAXIS1"]) * image[2].header["CD1_1"]
# NOTE(review): fits.new_table is deprecated (removed in modern astropy in
# favour of fits.BinTableHDU.from_columns) -- this script targets an old
# astropy; confirm before upgrading. Also note 1.0/flux can divide by zero
# when a pixel's stacked flux is exactly 0.
data_hdu = fits.new_table([
    fits.Column(name="disp", format="1D", array=disp),
    fits.Column(name="flux", format="1D", array=flux),
    fits.Column(name="inv_var", format="1D", array=1.0/flux)])
hdu_list = fits.HDUList([primary_hdu, data_hdu])
# NOTE(review): `filename` leaks out of the flux-stacking loop above, so
# this output is named after the LAST reduced frame -- confirm intended.
hdu_list.writeto("{0}-{1}".format(image[0].header["OBJECT"],
    filename))
# mask non-positive fluxes before writing the final spectrum
flux[0 >= flux] = np.nan
primary_hdu = fits.PrimaryHDU(header=image[0].header)
data_hdu = fits.new_table([
    fits.Column(name="disp", format="1D", array=dispersion),
    fits.Column(name="flux", format="1D", array=flux),
    # I am further ashamed:
    fits.Column(name="variance", format="1D", array=flux)
])
hdu_list = fits.HDUList([primary_hdu, data_hdu])
hdu_list.writeto("{}.fits".format(image[0].header["OBJECT"]))
logger.info("Created extracted spectrum {}.fits".format(image[0].header["OBJECT"]))
| |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class lbwlm(base_resource) :
	""" Configuration for web log manager resource.

	Follows the standard auto-generated nitro resource pattern: writable
	attributes are sent on add/update, read-only attributes are populated
	from GET responses, and the classmethods wrap the nitro REST verbs.
	"""
	def __init__(self) :
		# Writable configuration attributes.
		self._wlmname = ""
		self._ipaddress = ""
		self._port = 0
		self._lbuid = ""
		self._katimeout = 0
		# Read-only attributes (no setters below; filled in by GET).
		self._secure = ""
		self._state = ""
		# Resource count reported when options.count is set (see count()).
		self.___count = 0
	@property
	def wlmname(self) :
		"""The name of the Work Load Manager.<br/>Minimum length = 1.
		"""
		try :
			return self._wlmname
		except Exception as e:
			raise e
	@wlmname.setter
	def wlmname(self, wlmname) :
		"""The name of the Work Load Manager.<br/>Minimum length = 1
		"""
		try :
			self._wlmname = wlmname
		except Exception as e:
			raise e
	@property
	def ipaddress(self) :
		"""The IP address of the WLM.
		"""
		try :
			return self._ipaddress
		except Exception as e:
			raise e
	@ipaddress.setter
	def ipaddress(self, ipaddress) :
		"""The IP address of the WLM.
		"""
		try :
			self._ipaddress = ipaddress
		except Exception as e:
			raise e
	@property
	def port(self) :
		"""The port of the WLM.<br/>Range 1 - 65535.
		"""
		try :
			return self._port
		except Exception as e:
			raise e
	@port.setter
	def port(self, port) :
		"""The port of the WLM.<br/>Range 1 - 65535
		"""
		try :
			self._port = port
		except Exception as e:
			raise e
	@property
	def lbuid(self) :
		"""The LBUID for the Load Balancer to communicate to the Work Load Manager.
		"""
		try :
			return self._lbuid
		except Exception as e:
			raise e
	@lbuid.setter
	def lbuid(self, lbuid) :
		"""The LBUID for the Load Balancer to communicate to the Work Load Manager.
		"""
		try :
			self._lbuid = lbuid
		except Exception as e:
			raise e
	@property
	def katimeout(self) :
		"""The idle time period after which NS would probe the WLM. The value ranges from 1 to 1440 minutes.<br/>Default value: 2<br/>Maximum length = 1440.
		"""
		try :
			return self._katimeout
		except Exception as e:
			raise e
	@katimeout.setter
	def katimeout(self, katimeout) :
		"""The idle time period after which NS would probe the WLM. The value ranges from 1 to 1440 minutes.<br/>Default value: 2<br/>Maximum length = 1440
		"""
		try :
			self._katimeout = katimeout
		except Exception as e:
			raise e
	@property
	def secure(self) :
		"""Use this parameter to enable secure mode of communication with WLM.<br/>Possible values = YES, NO.
		"""
		try :
			return self._secure
		except Exception as e:
			raise e
	@property
	def state(self) :
		"""State of the WLM.<br/>Possible values = ACTIVE, INACTIVE, UNKNOWN.
		"""
		try :
			return self._state
		except Exception as e:
			raise e
	def _get_nitro_response(self, service, response) :
		""" converts nitro response into object and returns the object array in case of get request.
		"""
		try :
			result = service.payload_formatter.string_to_resource(lbwlm_response, response, self.__class__.__name__)
			if(result.errorcode != 0) :
				# Error code 444 means the session expired: drop it so the
				# client re-authenticates on the next request.
				if (result.errorcode == 444) :
					service.clear_session(self)
				if result.severity :
					if (result.severity == "ERROR") :
						raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
				else :
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			return result.lbwlm
		except Exception as e :
			raise e
	def _get_object_name(self) :
		""" Returns the value of object identifier argument
		"""
		try :
			if (self.wlmname) :
				return str(self.wlmname)
			return None
		except Exception as e :
			raise e
	@classmethod
	def add(cls, client, resource) :
		""" Use this API to add lbwlm.

		`resource` may be a single lbwlm-like object or a list of them
		(the latter is sent as one bulk request).
		"""
		try :
			if type(resource) is not list :
				addresource = lbwlm()
				addresource.wlmname = resource.wlmname
				addresource.ipaddress = resource.ipaddress
				addresource.port = resource.port
				addresource.lbuid = resource.lbuid
				addresource.katimeout = resource.katimeout
				return addresource.add_resource(client)
			else :
				if (resource and len(resource) > 0) :
					addresources = [ lbwlm() for _ in range(len(resource))]
					for i in range(len(resource)) :
						addresources[i].wlmname = resource[i].wlmname
						addresources[i].ipaddress = resource[i].ipaddress
						addresources[i].port = resource[i].port
						addresources[i].lbuid = resource[i].lbuid
						addresources[i].katimeout = resource[i].katimeout
				result = cls.add_bulk_request(client, addresources)
				return result
		except Exception as e :
			raise e
	@classmethod
	def delete(cls, client, resource) :
		""" Use this API to delete lbwlm.

		`resource` may be a name string, an lbwlm object, or a list of
		either (names or objects, not mixed).
		"""
		try :
			if type(resource) is not list :
				deleteresource = lbwlm()
				# Plain strings are treated as the wlmname identifier.
				if type(resource) != type(deleteresource):
					deleteresource.wlmname = resource
				else :
					deleteresource.wlmname = resource.wlmname
				return deleteresource.delete_resource(client)
			else :
				if type(resource[0]) != cls :
					if (resource and len(resource) > 0) :
						deleteresources = [ lbwlm() for _ in range(len(resource))]
						for i in range(len(resource)) :
							deleteresources[i].wlmname = resource[i]
				else :
					if (resource and len(resource) > 0) :
						deleteresources = [ lbwlm() for _ in range(len(resource))]
						for i in range(len(resource)) :
							deleteresources[i].wlmname = resource[i].wlmname
				result = cls.delete_bulk_request(client, deleteresources)
				return result
		except Exception as e :
			raise e
	@classmethod
	def update(cls, client, resource) :
		""" Use this API to update lbwlm.

		Only katimeout is updatable; wlmname identifies the resource.
		"""
		try :
			if type(resource) is not list :
				updateresource = lbwlm()
				updateresource.wlmname = resource.wlmname
				updateresource.katimeout = resource.katimeout
				return updateresource.update_resource(client)
			else :
				if (resource and len(resource) > 0) :
					updateresources = [ lbwlm() for _ in range(len(resource))]
					for i in range(len(resource)) :
						updateresources[i].wlmname = resource[i].wlmname
						updateresources[i].katimeout = resource[i].katimeout
				result = cls.update_bulk_request(client, updateresources)
				return result
		except Exception as e :
			raise e
	@classmethod
	def unset(cls, client, resource, args) :
		""" Use this API to unset the properties of lbwlm resource.
		Properties that need to be unset are specified in args array.
		"""
		try :
			if type(resource) is not list :
				unsetresource = lbwlm()
				# Plain strings are treated as the wlmname identifier.
				if type(resource) != type(unsetresource):
					unsetresource.wlmname = resource
				else :
					unsetresource.wlmname = resource.wlmname
				return unsetresource.unset_resource(client, args)
			else :
				if type(resource[0]) != cls :
					if (resource and len(resource) > 0) :
						unsetresources = [ lbwlm() for _ in range(len(resource))]
						for i in range(len(resource)) :
							unsetresources[i].wlmname = resource[i]
				else :
					if (resource and len(resource) > 0) :
						unsetresources = [ lbwlm() for _ in range(len(resource))]
						for i in range(len(resource)) :
							unsetresources[i].wlmname = resource[i].wlmname
				result = cls.unset_bulk_request(client, unsetresources, args)
				return result
		except Exception as e :
			raise e
	@classmethod
	def get(cls, client, name="", option_="") :
		""" Use this API to fetch all the lbwlm resources that are configured on netscaler.

		With no name, fetches all resources; `name` may also be a single
		wlmname or a list of wlmnames.
		"""
		try :
			if not name :
				obj = lbwlm()
				response = obj.get_resources(client, option_)
			else :
				if type(name) != cls :
					if type(name) is not list :
						obj = lbwlm()
						obj.wlmname = name
						response = obj.get_resource(client, option_)
					else :
						if name and len(name) > 0 :
							response = [lbwlm() for _ in range(len(name))]
							obj = [lbwlm() for _ in range(len(name))]
							for i in range(len(name)) :
								obj[i] = lbwlm()
								obj[i].wlmname = name[i]
								response[i] = obj[i].get_resource(client, option_)
			return response
		except Exception as e :
			raise e
	@classmethod
	def get_filtered(cls, client, filter_) :
		""" Use this API to fetch filtered set of lbwlm resources.
		filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = lbwlm()
			option_ = options()
			option_.filter = filter_
			response = obj.getfiltered(client, option_)
			return response
		except Exception as e :
			raise e
	@classmethod
	def count(cls, client) :
		""" Use this API to count the lbwlm resources configured on NetScaler.
		"""
		try :
			obj = lbwlm()
			option_ = options()
			option_.count = True
			response = obj.get_resources(client, option_)
			if response :
				# The payload formatter stores the count under the literal
				# key '___count', so read it via __dict__ to bypass Python
				# name mangling.
				return response[0].__dict__['___count']
			return 0
		except Exception as e :
			raise e
	@classmethod
	def count_filtered(cls, client, filter_) :
		""" Use this API to count filtered the set of lbwlm resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = lbwlm()
			option_ = options()
			option_.count = True
			option_.filter = filter_
			response = obj.getfiltered(client, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e :
			raise e
	class Secure:
		# Allowed values for the `secure` attribute.
		YES = "YES"
		NO = "NO"
	class State:
		# Allowed values for the `state` attribute.
		ACTIVE = "ACTIVE"
		INACTIVE = "INACTIVE"
		UNKNOWN = "UNKNOWN"
class lbwlm_response(base_response) :
	""" Response envelope for lbwlm requests.

	`lbwlm` holds the deserialized resource objects; the remaining fields
	mirror the standard nitro response envelope (error code, message,
	severity, session id).
	"""
	def __init__(self, length=1) :
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		# Pre-allocate `length` empty resources for the payload formatter
		# to fill.  (The original assigned self.lbwlm = [] first and then
		# immediately overwrote it; the dead assignment is removed.)
		self.lbwlm = [lbwlm() for _ in range(length)]
| |
import unittest
from libxlpy import *
class TestBook(unittest.TestCase):
    """Unit tests for libxlpy Book: sheet/format/font management, picture
    embedding, date/color packing and workbook-level settings."""
    def setUp(self):
        # Fresh workbook per test.
        self.book = Book()
    def test_load(self):
        self.assertTrue(
                self.book.load('./book.xls')
        )
        self.assertFalse(
                self.book.load('./unexisting_file')
        )
    def test_addSheet(self):
        sheet = self.book.addSheet('foo')
        self.assertEqual('XLPySheet', type(sheet).__name__)
    def test_getSheet(self):
        # Out-of-range index yields None rather than raising.
        sheet = self.book.getSheet(0)
        self.assertIsNone(sheet)
        self.book.addSheet('foo')
        sheet = self.book.getSheet(0)
        self.assertEqual('XLPySheet', type(sheet).__name__)
    def test_sheetType(self):
        self.book.addSheet('foo')
        self.assertEqual(
                self.book.sheetType(0),
                SHEETTYPE_SHEET)
        self.assertEqual(
                self.book.sheetType(99),
                SHEETTYPE_UNKNOWN)
    def test_delSheet(self):
        self.book.addSheet('foo')
        self.book.addSheet('bar')
        self.assertIsNotNone(self.book.getSheet(1))
        self.book.delSheet(1)
        self.assertIsNone(self.book.getSheet(1))
    def test_sheetCount(self):
        self.assertEqual(0, self.book.sheetCount())
        self.book.addSheet('foo')
        self.assertEqual(1, self.book.sheetCount())
    def test_addFormat(self):
        fmt = self.book.addFormat()
        self.assertEqual('XLPyFormat', type(fmt).__name__)
    def test_addFont(self):
        fnt = self.book.addFont()
        self.assertEqual('XLPyFont', type(fnt).__name__)
    def test_addCustomNumFormat(self):
        index = self.book.addCustomNumFormat("fmt")
        self.assertIsNotNone(index)
        self.assertEqual('fmt',
                self.book.customNumFormat(index))
    def test_customNumFormat(self):
        self.assertIsNone(
                self.book.customNumFormat(0))
    def test_format(self):
        fmt = self.book.format(0)
        self.assertEqual('XLPyFormat', type(fmt).__name__)
    def test_formatSize(self):
        num = self.book.formatSize()
        self.book.addFormat()
        self.assertEqual(num + 1, self.book.formatSize())
    def test_activeSheet(self):
        index = self.book.activeSheet()
        self.assertEqual(index, 0)
    def test_setActiveSheet(self):
        # Setting an out-of-range sheet index is silently ignored.
        self.book.setActiveSheet(10)
        self.assertEqual(0, self.book.activeSheet())
        sheet = self.book.addSheet('foo')
        sheet = self.book.addSheet('bar')
        sheet = self.book.addSheet('foobar')
        self.book.setActiveSheet(2)
        self.assertEqual(2, self.book.activeSheet())
    def test_pictureSize(self):
        self.assertEqual(0, self.book.pictureSize())
        self.book.addPicture("./logo.png")
        self.assertEqual(1, self.book.pictureSize())
    def test_getPicture(self):
        # Type 255 (PICTURETYPE_ERROR) is returned for a missing index.
        (t, img) = self.book.getPicture(0)
        self.assertEqual(255, t)
        index = self.book.addPicture("./logo.png")
        (t, img) = self.book.getPicture(index)
        self.assertEqual(0, t)
    def test_defaultFont(self):
        (name, size) = self.book.defaultFont()
        self.assertIsInstance(name, str)
        self.assertIsInstance(size, int)
    def test_setDefaultFont(self):
        name, size = "Mono", 14
        self.book.setDefaultFont(name, size)
        self.assertEqual(
                self.book.defaultFont(),
                (name, size))
    def test_font(self):
        font = self.book.font(0)
        self.assertEqual('XLPyFont', type(font).__name__)
        font = self.book.font(999) # invalid font index
        self.assertIsNone(font)
    def test_fontSize(self):
        # default value
        self.assertEqual(5,
                self.book.fontSize())
    def test_datePack(self):
        self.assertIsInstance(
                self.book.datePack(2000, 1, 1, 1, 0, 0, 0), float)
    def test_dateUnpack(self):
        pack = self.book.datePack(2000, 1, 1, 1, 0, 0, 0)
        unpack = self.book.dateUnpack(pack)
        self.assertEqual(unpack,
                (2000, 1, 1, 1, 0, 0, 0))
    def test_colorPack(self):
        self.assertIsInstance(
                self.book.colorPack(0, 0, 0), int)
    def test_colorUnpack(self):
        r, g, b = 0, 127, 255
        pack = self.book.colorPack(r, g, b)
        unpack = self.book.colorUnpack(pack)
        self.assertEqual(unpack, (r,g,b))
    def test_addPicture(self):
        index = self.book.addPicture("./logo.png")
        self.assertEqual(0, index)
    def test_addPicture2(self):
        # Fix: read the fixture in binary mode ('rb').  Text mode corrupts
        # PNG bytes on Windows and raises UnicodeDecodeError on Python 3,
        # and the old code also leaked the file handle.
        with open('./logo.png', 'rb') as f:
            index = self.book.addPicture2(f.read())
        self.assertEqual(0, index)
        self.assertEqual('ok', self.book.errorMessage())
        self.book.addPicture2('invalid image data')
        self.assertEqual('unknown picture format', self.book.errorMessage())
    def test_refR1C1(self):
        self.assertFalse(self.book.refR1C1())
    def test_setRefR1C1(self):
        self.book.setRefR1C1(True)
        self.assertTrue(self.book.refR1C1())
        self.book.setRefR1C1(False)
        self.assertFalse(self.book.refR1C1())
    def test_rgbMode(self):
        self.assertFalse(self.book.rgbMode())
    def test_setRgbMode(self):
        self.book.setRgbMode(True)
        self.assertTrue(self.book.rgbMode())
        self.book.setRgbMode(False)
        self.assertFalse(self.book.rgbMode())
    def test_biffVersion(self):
        self.assertIsInstance(self.book.biffVersion(), int)
    @unittest.skip("Not available on libxl")
    def test_IsDate1904(self):
        self.assertFalse(self.book.isDate1904())
    @unittest.skip("Not available on libxl")
    def test_setDate1904(self):
        self.assertIsNone(self.book.setDate1904(1))
        self.assertTrue(self.book.isDate1904())
        self.assertIsNone(self.book.setDate1904(0))
        self.assertFalse(self.book.isDate1904())
    def test_setKey(self):
        self.assertIsNone( self.book.setKey("foo", "bar") )
    def test_setLocale(self):
        self.assertTrue(self.book.setLocale("UTF-8"))
        self.assertFalse(self.book.setLocale("BadLocale"))
    def test_errorMessage(self):
        self.assertEqual('ok', self.book.errorMessage())
        # perform some bad op
        self.book.load('ThereIsNoSuchFile.xls')
        self.assertNotEqual('ok', self.book.errorMessage())
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| |
"""
The queue module is responsible for interacting with the local batch or
queueing system, putting tasks on the queue and removing them as necessary.
"""
import os
import time
import logging
from contextlib import contextmanager
import socket
import tornado.httpclient
import tornado.gen
from tornado.concurrent import run_on_executor
import certifi
import iceprod
import iceprod.server
from iceprod.server import module
from iceprod.server.globus import SiteGlobusProxy
import iceprod.core.functions
class StopException(Exception):
    """Raised to signal that the queue module should stop processing."""
# Module-level logger for the queue module.
logger = logging.getLogger('modules_queue')
class queue(module.module):
    """
    Run the queue module, which queues jobs onto the local grid system(s).
    """
    def __init__(self,*args,**kwargs):
        # run default init
        super(queue,self).__init__(*args,**kwargs)
        # SiteGlobusProxy helper; created in start().
        self.proxy = None
        # Minimum x509 proxy lifetime in seconds (12h baseline); extended
        # in start() by the configured queued/processing time limits.
        self.max_duration = 3600*12
    def start(self):
        """Start the queue"""
        super(queue,self).start()
        # set up x509 proxy
        proxy_kwargs = {}
        if 'gridftp_cfgfile' in self.cfg['queue']:
            proxy_kwargs['cfgfile'] = self.cfg['queue']['gridftp_cfgfile']
        self.proxy = SiteGlobusProxy(**proxy_kwargs)
        # set up job cacert: concatenate the certifi CA bundle with the
        # local IceProd cert so jobs can verify both public and local TLS.
        use_ssl = 'system' in self.cfg and 'ssl' in self.cfg['system'] and self.cfg['system']['ssl']
        if (use_ssl and 'cert' in self.cfg['system']['ssl']):
            if 'I3PROD' in os.environ:
                remote_cacert = os.path.expandvars(os.path.join('$I3PROD','etc','remote_cacert'))
            else:
                remote_cacert = os.path.expandvars(os.path.join('$PWD','remote_cacert'))
            with open(remote_cacert,'w') as f:
                f.write(open(certifi.where()).read())
                f.write('\n# IceProd local cert\n')
                f.write(open(self.cfg['system']['ssl']['cert']).read())
            self.cfg['system']['remote_cacert'] = remote_cacert
        # some setup: every dict-valued entry under cfg['queue'] is treated
        # as a queueing plugin configuration.
        self.plugins = []
        plugin_names = [x for x in self.cfg['queue'] if isinstance(self.cfg['queue'][x],dict)]
        plugin_cfg = [self.cfg['queue'][x] for x in plugin_names]
        plugin_types = [x['type'] for x in plugin_cfg]
        logger.info('queueing plugins in cfg: %r',{x:y for x,y in zip(plugin_names,plugin_types)})
        if not plugin_names:
            logger.debug('%r',self.cfg['queue'])
            logger.warning('no queueing plugins found. deactivating queue')
            self.stop()
            return
        # try to find plugins: prefer an exact module-name match, otherwise
        # the longest module name that is a prefix of the configured type.
        raw_types = iceprod.server.listmodules('iceprod.server.plugins')
        logger.info('available modules: %r',raw_types)
        plugins_tmp = []
        for i,t in enumerate(plugin_types):
            t = t.lower()
            p = None
            for r in raw_types:
                r_name = r.rsplit('.',1)[1].lower()
                if r_name == t:
                    # exact match
                    logger.debug('exact plugin match - %s',r)
                    p = r
                    break
                elif t.startswith(r_name):
                    # partial match
                    if p is None:
                        logger.debug('partial plugin match - %s',r)
                        p = r
                    else:
                        name2 = p.rsplit('.',1)[1]
                        if len(r_name) > len(name2):
                            logger.debug('better plugin match - %s',r)
                            p = r
            if p is not None:
                plugins_tmp.append((p,plugin_names[i],plugin_cfg[i]))
            else:
                logger.error('Cannot find plugin for grid %s of type %s',plugin_names[i],t)
        # instantiate all plugins that are required
        gridspec_types = {}
        # Extend the proxy lifetime to cover the global queue/processing
        # limits, then any larger per-plugin limits below.
        if 'max_task_queued_time' in self.cfg['queue']:
            self.max_duration += self.cfg['queue']['max_task_queued_time']
        if 'max_task_processing_time' in self.cfg['queue']:
            self.max_duration += self.cfg['queue']['max_task_processing_time']
        for p,p_name,p_cfg in plugins_tmp:
            logger.warning('queueing plugin found: %s = %s', p_name, p_cfg['type'])
            # try instantiating the plugin; gridspec is "<site_id>.<name>".
            args = (self.cfg['site_id']+'.'+p_name, p_cfg, self.cfg,
                    self.modules, self.io_loop, self.executor, self.statsd,
                    self.rest_client)
            try:
                self.plugins.append(iceprod.server.run_module(p,*args))
            except Exception as e:
                logger.error('Error importing plugin',exc_info=True)
            else:
                desc = p_cfg['description'] if 'description' in p_cfg else ''
                gridspec_types[self.cfg['site_id']+'.'+p_name] = {
                    'type': p_cfg['type'],
                    'description': desc,
                }
                duration = 0
                if 'max_task_queued_time' in p_cfg:
                    duration += p_cfg['max_task_queued_time']
                if 'max_task_processing_time' in p_cfg:
                    duration += p_cfg['max_task_processing_time']
                if duration > self.max_duration:
                    self.max_duration = duration
        # add gridspec and types to the db
        args = {
            'host': socket.getfqdn(),
            'queues': {p_name:p_cfg['type'] for p,p_name,p_cfg in plugins_tmp},
            'version': iceprod.__version__,
        }
        # Verify a configured grid_id still exists server-side; if not,
        # forget it and re-register below.
        if 'grid_id' in self.cfg and self.cfg['grid_id']:
            try:
                self.rest_client.request_seq('GET',
                        '/grids/{}'.format(self.cfg['grid_id']))
            except Exception:
                logger.warning('grid_id %s not present in DB',
                               self.cfg['grid_id'], exc_info=True)
                del self.cfg['grid_id']
        if 'grid_id' not in self.cfg:
            # register grid
            try:
                ret = self.rest_client.request_seq('POST',
                        '/grids', args)
                self.cfg['grid_id'] = ret['result']
            except Exception:
                logger.fatal('cannot register grid in DB', exc_info=True)
                raise
        else:
            # update grid
            try:
                ret = self.rest_client.request_seq('PATCH',
                        '/grids/{}'.format(self.cfg['grid_id']), args)
            except Exception:
                logger.warning('error updating grid in DB', exc_info=True)
        # Kick off the periodic queueing loop on the IO loop.
        self.io_loop.add_callback(self.queue_loop)
    async def queue_loop(self):
        """Run the queueing loop"""
        # check and clean grids
        for p in self.plugins:
            try:
                await p.check_and_clean()
            except Exception:
                logger.error('plugin %s.check_and_clean() raised exception',
                             p.__class__.__name__,exc_info=True)
        # check proxy cert
        try:
            self.check_proxy(self.max_duration)
        except Exception:
            logger.error('error checking proxy',exc_info=True)
        # queue tasks to grids
        for p in self.plugins:
            try:
                await p.queue()
            except Exception:
                logger.error('plugin %s.queue() raised exception',
                             p.__class__.__name__,exc_info=True)
        # set timeout: reschedule ourselves after queue_interval seconds
        # (default 300s; non-positive values fall back to the default).
        if 'queue' in self.cfg and 'queue_interval' in self.cfg['queue']:
            timeout = self.cfg['queue']['queue_interval']
            if timeout <= 0:
                timeout = 300
        else:
            timeout = 300
        self.io_loop.call_later(timeout, self.queue_loop)
    def check_proxy(self, duration=None):
        """
        Check the x509 proxy.
        Blocking function.

        Args:
            duration (int): minimum remaining proxy lifetime in seconds
                (converted to whole hours for SiteGlobusProxy).
        """
        try:
            if duration:
                self.proxy.set_duration(duration//3600)
            self.proxy.update_proxy()
            self.cfg['queue']['x509proxy'] = self.proxy.get_proxy()
        except Exception:
            # Best-effort: a failed proxy refresh is logged, not fatal.
            logger.warning('cannot setup x509 proxy', exc_info=True)
| |
import sys
import os
import os.path
import re
import readline
import json
from swift.common.bufferedhttp import http_connect_raw as http_connect
try:
import swiftclient as cloud
except ImportError:
print """OpenStack Swift python API package are needed, download at:
swiftclient https://github.com/openstack/python-swiftclient
"""
sys.exit(1)
import swiftclient as cloud
readline.parse_and_bind('tab: complete')
class Global:
    """Process-wide state shared by the interactive swift shell."""
    # Swift auth endpoint used for every connection.
    authurl = "http://10.245.123.72:8080/auth/v1.0/"
    # Active Connection instance (None until a successful "use" command).
    conn = None
    # When not None, __main__ pre-authenticates with the built-in test account.
    testmode = 1
    # .super_admin key consulted by Connection.get_role().
    adminkey = 888888
def connfailcb(msg="bye"):
    """call back for connection failed.

    Prints *msg* and terminates the process with exit status 1; suitable
    as the `failcb` argument of Connection.
    """
    print msg
    sys.exit(1)
class Connection:
    """Wrapper around a swiftclient connection for one group/user pair.

    Most methods return a tuple (returncode, message): 0 for success,
    1 for failure; message is a string or a list of strings/dicts.
    """
    def __init__(self,group, username, pwd, failcb=None):
        self.group=group
        self.user=username
        self.pwd=pwd
        self.conn=None
        # Optional callable invoked with a message when connect() fails.
        self.connfailcb=failcb
    def get_role(self):
        """Query swauth for this user's role.

        Returns (0, role) where role is the first group name starting with
        "." (an admin role) or "non-admin"; (1, None) on any error.
        """
        # NOTE(review): host/port and the .super_admin credentials are
        # hard-coded here instead of being derived from Global.authurl -
        # confirm this is intentional.
        try:
            res=http_connect("10.245.123.72", 8080, "GET", \
                "/auth/v2/%s/%s"%(self.group, self.user), \
                {"X-Auth-Admin-User":".super_admin", \
                "X-Auth-Admin-Key": "%s"%(Global.adminkey)}).getresponse()
            info=res.read()
            info=json.loads(info)
            for val in [ x["name"] for x in info["groups"]]:
                if val.startswith("."):
                    return (0, val)
            return (0, "non-admin")
        except Exception, e:
            return (1, None)
    def connect(self):
        """Authenticate against Global.authurl.

        Returns True on success; on failure prints the error, invokes the
        failure callback (if any) and returns False.
        """
        try:
            self.conn=cloud.Connection(\
                user="%s:%s"%(self.group, self.user), key=self.pwd, \
                authurl=Global.authurl)
            self.conn.get_auth()
            return True
        except cloud.ClientException , e:
            exctype, value=sys.exc_info()[:2]
            print "%s: %s"%(exctype.__name__, value)
            if self.connfailcb is not None:
                self.connfailcb("connection failed:")
            return False
    def list_containers(self):
        """List this account's containers (hiding "*_segments" containers).

        Returns (0, [dict, ...]) with name/account/count/bytes/ACL keys,
        or (1, [error lines]) on failure.
        """
        msg=[]
        try:
            for item in self.conn.get_account()[1]:
                #msg.append( "name: %s, obj count: %d, total bytes: %d"%(\
                #    item["name"], item["count"], item["bytes"]))
                if not item["name"].endswith("_segments"): #not display segment containers
                    header=self.conn.head_container(item["name"])
                    x={"name": None, "account": None, "count":None, "read-ACL":None,\
                        "write-ACL":None,}
                    x["name"]=item["name"]
                    # Account name is the last path component of the storage URL.
                    x["account"]=self.conn.url.rsplit("/", 1)[-1]
                    x["count"]=item["count"]
                    x["bytes"]=item["bytes"]
                    x["read-ACL"]=header.get("x-container-read", " ")
                    x["write-ACL"]=header.get("x-container-write", " ")
                    msg.append(x)
            return (0, msg)
        except cloud.ClientException , e:
            exctype, value=sys.exc_info()[:2]
            msg.append("%s:"%exctype.__name__)
            for x in str(value).split("\n"):
                msg.append(x)
            return (1, msg)
    def list_objects(self, container):
        """List the objects in *container*.

        Returns (0, [dict, ...]) where each dict is the listing entry
        augmented with "bytes" (from content-length) and "manifest";
        (1, "error") on failure.
        """
        msg=[]
        try:
            count = 0
            for item in self.conn.get_container(container)[1]:
                #print "name: ", item["name"]
                #print "    bytes: ", item["bytes"]
                #print "    content_type: ", item["content_type"]
                #print "    hash: ", item["hash"]
                #print "    last_modified", item["last_modified"]
                obj=item["name"]
                headers=self.conn.head_object(container, obj)
                x=item
                x["bytes"]=headers.get("content-length")
                x["manifest"]=headers.get("x-object-manifest", "")
                count += 1
                msg.append(x)
            #print "( %d objects are listed)"%count
            return (0, msg)
        except cloud.ClientException , e:
            exctype, value=sys.exc_info()[:2]
            return (1, "%s: %s"%(exctype.__name__, value))
    def _get_object_info(self, container, name):
        """ return tuple (returncode, outmsg, errmsg)
            returncode: 0 for success, 1 for failed,
            outmsg: list of strings
            errmsg: list of strings
        """
        try:
            x=self.conn.head_object(container, name)
            return (0, '\n'.join(["   %s: %s"%(i, x[i]) for i in x]))
        except cloud.ClientException , e:
            exctype, value=sys.exc_info()[:2]
            return (1, "%s: %s"%(exctype.__name__, value))
    def _upload_object(self, container, obj, name):
        """Upload local file *name* as *obj* in one request."""
        try:
            size=os.path.getsize(name)
            with open(name, "rb") as f:
                self.conn.put_object(container, obj, f, content_length=size)
            return (0, "upload succeeded")
        except cloud.ClientException , e:
            exctype, value=sys.exc_info()[:2]
            return (1, "%s: %s"%(exctype.__name__, value))
    def _upload_segment_object(self, container, obj, name, segsize):
        """Upload local file *name* as a segmented (DLO) object.

        Segments of at most *segsize* bytes go into "<container>_segments"
        and a zero-byte manifest object is written into *container*.
        Falls back to a plain upload when the file fits in one segment.
        """
        size=os.path.getsize(name)
        if size <=segsize:
            msg="file size smaller than segment size: %d <= %d\n"%(size, segsize)
            r=self._upload_object(container, obj, name)
            return (r[0], msg+r[1])
        else:
            msgout=""
            try:
                #obj=name
                # Normalize relative-path prefixes out of the object name.
                if obj.startswith('./') or obj.startswith('.\\'):
                    obj = obj[2:]
                if obj.startswith('/'):
                    obj = obj[1:]
                objmtime=os.path.getmtime(name) #modified time of file
                fullsize=os.path.getsize(name)
                #create sements container
                segcontainer="%s_segments"%container
                try:
                    self.conn.put_container(segcontainer)
                except cloud.ClientException, err:
                    msg = ' '.join(str(x) for x in (err.http_status, err.http_reason))
                    if err.http_response_content:
                        if msg:
                            msg += ': '
                        msg += err.http_response_content[:60]
                    msgout +='Error trying to create container %r: %s\n'%(segcontainer, msg)
                    return (1, msgout)
                except Exception, err:
                    raise
                #set master objects
                # The manifest prefix encodes mtime and size so re-uploads
                # get a distinct segment namespace.
                manifest= "%s/%s/%s/%s/"%(segcontainer, obj, objmtime, fullsize)
                self.conn.put_object(container, obj, "", content_length=0, \
                    headers={\
                    "x-object-meta-mtime": "%s"%objmtime,
                    "x-object-manifest": manifest,
                    })
                #upload segments
                segment=0
                segment_start=0
                with open(name, 'rb') as fp:
                    while segment_start < fullsize:
                        segment_size = segsize
                        if segment_start + segsize > fullsize:
                            segment_size = fullsize - segment_start
                        path="%s/%s/%s/%08d"%(obj, objmtime, fullsize, segment)
                        # put_object reads exactly segment_size bytes from fp.
                        self.conn.put_object(segcontainer, path, fp, \
                            content_length=segment_size)
                        msgout += "upload segment: %s/%s\n"%(segcontainer, path)
                        segment += 1
                        segment_start += segment_size
                #end with
                return (0, msgout)
            except cloud.ClientException, err:
                exctype, value=sys.exc_info()[:2]
                return (1, msgout+"%s: %s"%(exctype.__name__, value))
    def upload_object(self, container, obj, fname, segsize=None):
        """Upload *fname* as *obj*; segmented when *segsize* is given.

        If the object already exists, its metadata is returned instead of
        overwriting it.
        """
        if not os.path.exists(fname):
            print "file %s not exists"%(fname);return
        if not os.path.isfile(fname):
            print "file %s is not a file"%(fname); return
        ct=self._get_object_info(container, obj)
        msg=[]
        if 0 == ct[0]:
            msg.append( "object %s/%s already exists:"%(container, obj))
            msg.extend( ct[1].strip().split("\n"))
            return (0, msg)
        else:
            r=None
            if segsize is None:
                r=self._upload_object(container, obj, fname)
            else:
                r=self._upload_segment_object(container, obj, fname, segsize)
            msg.extend( r[1].strip().split("\n"))
            return (r[0], msg)
            #print "not exists"
    def delete_object(self, container, obj):
        """Delete *obj*, including any DLO segments it points to."""
        #ok, for the segmented data, we will get the x-object-manifest first
        try:
            manifest = None
            try:
                manifest = self.conn.head_object(container, obj).get(
                    'x-object-manifest')
            except cloud.ClientException, err:
                if err.http_status != 404: #not found
                    raise
            self.conn.delete_object(container, obj)
            if manifest is not None: #delete segmented data as well,but not the container
                scontainer, sprefix=manifest.split("/", 1)
                for delobj in self.conn.get_container(scontainer, prefix=sprefix)[1]:
                    self.conn.delete_object(scontainer, delobj["name"])
                    print "delete segment: %s/%s"%(scontainer, delobj["name"])
            return (0, "delete succeeded")
        except cloud.ClientException, err:
            if err.http_status != 404:
                raise
            print 'Object %s not found' %repr('%s/%s' % (container, obj))
            return (1, "Cannot delet this object")
        #try:
        #    self.conn.delete_object(container, obj)
        #    print "delete succeeded"
        #except (IOError, cloud.ClientException), e:
        #    exctype, value=sys.exc_info()[:2]
        #    print "%s: %s"%(exctype.__name__, value)
    def download_object(self, container, obj, dst):
        """Download *obj* to local path *dst* in 64KiB chunks."""
        ct=self._get_object_info(container, obj)
        chunksize=65535
        if 0 == ct[0]: #object exists
            try:
                with open(dst, 'wb') as f:
                    x=self.conn.get_object(container, obj, chunksize)[1]
                    for line in x: #x is a generator
                        f.write(line)
                return (0, "download succeed")
            except (IOError, cloud.ClientException), e:
                exctype, value=sys.exc_info()[:2]
                return (1, "%s: %s"%(exctype.__name__, value))
        else:
            return (1, ct[1])
def dispatch(parlist):
if parlist[0] == "?" or parlist[0] == "help" or parlist[0]== "h": #stat the file
help()
return
if parlist[0]== "use":
if len(parlist) == 4:
Global.conn=None
Global.conn=Connection(parlist[1], parlist[2], parlist[3])
if not Global.conn.connect():
Global.conn=None
else:
print "login as %s:%s"%(parlist[1], parlist[2])
else:
print "Example: use groupA userA passwordA"
return
if Global.conn is None:
print "Please login first, example: use groupA userA passwordA"
return
if parlist[0] == "list":
if len(parlist) == 1:
Global.conn.list_containers()
elif len(parlist) == 2:
Global.conn.list_objects(parlist[1])
else:
print "list [container]"
elif parlist[0] == "upload":
if len(parlist) == 3:
Global.conn.upload_object(parlist[1], parlist[2])
elif len(parlist) == 4:
if re.match("^[0-9]+(B|K|M|G)$", parlist[3]) is None:
print "segment size examples: 2K, 1M, 2B, 1G. M: Mega-bytes..."
else:
unit=parlist[3][-1]
size=int(parlist[3][:-1])
segsize=None
if "B" == unit:
segsize= size
elif "K" == unit:
segsize=size*1024
elif "M" == unit:
segsize= size*1024*1024
elif "G" == unit:
segsize= size*1024*1024*1024
Global.conn.upload_object(parlist[1], parlist[2], segsize)
else:
print "Example: upload containerA objA [segmensize]"
elif parlist[0] == "download":
if len(parlist) == 4:
Global.conn.download_object(parlist[1], parlist[2], parlist[3])
else:
print "Example: download containerA objA fileA"
elif parlist[0] == "delete":
if len(parlist) == 3:
Global.conn.delete_object(parlist[1], parlist[2])
else:
print "Example: delete containerA objA"
else:
print "%s not support"%parlist[0]
def main():
    """Interactive REPL: read a command line, dispatch it, repeat.

    Ctrl+C aborts the current command; EOF (Ctrl+d) propagates to the
    __main__ handler, which exits the process.
    """
    print "type h, help or ? for help. Ctl+d to exit."
    while 1:
        try:
            par=raw_input("\n(Ctrl+d exit; Ctrl+c interrupt)>> ")
            print
            parlist=par.strip().split()
            if parlist:
                dispatch(parlist)
        except (KeyboardInterrupt, ):
            # Ctrl+c: drop the partial command and re-prompt.
            print
            continue
def help():
    """Print the command reference for the interactive shell.

    NOTE(review): shadows the built-in help() within this module.
    """
    print "use group username password: use 'group:username' and 'password' to login."
    print "list [container]: list the containers for the account or the objects for a container."
    print "upload container object srcfile [segsize]: upload local file to container"
    print "download container object dstfile: download object to local"
    print "delete container object: delete object from container"
def test():
    """Ad-hoc manual smoke test: segmented upload with hard-coded
    credentials and local files; exits the process when done."""
    Global.conn=Connection("groupX", "test1", "test1pass")
    Global.conn.connect()
    #Global.conn.list_objects("testcontainer2")
    #Global.conn.download_object("tbcontainer", "08-swift-recon.txt", "fuck")
    Global.conn.upload_object("testcontainer2", "a.mov", "batman.mov", 20*1024*1024)
    sys.exit(0)
if __name__ == "__main__":
    #test()
    try:
        # In test mode, pre-authenticate with the built-in account so the
        # shell starts already logged in.
        if Global.testmode is not None:
            Global.conn=Connection("groupX", "test1", "test1pass")
            Global.conn.connect()
        main()
    except EOFError, e:
        # Ctrl+d at the prompt: exit quietly.
        print; sys.exit(1)
    except (Exception, ), e:
        import traceback
        traceback.print_exc()
| |
from abc import ABCMeta, abstractmethod, abstractproperty
from typing import Dict as ptDict, Type as ptType
import itertools
import weakref
import numpy as np
from numba.core.utils import cached_property, get_hashable_key
# Types are added to a global registry (_typecache) in order to assign
# them unique integer codes for fast matching in _dispatcher.c.
# However, we also want types to be disposable, therefore we ensure
# each type is interned as a weak reference, so that it lives only as
# long as necessary to keep a stable type code.
# NOTE: some types can still be made immortal elsewhere (for example
# in _dispatcher.c's internal caches).
_typecodes = itertools.count()
def _autoincr():
n = next(_typecodes)
# 4 billion types should be enough, right?
assert n < 2 ** 32, "Limited to 4 billion types"
return n
# Global intern table of live Type instances; both key and value are the
# same weakref (see _TypeMetaclass._intern), so entries vanish with the type.
_typecache: ptDict[weakref.ref, weakref.ref] = {}
def _on_type_disposal(wr, _pop=_typecache.pop):
    # Weakref callback: drop the registry entry when a type is collected.
    # _typecache.pop is bound as a default argument so the callback still
    # resolves during interpreter shutdown.
    _pop(wr, None)
class _TypeMetaclass(ABCMeta):
    """
    A metaclass that will intern instances after they are created.
    This is done by first creating a new instance (including calling
    __init__, which sets up the required attributes for equality
    and hashing), then looking it up in the _typecache registry.
    """
    def __init__(cls, name, bases, orig_vars):
        # __init__ is hooked to mark whether a Type class being defined is a
        # Numba internal type (one which is defined somewhere under the `numba`
        # module) or an external type (one which is defined elsewhere, for
        # example a user defined type).
        super(_TypeMetaclass, cls).__init__(name, bases, orig_vars)
        root = (cls.__module__.split('.'))[0]
        cls._is_internal = root == "numba"
    def _intern(cls, inst):
        # Try to intern the created instance
        # The weakref's disposal callback keeps _typecache from pinning
        # types alive (see _on_type_disposal).
        wr = weakref.ref(inst, _on_type_disposal)
        orig = _typecache.get(wr)
        orig = orig and orig()
        if orig is not None:
            # An equal instance is already interned: reuse it (and its
            # existing type code).
            return orig
        else:
            # First occurrence: assign a fresh code and register the weakref.
            inst._code = _autoincr()
            _typecache[wr] = wr
            return inst
    def __call__(cls, *args, **kwargs):
        """
        Instantiate *cls* (a Type subclass, presumably) and intern it.
        If an interned instance already exists, it is returned, otherwise
        the new instance is returned.
        """
        inst = type.__call__(cls, *args, **kwargs)
        return cls._intern(inst)
def _type_reconstructor(reconstructor, reconstructor_args, state):
"""
Rebuild function for unpickling types.
"""
obj = reconstructor(*reconstructor_args)
if state:
obj.__dict__.update(state)
return type(obj)._intern(obj)
class Type(metaclass=_TypeMetaclass):
    """
    The base class for all Numba types.
    It is essential that proper equality comparison is implemented. The
    default implementation uses the "key" property (overridable in subclasses)
    for both comparison and hashing, to ensure sane behaviour.
    """
    # NOTE(review): presumably indicates whether values of this type can be
    # mutated in place; subclasses override as needed.
    mutable = False
    # Whether the type is reflected at the python<->nopython boundary
    reflected = False

    def __init__(self, name):
        self.name = name

    @property
    def key(self):
        """
        A property used for __eq__, __ne__ and __hash__. Can be overridden
        in subclasses.
        """
        return self.name

    @property
    def mangling_args(self):
        """
        Returns `(basename, args)` where `basename` is the name of the type
        and `args` is a sequence of parameters of the type.
        Subclass should override to specialize the behavior.
        By default, this returns `(self.name, ())`.
        """
        return self.name, ()

    def __repr__(self):
        return self.name

    def __hash__(self):
        # Hash and equality both derive from the overridable "key"
        # property, keeping them consistent with each other.
        return hash(self.key)

    def __eq__(self, other):
        # Exact class identity is required; subclasses do not compare equal.
        return self.__class__ is other.__class__ and self.key == other.key

    def __ne__(self, other):
        return not (self == other)

    def __reduce__(self):
        # Route unpickling through _type_reconstructor so the rebuilt
        # instance is re-interned in the global type cache.
        reconstructor, args, state = super(Type, self).__reduce__()
        return (_type_reconstructor, (reconstructor, args, state))

    def unify(self, typingctx, other):
        """
        Try to unify this type with the *other*. A third type must
        be returned, or None if unification is not possible.
        Only override this if the coercion logic cannot be expressed
        as simple casting rules.
        """
        return None

    def can_convert_to(self, typingctx, other):
        """
        Check whether this type can be converted to the *other*.
        If successful, must return a string describing the conversion, e.g.
        "exact", "promote", "unsafe", "safe"; otherwise None is returned.
        """
        return None

    def can_convert_from(self, typingctx, other):
        """
        Similar to *can_convert_to*, but in reverse. Only needed if
        the type provides conversion from other types.
        """
        return None

    def is_precise(self):
        """
        Whether this type is precise, i.e. can be part of a successful
        type inference. Default implementation returns True.
        """
        return True

    def augment(self, other):
        """
        Augment this type with the *other*. Return the augmented type,
        or None if not supported.
        """
        return None

    # User-facing helpers. These are not part of the core Type API but
    # are provided so that users can write e.g. `numba.boolean(1.5)`
    # (returns True) or `types.int32(types.int32[:])` (returns something
    # usable as a function signature).

    def __call__(self, *args):
        from numba.core.typing import signature
        # A single non-Type argument means "cast this Python value";
        # otherwise build a function signature with self as return type.
        if len(args) == 1 and not isinstance(args[0], Type):
            return self.cast_python_value(args[0])
        return signature(self, # return_type
                         *args)

    def __getitem__(self, args):
        """
        Return an array of this type.
        """
        from numba.core.types import Array
        ndim, layout = self._determine_array_spec(args)
        return Array(dtype=self, ndim=ndim, layout=layout)

    def _determine_array_spec(self, args):
        # XXX non-contiguous by default, even for 1d arrays,
        # doesn't sound very intuitive
        def validate_slice(s):
            # Only bare slices (":" or "::1") are accepted as dimensions.
            return isinstance(s, slice) and s.start is None and s.stop is None

        if isinstance(args, (tuple, list)) and all(map(validate_slice, args)):
            ndim = len(args)
            # "::1" on the first dimension selects Fortran order, on the
            # last dimension C order; otherwise any (strided) layout.
            if args[0].step == 1:
                layout = 'F'
            elif args[-1].step == 1:
                layout = 'C'
            else:
                layout = 'A'
        elif validate_slice(args):
            # Single slice: a 1-d array.
            ndim = 1
            if args.step == 1:
                layout = 'C'
            else:
                layout = 'A'
        else:
            # Raise a KeyError to not be handled by collection constructors (e.g. list).
            raise KeyError(f"Can only index numba types with slices with no start or stop, got {args}.")

        return ndim, layout

    def cast_python_value(self, args):
        # Concrete types that support casting a Python value override this.
        raise NotImplementedError

    @property
    def is_internal(self):
        """ Returns True if this class is an internally defined Numba type by
        virtue of the module in which it is instantiated, False else."""
        return self._is_internal

    def dump(self, tab=''):
        """Print a one-line debug description of this type."""
        print(f'{tab}DUMP {type(self).__name__}[code={self._code}, name={self.name}]')
class Dummy(Type):
    """
    Abstract base for types that have no real representation and are
    compatible with a void*.
    """
class Hashable(Type):
    """
    Abstract base for all hashable types.
    """
class Number(Hashable):
    """
    Abstract base for all numeric types.
    """

    def unify(self, typingctx, other):
        """
        Unify two numeric types using NumPy's promotion rules.
        """
        from numba.np import numpy_support
        if not isinstance(other, Number):
            # Numbers only unify with other numbers.
            return None
        # XXX: promotion can be unsafe, e.g. {int64, uint64} -> float64
        promoted = np.promote_types(numpy_support.as_dtype(self),
                                    numpy_support.as_dtype(other))
        return numpy_support.from_dtype(promoted)
class Callable(Type):
    """
    Abstract base for types that can be called.
    """

    @abstractmethod
    def get_call_type(self, context, args, kws):
        """
        Resolve the signature of a call with the given arguments, using
        the typing *context*. A signature object is returned, or None.
        """

    @abstractmethod
    def get_call_signatures(self):
        """
        Return a tuple ``(list of signatures, parameterized)``.
        """

    @abstractmethod
    def get_impl_key(self, sig):
        """
        Return the implementation key for the signature *sig*.
        """
class DTypeSpec(Type):
    """
    Base class for types usable as "dtype" arguments to various Numpy APIs
    (e.g. np.empty()).
    """

    # ``abc.abstractproperty`` is deprecated since Python 3.3; the
    # documented replacement is stacking @property over @abstractmethod.
    @property
    @abstractmethod
    def dtype(self):
        """
        The actual dtype denoted by this dtype spec (a Type instance).
        """
class IterableType(Type):
    """
    Base class for iterable types.
    """

    # ``abc.abstractproperty`` is deprecated since Python 3.3; the
    # documented replacement is stacking @property over @abstractmethod.
    @property
    @abstractmethod
    def iterator_type(self):
        """
        The iterator type obtained when calling iter() (explicitly or implicitly).
        """
class Sized(Type):
    """
    Abstract base for types whose values support len().
    """
class ConstSized(Sized):
    """
    Sized types whose length is a constant.
    """

    @abstractmethod
    def __len__(self):
        pass
class IteratorType(IterableType):
    """
    Base class for all iterator types.
    Derived classes should implement the *yield_type* attribute.
    """

    def __init__(self, name, **kwargs):
        super(IteratorType, self).__init__(name, **kwargs)

    # ``abc.abstractproperty`` is deprecated since Python 3.3; the
    # documented replacement is stacking @property over @abstractmethod.
    @property
    @abstractmethod
    def yield_type(self):
        """
        The type of values yielded by the iterator.
        """

    # This is a property to avoid recursivity (for pickling)
    @property
    def iterator_type(self):
        return self
class Container(Sized, IterableType):
    """
    Abstract base for container types (sized and iterable).
    """
class Sequence(Container):
    """
    Abstract base for 1d sequence types. Instances should have the
    *dtype* attribute.
    """
class MutableSequence(Sequence):
    """
    Abstract base for 1d mutable sequence types. Instances should have
    the *dtype* attribute.
    """
class ArrayCompatible(Type):
    """
    Type class for Numpy array-compatible objects (typically, objects
    exposing an __array__ method).
    Derived classes should implement the *as_array* attribute.
    """
    # If overridden by a subclass, it should also implement typing
    # for '__array_wrap__' with arguments (input, formal result).
    array_priority = 0.0

    # ``abc.abstractproperty`` is deprecated since Python 3.3; the
    # documented replacement is stacking @property over @abstractmethod.
    @property
    @abstractmethod
    def as_array(self):
        """
        The equivalent array type, for operations supporting array-compatible
        objects (such as ufuncs).
        """

    # For compatibility with types.Array

    @cached_property
    def ndim(self):
        return self.as_array.ndim

    @cached_property
    def layout(self):
        return self.as_array.layout

    @cached_property
    def dtype(self):
        return self.as_array.dtype
class Literal(Type):
    """Base class for Literal types.
    Literal types contain the original Python value in the type.
    A literal type should always be constructed from the `literal(val)`
    function.
    """

    # *ctor_map* is a dictionary mapping Python types to Literal subclasses
    # for constructing a numba type for a given Python type.
    # It is used in `literal(val)` function.
    # To add new Literal subclass, register a new mapping to this dict.
    ctor_map: ptDict[type, ptType['Literal']] = {}

    # *_literal_type_cache* is used to cache the numba type of the given value.
    _literal_type_cache = None

    def __init__(self, value):
        # Guard against direct instantiation of the abstract base class.
        if type(self) is Literal:
            raise TypeError(
                "Cannot be constructed directly. "
                "Use `numba.types.literal(value)` instead",
            )
        self._literal_init(value)
        fmt = "Literal[{}]({})"
        super(Literal, self).__init__(fmt.format(type(value).__name__, value))

    def _literal_init(self, value):
        # Store the wrapped value and derive the equality/hash key from it.
        self._literal_value = value
        # We want to support constants of non-hashable values, therefore
        # fall back on the value's id() if necessary.
        self._key = get_hashable_key(value)

    @property
    def literal_value(self):
        """The Python value wrapped by this literal type."""
        return self._literal_value

    @property
    def literal_type(self):
        """The Numba type of the wrapped value (computed once and cached)."""
        if self._literal_type_cache is None:
            from numba.core import typing
            ctx = typing.Context()
            try:
                res = ctx.resolve_value_type(self.literal_value)
            except ValueError:
                # Not all literal types have a literal_value that can be
                # resolved to a type, for example, LiteralStrKeyDict has a
                # literal_value that is a python dict for which there's no
                # `typeof` support.
                msg = "{} has no attribute 'literal_type'".format(self)
                raise AttributeError(msg)
            self._literal_type_cache = res
        return self._literal_type_cache
class TypeRef(Dummy):
    """First-class reference to a type.
    Used when a type itself is passed around as a value.
    """

    def __init__(self, instance_type):
        self.instance_type = instance_type
        name = 'typeref[{}]'.format(self.instance_type)
        super(TypeRef, self).__init__(name)

    @property
    def key(self):
        # Equality/hashing are driven by the referenced type.
        return self.instance_type
class InitialValue(object):
    """
    Mixin for types that may carry an initial value, exposed through the
    read-only ``initial_value`` attribute.
    """

    def __init__(self, initial_value):
        self._initial_value = initial_value

    @property
    def initial_value(self):
        return self._initial_value
class Poison(Type):
    """
    The "bottom" type of the type system: it never unifies with anything,
    and its unliteral version is a Poison of itself. For easier debugging,
    pass the type being poisoned to the constructor where possible (not
    strictly required).
    """

    def __init__(self, ty):
        self.ty = ty
        super().__init__(name="Poison<%s>" % ty)

    def __unliteral__(self):
        return Poison(self)

    def unify(self, typingctx, other):
        # Poison unifies with nothing.
        return None
| |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=no-member, invalid-name, protected-access, no-self-use
# pylint: disable=too-many-branches, too-many-arguments, no-self-use
# pylint: disable=too-many-lines, arguments-differ
"""Definition of various recurrent neural network layers."""
from __future__ import print_function
__all__ = ['RNN', 'LSTM', 'GRU']
from ... import ndarray
from .. import Block
from . import rnn_cell
class _RNNLayer(Block):
    """Implementation of recurrent layers.

    Common base for RNN/LSTM/GRU: creates the per-layer (and per-direction)
    i2h/h2h weight and bias parameters, and dispatches between the fused
    `ndarray.RNN` kernel and an unfused stack of rnn_cell cells.
    """
    def __init__(self, hidden_size, num_layers, layout,
                 dropout, bidirectional, input_size,
                 i2h_weight_initializer, h2h_weight_initializer,
                 i2h_bias_initializer, h2h_bias_initializer,
                 mode, **kwargs):
        super(_RNNLayer, self).__init__(**kwargs)
        assert layout == 'TNC' or layout == 'NTC', \
            "Invalid layout %s; must be one of ['TNC' or 'NTC']"%layout
        self._hidden_size = hidden_size
        self._num_layers = num_layers
        self._mode = mode
        self._layout = layout
        self._dropout = dropout
        # Number of directions: 2 for bidirectional, else 1.
        self._dir = 2 if bidirectional else 1
        self._input_size = input_size
        self._i2h_weight_initializer = i2h_weight_initializer
        self._h2h_weight_initializer = h2h_weight_initializer
        self._i2h_bias_initializer = i2h_bias_initializer
        self._h2h_bias_initializer = h2h_bias_initializer
        # Number of gates per mode (e.g. 4 for LSTM, 3 for GRU).
        self._gates = {'rnn_relu': 1, 'rnn_tanh': 1, 'lstm': 4, 'gru': 3}[mode]

        self.i2h_weight = []
        self.h2h_weight = []
        self.i2h_bias = []
        self.h2h_bias = []

        ng, ni, nh = self._gates, input_size, hidden_size
        for i in range(num_layers):
            # 'l' = forward direction; 'r' = reverse (bidirectional only).
            for j in (['l', 'r'] if self._dir == 2 else ['l']):
                self.i2h_weight.append(
                    self.params.get('%s%d_i2h_weight'%(j, i), shape=(ng*nh, ni),
                                    init=i2h_weight_initializer,
                                    allow_deferred_init=True))
                self.h2h_weight.append(
                    self.params.get('%s%d_h2h_weight'%(j, i), shape=(ng*nh, nh),
                                    init=h2h_weight_initializer,
                                    allow_deferred_init=True))
                self.i2h_bias.append(
                    self.params.get('%s%d_i2h_bias'%(j, i), shape=(ng*nh,),
                                    init=i2h_bias_initializer,
                                    allow_deferred_init=True))
                self.h2h_bias.append(
                    self.params.get('%s%d_h2h_bias'%(j, i), shape=(ng*nh,),
                                    init=h2h_bias_initializer,
                                    allow_deferred_init=True))
            # The next layer consumes the (possibly direction-concatenated)
            # hidden state as its input.
            ni = nh * self._dir

        # Register all created parameters on this Block.
        for param_list in [self.i2h_weight, self.h2h_weight, self.i2h_bias, self.h2h_bias]:
            for p in param_list:
                self._reg_params[p.name] = p

        # Pre-build the unfused cell stack used on the fallback path.
        self._unfused = self._unfuse()

    def __repr__(self):
        s = '{name}({mapping}, {_layout}'
        if self._num_layers != 1:
            s += ', num_layers={_num_layers}'
        if self._dropout != 0:
            s += ', dropout={_dropout}'
        if self._dir == 2:
            s += ', bidirectional'
        s += ')'
        shape = self.i2h_weight[0].shape
        # input size may still be unknown (deferred init) -> show None.
        mapping = '{0} -> {1}'.format(shape[1] if shape[1] else None, shape[0] // self._gates)
        return s.format(name=self.__class__.__name__,
                        mapping=mapping,
                        **self.__dict__)

    def state_info(self, batch_size=0):
        # Subclasses describe the shape/layout of their recurrent state(s).
        raise NotImplementedError

    def _unfuse(self):
        """Unfuses the fused RNN in to a stack of rnn cells."""
        get_cell = {'rnn_relu': lambda **kwargs: rnn_cell.RNNCell(self._hidden_size,
                                                                  activation='relu',
                                                                  **kwargs),
                    'rnn_tanh': lambda **kwargs: rnn_cell.RNNCell(self._hidden_size,
                                                                  activation='tanh',
                                                                  **kwargs),
                    'lstm': lambda **kwargs: rnn_cell.LSTMCell(self._hidden_size,
                                                               **kwargs),
                    'gru': lambda **kwargs: rnn_cell.GRUCell(self._hidden_size,
                                                             **kwargs)}[self._mode]

        stack = rnn_cell.SequentialRNNCell(prefix=self.prefix, params=self.params)
        with stack.name_scope():
            ni = self._input_size
            for i in range(self._num_layers):
                kwargs = {'input_size': ni,
                          'i2h_weight_initializer': self._i2h_weight_initializer,
                          'h2h_weight_initializer': self._h2h_weight_initializer,
                          'i2h_bias_initializer': self._i2h_bias_initializer,
                          'h2h_bias_initializer': self._h2h_bias_initializer}
                if self._dir == 2:
                    stack.add(rnn_cell.BidirectionalCell(
                        get_cell(prefix='l%d_'%i, **kwargs),
                        get_cell(prefix='r%d_'%i, **kwargs)))
                else:
                    stack.add(get_cell(prefix='l%d_'%i, **kwargs))

                # Dropout between layers only, never after the last layer.
                if self._dropout > 0 and i != self._num_layers - 1:
                    stack.add(rnn_cell.DropoutCell(self._dropout))
                ni = self._hidden_size * self._dir

        return stack

    def begin_state(self, batch_size=0, func=ndarray.zeros, **kwargs):
        """Initial state for this cell.
        Parameters
        ----------
        batch_size: int
            Only required for `NDArray` API. Size of the batch ('N' in layout).
            Dimension of the input.
        func : callable, default `ndarray.zeros`
            Function for creating initial state.
            For Symbol API, func can be `symbol.zeros`, `symbol.uniform`,
            `symbol.var` etc. Use `symbol.var` if you want to directly
            feed input as states.
            For NDArray API, func can be `ndarray.zeros`, `ndarray.ones`, etc.
        **kwargs :
            Additional keyword arguments passed to func. For example
            `mean`, `std`, `dtype`, etc.
        Returns
        -------
        states : nested list of Symbol
            Starting states for the first RNN step.
        """
        states = []
        for i, info in enumerate(self.state_info(batch_size)):
            if info is not None:
                info.update(kwargs)
            else:
                info = kwargs
            states.append(func(name='%sh0_%d'%(self.prefix, i), **info))
        return states

    def forward(self, inputs, states=None):
        # Run the layer; when *states* is None, zero states are created and
        # only the output (not the final states) is returned.
        batch_size = inputs.shape[self._layout.find('N')]
        skip_states = states is None
        if skip_states:
            states = self.begin_state(batch_size, ctx=inputs.context)
        if isinstance(states, ndarray.NDArray):
            states = [states]
        for state, info in zip(states, self.state_info(batch_size)):
            if state.shape != info['shape']:
                raise ValueError(
                    "Invalid recurrent state shape. Expecting %s, got %s."%(
                        str(info['shape']), str(state.shape)))
        if self._input_size == 0:
            # Input size was deferred: fix up the first layer's i2h weights
            # from the actual input feature dimension.
            for i in range(self._dir):
                self.i2h_weight[i].shape = (self._gates*self._hidden_size, inputs.shape[2])
                self.i2h_weight[i]._finish_deferred_init()
        # NOTE(review): `and` binds tighter than `or`, so this reads as
        # gpu OR (lstm/gru AND no dropout) -- confirm this is intended.
        if inputs.context.device_type == 'gpu' or \
                self._mode in ['lstm', 'gru'] and not self._dropout:
            out = self._forward_kernel(inputs, states)
        else:
            out = self._forward(inputs, states)

        # out is (output, state)
        return out[0] if skip_states else out

    def _forward(self, inputs, states):
        """forward using gluon cell"""
        ns = len(states)
        axis = self._layout.find('T')
        # Interleave per-layer states into the flat order the cell stack expects.
        states = sum(zip(*((j for j in i) for i in states)), ())
        outputs, states = self._unfused.unroll(
            inputs.shape[axis], inputs, states,
            layout=self._layout, merge_outputs=True)
        new_states = []
        for i in range(ns):
            # Re-stack the flat per-layer states back into one tensor per
            # state kind.
            state = ndarray.concat(*(j.reshape((1,)+j.shape) for j in states[i::ns]), dim=0)
            new_states.append(state)

        return outputs, new_states

    def _forward_kernel(self, inputs, states):
        """Forward using the fused CUDNN or CPU kernel."""
        if self._layout == 'NTC':
            # The fused kernel expects TNC; transpose in (and back out below).
            inputs = ndarray.swapaxes(inputs, dim1=0, dim2=1)
        ctx = inputs.context
        # Flatten all parameters into the single packed vector the fused
        # RNN operator consumes.
        params = sum(zip(self.i2h_weight, self.h2h_weight), ())
        params += sum(zip(self.i2h_bias, self.h2h_bias), ())
        params = (i.data(ctx).reshape((-1,)) for i in params)
        params = ndarray.concat(*params, dim=0)

        rnn = ndarray.RNN(inputs, params, *states, state_size=self._hidden_size,
                          num_layers=self._num_layers, bidirectional=self._dir == 2,
                          p=self._dropout, state_outputs=True, mode=self._mode)

        if self._mode == 'lstm':
            # LSTM returns both hidden and cell state outputs.
            outputs, states = rnn[0], [rnn[1], rnn[2]]
        else:
            outputs, states = rnn[0], [rnn[1]]

        if self._layout == 'NTC':
            outputs = ndarray.swapaxes(outputs, dim1=0, dim2=1)

        return outputs, states
class RNN(_RNNLayer):
    r"""Applies a multi-layer Elman RNN with `tanh` or `ReLU` non-linearity to an input sequence.

    For each element in the input sequence, each layer computes the following
    function:

    .. math::
        h_t = \tanh(w_{ih} * x_t + b_{ih} + w_{hh} * h_{(t-1)} + b_{hh})

    where :math:`h_t` is the hidden state at time `t`, and :math:`x_t` is the output
    of the previous layer at time `t` or :math:`input_t` for the first layer.
    If nonlinearity='relu', then `ReLU` is used instead of `tanh`.

    Parameters
    ----------
    hidden_size: int
        The number of features in the hidden state h.
    num_layers: int, default 1
        Number of recurrent layers.
    activation: {'relu' or 'tanh'}, default 'relu'
        The activation function to use.
    layout : str, default 'TNC'
        The format of input and output tensors. T, N and C stand for
        sequence length, batch size, and feature dimensions respectively.
    dropout: float, default 0
        If non-zero, introduces a dropout layer on the outputs of each
        RNN layer except the last layer.
    bidirectional: bool, default False
        If `True`, becomes a bidirectional RNN.
    i2h_weight_initializer : str or Initializer
        Initializer for the input weights matrix, used for the linear
        transformation of the inputs.
    h2h_weight_initializer : str or Initializer
        Initializer for the recurrent weights matrix, used for the linear
        transformation of the recurrent state.
    i2h_bias_initializer : str or Initializer
        Initializer for the bias vector.
    h2h_bias_initializer : str or Initializer
        Initializer for the bias vector.
    input_size: int, default 0
        The number of expected features in the input x.
        If not specified, it will be inferred from input.
    prefix : str or None
        Prefix of this `Block`.
    params : ParameterDict or None
        Shared Parameters for this `Block`.

    Inputs:
        - **data**: input tensor with shape `(sequence_length, batch_size, input_size)`
          when `layout` is "TNC". For other layouts, dimensions are permuted accordingly
          using transpose() operator which adds performance overhead. Consider creating
          batches in TNC layout during data batching step.
        - **states**: initial recurrent state tensor with shape
          `(num_layers, batch_size, num_hidden)`. If `bidirectional` is True,
          shape will instead be `(2*num_layers, batch_size, num_hidden)`. If
          `states` is None, zeros will be used as default begin states.

    Outputs:
        - **out**: output tensor with shape `(sequence_length, batch_size, num_hidden)`
          when `layout` is "TNC". If `bidirectional` is True, output shape will instead
          be `(sequence_length, batch_size, 2*num_hidden)`
        - **out_states**: output recurrent state tensor with the same shape as `states`.
          If `states` is None `out_states` will not be returned.

    Examples
    --------
    >>> layer = mx.gluon.rnn.RNN(100, 3)
    >>> layer.initialize()
    >>> input = mx.nd.random.uniform(shape=(5, 3, 10))
    >>> # by default zeros are used as begin state
    >>> output = layer(input)
    >>> # manually specify begin state.
    >>> h0 = mx.nd.random.uniform(shape=(3, 3, 100))
    >>> output, hn = layer(input, h0)
    """
    def __init__(self, hidden_size, num_layers=1, activation='relu',
                 layout='TNC', dropout=0, bidirectional=False,
                 i2h_weight_initializer=None, h2h_weight_initializer=None,
                 i2h_bias_initializer='zeros', h2h_bias_initializer='zeros',
                 input_size=0, **kwargs):
        # Mode string is 'rnn_relu' or 'rnn_tanh' depending on *activation*.
        super(RNN, self).__init__(hidden_size, num_layers, layout,
                                  dropout, bidirectional, input_size,
                                  i2h_weight_initializer, h2h_weight_initializer,
                                  i2h_bias_initializer, h2h_bias_initializer,
                                  'rnn_'+activation, **kwargs)

    def state_info(self, batch_size=0):
        # Single hidden-state tensor, layout LNC (layers*dirs, batch, channels).
        return [{'shape': (self._num_layers * self._dir, batch_size, self._hidden_size),
                 '__layout__': 'LNC'}]
class LSTM(_RNNLayer):
    r"""Applies a multi-layer long short-term memory (LSTM) RNN to an input sequence.

    For each element in the input sequence, each layer computes the following
    function:

    .. math::
        \begin{array}{ll}
        i_t = sigmoid(W_{ii} x_t + b_{ii} + W_{hi} h_{(t-1)} + b_{hi}) \\
        f_t = sigmoid(W_{if} x_t + b_{if} + W_{hf} h_{(t-1)} + b_{hf}) \\
        g_t = \tanh(W_{ig} x_t + b_{ig} + W_{hc} h_{(t-1)} + b_{hg}) \\
        o_t = sigmoid(W_{io} x_t + b_{io} + W_{ho} h_{(t-1)} + b_{ho}) \\
        c_t = f_t * c_{(t-1)} + i_t * g_t \\
        h_t = o_t * \tanh(c_t)
        \end{array}

    where :math:`h_t` is the hidden state at time `t`, :math:`c_t` is the
    cell state at time `t`, :math:`x_t` is the hidden state of the previous
    layer at time `t` or :math:`input_t` for the first layer, and :math:`i_t`,
    :math:`f_t`, :math:`g_t`, :math:`o_t` are the input, forget, cell, and
    out gates, respectively.

    Parameters
    ----------
    hidden_size: int
        The number of features in the hidden state h.
    num_layers: int, default 1
        Number of recurrent layers.
    layout : str, default 'TNC'
        The format of input and output tensors. T, N and C stand for
        sequence length, batch size, and feature dimensions respectively.
    dropout: float, default 0
        If non-zero, introduces a dropout layer on the outputs of each
        RNN layer except the last layer.
    bidirectional: bool, default False
        If `True`, becomes a bidirectional RNN.
    i2h_weight_initializer : str or Initializer
        Initializer for the input weights matrix, used for the linear
        transformation of the inputs.
    h2h_weight_initializer : str or Initializer
        Initializer for the recurrent weights matrix, used for the linear
        transformation of the recurrent state.
    i2h_bias_initializer : str or Initializer, default 'lstmbias'
        Initializer for the bias vector. By default, bias for the forget
        gate is initialized to 1 while all other biases are initialized
        to zero.
    h2h_bias_initializer : str or Initializer
        Initializer for the bias vector.
    input_size: int, default 0
        The number of expected features in the input x.
        If not specified, it will be inferred from input.
    prefix : str or None
        Prefix of this `Block`.
    params : `ParameterDict` or `None`
        Shared Parameters for this `Block`.

    Inputs:
        - **data**: input tensor with shape `(sequence_length, batch_size, input_size)`
          when `layout` is "TNC". For other layouts, dimensions are permuted accordingly
          using transpose() operator which adds performance overhead. Consider creating
          batches in TNC layout during data batching step.
        - **states**: a list of two initial recurrent state tensors. Each has shape
          `(num_layers, batch_size, num_hidden)`. If `bidirectional` is True,
          shape will instead be `(2*num_layers, batch_size, num_hidden)`. If
          `states` is None, zeros will be used as default begin states.

    Outputs:
        - **out**: output tensor with shape `(sequence_length, batch_size, num_hidden)`
          when `layout` is "TNC". If `bidirectional` is True, output shape will instead
          be `(sequence_length, batch_size, 2*num_hidden)`
        - **out_states**: a list of two output recurrent state tensors with the same
          shape as in `states`. If `states` is None `out_states` will not be returned.

    Examples
    --------
    >>> layer = mx.gluon.rnn.LSTM(100, 3)
    >>> layer.initialize()
    >>> input = mx.nd.random.uniform(shape=(5, 3, 10))
    >>> # by default zeros are used as begin state
    >>> output = layer(input)
    >>> # manually specify begin state.
    >>> h0 = mx.nd.random.uniform(shape=(3, 3, 100))
    >>> c0 = mx.nd.random.uniform(shape=(3, 3, 100))
    >>> output, hn = layer(input, [h0, c0])
    """
    def __init__(self, hidden_size, num_layers=1, layout='TNC',
                 dropout=0, bidirectional=False, input_size=0,
                 i2h_weight_initializer=None, h2h_weight_initializer=None,
                 i2h_bias_initializer='zeros', h2h_bias_initializer='zeros',
                 **kwargs):
        super(LSTM, self).__init__(hidden_size, num_layers, layout,
                                   dropout, bidirectional, input_size,
                                   i2h_weight_initializer, h2h_weight_initializer,
                                   i2h_bias_initializer, h2h_bias_initializer,
                                   'lstm', **kwargs)

    def state_info(self, batch_size=0):
        # Two state tensors (hidden state h and cell state c), both LNC.
        return [{'shape': (self._num_layers * self._dir, batch_size, self._hidden_size),
                 '__layout__': 'LNC'},
                {'shape': (self._num_layers * self._dir, batch_size, self._hidden_size),
                 '__layout__': 'LNC'}]
class GRU(_RNNLayer):
    r"""Applies a multi-layer gated recurrent unit (GRU) RNN to an input sequence.

    For each element in the input sequence, each layer computes the following
    function:

    .. math::
        \begin{array}{ll}
        r_t = sigmoid(W_{ir} x_t + b_{ir} + W_{hr} h_{(t-1)} + b_{hr}) \\
        i_t = sigmoid(W_{ii} x_t + b_{ii} + W_hi h_{(t-1)} + b_{hi}) \\
        n_t = \tanh(W_{in} x_t + b_{in} + r_t * (W_{hn} h_{(t-1)}+ b_{hn})) \\
        h_t = (1 - i_t) * n_t + i_t * h_{(t-1)} \\
        \end{array}

    where :math:`h_t` is the hidden state at time `t`, :math:`x_t` is the hidden
    state of the previous layer at time `t` or :math:`input_t` for the first layer,
    and :math:`r_t`, :math:`i_t`, :math:`n_t` are the reset, input, and new gates, respectively.

    Parameters
    ----------
    hidden_size: int
        The number of features in the hidden state h
    num_layers: int, default 1
        Number of recurrent layers.
    layout : str, default 'TNC'
        The format of input and output tensors. T, N and C stand for
        sequence length, batch size, and feature dimensions respectively.
    dropout: float, default 0
        If non-zero, introduces a dropout layer on the outputs of each
        RNN layer except the last layer
    bidirectional: bool, default False
        If True, becomes a bidirectional RNN.
    i2h_weight_initializer : str or Initializer
        Initializer for the input weights matrix, used for the linear
        transformation of the inputs.
    h2h_weight_initializer : str or Initializer
        Initializer for the recurrent weights matrix, used for the linear
        transformation of the recurrent state.
    i2h_bias_initializer : str or Initializer
        Initializer for the bias vector.
    h2h_bias_initializer : str or Initializer
        Initializer for the bias vector.
    input_size: int, default 0
        The number of expected features in the input x.
        If not specified, it will be inferred from input.
    prefix : str or None
        Prefix of this `Block`.
    params : ParameterDict or None
        Shared Parameters for this `Block`.

    Inputs:
        - **data**: input tensor with shape `(sequence_length, batch_size, input_size)`
          when `layout` is "TNC". For other layouts, dimensions are permuted accordingly
          using transpose() operator which adds performance overhead. Consider creating
          batches in TNC layout during data batching step.
        - **states**: initial recurrent state tensor with shape
          `(num_layers, batch_size, num_hidden)`. If `bidirectional` is True,
          shape will instead be `(2*num_layers, batch_size, num_hidden)`. If
          `states` is None, zeros will be used as default begin states.

    Outputs:
        - **out**: output tensor with shape `(sequence_length, batch_size, num_hidden)`
          when `layout` is "TNC". If `bidirectional` is True, output shape will instead
          be `(sequence_length, batch_size, 2*num_hidden)`
        - **out_states**: output recurrent state tensor with the same shape as `states`.
          If `states` is None `out_states` will not be returned.

    Examples
    --------
    >>> layer = mx.gluon.rnn.GRU(100, 3)
    >>> layer.initialize()
    >>> input = mx.nd.random.uniform(shape=(5, 3, 10))
    >>> # by default zeros are used as begin state
    >>> output = layer(input)
    >>> # manually specify begin state.
    >>> h0 = mx.nd.random.uniform(shape=(3, 3, 100))
    >>> output, hn = layer(input, h0)
    """
    def __init__(self, hidden_size, num_layers=1, layout='TNC',
                 dropout=0, bidirectional=False, input_size=0,
                 i2h_weight_initializer=None, h2h_weight_initializer=None,
                 i2h_bias_initializer='zeros', h2h_bias_initializer='zeros',
                 **kwargs):
        super(GRU, self).__init__(hidden_size, num_layers, layout,
                                  dropout, bidirectional, input_size,
                                  i2h_weight_initializer, h2h_weight_initializer,
                                  i2h_bias_initializer, h2h_bias_initializer,
                                  'gru', **kwargs)

    def state_info(self, batch_size=0):
        # Single hidden-state tensor, layout LNC (layers*dirs, batch, channels).
        return [{'shape': (self._num_layers * self._dir, batch_size, self._hidden_size),
                 '__layout__': 'LNC'}]
| |
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseBadRequest, HttpResponseServerError, HttpResponseForbidden
from django.template import RequestContext
from django.shortcuts import get_object_or_404, render_to_response
from models import GeneratedBioregion, DrawnBioregion, UserSettings, ThiessenPolygon, StoryPoint, FriendRequest
from models import BioregionError
import datetime
from django.utils import simplejson
from django.contrib.auth.models import User
from django.contrib.gis.geos import Polygon, GEOSGeometry
import json
from operator import itemgetter
from allauth.socialaccount.models import SocialToken, SocialAccount, SocialApp
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.conf import settings
def home(request, template_name='fbapp/home.html', extra_context=None):
    """
    Launch screen / Home page for application.

    Renders *template_name* with the user's social token, avatar,
    bioregion ("locus") geometry and news-source settings; anonymous
    users get blank defaults.

    Parameters
    ----------
    request : HttpRequest
    template_name : str
        Template to render.
    extra_context : dict or None
        Extra variables merged into the template context. ``None`` (the
        default) means no extras.
    """
    # BUG FIX: the default used to be a shared mutable ``{}``, which any
    # caller (or a stray context.update elsewhere) could pollute across
    # requests. Use None and create a fresh dict per call instead.
    if extra_context is None:
        extra_context = {}
    token = ""
    avatar_url = ""
    user_locus = {}
    gen_id = "null"
    if request.user.is_authenticated():
        tokens = SocialToken.objects.filter(account__user=request.user)
        if tokens.count() > 0:
            token = tokens[0]
            provider = token.account.provider
        else:
            token = None
            provider = None
        try:
            avatar_url = SocialAccount.objects.get(
                user=request.user, provider=provider).get_avatar_url()
        except ObjectDoesNotExist:
            # No social account for this provider (e.g. no token above):
            # fall back to a blank avatar instead of an unhandled 500.
            avatar_url = ""
        userSettings, created = UserSettings.objects.get_or_create(user=request.user)
        try:
            user_bioregion = userSettings.get_bioregion()
        except BioregionError:
            user_bioregion = "null"
        if not user_bioregion == "null":
            user_locus = user_bioregion.geometry_final.json
            gen_id = user_bioregion.id
        newsSources = {
            'ns_public_story_points': userSettings.ns_public_story_points,
            'ns_friend_story_points': userSettings.ns_friend_story_points,
            'ns_tweets': userSettings.ns_tweets
        }
        locus_name = userSettings.locus_name
    else:
        # Anonymous users: show everything by default.
        locus_name = ""
        newsSources = {
            'ns_public_story_points': True,
            'ns_friend_story_points': True,
            'ns_tweets': True
        }
    try:
        userName = request.user.get_full_name()
        userProvider = request.user.socialaccount_set.values()[0]['provider']
    except (AttributeError, IndexError, KeyError, ObjectDoesNotExist):
        # Previously a bare ``except:``; narrowed to the failures this
        # best-effort lookup can actually hit (anonymous user, no social
        # account) so real bugs are no longer silently swallowed.
        userName = ''
        userProvider = None
    context = RequestContext(
        request, {
            "token": token,
            "userLocus": user_locus,
            "avatar": avatar_url,
            "genId": gen_id,
            "userId": request.user.id,
            "userName": userName,
            "userProvider": userProvider,
            "appID": settings.APP_ID,
            "locusName": locus_name,
            "newsSources": json.dumps(newsSources, ensure_ascii=False)
        }
    )
    context.update(extra_context)
    return render_to_response(template_name, context_instance=context)
def set_user_settings(request):
    """
    Persist the requesting user's news-source flags, locus name and locus
    geometry, then answer with a small JSON status payload.
    """
    user_settings, _created = UserSettings.objects.get_or_create(user=request.user)
    sources = simplejson.loads(request.POST.get('news_sources'))
    user_settings.ns_public_story_points = sources['ns_public_story_points']
    user_settings.ns_friend_story_points = sources['ns_friend_story_points']
    user_settings.ns_tweets = sources['ns_tweets']
    user_settings.locus_name = request.POST.get('locus_name')
    locus_type = request.POST.get('locus_type')
    if request.POST.get('wkt') != "":
        if locus_type == 'drawn':
            # Hand-drawn locus: create or update this user's DrawnBioregion.
            geom = GEOSGeometry(request.POST.get('wkt'),
                                srid=settings.GEOMETRY_DB_SRID)
            try:
                drawn = DrawnBioregion.objects.get(user=request.user)
                drawn.geometry_final = geom
                drawn.save()
            except DrawnBioregion.DoesNotExist:
                drawn = DrawnBioregion.objects.create(
                    user=request.user,
                    name=request.user.username,
                    geometry_final=geom)
            user_settings.bioregion_drawn = drawn
            user_settings.bioregion_gen = None
        elif locus_type == 'generated':
            # Generated locus: drop any drawn region and reference the
            # selected GeneratedBioregion instead.
            DrawnBioregion.objects.filter(user=request.user).delete()
            gen_pk = request.POST.get('bioregion_gen')
            user_settings.bioregion_gen = GeneratedBioregion.objects.get(id=gen_pk)
            user_settings.bioregion_drawn = None
    else:
        # Empty WKT clears both kinds of locus.
        DrawnBioregion.objects.filter(user=request.user).delete()
        user_settings.bioregion_gen = None
        user_settings.bioregion_drawn = None
    user_settings.save()
    return HttpResponse(simplejson.dumps({
        'message': 'groovy',
        'status': 200
    }))
def set_storypoints(request):
    """
    Create (or fetch an identical existing) StoryPoint from the POSTed
    fields and return it serialized as a JSON 'feature' payload.

    Any failure (bad user id, unparsable geometry, save error) is reported
    as a JSON body with status 500 rather than an HTTP error.
    """
    try:
        user = User.objects.get(id=request.POST.get('source_user_id'))
        geom = GEOSGeometry(request.POST.get('geometry'),
                            srid=settings.GEOMETRY_DB_SRID)
        point, created = StoryPoint.objects.get_or_create(
            geometry=geom,
            title=request.POST.get('title'),
            content=request.POST.get('content'),
            image=request.POST.get('image'),
            source_user=user,
            is_permanent=request.POST.get('isPerm')
        )
        feature = {
            'storyPoint': {
                'id': point.id,
                'source_user_id': point.source_user.id,
                'source_type': point.source_type,
                'source_link': point.source_link,
                'title': point.title,
                'content': point.content,
                'image': point.image,
                'date': point.date_string(),
                'isPerm': point.is_permanent,
                'flagged': point.flagged,
                'flag_reason': point.flag_reason
            },
            'source': point.source_type
        }
        return HttpResponse(simplejson.dumps({
            'message': 'groovy',
            'feature': feature,
            'status': 200
        }))
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any save failure still yields the JSON
        # error payload the client expects.
        return HttpResponse(simplejson.dumps({
            'message': 'story point did not save.',
            'status': 500
        }))
def delete_user_settings(request):
    """Delete the requesting user's settings (not yet implemented)."""
    # TODO: implement. Until then this view returns None, which Django
    # treats as an error if the URL is ever wired up.
    pass
def get_bioregions(request):
    """Return every GeneratedBioregion as a GeoJSON FeatureCollection response."""
    all_generated = GeneratedBioregion.objects.all()
    return render_to_geojson(
        all_generated,
        geom_attribute='geometry_final',
        mimetype='text/plain',
        pretty_print=True,
        excluded_fields=['date_created', 'date_modified']
    )
def get_friends_bioregions(request):
    """
    Build a GeoJSON FeatureCollection of the bioregions (drawn and/or
    generated) belonging to the friends listed in the 'friends' GET
    parameter, plus a 'user_feature_mapping' member linking each user id
    to the type and id of their bioregion.
    """
    friends = simplejson.loads(request.GET['friends'])
    user_bioregion_mapping = {}
    draw_bioregion_ids = []
    gen_bioregion_ids = []
    friend_ids = [friend['id'] for friend in friends]
    u_settings_qs = UserSettings.objects.filter(user__id__in=friend_ids)
    for setting in u_settings_qs:
        if setting.has_bioregion():
            # Resolve the bioregion once per setting instead of issuing
            # three separate get_bioregion()/bioregion_type() lookups.
            br_type = setting.bioregion_type()
            br_id = setting.get_bioregion().id
            user_bioregion_mapping[setting.user_id] = {
                'type': br_type,
                'br_id': br_id
            }
            if br_type == "Drawn":
                draw_bioregion_ids.append(br_id)
            else:
                gen_bioregion_ids.append(br_id)
    gen_qs = GeneratedBioregion.objects.filter(id__in=gen_bioregion_ids)
    draw_qs = DrawnBioregion.objects.filter(id__in=draw_bioregion_ids)
    collection = {}
    if gen_qs.count() > 0:
        gen_bioregions_collection = render_to_geojson(
            gen_qs,
            geom_attribute='geometry_final',
            mimetype='text/plain',
            pretty_print=True,
            excluded_fields=['date_created', 'date_modified'],
            return_response=False
        )
        collection = gen_bioregions_collection
    if draw_qs.count() > 0:
        draw_bioregions_collection = render_to_geojson(
            draw_qs,
            geom_attribute='geometry_final',
            mimetype='text/plain',
            pretty_print=True,
            excluded_fields=['date_created', 'date_modified'],
            return_response=False
        )
        collection = draw_bioregions_collection
    if draw_qs.count() > 0 and gen_qs.count() > 0:
        # Both kinds present: merge the feature lists into one collection.
        collection['features'] = (draw_bioregions_collection['features'] +
                                  gen_bioregions_collection['features'])
    collection['user_feature_mapping'] = user_bioregion_mapping
    response = HttpResponse()
    response.write('%s' % simplejson.dumps(collection, indent=1))
    response['Content-length'] = str(len(response.content))
    response['Content-Type'] = 'text/plain'
    return response
def get_storypoints(request, user):
    """
    Return a GeoJSON FeatureCollection of story points for the requesting
    user's bioregion.

    ``user`` is either the literal string 'json' (all points inside the
    bioregion plus the user's own) or a user id (only that user's points).
    When ns_tweets is enabled, nearby topical tweets are fetched from the
    Twitter search API and appended as 'twitter' features.
    """
    from django.contrib.gis.geos import Point
    if not request.user.is_authenticated():
        return HttpResponse(simplejson.dumps({
            'message': 'User is not authenticated',
            'status': 401
        }))
    usetting = UserSettings.objects.get(user=request.user)
    geom = usetting.get_bioregion().geometry_final
    # Initialize unconditionally: the tweet-feature loop below runs even
    # when ns_tweets is off, and previously raised NameError in that case.
    included_tweets = []
    if usetting.ns_tweets:
        # Use the bioregion centroid + distance to the envelope corner as
        # the center/radius of the Twitter geocode search.
        centroid = geom.centroid
        max_point = geom.envelope[0][0]
        max_pt_obj = Point(max_point[0], max_point[1])
        radius = centroid.distance(max_pt_obj)
        centroid.transform(4326)
        geocode = str(centroid.y) + ',' + str(centroid.x) + ',' + str(radius / 1000) + 'km'
        geo_tweets = []
        for term in ['climate', 'ecology', 'resilience', 'agriculture']:
            url = 'https://api.twitter.com/1.1/search/tweets.json?count=100&q=%s&geocode=%s' % (term, geocode)
            broad_tweets = oauth_req(url, 'twitter')
            tweets = simplejson.loads(broad_tweets)
            geo_tweets += [x for x in tweets['statuses'] if x['geo'] != None]
        for tweet in geo_tweets:
            # Twitter reports (lat, lon); Point wants (x=lon, y=lat).
            point = Point(tweet['geo']['coordinates'][1], tweet['geo']['coordinates'][0])
            point.srid = 4326
            point.transform(3857)
            if point.within(geom):
                tweet['point'] = point
                # De-duplicate tweets returned for multiple search terms.
                if not any(x['id'] == tweet['id'] for x in included_tweets):
                    included_tweets.append(tweet)
    #TODO - don't store (most) storypoints locally - only posts.
    ### For example, if we had a 'stored' source type, we could continue to use the below
    if user == 'json':
        qs = StoryPoint.objects.filter(
            Q(geometry__within=geom) | Q(source_user=usetting.user))
    else:
        qs = StoryPoint.objects.filter(source_type='user', source_user=user)
    features = []
    for point in qs.order_by('-created'):
        if point.source_type != 'user':
            image = point.image
            source_user_id = None
            source_user_name = None
        else:
            image = point.avatar()
            source_user_id = point.source_user.id
            source_user_name = point.source_user.get_full_name()
        feature = {
            'id': str(point.id),
            'geometry': {
                'type': 'Point',
                'coordinates': [
                    point.geometry.coords[0],
                    point.geometry.coords[1]
                ]
            },
            'type': 'Feature',
            'properties': {
                'storyPoint': {
                    'id': point.id,
                    'source_user_id': source_user_id,
                    'source_user_name': source_user_name,
                    'source_type': point.source_type,
                    'source_link': point.source_link,
                    'title': point.title,
                    'content': point.content,
                    'image': image,
                    'date': point.date_string(),
                    'isPerm': point.is_permanent,
                    'flagged': point.flagged,
                    'flag_reason': point.flag_reason
                },
                'source_type': point.source_type
            }
        }
        features.append(feature)
    for point in included_tweets:
        image = point['user']['profile_image_url']
        source_user_id = None
        try:
            feature = {
                'id': str(point['id']),
                'geometry': {
                    'type': 'Point',
                    'coordinates': [
                        point['point'].x,
                        point['point'].y
                    ]
                },
                'type': 'Feature',
                'properties': {
                    'storyPoint': {
                        'id': point['id'],
                        'source_user_id': source_user_id,
                        'source_type': 'twitter',
                        'source_link': 'http://www.twitter.com/' + point['user']['screen_name'],
                        'title': '@' + point['user']['screen_name'],
                        'content': point['text'],
                        'image': image,
                        'date': point['created_at'],
                        'isPerm': False,
                        'flagged': False,  # TODO
                        'flag_reason': None
                    },
                    'source_type': 'twitter'
                }
            }
            # Append inside the try: previously the append sat after the
            # except, so a malformed tweet re-appended the previous loop
            # iteration's (stale) feature instead of being skipped.
            features.append(feature)
        except Exception:
            # Skip malformed tweets rather than aborting the whole view.
            pass
    storypoints = {
        "srid": 900913,
        "crs": {
            "type": "link",
            "properties": {
                "href": "http://spatialreference.org/ref/epsg/900913/",
                "type": "proj4"
            }
        },
        "type": "FeatureCollection",
        "features": features
    }
    response = HttpResponse()
    response.write('%s' % simplejson.dumps(storypoints, indent=1))
    response['Content-length'] = str(len(response.content))
    response['Content-Type'] = 'text/plain'
    return response
#Courtesy of https://dev.twitter.com/docs/auth/oauth/single-user-with-examples
def oauth_req(url, provider_name, http_method="GET", post_body='',
              http_headers=''):
    """
    Perform a signed single-user OAuth request against *url* using the
    consumer credentials of the SocialApp registered for *provider_name*
    and the site-wide Twitter access token, returning the response body.
    """
    import oauth2 as oauth
    social_app = SocialApp.objects.get(provider=provider_name)
    # TODO: url -> provider.url, add "params{}"
    consumer = oauth.Consumer(key=social_app.client_id,
                              secret=social_app.secret)
    access_token = oauth.Token(key=settings.TWITTER_ACCESS_TOKEN,
                               secret=settings.TWITTER_ACCESS_TOKEN_SECRET)
    client = oauth.Client(consumer, access_token)
    _resp, content = client.request(url,
                                    method=http_method,
                                    body=post_body,
                                    headers=http_headers)
    return content
def edit_storypoint(request, storypoint_id):
    """
    Replace the content of a story point owned by the requesting user.

    JSON 'status' mirrors the outcome: 400 bad id, 404 unknown id,
    401 not the owner, 200 updated.
    """
    try:
        storypoint = StoryPoint.objects.get(id=storypoint_id)
    except ValueError:
        return HttpResponse(simplejson.dumps({
            'message': 'Invalid post id: ID must be an integer',
            'status': 400
        }))
    except ObjectDoesNotExist:
        return HttpResponse(simplejson.dumps({
            'message': 'Post with given ID does not exist.',
            'status': 404
        }))
    # Guard clause: only the authenticated owner may edit.
    if not (request.user.is_authenticated()
            and request.user.id == storypoint.source_user_id):
        return HttpResponse(simplejson.dumps({
            'message': 'You do not have permission to edit this post.',
            'status': 401
        }))
    storypoint.content = request.POST.get('content')
    storypoint.save()
    return HttpResponse(simplejson.dumps({
        'message': 'Post updated.',
        'status': 200
    }))
def delete_storypoint(request, storypoint_id):
    """
    Delete a story point owned by the requesting user.

    JSON 'status' mirrors the outcome: 400 bad id, 404 unknown id,
    401 not the owner, 200 deleted.
    """
    try:
        storypoint = StoryPoint.objects.get(id=storypoint_id)
    except ValueError:
        return HttpResponse(simplejson.dumps({
            'message': 'Invalid post id: ID must be an integer',
            'status': 400
        }))
    except ObjectDoesNotExist:
        return HttpResponse(simplejson.dumps({
            'message': 'Post with given ID does not exist.',
            'status': 404
        }))
    is_owner = (request.user.is_authenticated()
                and request.user.id == storypoint.source_user_id)
    if is_owner:
        storypoint.delete()
        payload = {'message': 'Post deleted.', 'status': 200}
    else:
        payload = {'message': 'You do not have permission to delete this post.',
                   'status': 401}
    return HttpResponse(simplejson.dumps(payload))
def get_bioregions_by_point(request):
    """
    Return, as GeoJSON, the generated bioregions of the requested size
    class whose Thiessen polygon contains the given lon/lat GET point.
    """
    pnt_wkt = 'POINT(' + request.GET['lon'] + ' ' + request.GET['lat'] + ')'
    size_class = request.GET['size']
    try:
        thiessen = ThiessenPolygon.objects.get(geometry__contains=pnt_wkt)
    except Exception:
        # Narrowed from a bare ``except:`` (also covered DoesNotExist /
        # MultipleObjectsReturned); fall back to no polygon so the filter
        # below simply matches nothing.
        thiessen = None
    qs = GeneratedBioregion.objects.filter(thiessen=thiessen,
                                           size_class=size_class)
    return render_to_geojson(
        qs,
        geom_attribute='geometry_final',
        mimetype='text/plain',
        pretty_print=True,
        excluded_fields=['date_created', 'date_modified']
    )
def get_friend_requests(request):
    """
    List the authenticated user's pending ('new') friend requests, in
    either direction, as a JSON payload.
    """
    if not request.user.is_authenticated():
        # Previously anonymous users made this view return None, which
        # Django turns into a server error; answer with 401 JSON instead
        # (matching get_storypoints' convention).
        return HttpResponse(simplejson.dumps({
            'status': 401,
            'message': 'User is not authenticated'
        }))
    friend_requests = FriendRequest.objects.filter(
        (Q(requester=request.user) | Q(requestee=request.user)) & Q(status='new'))
    response_json = [{
        'id': req.id,
        'requestee': req.requestee.get_full_name(),
        'requester': req.requester.get_full_name(),
        'status': req.status
    } for req in friend_requests]
    return HttpResponse(simplejson.dumps({
        'status': 200,
        'friend_requests': response_json
    }))
def create_friend_request(request):
    """
    Create a 'new' FriendRequest from the requesting user to the POSTed
    requestee, unless a pending request already exists in either direction.
    """
    if not request.user.is_authenticated():
        # Previously fell off the end and returned None (server error).
        return HttpResponse(simplejson.dumps({
            'status': 401,
            'message': 'User is not authenticated'
        }))
    requestee_id = simplejson.loads(request.POST.get('requestee_id'))
    requestee = User.objects.get(id=requestee_id)
    requester = request.user
    pending = FriendRequest.objects.filter(
        (Q(requester=requester, requestee=requestee) |
         Q(requester=requestee, requestee=requester)) & Q(status='new'))
    if pending.count() == 0:
        FriendRequest.objects.create(requester=requester,
                                     requestee=requestee,
                                     status='new')
        return HttpResponse(simplejson.dumps({
            'status': 200,
            'message': 'Friend request sent'
        }))
    else:
        return HttpResponse(simplejson.dumps({
            'status': 200,
            'message': 'Friendship request already exists'
        }))
# TODO: This method is for testing only - delete when done with friend work!
def generate_friend_requests(request):
    """
    Testing-only helper: create already-accepted FriendRequests between
    the requesting user and every user they are not yet friends with.
    """
    existing_friendships = get_locus_friendships(request.user)
    # get_locus_friendships returns dicts, so index by key; the previous
    # attribute access (x.id) raised AttributeError whenever the user
    # already had friends.
    existing_friendship_ids = [x['id'] for x in existing_friendships]
    unfriended_users = User.objects.filter(~Q(id__in=existing_friendship_ids))
    for stranger in unfriended_users:
        FriendRequest.objects.create(requester=request.user,
                                     requestee=stranger,
                                     status='accepted')
    return HttpResponse(simplejson.dumps({
        'status': 200,
        'message': 'Refresh to see your new friends!'
    }))
def accept_friend_request(request):
    """Mark the POSTed friend request as accepted and report via JSON."""
    if not request.user.is_authenticated():
        # Previously returned None for anonymous users (server error).
        return HttpResponse(simplejson.dumps({
            'status': 401,
            'message': 'User is not authenticated'
        }))
    request_id = simplejson.loads(request.POST.get('request_id'))
    friend_request = FriendRequest.objects.get(id=request_id)
    # Plain attribute assignment instead of the __setattr__ dunder call.
    friend_request.status = 'accepted'
    friend_request.save()
    return HttpResponse(simplejson.dumps({
        'status': 200,
        'message': 'Friend request accepted'
    }))
def decline_friend_request(request):
    """Mark the POSTed friend request as rejected and report via JSON."""
    if not request.user.is_authenticated():
        # Previously returned None for anonymous users (server error).
        return HttpResponse(simplejson.dumps({
            'status': 401,
            'message': 'User is not authenticated'
        }))
    request_id = simplejson.loads(request.POST.get('request_id'))
    friend_request = FriendRequest.objects.get(id=request_id)
    # Plain attribute assignment instead of the __setattr__ dunder call.
    friend_request.status = 'rejected'
    friend_request.save()
    return HttpResponse(simplejson.dumps({
        'status': 200,
        'message': 'Friend request declined'
    }))
def delete_friendship(request):
    """
    Remove the friendship between the requesting user and the POSTed
    'unfriend_id' (whichever direction it was requested in), reporting
    the outcome as JSON.
    """
    message = "An error occurred. Friendship unchanged."
    status = 500
    if request.user.is_authenticated():
        unfriend_id = simplejson.loads(request.POST.get('unfriend_id'))
        matches = FriendRequest.objects.filter(
            Q(requester=request.user, requestee__id=unfriend_id) |
            Q(requester__id=unfriend_id, requestee=request.user))
        match_count = matches.count()
        if match_count == 1:
            matches[0].delete()
            status = 200
            message = 'Friend removed'
        elif match_count == 0:
            status = 409
            message = "No friendship to remove."
        else:
            # More than one row should be impossible; leave status at 500.
            message = "More than one friendship returned. Contact an administrator."
    return HttpResponse(simplejson.dumps({
        'status': status,
        'message': message
    }))
def get_formatted_user_list(user_ids):
    """
    Annotate each {'id', 'name'} dict in *user_ids* with the user's social
    'providers' and 'uids' lists (providers is ['none'] when the user has
    no social accounts) and return the annotated list.
    """
    formatted = []
    for entry in user_ids:
        providers, uids = [], []
        for account in SocialAccount.objects.filter(user__id=entry['id']):
            providers.append(account.provider)
            uids.append(account.uid)
        entry['providers'] = providers if providers else ['none']
        entry['uids'] = uids
        formatted.append(entry)
    return formatted
def get_locus_friendships(user):
    """
    Return formatted {'id', 'name', 'providers', 'uids'} entries for every
    user with an accepted friendship with *user*, in either direction
    (requests *user* sent first, then requests *user* accepted).
    """
    sent = FriendRequest.objects.filter(requester=user, status='accepted')
    received = FriendRequest.objects.filter(requestee=user, status='accepted')
    friend_ids = [{'id': fr.requestee.id, 'name': fr.requestee.get_full_name()}
                  for fr in sent]
    friend_ids += [{'id': fr.requester.id, 'name': fr.requester.get_full_name()}
                   for fr in received]
    return get_formatted_user_list(friend_ids)
def get_user_strangers(friend_ids):
    """
    Return up to four formatted suggested users: users whose id is not in
    *friend_ids* and who have a non-empty full name.
    """
    strangers = User.objects.filter(~Q(id__in=friend_ids))
    # Keep only users with a real display name.
    named = [s for s in strangers if s.get_full_name() != '']
    stranger_ids = [{'id': s.id, 'name': s.get_full_name()} for s in named[:4]]
    # (dropped the pointless ``stranger_ids + []`` copy from the original)
    return get_formatted_user_list(stranger_ids)
def get_friends(request):
    """
    Assemble the friend-related lists for the client: facebook friends who
    are also app users ('user_friends'), facebook-only friends
    ('just_friends'), a few suggested strangers, and incoming requests.

    POST 'friends' is the JSON list of the user's facebook friends,
    each a {'id': <facebook uid>, 'name': ...} dict.
    """
    friends = simplejson.loads(request.POST.get('friends'))
    friend_ids = [friend['id'] for friend in friends]
    user_friends_qs = SocialAccount.objects.filter(uid__in=friend_ids,
                                                   provider='facebook')
    user_ids = [user.uid for user in user_friends_qs]
    user_friends = get_locus_friendships(request.user)
    sent_friend_requests = FriendRequest.objects.filter(requester=request.user,
                                                        status='new')
    # Exclude existing friends, ourselves, and anyone we already asked.
    user_strangers = get_user_strangers(
        [x['id'] for x in user_friends] + [request.user.id] +
        [y.requestee.id for y in sent_friend_requests])
    pending_friend_requests = FriendRequest.objects.filter(requestee=request.user,
                                                           status='new')
    requests = []
    for req in pending_friend_requests:
        formatted_req = get_formatted_user_list(
            [{'id': req.requester.id, 'name': req.requester.get_full_name()}])[0]
        formatted_req['request_id'] = req.id
        requests.append(formatted_req)
    just_friends = []
    sorted_friends = sorted(friends, key=itemgetter('name'))
    for friend in sorted_friends:
        if friend['id'] in user_ids:
            if not any(friend['id'] in x['uids'] for x in user_friends):
                # ``friend`` is a dict, so the original
                # ``hasattr(friend, 'uids')`` could never be true; test key
                # membership instead so existing annotations are kept.
                if 'uids' not in friend:
                    soc_acc = SocialAccount.objects.get(uid=friend['id'])
                    user_obj_id = soc_acc.user.id
                    friend['uids'] = [UserSettings.objects.get(user__id=user_obj_id).id]
                    friend['providers'] = ['facebook']
                user_friends.append(friend)
        else:
            just_friends.append(friend)
    # TODO create list of non-friend users in your bioregion (50ish)
    return HttpResponse(simplejson.dumps({
        'just_friends': just_friends,
        'user_friends': user_friends,
        'user_strangers': user_strangers,
        'friend_requests': requests,
        'message': 'Friend lists generated',
        'status': 200
    }))
def render_to_geojson(query_set, geom_field=None, geom_attribute=None,
                      extra_attributes=None, mimetype='text/plain',
                      pretty_print=False, excluded_fields=None,
                      included_fields=None, proj_transform=None,
                      return_response=True):
    '''
    Shortcut to render a GeoJson FeatureCollection from a Django QuerySet
    (or a single model instance).

    Adds a crs member as a spatialreference.org link plus an 'srid'
    member. ``geom_field``/``geom_attribute`` select the geometry
    (``geom_attribute`` may be a dotted ``prop.meth`` path);
    ``included_fields``/``excluded_fields`` control the 'properties'
    payload; ``proj_transform`` reprojects geometries to that SRID.
    With ``return_response=False`` the raw collection dict is returned
    instead of an HttpResponse.
    '''
    # Normalize the list arguments. The previous signature used mutable
    # defaults ([]) and appended to them, so '_state' (and extra geometry
    # field names) accumulated across calls, and callers' own lists were
    # mutated in place.
    extra_attributes = list(extra_attributes) if extra_attributes else []
    excluded_fields = list(excluded_fields) if excluded_fields else []
    included_fields = list(included_fields) if included_fields else []
    excluded_fields.append('_state')
    collection = {}
    if hasattr(query_set, '_meta'):  # it's a single model instance
        fields = query_set._meta.fields
        query_set = [query_set]
    else:
        fields = query_set.model._meta.fields
    if geom_attribute:
        geometry_name = geom_attribute
        geo_field = None
        if '.' in geom_attribute:
            # Dotted path: resolve attribute then method/property.
            prop, meth = geom_attribute.split('.')
            if len(query_set):
                p = getattr(query_set[0], prop)
                geo_field = getattr(p, meth)
                if callable(geo_field):
                    geo_field = geo_field()
        else:
            if len(query_set):
                geo_field = getattr(query_set[0], geom_attribute)
                if callable(geo_field):
                    geo_field = geo_field()
        if not geo_field:
            srid = 4326  # empty set: fall back to WGS84
        else:
            srid = geo_field.srid
    else:
        # NOTE(review): GeometryField is not imported in this module's
        # visible import block — verify it is imported further up
        # (django.contrib.gis.db.models) before relying on this branch.
        geo_fields = [f for f in fields if isinstance(f, GeometryField)]
        # attempt to assign geom_field that was passed in
        if geom_field:
            geo_fieldnames = [x.name for x in geo_fields]
            try:
                geo_field = geo_fields[geo_fieldnames.index(geom_field)]
            except (ValueError, IndexError):
                raise Exception('%s is not a valid geometry on this model' % geom_field)
        else:
            if not len(geo_fields):
                raise Exception('There appears to be no valid geometry on this model')
            geo_field = geo_fields[0]  # no support yet for multiple geometry fields
        # remove other geom fields from showing up in attributes
        if len(geo_fields) > 1:
            for field in geo_fields:
                if field.name not in excluded_fields:
                    excluded_fields.append(field.name)
        geometry_name = geo_field.name
        srid = geo_field.srid
    to_srid = proj_transform if proj_transform else srid
    # Gather the projection information
    collection['crs'] = {
        'type': 'link',
        'properties': {
            'href': 'http://spatialreference.org/ref/epsg/%s/' % to_srid,
            'type': 'proj4'
        }
    }
    collection['srid'] = to_srid
    # Build list of features. Plain truthiness instead of the original
    # ``query_set.distinct()``: emptiness is the same either way, and
    # .distinct() crashed when a single model instance was passed (the
    # instance branch above turns query_set into a plain list).
    features = []
    if query_set:
        for item in query_set:
            feat = {'type': 'Feature'}
            if included_fields:
                d = {}
                for f in included_fields:
                    if hasattr(item, f):
                        d[f] = getattr(item, f)
            else:
                d = item.__dict__.copy()
                for field in excluded_fields:
                    if field in d.keys():
                        d.pop(field)
                if geometry_name in d:
                    d.pop(geometry_name)
            for attr in extra_attributes:
                a = getattr(item, attr)
                # crappy way of trying to figure out if this is a
                # m2m, aka 'ManyRelatedManager'
                if hasattr(a, 'values_list'):
                    a = list(a.values_list('id', flat=True))
                if callable(a):
                    d[attr] = a()
                else:
                    d[attr] = a
            if '.' in geometry_name:
                prop, meth = geometry_name.split('.')
                a = getattr(item, prop)
                g = getattr(a, meth)
                if callable(g):
                    g = g()
            else:
                g = getattr(item, geometry_name)
            if g:
                if proj_transform:
                    g.transform(proj_transform)
                feat['geometry'] = simplejson.loads(g.geojson)
            feat['properties'] = d
            features.append(feat)
    # Label as FeatureCollection and add Features.
    # (Dead commented-out bbox/extent computation removed; restore from
    # history if a 'bbox' member is ever needed.)
    collection['type'] = "FeatureCollection"
    collection['features'] = features
    if return_response:
        response = HttpResponse()
        if pretty_print:
            response.write('%s' % simplejson.dumps(collection, indent=1))
        else:
            response.write('%s' % simplejson.dumps(collection))
        response['Content-length'] = str(len(response.content))
        response['Content-Type'] = mimetype
        return response
    else:
        return collection
| |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for checkpointable object SavedModel loading."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
from tensorflow.python.eager import def_function
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_spec
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import variables
from tensorflow.python.saved_model import load
from tensorflow.python.saved_model import save
from tensorflow.python.training.checkpointable import tracking
class LoadTest(test.TestCase):
  """Round-trip SavedModel save/load tests for checkpointable objects."""

  def cycle(self, obj):
    # Save to a fresh directory and immediately reload.
    path = tempfile.mkdtemp(prefix=self.get_temp_dir())
    save.save(obj, path, signatures={})
    return load.load(path)

  def test_structure_import(self):
    root = tracking.Checkpointable()
    root.f = def_function.function(
        lambda x: 2. * x,
        input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
    root.dep_one = tracking.Checkpointable()
    root.dep_two = tracking.Checkpointable()
    root.dep_two.dep = tracking.Checkpointable()
    root.dep_three = root.dep_two.dep
    imported = self.cycle(root)
    # Shared dependencies stay shared; distinct ones stay distinct.
    self.assertIs(imported.dep_three, imported.dep_two.dep)
    self.assertIsNot(imported.dep_one, imported.dep_two)
    self.assertEqual(4., imported.f(constant_op.constant(2.)).numpy())

  def test_variables(self):
    root = tracking.Checkpointable()
    root.v1 = variables.Variable(1.)
    root.v2 = variables.Variable(2.)
    root.f = def_function.function(
        lambda x: root.v2 * x,
        input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
    imported = self.cycle(root)
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(imported.v1.numpy(), 1.0)
    self.assertEqual(imported.v2.numpy(), 2.0)
    self.assertEqual(4., imported.f(constant_op.constant(2.)).numpy())

  def _make_asset(self, contents):
    # mkstemp (rather than the race-prone, deprecated mktemp) creates the
    # file atomically before the contents are written.
    fd, filename = tempfile.mkstemp(prefix=self.get_temp_dir())
    with os.fdopen(fd, "w") as f:
      f.write(contents)
    return filename

  def test_assets_import(self):
    file1 = self._make_asset("contents 1")
    file2 = self._make_asset("contents 2")
    root = tracking.Checkpointable()
    root.f = def_function.function(
        lambda x: 2. * x,
        input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
    root.asset1 = tracking.TrackableAsset(file1)
    root.asset2 = tracking.TrackableAsset(file2)
    save_dir = os.path.join(self.get_temp_dir(), "save_dir")
    save.save(root, save_dir)
    # Delete the originals and move the SavedModel to prove the assets
    # were copied into (and resolved from) the SavedModel directory.
    file_io.delete_file(file1)
    file_io.delete_file(file2)
    load_dir = os.path.join(self.get_temp_dir(), "load_dir")
    file_io.rename(save_dir, load_dir)
    imported = load.load(load_dir)
    with open(imported.asset1.asset_path.numpy(), "r") as f:
      self.assertEqual("contents 1", f.read())
    with open(imported.asset2.asset_path.numpy(), "r") as f:
      self.assertEqual("contents 2", f.read())

  def test_capture_assets(self):
    root = tracking.Checkpointable()
    root.vocab = tracking.TrackableAsset(self._make_asset("contents"))
    root.f = def_function.function(
        lambda: root.vocab.asset_path,
        input_signature=[])
    imported = self.cycle(root)
    origin_output = root.f().numpy()
    imported_output = imported.f().numpy()
    # The imported asset lives at a new path but has identical contents.
    self.assertNotEqual(origin_output, imported_output)
    with open(imported_output, "r") as f:
      self.assertEqual("contents", f.read())

  def test_assets_dedup(self):
    vocab = self._make_asset("contents")
    root = tracking.Checkpointable()
    root.f = def_function.function(
        lambda x: 2. * x,
        input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
    root.asset1 = tracking.TrackableAsset(vocab)
    root.asset2 = tracking.TrackableAsset(vocab)
    imported = self.cycle(root)
    # The same source file must map to a single asset after import.
    self.assertEqual(imported.asset1.asset_path.numpy(),
                     imported.asset2.asset_path.numpy())

  def test_implicit_input_signature(self):
    @def_function.function
    def func(x):
      return 2 * x

    root = tracking.Checkpointable()
    root.f = func
    # Add two traces.
    root.f(constant_op.constant(1.))
    root.f(constant_op.constant(1))
    imported = self.cycle(root)
    self.assertEqual(4., imported.f(constant_op.constant(2.)).numpy())
    self.assertEqual(14, imported.f(constant_op.constant(7)).numpy())

  def test_explicit_input_signature(self):
    @def_function.function(
        input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
    def func(x):
      return 2 * x

    root = tracking.Checkpointable()
    root.f = func
    imported = self.cycle(root)
    self.assertEqual(4., imported.f(constant_op.constant(2.0)).numpy())

  def test_function_with_default_bool_input(self):
    def func(x, training=False):
      if training:
        return 2 * x
      else:
        return 7

    root = tracking.Checkpointable()
    root.f = def_function.function(func)
    self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
    self.assertEqual(7, root.f(constant_op.constant(1)).numpy())
    self.assertEqual(2, root.f(constant_op.constant(1), True).numpy())
    imported = self.cycle(root)
    self.assertEqual(4, imported.f(constant_op.constant(2), True).numpy())
    self.assertEqual(7, imported.f(constant_op.constant(2)).numpy())

  def test_positional_arguments(self):
    def func(x, training=False, abc=7.1, defg=7.7):
      del abc
      if training:
        return 2 * x
      if defg == 7:
        return 6
      else:
        return 7

    root = tracking.Checkpointable()
    root.f = def_function.function(func)
    self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
    self.assertEqual(7, root.f(constant_op.constant(1)).numpy())
    self.assertEqual(2, root.f(constant_op.constant(1), True).numpy())
    self.assertEqual(6, root.f(constant_op.constant(1), defg=7.0).numpy())
    imported = self.cycle(root)
    self.assertEqual(4, imported.f(constant_op.constant(2), True).numpy())
    self.assertEqual(7, imported.f(constant_op.constant(2)).numpy())
    self.assertEqual(6, imported.f(constant_op.constant(1), defg=7.0).numpy())

  def test_member_function(self):
    class CheckpointableWithMember(tracking.Checkpointable):

      def __init__(self):
        super(CheckpointableWithMember, self).__init__()
        self._some_value = 20

      @def_function.function
      def f(self, x, training=False):
        if training:
          return 2 * x
        else:
          return 7 + self._some_value

    root = CheckpointableWithMember()
    self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
    self.assertEqual(27, root.f(constant_op.constant(1)).numpy())
    self.assertEqual(2, root.f(constant_op.constant(1), True).numpy())
    imported = self.cycle(root)
    self.assertEqual(4, imported.f(constant_op.constant(2), True).numpy())
    self.assertEqual(27, imported.f(constant_op.constant(2)).numpy())
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  test.main()
| |
"""
Definition of nodes for computing reordering and plotting coclass_matrices
"""
import numpy as np
import os
from nipype.utils.filemanip import split_filename as split_f
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
traits, File, TraitedSpec, isdefined)
from graphpype.utils_cor import return_coclass_mat, return_coclass_mat_labels
from graphpype.utils_net import read_Pajek_corres_nodes, read_lol_file
from graphpype.utils import check_np_shapes
from graphpype.utils_plot import plot_ranged_cormat
# PrepareCoclass
class PrepareCoclassInputSpec(BaseInterfaceInputSpec):
    """Input spec for :class:`PrepareCoclass`.

    Subjects are mapped into a common reference space either by
    coordinates (coords_files + gm_mask_coords_file) or by labels
    (labels_files + gm_mask_labels_file); the two pairs are mutually
    exclusive via the ``xor`` metadata below.
    """

    mod_files = traits.List(
        File(exists=True), desc='list of all files representing modularity \
            assignement (in rada, lol files) for each subject', mandatory=True)
    node_corres_files = traits.List(
        File(exists=True), desc='list of all Pajek files (in txt format) to \
            extract correspondance between nodes in rada analysis and original \
            subject coordinates for each subject - as obtained from PrepRada',
        mandatory=True)
    coords_files = traits.List(File(
        exists=True), desc='list of all coordinates in numpy space files (in\
            txt format) for each subject (after removal of non void data)',
        mandatory=True, xor=['labels_files'])
    labels_files = traits.List(File(
        exists=True), desc='list of labels (in txt format) for each subject\
            (after removal of non void data)', mandatory=True,
        xor=['coords_files'])
    gm_mask_coords_file = File(
        exists=True, desc='Coordinates in numpy space, corresponding to all\
            possible nodes in the original space', mandatory=False,
        xor=['gm_mask_labels_file'])
    gm_mask_labels_file = File(
        exists=True, desc='Labels for all possible nodes - in case coords are\
            varying from one indiv to the other (source space for example)',
        mandatory=False, xor=['gm_mask_coords_file'])
class PrepareCoclassOutputSpec(TraitedSpec):
    """Output spec for :class:`PrepareCoclass`: four .npy matrix files."""

    group_coclass_matrix_file = File(
        exists=True, desc="all coclass matrices of the group in .npy (pickle\
            format)")
    sum_coclass_matrix_file = File(
        exists=True, desc="sum of coclass matrix of the group in .npy (pickle\
            format)")
    sum_possible_edge_matrix_file = File(
        exists=True, desc="sum of possible edges matrices of the group in .npy\
            (pickle format)")
    norm_coclass_matrix_file = File(
        exists=True, desc="sum of coclass matrix normalized by possible edges\
            matrix of the group in .npy (pickle format)")
class PrepareCoclass(BaseInterface):
    """
    Prepare a list of coclassification matrices, in a similar reference given
    by a coord (resp label) file based on individual coords (resp labels)
    files
    Inputs:
        mod_files:
            type = List of Files, exists=True, desc='list of all files
            representing modularity assignement (in rada, lol files) for each
            subject', mandatory=True
        node_corres_files:
            type = List of Files, exists=True, desc='list of all Pajek files
            (in txt format) to extract correspondance between nodes in rada
            analysis and original subject coordinates for each subject (as
            obtained from PrepRada)', mandatory=True
        coords_files:
            type = List of Files, exists=True, desc='list of all coordinates in
            numpy space files (in txt format) for each subject (after removal
            of non void data)', mandatory=True, xor = ['labels_files']
        gm_mask_coords_file:
            type = File,exists=True, desc='Coordinates in numpy space,
            corresponding to all possible nodes in the original space',
            mandatory=False, xor = ['gm_mask_labels_file']
        labels_files:
            type = List of Files, exists=True, desc='list of labels (in txt
            format) for each subject (after removal of non void data)',
            mandatory=True, xor = ['coords_files']
        gm_mask_labels_file:
            type = File, exists=True, desc='Labels for all possible nodes - in
            case coords are varying from one indiv to the other (source space
            for example)', mandatory=False, xor = ['gm_mask_coords_file']
    Outputs:
        group_coclass_matrix_file:
            type = File,exists=True, desc="all coclass matrices of the group
            in .npy format"
        sum_coclass_matrix_file:
            type = File, exists=True, desc="sum of coclass matrix of the group
            in .npy format"
        sum_possible_edge_matrix_file:
            type = File, exists=True, desc="sum of possible edges matrices of
            the group in .npy format"
        norm_coclass_matrix_file:
            type = File, exists=True, desc="sum of coclass matrix normalized
            by possible edges matrix of the group in .npy format"
    """
    input_spec = PrepareCoclassInputSpec
    output_spec = PrepareCoclassOutputSpec
    def _run_interface(self, runtime):
        # Entry point run by nipype. Accumulates per-subject coclassification
        # matrices into a group stack, an element-wise sum, and a sum
        # normalized by how often each edge was possible.
        print('in prepare_coclass')
        mod_files = self.inputs.mod_files
        node_corres_files = self.inputs.node_corres_files
        # Case 1: the common reference space is given as coordinates.
        if isdefined(self.inputs.gm_mask_coords_file) and \
                isdefined(self.inputs.coords_files):
            coords_files = self.inputs.coords_files
            gm_mask_coords = np.loadtxt(self.inputs.gm_mask_coords_file)
            print(gm_mask_coords.shape)
            # read matrix from the first group
            # print Z_cor_mat_files
            sum_coclass_matrix = np.zeros(
                (gm_mask_coords.shape[0], gm_mask_coords.shape[0]), dtype=int)
            sum_possible_edge_matrix = np.zeros(
                (gm_mask_coords.shape[0], gm_mask_coords.shape[0]), dtype=int)
            # print sum_coclass_matrix.shape
            # One (N, N) slab per subject, stacked along the third axis.
            group_coclass_matrix = np.zeros(
                (gm_mask_coords.shape[0], gm_mask_coords.shape[0],
                 len(mod_files)), dtype=float)
            print(group_coclass_matrix.shape)
            assert len(mod_files) == len(coords_files) and len(mod_files) == \
                len(node_corres_files), (
                    "Error, length of mod_files, coords_files and \
                node_corres_files are imcompatible {} {} {}".format(
                        len(mod_files), len(coords_files),
                        len(node_corres_files)))
            for index_file in range(len(mod_files)):
                # Silently skip subjects whose input files are missing
                # (a warning is printed below).
                if (os.path.exists(mod_files[index_file]) and
                        os.path.exists(node_corres_files[index_file]) and
                        os.path.exists(coords_files[index_file])):
                    community_vect = read_lol_file(mod_files[index_file])
                    node_corres_vect = read_Pajek_corres_nodes(
                        node_corres_files[index_file])
                    coords = np.loadtxt(coords_files[index_file])
                    # Map this subject's nodes back to the common space.
                    corres_coords = coords[node_corres_vect, :]
                    coclass_mat, possible_edge_mat = return_coclass_mat(
                        community_vect, corres_coords, gm_mask_coords)
                    # Diagonal is meaningless for coclassification.
                    np.fill_diagonal(coclass_mat, 0)
                    np.fill_diagonal(possible_edge_mat, 1)
                    sum_coclass_matrix += coclass_mat
                    sum_possible_edge_matrix += possible_edge_mat
                    group_coclass_matrix[:, :, index_file] = coclass_mat
                else:
                    print("Warning, one or more files between {}, {}, {} do\
                        not exists".format(mod_files[index_file],
                                           node_corres_files[index_file],
                                           coords_files[index_file]))
        # Case 2: the common reference space is given as node labels.
        elif (isdefined(self.inputs.gm_mask_labels_file) and
                isdefined(self.inputs.labels_files)):
            labels_files = self.inputs.labels_files
            gm_mask_labels_file = self.inputs.gm_mask_labels_file
            gm_mask_labels = np.array(
                [line.strip() for line in open(gm_mask_labels_file)],
                dtype='str')
            print(gm_mask_labels.shape)
            sum_coclass_matrix = np.zeros(
                (gm_mask_labels.shape[0], gm_mask_labels.shape[0]), dtype=int)
            sum_possible_edge_matrix = np.zeros(
                (gm_mask_labels.shape[0], gm_mask_labels.shape[0]), dtype=int)
            # print sum_coclass_matrix.shape
            group_coclass_matrix = np.zeros(
                (gm_mask_labels.shape[0], gm_mask_labels.shape[0],
                 len(mod_files)), dtype=float)
            print(group_coclass_matrix.shape)
            assert len(mod_files) == len(labels_files) and len(mod_files) == \
                len(node_corres_files), (
                    "Error, length of mod_files, labels_files and \
                node_corres_files are imcompatible {} {} {}".format(
                        len(mod_files), len(labels_files),
                        len(node_corres_files)))
            for index_file in range(len(mod_files)):
                # Silently skip subjects whose input files are missing.
                if os.path.exists(mod_files[index_file]) and \
                        os.path.exists(node_corres_files[index_file]) and\
                        os.path.exists(labels_files[index_file]):
                    community_vect = read_lol_file(mod_files[index_file])
                    node_corres_vect = read_Pajek_corres_nodes(
                        node_corres_files[index_file])
                    labels = np.array([line.strip() for line in open(
                        labels_files[index_file])], dtype='str')
                    # Map this subject's nodes back to the common label space.
                    corres_labels = labels[node_corres_vect]
                    coclass_mat, possible_edge_mat = return_coclass_mat_labels(
                        community_vect, corres_labels, gm_mask_labels)
                    np.fill_diagonal(coclass_mat, 0)
                    np.fill_diagonal(possible_edge_mat, 1)
                    sum_coclass_matrix += coclass_mat
                    sum_possible_edge_matrix += possible_edge_mat
                    group_coclass_matrix[:, :, index_file] = coclass_mat
                else:
                    print("Warning, one or more files between {}, {}, {} do\
                        not exists".format(mod_files[index_file],
                                           node_corres_files[index_file],
                                           labels_files[index_file]))
        else:
            # Neither reference was supplied; nothing can be computed.
            print("Error, gm_mask_coords_file XOR gm_mask_labels_file should\
                be defined")
            return
        # Persist the three matrices in the working directory; names must
        # stay in sync with _list_outputs below.
        group_coclass_matrix_file = os.path.abspath('group_coclass_matrix.npy')
        np.save(group_coclass_matrix_file, group_coclass_matrix)
        print('saving coclass matrix')
        sum_coclass_matrix_file = os.path.abspath('sum_coclass_matrix.npy')
        np.save(sum_coclass_matrix_file, sum_coclass_matrix)
        print('saving possible_edge matrix')
        sum_possible_edge_matrix_file = os.path.abspath(
            'sum_possible_edge_matrix.npy')
        np.save(sum_possible_edge_matrix_file, sum_possible_edge_matrix)
        # save norm_coclass_matrix
        print()
        # NOTE(review): cells where no edge was ever possible divide by
        # zero below (numpy emits a warning and yields nan/inf); the
        # print shows which cells are affected.
        print(np.where(np.array(sum_possible_edge_matrix == 0)))
        # Percentage of times two nodes were coclassified, out of the
        # times the pair was observed at all.
        norm_coclass_matrix = np.divide(
            np.array(sum_coclass_matrix, dtype=float),
            np.array(sum_possible_edge_matrix, dtype=float)) * 100
        # 0/0
        print('saving norm coclass matrix')
        norm_coclass_matrix_file = os.path.abspath('norm_coclass_matrix.npy')
        np.save(norm_coclass_matrix_file, norm_coclass_matrix)
        return runtime
    def _list_outputs(self):
        # File names must match exactly what _run_interface saved.
        outputs = self._outputs().get()
        outputs["group_coclass_matrix_file"] = os.path.abspath(
            'group_coclass_matrix.npy')
        outputs["sum_coclass_matrix_file"] = os.path.abspath(
            'sum_coclass_matrix.npy')
        outputs["sum_possible_edge_matrix_file"] = os.path.abspath(
            'sum_possible_edge_matrix.npy')
        outputs["norm_coclass_matrix_file"] = os.path.abspath(
            'norm_coclass_matrix.npy')
        return outputs
# DiffMatrices
class DiffMatricesInputSpec(BaseInterfaceInputSpec):
    """Inputs for DiffMatrices: the two .npy matrices to subtract."""
    mat_file1 = File(desc='Matrix in npy format', exists=True, mandatory=True)
    mat_file2 = File(desc='Matrix in npy format', exists=True, mandatory=True)
class DiffMatricesOutputSpec(TraitedSpec):
    """Output of DiffMatrices: the computed difference matrix."""
    diff_mat_file = File(
        desc='Difference of Matrices (mat1 - mat2) in npy format',
        exists=True,
        mandatory=True)
class DiffMatrices(BaseInterface):
    """
    Description:
    Compute difference between two matrices; both must have the same shape.
    Inputs:
        mat_file1:
            type = File, exists=True, desc='Matrix in npy format',
            mandatory=True
        mat_file2:
            type = File, exists=True, desc='Matrix in npy format',
            mandatory=True
    Outputs:
        diff_mat_file:
            type = File, exists=True,
            desc='Difference of Matrices (mat1 - mat2) in npy format',
            mandatory=True
    """
    input_spec = DiffMatricesInputSpec
    output_spec = DiffMatricesOutputSpec
    def _run_interface(self, runtime):
        # Load both matrices from their .npy files.
        mat1 = np.load(self.inputs.mat_file1)
        print(mat1.shape)
        mat2 = np.load(self.inputs.mat_file2)
        print(mat2.shape)
        # Element-wise subtraction requires identical shapes; fail early.
        # (Fix: error message previously read "substrat".)
        assert check_np_shapes(mat1.shape, mat2.shape), (
            "Warning, shapes are different, cannot subtract matrices")
        diff_mat = mat1 - mat2
        print(diff_mat)
        # Save next to the nipype working directory; the file name must
        # match _list_outputs below.
        diff_mat_file = os.path.abspath("diff_matrix.npy")
        np.save(diff_mat_file, diff_mat)
        return runtime
    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs["diff_mat_file"] = os.path.abspath("diff_matrix.npy")
        return outputs
# PlotCoclass
class PlotCoclassInputSpec(BaseInterfaceInputSpec):
    """Inputs for PlotCoclass: matrix, optional labels and value range."""
    coclass_matrix_file = File(
        desc='coclass matrix in npy format', exists=True, mandatory=True)
    labels_file = File(desc='labels of nodes', exists=True, mandatory=False)
    list_value_range = traits.ListInt(
        mandatory=False, desc='force the range of the plot')
class PlotCoclassOutputSpec(TraitedSpec):
    """Output of PlotCoclass: the rendered heatmap file."""
    plot_coclass_matrix_file = File(
        desc="eps file with graphical representation", exists=True)
class PlotCoclass(BaseInterface):
    """
    Description :
    Plot coclass matrix with matplotlib matshow
    - labels are optional
    - range values are optional (default is min and max values of the matrix)
    Inputs:
        coclass_matrix_file:
            type = File, exists=True, desc='coclass matrix in npy format',
            mandatory=True
        labels_file:
            type = File, exists=True, desc='labels of nodes', mandatory=False
        list_value_range:
            type = ListInt, desc='force the range of the plot', mandatory=False
    Outputs:
        plot_coclass_matrix_file:
            type = File, exists=True,
            desc="eps file with graphical representation"
    """
    input_spec = PlotCoclassInputSpec
    output_spec = PlotCoclassOutputSpec
    def _run_interface(self, runtime):
        coclass_matrix_file = self.inputs.coclass_matrix_file
        labels_file = self.inputs.labels_file
        list_value_range = self.inputs.list_value_range
        coclass_mat = np.load(coclass_matrix_file)
        if isdefined(labels_file):
            # Fix: close the labels file (it was previously left open).
            with open(labels_file) as f:
                labels = [line.strip() for line in f]
        else:
            labels = []
        if not isdefined(list_value_range):
            # Default the color range to the full data range of the matrix.
            list_value_range = [np.amin(coclass_mat), np.amax(coclass_mat)]
        path, fname, ext = split_f(coclass_matrix_file)
        plot_coclass_matrix_file = os.path.abspath('heatmap_' + fname + '.eps')
        plot_ranged_cormat(plot_coclass_matrix_file, coclass_mat,
                           labels, fix_full_range=list_value_range)
        return runtime
    def _list_outputs(self):
        # Derive the output name from the input matrix name, exactly as
        # _run_interface does.
        outputs = self._outputs().get()
        path, fname, ext = split_f(self.inputs.coclass_matrix_file)
        outputs["plot_coclass_matrix_file"] = os.path.abspath(
            'heatmap_' + fname + '.eps')
        return outputs
| |
from nose.tools import * # flake8: noqa
from api.base.settings.defaults import API_BASE
from tests.base import ApiTestCase
from osf.models import Subject
from osf_tests.factories import SubjectFactory, PreprintProviderFactory
class TestPreprintProviderSubjects(ApiTestCase):
    # End-to-end tests of /preprint_providers/<id>/taxonomies/ for a
    # provider with no rules ("lawless") versus one whose
    # subjects_acceptable rules restrict the visible hierarchy ("ruled").
    def create_subject_rules(self):
        '''
        Subject Hierarchy
        +-----------------------------+
        |                             |
        |      +-------->B+----->F    |
        |      |                      |
        |      A+----------->C        |
        |      |                      |
        |      +-------->D+----->G    |
        |                             |
        |      H+------>I+----->J     |
        |               |             |
        |               +----->K      |
        |                             |
        |      L+------>M+----->N     |
        |               |             |
        |               +------->E    |
        |                             |
        |      O                      |
        +-----------------------------+
        '''
        self.subA = SubjectFactory(text='A')
        self.subB = SubjectFactory(text='B', parent=self.subA)
        self.subC = SubjectFactory(text='C', parent=self.subA)
        self.subD = SubjectFactory(text='D', parent=self.subA)
        self.subF = SubjectFactory(text='F', parent=self.subB)
        self.subG = SubjectFactory(text='G', parent=self.subD)
        self.subH = SubjectFactory(text='H')
        self.subI = SubjectFactory(text='I', parent=self.subH)
        self.subJ = SubjectFactory(text='J', parent=self.subI)
        self.subK = SubjectFactory(text='K', parent=self.subI)
        self.subL = SubjectFactory(text='L')
        self.subM = SubjectFactory(text='M', parent=self.subL)
        self.subE = SubjectFactory(text='E', parent=self.subM)
        self.subN = SubjectFactory(text='N', parent=self.subM)
        self.subO = SubjectFactory(text='O')
        # Each rule is (list of subject ids forming a path, flag); judging
        # by the allow-list below, the flag appears to control whether
        # descendants of the last subject are also allowed — TODO confirm
        # against the subjects_acceptable implementation.
        rules = [
            ([self.subA._id, self.subB._id], False),
            ([self.subA._id, self.subD._id], True),
            ([self.subH._id, self.subI._id, self.subJ._id], True),
            ([self.subL._id], True)
        ]
        # This should allow: A, B, D, G, H, I, J, L, M, N and E
        # This should not allow: C, F, K, O
        return rules
    def setUp(self):
        super(TestPreprintProviderSubjects, self).setUp()
        self.lawless_preprint_provider = PreprintProviderFactory()
        self.ruled_preprint_provider = PreprintProviderFactory()
        self.ruled_preprint_provider.subjects_acceptable = self.create_subject_rules()
        self.ruled_preprint_provider.save()
        # page[size]=15 so all 15 subjects fit on one page.
        self.lawless_url = '/{}preprint_providers/{}/taxonomies/?page[size]=15&'.format(API_BASE, self.lawless_preprint_provider._id)
        self.ruled_url = '/{}preprint_providers/{}/taxonomies/?page[size]=15&'.format(API_BASE, self.ruled_preprint_provider._id)
    def test_no_rules_grabs_all(self):
        # All 15 subjects are visible without rules.
        res = self.app.get(self.lawless_url)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 15)
    def test_rules_only_grab_acceptable_subjects(self):
        # Only the 11 allowed subjects (see create_subject_rules).
        res = self.app.get(self.ruled_url)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 11)
    def test_no_rules_with_null_parent_filter(self):
        # Roots only: A, H, L, O.
        res = self.app.get(self.lawless_url + 'filter[parents]=null')
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 4)
    def test_rules_enforced_with_null_parent_filter(self):
        # Root O is excluded by the rules.
        res = self.app.get(self.ruled_url + 'filter[parents]=null')
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 3)
        texts = [item['attributes']['text'] for item in res.json['data']]
        assert_in('A', texts)
        assert_in('H', texts)
        assert_in('L', texts)
        assert_not_in('O', texts)
    def test_no_rules_with_parents_filter(self):
        res = self.app.get(self.lawless_url + 'filter[parents]={}'.format(self.subB._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 1)
        assert_equal(res.json['data'][0]['attributes']['text'], 'F')
        res = self.app.get(self.lawless_url + 'filter[parents]={}'.format(self.subI._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 2)
        res = self.app.get(self.lawless_url + 'filter[parents]={}'.format(self.subM._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 2)
    def test_rules_enforced_with_parents_filter(self):
        # F, K are disallowed children; J, N, E remain visible.
        res = self.app.get(self.ruled_url + 'filter[parents]={}'.format(self.subB._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 0)
        texts = [item['attributes']['text'] for item in res.json['data']]
        assert_not_in('F', texts)
        res = self.app.get(self.ruled_url + 'filter[parents]={}'.format(self.subI._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 1)
        texts = [item['attributes']['text'] for item in res.json['data']]
        assert_in('J', texts)
        assert_not_in('K', texts)
        res = self.app.get(self.ruled_url + 'filter[parents]={}'.format(self.subM._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 2)
        texts = [item['attributes']['text'] for item in res.json['data']]
        assert_in('N', texts)
        assert_in('E', texts)
    def test_no_rules_with_parent_filter(self):
        # Same expectations as filter[parents]; both spellings supported.
        res = self.app.get(self.lawless_url + 'filter[parent]={}'.format(self.subB._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 1)
        assert_equal(res.json['data'][0]['attributes']['text'], 'F')
        res = self.app.get(self.lawless_url + 'filter[parent]={}'.format(self.subI._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 2)
        res = self.app.get(self.lawless_url + 'filter[parent]={}'.format(self.subM._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 2)
    def test_rules_enforced_with_parent_filter(self):
        res = self.app.get(self.ruled_url + 'filter[parent]={}'.format(self.subB._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 0)
        texts = [item['attributes']['text'] for item in res.json['data']]
        assert_not_in('F', texts)
        res = self.app.get(self.ruled_url + 'filter[parent]={}'.format(self.subI._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 1)
        texts = [item['attributes']['text'] for item in res.json['data']]
        assert_in('J', texts)
        assert_not_in('K', texts)
        res = self.app.get(self.ruled_url + 'filter[parent]={}'.format(self.subM._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 2)
        texts = [item['attributes']['text'] for item in res.json['data']]
        assert_in('N', texts)
        assert_in('E', texts)
    def test_no_rules_with_grandparent_filter(self):
        # A's children: B, C, D.
        res = self.app.get(self.lawless_url + 'filter[parents]={}'.format(self.subA._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 3)
    def test_rules_enforced_with_grandparent_filter(self):
        # C is excluded by the rules.
        res = self.app.get(self.ruled_url + 'filter[parents]={}'.format(self.subA._id))
        assert_equal(res.status_code, 200)
        assert_equal(res.json['links']['meta']['total'], 2)
        texts = [item['attributes']['text'] for item in res.json['data']]
        assert_in('B', texts)
        assert_in('D', texts)
        assert_not_in('C', texts)
class TestPreprintProviderSpecificSubjects(ApiTestCase):
    # Each provider must expose only its own custom taxonomy; filtering on
    # another provider's subject must match nothing.
    def setUp(self):
        super(TestPreprintProviderSpecificSubjects, self).setUp()
        self.provider_1 = PreprintProviderFactory()
        self.provider_2 = PreprintProviderFactory()
        # Two disjoint root -> parent -> child chains, one per provider.
        self.root_subject_1 = SubjectFactory(text='R1', provider=self.provider_1)
        self.parent_subject_1 = SubjectFactory(text='P1', provider=self.provider_1, parent=self.root_subject_1)
        self.child_subject_1 = SubjectFactory(text='C1', provider=self.provider_1, parent=self.parent_subject_1)
        self.root_subject_2 = SubjectFactory(text='R2', provider=self.provider_2)
        self.parent_subject_2 = SubjectFactory(text='P2', provider=self.provider_2, parent=self.root_subject_2)
        self.child_subject_2 = SubjectFactory(text='C2', provider=self.provider_2, parent=self.parent_subject_2)
        self.url_1 = '/{}preprint_providers/{}/taxonomies/?page[size]=15&'.format(API_BASE, self.provider_1._id)
        self.url_2 = '/{}preprint_providers/{}/taxonomies/?page[size]=15&'.format(API_BASE, self.provider_2._id)
    def test_mapped_subjects_are_not_shared_list(self):
        res_1 = self.app.get(self.url_1)
        res_2 = self.app.get(self.url_2)
        assert_equal(res_1.status_code, 200)
        assert_equal(res_2.status_code, 200)
        assert_equal(res_1.json['links']['meta']['total'], 3)
        assert_equal(res_2.json['links']['meta']['total'], 3)
        # Disjoint (empty intersection) and six distinct subjects in total.
        assert_equal(len(set([d['attributes']['text'] for d in res_1.json['data']]) & set([d['attributes']['text'] for d in res_2.json['data']])), 0)
        assert_equal(len(set([d['attributes']['text'] for d in res_1.json['data']]) | set([d['attributes']['text'] for d in res_2.json['data']])), 6)
    def test_mapped_subjects_are_not_shared_filter(self):
        res_1 = self.app.get(self.url_1 + 'filter[parent]={}'.format(self.root_subject_1._id))
        res_2 = self.app.get(self.url_2 + 'filter[parent]={}'.format(self.root_subject_2._id))
        assert_equal(res_1.status_code, 200)
        assert_equal(res_2.status_code, 200)
        assert_equal(res_1.json['links']['meta']['total'], 1)
        assert_equal(res_2.json['links']['meta']['total'], 1)
        assert_equal(len(set([d['attributes']['text'] for d in res_1.json['data']]) & set([d['attributes']['text'] for d in res_2.json['data']])), 0)
        assert_equal(len(set([d['attributes']['text'] for d in res_1.json['data']]) | set([d['attributes']['text'] for d in res_2.json['data']])), 2)
    def test_mapped_subjects_filter_wrong_provider(self):
        # Bug fix: filter on the subject *id* (._id); the test previously
        # formatted the Subject object itself into the query string, so it
        # passed vacuously regardless of provider isolation.
        res_1 = self.app.get(self.url_1 + 'filter[parent]={}'.format(self.root_subject_2._id))
        res_2 = self.app.get(self.url_2 + 'filter[parent]={}'.format(self.root_subject_1._id))
        assert_equal(res_1.status_code, 200)
        assert_equal(res_2.status_code, 200)
        assert_equal(res_1.json['links']['meta']['total'], 0)
        assert_equal(res_2.json['links']['meta']['total'], 0)
class TestPreprintProviderHighlightedSubjects(ApiTestCase):
    # The /taxonomies/highlighted/ endpoint returns only subjects flagged
    # highlighted=True for the provider.
    def setUp(self):
        super(TestPreprintProviderHighlightedSubjects, self).setUp()
        self.provider = PreprintProviderFactory()
        self.subj_a = SubjectFactory(provider=self.provider, text='A')
        self.subj_aa = SubjectFactory(provider=self.provider, text='AA', parent=self.subj_a, highlighted=True)
        self.url = '/{}preprint_providers/{}/taxonomies/highlighted/'.format(API_BASE, self.provider._id)
    def test_only_highlighted_subjects_returned(self):
        # Renamed from "test_mapped_subjects_filter_wrong_provider" — a
        # copy-pasted name that described an unrelated test.
        res = self.app.get(self.url)
        assert res.status_code == 200
        assert len(res.json['data']) == 1
        assert res.json['data'][0]['id'] == self.subj_aa._id
class TestCustomTaxonomy(ApiTestCase):
    # A provider with a custom taxonomy reports its own share_title for
    # its subjects, independently of the bepress subject it maps to.
    def setUp(self):
        super(TestCustomTaxonomy, self).setUp()
        self.osf_provider = PreprintProviderFactory(_id='osf', share_title='bepress')
        self.asdf_provider = PreprintProviderFactory(_id='asdf', share_title='ASDF')
        # One bepress subject, plus a custom subject mapped onto it.
        source_subject = SubjectFactory(text='BePress Text', provider=self.osf_provider)
        SubjectFactory(text='Other Text', bepress_subject=source_subject, provider=self.asdf_provider)
        self.url = '/{}preprint_providers/{}/taxonomies/'
    def test_taxonomy_share_title(self):
        for provider in (self.osf_provider, self.asdf_provider):
            res = self.app.get(self.url.format(API_BASE, provider._id))
            payload = res.json['data']
            assert len(payload) == 1
            assert payload[0]['attributes']['share_title'] == provider.share_title
| |
#!/usr/bin/python
#SBATCH --job-name=mmp_tel
#SBATCH --output=../log/%j.txt
#SBATCH --error=../log/%j.out
#SBATCH --partition=compute
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=4
#SBATCH --nodes=1
#SBATCH --mem=16384
#SBATCH --mail-user=dec@u.northwestern.edu
#SBATCH --workdir=/lscr2/andersenlab/dec211/mmp_telseq/sra
import os, sys
import glob
import re
import subprocess
from subprocess import PIPE, Popen
from datetime import datetime
def file_exists(filename):
    """Return True if *filename* is an existing, non-empty regular file."""
    # Single boolean expression replaces the redundant if/else returns.
    return os.path.isfile(filename) and os.path.getsize(filename) > 0
class EAV:
    """
    Very simple Entity-Attribute-Value Object.

    Holds one (entity, attribute, value) observation, with optional
    sub-entity / sub-attribute qualifiers and a comment, and appends it as
    one tab-separated row to the log file set on ``self.file`` via save().
    """
    def __init__(self):
        self.entity = ""
        self.sub_entity = ""
        self.attribute = ""
        self.sub_attribute = ""
        self.value = ""
        self.timestamp = datetime.now()
        self.comment = ""
        # Path of the TSV log file; must be set before calling save().
        self.file = None
    def __repr__(self):
        # Bug fix: the second line was mislabelled "Entity" instead of
        # "Sub-Entity".
        return "\nEntity:{self.entity}\n\
               Sub-Entity:{self.sub_entity}\n\
               Attribute:{self.attribute}\n\
               Sub-Attribute:{self.sub_attribute}\n\
               Value:{self.value}\n\
               timestamp:{self.timestamp}\n".format(**locals())
    def save(self):
        """Append this record to self.file, writing a header for new files."""
        if self.file is None:
            raise Exception("No Log File Set")
        # Only write the header when the file is absent or empty.
        write_header = not file_exists(self.file)
        with(open(self.file, "a")) as f:
            if write_header is True:
                f.write("entity\tsub_entity\tattribute\tsub_attribute\tvalue\tcomment\ttimestamp\n")
            line = '\t'.join(map(str, [self.entity,
                                       self.sub_entity,
                                       self.attribute,
                                       self.sub_attribute,
                                       self.value,
                                       self.comment,
                                       self.timestamp]))
            f.write(line + "\n")
def get_contigs(bam):
    """Parse the @SQ lines of a BAM header into a {contig: length} dict."""
    proc = subprocess.Popen(["samtools", "view", "-H", bam],
                            stdout=PIPE, stderr=PIPE)
    header, err = proc.communicate()
    if err != "":
        raise Exception(err)
    # Each @SQ line carries the contig name (SN) and its length (LN).
    sq_pattern = "@SQ\WSN:(?P<chrom>[A-Za-z0-9_]*)\WLN:(?P<length>[0-9]+)"
    contigs = {}
    for chrom, length in re.findall(sq_pattern, header):
        contigs[chrom] = int(length)
    return contigs
def coverage(bam, mtchr = None):
    """
    Compute per-contig, genome-wide and (when possible) nuclear coverage
    statistics for a BAM file using samtools/awk.

    Args:
        bam: path to a BAM file.
        mtchr: name of the mitochondrial contig. If None, it is guessed as
            the unique contig whose name starts with 'm'/'M'. (Bug fix: the
            caller-supplied value used to be unconditionally overwritten by
            the guess.)

    Returns:
        A flat list of (contig, statistic_name, value) tuples.

    Raises:
        Exception: if the BAM file does not exist.
    """
    # Check to see if file exists
    if not os.path.isfile(bam):
        raise Exception("Bam file does not exist")
    contigs = get_contigs(bam)
    if mtchr is None:
        # Guess mitochondrial chromosome: unique contig starting with m/M.
        candidates = [x for x in contigs if x.lower().find("m") == 0]
        mtchr = candidates[0] if len(candidates) == 1 else None
    coverage_dict = {}
    for c in contigs.keys():
        # awk folds `samtools depth` into (covered-base count, depth sum).
        command = "samtools depth -r %s %s | awk '{sum+=$3;cnt++}END{print cnt \"\t\" sum}'" % (c, bam)
        coverage_dict[c] = {}
        coverage_dict[c]["Bases Mapped"], coverage_dict[c]["Sum of Depths"] = map(int,subprocess.Popen(command, stdout=PIPE, shell = True).communicate()[0].strip().split("\t"))
        coverage_dict[c]["Breadth of Coverage"] = coverage_dict[c]["Bases Mapped"] / float(contigs[c])
        coverage_dict[c]["Depth of Coverage"] = coverage_dict[c]["Sum of Depths"] / float(contigs[c])
        coverage_dict[c]["Length"] = int(contigs[c])
    # Calculate Genome Wide Breadth of Coverage and Depth of Coverage
    genome_length = float(sum(contigs.values()))
    coverage_dict["genome"] = {}
    coverage_dict["genome"]["Length"] = int(genome_length)
    coverage_dict["genome"]["Bases Mapped"] = sum([x["Bases Mapped"] for k, x in coverage_dict.iteritems() if k != "genome"])
    coverage_dict["genome"]["Sum of Depths"] = sum([x["Sum of Depths"] for k, x in coverage_dict.iteritems() if k != "genome"])
    # Reuse the sums computed above instead of re-summing (hoisted work).
    coverage_dict["genome"]["Breadth of Coverage"] = coverage_dict["genome"]["Bases Mapped"] / float(genome_length)
    coverage_dict["genome"]["Depth of Coverage"] = coverage_dict["genome"]["Sum of Depths"] / float(genome_length)
    if mtchr is not None and mtchr in coverage_dict:
        # Calculate nuclear breadth of coverage and depth of coverage
        ignore_contigs = [mtchr, "genome", "nuclear"]
        coverage_dict["nuclear"] = {}
        coverage_dict["nuclear"]["Length"] = sum([x["Length"] for k, x in coverage_dict.iteritems() if k not in ignore_contigs])
        coverage_dict["nuclear"]["Bases Mapped"] = sum([x["Bases Mapped"] for k, x in coverage_dict.iteritems() if k not in ignore_contigs])
        coverage_dict["nuclear"]["Sum of Depths"] = sum([x["Sum of Depths"] for k, x in coverage_dict.iteritems() if k not in ignore_contigs])
        coverage_dict["nuclear"]["Breadth of Coverage"] = coverage_dict["nuclear"]["Bases Mapped"] / float(coverage_dict["nuclear"]["Length"])
        coverage_dict["nuclear"]["Depth of Coverage"] = coverage_dict["nuclear"]["Sum of Depths"] / float(coverage_dict["nuclear"]["Length"])
        # Calculate the ratio of mtDNA depth to nuclear depth
        coverage_dict["genome"]["mt_ratio"] = coverage_dict[mtchr]["Depth of Coverage"] / float(coverage_dict["nuclear"]["Depth of Coverage"])
    # Flatten Dictionary into (contig, statistic, value) tuples
    flattened = []
    for k, v in coverage_dict.items():
        for stat, value in v.items():
            flattened += [(k, stat, value)]
    return flattened
# ---------------------------------------------------------------------------
# Driver: each SLURM array task processes one strain, i.e. one line of
# ../strain_info.txt selected by the 1-based task index in sys.argv[1].
# ---------------------------------------------------------------------------
line_num = int(sys.argv[1]) - 1
f=open('../strain_info.txt')
lines = f.readlines()
lines = [x.strip().split("\t") for x in lines]
line = lines[line_num]
# Column 0 is space-separated; fields 2 and 4 hold the strain name and the
# read length (presumably -- TODO confirm against strain_info.txt format).
strain_name = line[0].split(" ")[2]
strain_bp = line[0].split(" ")[4]
reference = "/lscr2/andersenlab/dec211/pyPipeline/genomes/WS245/c_elegans.PRJNA13758.WS245.genomic.fa.gz"
# Download sra files
"""for line in lines:
    for i in line[1:]:
        strain = line[0].split(" ")[2]
        length = line[0].split(" ")[4]
        if len(glob.glob("../telseq/{strain}.{length}*".format(**locals()))) == 0:
            print strain, length
            i06 = i[0:6]
            i09 = i[0:9]
            loc_string = "ftp://ftp-trace.ncbi.nih.gov/sra/sra-instant/reads/ByRun/sra/SRR/{i06}/{i09}/{i}.sra"
            loc_string = loc_string.format(**locals())
            print "downloading " + loc_string.format(**locals())
            print "curl {loc_string}".format(**locals())
            os.system("curl {loc_string} > {i}.sra".format(**locals()))
"""
# Process SRA Files
# Columns 1..n of the line are SRA run accessions for this strain; each is
# dumped to paired fastq, aligned with bwa, then sorted and indexed.
for i in line[1:]:
    os.system("fastq-dump --split-files --gzip {i}.sra ".format(i=i))
    #os.system("rm {i}".format(**locals()))
    # Generate read group
    RG = r'@RG\tID:{i}\tSM:{strain_name}'.format(**locals())
    # Align
    os.system(r"bwa mem -R '{RG}' -t 4 {reference} {i}_1.fastq.gz {i}_2.fastq.gz > ../bam/{i}.tmp.bam".format(i=i.replace(".sra",""), RG=RG, reference=reference))
    # Sort
    os.system("samtools sort -O bam -T ../bam/{i}.TEMP.bam -@ 4 ../bam/{i}.tmp.bam > ../bam/{i}.sorted.bam && samtools index ../bam/{i}.sorted.bam".format(**locals()))
    # Remove temporary BAM and fastq
    os.system("rm {i}_1.fastq.gz && rm {i}_2.fastq.gz".format(i=i))
    os.system("rm ../bam/{i}.tmp.bam".format(i=i))
# Combine processed BAM Files.
SRA_files = ' '.join(["../bam/" + x.replace(".sra","") + ".sorted.bam" for x in line[1:]])
if len(["../bam/" + x.replace(".sra","") + ".sorted.bam" for x in line[1:]]) > 1:
    merge_command = "samtools merge -f -@ 4 ../bam/{strain_name}.{strain_bp}.bam {SRA_files} && samtools index ../bam/{strain_name}.{strain_bp}.bam".format(**locals())
    os.system(merge_command)
else:
    # Single run: no merge needed, just rename and index.
    os.system("mv {SRA_files} ../bam/{strain_name}.{strain_bp}.bam".format(**locals()))
    os.system("samtools index ../bam/{strain_name}.{strain_bp}.bam".format(**locals()))
for i in line[1:]:
    os.system("rm ../bam/{i}.sorted.bam && rm ../bam/{i}.sorted.bam.bai".format(i=i))
# Produce Coverage Statistics Here
# Each (contig, statistic, value) tuple from coverage() becomes one EAV row.
bam = "../bam/{strain_name}.{strain_bp}.bam".format(**locals())
eav = EAV()
eav.file = "../eav.txt"
eav.entity = strain_name
eav.sub_entity = strain_bp
for contig, k,v in coverage(bam, "MtDNA"):
    eav.sub_attribute = contig + " (" + k + ")"
    eav.value = v
    eav.save()
# Run Telseq Here
#telseq -z 'AATCCG' -u $file.bam -o $file.telseq_elegans.AATCCG.noreadgroup.txt
os.system("telseq -m -z 'TTAGGC' -u ../bam/{strain_name}.{strain_bp}.bam -o ../telseq/{strain_name}.{strain_bp}.telseq_elegans.TTAGGC.noreadgroup.txt".format(**locals()))
os.system("telseq -m -z 'GTATGC' -u ../bam/{strain_name}.{strain_bp}.bam -o ../telseq/{strain_name}.{strain_bp}.telseq_elegans.GTATGC.noreadgroup.txt".format(**locals()))
#telseq -z 'GTCTAG' -u $file.bam -o $file.telseq_elegans.GTCTAG.noreadgroup.txt
# Delete sra file
# NOTE(review): the rm is commented out, so this loop currently does
# nothing; the .sra files are kept.
for i in line[1:]:
    #os.system("rm {i}.sra ".format(i=i))
    pass
# Delete bam file
os.system("rm ../bam/{strain_name}.{strain_bp}.bam".format(**locals()))
| |
# Copyright (c) 2016, Meteotest
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Meteotest nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Wrapper around h5py that synchronizes reading and writing of hdf5 files
(parallel reading is possible, writing is serialized)
!!! IMPORTANT !!!
Note that the locks used are not recursive/reentrant. Therefore, a synchronized
method (decorated by @reader or @writer) must *not* call other synchronized
methods, otherwise we get a deadlock!
"""
import os
import h5py
from .sync import reader, writer
from hurray.server.log import app_log
# TODO Note that self.file must never be (accidentally) modified because the
# whole @reader/@writer synchronization relies on it!
class Node(object):
    """
    Wrapper for h5py.Node

    Stores only the hdf5 file path and the node path; the underlying file
    is re-opened for the duration of each access, synchronized via the
    @reader decorator from .sync.
    """
    def __init__(self, file, path):
        """
        Args:
            file: full path to hdf5 file
            path: full path to the hdf5 node (not to be confused with path of
                the file)
        """
        self.file = file
        self._path = path
        self.attrs = AttributeManager(self.file, self._path)
    @reader
    def __getitem__(self, key):
        """
        Return the child node at ``key`` (absolute, or relative to this
        node), wrapped as a h5pyswmr object.

        Raises:
            KeyError if object does not exist.
        """
        # sometimes the underlying hdf5 C library writes errors to stdout,
        # e.g., if a path is not found in a file.
        # cf. http://stackoverflow.com/questions/15117128/
        # h5py-in-memory-file-and-multiprocessing-error
        h5py._errors.silence_errors()
        if key.startswith('/'):  # absolute path
            path = key
        else:  # relative path
            path = os.path.join(self.path, key)
        with h5py.File(self.file, 'r') as f:
            node = f[path]
            return self._wrap_class(node)
    @property
    def path(self):
        """
        wrapper
        """
        return self._path
    @property
    def name(self):
        """
        Name of the group or dataset
        """
        name = os.path.split(self._path)[1]
        # set root group's name to "/"
        name = "/" if name == "" else name
        return name
    def _wrap_class(self, node):
        """
        Wraps h5py objects into h5pyswmr objects.
        Args:
            node: instance of h5py.Group or h5py.Dataset
        Returns:
            Corresponding object as a h5pyswmr object
        Raises:
            TypeError if ``obj`` is of unknown type
        """
        # h5py.File must be tested first: it is also an h5py.Group.
        if isinstance(node, h5py.File):
            return File(name=self.file, mode="r")
        if isinstance(node, h5py.Group):
            return Group(file=self.file, path=node.name)
        elif isinstance(node, h5py.Dataset):
            return Dataset(file=self.file, path=node.name)
        else:
            raise TypeError('unknown h5py node object')
class Group(Node):
    """
    Wrapper for h5py.Group.

    Every operation opens the underlying file for the duration of the call
    and runs under the @reader/@writer wrappers.
    """

    def __init__(self, file, path):
        Node.__init__(self, file, path)

    def __repr__(self):
        return "<HDF5 Group (path={0})>".format(self.path)

    @writer
    def create_group(self, name):
        """
        Wrapper around ``h5py.Group.create_group()``.

        Returns the created group as a (wrapped) Group.
        """
        with h5py.File(self.file, 'r+') as f:
            group = f[self.path]
            created_group = group.create_group(name)
            path = created_group.name
        return Group(self.file, path=path)

    @writer
    def require_group(self, name):
        """
        Wrapper around ``h5py.Group.require_group()``.

        Returns the (possibly pre-existing) group as a (wrapped) Group.
        """
        with h5py.File(self.file, 'r+') as f:
            group = f[self.path]
            created_group = group.require_group(name)
            path = created_group.name
        return Group(self.file, path=path)

    @writer
    def create_dataset(self, **kwargs):
        """
        Wrapper around ``h5py.Group.create_dataset()``.

        Accepts one additional keyword, ``overwrite``: if true, an existing
        dataset with the same name is deleted first.
        """
        # pop() both reads the flag and removes it from kwargs, since h5py
        # does not accept an 'overwrite' argument (this replaces the old
        # get() + try/del/except-pass dance).
        overwrite = kwargs.pop('overwrite', False)
        name = kwargs['name']
        with h5py.File(self.file, 'r+') as f:
            group = f[self.path]
            if overwrite and name in group:
                del group[name]
            dst = group.create_dataset(**kwargs)
            path = dst.name
        return Dataset(self.file, path=path)

    @writer
    def require_dataset(self, **kwargs):
        """
        Wrapper around ``h5py.Group.require_dataset()``.
        """
        with h5py.File(self.file, 'r+') as f:
            group = f[self.path]
            dst = group.require_dataset(**kwargs)
            path = dst.name
        return Dataset(self.file, path=path)

    @reader
    def keys(self):
        """Return the names of this group's members as a list."""
        with h5py.File(self.file, 'r') as f:
            # w/o list() it does not work with py3 (returns a view on a closed
            # hdf5 file)
            keys = list(f[self.path].keys())
        return keys

    # TODO visit() and visititems() do not yet work because @reader methods
    # are not reentrant! => just wrap code into an inner function!
    # @reader
    # def visit(self, func):
    #     """
    #     Wrapper around h5py.Group.vist()
    #     Args:
    #         func: a unary function
    #     """
    #     with h5py.File(self.file, 'r') as f:
    #         return f[self.path].visit(func)
    # @reader
    # def visititems(self, func):
    #     """
    #     Wrapper around h5py.Group.visititems()
    #     Args:
    #         func: a 2-ary function
    #     """
    #     with h5py.File(self.file, 'r') as f:
    #         grp = f[self.path]
    #         def proxy(name):
    #             obj = self._wrap_class(grp[name])
    #             return func(name, obj)
    #         return self.visit(proxy)

    @reader
    def tree(self):
        """
        Return tree data structure consisting of all groups and datasets.
        A tree node is defined recursively as a list:
            [Dataset/Group, [children]]
        Returns: list
        """
        def buildtree(treenode):
            # recursively replaces h5py objects with wrapped ones, filling
            # in the children list along the way
            h5py_obj, children = treenode
            assert (children == [])
            if isinstance(h5py_obj, h5py.Dataset):
                # wrap h5py dataset object (leaf node)
                treenode[0] = self._wrap_class(h5py_obj)
                return
            else:
                # wrap h5py group object and recurse into its members
                treenode[0] = self._wrap_class(h5py_obj)
                for name, childobj in h5py_obj.items():
                    newnode = [childobj, []]
                    children.append(newnode)
                    buildtree(newnode)

        tree = None
        with h5py.File(self.file, 'r') as f:
            root = f[self.path]
            tree = [root, []]  # [h5py object, children]
            buildtree(tree)
        return tree

    @reader
    def items(self):
        """
        Returns a list of (name, object) pairs for objects directly
        attached to this group. Values for broken soft or external links
        show up as None.
        Note that this differs from h5py, where a list (Py2) or a
        "set-like object" (Py3) is returned.
        """
        result = []
        with h5py.File(self.file, 'r') as f:
            for name, obj in f[self.path].items():
                result.append((name, self._wrap_class(obj)))
        return result

    @reader
    def __contains__(self, key):
        """True if this group has a member named ``key``."""
        with h5py.File(self.file, 'r') as f:
            group = f[self.path]
            return key in group

    @writer
    def __delitem__(self, key):
        """Delete member ``key`` from this group."""
        with h5py.File(self.file, 'r+') as f:
            group = f[self.path]
            del group[key]
class File(Group):
    """
    Wrapper for h5py.File. Behaves like the root Group ('/').
    """

    def __init__(self, name, mode="r", *args, **kwargs):
        """
        Open (or create) an hdf5 file.

        Args:
            name: file name of the hdf5 file
            mode: h5py file mode; for creating modes ("w", "w-", "x", "a")
                  an immediate, write-locked h5py.File() call makes sure the
                  file exists on disk before the Group is initialized.
        """
        app_log.debug("XXX {},{} ...".format(name, mode))
        # call h5py.File() in case file needs to be created
        if mode in ("w", "w-", "x", "a"):
            self.file = name  # this is crucial for the @writer annotation

            @writer
            def init(self):
                with h5py.File(name=name, mode=mode, *args, **kwargs):
                    pass
            init(self)
        Group.__init__(self, name, '/')

    def __enter__(self):
        """
        simple context manager (so we can use 'with File() as f')
        """
        # TODO can be removed
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # nothing to release here: the underlying h5py handle is opened and
        # closed per-operation by the wrapped methods
        pass

    def __repr__(self):
        return "<HDF5 File ({0})>".format(self.file)

    @property
    @reader
    def filesize(self):
        """
        Return size of the file in bytes
        """
        stat = os.stat(self.file)
        return stat.st_size

    @writer
    def rename(self, new):
        """
        Rename hdf5 file and keep this wrapper pointing at the new name.
        Args:
            new: new filename
        Raises:
            FileExistsError if file ``new`` already exists
        """
        if os.path.isfile(new):
            raise FileExistsError("file {} exists".format(new))
        os.rename(self.file, new)
        # Bug fix: without updating self.file, every subsequent operation on
        # this object would target the old (now nonexistent) path.
        # NOTE(review): the surrounding @writer lock was acquired under the
        # old name -- confirm the lock bookkeeping tolerates this change.
        self.file = new

    @writer
    def delete(self):
        """
        Remove hdf5 file (this wrapper becomes unusable afterwards)
        """
        os.remove(self.file)
class Dataset(Node):
    """
    Wrapper for h5py.Dataset.
    """

    def __init__(self, file, path):
        Node.__init__(self, file, path)

    @reader
    def __getitem__(self, key):
        """
        Multidimensional slicing for datasets, e.g. ``ds[0, :]``.
        """
        with h5py.File(self.file, 'r') as f:
            return f[self.path][key]

    @writer
    def __setitem__(self, key, value):
        """
        Broadcasting writes for datasets. Example: mydataset[0,:] = np.arange(100)
        """
        with h5py.File(self.file, 'r+') as f:
            f[self.path][key] = value

    @writer
    def resize(self, size, axis=None):
        """Wrapper around ``h5py.Dataset.resize()``."""
        with h5py.File(self.file, 'r+') as f:
            f[self.path].resize(size, axis)

    @property
    @reader
    def shape(self):
        """Shape tuple of the underlying dataset."""
        with h5py.File(self.file, 'r') as f:
            return f[self.path].shape

    @property
    @reader
    def dtype(self):
        """Dtype of the underlying dataset."""
        with h5py.File(self.file, 'r') as f:
            return f[self.path].dtype
class AttributeManager(object):
    """
    Provides same features as AttributeManager from h5py.

    All access re-opens the file so the hdf5 handle never outlives a call.
    """

    def __init__(self, h5file, path):
        """
        Args:
            h5file: file name of hdf5 file
            path: full path to hdf5 node
        """
        self.file = h5file
        self.path = path

    @reader
    def __iter__(self):
        # In order to be compatible with h5py, we return a generator.
        # However, to preserve thread-safety, we must make sure that the hdf5
        # file is closed while the generator is being traversed.
        with h5py.File(self.file, 'r') as f:
            node = f[self.path]
            keys = [key for key in node.attrs]
            return (key for key in keys)

    @reader
    def keys(self):
        """
        Returns attribute keys (list)
        """
        with h5py.File(self.file, 'r') as f:
            node = f[self.path]
            return list(node.attrs.keys())

    @reader
    def __contains__(self, key):
        """True if the node carries an attribute named ``key``."""
        with h5py.File(self.file, 'r') as f:
            node = f[self.path]
            return key in node.attrs

    @reader
    def __getitem__(self, key):
        """Return the value of attribute ``key`` (KeyError if missing)."""
        with h5py.File(self.file, 'r') as f:
            node = f[self.path]
            return node.attrs[key]

    @writer
    def __setitem__(self, key, value):
        """Create or overwrite attribute ``key``."""
        with h5py.File(self.file, 'r+') as f:
            node = f[self.path]
            node.attrs[key] = value

    @writer
    def __delitem__(self, key):
        """Delete attribute ``key`` (KeyError if missing)."""
        with h5py.File(self.file, 'r+') as f:
            node = f[self.path]
            del node.attrs[key]

    @reader
    def get(self, key, defaultvalue):
        """
        Return attribute value or return a default value if key is missing.
        Args:
            key: attribute key
            defaultvalue: default value to be returned if key is missing
        """
        with h5py.File(self.file, 'r') as f:
            node = f[self.path]
            # Bug fix: look the key up in node.attrs, not node itself --
            # node.get() would retrieve a child group/dataset, not an
            # attribute, contradicting this class and its own docstring.
            return node.attrs.get(key, defaultvalue)
| |
import os
import random
import re
from collections import OrderedDict
from dataclasses import dataclass
from typing import Any, Dict, Optional
from django.conf import settings
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
from django.template import loader
from django.views.generic import TemplateView
from zerver.context_processors import zulip_default_context
from zerver.decorator import add_google_analytics_context
from zerver.lib.integrations import CATEGORIES, INTEGRATIONS, HubotIntegration, WebhookIntegration
from zerver.lib.request import REQ, get_request_notes, has_request_variables
from zerver.lib.subdomains import get_subdomain
from zerver.lib.templates import render_markdown_path
from zerver.models import Realm
from zerver.openapi.openapi import get_endpoint_from_operationid, get_openapi_summary
@dataclass
class DocumentationArticle:
    """A resolved documentation article: the template path to render, the
    HTTP status to serve it with, and (for API pages rendered through the
    shared api-doc template) the OpenAPI endpoint path/method.
    """

    article_path: str
    article_http_status: int
    endpoint_path: Optional[str]
    endpoint_method: Optional[str]
def add_api_uri_context(context: Dict[str, Any], request: HttpRequest) -> None:
    """Fill *context* with API/realm URL variables for documentation pages.

    On the root domain of a landing-page deployment we show a placeholder
    subdomain and suppress links into the (nonexistent) realm settings.
    """
    context.update(zulip_default_context(request))
    subdomain = get_subdomain(request)
    if subdomain == Realm.SUBDOMAIN_FOR_ROOT_DOMAIN and settings.ROOT_DOMAIN_LANDING_PAGE:
        display_subdomain = "yourZulipDomain"
        html_settings_links = False
    else:
        display_subdomain = subdomain
        html_settings_links = True

    display_host = Realm.host_for_subdomain(display_subdomain)
    api_url_scheme_relative = display_host + "/api"

    context["external_uri_scheme"] = settings.EXTERNAL_URI_SCHEME
    context["api_url"] = settings.EXTERNAL_URI_SCHEME + api_url_scheme_relative
    context["api_url_scheme_relative"] = api_url_scheme_relative
    context["zulip_url"] = settings.EXTERNAL_URI_SCHEME + display_host
    context["html_settings_links"] = html_settings_links

    if html_settings_links:
        context["settings_html"] = '<a href="/#settings">Zulip settings page</a>'
        context["subscriptions_html"] = '<a target="_blank" href="/#streams">streams page</a>'
    else:
        context["settings_html"] = "Zulip settings page"
        context["subscriptions_html"] = "streams page"
class ApiURLView(TemplateView):
    """TemplateView that injects the API/realm URL context variables."""

    def get_context_data(self, **kwargs: Any) -> Dict[str, str]:
        context = super().get_context_data(**kwargs)
        add_api_uri_context(context, self.request)
        return context
class MarkdownDirectoryView(ApiURLView):
    """Serve a directory of markdown articles (help center or API docs).

    Subclasses configure ``path_template``, a %-template mapping an article
    name to a template path under templates/.
    """

    path_template = ""

    def get_path(self, article: str) -> DocumentationArticle:
        """Resolve *article* to a template path and HTTP status.

        Unresolvable or invalid names resolve to the "missing" article with
        a 404 status.
        """
        http_status = 200
        if article == "":
            article = "index"
        elif article == "include/sidebar_index":
            pass
        elif "/" in article:
            article = "missing"
            http_status = 404
        elif len(article) > 100 or not re.match("^[0-9a-zA-Z_-]+$", article):
            # reject overly long or non-slug article names
            article = "missing"
            http_status = 404
        path = self.path_template % (article,)
        endpoint_name = None
        endpoint_method = None
        # The following is a somewhat hacky approach to extract titles from articles.
        # Hack: `context["article"] has a leading `/`, so we use + to add directories.
        article_path = os.path.join(settings.DEPLOY_ROOT, "templates") + path
        if (not os.path.exists(article_path)) and self.path_template == "/zerver/api/%s.md":
            # No dedicated markdown file; try interpreting the article name
            # as an OpenAPI operation id and render the shared template.
            try:
                endpoint_name, endpoint_method = get_endpoint_from_operationid(article)
                path = "/zerver/api/api-doc-template.md"
            except AssertionError:
                return DocumentationArticle(
                    article_path=self.path_template % ("missing",),
                    article_http_status=404,
                    endpoint_path=None,
                    endpoint_method=None,
                )
        try:
            loader.get_template(path)
            return DocumentationArticle(
                article_path=path,
                article_http_status=http_status,
                endpoint_path=endpoint_name,
                endpoint_method=endpoint_method,
            )
        except loader.TemplateDoesNotExist:
            return DocumentationArticle(
                article_path=self.path_template % ("missing",),
                article_http_status=404,
                endpoint_path=None,
                endpoint_method=None,
            )

    def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
        """Build the rendering context: article path, sidebar, Open Graph
        title/description, and the API URL sub-context."""
        article = kwargs["article"]
        context: Dict[str, Any] = super().get_context_data()
        documentation_article = self.get_path(article)
        context["article"] = documentation_article.article_path
        # For disabling the "Back to home" on the homepage
        context["not_index_page"] = not context["article"].endswith("/index.md")
        if self.path_template == "/zerver/help/%s.md":
            context["page_is_help_center"] = True
            context["doc_root"] = "/help/"
            context["doc_root_title"] = "Help center"
            sidebar_article = self.get_path("include/sidebar_index")
            sidebar_index = sidebar_article.article_path
            title_base = "Zulip Help Center"
        else:
            context["page_is_api_center"] = True
            context["doc_root"] = "/api/"
            context["doc_root_title"] = "API documentation"
            sidebar_article = self.get_path("sidebar_index")
            sidebar_index = sidebar_article.article_path
            title_base = "Zulip API documentation"
        # The following is a somewhat hacky approach to extract titles from articles.
        # Hack: `context["article"] has a leading `/`, so we use + to add directories.
        article_path = os.path.join(settings.DEPLOY_ROOT, "templates") + context["article"]
        # NOTE(review): if the article file does not exist, endpoint_name /
        # endpoint_method stay unassigned and the reference below would
        # raise -- presumably get_path guarantees existence; confirm.
        if os.path.exists(article_path):
            with open(article_path) as article_file:
                first_line = article_file.readlines()[0]
                # Strip the header and then use the first line to get the article title
                if context["article"] == "/zerver/api/api-doc-template.md":
                    endpoint_name, endpoint_method = (
                        documentation_article.endpoint_path,
                        documentation_article.endpoint_method,
                    )
                    assert endpoint_name is not None
                    assert endpoint_method is not None
                    article_title = get_openapi_summary(endpoint_name, endpoint_method)
                elif self.path_template == "/zerver/api/%s.md" and "{generate_api_title(" in first_line:
                    api_operation = context["OPEN_GRAPH_URL"].split("/api/")[1]
                    endpoint_name, endpoint_method = get_endpoint_from_operationid(api_operation)
                    article_title = get_openapi_summary(endpoint_name, endpoint_method)
                else:
                    article_title = first_line.lstrip("#").strip()
                    endpoint_name = endpoint_method = None
            if context["not_index_page"]:
                context["OPEN_GRAPH_TITLE"] = f"{article_title} ({title_base})"
            else:
                context["OPEN_GRAPH_TITLE"] = title_base
        request_notes = get_request_notes(self.request)
        # Placeholder description, replaced after markdown rendering.
        request_notes.placeholder_open_graph_description = (
            f"REPLACMENT_OPEN_GRAPH_DESCRIPTION_{int(2**24 * random.random())}"
        )
        context["OPEN_GRAPH_DESCRIPTION"] = request_notes.placeholder_open_graph_description
        context["sidebar_index"] = sidebar_index
        # An "article" might require the api_uri_context to be rendered
        api_uri_context: Dict[str, Any] = {}
        add_api_uri_context(api_uri_context, self.request)
        api_uri_context["run_content_validators"] = True
        context["api_uri_context"] = api_uri_context
        if endpoint_name and endpoint_method:
            context["api_uri_context"]["API_ENDPOINT_NAME"] = endpoint_name + ":" + endpoint_method
        add_google_analytics_context(context)
        return context

    def get(self, request: HttpRequest, article: str = "") -> HttpResponse:
        documentation_article = self.get_path(article)
        http_status = documentation_article.article_http_status
        # Bug fix: pass the request (not the view instance) to the bound
        # TemplateView.get; the old super().get(self, ...) handed 'self' to
        # the 'request' parameter.
        result = super().get(request, article=article)
        if http_status != 200:
            result.status_code = http_status
        return result
def add_integrations_context(context: Dict[str, Any]) -> None:
    """Add alphabetized integration/category tables and a rounded-down
    "Over X integrations" count to *context*."""
    enabled_count = sum(1 for integration in INTEGRATIONS.values() if integration.is_enabled())
    context["categories_dict"] = OrderedDict(sorted(CATEGORIES.items()))
    context["integrations_dict"] = OrderedDict(sorted(INTEGRATIONS.items()))
    # Subtract 1 so saying "Over X integrations" is correct. Then,
    # round down to the nearest multiple of 10.
    context["integrations_count_display"] = ((enabled_count - 1) // 10) * 10
def add_integrations_open_graph_context(context: Dict[str, Any], request: HttpRequest) -> None:
    """Set Open Graph title/description for integration pages, picking the
    title from the last URL path component (integration, category, or the
    index page)."""
    path_name = request.path.rstrip("/").split("/")[-1]
    description = (
        "Zulip comes with over a hundred native integrations out of the box, "
        "and integrates with Zapier and IFTTT to provide hundreds more. "
        "Connect the apps you use every day to Zulip."
    )
    title = None
    if path_name in INTEGRATIONS:
        title = f"Connect {INTEGRATIONS[path_name].display_name} to Zulip"
    elif path_name in CATEGORIES:
        title = f"Connect your {CATEGORIES[path_name]} tools to Zulip"
    elif path_name == "integrations":
        title = "Connect the tools you use to Zulip"
    # Unrecognized paths intentionally get no Open Graph overrides.
    if title is not None:
        context["OPEN_GRAPH_TITLE"] = title
        context["OPEN_GRAPH_DESCRIPTION"] = description
class IntegrationView(ApiURLView):
    """Renders the /integrations index page."""

    template_name = "zerver/integrations/index.html"

    def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
        context: Dict[str, Any] = super().get_context_data(**kwargs)
        add_integrations_context(context)
        add_integrations_open_graph_context(context, self.request)
        add_google_analytics_context(context)
        return context
@has_request_variables
def integration_doc(request: HttpRequest, integration_name: str = REQ()) -> HttpResponse:
    """Render a single integration's markdown documentation for the AJAX
    documentation widget; 404 for non-AJAX requests or unknown names."""
    if not request.is_ajax():
        return HttpResponseNotFound()
    try:
        integration = INTEGRATIONS[integration_name]
    except KeyError:
        return HttpResponseNotFound()

    context: Dict[str, Any] = {}
    add_api_uri_context(context, request)
    context["integration_name"] = integration.name
    context["integration_display_name"] = integration.display_name
    context["recommended_stream_name"] = integration.stream_name
    if isinstance(integration, WebhookIntegration):
        # strip the "api/" prefix from the webhook URL
        context["integration_url"] = integration.url[3:]
    if isinstance(integration, HubotIntegration):
        context["hubot_docs_url"] = integration.hubot_docs_url

    return HttpResponse(render_markdown_path(integration.doc, context))
| |
#!/usr/bin/python3
import argparse
import json
import os
import subprocess
import sys
from copy import deepcopy
from joblib import Parallel, delayed
# Prefix prepended to experiment names submitted to the RTT service.
rtt_prefix = 'ctr_seed_1fe40505e131963c_'
# def confs
# Common skeleton for every generated config.
# NOTE(review): prepare_cfg later writes 'tv_size'/'tv_count' (underscores),
# leaving these dashed placeholder keys untouched -- confirm which spelling
# the generator binary actually reads.
config_base = {
    'notes': 'generated by generator.py',
    'seed': '1fe40505e131963c',
    'tv-size': None,
    'tv-count': None
}
# Input-stream descriptions used as plaintext/key/iv sources below.
counter_stream = {
    'type': 'counter'
}
hw_stream = {
    'type': 'hw_counter',
    'hw': 4
}
random_stream = {
    'type': 'pcg32_stream'
}
false_stream = {
    'type': 'false_stream'
}
rnd_plt_ctx_stream = {
    'type': 'rnd_plt_ctx_stream',
    'source': None
}
sac_stream = {
    'type': 'sac'
}
# Plaintext source shared by all templates (alias of counter_stream).
plaintext_target_stream = counter_stream
class FunArgs:
    """Parameters of a tested function: block size, key size, optional IV
    size (all in bytes) and the tuple of round counts to test."""

    def __init__(self, block_size, key_size, iv_size=None, rounds=()):
        self.block_size, self.key_size = block_size, key_size
        self.iv_size, self.rounds = iv_size, rounds
# used funs in batch
# NOTE(review): intentionally left empty? prepare_cfg handles the
# 'stream_cipher' project, but no stream ciphers are configured here.
stream_cipher_funs = { }
# Template config for a round-reduced stream cipher (filled by prepare_cfg).
stream_cipher_default = {
    'type': 'stream_cipher',
    'generator': 'pcg32',
    'algorithm': None,
    'round': None,
    'block_size': None,
    'plaintext': plaintext_target_stream,
    'key_size': None,
    'key': random_stream,
    'iv_size': None,
    'iv': false_stream
}
# Hash functions: FunArgs(output size in bytes, -, -, round counts to test).
hash_funs = {
    'BLAKE': FunArgs(32, None, None, (0, 1, 2, 3)),
    'Grostl': FunArgs(32, None, None, (1, 2, 3, 4)),
    'JH': FunArgs(32, None, None, (5, 6, 7, 8, 9)),
    'Keccak': FunArgs(32, None, None, (1, 2, 3, 4)),
    'MD6': FunArgs(32, None, None, (5, 6, 7, 8, 9, 10)),
    'Skein': FunArgs(32, None, None, (1, 2, 3, 4)),
    'Gost': FunArgs(32, None, None, (1, 2, 3)),
    'MD5': FunArgs(16, None, None, (6, 7, 8, 9, 10, 11)),
    'RIPEMD160': FunArgs(20, None, None, (7, 8, 9, 10)),
    'SHA1': FunArgs(20, None, None, (11, 12, 13, 14)),
    'SHA2': FunArgs(32, None, None, (5, 6, 7)),
    'Tiger': FunArgs(24, None, None, (1, 2)),
    'Whirlpool': FunArgs(64, None, None, (2, 4)),
}
# Template config for a round-reduced hash function (filled by prepare_cfg).
hash_default = {
    'type': 'hash',
    'generator': 'pcg32',
    'algorithm': None,
    'round': None,
    'hash_size': None,
    'input_size': None,
    'source': plaintext_target_stream
}
# Block ciphers: FunArgs(block size, key size, -, round counts to test).
block_funs = {
    'AES': FunArgs(16, 16, None, (1, 2, 3, 4)),
    'BLOWFISH': FunArgs(8, 32, None, (1, 2, 3, 4)),
    'MARS': FunArgs(16, 16, None, (0, 1)),
    'TWOFISH': FunArgs(16, 16, None, (1, 2, 3, 4)),
    'SERPENT': FunArgs(16, 16, None, (1, 2, 3, 4)),
    'RC6': FunArgs(16, 16, None, (2, 3, 4, 5)),
    'SIMON': FunArgs(16, 16, None, (13, 14, 15, 16, 17)),
    'SPECK': FunArgs(16, 16, None, (6, 7, 8, 9)),
    'SINGLE-DES': FunArgs(8, 7, None, (3, 4, 5, 6)),
    'TRIPLE-DES': FunArgs(8, 21, None, (1, 2, 3)),
    'TEA': FunArgs(8, 16, None, (2, 3, 4, 5)),
    'GOST': FunArgs(8, 32, None, (6, 7, 8, 9)),
    'ARIA': FunArgs(16, 16, None, (1, 2, 3)),
    'CAMELLIA': FunArgs(16, 16, None, (1, 2, 3, 4)),
    'CAST': FunArgs(8, 16, None, (1, 2, 3, 4, 5)),
    'IDEA': FunArgs(8, 16, None, (1, 2, 3)),
    'SEED': FunArgs(16, 16, None, (1, 2, 3, 4)),
    'KASUMI' : FunArgs(8, 16, None, (1, 2, 3, 4, 5)),
    'MISTY1' : FunArgs(8, 16, None, (1, 2, 3)),
    'KUZNYECHIK' : FunArgs(16, 32, None, (1, 2, 3)),
    'NOEKEON' : FunArgs(16, 16, None, (1, 2, 3, 4)),
    'SHACAL2' : FunArgs(32, 64, None, (2, 3, 4, 5, 6, 7)),
    'XTEA' : FunArgs(8, 16, None, (1, 2, 3, 4, 5)),
}
# Template config for a round-reduced block cipher (filled by prepare_cfg).
block_default = {
    'type': 'block',
    'init_frequency': 'only_once',
    'algorithm': None,
    'round': None,
    'block_size': 16,
    'plaintext': plaintext_target_stream,
    'key_size': 16,
    'key': random_stream,
    'iv_size': 16,
    'iv': false_stream
}
def prepare_cfg(project, fun, rounds, tv_size, tv_num):
    """Write the generator JSON config for one function/round setup.

    Args:
        project: 'stream_cipher', 'hash', 'block'; anything else falls back
                 to the plain PRNG stream.
        fun: algorithm name (key into the *_funs tables)
        rounds: number of rounds the function is reduced to
        tv_size: test-vector size in bytes
        tv_num: number of test vectors
    Returns:
        Name of the written config file.
    """
    cfg_name = '{}_r{:02d}_b{}.json'.format(fun, rounds, tv_size)
    bin_name = '{}_r{:02d}_b{}.bin'.format(fun, rounds, tv_size)
    current_cfg = deepcopy(config_base)
    # NOTE(review): config_base declares dashed 'tv-size'/'tv-count' keys
    # that remain None next to these underscored ones -- confirm which
    # spelling the generator binary actually reads.
    current_cfg['tv_size'] = tv_size
    current_cfg['tv_count'] = tv_num
    current_cfg['file_name'] = bin_name
    if project == "stream_cipher":
        stream = deepcopy(stream_cipher_default)
        stream['algorithm'] = fun
        stream['round'] = rounds
        stream['block_size'] = stream_cipher_funs[fun].block_size
        stream['key_size'] = stream_cipher_funs[fun].key_size
        stream['iv_size'] = stream_cipher_funs[fun].iv_size
    elif project == "hash":
        stream = deepcopy(hash_default)
        stream['algorithm'] = fun
        stream['round'] = rounds
        # input size equals the hash output size in this batch
        stream['hash_size'] = hash_funs[fun].block_size
        stream['input_size'] = hash_funs[fun].block_size
    elif project == "block":
        stream = deepcopy(block_default)
        stream['algorithm'] = fun
        stream['round'] = rounds
        stream['block_size'] = block_funs[fun].block_size
        stream['key_size'] = block_funs[fun].key_size
    else:  # rnd
        stream = deepcopy(random_stream)
        stream['algorithm'] = fun
        stream['round'] = 0
        stream['block_size'] = 16
    current_cfg['stream'] = stream
    # 'with' closes the file; the old explicit f.close() was redundant
    with open(cfg_name, 'w') as f:
        f.write(json.dumps(current_cfg))
    return cfg_name
def _run_shell(cmd):
    """Echo and run *cmd* through the shell, waiting for completion."""
    print("Executing: " + cmd)
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    process.wait()


def run_single(args):
    """Generate data for one setup, submit it to RTT, and clean up.

    Args:
        args: [project, fun, rounds, tv_size, data, num_tv, generator_binary]
    Raises:
        SystemExit if less than 1 MB of data was requested.
    """
    project, fun, rounds, tv_size, data, num_tv, generator_binary = args
    cfg_name = prepare_cfg(project, fun, rounds, tv_size, num_tv)
    _run_shell('{} -c={}'.format(generator_binary, cfg_name))
    binfile_name = cfg_name.split('.')[0] + '.bin'
    # Pick the RTT battery config matching the amount of generated data.
    if 1000000 <= data < 10000000:
        cfg = '1MB.json'
    elif 10000000 <= data < 100000000:
        cfg = '10MB.json'
    elif 100000000 <= data < 1000000000:
        cfg = '100MB.json'
    elif 1000000000 <= data < 8000000000:
        cfg = '1000MB.json'
    elif 8000000000 <= data:
        cfg = 'default-8GB.json'
    else:
        # sys.exit raises SystemExit; the old code used the interactive
        # 'exit' builtin followed by an unreachable 'return None'.
        sys.exit("Too small data for testing.")
    _run_shell('submit_experiment --all_batteries -c {0} -f {1} -n {2}{1}'.format(
        cfg, binfile_name, rtt_prefix))
    _run_shell('rm {0}'.format(binfile_name))
def single_setup_generator(generator_binary, data=None, num_tv=None):
    """Yield run_single() argument lists covering every configured function
    and round count. Exactly one of *data* (bytes) or *num_tv* (vector
    count) should be set; the other is derived from the block size."""
    def expand(funs, project):
        for fun in funs:
            fun_args = funs[fun]
            for rounds_count in fun_args.rounds:
                if data:
                    total_bytes = data
                    vector_count = data // fun_args.block_size
                else:
                    total_bytes = num_tv * fun_args.block_size
                    vector_count = num_tv
                yield [project, fun, rounds_count, fun_args.block_size,
                       total_bytes, vector_count, generator_binary]

    yield from expand(stream_cipher_funs, 'stream_cipher')
    yield from expand(hash_funs, 'hash')
    yield from expand(block_funs, 'block')
def run_all(binary, data=None, num_tv=None):
    """Run every configured experiment, one joblib worker per setup."""
    Parallel(n_jobs=-1)(delayed(run_single)(single_setup)
                        for single_setup in single_setup_generator(binary, data, num_tv))
def get_tv_size(main_args):
    """Return the test-vector size in bytes for the parsed CLI arguments.

    Falls back to 16 for any unrecognized stream type (e.g. the PRNG
    default).
    """
    if main_args.stream_type == "stream_cipher":
        return stream_cipher_funs[main_args.fun].block_size
    if main_args.stream_type == "hash":
        return hash_funs[main_args.fun].block_size
    if main_args.stream_type == "block":
        # Bug fix: this previously read 'block[...]' -- a NameError; the
        # lookup table is named block_funs.
        return block_funs[main_args.fun].block_size
    return 16
def main_args_to_fnc(main_args):
    """Write the single-run config file described by the parsed CLI args."""
    tv_size = get_tv_size(main_args)
    if main_args.data:
        # 'num' is a byte count; convert it into a vector count
        tv_num = main_args.num // tv_size
    else:
        tv_num = main_args.num
    prepare_cfg(main_args.stream_type, main_args.fun, main_args.rounds,
                tv_size, tv_num)
def main():
    """Parse CLI arguments and either run the full batch (--all) or prepare
    and execute a single configuration."""
    parser = argparse.ArgumentParser()
    single_execution_args(parser)
    parser.add_argument(
        '-a',
        '--all',
        action='store_true',
        default=False,
        help='Whether we should execute all experiments, or just single one'
    )
    parser.add_argument(
        '-p',
        '--path_to_generator_binary',
        type=str,
        default='./generator',
        help='Path to the binary of generator (or newly called eacirc_streams binary)'
    )
    # exactly one of --num_tv / --data decides how the positional 'num'
    # argument is interpreted (vector count vs. byte count)
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        '-n',
        '--num_tv',
        action='store_true',
        default=False,
        help='Number of test vectors generated'
    )
    group.add_argument(
        '-d',
        '--data',
        action='store_true',
        default=False,
        help='Number of generated bytes'
    )
    parser.add_argument(
        'num',
        metavar='N',
        type=int,
        default=1000000,
        help='the number of TV or data')
    main_args, _ = parser.parse_known_args()
    if main_args.num_tv == main_args.data:
        # both False (neither given) or both True is ambiguous
        sys.exit('Choose EITHER --num_tv or --data')
    if main_args.all:
        print('Running all experiments')
        data = main_args.num if main_args.data else None
        num_tv = main_args.num if main_args.num_tv else None
        run_all(main_args.path_to_generator_binary, data=data, num_tv=num_tv)
    else:
        single_execution_parse(main_args)
        main_args_to_fnc(main_args)
        # the generator picks up the config file written above
        os.system(main_args.path_to_generator_binary)
def single_execution_parse(main_args):
    """Infer or validate main_args.stream_type against the function tables.

    When -s was omitted, auto-detect the stream type from the function
    name; otherwise verify the supplied type matches the table the
    function belongs to, exiting on mismatch.
    """
    if main_args.stream_type == '':
        if main_args.fun in stream_cipher_funs:
            main_args.stream_type = 'stream_cipher'
        elif main_args.fun in hash_funs:
            main_args.stream_type = 'hash'
        elif main_args.fun in block_funs:
            main_args.stream_type = 'block'
        else:
            sys.exit('Unknown function and unspecified stream. Set -s! Function was: ' + main_args.fun)
    else:
        # Bug fix: compare against 'stream_cipher' (underscore) -- the
        # canonical internal value used by the auto-detect branch above and
        # by prepare_cfg; the old hyphenated 'stream-cipher' could never
        # match and would always report a mismatch.
        if main_args.fun in stream_cipher_funs and main_args.stream_type != 'stream_cipher':
            sys.exit('Mismatch arguments: function '
                     + main_args.fun
                     + ' is from stream-cipher, your stream_type is '
                     + main_args.stream_type)
        elif main_args.fun in hash_funs and main_args.stream_type != 'hash':
            sys.exit('Mismatch arguments: function '
                     + main_args.fun
                     + ' is from hash, your stream_type is '
                     + main_args.stream_type)
        elif main_args.fun in block_funs and main_args.stream_type != 'block':
            sys.exit('Mismatch arguments: function '
                     + main_args.fun
                     + ' is from block, your stream_type is '
                     + main_args.stream_type)
    print('generator.py: preparing config for function '
          + main_args.fun
          + ' from '
          + main_args.stream_type
          + ' reduced to '
          + str(main_args.rounds)
          + ' rounds.')
def single_execution_args(parser):
    """Register the CLI options used by a single (non --all) execution."""
    options = (
        ('-s', '--stream_type', str, '',
         'Stream: for AES, DES... = block, Salsa... = stream-cipher, Keccak... = hash'),
        ('-f', '--fun', str, 'PRNG',
         'Function used for data generation'),
        ('-r', '--rounds', int, 1,
         'Function is reduced to --rounds'),
    )
    for short_flag, long_flag, arg_type, default, help_text in options:
        parser.add_argument(short_flag, long_flag, type=arg_type,
                            default=default, help=help_text)
if __name__ == '__main__':
main()
| |
from threading import Thread, Lock
from Queue import Queue
import traceback
import zookeeper
import json
import time
import sys
from .chroot import ChrootMirror
from .node import Node
from .js import JsNode
from .zk import ZooKeeperException
from .zk import NodeExistsException
from .zk import NoNodeException
from .zk import fix_path
from .zk import describe_state
from .zk import EXPIRED_SESSION_STATE
from .zk import CONNECTED_STATE
from .zk import CHANGED_EVENT
from .zk import CHILD_EVENT
from .zk import CREATED_EVENT
from .zk import DELETED_EVENT
from .zk import SESSION_EVENT
from .zk import NONODE
from .zk import ALL_ACL
from .zk import OK
from .zk import silence
DEBUG = False


def debug(*args):
    """Write *args*, space-joined, to stderr when module-level DEBUG is on.

    The old 'global DEBUG' declaration was needless: the function only
    reads the flag, never rebinds it.
    """
    if not DEBUG:
        return
    sys.stderr.write(' '.join(map(str, args)) + "\n")
class Mirror(object):
  def __init__(self):
    """Start the async worker thread and initialize connection state."""
    silence()
    # work queue consumed by the daemon worker thread (run_tasks)
    self.__q = Queue()
    self.__async = Thread(target=run_tasks, args=(self.__q,))
    self.__async.daemon = True
    self.__async.start()
    # ZooKeeper handle; -1 means "never connected"
    self.__zk = -1
    self.__state = 0
    # path -> Node cache, guarded by __nodelck
    self.__nodes = {}
    self.__nodelck = Lock()
    self.__socklck = Lock()
    # paths known not to exist yet (watched via aexists), guarded by __misslck
    self.__missing = set()
    self.__misslck = Lock()
    # time of disconnection; None while connected
    self.__disconnected = time.time()
    self.__state_cbs = {}
    # List of actions that failed while we were not connected
    self.__pending = []
  def connstr(self):
    """Return the server connection string, or None if connect() has not
    (successfully) been called.
    """
    try:
      return self.__initstr
    except AttributeError:
      return None
  def connect(self, *servers):
    """Connect to the given ZooKeeper servers and return self.

    Each server may be a bare host string (port defaults to 2181) or a
    (host, port) pair; with no arguments, localhost is used.
    """
    if not servers:
      servers = ('localhost',)
    servers = list(servers)
    for idx, val in enumerate(servers):
      if isinstance(val, basestring):
        servers[idx] = (val, 2181)
    self.__initstr = ','.join('%s:%d' % pair for pair in servers)
    try:
      self._reconnect()
    except ZooKeeperException:
      # This can happen if a server doesn't have a DNS entry, or it seems that
      # it can happen for other reasons, but either way we want to act as
      # though we aren't configured at all in this case
      del self.__initstr
      raise
    return self
def time_disconnected(self):
"""Return how long we've been disconnected. Returns None if we are
currently connected.
"""
try:
return time.time() - self.__disconnected
except TypeError:
# self.__disconnected is None
return None
  def is_connected(self):
    """Returns True if we are currently connected to ZooKeeper, False if not.
    """
    # __disconnected holds None while connected, else the disconnect time
    return not self.__disconnected
  @fix_path
  def get(self, path):
    """Return the (cached) Node for *path*, creating it and registering its
    watchers on first access.
    """
    try:
      return self.__nodes[path]
    except KeyError:
      # double-checked: re-test under the lock before creating the node
      with self.__nodelck:
        try:
          node = self.__nodes[path]
        except KeyError:
          node = Node(path, self)
          self.__nodes[path] = node
          self._setup(node)
      return node
  def get_json(self, path):
    """Return the node at *path* wrapped for JSON (de)serialization."""
    return JsNode(self.get(path))
  @fix_path
  def create(self, path, value='', flags=0):
    """Create the node at *path* holding *value* and return its Node.

    With no flags, creation goes through the cached Node. With flags set,
    the raw zookeeper.create is used and the server-returned path (which
    can differ from the requested one, e.g. for sequence flags) is fetched.
    """
    if not flags:
      node = self.get(path)
      node.create(value)
      return node
    path = self._use_socket(lambda z:
        zookeeper.create(z, path, value, ALL_ACL, flags))
    return self.get(path)
@fix_path
def create_r(self, path, value=''):
"""Create the entire path up to this node, and then create this node"""
pre = path.rsplit('/',1)[0]
if pre:
self.ensure_exists(pre)
self.create(path, value)
  def create_json(self, path, value, flags=0):
    """Create *path* holding *value* serialized as JSON; returns a JsNode."""
    return JsNode(self.create(path, json.dumps(value), flags))
  def create_r_json(self, path, value):
    """Recursively create *path* holding *value* serialized as JSON.

    NOTE(review): this wraps create_r's return value in a JsNode, but
    create_r has no return statement as written -- confirm callers can
    cope with JsNode(None), or fix create_r to return the node.
    """
    return JsNode(self.create_r(path, json.dumps(value)))
  @fix_path
  def ensure_exists(self, path, value=''):
    """Make sure every node, up to the given path, exists in zookeeper.
    Returns the Node for *path*.
    """
    node = self.get(path)
    try:
      # short timeout: we only need to learn whether the node exists now
      node.value(timeout=0.1)
    except NoNodeException:
      try:
        node.create(value)
      except NodeExistsException:
        # No problem; it exists
        pass
      except NoNodeException:
        # the parent doesn't exist: create it recursively, then retry
        self.ensure_exists(path.rsplit('/',1)[0])
        self.ensure_exists(path)
    return node
  def addStateWatcher(self, key, fn):
    """Add a function that will be called when our connection state changes.
    This function will be called with a zookeeper state variable (an int with
    one of the values of
    zookeeper.{AUTH_FAILED,EXPIRED_SESSION,CONNECTING,ASSOCIATING,CONNECTED}_STATE
    of the value 0 (shouldn't happen, but it does)
    """
    def catcher(val):
      # never let a user callback's exception escape into the worker thread
      try:
        fn(val)
      except:
        print 'state watcher callback threw this:'
        traceback.print_exc()
    self.__state_cbs[key] = catcher
def delStateWatcher(self, key):
"""Remove the state watcher that was assigned at the given key.
"""
try: del self.__state_cbs[key]
except KeyError: pass
  @fix_path
  def chroot(self, path):
    """Get a version of this mirror whose root has been changed to the given
    path. All create and get requests will have the given path prepended to
    them, and returned nodes will similarly have their path attributes
    changed.

    Returns a ChrootMirror wrapping this mirror.
    """
    return ChrootMirror(path, self)
  def _run_async(self, fn):
    """Queue thunk *fn* to run on the worker thread.

    Functions that wait on results from zookeeper cannot be usefully called
    from within zookeeper callbacks, as the zookeeper receive socket is
    blocked until the callback returns. Any callback that waits on zookeeper
    data (calls to zookeeper.create, delete, exists, get, get_children wait,
    but the equivalent zookeeper.acreate, etc calls do not wait) should
    instead hand a thunk to this, so that the work can be done outside of the
    zookeeper callback.
    """
    self.__q.put(fn)
def _events(self, zk, event, state, path):
if event == CHANGED_EVENT:
debug('_events: adding CHANGE watcher for', path)
self._aget(path)
elif event == CHILD_EVENT:
debug('_events: adding CHILDREN watcher for', path)
self._aget_children(path)
elif event == CREATED_EVENT:
debug('_events: adding CHANGE and CHILDREN watchers for', path)
del_missing(self.__misslck, self.__missing, path)
self._aget(path)
self._aget_children(path)
elif event == DELETED_EVENT:
try:
node = self.__nodes[path]
node._delete()
debug('_events: adding EXISTS watcher for', path)
self._aexists(path)
except KeyError:
pass
elif event == SESSION_EVENT:
if zk != self.__zk:
return
for fn in self.__state_cbs.values():
self._run_async(lambda: fn(state))
if state == CONNECTED_STATE:
self.__disconnected = None
elif self.__disconnected is None:
self.__disconnected = time.time()
if state == EXPIRED_SESSION_STATE:
self._reconnect()
elif state == CONNECTED_STATE:
if self.__state == EXPIRED_SESSION_STATE:
# We just reconnected from a totally dead connection, so we need to
# setup everything again
for node in self.__nodes.values():
self._setup(node)
else:
# Happy reconnection; just do the pending stuff
while self.__pending:
self.__pending.pop()()
self.__state = state
debug('_events: My state is now', describe_state(self.__state))
def _reconnect(self):
    """Replace the zookeeper handle after an expired session."""
    oldzk = self.__zk
    # Open the new handle before closing the old one; _events compares the
    # handle it receives against self.__zk to drop stale session events.
    self.__zk = zookeeper.init(self.__initstr, self._events)
    if oldzk >= 0:
        zookeeper.close(oldzk)
def _setup(self, node):
    """Arm CHANGE and CHILDREN watches for a mirrored node."""
    node_path = node.path
    debug('_setup: adding CHANGE and CHILDREN watchers for', node_path)
    self._aget(node_path)
    self._aget_children(node_path)
def _aget(self, path):
    # Asynchronously fetch the node data; the completion is handled by
    # _get_cb and _events is installed for subsequent change notifications.
    self._try_zoo(
        lambda: self._use_socket(
            lambda z: zookeeper.aget(z, path, self._events, self._get_cb(path))))
def _aget_children(self, path):
    # Asynchronously fetch the child list; the completion is handled by
    # _ls_cb and _events is installed for subsequent child notifications.
    self._try_zoo(
        lambda: self._use_socket(
            lambda z: zookeeper.aget_children(z, path, self._events,
                                              self._ls_cb(path))))
def _aexists(self, path):
    """Probe a currently-missing path, arming at most one creation watch."""
    # add_missing returns True only for the first caller per path, so only
    # one EXISTS watcher is ever registered for a given missing path.
    if add_missing(self.__misslck, self.__missing, path):
        debug('_aexists is hooking in a callback on existence')
        watcher = self._events
    else:
        debug('_aexists is NOT hooking in a callback on existence')
        watcher = None
    self._try_zoo(
        lambda: self._use_socket(
            lambda z: zookeeper.aexists(z, path, watcher, self._exist_cb(path))))
def _try_zoo(self, action):
    """Run a zookeeper call now, or queue it for retry after reconnection."""
    try:
        action()
    except (SystemError, ZooKeeperException):
        # self.__zk must be really broken; we'll throw this in pending until we
        # get a new connection
        self.__pending.append(action)
def _get_cb(self, path):
    """Build the completion callback for an aget on path."""
    def on_get(_zk, status, value, meta):
        apply_value = lambda node: node._val(value, meta)
        retry = lambda: self._aget(path)
        self._update_node(path, status, apply_value, retry)
    return on_get
def _ls_cb(self, path):
    """Build the completion callback for an aget_children on path."""
    def on_children(_zk, status, children):
        apply_children = lambda node: node._children(children)
        retry = lambda: self._aget_children(path)
        self._update_node(path, status, apply_children, retry)
    return on_children
def _exist_cb(self, path):
    """Build the completion callback for an aexists probe on path."""
    def cb(_zk, status, meta):
        if status == OK:
            # It started existing while our message was in transit; set up the
            # node's data and allow watch callbacks to occur on future aexist
            # calls
            del_missing(self.__misslck, self.__missing, path)
            self._aget(path)
            self._aget_children(path)
        elif status == NONODE:
            # This is what we expect; our watcher is set up, so we're happy
            pass
        else:
            # Something went wrong communication-wise (disconnect, timeout,
            # whatever). try again once re-connected. We need to remove the path
            # from __missing so that a future aexists call can put the watcher
            # back on
            del_missing(self.__misslck, self.__missing, path)
            self.__pending.append(lambda: self._aexists(path))
    return cb
def _update_node(self, path, status, node_action, on_servfail):
    """Apply an async-callback result to the mirrored node at path.

    node_action(node) is applied on OK; on_servfail is queued for retry
    after reconnection for any failure other than NONODE.
    """
    try:
        node = self.__nodes[path]
    except KeyError:
        # Path is no longer mirrored; drop the stale callback.
        return
    if status == OK:
        # This is the result of zookeeper returning good data, so _events has a
        # good watch established looking for changes to path
        node_action(node)
    elif status == NONODE:
        # Tried to do a get on the path, but it's gone, so _event's watch
        # isn't any good. we need to set one for once it exists
        node._delete()
        debug('_update_node: adding EXISTS watcher for', path)
        self._aexists(path)
    else:
        # Something (I assume connection-related) made the request fail. We'll
        # try again once we reconnect
        self.__pending.append(on_servfail)
def _use_socket(self, action):
    """Run action(zk_handle) while holding the socket lock."""
    with self.__socklck:
        return action(self.__zk)
def close(self):
    """Shut down the async worker thread and the zookeeper handle."""
    def quit_async():
        # Raises SystemExit inside the worker thread; run_tasks only
        # catches Exception, so the worker loop ends.
        sys.exit(0)
    self._run_async(quit_async)
    print 'killed async thread'
    self.__async.join()
    print 'async thread done'
    if self.__zk >= 0:
        zookeeper.close(self.__zk)
        self.__zk = -1
    try:
        del self.__initstr
    except AttributeError:
        # Already deleted by an earlier close(); close() must be re-entrant
        # because __del__ calls it too.
        pass
    print 'zookeeper closed'
def __del__(self):
    # Best-effort cleanup; close() guards against being called twice.
    self.close()
def run_tasks(queue):
    """Worker loop: execute queued thunks until one raises SystemExit.

    Ordinary exceptions are logged and the loop continues; SystemExit
    (queued by close()) is not an Exception subclass, so it escapes the
    inner handler and terminates the thread via the finally clause.
    """
    try:
        while True:
            function = queue.get()
            try:
                function()
            except Exception:
                print 'zkmirror asynchronous task failed like this:'
                traceback.print_exc()
    finally:
        print 'run_tasks thread shutting down'
def add_missing(lock, missing, path):
    """Record path in the missing-set under lock.

    Returns True when the path was newly added, False when it was
    already present.
    """
    with lock:
        already_there = path in missing
        if not already_there:
            missing.add(path)
        return not already_there
def del_missing(lock, missing, path):
    """Remove path from the missing-set under lock; absent paths are ignored.

    Uses set.discard, the standard-library idiom for the previous
    try/remove/except-KeyError pattern.
    """
    with lock:
        missing.discard(path)
| |
import sys
import json
import logging
from kqml import KQMLList, KQMLPerformative
from indra.assemblers.pysb import assembler as pysb_assembler
from indra.assemblers.pysb import PysbAssembler
from indra.statements import stmts_from_json, Activation, Inhibition, \
ActiveForm, ModCondition, Agent
from indra.sources.trips import processor as trips_processor
from bioagents.tra import tra
from bioagents import Bioagent, BioagentException
# This version of logging is coming from tra...
# Root logging is configured once at import time; the module-level logger
# below is used for all TRA diagnostics.
logging.basicConfig(format='%(levelname)s: %(name)s - %(message)s',
                    level=logging.INFO)
logger = logging.getLogger('TRA')
def get_bool_arg(arg_name, kwargs, default=True):
    "Get the boolean value of an argument from either argv or kwarg."
    flag = '--%s' % arg_name
    flag_in_argv = 'argv' in kwargs and flag in kwargs['argv']
    overridden = arg_name in kwargs and kwargs[arg_name] is not default
    # Consume the keyword so it is not forwarded to other constructors.
    kwargs.pop(arg_name, None)
    return (not default) if (flag_in_argv or overridden) else default
class TRA_Module(Bioagent):
    """Bioagent exposing the Temporal Reasoning Agent (TRA) tasks over KQML."""
    name = "TRA"
    tasks = ['SATISFIES-PATTERN', 'MODEL-COMPARE-CONDITIONS']

    def __init__(self, **kwargs):
        # Pop the Kappa flags out of kwargs before the Bioagent base class
        # sees the remaining arguments.
        use_kappa = get_bool_arg('use_kappa', kwargs, default=False)
        use_kappa_rest = get_bool_arg('use_kappa_rest', kwargs, default=False)
        # Instantiate a singleton TRA agent
        if not use_kappa:
            logger.warning('You have chosen to not use Kappa.')
        self.tra = tra.TRA(use_kappa, use_kappa_rest)
        super(TRA_Module, self).__init__(**kwargs)
        return

    def respond_satisfies_pattern(self, content):
        """Return response content to satisfies-pattern request."""
        model_indra_clj = content.get('model')
        pattern_lst = content.get('pattern')
        conditions_lst = content.get('conditions')
        # Each decoding stage maps its specific errors to a distinct
        # FAILURE reason for the requesting agent.
        try:
            stmts = decode_indra_stmts(model_indra_clj)
            model = assemble_model(stmts)
        except Exception as e:
            logger.exception(e)
            reply_content = self.make_failure('INVALID_MODEL')
            return reply_content
        try:
            pattern = get_temporal_pattern(pattern_lst)
        except tra.InvalidTimeIntervalError as e:
            logger.exception(e)
            reply_content = self.make_failure('INVALID_TIME_LIMIT')
            return reply_content
        except tra.InvalidTemporalPatternError as e:
            logger.exception(e)
            reply_content = self.make_failure('INVALID_PATTERN')
            return reply_content
        except tra.InvalidMolecularEntityError as e:
            logger.exception(e)
            reply_content = self.make_failure('INVALID_ENTITY_DESCRIPTION')
            return reply_content
        if conditions_lst is None:
            conditions = None
        else:
            try:
                conditions = []
                for condition_lst in conditions_lst:
                    condition = get_molecular_condition(condition_lst)
                    conditions.append(condition)
            except Exception as e:
                logger.exception(e)
                reply_content = self.make_failure('INVALID_CONDITIONS')
                return reply_content
        try:
            sat_rate, num_sim, suggestion_kqml, suggestion_obj, fig_path = \
                self.tra.check_property(model, pattern, conditions)
        except tra.MissingMonomerError as e:
            logger.exception(e)
            reply_content = self.make_failure('MODEL_MISSING_MONOMER')
            if e.monomer:
                reply_content.set('entity', self.make_cljson(e.monomer))
            return reply_content
        except tra.MissingMonomerSiteError as e:
            logger.exception(e)
            reply_content = self.make_failure('MODEL_MISSING_MONOMER_SITE')
            return reply_content
        except tra.SimulatorError as e:
            logger.exception(e)
            reply_content = self.make_failure('KAPPA_FAILURE')
            return reply_content
        except Exception as e:
            logger.exception(e)
            reply_content = self.make_failure('INVALID_PATTERN')
            return reply_content
        self.send_display_figure(fig_path)
        reply = KQMLList('SUCCESS')
        # NOTE: 'content' below shadows the request parameter; from here on
        # it is the reply payload being built.
        content = KQMLList()
        content.set('satisfies-rate', '%.1f' % sat_rate)
        content.set('num-sim', '%d' % num_sim)
        if suggestion_kqml:
            sugg = KQMLList.from_string(suggestion_kqml)
            content.set('suggestion', sugg)
        reply.set('content', content)
        return reply

    def respond_model_compare_conditions(self, content):
        """Return response content to model-compare-conditions request."""
        condition_agent_clj = content.get('agent')
        target_agent_clj = content.get('affected')
        model_indra_clj = content.get('model')
        up_dn = content.gets('up-dn')
        try:
            stmts = decode_indra_stmts(model_indra_clj)
            model = assemble_model(stmts)
        except Exception as e:
            logger.exception(e)
            reply_content = self.make_failure('INVALID_MODEL')
            return reply_content
        try:
            condition_agent = self.get_agent(condition_agent_clj)
            target_agent = self.get_agent(target_agent_clj)
        except Exception as e:
            logger.exception(e)
            reply_content = self.make_failure('INVALID_PATTERN')
            return reply_content
        try:
            # Polarity defaults to 'dn' when the request omits it.
            up_dn = 'dn' if up_dn is None else up_dn
            logger.info('Checking %s against %s with polarity %s' %
                        (condition_agent, target_agent, up_dn))
            result, fig_path = \
                self.tra.compare_conditions(model, condition_agent,
                                            target_agent, up_dn)
        except tra.MissingMonomerError as e:
            logger.exception(e)
            reply_content = self.make_failure('MODEL_MISSING_MONOMER')
            if e.monomer:
                reply_content.set('entity', self.make_cljson(e.monomer))
            return reply_content
        except tra.MissingMonomerSiteError as e:
            logger.exception(e)
            reply_content = self.make_failure('MODEL_MISSING_MONOMER_SITE')
            return reply_content
        except tra.SimulatorError as e:
            logger.exception(e)
            reply_content = self.make_failure('KAPPA_FAILURE')
            return reply_content
        self.send_display_figure(fig_path)
        reply = KQMLList('SUCCESS')
        reply.set('result', result)
        return reply

    def send_display_figure(self, path):
        """Tell the display agent to show the simulation figure at path."""
        msg = KQMLPerformative('tell')
        content = KQMLList('display-image')
        content.set('type', 'simulation')
        content.sets('path', path)
        msg.set('content', content)
        self.send(msg)
def decode_indra_stmts(stmts_clj):
    """Decode CL-JSON encoded INDRA Statements via the Bioagent helper."""
    return TRA_Module.get_statement(stmts_clj)
def assemble_model(stmts):
    """Assemble INDRA Statements into a PySB model prepared for simulation.

    Initial conditions are adjusted heuristically: inhibited-but-never-
    activated agents and active agents with no upstream regulator start
    split between base and extended state; chemicals get high abundance;
    everything else starts in the base state only. Binding rate constants
    are scaled up at the end.
    """
    pa = PysbAssembler()
    pa.add_statements(stmts)
    model = pa.make_model(policies='one_step')
    pa.add_default_initial_conditions(100.0)
    # BUG FIX: the bare `except:` clauses below previously swallowed
    # SystemExit/KeyboardInterrupt as well; catch Exception instead while
    # keeping the deliberate best-effort fallbacks.
    try:
        targeted_agents = get_targeted_agents(stmts)
        no_upstream_active_agents = get_no_upstream_active_agents(stmts)
    except Exception:
        targeted_agents = []
        no_upstream_active_agents = []
    try:
        chemical_agents = get_chemical_agents(stmts)
    except Exception:
        chemical_agents = []
    for m in model.monomers:
        try:
            if m.name in targeted_agents or m.name in no_upstream_active_agents:
                pysb_assembler.set_base_initial_condition(
                    model, model.monomers[m.name], 50.0)
                pysb_assembler.set_extended_initial_condition(model, m, 50.0)
            elif m.name in chemical_agents:
                pysb_assembler.set_base_initial_condition(
                    model, model.monomers[m.name], 10000.0)
            else:
                pysb_assembler.set_extended_initial_condition(model, m, 0)
        except Exception:
            pysb_assembler.set_extended_initial_condition(model, m, 0)
    # Tweak parameters
    for param in model.parameters:
        if 'kf' in param.name and 'bind' in param.name:
            param.value = param.value * 100
    return model
def get_targeted_agents(stmts):
    """Return agents that are inhibited while not being activated by anything.
    """
    activated = set()
    inhibited = set()
    for stmt in stmts:
        if isinstance(stmt, Activation):
            activated.add(stmt.obj.name)
        elif isinstance(stmt, Inhibition):
            inhibited.add(stmt.obj.name)
    return list(inhibited - activated)
def get_no_upstream_active_agents(stmts):
    """Return agents that are active but there's nothing upstream.
    """
    active_names = set()
    regulated_names = set()
    for stmt in stmts:
        # Track which agents have any upstream regulation.
        if isinstance(stmt, Activation):
            regulated_names.add(stmt.obj.name)
        elif isinstance(stmt, ActiveForm):
            regulated_names.add(stmt.agent.name)
        # Track which agents appear in an active state anywhere.
        for agent in stmt.agent_list():
            if agent is None:
                continue
            if agent.activity is not None and agent.activity.is_active:
                active_names.add(agent.name)
    return list(active_names - regulated_names)
def get_chemical_agents(stmts):
    """Return monomer names of agents grounded to CHEBI or PUBCHEM."""
    chemicals = {
        pysb_assembler._n(agent.name)
        for stmt in stmts
        for agent in stmt.agent_list()
        if agent is not None and ('CHEBI' in agent.db_refs or
                                  'PUBCHEM' in agent.db_refs)
    }
    return list(chemicals)
def get_molecular_entity(lst: KQMLList) -> Agent:
    """Extract an INDRA Agent from a KQML entity description."""
    description_clj = lst.get('description')
    agent = TRA_Module.get_agent(description_clj)
    return agent
"""
def get_single_molecular_entity(description_str):
try:
tp = trips_processor.TripsProcessor(description_str)
terms = tp.tree.findall('TERM')
def find_complex(terms):
cplx = None
for term in terms:
term_type = term.find('type')
if term_type is not None and \
term_type.text == 'ONT::MACROMOLECULAR-COMPLEX':
cplx = term.attrib.get('id')
break
return cplx
cplx_id = find_complex(terms)
if not cplx_id:
term_id = terms[0].attrib['id']
logger.info('Using ID of term: %s' % term_id)
else:
logger.info('Using ID of complex: %s' % cplx_id)
term_id = cplx_id
agent = tp._get_agent_by_id(term_id, None)
return agent
except Exception as e:
raise tra.InvalidMolecularEntityError(e)
"""
def get_molecular_quantity(lst: KQMLList) -> tra.MolecularQuantity:
    """Build a tra.MolecularQuantity from a KQML description."""
    try:
        quant_type = lst.gets('type')
        value = lst.gets('value')
        # Only concentration quantities carry a unit.
        unit = lst.gets('unit') if quant_type == 'concentration' else None
        return tra.MolecularQuantity(quant_type, value, unit)
    except Exception as e:
        raise tra.InvalidMolecularQuantityError(e)
def get_molecular_quantity_ref(lst: KQMLList) -> tra.MolecularQuantityReference:
    """Build a tra.MolecularQuantityReference from a KQML description."""
    try:
        quant_type = lst.gets('type')
        entity = get_molecular_entity(lst.get('entity'))
        return tra.MolecularQuantityReference(quant_type, entity)
    except Exception as e:
        raise tra.InvalidMolecularQuantityRefError(e)
def get_time_interval(lst: KQMLList) -> tra.TimeInterval:
    """Build a tra.TimeInterval from a KQML description."""
    try:
        lower = lst.gets('lower-bound')
        upper = lst.gets('upper-bound')
        return tra.TimeInterval(lower, upper, lst.gets('unit'))
    except Exception as e:
        raise tra.InvalidTimeIntervalError(e)
def get_temporal_pattern(lst: KQMLList) -> tra.TemporalPattern:
    """Build a tra.TemporalPattern from a KQML pattern description."""
    pattern_type = lst.gets('type')
    entities = [get_molecular_entity(e)
                for e in (lst.get('entities') or [])]
    time_limit_lst = lst.get('time-limit')
    time_limit = (None if time_limit_lst is None
                  else get_time_interval(time_limit_lst))
    # TODO: handle more pattern-specific extra arguments
    value_lst = lst.get('value')
    value = None if value_lst is None else get_molecular_quantity(value_lst)
    return tra.TemporalPattern(pattern_type, entities, time_limit, value=value)
def get_molecular_condition(lst: KQMLList) -> tra.MolecularCondition:
    """Build a tra.MolecularCondition from a KQML description."""
    try:
        condition_type = lst.gets('type')
        quantity = get_molecular_quantity_ref(lst.get('quantity'))
        # 'exact' carries a structured quantity, 'multiple' a plain string;
        # other condition types have no value.
        if condition_type == 'exact':
            value = get_molecular_quantity(lst.get('value'))
        elif condition_type == 'multiple':
            value = lst.gets('value')
        else:
            value = None
        return tra.MolecularCondition(condition_type, quantity, value)
    except Exception as e:
        raise tra.InvalidMolecularConditionError(e)
class InvalidModelDescriptionError(BioagentException):
    """Raised when a model description cannot be interpreted."""
    pass
if __name__ == "__main__":
    # Start the TRA module, forwarding command-line arguments (e.g.
    # --use_kappa) to the Bioagent machinery.
    m = TRA_Module(argv=sys.argv[1:])
| |
"""
Reader that can sample data from a WattsUpMeter: https://www.wattsupmeters.com
API documents are here: https://www.wattsupmeters.com/secure/downloads/CommunicationsProtocol090824.pdf
basic usage is as follows
reader = WattsUpReader("/dev/tty.usbserial-A600KI7M")
reader.drain()
reader.start_recording()
#
# do a bunch of stuff that uses power
#
summary, detailed_results = reader.get_recording()
#
# summary is a Summary named-tuple
# detailed_results an array of Result named-tuples defined below
#
This also is runnable from the command line
python watts_up_reader.py "/dev/tty.usbserial-A600KI7M"
which will put you in an interactive session, that allows testing
of recording, a few other things and direct communication with the
device
the meter is finicky and may not respond if a connection is made
too quickly after it is plugged in
"""
from __future__ import print_function
__author__ = 'Chick Markley'
import serial
import readline
import time
import select
import collections
import argparse
class WattsUpReader(object):
    """Samples power data from a WattsUp meter over a serial port.

    Command strings of the form '#X,W,...;' are defined in the meter's
    communications protocol document (see the module docstring).
    """
    EXTERNAL_MODE = "E"
    INTERNAL_MODE = "I"
    FULL_HANDLING = 2

    # One parsed '#d' sample line from the meter.
    Result = collections.namedtuple("Result", ['time', 'watts', 'volts', 'milliamps'])
    # Any non-sample line returned by the meter.
    Response = collections.namedtuple("Response", ['time', 'message'])
    # Aggregate of one recording session.
    Summary = collections.namedtuple(
        "Summary", ["joules", "millicoulomb", "samples", "sampling_interval", "start_time"]
    )

    def __init__(self, port_name=None, verbose=False):
        """Open the serial port (from ctree config when not given) and
        immediately start recording."""
        if port_name is None:
            from ctree import CONFIG
            port_name = CONFIG.get('wattsup', 'port')
        self.port_name = port_name
        self.serial_port = serial.Serial(self.port_name, 115200)
        self.last_time = time.time()
        self.verbose = verbose
        self.t = []
        self.power = []
        self.returned_lines = 0
        # I don't think device supports anything smaller
        self.record_interval = 1
        self.start_recording()

    def reset(self):
        """Re-open the port and put the meter back into a known state."""
        if self.serial_port:
            self.serial_port.close()
            time.sleep(1)
        self.serial_port = serial.Serial(self.port_name, 115200)
        if self.verbose:
            print("serial port:")
            print(self.serial_port)
        self.serial_port.sendBreak()
        self.serial_port.flushInput()
        self.serial_port.flushOutput()
        self.serial_port.setDTR()
        self.serial_port.write(chr(0x18))  # 0x18 = CAN (cancel) control byte
        time.sleep(1)
        self.send_command("#V,W,0;", timeout=10, tries=1)

    def clear(self):
        """Ask the device to clear its stored records, then drain output."""
        self.serial_port.write("#R,W,0;")
        self.drain()

    def set_verbose(self, new_value=None):
        """Set verbosity; with no argument, toggle it."""
        if new_value is None:
            self.verbose = not self.verbose
        else:
            self.verbose = new_value

    def set_mode(self, runmode):
        """
        TODO: Fix this, ported from https://github.com/kjordahl/Watts-Up--logger
        See API url above
        """
        self.serial_port.write('#L,W,3,%s,,%d;' % (runmode, self.record_interval))
        if runmode == WattsUpReader.INTERNAL_MODE:
            self.serial_port.write('#O,W,1,%d' % WattsUpReader.FULL_HANDLING)

    def fetch(self, base_time=None, time_out=0.3, raw=False):
        """read one data point from meter"""
        rfds, wfds, efds = select.select([self.serial_port], [], [], time_out)
        if rfds:
            # device_output = self.serial_port.readline().decode("utf-8")  # python3
            device_output = self.serial_port.readline()
            self.returned_lines += 1
            if self.verbose:
                print(device_output)
            if device_output.startswith("#d"):
                if raw:
                    return "%s,%s" % (device_output.strip(), base_time)
                fields = device_output.split(',')
                if self.verbose:
                    for index, field in enumerate(fields):
                        print("%02d %s" % (index, field))
                # Scale the raw integer fields per the protocol document.
                watts = float(fields[3]) / 10
                volts = float(fields[4]) / 10
                milliamps = float(fields[5]) / 1000
                if not base_time:
                    base_time = time.time()
                return WattsUpReader.Result(base_time, watts, volts, milliamps)
            elif len(device_output) > 0:
                return WattsUpReader.Response(time.time(), device_output)
        return None

    def drain(self):
        """Consume and echo buffered lines until the device goes quiet."""
        while True:
            result = self.fetch()
            if type(result) is WattsUpReader.Response:
                print(result)
            elif self.verbose:
                print(result)
            if not result:
                return

    def dump(self):
        """Print parsed samples forever."""
        while True:
            result = self.fetch(base_time=time.time(), time_out=1000)
            print(result)

    def raw_dump(self):
        """Print raw sample lines forever."""
        while True:
            result = self.fetch(base_time=time.time(), time_out=1000, raw=True)
            print(result)

    def send_command(self, command, timeout=3, tries=3):
        """Send a '#...;' command, retrying until the device answers."""
        if not command.startswith("#"):
            # BUG FIX: these format strings were previously passed to print()
            # as a second positional argument and never interpolated.
            print("Error: no initial # for command %s" % command)
            return
        if not command.strip().endswith(";"):
            print("Error: no trailing ; for command %s" % command)
            return
        for attempt in range(tries):  # renamed so it no longer shadows 'tries'
            if self.verbose:
                print("sending command %s" % command)
            self.serial_port.write(command)
            answer = self.fetch(time_out=timeout)
            if answer:
                if self.verbose:
                    # BUG FIX: previously printed the literal text 'answer %s'.
                    print("answer %s" % (answer,))
                self.last_time = time.time()
                return
            else:
                if self.verbose:
                    print("timed out sending command %s" % command)

    def start_recording(self):
        """Begin external-mode logging at record_interval second intervals."""
        self.drain()
        command = "#L,W,3,E,,%d;" % self.record_interval
        self.send_command(command)

    def get_recording(self):
        """Return (Summary, [Result, ...]) for samples since the last call."""
        def pull():
            results = []
            estimated_record_time = self.last_time
            watt_seconds = 0.0
            samples = 0
            millicoulombs = 0.0
            while True:
                result = self.fetch(estimated_record_time)
                if not result:
                    summary = WattsUpReader.Summary(
                        watt_seconds, millicoulombs, samples, self.record_interval, self.last_time
                    )
                    return summary, results
                if type(result) is WattsUpReader.Result:
                    results.append(result)
                    # record_interval is 1s, so each watt sample adds ~1 joule
                    watt_seconds += result.watts
                    millicoulombs += result.milliamps
                    samples += 1
                estimated_record_time += self.record_interval
        all_results = pull()
        self.last_time = time.time()
        return all_results

    def command(self, string):
        """Send a raw string and verbosely echo whatever comes back."""
        self.serial_port.write(string)
        save_verbose, self.verbose = self.verbose, True
        time.sleep(1)
        self.drain()
        self.verbose = save_verbose

    @staticmethod
    def usage():
        print("command must be one of (quit, reset, record, get_record, verbose, drain) or ")
        print("native device command string beginning with # ")
        print("empty command will repeat previous command")
        print("commands can be abbreviated to first three letters")
        print("\n")

    def interactive_mode(self):
        """Simple REPL for poking at the device (Python 2: uses raw_input)."""
        readline.parse_and_bind('tab: complete')
        readline.parse_and_bind('set editing-mode vi')
        last_input = None
        WattsUpReader.usage()
        while True:
            user_input = raw_input("Command (quit to exit): ")
            # BUG FIX: usage() advertises that an empty command repeats the
            # previous one; that logic was commented out before.
            if not user_input.strip() and last_input is not None:
                user_input = last_input
            if user_input.startswith('q') or user_input.startswith('Q'):
                return
            elif user_input.lower().startswith('res'):
                self.reset()
            elif user_input.lower().startswith('dra'):
                self.drain()
            elif user_input.lower().startswith('rec'):
                self.start_recording()
            elif user_input.lower().startswith('get'):
                summary, detailed_results = self.get_recording()
                for index, result in enumerate(detailed_results):
                    print("%d ----" % index, end='')
                    print(result)
                print(summary)
            elif user_input.lower().startswith('ver'):
                self.verbose = not self.verbose
            elif user_input.startswith("#"):
                print("sending...")
                self.serial_port.write(user_input)
                time.sleep(1)
                self.drain()
            else:
                print("unknown command: %s" % user_input)
                WattsUpReader.usage()
            last_input = user_input

    def stop(self):
        """Close the serial port."""
        self.serial_port.close()

    @staticmethod
    def guess_port():
        """Look for a single USB tty device; exit(1) if ambiguous or absent."""
        import subprocess
        for tty_search_command in ["ls /dev/tty*usb*", "ls /dev/tty*USB*"]:
            try:
                possible_devices = subprocess.check_output(tty_search_command, shell=True).strip().split('\n')
            except Exception:  # was a bare except; don't swallow SystemExit
                possible_devices = []
            if len(possible_devices) == 1:
                return possible_devices[0]
            elif possible_devices:
                # More than one candidate: the user must choose.
                for device in possible_devices:
                    print("Possible device %s" % device)
                print("Multiple possible devices found, you must specify explicitly")
                exit(1)
            # BUG FIX: an empty candidate list previously fell into the
            # "multiple devices" branch and exited without trying the second
            # glob or ever reaching the message below.
        print("No potential usb based readers found, is it plugged in?")
        exit(1)
exit(1)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="interface to WattsUpPro usb power meter")
    parser.add_argument(
        '-i', '--interactive', help='interactive mode, allows direct communcation with device', action="store_true"
    )
    parser.add_argument('-p', '--port', help='full /dev/ path to the usb device, if missing a good quess will be made')
    parser.add_argument('-d', '--dump', help='meter data to stdout, default is prettified', action="store_true")
    parser.add_argument('-r', '--raw', help='modify dump to return raw device output', action="store_true")
    parser.add_argument('-c', '--clear', help='clear any saved lines in device', action="store_true")
    parser.add_argument('-v', '--verbose', help='show more debug than you like', action="store_true")
    args = parser.parse_args()
    # Auto-detect the serial device unless one was given explicitly.
    if not args.port:
        usb_port_name = WattsUpReader.guess_port()
        print("Using port %s" % usb_port_name)
    else:
        usb_port_name = args.port
    watt_reader = WattsUpReader(usb_port_name, verbose=False)
    if args.verbose:
        watt_reader.set_verbose(True)
    if args.clear:
        # NOTE(review): --clear drains buffered output rather than calling
        # WattsUpReader.clear() (which sends '#R,W,0;') -- confirm intended.
        watt_reader.drain()
    if args.interactive:
        watt_reader.interactive_mode()
    # raw implies dumping; plain --dump prints parsed samples.
    if args.raw:
        watt_reader.raw_dump()
    elif args.dump:
        watt_reader.dump()
    else:
        parser.print_usage()
    watt_reader.stop()
| |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010-2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Defines interface for DB access
"""
import logging
from sqlalchemy import asc, create_engine, desc
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import exc
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import or_, and_
from glance.common import cfg
from glance.common import exception
from glance.common import utils
from glance.registry.db import models
# Module-level SQLAlchemy state, lazily initialised by configure_db() and
# get_session().
_ENGINE = None
_MAKER = None
BASE = models.BASE
sa_logger = None
logger = logging.getLogger(__name__)

# attributes common to all models
BASE_MODEL_ATTRS = set(['id', 'created_at', 'updated_at', 'deleted_at',
                        'deleted'])

# Full set of attributes exposed on an Image row.
IMAGE_ATTRS = BASE_MODEL_ATTRS | set(['name', 'status', 'size',
                                      'disk_format', 'container_format',
                                      'min_disk', 'min_ram', 'is_public',
                                      'location', 'checksum', 'owner',
                                      'protected'])

# Legal values enforced by validate_image().
CONTAINER_FORMATS = ['ami', 'ari', 'aki', 'bare', 'ovf']
DISK_FORMATS = ['ami', 'ari', 'aki', 'vhd', 'vmdk', 'raw', 'qcow2', 'vdi',
                'iso']
STATUSES = ['active', 'saving', 'queued', 'killed', 'pending_delete',
            'deleted']

# Configuration options registered by configure_db().
db_opts = [
    cfg.IntOpt('sql_idle_timeout', default=3600),
    cfg.StrOpt('sql_connection', default='sqlite:///glance.sqlite'),
]
def configure_db(conf):
    """
    Establish the database, create an engine if needed, and
    register the models.

    :param conf: Mapping of configuration options
    """
    global _ENGINE, sa_logger, logger
    # Idempotent: only the first call creates the engine.
    if not _ENGINE:
        conf.register_opts(db_opts)
        timeout = conf.sql_idle_timeout
        sql_connection = conf.sql_connection
        try:
            # pool_recycle drops connections idle longer than the timeout.
            _ENGINE = create_engine(sql_connection, pool_recycle=timeout)
        except Exception, err:
            msg = _("Error configuring registry database with supplied "
                    "sql_connection '%(sql_connection)s'. "
                    "Got error:\n%(err)s") % locals()
            logger.error(msg)
            raise
        sa_logger = logging.getLogger('sqlalchemy.engine')
        if conf.debug:
            sa_logger.setLevel(logging.DEBUG)
        elif conf.verbose:
            sa_logger.setLevel(logging.INFO)
        models.register_models(_ENGINE)
def check_mutate_authorization(context, image_ref):
    """Raise NotAuthorized unless the request context may modify image_ref."""
    if context.is_image_mutable(image_ref):
        return
    logger.info(_("Attempted to modify image user did not own."))
    raise exception.NotAuthorized(_("You do not own this image"))
def get_session(autocommit=True, expire_on_commit=False):
    """Helper method to grab session"""
    global _MAKER, _ENGINE
    if not _MAKER:
        # configure_db() must have been called first to create the engine.
        assert _ENGINE
        # NOTE(review): the sessionmaker is cached, so the autocommit /
        # expire_on_commit arguments of later calls are ignored.
        _MAKER = sessionmaker(bind=_ENGINE,
                              autocommit=autocommit,
                              expire_on_commit=expire_on_commit)
    return _MAKER()
def image_create(context, values):
    """Create an image from the values dictionary."""
    # image_id=None tells _image_update to insert rather than update.
    return _image_update(context, values, None, False)
def image_update(context, image_id, values, purge_props=False):
    """
    Set the given properties on an image and update it.

    :param purge_props: when True, properties absent from values are removed

    :raises NotFound if image does not exist.
    """
    return _image_update(context, values, image_id, purge_props)
def image_destroy(context, image_id):
    """Destroy the image or raise if it does not exist."""
    session = get_session()
    with session.begin():
        image_ref = image_get(context, image_id, session=session)

        # Perform authorization check
        check_mutate_authorization(context, image_ref)

        image_ref.delete(session=session)

        # Cascade the delete to the image's property and member rows.
        for prop_ref in image_ref.properties:
            image_property_delete(context, prop_ref, session=session)
        for memb_ref in image_ref.members:
            image_member_delete(context, memb_ref, session=session)
def image_get(context, image_id, session=None, force_show_deleted=False):
    """Get an image or raise if it does not exist."""
    session = session or get_session()
    try:
        # Eager-load the properties and members collections in one query.
        query = session.query(models.Image).\
            options(joinedload(models.Image.properties)).\
            options(joinedload(models.Image.members)).\
            filter_by(id=image_id)

        # filter out deleted images if context disallows it
        if not force_show_deleted and not can_show_deleted(context):
            query = query.filter_by(deleted=False)

        image = query.one()
    except exc.NoResultFound:
        raise exception.NotFound("No image found with ID %s" % image_id)

    # Make sure they can look at it
    if not context.is_image_visible(image):
        raise exception.NotAuthorized("Image not visible to you")

    return image
def image_get_all(context, filters=None, marker=None, limit=None,
                  sort_key='created_at', sort_dir='desc'):
    """
    Get all images that match zero or more filters.

    :param filters: dict of filter keys and values. If a 'properties'
                    key is present, it is treated as a dict of key/value
                    filters on the image properties attribute
    :param marker: image id after which to start page
    :param limit: maximum number of images to return
    :param sort_key: image attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted (asc, desc)
    """
    filters = filters or {}
    session = get_session()
    query = session.query(models.Image).\
        options(joinedload(models.Image.properties)).\
        options(joinedload(models.Image.members))

    sort_dir_func = {
        'asc': asc,
        'desc': desc,
    }[sort_dir]

    sort_key_attr = getattr(models.Image, sort_key)

    # Secondary sort keys (created_at, id) keep pagination deterministic
    # when rows share the same sort_key value.
    query = query.order_by(sort_dir_func(sort_key_attr))\
                 .order_by(sort_dir_func(models.Image.created_at))\
                 .order_by(sort_dir_func(models.Image.id))

    if 'size_min' in filters:
        query = query.filter(models.Image.size >= filters['size_min'])
        del filters['size_min']

    if 'size_max' in filters:
        query = query.filter(models.Image.size <= filters['size_max'])
        del filters['size_max']

    if 'is_public' in filters and filters['is_public'] is not None:
        the_filter = [models.Image.is_public == filters['is_public']]
        if filters['is_public'] and context.owner is not None:
            # Public listings also include images the caller owns or is a
            # member of, even when those images are not public.
            the_filter.extend([(models.Image.owner == context.owner),
                               models.Image.members.any(member=context.owner,
                                                        deleted=False)])
        if len(the_filter) > 1:
            query = query.filter(or_(*the_filter))
        else:
            query = query.filter(the_filter[0])
        del filters['is_public']

    showing_deleted = False
    if 'changes-since' in filters:
        changes_since = filters.pop('changes-since')
        query = query.filter(models.Image.updated_at > changes_since)
        showing_deleted = True

    if 'deleted' in filters:
        deleted_filter = filters.pop('deleted')
        query = query.filter_by(deleted=deleted_filter)
        showing_deleted = deleted_filter
        # TODO(bcwaldon): handle this logic in registry server
        if not deleted_filter:
            query = query.filter(models.Image.status != 'killed')

    for (k, v) in filters.pop('properties', {}).items():
        query = query.filter(models.Image.properties.any(name=k, value=v))

    # Remaining filters map directly onto Image columns.
    for (k, v) in filters.items():
        if v is not None:
            query = query.filter(getattr(models.Image, k) == v)

    # FIX: identity comparison with None (PEP 8) instead of `!= None`.
    if marker is not None:
        # images returned should be created before the image defined by marker
        marker_image = image_get(context, marker,
                                 force_show_deleted=showing_deleted)
        marker_value = getattr(marker_image, sort_key)
        if sort_dir == 'desc':
            query = query.filter(
                or_(sort_key_attr < marker_value,
                    and_(sort_key_attr == marker_value,
                         models.Image.created_at < marker_image.created_at,
                         models.Image.id < marker)))
        else:
            query = query.filter(
                or_(sort_key_attr > marker_value,
                    and_(sort_key_attr == marker_value,
                         models.Image.created_at > marker_image.created_at,
                         models.Image.id > marker)))

    if limit is not None:
        query = query.limit(limit)

    return query.all()
def _drop_protected_attrs(model_class, values):
"""
Removed protected attributes from values dictionary using the models
__protected_attributes__ field.
"""
for attr in model_class.__protected_attributes__:
if attr in values:
del values[attr]
def validate_image(values):
    """
    Validates the incoming data and raises a Invalid exception
    if anything is out of order.

    :param values: Mapping of image metadata to check
    :raises exception.Invalid: when any field is missing or out of range
    """
    # FIX: removed a duplicated `status = values.get('status', None)` line.
    status = values.get('status')
    disk_format = values.get('disk_format')
    container_format = values.get('container_format')

    if not status:
        msg = "Image status is required."
        raise exception.Invalid(msg)

    if status not in STATUSES:
        msg = "Invalid image status '%s' for image." % status
        raise exception.Invalid(msg)

    if disk_format and disk_format not in DISK_FORMATS:
        msg = "Invalid disk format '%s' for image." % disk_format
        raise exception.Invalid(msg)

    if container_format and container_format not in CONTAINER_FORMATS:
        msg = "Invalid container format '%s' for image." % container_format
        raise exception.Invalid(msg)

    # Amazon-style images (kernel/ramdisk/machine) must use the same value
    # for both the disk and container format.
    if disk_format in ('aki', 'ari', 'ami') or\
            container_format in ('aki', 'ari', 'ami'):
        if container_format != disk_format:
            # FIX: the message previously listed 'ami' twice instead of 'aki'.
            msg = ("Invalid mix of disk and container formats. "
                   "When setting a disk or container format to "
                   "one of 'ami', 'ari', or 'aki', the container "
                   "and disk formats must match.")
            raise exception.Invalid(msg)
def _image_update(context, values, image_id, purge_props=False):
    """
    Used internally by image_create and image_update

    :param context: Request context
    :param values: A dict of attributes to set
    :param image_id: If None, create the image, otherwise, find and update it
    :param purge_props: If True, delete existing properties that are not
                        present in the supplied values mapping
    :raises exception.Duplicate: if saving violates the unique image id
    """
    session = get_session()
    with session.begin():

        # Remove the properties passed in the values mapping. We
        # handle properties separately from base image attributes,
        # and leaving properties in the values mapping will cause
        # a SQLAlchemy model error because SQLAlchemy expects the
        # properties attribute of an Image model to be a list and
        # not a dict.
        properties = values.pop('properties', {})

        if image_id:
            image_ref = image_get(context, image_id, session=session)

            # Perform authorization check
            check_mutate_authorization(context, image_ref)
        else:
            # New image: normalize numeric and boolean attributes.
            if 'size' in values:
                values['size'] = int(values['size'])

            if 'min_ram' in values:
                values['min_ram'] = int(values['min_ram'] or 0)

            if 'min_disk' in values:
                values['min_disk'] = int(values['min_disk'] or 0)

            values['is_public'] = bool(values.get('is_public', False))
            values['protected'] = bool(values.get('protected', False))
            image_ref = models.Image()

        # Need to canonicalize ownership
        if 'owner' in values and not values['owner']:
            values['owner'] = None

        if image_id:
            # Don't drop created_at if we're passing it in...
            _drop_protected_attrs(models.Image, values)
        image_ref.update(values)

        # Validate the attributes before we go any further. From my
        # investigation, the @validates decorator does not validate
        # on new records, only on existing records, which is, well,
        # idiotic.
        validate_image(image_ref.to_dict())

        try:
            image_ref.save(session=session)
        except IntegrityError:
            # The image id collided with an existing record.
            raise exception.Duplicate("Image ID %s already exists!"
                                      % values['id'])

        _set_properties_for_image(context, image_ref, properties, purge_props,
                                  session)

    return image_get(context, image_ref.id)
def _set_properties_for_image(context, image_ref, properties,
                              purge_props=False, session=None):
    """
    Create or update a set of image_properties for a given image

    :param context: Request context
    :param image_ref: An Image object
    :param properties: A dict of properties to set
    :param purge_props: If True, delete existing properties that are
                        absent from the properties mapping
    :param session: A SQLAlchemy session to use (if present)
    """
    # Index the image's current property records by name.
    existing = {}
    for current_prop in image_ref.properties:
        existing[current_prop.name] = current_prop

    for prop_name, prop_value in properties.iteritems():
        new_values = {'image_id': image_ref.id,
                      'name': prop_name,
                      'value': prop_value}
        if prop_name in existing:
            image_property_update(context, existing[prop_name], new_values,
                                  session=session)
        else:
            image_property_create(context, new_values, session=session)

    if purge_props:
        # Soft-delete any pre-existing property not supplied this time.
        for stale_name in existing.keys():
            if stale_name not in properties:
                image_property_delete(context, existing[stale_name],
                                      session=session)
def image_property_create(context, values, session=None):
    """Create a new ImageProperty record and persist it."""
    new_prop = models.ImageProperty()
    return _image_property_update(context, new_prop, values, session=session)
def image_property_update(context, prop_ref, values, session=None):
    """Update an existing ImageProperty object with `values` and persist it.

    :param prop_ref: the ImageProperty record to update
    :param session: A SQLAlchemy session to use (if present)
    """
    return _image_property_update(context, prop_ref, values, session=session)
def _image_property_update(context, prop_ref, values, session=None):
    """
    Used internally by image_property_create and image_property_update

    Applies `values` to the property record and saves it.  The record is
    always marked as not deleted.
    """
    # Drop protected attributes first: 'deleted' is itself protected, so
    # it must be forced to False only after the drop.
    _drop_protected_attrs(models.ImageProperty, values)
    values["deleted"] = False
    prop_ref.update(values)
    prop_ref.save(session=session)
    return prop_ref
def image_property_delete(context, prop_ref, session=None):
    """
    Soft-delete an ImageProperty record by flagging it as deleted.

    :param prop_ref: the ImageProperty record to mark deleted
    :param session: A SQLAlchemy session to use (if present)
    """
    prop_ref.update(dict(deleted=True))
    prop_ref.save(session=session)
    return prop_ref
def image_member_create(context, values, session=None):
    """Create a new ImageMember record and persist it."""
    new_member = models.ImageMember()
    return _image_member_update(context, new_member, values, session=session)
def image_member_update(context, memb_ref, values, session=None):
    """Update an existing ImageMember object with `values` and persist it.

    :param memb_ref: the ImageMember record to update
    :param session: A SQLAlchemy session to use (if present)
    """
    return _image_member_update(context, memb_ref, values, session=session)
def _image_member_update(context, memb_ref, values, session=None):
    """
    Used internally by image_member_create and image_member_update

    Applies `values` to the membership record and saves it.  The record
    is always marked as not deleted, and can_share defaults to False.
    """
    # Drop protected attributes first: 'deleted' is itself protected, so
    # it must be forced to False only after the drop.
    _drop_protected_attrs(models.ImageMember, values)
    values["deleted"] = False
    values.setdefault('can_share', False)
    memb_ref.update(values)
    memb_ref.save(session=session)
    return memb_ref
def image_member_delete(context, memb_ref, session=None):
    """Soft-delete an ImageMember record by flagging it as deleted."""
    session = session or get_session()
    memb_ref.update({'deleted': True})
    memb_ref.save(session=session)
    return memb_ref
def image_member_get(context, member_id, session=None):
    """
    Get an image member or raise if it does not exist.

    :param member_id: primary key of the membership record
    :raises exception.NotFound: if no such membership exists
    :raises exception.NotAuthorized: if the caller may not see the image
    """
    session = session or get_session()

    # Build the lookup, eagerly loading the related image.
    query = session.query(models.ImageMember).\
        options(joinedload(models.ImageMember.image)).\
        filter_by(id=member_id)
    if not can_show_deleted(context):
        query = query.filter_by(deleted=False)

    try:
        member = query.one()
    except exc.NoResultFound:
        raise exception.NotFound("No membership found with ID %s" % member_id)

    # Make sure they can look at it
    if not context.is_image_visible(member.image):
        raise exception.NotAuthorized("Image not visible to you")

    return member
def image_member_find(context, image_id, member, session=None):
    """
    Find a membership association between image and member.

    Note the lack of a permissions check here: this function is called
    from RequestContext.is_image_visible(), so a visibility check would
    recurse.

    :raises exception.NotFound: if no matching association exists
    """
    session = session or get_session()

    query = session.query(models.ImageMember).\
        options(joinedload(models.ImageMember.image)).\
        filter_by(image_id=image_id).\
        filter_by(member=member)
    if not can_show_deleted(context):
        query = query.filter_by(deleted=False)

    try:
        return query.one()
    except exc.NoResultFound:
        raise exception.NotFound("No membership found for image %s member %s" %
                                 (image_id, member))
def image_member_get_memberships(context, member, marker=None, limit=None,
                                 sort_key='created_at', sort_dir='desc'):
    """
    Get all image memberships for the given member.

    :param member: the member to look up memberships for
    :param marker: membership id after which to start page
    :param limit: maximum number of memberships to return
    :param sort_key: membership attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted (asc, desc)
    """
    session = get_session()
    query = session.query(models.ImageMember).\
        options(joinedload(models.ImageMember.image)).\
        filter_by(member=member)

    if not can_show_deleted(context):
        query = query.filter_by(deleted=False)

    sort_dir_func = {
        'asc': asc,
        'desc': desc,
    }[sort_dir]

    # Secondary sort on id gives a stable, unambiguous page ordering.
    sort_key_attr = getattr(models.ImageMember, sort_key)
    query = query.order_by(sort_dir_func(sort_key_attr)).\
        order_by(sort_dir_func(models.ImageMember.id))

    if marker is not None:
        # memberships returned should be created before the membership
        # defined by marker
        marker_membership = image_member_get(context, marker)
        marker_value = getattr(marker_membership, sort_key)
        if sort_dir == 'desc':
            query = query.filter(
                or_(sort_key_attr < marker_value,
                    and_(sort_key_attr == marker_value,
                         models.ImageMember.id < marker)))
        else:
            query = query.filter(
                or_(sort_key_attr > marker_value,
                    and_(sort_key_attr == marker_value,
                         models.ImageMember.id > marker)))

    if limit is not None:
        query = query.limit(limit)

    return query.all()
# pylint: disable-msg=C0111
def can_show_deleted(context):
    """
    Calculates whether to include deleted objects based on context.

    Currently just looks for a flag called deleted in the context dict.
    """
    # RequestContext-style objects expose show_deleted directly.
    try:
        return context.show_deleted
    except AttributeError:
        pass
    # Dict-style contexts: honour a 'deleted' entry when present.
    getter = getattr(context, 'get', None)
    if getter is None:
        return False
    return getter('deleted', False)
| |
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import logging
from ryu.base import app_manager
from ryu.controller import event
from ryu.controller import handler
from ryu.controller import ofp_event
from ryu.controller.handler import set_ev_cls
from ryu.exception import RyuException
from ryu.exception import OFPUnknownVersion
from ryu.lib import hub
from ryu.lib.dpid import dpid_to_str
from ryu.lib.packet import bpdu
from ryu.lib.packet import ethernet
from ryu.lib.packet import llc
from ryu.lib.packet import packet
from ryu.ofproto import ofproto_v1_0
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ofproto_v1_3
# Largest port number this library manages; higher (reserved OpenFlow)
# port numbers are ignored by Bridge.port_add().
MAX_PORT_NO = 0xfff

# for OpenFlow 1.2/1.3
BPDU_PKT_IN_PRIORITY = 0xffff
NO_PKT_IN_PRIORITY = 0xfffe

# Result of compared config BPDU priority.
SUPERIOR = -1   # the compared info is better than the reference
REPEATED = 0    # both infos are equal
INFERIOR = 1    # the compared info is worse than the reference

# Port role
DESIGNATED_PORT = 0      # The port which sends BPDU.
ROOT_PORT = 1            # The port which receives BPDU from a root bridge.
NON_DESIGNATED_PORT = 2  # The port which blocked.
""" How to decide the port roles.
Root bridge:
a bridge has smallest bridge ID is chosen as a root.
it sends original config BPDU.
Non Root bridge:
forwards config BPDU received from the root bridge.
+-----------------------+
| Root bridge |
+-----------------------+
(D) (D)
| |
| |
(R) (R)
+-----------------+ +-----------------+
| Non Root bridge |(D)---(ND)| Non Root bridge |
+-----------------+ +-----------------+
ROOT_PORT(R):
the nearest port to a root bridge of the bridge.
it is determined by the cost of the path, etc.
DESIGNATED_PORT(D):
the port of the side near the root bridge of each link.
it is determined by the cost of the path, etc.
NON_DESIGNATED_PORT(ND):
the port other than a ROOT_PORT and DESIGNATED_PORT.
"""
# Port state
#  DISABLE: Administratively down or link down by an obstacle.
#  BLOCK  : Not part of spanning tree.
#  LISTEN : Not learning or relaying frames.
#  LEARN  : Learning but not relaying frames.
#  FORWARD: Learning and relaying frames.
PORT_STATE_DISABLE = 0
PORT_STATE_BLOCK = 1
PORT_STATE_LISTEN = 2
PORT_STATE_LEARN = 3
PORT_STATE_FORWARD = 4

# The maps below translate each logical port state into the OpenFlow
# port-config flag bits used to realize that state on the switch.

# for OpenFlow 1.0
PORT_CONFIG_V1_0 = {PORT_STATE_DISABLE: (ofproto_v1_0.OFPPC_NO_RECV_STP
                                         | ofproto_v1_0.OFPPC_NO_RECV
                                         | ofproto_v1_0.OFPPC_NO_FLOOD
                                         | ofproto_v1_0.OFPPC_NO_FWD),
                    PORT_STATE_BLOCK: (ofproto_v1_0.OFPPC_NO_RECV
                                       | ofproto_v1_0.OFPPC_NO_FLOOD
                                       | ofproto_v1_0.OFPPC_NO_FWD),
                    PORT_STATE_LISTEN: (ofproto_v1_0.OFPPC_NO_RECV
                                        | ofproto_v1_0.OFPPC_NO_FLOOD),
                    PORT_STATE_LEARN: ofproto_v1_0.OFPPC_NO_FLOOD,
                    PORT_STATE_FORWARD: 0}

# for OpenFlow 1.2
PORT_CONFIG_V1_2 = {PORT_STATE_DISABLE: (ofproto_v1_2.OFPPC_NO_RECV
                                         | ofproto_v1_2.OFPPC_NO_FWD),
                    PORT_STATE_BLOCK: (ofproto_v1_2.OFPPC_NO_FWD
                                       | ofproto_v1_2.OFPPC_NO_PACKET_IN),
                    PORT_STATE_LISTEN: ofproto_v1_2.OFPPC_NO_PACKET_IN,
                    PORT_STATE_LEARN: ofproto_v1_2.OFPPC_NO_PACKET_IN,
                    PORT_STATE_FORWARD: 0}

# for OpenFlow 1.3
PORT_CONFIG_V1_3 = {PORT_STATE_DISABLE: (ofproto_v1_3.OFPPC_NO_RECV
                                         | ofproto_v1_3.OFPPC_NO_FWD),
                    PORT_STATE_BLOCK: (ofproto_v1_3.OFPPC_NO_FWD
                                       | ofproto_v1_3.OFPPC_NO_PACKET_IN),
                    PORT_STATE_LISTEN: ofproto_v1_3.OFPPC_NO_PACKET_IN,
                    PORT_STATE_LEARN: ofproto_v1_3.OFPPC_NO_PACKET_IN,
                    PORT_STATE_FORWARD: 0}
""" Port state machine
+------------------------<--------------------------+
| |*2
+--> [BLOCK] -----+--> [LISTEN] ----> [LEARN] ------+----> [FORWARD]
*3 | | 15sec | 15sec *1 |
| |*3 |*3 |*3
+----<---+------<------+----------<----------+
*1 if port role == DESIGNATED_PORT or ROOT_PORT
*2 if port role == NON_DESIGNATED_PORT
*3 re-calculation of Spanning tree occurred.
When bridge has started, each port state is set to [LISTEN]
except port configuration is disable.
If port configuration is disable or link down occurred,
the port state is set to [DISABLE]
"""
# Throw this event when network topology is changed.
# Flush filtering database, when you receive this event.
class EventTopologyChange(event.EventBase):
    """Notification that the spanning tree of `dp` was recalculated."""

    def __init__(self, dp):
        super(EventTopologyChange, self).__init__()
        self.dp = dp  # Datapath whose topology changed
# Throw this event when port status is changed.
class EventPortStateChange(event.EventBase):
    """Notification that one port moved to a new STP state."""

    def __init__(self, dp, port):
        super(EventPortStateChange, self).__init__()
        self.dp = dp
        self.port_no = port.ofport.port_no  # OpenFlow port number
        self.port_state = port.state        # one of the PORT_STATE_* values
# Event for receive packet in message except BPDU packet.
class EventPacketIn(event.EventBase):
    """Carries a non-BPDU PacketIn message up to the application."""

    def __init__(self, msg):
        super(EventPacketIn, self).__init__()
        self.msg = msg  # the original OFPPacketIn message
class Stp(app_manager.RyuApp):
    """ STP(spanning tree) library.

    Tracks every connected datapath as a Bridge instance, feeds it
    PacketIn/PortStatus events and re-publishes stplib events
    (EventTopologyChange, EventPortStateChange, EventPacketIn) to
    observer applications.
    """

    OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION,
                    ofproto_v1_2.OFP_VERSION,
                    ofproto_v1_3.OFP_VERSION]

    def __init__(self):
        super(Stp, self).__init__()
        self.name = 'stplib'
        self._set_logger()
        self.config = {}       # user configuration, keyed by dpid
        self.bridge_list = {}  # active Bridge instances, keyed by dpid

    def close(self):
        for dpid in self.bridge_list.keys():
            self._unregister_bridge(dpid)

    def _set_logger(self):
        """ Attach a handler that prefixes each message with the dpid. """
        self.logger.propagate = False
        hdlr = logging.StreamHandler()
        fmt_str = '[STP][%(levelname)s] dpid=%(dpid)s: %(message)s'
        hdlr.setFormatter(logging.Formatter(fmt_str))
        self.logger.addHandler(hdlr)

    def set_config(self, config):
        """ Use this API if you want to set up configuration
             of each bridge and ports.
            Set configuration with 'config' parameter as follows.
             config = {<dpid>: {'bridge': {'priority': <value>,
                                           'sys_ext_id': <value>,
                                           'max_age': <value>,
                                           'hello_time': <value>,
                                           'fwd_delay': <value>}
                                'ports': {<port_no>: {'priority': <value>,
                                                      'path_cost': <value>,
                                                      'enable': <True/False>},
                                          <port_no>: {...},,,}}
                       <dpid>: {...},
                       <dpid>: {...},,,}

             NOTE: You may omit each field.
                    If omitted, a default value is set up.
                   It becomes effective when a bridge starts.

             Default values:
             ------------------------------------------------------
             | bridge | priority   | bpdu.DEFAULT_BRIDGE_PRIORITY |
             |        | sys_ext_id | 0                            |
             |        | max_age    | bpdu.DEFAULT_MAX_AGE         |
             |        | hello_time | bpdu.DEFAULT_HELLO_TIME      |
             |        | fwd_delay  | bpdu.DEFAULT_FORWARD_DELAY   |
             |--------|------------|------------------------------|
             | port   | priority   | bpdu.DEFAULT_PORT_PRIORITY   |
             |        | path_cost  | (Set up automatically        |
             |        |            |  according to link speed.)   |
             |        | enable     | True                         |
             ------------------------------------------------------
        """
        assert isinstance(config, dict)
        self.config = config

    @set_ev_cls(ofp_event.EventOFPStateChange,
                [handler.MAIN_DISPATCHER, handler.DEAD_DISPATCHER])
    def dispacher_change(self, ev):
        # Register a bridge when a datapath joins, drop it when it leaves.
        assert ev.datapath is not None
        if ev.state == handler.MAIN_DISPATCHER:
            self._register_bridge(ev.datapath)
        elif ev.state == handler.DEAD_DISPATCHER:
            self._unregister_bridge(ev.datapath.id)

    def _register_bridge(self, dp):
        self._unregister_bridge(dp.id)
        dpid_str = {'dpid': dpid_to_str(dp.id)}
        self.logger.info('Join as stp bridge.', extra=dpid_str)
        try:
            bridge = Bridge(dp, self.logger,
                            self.config.get(dp.id, {}),
                            self.send_event_to_observers)
        except OFPUnknownVersion as message:
            self.logger.error(str(message), extra=dpid_str)
            return
        self.bridge_list[dp.id] = bridge

    def _unregister_bridge(self, dp_id):
        if dp_id in self.bridge_list:
            self.bridge_list[dp_id].delete()
            del self.bridge_list[dp_id]
            self.logger.info('Leave stp bridge.',
                             extra={'dpid': dpid_to_str(dp_id)})

    @set_ev_cls(ofp_event.EventOFPPacketIn, handler.MAIN_DISPATCHER)
    def packet_in_handler(self, ev):
        # Delegate PacketIn to the owning bridge, if we manage it.
        if ev.msg.datapath.id in self.bridge_list:
            bridge = self.bridge_list[ev.msg.datapath.id]
            bridge.packet_in_handler(ev.msg)

    @set_ev_cls(ofp_event.EventOFPPortStatus, handler.MAIN_DISPATCHER)
    def port_status_handler(self, ev):
        dp = ev.msg.datapath
        dpid_str = {'dpid': dpid_to_str(dp.id)}
        port = ev.msg.desc
        reason = ev.msg.reason
        # Lowest bit of port.state is the link-down flag.
        link_down_flg = port.state & 0b1

        if dp.id in self.bridge_list:
            bridge = self.bridge_list[dp.id]
            if reason is dp.ofproto.OFPPR_ADD:
                self.logger.info('[port=%d] Port add.',
                                 port.port_no, extra=dpid_str)
                bridge.port_add(port)
            elif reason is dp.ofproto.OFPPR_DELETE:
                self.logger.info('[port=%d] Port delete.',
                                 port.port_no, extra=dpid_str)
                bridge.port_delete(port.port_no)
            else:
                assert reason is dp.ofproto.OFPPR_MODIFY
                if link_down_flg:
                    self.logger.info('[port=%d] Link down.',
                                     port.port_no, extra=dpid_str)
                    bridge.link_down(port.port_no)
                else:
                    self.logger.info('[port=%d] Link up.',
                                     port.port_no, extra=dpid_str)
                    bridge.link_up(port.port_no)

    @staticmethod
    def compare_root_path(path_cost1, path_cost2, bridge_id1, bridge_id2,
                          port_id1, port_id2):
        """ Decide the port of the side near a root bridge.
            It is compared by the following priorities.
             1. root path cost
             2. designated bridge ID value
             3. designated port ID value """
        result = Stp._cmp_value(path_cost1, path_cost2)
        if not result:
            result = Stp._cmp_value(bridge_id1, bridge_id2)
            if not result:
                result = Stp._cmp_value(port_id1, port_id2)
        return result

    @staticmethod
    def compare_bpdu_info(my_priority, my_times, rcv_priority, rcv_times):
        """ Check received BPDU is superior to currently held BPDU
            by the following comparison.
             - root bridge ID value
             - root path cost
             - designated bridge ID value
             - designated port ID value
             - times """
        if my_priority is None:
            result = SUPERIOR
        else:
            result = Stp._cmp_value(rcv_priority.root_id.value,
                                    my_priority.root_id.value)
            if not result:
                result = Stp.compare_root_path(
                    rcv_priority.root_path_cost,
                    my_priority.root_path_cost,
                    rcv_priority.designated_bridge_id.value,
                    my_priority.designated_bridge_id.value,
                    rcv_priority.designated_port_id.value,
                    my_priority.designated_port_id.value)
                if not result:
                    # Same-source check: compare the transmitting bridge's
                    # MAC address and port number like-for-like.
                    # BUGFIX: the previous code compared the packed .value
                    # field against .mac_addr / .port_no, which could never
                    # be equal, making this branch unreachable.
                    result1 = Stp._cmp_value(
                        rcv_priority.designated_bridge_id.mac_addr,
                        my_priority.designated_bridge_id.mac_addr)
                    result2 = Stp._cmp_value(
                        rcv_priority.designated_port_id.port_no,
                        my_priority.designated_port_id.port_no)
                    if not result1 and not result2:
                        result = SUPERIOR
                    else:
                        result = Stp._cmp_obj(rcv_times, my_times)
        return result

    @staticmethod
    def _cmp_value(value1, value2):
        # Translate Python's cmp() result into SUPERIOR/REPEATED/INFERIOR.
        result = cmp(value1, value2)
        if result < 0:
            return SUPERIOR
        elif result == 0:
            return REPEATED
        else:
            return INFERIOR

    @staticmethod
    def _cmp_obj(obj1, obj2):
        # SUPERIOR if any attribute differs (or is missing), else REPEATED.
        for key in obj1.__dict__.keys():
            if (not hasattr(obj2, key)
                    or getattr(obj1, key) != getattr(obj2, key)):
                return SUPERIOR
        return REPEATED
class Bridge(object):
    """ Spanning-tree state of a single datapath.

    Owns one Port object per physical port, runs the root-bridge
    election and dispatches received BPDUs.
    """

    # Defaults for any bridge setting not supplied via Stp.set_config().
    _DEFAULT_VALUE = {'priority': bpdu.DEFAULT_BRIDGE_PRIORITY,
                      'sys_ext_id': 0,
                      'max_age': bpdu.DEFAULT_MAX_AGE,
                      'hello_time': bpdu.DEFAULT_HELLO_TIME,
                      'fwd_delay': bpdu.DEFAULT_FORWARD_DELAY}

    def __init__(self, dp, logger, config, send_ev_func):
        """
        :param dp: Datapath of this bridge
        :param logger: logger expecting a 'dpid' extra field
        :param config: this bridge's entry from Stp.set_config()
        :param send_ev_func: function used to publish stplib events
        """
        super(Bridge, self).__init__()
        self.dp = dp
        self.logger = logger
        self.dpid_str = {'dpid': dpid_to_str(dp.id)}
        self.send_event = send_ev_func

        # Bridge data
        bridge_conf = config.get('bridge', {})
        # BUGFIX: copy the defaults; updating the class-level
        # _DEFAULT_VALUE dict in place would leak one bridge's
        # configuration into every bridge created afterwards.
        values = self._DEFAULT_VALUE.copy()
        for key, value in bridge_conf.items():
            values[key] = value
        system_id = dp.ports.values()[0].hw_addr
        self.bridge_id = BridgeId(values['priority'],
                                  values['sys_ext_id'],
                                  system_id)
        self.bridge_times = Times(0,  # message_age
                                  values['max_age'],
                                  values['hello_time'],
                                  values['fwd_delay'])
        # Root bridge data: initially assume this bridge is the root.
        self.root_priority = Priority(self.bridge_id, 0, None, None)
        self.root_times = self.bridge_times
        # Ports
        self.ports = {}
        self.ports_conf = config.get('ports', {})
        for ofport in dp.ports.values():
            self.port_add(ofport)

        # Install BPDU PacketIn flow. (OpenFlow 1.2/1.3)
        if dp.ofproto == ofproto_v1_2 or dp.ofproto == ofproto_v1_3:
            ofctl = OfCtl_v1_2later(self.dp)
            ofctl.add_bpdu_pkt_in_flow()

    @property
    def is_root_bridge(self):
        # True when our own bridge ID won the root election.
        return bool(self.bridge_id.value == self.root_priority.root_id.value)

    def delete(self):
        for port in self.ports.values():
            port.delete()

    def port_add(self, ofport):
        # Reserved/virtual OpenFlow port numbers are not managed by STP.
        if ofport.port_no <= MAX_PORT_NO:
            port_conf = self.ports_conf.get(ofport.port_no, {})
            self.ports[ofport.port_no] = Port(self.dp, self.logger,
                                              port_conf, self.send_event,
                                              self.recalculate_spanning_tree,
                                              self.topology_change_notify,
                                              self.bridge_id,
                                              self.bridge_times,
                                              ofport)

    def port_delete(self, port_no):
        self.link_down(port_no)
        self.ports[port_no].delete()
        del self.ports[port_no]

    def link_up(self, port_no):
        # A freshly-up link starts out as a designated port.
        port = self.ports[port_no]
        port.up(DESIGNATED_PORT, self.root_priority, self.root_times)

    def link_down(self, port_no):
        """ DESIGNATED_PORT/NON_DESIGNATED_PORT: change status to DISABLE.
            ROOT_PORT: change status to DISABLE and recalculate STP. """
        port = self.ports[port_no]
        init_stp_flg = bool(port.role is ROOT_PORT)

        port.down(PORT_STATE_DISABLE, msg_init=True)
        if init_stp_flg:
            self.recalculate_spanning_tree()

    def packet_in_handler(self, msg):
        """ Dispatch a PacketIn: BPDUs drive the protocol state machine,
            everything else is re-published as EventPacketIn. """
        dp = msg.datapath
        if dp.ofproto == ofproto_v1_0:
            in_port_no = msg.in_port
        else:
            assert dp.ofproto == ofproto_v1_2 or dp.ofproto == ofproto_v1_3
            in_port_no = None
            for match_field in msg.match.fields:
                if match_field.header == dp.ofproto.OXM_OF_IN_PORT:
                    in_port_no = match_field.value
                    break
        if in_port_no not in self.ports:
            return
        in_port = self.ports[in_port_no]
        if in_port.state == PORT_STATE_DISABLE:
            return

        pkt = packet.Packet(msg.data)
        if bpdu.ConfigurationBPDUs in pkt:
            """ Receive Configuration BPDU.
                 - If receive superior BPDU:
                    re-caluculation of spanning tree.
                 - If receive Topology Change BPDU:
                    throw EventTopologyChange.
                    forward Topology Change BPDU. """
            (bpdu_pkt, ) = pkt.get_protocols(bpdu.ConfigurationBPDUs)
            if bpdu_pkt.message_age > bpdu_pkt.max_age:
                log_msg = 'Drop BPDU packet which message_age exceeded.'
                self.logger.debug(log_msg, extra=self.dpid_str)
                return

            rcv_info, rcv_tc = in_port.rcv_config_bpdu(bpdu_pkt)
            if rcv_info is SUPERIOR:
                self.logger.info('[port=%d] Receive superior BPDU.',
                                 in_port_no, extra=self.dpid_str)
                self.recalculate_spanning_tree(init=False)
            elif rcv_tc:
                self.send_event(EventTopologyChange(self.dp))

            if in_port.role is ROOT_PORT:
                self._forward_tc_bpdu(rcv_tc)
        elif bpdu.TopologyChangeNotificationBPDUs in pkt:
            """ Receive Topology Change Notification BPDU.
                 send Topology Change Ack BPDU.
                 throw EventTopologyChange.
                 - Root bridge:
                    send Topology Change BPDU from all port.
                 - Non root bridge:
                    send Topology Change Notification BPDU to root bridge. """
            in_port.transmit_ack_bpdu()
            self.topology_change_notify(None)
        elif bpdu.RstBPDUs in pkt:
            """ Receive Rst BPDU. """
            # TODO: RSTP
            pass
        else:
            """ Receive non BPDU packet.
                 throw EventPacketIn. """
            self.send_event(EventPacketIn(msg))

    def recalculate_spanning_tree(self, init=True):
        """ Re-calculation of spanning tree.

        :param init: True to restart from scratch (forget received BPDUs
            and assume root), False to re-elect from the BPDU information
            the ports currently hold.
        """
        # All port down.
        for port in self.ports.values():
            if port.state is not PORT_STATE_DISABLE:
                port.down(PORT_STATE_BLOCK, msg_init=init)

        # Send topology change event.
        if init:
            self.send_event(EventTopologyChange(self.dp))

        # Update tree roles.
        port_roles = {}
        self.root_priority = Priority(self.bridge_id, 0, None, None)
        self.root_times = self.bridge_times

        if init:
            self.logger.info('Root bridge.', extra=self.dpid_str)
            for port_no in self.ports.keys():
                port_roles[port_no] = DESIGNATED_PORT
        else:
            (port_roles,
             self.root_priority,
             self.root_times) = self._spanning_tree_algorithm()

        # All port up.
        for port_no, role in port_roles.items():
            if self.ports[port_no].state is not PORT_STATE_DISABLE:
                self.ports[port_no].up(role, self.root_priority,
                                       self.root_times)

    def _spanning_tree_algorithm(self):
        """ Update tree roles.
             - Root bridge:
                all port is DESIGNATED_PORT.
             - Non root bridge:
                select one ROOT_PORT and some DESIGNATED_PORT,
                and the other port is set to NON_DESIGNATED_PORT."""
        port_roles = {}

        root_port = self._select_root_port()

        if root_port is None:
            # My bridge is a root bridge.
            self.logger.info('Root bridge.', extra=self.dpid_str)
            root_priority = self.root_priority
            root_times = self.root_times

            for port_no in self.ports.keys():
                if self.ports[port_no].state is not PORT_STATE_DISABLE:
                    port_roles[port_no] = DESIGNATED_PORT
        else:
            # Other bridge is a root bridge.
            self.logger.info('Non root bridge.', extra=self.dpid_str)
            root_priority = root_port.designated_priority
            root_times = root_port.designated_times

            port_roles[root_port.ofport.port_no] = ROOT_PORT

            d_ports = self._select_designated_port(root_port)
            for port_no in d_ports:
                port_roles[port_no] = DESIGNATED_PORT

            # Everything left over is blocked.
            for port in self.ports.values():
                if port.state is not PORT_STATE_DISABLE:
                    port_roles.setdefault(port.ofport.port_no,
                                          NON_DESIGNATED_PORT)

        return port_roles, root_priority, root_times

    def _select_root_port(self):
        """ ROOT_PORT is the nearest port to a root bridge.
            It is determined by the cost of path, etc. """
        root_port = None

        for port in self.ports.values():
            root_msg = (self.root_priority if root_port is None
                        else root_port.designated_priority)
            port_msg = port.designated_priority
            if port.state is PORT_STATE_DISABLE or port_msg is None:
                continue
            if root_msg.root_id.value > port_msg.root_id.value:
                result = SUPERIOR
            elif root_msg.root_id.value == port_msg.root_id.value:
                if root_msg.designated_bridge_id is None:
                    result = INFERIOR
                else:
                    result = Stp.compare_root_path(
                        port_msg.root_path_cost,
                        root_msg.root_path_cost,
                        port_msg.designated_bridge_id.value,
                        root_msg.designated_bridge_id.value,
                        port_msg.designated_port_id.value,
                        root_msg.designated_port_id.value)
            else:
                result = INFERIOR

            if result is SUPERIOR:
                root_port = port

        return root_port

    def _select_designated_port(self, root_port):
        """ DESIGNATED_PORT is a port of the side near the root bridge
            of each link. It is determined by the cost of each path, etc
            same as ROOT_PORT. """
        d_ports = []
        root_msg = root_port.designated_priority

        for port in self.ports.values():
            port_msg = port.designated_priority
            if (port.state is PORT_STATE_DISABLE
                    or port.ofport.port_no == root_port.ofport.port_no):
                continue
            if (port_msg is None or
                    (port_msg.root_id.value != root_msg.root_id.value)):
                d_ports.append(port.ofport.port_no)
            else:
                result = Stp.compare_root_path(
                    root_msg.root_path_cost,
                    port_msg.root_path_cost - port.path_cost,
                    self.bridge_id.value,
                    port_msg.designated_bridge_id.value,
                    port.port_id.value,
                    port_msg.designated_port_id.value)
                if result is SUPERIOR:
                    d_ports.append(port.ofport.port_no)

        return d_ports

    def topology_change_notify(self, port_state):
        """ Publish EventTopologyChange and propagate the change:
            a root bridge floods TC BPDUs, a non-root bridge sends a
            TCN BPDU towards the root. """
        notice = False
        if port_state is PORT_STATE_FORWARD:
            # Only notify if we actually relay frames somewhere.
            for port in self.ports.values():
                if port.role is DESIGNATED_PORT:
                    notice = True
                    break
        else:
            notice = True

        if notice:
            self.send_event(EventTopologyChange(self.dp))
            if self.is_root_bridge:
                self._transmit_tc_bpdu()
            else:
                self._transmit_tcn_bpdu()

    def _transmit_tc_bpdu(self):
        for port in self.ports.values():
            port.transmit_tc_bpdu()

    def _transmit_tcn_bpdu(self):
        # Only the root port leads towards the root bridge.
        root_port = None
        for port in self.ports.values():
            if port.role is ROOT_PORT:
                root_port = port
                break
        if root_port:
            root_port.transmit_tcn_bpdu()

    def _forward_tc_bpdu(self, fwd_flg):
        for port in self.ports.values():
            port.send_tc_flg = fwd_flg
class Port(object):
_DEFAULT_VALUE = {'priority': bpdu.DEFAULT_PORT_PRIORITY,
'path_cost': bpdu.PORT_PATH_COST_10MB,
'enable': True}
def __init__(self, dp, logger, config, send_ev_func, timeout_func,
topology_change_func, bridge_id, bridge_times, ofport):
super(Port, self).__init__()
self.dp = dp
self.logger = logger
self.dpid_str = {'dpid': dpid_to_str(dp.id)}
self.config_enable = config.get('enable',
self._DEFAULT_VALUE['enable'])
self.send_event = send_ev_func
self.wait_bpdu_timeout = timeout_func
self.topology_change_notify = topology_change_func
self.ofctl = (OfCtl_v1_0(dp) if dp.ofproto == ofproto_v1_0
else OfCtl_v1_2later(dp))
# Bridge data
self.bridge_id = bridge_id
# Root bridge data
self.port_priority = None
self.port_times = None
# ofproto_v1_X_parser.OFPPhyPort data
self.ofport = ofport
# Port data
values = self._DEFAULT_VALUE
path_costs = {dp.ofproto.OFPPF_10MB_HD: bpdu.PORT_PATH_COST_10MB,
dp.ofproto.OFPPF_10MB_FD: bpdu.PORT_PATH_COST_10MB,
dp.ofproto.OFPPF_100MB_HD: bpdu.PORT_PATH_COST_100MB,
dp.ofproto.OFPPF_100MB_FD: bpdu.PORT_PATH_COST_100MB,
dp.ofproto.OFPPF_1GB_HD: bpdu.PORT_PATH_COST_1GB,
dp.ofproto.OFPPF_1GB_FD: bpdu.PORT_PATH_COST_1GB,
dp.ofproto.OFPPF_10GB_FD: bpdu.PORT_PATH_COST_10GB}
for rate in sorted(path_costs.keys(), reverse=True):
if ofport.curr & rate:
values['path_cost'] = path_costs[rate]
break
for key, value in values.items():
values[key] = value
self.port_id = PortId(values['priority'], ofport.port_no)
self.path_cost = values['path_cost']
self.state = (None if self.config_enable else PORT_STATE_DISABLE)
self.role = None
# Receive BPDU data
self.designated_priority = None
self.designated_times = None
# BPDU handling threads
self.send_bpdu_thread = PortThread(self._transmit_bpdu)
self.wait_bpdu_thread = PortThread(self._wait_bpdu_timer)
self.send_tc_flg = None
self.send_tc_timer = None
self.send_tcn_flg = None
self.wait_timer_event = None
# State machine thread
self.state_machine = PortThread(self._state_machine)
self.state_event = None
self.up(DESIGNATED_PORT,
Priority(bridge_id, 0, None, None),
bridge_times)
self.state_machine.start()
self.logger.debug('[port=%d] Start port state machine.',
self.ofport.port_no, extra=self.dpid_str)
def delete(self):
self.state_machine.stop()
self.send_bpdu_thread.stop()
self.wait_bpdu_thread.stop()
if self.state_event is not None:
self.state_event.set()
self.state_event = None
if self.wait_timer_event is not None:
self.wait_timer_event.set()
self.wait_timer_event = None
self.logger.debug('[port=%d] Stop port threads.',
self.ofport.port_no, extra=self.dpid_str)
def up(self, role, root_priority, root_times):
""" A port is started in the state of LISTEN. """
self.port_priority = root_priority
self.port_times = root_times
state = (PORT_STATE_LISTEN if self.config_enable
else PORT_STATE_DISABLE)
self._change_role(role)
self._change_status(state)
def down(self, state, msg_init=False):
""" A port will be in the state of DISABLE or BLOCK,
and be stopped. """
assert (state is PORT_STATE_DISABLE
or state is PORT_STATE_BLOCK)
if not self.config_enable:
return
if msg_init:
self.designated_priority = None
self.designated_times = None
self._change_role(DESIGNATED_PORT)
self._change_status(state)
def _state_machine(self):
""" Port state machine.
Change next status when timer is exceeded
or _change_status() method is called."""
role_str = {ROOT_PORT: 'ROOT_PORT ',
DESIGNATED_PORT: 'DESIGNATED_PORT ',
NON_DESIGNATED_PORT: 'NON_DESIGNATED_PORT'}
state_str = {PORT_STATE_DISABLE: 'DISABLE',
PORT_STATE_BLOCK: 'BLOCK',
PORT_STATE_LISTEN: 'LISTEN',
PORT_STATE_LEARN: 'LEARN',
PORT_STATE_FORWARD: 'FORWARD'}
if self.state is PORT_STATE_DISABLE:
self.ofctl.set_port_status(self.ofport, self.state)
while True:
self.logger.info('[port=%d] %s / %s', self.ofport.port_no,
role_str[self.role], state_str[self.state],
extra=self.dpid_str)
self.state_event = hub.Event()
timer = self._get_timer()
if timer:
timeout = hub.Timeout(timer)
try:
self.state_event.wait()
except hub.Timeout as t:
if t is not timeout:
err_msg = 'Internal error. Not my timeout.'
raise RyuException(msg=err_msg)
new_state = self._get_next_state()
self._change_status(new_state, thread_switch=False)
finally:
timeout.cancel()
else:
self.state_event.wait()
self.state_event = None
def _get_timer(self):
timer = {PORT_STATE_DISABLE: None,
PORT_STATE_BLOCK: None,
PORT_STATE_LISTEN: self.port_times.forward_delay,
PORT_STATE_LEARN: self.port_times.forward_delay,
PORT_STATE_FORWARD: None}
return timer[self.state]
def _get_next_state(self):
next_state = {PORT_STATE_DISABLE: None,
PORT_STATE_BLOCK: None,
PORT_STATE_LISTEN: PORT_STATE_LEARN,
PORT_STATE_LEARN: (PORT_STATE_FORWARD
if (self.role is ROOT_PORT or
self.role is DESIGNATED_PORT)
else PORT_STATE_BLOCK),
PORT_STATE_FORWARD: None}
return next_state[self.state]
def _change_status(self, new_state, thread_switch=True):
if new_state is not PORT_STATE_DISABLE:
self.ofctl.set_port_status(self.ofport, new_state)
if(new_state is PORT_STATE_FORWARD or
(self.state is PORT_STATE_FORWARD and
(new_state is PORT_STATE_DISABLE or
new_state is PORT_STATE_BLOCK))):
self.topology_change_notify(new_state)
if (new_state is PORT_STATE_DISABLE
or new_state is PORT_STATE_BLOCK):
self.send_tc_flg = False
self.send_tc_timer = None
self.send_tcn_flg = False
self.send_bpdu_thread.stop()
elif new_state is PORT_STATE_LISTEN:
self.send_bpdu_thread.start()
self.state = new_state
self.send_event(EventPortStateChange(self.dp, self))
if self.state_event is not None:
self.state_event.set()
self.state_event = None
if thread_switch:
hub.sleep(0) # For thread switching.
def _change_role(self, new_role):
if self.role is new_role:
return
self.role = new_role
if (new_role is ROOT_PORT
or new_role is NON_DESIGNATED_PORT):
self.wait_bpdu_thread.start()
else:
assert new_role is DESIGNATED_PORT
self.wait_bpdu_thread.stop()
    def rcv_config_bpdu(self, bpdu_pkt):
        """Process a received Configuration BPDU.

        Compares the BPDU's priority vector/times against the currently
        held designated values, refreshes the wait-BPDU timer when the
        sender is (still) the better bridge, and inspects the
        TopologyChange / TopologyChangeAck flag bits.

        Args:
            bpdu_pkt: a parsed ConfigurationBPDUs packet.

        Returns:
            tuple: (rcv_info, rcv_tc) where rcv_info is the comparison
            result (e.g. SUPERIOR/REPEATED/INFERIOR) and rcv_tc is True
            if the TopologyChange flag was set on a relevant BPDU.
        """
        # Check received BPDU is superior to currently held BPDU.
        root_id = BridgeId(bpdu_pkt.root_priority,
                           bpdu_pkt.root_system_id_extension,
                           bpdu_pkt.root_mac_address)
        root_path_cost = bpdu_pkt.root_path_cost
        designated_bridge_id = BridgeId(bpdu_pkt.bridge_priority,
                                        bpdu_pkt.bridge_system_id_extension,
                                        bpdu_pkt.bridge_mac_address)
        designated_port_id = PortId(bpdu_pkt.port_priority,
                                    bpdu_pkt.port_number)

        msg_priority = Priority(root_id, root_path_cost,
                                designated_bridge_id,
                                designated_port_id)
        msg_times = Times(bpdu_pkt.message_age,
                          bpdu_pkt.max_age,
                          bpdu_pkt.hello_time,
                          bpdu_pkt.forward_delay)

        rcv_info = Stp.compare_bpdu_info(self.designated_priority,
                                         self.designated_times,
                                         msg_priority, msg_times)
        if rcv_info is SUPERIOR:
            # Adopt the better information as our designated values.
            self.designated_priority = msg_priority
            self.designated_times = msg_times

        # Decide whether the flag bits of this BPDU are relevant:
        # - SUPERIOR/REPEATED info on a port that listens for BPDUs
        #   also refreshes the wait-BPDU (max age) timer.
        # - INFERIOR info matters only on a designated port.
        chk_flg = False
        if ((rcv_info is SUPERIOR or rcv_info is REPEATED)
                and (self.role is ROOT_PORT
                     or self.role is NON_DESIGNATED_PORT)):
            self._update_wait_bpdu_timer()
            chk_flg = True
        elif(rcv_info is INFERIOR and self.role is DESIGNATED_PORT):
            chk_flg = True

        # Check TopologyChange flag.
        rcv_tc = False
        if chk_flg:
            tc_flag_mask = 0b00000001      # bit 0: TopologyChange
            tcack_flag_mask = 0b10000000   # bit 7: TopologyChangeAck
            if bpdu_pkt.flags & tc_flag_mask:
                self.logger.debug('[port=%d] receive TopologyChange BPDU.',
                                  self.ofport.port_no, extra=self.dpid_str)
                rcv_tc = True
            if bpdu_pkt.flags & tcack_flag_mask:
                self.logger.debug('[port=%d] receive TopologyChangeAck BPDU.',
                                  self.ofport.port_no, extra=self.dpid_str)
                # Our TCN was acknowledged; stop retransmitting it.
                if self.send_tcn_flg:
                    self.send_tcn_flg = False
        return rcv_info, rcv_tc
def _update_wait_bpdu_timer(self):
if self.wait_timer_event is not None:
self.wait_timer_event.set()
self.wait_timer_event = None
self.logger.debug('[port=%d] Wait BPDU timer is updated.',
self.ofport.port_no, extra=self.dpid_str)
hub.sleep(0) # For thread switching.
    def _wait_bpdu_timer(self):
        """Watchdog loop: detect loss of BPDUs from the designated bridge.

        Each iteration arms a timeout of (max_age - message_age) seconds
        and blocks on wait_timer_event. If the event is set first (a
        fresh BPDU arrived, see _update_wait_bpdu_timer), the loop simply
        rearms. If the timeout fires, the spanning tree must be
        recalculated and the loop exits.
        """
        time_exceed = False

        while True:
            self.wait_timer_event = hub.Event()
            # Remaining lifetime of the held BPDU information.
            message_age = (self.designated_times.message_age
                           if self.designated_times else 0)
            timer = self.port_times.max_age - message_age
            timeout = hub.Timeout(timer)
            try:
                self.wait_timer_event.wait()
            except hub.Timeout as t:
                if t is not timeout:
                    # A Timeout from some other context would indicate a bug.
                    err_msg = 'Internal error. Not my timeout.'
                    raise RyuException(msg=err_msg)
                self.logger.info('[port=%d] Wait BPDU timer is exceeded.',
                                 self.ofport.port_no, extra=self.dpid_str)
                time_exceed = True
            finally:
                timeout.cancel()
                self.wait_timer_event = None

            if time_exceed:
                break

        if time_exceed:  # Bridge.recalculate_spanning_tree
            hub.spawn(self.wait_bpdu_timeout)
    def _transmit_bpdu(self):
        """Periodic BPDU transmit loop (one iteration per hello_time).

        A DESIGNATED port sends Configuration BPDUs, with the
        TopologyChange flag set while send_tc_flg is armed (it auto-
        expires when send_tc_timer passes). Independently of role, a
        TCN BPDU is sent while send_tcn_flg is armed, i.e. until a
        TopologyChangeAck clears it (see rcv_config_bpdu).
        """
        while True:
            # Send config BPDU packet if port role is DESIGNATED_PORT.
            if self.role == DESIGNATED_PORT:
                now = datetime.datetime.today()
                # Expire the TopologyChange advertisement window.
                if self.send_tc_timer and self.send_tc_timer < now:
                    self.send_tc_timer = None
                    self.send_tc_flg = False

                if not self.send_tc_flg:
                    flags = 0b00000000
                    log_msg = '[port=%d] Send Config BPDU.'
                else:
                    flags = 0b00000001  # TopologyChange flag bit.
                    log_msg = '[port=%d] Send TopologyChange BPDU.'
                bpdu_data = self._generate_config_bpdu(flags)
                self.ofctl.send_packet_out(self.ofport.port_no, bpdu_data)
                self.logger.debug(log_msg, self.ofport.port_no,
                                  extra=self.dpid_str)

            # Send Topology Change Notification BPDU until receive Ack.
            if self.send_tcn_flg:
                bpdu_data = self._generate_tcn_bpdu()
                self.ofctl.send_packet_out(self.ofport.port_no, bpdu_data)
                self.logger.debug('[port=%d] Send TopologyChangeNotify BPDU.',
                                  self.ofport.port_no, extra=self.dpid_str)

            hub.sleep(self.port_times.hello_time)
def transmit_tc_bpdu(self):
""" Set send_tc_flg to send Topology Change BPDU. """
if not self.send_tc_flg:
timer = datetime.timedelta(seconds=self.port_times.max_age
+ self.port_times.forward_delay)
self.send_tc_timer = datetime.datetime.today() + timer
self.send_tc_flg = True
def transmit_ack_bpdu(self):
""" Send Topology Change Ack BPDU. """
ack_flags = 0b10000001
bpdu_data = self._generate_config_bpdu(ack_flags)
self.ofctl.send_packet_out(self.ofport.port_no, bpdu_data)
    def transmit_tcn_bpdu(self):
        # Arm the TCN flag; _transmit_bpdu then sends a TCN BPDU every
        # hello_time until a TopologyChangeAck clears it (rcv_config_bpdu).
        self.send_tcn_flg = True
def _generate_config_bpdu(self, flags):
src_mac = self.ofport.hw_addr
dst_mac = bpdu.BRIDGE_GROUP_ADDRESS
length = (bpdu.bpdu._PACK_LEN + bpdu.ConfigurationBPDUs.PACK_LEN
+ llc.llc._PACK_LEN + llc.ControlFormatU._PACK_LEN)
e = ethernet.ethernet(dst_mac, src_mac, length)
l = llc.llc(llc.SAP_BPDU, llc.SAP_BPDU, llc.ControlFormatU())
b = bpdu.ConfigurationBPDUs(
flags=flags,
root_priority=self.port_priority.root_id.priority,
root_mac_address=self.port_priority.root_id.mac_addr,
root_path_cost=self.port_priority.root_path_cost + self.path_cost,
bridge_priority=self.bridge_id.priority,
bridge_mac_address=self.bridge_id.mac_addr,
port_priority=self.port_id.priority,
port_number=self.ofport.port_no,
message_age=self.port_times.message_age + 1,
max_age=self.port_times.max_age,
hello_time=self.port_times.hello_time,
forward_delay=self.port_times.forward_delay)
pkt = packet.Packet()
pkt.add_protocol(e)
pkt.add_protocol(l)
pkt.add_protocol(b)
pkt.serialize()
return pkt.data
def _generate_tcn_bpdu(self):
src_mac = self.ofport.hw_addr
dst_mac = bpdu.BRIDGE_GROUP_ADDRESS
length = (bpdu.bpdu._PACK_LEN
+ bpdu.TopologyChangeNotificationBPDUs.PACK_LEN
+ llc.llc._PACK_LEN + llc.ControlFormatU._PACK_LEN)
e = ethernet.ethernet(dst_mac, src_mac, length)
l = llc.llc(llc.SAP_BPDU, llc.SAP_BPDU, llc.ControlFormatU())
b = bpdu.TopologyChangeNotificationBPDUs()
pkt = packet.Packet()
pkt.add_protocol(e)
pkt.add_protocol(l)
pkt.add_protocol(b)
pkt.serialize()
return pkt.data
class PortThread(object):
    """Restartable wrapper around a single hub green thread.

    Holds a target callable and at most one running green thread for it.
    start() always replaces any previous thread; stop() is idempotent.
    """

    def __init__(self, function):
        super(PortThread, self).__init__()
        self.function = function  # Callable executed by the spawned thread.
        self.thread = None        # The active green thread, or None.

    def start(self):
        """(Re)spawn the worker, killing any previous instance first."""
        self.stop()
        self.thread = hub.spawn(self.function)

    def stop(self):
        """Kill the worker and wait for it to finish; no-op if stopped."""
        if self.thread is None:
            return
        hub.kill(self.thread)
        hub.joinall([self.thread])
        self.thread = None
class BridgeId(object):
    """STP bridge identifier: (priority, system-id extension, MAC address).

    `value` holds the packed encoding produced by
    bpdu.ConfigurationBPDUs.encode_bridge_id, used for priority
    comparisons.
    """

    def __init__(self, priority, system_id_extension, mac_addr):
        super(BridgeId, self).__init__()
        self.priority = priority
        self.system_id_extension = system_id_extension
        self.mac_addr = mac_addr
        # Packed form, comparable as a single number/byte string.
        self.value = bpdu.ConfigurationBPDUs.encode_bridge_id(
            priority, system_id_extension, mac_addr)
class PortId(object):
    """STP port identifier: (priority, port number).

    `value` holds the packed encoding produced by
    bpdu.ConfigurationBPDUs.encode_port_id, used for priority comparisons.
    """

    def __init__(self, priority, port_no):
        super(PortId, self).__init__()
        self.priority = priority
        self.port_no = port_no
        # Packed form, comparable as a single number.
        self.value = bpdu.ConfigurationBPDUs.encode_port_id(priority, port_no)
class Priority(object):
    """Spanning-tree priority vector.

    Bundles the four values compared when choosing between BPDUs:
    root bridge id, cost to the root, and the designated bridge/port
    that relayed the information.
    """

    def __init__(self, root_id, root_path_cost,
                 designated_bridge_id, designated_port_id):
        super(Priority, self).__init__()
        # Stored in comparison order (most significant first).
        self.root_id = root_id
        self.root_path_cost = root_path_cost
        self.designated_port_id = designated_port_id
        self.designated_bridge_id = designated_bridge_id
class Times(object):
    """Timer values carried in a Configuration BPDU.

    message_age: age of the information; max_age: lifetime before the
    information is discarded; hello_time: BPDU transmit interval;
    forward_delay: LISTEN/LEARN state duration.
    """

    def __init__(self, message_age, max_age, hello_time, forward_delay):
        super(Times, self).__init__()
        self.forward_delay = forward_delay
        self.hello_time = hello_time
        self.max_age = max_age
        self.message_age = message_age
class OfCtl_v1_0(object):
    """OpenFlow 1.0 helper: packet-out and port-config operations."""

    def __init__(self, dp):
        super(OfCtl_v1_0, self).__init__()
        self.dp = dp  # Target datapath (switch connection).

    def send_packet_out(self, out_port, data):
        """Emit *data* from the controller out of switch port *out_port*."""
        ofp = self.dp.ofproto
        parser = self.dp.ofproto_parser
        output = parser.OFPActionOutput(out_port, 0)
        self.dp.send_packet_out(buffer_id=ofp.OFP_NO_BUFFER,
                                in_port=ofp.OFPP_CONTROLLER,
                                actions=[output], data=data)

    def set_port_status(self, port, state):
        """Send an OFPPortMod applying the config bits mapped to *state*."""
        parser = self.dp.ofproto_parser
        mask = 0b1111111  # Update every OF1.0 port-config bit.
        mod = parser.OFPPortMod(self.dp, port.port_no, port.hw_addr,
                                PORT_CONFIG_V1_0[state], mask,
                                port.advertised)
        self.dp.send_msg(mod)
class OfCtl_v1_2later(OfCtl_v1_0):
    """OpenFlow 1.2+ helper: adds flow-based packet-in suppression."""

    def __init__(self, dp):
        super(OfCtl_v1_2later, self).__init__(dp)

    def set_port_status(self, port, state):
        """Apply the config for *state* and sync the no-packet-in flow."""
        ofp = self.dp.ofproto
        parser = self.dp.ofproto_parser
        config_tables = {ofproto_v1_2: PORT_CONFIG_V1_2,
                         ofproto_v1_3: PORT_CONFIG_V1_3}
        port_config = config_tables[ofp][state]

        # Only turn on the relevant bits defined on OpenFlow 1.2+, otherwise
        # some switch that follows the specification strictly will report
        # OFPPMFC_BAD_CONFIG error.
        mask = 0b1100101
        self.dp.send_msg(parser.OFPPortMod(self.dp, port.port_no,
                                           port.hw_addr, port_config,
                                           mask, port.advertised))

        # Keep the drop-packet-in flow consistent with the new config.
        if port_config & ofp.OFPPC_NO_PACKET_IN:
            self.add_no_pkt_in_flow(port.port_no)
        else:
            self.del_no_pkt_in_flow(port.port_no)

    def add_bpdu_pkt_in_flow(self):
        """Install a flow punting BPDU multicast frames to the controller."""
        ofp = self.dp.ofproto
        parser = self.dp.ofproto_parser
        match = parser.OFPMatch(eth_dst=bpdu.BRIDGE_GROUP_ADDRESS)
        to_controller = parser.OFPActionOutput(ofp.OFPP_CONTROLLER,
                                               ofp.OFPCML_NO_BUFFER)
        instructions = [parser.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS,
                                                     [to_controller])]
        self.dp.send_msg(parser.OFPFlowMod(self.dp,
                                           priority=BPDU_PKT_IN_PRIORITY,
                                           match=match,
                                           instructions=instructions))

    def add_no_pkt_in_flow(self, in_port):
        """Install an action-less (drop) flow for traffic from *in_port*."""
        parser = self.dp.ofproto_parser
        self.dp.send_msg(parser.OFPFlowMod(
            self.dp, priority=NO_PKT_IN_PRIORITY,
            match=parser.OFPMatch(in_port=in_port)))

    def del_no_pkt_in_flow(self, in_port):
        """Remove the drop flow previously installed for *in_port*."""
        ofp = self.dp.ofproto
        parser = self.dp.ofproto_parser
        self.dp.send_msg(parser.OFPFlowMod(
            self.dp, command=ofp.OFPFC_DELETE_STRICT,
            out_port=ofp.OFPP_ANY, out_group=ofp.OFPG_ANY,
            priority=NO_PKT_IN_PRIORITY,
            match=parser.OFPMatch(in_port=in_port)))
| |
import sqlalchemy as _sqla
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from mc.utils import update_helper
from .query_builder import QueryBuilder
class Db(object):
    """SQLAlchemy-backed persistence facade for mc items.

    Wraps an engine/session pair and provides CRUD helpers, query
    building, queue claiming, lock management and generic upserts for
    the item types listed in :attr:`ITEM_TYPES`.
    """

    ITEM_TYPES = ['job', 'flow', 'queue']

    class ItemNotFoundError(Exception):
        """Raised when a lookup by key matches no stored item."""
        pass

    def __init__(self, engine=None, db_uri=None, schema=None,
                 ensure_tables=False):
        """
        Args:
            engine: a ready SQLAlchemy engine; takes precedence over db_uri.
            db_uri: a DB URI string, or a zero-arg callable returning one;
                resolved lazily on first :attr:`engine` access.
            schema: module exposing ``metadata`` and ``models``; defaults
                to this package's sibling ``schema`` module.
            ensure_tables (bool): when truthy, create tables immediately.
        """
        if engine:
            self.engine = engine
        else:
            self.db_uri = db_uri
        if schema:
            self.schema = schema
        if ensure_tables:
            self.ensure_tables()
        self.query_builder = QueryBuilder()

    @property
    def engine(self):
        # Lazily created so a Db can be constructed before the DB exists;
        # db_uri may be a callable for deferred URI resolution.
        if not hasattr(self, '_engine'):
            db_uri = self.db_uri
            if callable(db_uri):
                db_uri = db_uri()
            self._engine = create_engine(db_uri)
        return self._engine

    @engine.setter
    def engine(self, value):
        self._engine = value

    @property
    def schema(self):
        if not hasattr(self, '_schema'):
            self._schema = self._get_default_schema()
        return self._schema

    @schema.setter
    def schema(self, value):
        self._schema = value

    def _get_default_schema(self):
        # Imported lazily to avoid an import cycle at module load time.
        from . import schema
        return schema

    @property
    def session(self):
        # One lazily-created session per Db instance.
        if not hasattr(self, '_session'):
            self._session = self.Session()
        return self._session

    @session.setter
    def session(self, value):
        self._session = value

    @property
    def Session(self):
        # Session factory bound to this Db's engine.
        if not hasattr(self, '_Session'):
            self._Session = sessionmaker(bind=self.engine)
        return self._Session

    @Session.setter
    def Session(self, value):
        self._Session = value

    def ensure_tables(self):
        """Create all schema tables (existing tables are left untouched)."""
        assert self.schema is not None
        self.create_tables()

    def create_tables(self):
        self.schema.metadata.create_all(self.engine)

    def drop_tables(self):
        self.schema.metadata.drop_all(self.engine)

    @property
    def models(self):
        return self.schema.models

    def create_item(self, item_type=None, item_kwargs=None):
        """Create and persist an item; return it as a dict."""
        Model = self.get_model_for_item_type(item_type)
        with self.session.begin_nested():
            item = Model(**item_kwargs)
            self.session.add(item)
        return self.item_to_dict(item)

    def get_model_for_item_type(self, item_type):
        # 'job' -> models.Job etc.; already-capitalized names pass through.
        return getattr(self.models, item_type.title())

    def query_items(self, item_type=None, query=None):
        """Get items of item_type that match the given query.

        Args:
            item_type (str): one of :attr:`.ITEM_TYPES`
            query (dict, optional): a dict in this shape:
                ::

                    {'filters': [filter_dict_1, ...filter_dict_n]}

                where a filter_dict has this shape: ::

                    {'field': 'prop_name', 'op': 'op_name',
                     'arg': 'op argument'}

        Returns:
            items (list): a list of retrieved items, as dicts.
        """
        q = self.generate_item_query(item_type=item_type, query_spec=query)
        return self.items_to_dicts(items=q)

    def generate_item_query(self, item_type=None, query_spec=None):
        """Build a SQLAlchemy query for item_type shaped by query_spec."""
        Model = self.get_model_for_item_type(item_type)
        base_query = self.session.query(Model)
        return self.query_builder.alter_query_per_query_spec(
            query=base_query, query_spec=query_spec)

    def patch_item(self, item_type=None, key=None, patches=None):
        """
        Args:
            item_type (str): one of :attr:`.ITEM_TYPES`
            key (str): the item's key.
            patches (dict): a dict of item props to update.

        Returns:
            patched_item (dict): the patched item.

        Raises:
            ItemNotFoundError: if no item has the given key.
        """
        Model = self.get_model_for_item_type(item_type)
        with self.session.begin_nested():
            item = self.session.query(Model).filter_by(key=key).first()
            if item is None:
                # Previously fell through to an opaque AttributeError on
                # setattr; fail with the class's own lookup error instead.
                raise self.ItemNotFoundError(
                    "item_type '{item_type}', key '{key}'".format(
                        item_type=item_type, key=key))
            for field, value in patches.items():
                setattr(item, field, value)
            self.session.add(item)
        return item.to_dict()

    def flush(self, item_types=None):
        """Clear the db tables for the given item types (default: all)."""
        for item_type in (item_types or self.ITEM_TYPES):
            Model = self.get_model_for_item_type(item_type)
            self.session.query(Model).delete()

    def items_to_dicts(self, items):
        return [self.item_to_dict(item) for item in items]

    def item_to_dict(self, item):
        return item.to_dict()

    def get_lock_count_subquery(self):
        """Subquery yielding (lock_count, lockee_key) per locked item."""
        lock_query = (
            self.session.query(
                _sqla.func.count(self.models.Lock.key).label('lock_count'),
                self.models.Lock.lockee_key.label('lockee_key')
            )
            .group_by(self.models.Lock.lockee_key)
        )
        return lock_query.subquery()

    def delete_items(self, item_type=None, query=None):
        """
        Args:
            item_type (str): one of :attr:`.ITEM_TYPES`
            query (dict): a query dict (see :meth:`query_items`)

        Returns:
            dict: {'num_deleted': <number of deleted rows>}
        """
        q = self.generate_item_query(item_type=item_type, query_spec=query)
        return {'num_deleted': q.delete(synchronize_session='fetch')}

    def get_item_by_key(self, item_type=None, key=None):
        """
        Args:
            item_type (str): one of :attr:`.ITEM_TYPES`
            key (str): the item's key

        Returns:
            item (dict): the matching item.

        Raises:
            ItemNotFoundError: if no item has the given key.
        """
        try:
            return self.query_items(item_type=item_type, query={
                'filters': [{'field': 'key', 'op': '=', 'arg': key}]
            })[0]
        except IndexError as exc:
            error_details = "item_type '{item_type}', key '{key}'".format(
                item_type=item_type, key=key)
            # Chain the original IndexError for debuggability.
            raise self.ItemNotFoundError(error_details) from exc

    def patch_items(self, item_type=None, keyed_patches=None):
        """
        Args:
            item_type (str): one of :attr:`.ITEM_TYPES`
            keyed_patches (dict): a dictionary in which the keys are
                item_keys and the values are dicts of item props to update.

        Returns:
            patched_items (dict): a dictionary of patched results, keyed by
                item keys
        """
        return {
            key: self.patch_item(item_type=item_type, key=key, patches=patches)
            for key, patches in keyed_patches.items()
        }

    def claim_queue_items(self, queue_key=None, **kwargs):
        """
        Builds query for queue by examining queue's queue_spec.

        Args:
            queue_key (str): the queue's key

        Returns:
            claimed_items (dict): a dict of claim result, in this shape:
                ::

                    {items: [claimed_item_1, ..., claimed_item_n]}
        """
        queue = self.get_item_by_key(item_type='queue', key=queue_key)
        items_to_claim = self.get_queue_items_to_claim(queue=queue)
        if items_to_claim:
            # Materialize as a list so both branches return the same type
            # (previously a dict_values view leaked out here).
            claimed_items = list(self.patch_items(
                item_type=queue['queue_spec']['item_type'],
                keyed_patches={
                    item['key']: {'claimed': True} for item in items_to_claim
                }
            ).values())
        else:
            claimed_items = []
        return {'items': claimed_items}

    def get_queue_items_to_claim(self, queue=None):
        """
        Args:
            queue (dict): a queue record

        Returns:
            items (list): a list of items that match the queue's query.
        """
        queue_item_type = queue['queue_spec']['item_type']
        # Flows need special handling to account for lock records.
        if queue_item_type == 'flow':
            claim_fn = self.get_flow_queue_items_to_claim
        else:
            claim_fn = self.default_get_queue_items_to_claim
        return claim_fn(queue=queue)

    def get_flow_queue_items_to_claim(self, queue=None):
        """Get claimable flow queue items, accounting for lock records.

        A flow is claimable if it passes the default claiming filters and
        it still has tickable tasks beyond those already locked.
        """
        Flow = self.models.Flow
        query = self.session.query(Flow)
        query = self.query_builder.alter_query_per_query_spec(
            query=query,
            query_spec={'filters': self.get_default_claiming_filters()}
        )
        lock_count_subquery = self.get_lock_count_subquery()
        query = (
            query.join(
                lock_count_subquery,
                (Flow.key == lock_count_subquery.c.lockee_key),
                isouter=True,
            )
            .filter(
                # Unknown task count, no locks at all, or spare capacity.
                (Flow.num_tickable_tasks.is_(None))
                | (lock_count_subquery.c.lock_count.is_(None))
                | (Flow.num_tickable_tasks > lock_count_subquery.c.lock_count)
            )
        )
        return self.items_to_dicts(items=query)

    def get_default_claiming_filters(self):
        """
        Returns:
            filters (list): a list of default filters to use for claiming
                queue items.
        """
        return [
            {'field': 'claimed', 'op': '=', 'arg': False},
            {'field': 'status', 'op': 'IN', 'arg': ['PENDING', 'RUNNING']}
        ]

    def default_get_queue_items_to_claim(self, queue=None, filters=None):
        """Default handler for claiming queue items.

        Args:
            queue (dict): a queue record
            filters (list): filters to use for getting claimable items;
                defaults to :meth:`get_default_claiming_filters`.

        Returns:
            items (list): a list of items that match the combination of the
                filters and the queue's queue_spec.
        """
        # Bug fix: the `filters` argument was previously accepted but
        # silently ignored; honor it, falling back to the defaults.
        if filters is None:
            filters = self.get_default_claiming_filters()
        return self.query_items(
            item_type=queue['queue_spec']['item_type'],
            query={'filters': filters}
        )

    def create_lock(self, lockee_key=None, locker_key=None):
        """Create a lock record.

        Args:
            lockee_key (str): key for the item being locked.
            locker_key (str): key for the item that holds the lock.

        Returns:
            lock_record (dict): a lock_record
        """
        # Return the created record so the docstring's contract holds
        # (previously the result was silently discarded).
        return self.create_item(
            item_type='Lock',
            item_kwargs={'lockee_key': lockee_key, 'locker_key': locker_key}
        )

    def release_locks(self, locker_keys=None):
        """Release locks.

        Args:
            locker_keys (list): keys of the items that hold the locks.

        Returns:
            dict: {'num_deleted': <number of released locks>}
        """
        return self.delete_items(
            item_type='lock',
            query={
                'filters': [
                    {'field': 'locker_key', 'op': 'IN', 'arg': locker_keys},
                ]
            }
        )

    def upsert(self, key=None, updates=None, model_type=None, commit=True):
        """Insert-or-update the instance identified by *key*.

        Args:
            key (str): instance key; its 'type:' prefix supplies the model
                name when model_type is not given.
            updates (list): update tuples consumed by update_helper.
            model_type (str, optional): explicit model name.
            commit (bool): commit the session after merging.
        """
        model_type = model_type or key.split(':')[0].title()
        model = getattr(self.models, model_type)
        instance, _created = self.get_or_create_instance(key=key, model=model)
        updates = self._alter_updates(updates)
        update_helper.update(instance, updates)
        self.session.merge(instance)
        if commit:
            self.session.commit()

    def _alter_updates(self, updates):
        return [self._alter_update(update) for update in updates]

    def _alter_update(self, update):
        """Inject the session into $call updates that require it."""
        calls_that_need_session = ['add_parents_by_key',
                                   'add_ancestors_by_key']
        if (
            update[1] == '$call' and
            update[0].split('.')[-1] in calls_that_need_session
        ):
            altered_update = self._alter_update_that_needs_session(update)
        else:
            altered_update = update
        return altered_update

    def _alter_update_that_needs_session(self, update):
        # Update shape: [target, '$call', args?, kwargs?]; ensure kwargs
        # exist and include our session.
        session_kwargs = {'session': self.session}
        params = [*update[2:]]
        if len(params) == 0:
            params = [[], session_kwargs]
        elif len(params) == 1:
            params.append(session_kwargs)
        elif len(params) == 2:
            params[1] = {**params[1], **session_kwargs}
        return [update[0], update[1], *params]

    def get_or_create_instance(self, key=None, model=None):
        """Fetch the instance with *key*, creating it if absent.

        Returns:
            tuple: (instance, created) where created is True if a new
            instance was added to the session.
        """
        instance = self.session.query(model).filter_by(key=key).first()
        if instance:
            return instance, False
        model_kwargs = {'key': key}
        if model is self.models.Ent:
            # Ent keys encode the ent_type: '<prefix>:<ent_type>:<body>'.
            _, ent_type, key_body = key.split(':', maxsplit=2)
            model_kwargs = {'key': key, 'ent_type': ent_type}
        instance = model(**model_kwargs)
        self.session.add(instance)
        return instance, True

    def execute_actions(self, actions=None):
        """Execute a batch of actions in one nested transaction, then commit.

        Returns:
            list: per-action results.
        """
        results = []
        with self.session.begin_nested():
            for action in actions or []:
                result = self.execute_action(action=action, commit=False)
                results.append(result)
        self.session.commit()
        return results

    def execute_action(self, action=None, commit=False):
        """Dispatch a single action dict ({'type': ..., 'params': ...}).

        Raises:
            ValueError: for an unknown action type (previously this
                surfaced as a confusing NameError).
        """
        params = action.get('params', {})
        if action['type'] == 'upsert':
            fn = self.upsert
            params = {**params, 'commit': commit}
        else:
            raise ValueError(
                "unknown action type: {!r}".format(action['type']))
        return fn(**params)
| |
# -*- coding: utf-8 -*-
"""Custom SQLAlchemy types for use with the Annotations API database."""
from __future__ import unicode_literals
from hyputils.memex._compat import string_types
import binascii
import base64
import uuid
from sqlalchemy import types
from sqlalchemy.dialects import postgresql
from sqlalchemy.exc import DontWrapMixin
# A magic byte (expressed as two hexadecimal nibbles) which we use to expand a
# 15-byte ElasticSearch flake ID into a 16-byte UUID.
#
# The UUID specification defines UUIDs as taking the form
#
# xxxxxxxx-xxxx-Mxxx-Nxxx-xxxxxxxxxxxx
#
# in the canonical hexadecimal representation. M and N represent the UUID
# version and variant fields respectively. The four bits M can take values {1,
# 2, 3, 4, 5} in specified UUID types, and the first three bits of N can take
# the values {8, 9, 0xa, 0xb} in specified UUID types.
#
# In order to expand a 15-byte ElasticSearch flake ID into a value that can be
# stored in the UUID field, we insert the magic nibbles 0xe, 0x5 into the
# version and variant fields respectively. These values are disjoint with any
# specified UUID so the resulting UUID can be distinguished from those
# generated by, for example, PostgreSQL's uuid_generate_v1mc(), and mapped back
# to a 20-char ElasticSearch flake ID.
ES_FLAKE_MAGIC_BYTE = ["e", "5"]
class InvalidUUID(Exception, DontWrapMixin):
    """Raised when a value cannot be interpreted as an encoded UUID.

    DontWrapMixin prevents SQLAlchemy from wrapping this exception in a
    StatementError, so callers can catch InvalidUUID directly.
    """
    pass
class URLSafeUUID(types.TypeDecorator):
    """
    Expose UUIDs as URL-safe base64-encoded strings.

    Storage uses PostgreSQL UUID columns while the application sees
    URL-safe strings: ordinary UUIDs round-trip as 22-character IDs, and
    post-v1.4 ElasticSearch flake IDs (20 characters, 15 bytes of data)
    are transparently mapped in both directions.
    """

    impl = postgresql.UUID

    def process_bind_param(self, value, dialect):
        """App -> DB: URL-safe ID becomes a hex UUID; None passes through."""
        return None if value is None else _get_hex_from_urlsafe(value)

    def process_result_value(self, value, dialect):
        """DB -> App: UUID string becomes a URL-safe ID; None passes through."""
        if value is None:
            return None
        return _get_urlsafe_from_hex(uuid.UUID(value).hex)
class AnnotationSelectorJSONB(types.TypeDecorator):
    """
    Special type for the Annotation selector column.

    On write, literal NULL (\u0000) bytes are escaped to \\u0000; on read
    they are unescaped again — but only in the prefix/exact/suffix fields
    of TextQuoteSelector entries.
    """

    impl = postgresql.JSONB

    def process_bind_param(self, value, dialect):
        """App -> DB: escape NUL bytes in TextQuoteSelector fields."""
        return _transform_quote_selector(value, _escape_null_byte)

    def process_result_value(self, value, dialect):
        """DB -> App: restore NUL bytes in TextQuoteSelector fields."""
        return _transform_quote_selector(value, _unescape_null_byte)
def _get_hex_from_urlsafe(value):
    """
    Convert a URL-safe base 64 ID to a hex UUID.

    22-char inputs decode to 16 bytes (a full UUID). 20-char inputs
    decode to 15 bytes (an ElasticSearch flake ID) and are expanded to a
    UUID by splicing in the magic nibbles (see ES_FLAKE_MAGIC_BYTE).

    :type value: unicode
    :rtype: unicode
    :raises InvalidUUID: for non-string input or an undecodable value.
    """
    def _fail():
        raise InvalidUUID("{0!r} is not a valid encoded UUID".format(value))

    if not isinstance(value, string_types):
        raise InvalidUUID(
            "`value` is {}, expected one of {}".format(type(value), string_types)
        )

    encoded = value.encode()
    size = len(encoded)

    if size == 22:
        # 16 bytes of data: standard base64 would carry two trailing
        # padding characters, so restore them before decoding.
        try:
            raw = _must_b64_decode(encoded + b"==", expected_size=16)
        except (TypeError, binascii.Error):
            _fail()
        return binascii.hexlify(raw).decode()

    if size == 20:
        # 15 bytes of data: no padding correction needed. These are
        # ElasticSearch flake IDs; splice in the magic nibbles to form
        # a UUID (see the comments on ES_FLAKE_MAGIC_BYTE).
        try:
            raw = _must_b64_decode(encoded, expected_size=15)
        except (TypeError, binascii.Error):
            _fail()
        hexstring = binascii.hexlify(raw).decode()
        return (hexstring[0:12]
                + ES_FLAKE_MAGIC_BYTE[0]
                + hexstring[12:15]
                + ES_FLAKE_MAGIC_BYTE[1]
                + hexstring[15:30])

    # Any other length is invalid.
    _fail()
def _get_urlsafe_from_hex(value):
    """
    Convert a hex UUID to a URL-safe base 64 ID.

    UUIDs carrying the flake-ID magic nibbles collapse back to 20-char
    ElasticSearch flake IDs; ordinary UUIDs become 22-char IDs with the
    base64 padding stripped.

    :type value: unicode
    :rtype: unicode
    """
    # Validate and normalise the hex string.
    hexstring = uuid.UUID(hex=value).hex

    if (hexstring[12] == ES_FLAKE_MAGIC_BYTE[0]
            and hexstring[16] == ES_FLAKE_MAGIC_BYTE[1]):
        # Flake ID: drop the two magic nibbles before encoding.
        stripped = hexstring[0:12] + hexstring[13:16] + hexstring[17:32]
        return base64.urlsafe_b64encode(binascii.unhexlify(stripped)).decode()

    # Full UUID: encode all 16 bytes and strip the two padding chars.
    encoded = base64.urlsafe_b64encode(binascii.unhexlify(hexstring))
    return encoded[:-2].decode()
def _must_b64_decode(data, expected_size=None):
result = base64.urlsafe_b64decode(data)
if expected_size is not None and len(result) != expected_size:
raise TypeError("incorrect data size")
return result
def _transform_quote_selector(selectors, transform_func):
if selectors is None:
return None
if not isinstance(selectors, list):
return selectors
for selector in selectors:
if not isinstance(selector, dict):
continue
if not selector.get("type") == "TextQuoteSelector":
continue
if "prefix" in selector:
selector["prefix"] = transform_func(selector["prefix"])
if "exact" in selector:
selector["exact"] = transform_func(selector["exact"])
if "suffix" in selector:
selector["suffix"] = transform_func(selector["suffix"])
return selectors
def _escape_null_byte(s):
if s is None:
return s
return s.replace("\u0000", "\\u0000")
def _unescape_null_byte(s):
if s is None:
return s
return s.replace("\\u0000", "\u0000")
| |
# Copyright (c) 2014-2015, Heliosphere Research LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
This module provides the PlotContext object, in addition to the
color cycle, the RESOURCES dictionary used for method dispatch,
and a few other miscellaneous pieces.
Crucially, it imports all of the api_* modules, which triggers
population of RESOURCES. Once this module is successfully imported,
RESOURCES is ready to go.
This is also the location where Matplotlib global settings are made.
Matplotlib should not be imported before this module is loaded, or
the settings will not be effective.
"""
# Select the non-interactive Agg backend before pyplot is imported — per
# the module docstring, these settings must run before any other
# matplotlib use or they will not take effect.
import matplotlib as m
m.use('Agg')
m.rcParams['mathtext.fontset'] = 'stixsans'  # sans-serif math text
from matplotlib import pyplot as plt
from matplotlib import colors as mcolors
from .import errors
RESOURCES = {}  # Dict mapping string resource names to callables


def resource(name):
    """Decorator binding the decorated callable to *name* in RESOURCES.

    The callable is returned unchanged, so the decorator is transparent.
    """
    def register(target):
        RESOURCES[name] = target
        return target
    return register
def blend_color(color):
    """Return *color* blended 65% toward white, as an RGB tuple.

    Used to derive the softer bar-fill palette from the line colors.
    """
    alpha = 0.65
    base = mcolors.colorConverter.to_rgb(color)
    return tuple(channel * alpha + 1.0 * (1 - alpha) for channel in base)
# Line-color cycle, and its white-blended counterpart for bar fills.
COLOR_CYCLE = ['b', 'g', 'r', 'c', 'm', 'y', '#444444', '#ff00ff']
COLOR_CYCLE_BAR = [blend_color(x) for x in COLOR_CYCLE]
class PlotContext(object):
    """
    Stateful context object for dealing with plots.

    Used in the api_* functions to set and get plot information, and
    to handle the color cycle. All per-plot state is keyed by plot id
    and must be discarded in close() so closed plots do not leak.
    """

    def __init__(self):
        self._plots = set()        # All Plot IDs which are alive
        self._color_indexes = {}   # Dict mapping plot id -> color cycle position
        self._mappable = {}        # Dict mapping plot id -> bool if colormapped object present
        self._polar = {}           # Dict mapping plot id -> bool if polar axes
        self._plotid = None        # The active plot id
        self._xscales = {}         # Maps plot id -> scale mode string
        self._yscales = {}
        self._errorbar_colors = {}
        self._legend_entries = {}  # PlotID -> list of 2-tuples

    def set(self, plotid):
        """ Set "plotid" as the active plot.

        Returns the MPL figure.

        Raises errors.InvalidIdentifier for an unknown plot id.
        """
        if plotid not in self._plots:
            raise errors.InvalidIdentifier("Plot ID 0x%X does not exist" % plotid)
        self._plotid = plotid
        return plt.figure(plotid)

    def new(self, plotid):
        """ Register a new figure with the context object.

        Also sets the current plot to plotid and returns the MPL figure.
        """
        if plotid in self._plots:
            raise ValueError("Plot ID %d already exists" % plotid)
        self._plots.add(plotid)
        return self.set(plotid)

    def close(self):
        """ Close the active figure and forget all of its per-plot state """
        plotid = self._plotid
        f = self.set(plotid)
        plt.close(f)
        self._plotid = None
        self._plots.remove(plotid)
        self._color_indexes.pop(plotid, None)
        self._mappable.pop(plotid, None)
        self._polar.pop(plotid, None)
        self._xscales.pop(plotid, None)
        self._yscales.pop(plotid, None)
        self._errorbar_colors.pop(plotid, None)
        # Bug fix: legend entries were never discarded here, leaking
        # per-plot state for every closed plot.
        self._legend_entries.pop(plotid, None)

    def isvalid(self, plotid):
        """ Determine if a plot identifier is valid """
        return plotid in self._plots

    @property
    def mappable(self):
        """ Mappable for which we should create the colorbar. None means
        no mappable has been created yet."""
        return self._mappable.get(self._plotid, None)

    @mappable.setter
    def mappable(self, m):
        self._mappable[self._plotid] = m

    @property
    def polar(self):
        """ Bool determining if axes are polar """
        return self._polar[self._plotid]  # KeyError is desired, as all plots must have this property

    @polar.setter
    def polar(self, value):
        # The polar-ness of a plot is fixed at creation; a second write
        # indicates a logic error in the caller.
        if self._plotid in self._polar:
            raise AttributeError("Attempt to set .polar property twice")
        else:
            self._polar[self._plotid] = value

    @property
    def xscale(self):
        """ Scale mode """
        return self._xscales.get(self._plotid)

    @xscale.setter
    def xscale(self, value):
        self._xscales[self._plotid] = value

    @property
    def yscale(self):
        """ Scale mode """
        return self._yscales.get(self._plotid)

    @yscale.setter
    def yscale(self, value):
        self._yscales[self._plotid] = value

    @property
    def legend_entries(self):
        """ Return the list of legend entries """
        return self._legend_entries.setdefault(self._plotid, [])

    def next_color(self, bar=False):
        """ Get a color, and advance the cycle.

        If *bar*, return a slightly less eye-piercing version.
        """
        idx = self._color_indexes.setdefault(self._plotid, 0)
        self._color_indexes[self._plotid] += 1
        if bar:
            return COLOR_CYCLE_BAR[idx % len(COLOR_CYCLE_BAR)]
        else:
            return COLOR_CYCLE[idx % len(COLOR_CYCLE)]

    def last_color(self):
        """ Get the last-used color (or the first in the cycle, if none
        has been used) """
        idx = self._color_indexes.get(self._plotid)
        if idx is not None:
            return COLOR_CYCLE[(idx - 1) % len(COLOR_CYCLE)]
        return COLOR_CYCLE[0]

    def errorbar_color(self, color=None):
        """ Set (if *color* given) and return the errorbar color,
        defaulting to the last-used cycle color """
        if color is not None:
            self._errorbar_colors[self._plotid] = color
        return self._errorbar_colors.get(self._plotid, self.last_color())

    def fail_if_polar(self):
        """ Raise PolarNotSupported if the active plot is polar """
        if self.polar:
            raise errors.PolarNotSupported("This VI does not support polar axes")

    def fail_if_symlog(self):
        """ Raise LogNotSupported if either axis uses a symlog scale """
        if self.xscale == 'symlog' or self.yscale == 'symlog':
            raise errors.LogNotSupported("This VI does not support symlog axis scales")

    def fail_if_log_symlog(self):
        """ Raise LogNotSupported unless both axes are linear """
        if self.xscale != 'linear' or self.yscale != 'linear':
            raise errors.LogNotSupported("This VI does not support log or symlog axis scales")
# Trigger population of RESOURCES
from . import api_core, api_figure, api_plotting, api_annotations
| |
import math
import numpy as np
from scipy import ndimage as ndi
from scipy.ndimage import filters as ndif
from collections import OrderedDict
from ..exposure import histogram
from .._shared.utils import assert_nD, warn
# Public thresholding API exported by this module.
__all__ = ['try_all_threshold',
           'threshold_adaptive',
           'threshold_otsu',
           'threshold_yen',
           'threshold_isodata',
           'threshold_li',
           'threshold_minimum',
           'threshold_mean',
           'threshold_triangle']
def _try_all(image, methods=None, figsize=None, num_cols=2, verbose=True):
    """Returns a figure comparing the outputs of different methods.
    Parameters
    ----------
    image : (N, M) ndarray
        Input image.
    methods : dict, optional
        Names and associated functions.
        Functions must take and return an image.
    figsize : tuple, optional
        Figure size (in inches).
    num_cols : int, optional
        Number of columns.
    verbose : bool, optional
        Print function name for each method.
    Returns
    -------
    fig, ax : tuple
        Matplotlib figure and axes.
    """
    # Imported lazily so the module does not require matplotlib.
    from matplotlib import pyplot as plt

    # One extra cell for the original image; int() keeps Python 2.7 happy.
    num_rows = int(math.ceil((len(methods) + 1.) / num_cols))
    fig, ax = plt.subplots(num_rows, num_cols, figsize=figsize,
                           sharex=True, sharey=True,
                           subplot_kw={'adjustable': 'box-forced'})
    ax = ax.ravel()

    ax[0].imshow(image, cmap=plt.cm.gray)
    ax[0].set_title('Original')

    for i, (name, func) in enumerate(methods.items(), start=1):
        ax[i].imshow(func(image), cmap=plt.cm.gray)
        ax[i].set_title(name)
        if verbose:
            print(func.__orifunc__)

    for axis in ax:
        axis.axis('off')
    fig.tight_layout()
    return fig, ax
def try_all_threshold(image, figsize=(8, 5), verbose=True):
    """Returns a figure comparing the outputs of different thresholding methods.
    Parameters
    ----------
    image : (N, M) ndarray
        Input image.
    figsize : tuple, optional
        Figure size (in inches).
    verbose : bool, optional
        Print function name for each method.
    Returns
    -------
    fig, ax : tuple
        Matplotlib figure and axes.
    Notes
    -----
    The following algorithms are used:
    * isodata
    * li
    * mean
    * minimum
    * otsu
    * triangle
    * yen
    Examples
    --------
    >>> from skimage.data import text
    >>> fig, ax = try_all_threshold(text(), figsize=(10, 6), verbose=False)
    """
    def thresh(func):
        """
        A wrapper function to return a thresholded image.
        """
        def wrapper(im):
            return im > func(im)
        try:
            wrapper.__orifunc__ = func.__orifunc__
        except AttributeError:
            # Fall back to the dotted name for plain functions.
            wrapper.__orifunc__ = func.__module__ + '.' + func.__name__
        return wrapper

    # Global algorithms.  Build the OrderedDict from a sequence of pairs:
    # constructing it from a plain dict literal loses the intended display
    # order on Python < 3.7, where dict literals are unordered.
    methods = OrderedDict([('Isodata', thresh(threshold_isodata)),
                           ('Li', thresh(threshold_li)),
                           ('Mean', thresh(threshold_mean)),
                           ('Minimum', thresh(threshold_minimum)),
                           ('Otsu', thresh(threshold_otsu)),
                           ('Triangle', thresh(threshold_triangle)),
                           ('Yen', thresh(threshold_yen))])
    return _try_all(image, figsize=figsize,
                    methods=methods, verbose=verbose)
def threshold_adaptive(image, block_size, method='gaussian', offset=0,
                       mode='reflect', param=None):
    """Applies an adaptive threshold to an array.
    Also known as local or dynamic thresholding where the threshold value is
    the weighted mean for the local neighborhood of a pixel subtracted by a
    constant. Alternatively the threshold can be determined dynamically by a a
    given function using the 'generic' method.
    Parameters
    ----------
    image : (N, M) ndarray
        Input image.
    block_size : int
        Odd size of pixel neighborhood which is used to calculate the
        threshold value (e.g. 3, 5, 7, ..., 21, ...).
    method : {'generic', 'gaussian', 'mean', 'median'}, optional
        Method used to determine adaptive threshold for local neighbourhood in
        weighted mean image.
        * 'generic': use custom function (see `param` parameter)
        * 'gaussian': apply gaussian filter (see `param` parameter for custom\
                      sigma value)
        * 'mean': apply arithmetic mean filter
        * 'median': apply median rank filter
        By default the 'gaussian' method is used.
    offset : float, optional
        Constant subtracted from weighted mean of neighborhood to calculate
        the local threshold value. Default offset is 0.
    mode : {'reflect', 'constant', 'nearest', 'mirror', 'wrap'}, optional
        The mode parameter determines how the array borders are handled, where
        cval is the value when mode is equal to 'constant'.
        Default is 'reflect'.
    param : {int, function}, optional
        Either specify sigma for 'gaussian' method or function object for
        'generic' method. This functions takes the flat array of local
        neighbourhood as a single argument and returns the calculated
        threshold for the centre pixel.
    Returns
    -------
    threshold : (N, M) ndarray
        Thresholded binary image
    Raises
    ------
    ValueError
        If ``block_size`` is even or ``method`` is not one of the four
        supported names.
    References
    ----------
    .. [1] http://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html?highlight=threshold#adaptivethreshold
    Examples
    --------
    >>> from skimage.data import camera
    >>> image = camera()[:50, :50]
    >>> binary_image1 = threshold_adaptive(image, 15, 'mean')
    >>> func = lambda arr: arr.mean()
    >>> binary_image2 = threshold_adaptive(image, 15, 'generic', param=func)
    """
    if block_size % 2 == 0:
        raise ValueError("The kwarg ``block_size`` must be odd! Given "
                         "``block_size`` {0} is even.".format(block_size))
    assert_nD(image, 2)
    thresh_image = np.zeros(image.shape, 'double')
    if method == 'generic':
        ndi.generic_filter(image, param, block_size,
                           output=thresh_image, mode=mode)
    elif method == 'gaussian':
        if param is None:
            # automatically determine sigma which covers > 99% of distribution
            sigma = (block_size - 1) / 6.0
        else:
            sigma = param
        ndi.gaussian_filter(image, sigma, output=thresh_image, mode=mode)
    elif method == 'mean':
        mask = 1. / block_size * np.ones((block_size,))
        # separation of filters to speedup convolution
        ndi.convolve1d(image, mask, axis=0, output=thresh_image, mode=mode)
        ndi.convolve1d(thresh_image, mask, axis=1,
                       output=thresh_image, mode=mode)
    elif method == 'median':
        ndi.median_filter(image, block_size, output=thresh_image, mode=mode)
    else:
        # Previously an unrecognized method fell through silently and the
        # image was compared against an all-zero threshold image, producing
        # a meaningless result.  Fail loudly instead.
        raise ValueError("Invalid method '{0}'; method must be one of "
                         "'generic', 'gaussian', 'mean', 'median'."
                         .format(method))
    return image > (thresh_image - offset)
def threshold_otsu(image, nbins=256):
    """Return threshold value based on Otsu's method.
    Parameters
    ----------
    image : (N, M) ndarray
        Grayscale input image.
    nbins : int, optional
        Number of bins used to calculate histogram. This value is ignored for
        integer arrays.
    Returns
    -------
    threshold : float
        Upper threshold value. All pixels with an intensity higher than
        this value are assumed to be foreground.
    Raises
    ------
    ValueError
        If `image` only contains a single grayscale value.
    References
    ----------
    .. [1] Wikipedia, http://en.wikipedia.org/wiki/Otsu's_Method
    Examples
    --------
    >>> from skimage.data import camera
    >>> image = camera()
    >>> thresh = threshold_otsu(image)
    >>> binary = image <= thresh
    Notes
    -----
    The input image must be grayscale.
    """
    if len(image.shape) > 2 and image.shape[-1] in (3, 4):
        msg = "threshold_otsu is expected to work correctly only for " \
              "grayscale images; image shape {0} looks like an RGB image"
        warn(msg.format(image.shape))

    # A constant image cannot be split into two classes.
    if image.min() == image.max():
        raise ValueError("threshold_otsu is expected to work with images "
                         "having more than one color. The input image seems "
                         "to have just one color {0}.".format(image.min()))

    counts, bin_centers = histogram(image.ravel(), nbins)
    counts = counts.astype(float)

    # Cumulative class probabilities for every candidate threshold.
    weight_lo = np.cumsum(counts)
    weight_hi = np.cumsum(counts[::-1])[::-1]
    # Cumulative class means for every candidate threshold.
    mean_lo = np.cumsum(counts * bin_centers) / weight_lo
    mean_hi = (np.cumsum((counts * bin_centers)[::-1]) / weight_hi[::-1])[::-1]

    # Drop the end bins so the low-class statistics at threshold t line up
    # with the high-class statistics strictly above t (the final value of
    # weight_lo/mean_lo has no high-class counterpart).
    between_var = (weight_lo[:-1] * weight_hi[1:] *
                   (mean_lo[:-1] - mean_hi[1:]) ** 2)
    return bin_centers[:-1][np.argmax(between_var)]
def threshold_yen(image, nbins=256):
    """Return threshold value based on Yen's method.
    Parameters
    ----------
    image : (N, M) ndarray
        Input image.
    nbins : int, optional
        Number of bins used to calculate histogram. This value is ignored for
        integer arrays.
    Returns
    -------
    threshold : float
        Upper threshold value. All pixels with an intensity higher than
        this value are assumed to be foreground.
    References
    ----------
    .. [1] Yen J.C., Chang F.J., and Chang S. (1995) "A New Criterion
           for Automatic Multilevel Thresholding" IEEE Trans. on Image
           Processing, 4(3): 370-378. DOI:10.1109/83.366472
    .. [2] Sezgin M. and Sankur B. (2004) "Survey over Image Thresholding
           Techniques and Quantitative Performance Evaluation" Journal of
           Electronic Imaging, 13(1): 146-165, DOI:10.1117/1.1631315
           http://www.busim.ee.boun.edu.tr/~sankur/SankurFolder/Threshold_survey.pdf
    .. [3] ImageJ AutoThresholder code, http://fiji.sc/wiki/index.php/Auto_Threshold
    Examples
    --------
    >>> from skimage.data import camera
    >>> image = camera()
    >>> thresh = threshold_yen(image)
    >>> binary = image <= thresh
    """
    hist, bin_centers = histogram(image.ravel(), nbins)

    # Degenerate histogram: `histogram()` collapses a constant int image to a
    # single bin, which is then the only possible threshold.
    if bin_centers.size == 1:
        return bin_centers[0]

    # Probability mass function and its cumulative forms.
    pmf = hist.astype(np.float32) / hist.sum()
    P1 = np.cumsum(pmf)
    P1_sq = np.cumsum(pmf ** 2)
    # Cumulative sum of squares taken from the high end of the histogram.
    P2_sq = np.cumsum(pmf[::-1] ** 2)[::-1]

    # P2_sq is shifted by one bin; pairing it with P1[:-1] avoids '-inf'
    # in the criterion (ImageJ's Yen implementation zeroes those instead).
    crit = np.log(((P1_sq[:-1] * P2_sq[1:]) ** -1) *
                  (P1[:-1] * (1.0 - P1[:-1])) ** 2)
    return bin_centers[crit.argmax()]
def threshold_isodata(image, nbins=256, return_all=False):
    """Return threshold value(s) based on ISODATA method.
    Histogram-based threshold, known as Ridler-Calvard method or inter-means.
    Threshold values returned satisfy the following equality:
    `threshold = (image[image <= threshold].mean() +`
    `image[image > threshold].mean()) / 2.0`
    That is, returned thresholds are intensities that separate the image into
    two groups of pixels, where the threshold intensity is midway between the
    mean intensities of these groups.
    For integer images, the above equality holds to within one; for floating-
    point images, the equality holds to within the histogram bin-width.
    Parameters
    ----------
    image : (N, M) ndarray
        Input image.
    nbins : int, optional
        Number of bins used to calculate histogram. This value is ignored for
        integer arrays.
    return_all: bool, optional
        If False (default), return only the lowest threshold that satisfies
        the above equality. If True, return all valid thresholds.
    Returns
    -------
    threshold : float or int or array
        Threshold value(s).
    References
    ----------
    .. [1] Ridler, TW & Calvard, S (1978), "Picture thresholding using an
           iterative selection method"
           IEEE Transactions on Systems, Man and Cybernetics 8: 630-632,
           DOI:10.1109/TSMC.1978.4310039
    .. [2] Sezgin M. and Sankur B. (2004) "Survey over Image Thresholding
           Techniques and Quantitative Performance Evaluation" Journal of
           Electronic Imaging, 13(1): 146-165,
           http://www.busim.ee.boun.edu.tr/~sankur/SankurFolder/Threshold_survey.pdf
           DOI:10.1117/1.1631315
    .. [3] ImageJ AutoThresholder code,
           http://fiji.sc/wiki/index.php/Auto_Threshold
    Examples
    --------
    >>> from skimage.data import coins
    >>> image = coins()
    >>> thresh = threshold_isodata(image)
    >>> binary = image > thresh
    """
    hist, bin_centers = histogram(image.ravel(), nbins)

    # image only contains one unique value: that value is the only
    # (trivially valid) threshold.
    if len(bin_centers) == 1:
        if return_all:
            return bin_centers
        else:
            return bin_centers[0]

    hist = hist.astype(np.float32)

    # csuml and csumh contain the count of pixels in that bin or lower, and
    # in all bins strictly higher than that bin, respectively
    csuml = np.cumsum(hist)
    csumh = np.cumsum(hist[::-1])[::-1] - hist

    # intensity_sum contains the total pixel intensity from each bin
    intensity_sum = hist * bin_centers

    # l and h contain average value of all pixels in that bin or lower, and
    # in all bins strictly higher than that bin, respectively.
    # Note that since exp.histogram does not include empty bins at the low or
    # high end of the range, csuml and csumh are strictly > 0, except in the
    # last bin of csumh, which is zero by construction.
    # So no worries about division by zero in the following lines, except
    # for the last bin, but we can ignore that because no valid threshold
    # can be in the top bin. So we just patch up csumh[-1] to not cause 0/0
    # errors.
    csumh[-1] = 1
    l = np.cumsum(intensity_sum) / csuml
    h = (np.cumsum(intensity_sum[::-1])[::-1] - intensity_sum) / csumh

    # isodata finds threshold values that meet the criterion t = (l + m)/2
    # where l is the mean of all pixels <= t and h is the mean of all pixels
    # > t, as calculated above. So we are looking for places where
    # (l + m) / 2 equals the intensity value for which those l and m figures
    # were calculated -- which is, of course, the histogram bin centers.
    # We only require this equality to be within the precision of the bin
    # width, of course.
    all_mean = (l + h) / 2.0
    bin_width = bin_centers[1] - bin_centers[0]

    # Look only at thresholds that are below the actual all_mean value,
    # for consistency with the threshold being included in the lower pixel
    # group. Otherwise can get thresholds that are not actually fixed-points
    # of the isodata algorithm. For float images, this matters less, since
    # there really can't be any guarantees anymore anyway.
    distances = all_mean - bin_centers
    thresholds = bin_centers[(distances >= 0) & (distances < bin_width)]

    if return_all:
        return thresholds
    else:
        # Lowest valid threshold; at least one exists for a non-constant
        # histogram per the construction above.
        return thresholds[0]
def threshold_li(image):
    """Return threshold value based on adaptation of Li's Minimum Cross Entropy method.
    Parameters
    ----------
    image : (N, M) ndarray
        Input image.
    Returns
    -------
    threshold : float
        Upper threshold value. All pixels with an intensity higher than
        this value are assumed to be foreground.
    References
    ----------
    .. [1] Li C.H. and Lee C.K. (1993) "Minimum Cross Entropy Thresholding"
           Pattern Recognition, 26(4): 617-625
           DOI:10.1016/0031-3203(93)90115-D
    .. [2] Li C.H. and Tam P.K.S. (1998) "An Iterative Algorithm for Minimum
           Cross Entropy Thresholding" Pattern Recognition Letters, 18(8): 771-776
           DOI:10.1016/S0167-8655(98)00057-9
    .. [3] Sezgin M. and Sankur B. (2004) "Survey over Image Thresholding
           Techniques and Quantitative Performance Evaluation" Journal of
           Electronic Imaging, 13(1): 146-165
           DOI:10.1117/1.1631315
    .. [4] ImageJ AutoThresholder code, http://fiji.sc/wiki/index.php/Auto_Threshold
    Examples
    --------
    >>> from skimage.data import camera
    >>> image = camera()
    >>> thresh = threshold_li(image)
    >>> binary = image > thresh
    """
    # Copy to ensure input image is not modified
    image = image.copy()
    # Requires positive image (because of log(mean)); shift so min is 0 and
    # add the shift back to the returned threshold at the end.
    immin = np.min(image)
    image -= immin
    imrange = np.max(image)
    # Convergence tolerance scaled to the image's dynamic range.
    tolerance = 0.5 * imrange / 256

    # Calculate the mean gray-level
    mean = np.mean(image)

    # Initial estimate; old_thresh is seeded 2*tolerance away so the loop
    # below always runs at least once.
    new_thresh = mean
    old_thresh = new_thresh + 2 * tolerance

    # Stop the iterations when the difference between the
    # new and old threshold values is less than the tolerance
    # (fixed-point iteration from ref. [2]).
    while abs(new_thresh - old_thresh) > tolerance:
        old_thresh = new_thresh
        threshold = old_thresh + tolerance   # range
        # Calculate the means of background and object pixels
        mean_back = image[image <= threshold].mean()
        mean_obj = image[image > threshold].mean()

        temp = (mean_back - mean_obj) / (np.log(mean_back) - np.log(mean_obj))

        # Nudge the next estimate by tolerance in the direction of temp's
        # sign so the iteration keeps moving.
        if temp < 0:
            new_thresh = temp - tolerance
        else:
            new_thresh = temp + tolerance
    # `threshold` holds the last candidate tested; undo the min-shift.
    return threshold + immin
def threshold_minimum(image, nbins=256, bias='min', max_iter=10000):
    """Return threshold value based on minimum method.
    The histogram of the input `image` is computed and smoothed until there are
    only two maxima. Then the minimum in between is the threshold value.
    Parameters
    ----------
    image : (M, N) ndarray
        Input image.
    nbins : int, optional
        Number of bins used to calculate histogram. This value is ignored for
        integer arrays.
    bias : {'min', 'mid', 'max'}, optional
        'min', 'mid', 'max' return lowest, middle, or highest pixel value
        with minimum histogram value.
    max_iter: int, optional
        Maximum number of iterations to smooth the histogram.
    Returns
    -------
    threshold : float
        Upper threshold value. All pixels with an intensity higher than
        this value are assumed to be foreground.
    Raises
    ------
    RuntimeError
        If unable to find two local maxima in the histogram or if the
        smoothing takes more than 1e4 iterations.
    References
    ----------
    .. [1] Prewitt, JMS & Mendelsohn, ML (1966), "The analysis of cell images",
           Annals of the New York Academy of Sciences 128: 1035-1053
           DOI:10.1111/j.1749-6632.1965.tb11715.x
    Examples
    --------
    >>> from skimage.data import camera
    >>> image = camera()
    >>> thresh = threshold_minimum(image)
    >>> binary = image > thresh
    """
    def find_local_maxima(hist):
        # We can't use scipy.signal.argrelmax
        # as it fails on plateaus
        maximums = list()
        direction = 1
        for i in range(hist.shape[0] - 1):
            if direction > 0:
                if hist[i + 1] < hist[i]:
                    direction = -1
                    maximums.append(i)
            else:
                if hist[i + 1] > hist[i]:
                    direction = 1
        return maximums

    if bias not in ('min', 'mid', 'max'):
        raise ValueError("Unknown bias: {0}".format(bias))

    hist, bin_centers = histogram(image.ravel(), nbins)

    # Repeatedly smooth the histogram with a size-3 uniform filter until at
    # most two local maxima remain.  Use the public scipy.ndimage namespace
    # (already imported at module top as `ndi`) instead of the deprecated
    # private `scipy.ndimage.filters` module.
    smooth_hist = np.copy(hist)
    for counter in range(max_iter):
        smooth_hist = ndi.uniform_filter1d(smooth_hist, 3)
        maximums = find_local_maxima(smooth_hist)
        if len(maximums) < 3:
            break

    if len(maximums) != 2:
        raise RuntimeError('Unable to find two maxima in histogram')
    elif counter == max_iter - 1:
        # Fixed: the original message concatenated to "histogramsmoothing".
        raise RuntimeError('Maximum iteration reached for histogram '
                          'smoothing')

    # Find lowest point between the maxima, biased to the low end (min)
    minimum = smooth_hist[maximums[0]]
    threshold = maximums[0]
    for i in range(maximums[0], maximums[1]+1):
        if smooth_hist[i] < minimum:
            minimum = smooth_hist[i]
            threshold = i

    if bias == 'min':
        return bin_centers[threshold]
    else:
        # Walk right across the plateau of equal-valued minimum bins.
        upper_bound = threshold
        while smooth_hist[upper_bound] == smooth_hist[threshold]:
            upper_bound += 1
        upper_bound -= 1
        if bias == 'max':
            return bin_centers[upper_bound]
        elif bias == 'mid':
            return bin_centers[(threshold + upper_bound) // 2]
def threshold_mean(image):
    """Return threshold value based on the mean of grayscale values.
    Parameters
    ----------
    image : (N, M[, ..., P]) ndarray
        Grayscale input image.
    Returns
    -------
    threshold : float
        Upper threshold value. All pixels with an intensity higher than
        this value are assumed to be foreground.
    References
    ----------
    .. [1] C. A. Glasbey, "An analysis of histogram-based thresholding
           algorithms," CVGIP: Graphical Models and Image Processing,
           vol. 55, pp. 532-537, 1993.
           DOI:10.1006/cgip.1993.1040
    Examples
    --------
    >>> from skimage.data import camera
    >>> image = camera()
    >>> thresh = threshold_mean(image)
    >>> binary = image > thresh
    """
    # The global mean is itself the threshold (Glasbey's "mean" criterion).
    return np.mean(image)
def threshold_triangle(image, nbins=256):
    """Return threshold value based on the triangle algorithm.
    Parameters
    ----------
    image : (N, M[, ..., P]) ndarray
        Grayscale input image.
    nbins : int, optional
        Number of bins used to calculate histogram. This value is ignored for
        integer arrays.
    Returns
    -------
    threshold : float
        Upper threshold value. All pixels with an intensity higher than
        this value are assumed to be foreground.
    References
    ----------
    .. [1] Zack, G. W., Rogers, W. E. and Latt, S. A., 1977,
       Automatic Measurement of Sister Chromatid Exchange Frequency,
       Journal of Histochemistry and Cytochemistry 25 (7), pp. 741-753
       DOI:10.1177/25.7.70454
    .. [2] ImageJ AutoThresholder code,
       http://fiji.sc/wiki/index.php/Auto_Threshold
    Examples
    --------
    >>> from skimage.data import camera
    >>> image = camera()
    >>> thresh = threshold_triangle(image)
    >>> binary = image > thresh
    """
    # nbins is ignored for integer arrays
    # so, we recalculate the effective nbins.
    hist, bin_centers = histogram(image.ravel(), nbins)
    nbins = len(hist)

    # Find peak, lowest and highest gray levels.
    arg_peak_height = np.argmax(hist)
    peak_height = hist[arg_peak_height]
    arg_low_level, arg_high_level = np.where(hist>0)[0][[0, -1]]

    # Flip is True if left tail is shorter.  The algorithm below assumes the
    # longer tail is on the left, so mirror the histogram if needed.
    flip = arg_peak_height - arg_low_level < arg_high_level - arg_peak_height
    if flip:
        hist = hist[::-1]
        arg_low_level = nbins - arg_high_level - 1
        arg_peak_height = nbins - arg_peak_height - 1

    # If flip == True, arg_high_level becomes incorrect
    # but we don't need it anymore.
    del(arg_high_level)

    # Set up the coordinate system.
    width = arg_peak_height - arg_low_level
    x1 = np.arange(width)
    y1 = hist[x1 + arg_low_level]

    # Normalize.  Note peak_height and width are rebound to floats here.
    norm = np.sqrt(peak_height**2 + width**2)
    peak_height /= norm
    width /= norm

    # Maximize the length.
    # The ImageJ implementation includes an additional constant when calculating
    # the length, but here we omit it as it does not affect the location of the
    # minimum.
    length = peak_height * x1 - width * y1
    arg_level = np.argmax(length) + arg_low_level

    # Map the answer back to the unflipped histogram.
    if flip:
        arg_level = nbins - arg_level - 1

    return bin_centers[arg_level]
| |
from __future__ import absolute_import
import errno
import itertools
import logging
import os.path
import tempfile
from contextlib import contextmanager
from pipenv.patched.notpip._vendor.contextlib2 import ExitStack
import warnings
from pipenv.patched.notpip._internal.utils.misc import rmtree
from pipenv.patched.notpip._internal.utils.typing import MYPY_CHECK_RUNNING
from pipenv.vendor.vistir.compat import finalize, ResourceWarning
if MYPY_CHECK_RUNNING:
from typing import Any, Dict, Iterator, Optional, TypeVar
_T = TypeVar('_T', bound='TempDirectory')
logger = logging.getLogger(__name__)
_tempdir_manager = None # type: Optional[ExitStack]
@contextmanager
def global_tempdir_manager():
# type: () -> Iterator[None]
global _tempdir_manager
with ExitStack() as stack:
old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
try:
yield
finally:
_tempdir_manager = old_tempdir_manager
class TempDirectoryTypeRegistry(object):
    """Tracks, per temp-directory *kind*, whether it should be auto-deleted."""

    def __init__(self):
        # type: () -> None
        # Maps kind name -> auto-delete flag; unset kinds default to True.
        self._should_delete = {}  # type: Dict[str, bool]

    def set_delete(self, kind, value):
        # type: (str, bool) -> None
        """Record whether TempDirectory objects of *kind* are auto-deleted."""
        self._should_delete[kind] = value

    def get_delete(self, kind):
        # type: (str) -> bool
        """Return the configured auto-delete flag for *kind* (default True)."""
        try:
            return self._should_delete[kind]
        except KeyError:
            return True
_tempdir_registry = None  # type: Optional[TempDirectoryTypeRegistry]


@contextmanager
def tempdir_registry():
    # type: () -> Iterator[TempDirectoryTypeRegistry]
    """Provides a scoped global tempdir registry that can be used to dictate
    whether directories should be deleted.
    """
    global _tempdir_registry
    previous = _tempdir_registry
    _tempdir_registry = TempDirectoryTypeRegistry()
    try:
        yield _tempdir_registry
    finally:
        # Restore whatever registry (possibly None) was active before.
        _tempdir_registry = previous
class TempDirectory(object):
    """Helper class that owns and cleans up a temporary directory.
    This class can be used as a context manager or as an OO representation of a
    temporary directory.
    Attributes:
        path
            Location to the created temporary directory
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)
    Methods:
        cleanup()
            Deletes the temporary directory
    When used as a context manager, if the delete attribute is True, on
    exiting the context the temporary directory is deleted.
    """
    def __init__(
        self,
        path=None,  # type: Optional[str]
        delete=None,  # type: Optional[bool]
        kind="temp",  # type: str
        globally_managed=False,  # type: bool
    ):
        super(TempDirectory, self).__init__()
        # If we were given an explicit directory, resolve delete option now.
        # Otherwise we wait until cleanup and see what tempdir_registry says.
        if path is not None and delete is None:
            delete = False
        if path is None:
            path = self._create(kind)
        self._path = path
        self._deleted = False
        self.delete = delete
        self.kind = kind
        self._finalizer = None
        if self._path:
            # Arrange best-effort removal even if cleanup() is never called.
            self._register_finalizer()
        if globally_managed:
            # Requires an active global_tempdir_manager() context.
            assert _tempdir_manager is not None
            _tempdir_manager.enter_context(self)
    def _register_finalizer(self):
        # Only register when deletion was explicitly requested; the finalizer
        # removes the directory when this object is garbage-collected.
        if self.delete and self._path:
            self._finalizer = finalize(
                self,
                self._cleanup,
                self._path,
                warn_message = None
            )
        else:
            self._finalizer = None
    @property
    def path(self):
        # type: () -> str
        # Refuse to hand out a path that has already been removed.
        assert not self._deleted, (
            "Attempted to access deleted path: {}".format(self._path)
        )
        return self._path
    def __repr__(self):
        # type: () -> str
        return "<{} {!r}>".format(self.__class__.__name__, self.path)
    def __enter__(self):
        # type: (_T) -> _T
        return self
    def __exit__(self, exc, value, tb):
        # type: (Any, Any, Any) -> None
        # Deletion policy: explicit flag wins; otherwise consult the active
        # registry for this kind; default is to delete.
        if self.delete is not None:
            delete = self.delete
        elif _tempdir_registry:
            delete = _tempdir_registry.get_delete(self.kind)
        else:
            delete = True
        if delete:
            self.cleanup()
    def _create(self, kind):
        # type: (str) -> str
        """Create a temporary directory and store its path in self.path
        """
        # We realpath here because some systems have their default tmpdir
        # symlinked to another directory. This tends to confuse build
        # scripts, so we canonicalize the path by traversing potential
        # symlinks here.
        path = os.path.realpath(
            tempfile.mkdtemp(prefix="pip-{}-".format(kind))
        )
        logger.debug("Created temporary directory: {}".format(path))
        return path
    @classmethod
    def _cleanup(cls, name, warn_message=None):
        # Classmethod (not bound to the instance) so the finalizer does not
        # keep the TempDirectory object alive.
        if not os.path.exists(name):
            return
        try:
            rmtree(name)
        except OSError:
            # Best-effort removal; failures are silently ignored.
            pass
        else:
            if warn_message:
                warnings.warn(warn_message, ResourceWarning)
    def cleanup(self):
        # type: () -> None
        """Remove the temporary directory created and reset state
        """
        # Detach the finalizer first so the directory is removed at most
        # once, whether via cleanup() or garbage collection.
        if getattr(self._finalizer, "detach", None) and self._finalizer.detach():
            if os.path.exists(self._path):
                self._deleted = True
                try:
                    rmtree(self._path)
                except OSError:
                    pass
class AdjacentTempDirectory(TempDirectory):
    """Helper class that creates a temporary directory adjacent to a real one.
    Attributes:
        original
            The original directory to create a temp directory for.
        path
            After calling create() or entering, contains the full
            path to the temporary directory.
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)
    """
    # The characters that may be used to name the temp directory
    # We always prepend a ~ and then rotate through these until
    # a usable name is found.
    # pkg_resources raises a different error for .dist-info folder
    # with leading '-' and invalid metadata
    LEADING_CHARS = "-~.=%0123456789"
    def __init__(self, original, delete=None):
        # type: (str, Optional[bool]) -> None
        # Strip trailing separators so os.path.split below yields the name.
        self.original = original.rstrip('/\\')
        super(AdjacentTempDirectory, self).__init__(delete=delete)
    @classmethod
    def _generate_names(cls, name):
        # type: (str) -> Iterator[str]
        """Generates a series of temporary names.
        The algorithm replaces the leading characters in the name
        with ones that are valid filesystem characters, but are not
        valid package names (for both Python and pip definitions of
        package).
        """
        # First pass: same-length candidates, replacing progressively more
        # of the name's leading characters.
        for i in range(1, len(name)):
            for candidate in itertools.combinations_with_replacement(
                    cls.LEADING_CHARS, i - 1):
                new_name = '~' + ''.join(candidate) + name[i:]
                if new_name != name:
                    yield new_name
        # If we make it this far, we will have to make a longer name
        for i in range(len(cls.LEADING_CHARS)):
            for candidate in itertools.combinations_with_replacement(
                    cls.LEADING_CHARS, i):
                new_name = '~' + ''.join(candidate) + name
                if new_name != name:
                    yield new_name
    def _create(self, kind):
        # type: (str) -> str
        # Try each candidate name next to the original directory until one
        # can be created.
        root, name = os.path.split(self.original)
        for candidate in self._generate_names(name):
            path = os.path.join(root, candidate)
            try:
                os.mkdir(path)
            except OSError as ex:
                # Continue if the name exists already
                if ex.errno != errno.EEXIST:
                    raise
            else:
                path = os.path.realpath(path)
                break
        else:
            # Final fallback on the default behavior.
            path = os.path.realpath(
                tempfile.mkdtemp(prefix="pip-{}-".format(kind))
            )
        logger.debug("Created temporary directory: {}".format(path))
        return path
| |
import sys
import os
import marshal
import imp
import struct
import time
import unittest
from test import support
from test.test_importhooks import ImportHooksBaseTestCase, test_src, test_co
from zipfile import ZipFile, ZipInfo, ZIP_STORED, ZIP_DEFLATED
import zipimport
import linecache
import doctest
import inspect
import io
from traceback import extract_tb, extract_stack, print_tb
# Source for a tiny module whose only function raises, used to exercise
# tracebacks through zipimport.
raise_src = 'def do_raise(): raise TypeError\n'
def make_pyc(co, mtime, size):
    """Build the on-disk .pyc byte layout (magic, mtime, size, code) for *co*."""
    data = marshal.dumps(co)
    if type(mtime) is type(0.0):
        # Mac mtimes need a bit of special casing
        if mtime < 0x7fffffff:
            mtime = int(mtime)
        else:
            # Fold into the signed 32-bit range expected by the header.
            mtime = int(-0x100000000 + int(mtime))
    pyc = imp.get_magic() + struct.pack("<ii", int(mtime), size & 0xFFFFFFFF) + data
    return pyc
def module_path_to_dotted_name(path):
    """Translate a filesystem-style module path into dotted import notation."""
    return '.'.join(path.split(os.sep))
# Timestamp shared by every generated archive member and .pyc header.
NOW = time.time()
test_pyc = make_pyc(test_co, NOW, len(test_src))
# Names used for the synthetic module/package hierarchy inside the zip.
TESTMOD = "ziptestmodule"
TESTPACK = "ziptestpackage"
TESTPACK2 = "ziptestpackage2"
TEMP_ZIP = os.path.abspath("junk95142.zip")
pyc_file = imp.cache_from_source(TESTMOD + '.py')
# .pyo is produced instead of .pyc when running with -O.
pyc_ext = ('.pyc' if __debug__ else '.pyo')
class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
compression = ZIP_STORED
    def setUp(self):
        """Reset import-related caches before installing the test hooks."""
        # We're reusing the zip archive path, so we must clear the
        # cached directory info and linecache
        linecache.clearcache()
        zipimport._zip_directory_cache.clear()
        ImportHooksBaseTestCase.setUp(self)
    def doTest(self, expected_ext, files, *modules, **kw):
        """Write *files* into a fresh zip archive, import the dotted module
        named by *modules* from it, and verify the module's file extension.

        Keyword arguments:
          stuff -- raw bytes to prepend to the archive (offset handling)
          call  -- callable invoked with the imported module for extra checks
        When *expected_ext* is falsy the extension check is skipped.
        """
        z = ZipFile(TEMP_ZIP, "w")
        try:
            for name, (mtime, data) in files.items():
                zinfo = ZipInfo(name, time.localtime(mtime))
                zinfo.compress_type = self.compression
                z.writestr(zinfo, data)
            z.close()
            stuff = kw.get("stuff", None)
            if stuff is not None:
                # Prepend 'stuff' to the start of the zipfile
                with open(TEMP_ZIP, "rb") as f:
                    data = f.read()
                with open(TEMP_ZIP, "wb") as f:
                    f.write(stuff)
                    f.write(data)
            sys.path.insert(0, TEMP_ZIP)
            mod = __import__(".".join(modules), globals(), locals(),
                             ["__dummy__"])
            call = kw.get('call')
            if call is not None:
                call(mod)
            if expected_ext:
                file = mod.get_file()
                self.assertEqual(file, os.path.join(TEMP_ZIP,
                                 *modules) + expected_ext)
        finally:
            # close() is harmless if the try body already closed the archive.
            z.close()
            os.remove(TEMP_ZIP)
    def testAFakeZlib(self):
        """Importing a fake zlib.py from a compressed archive must not recurse."""
        #
        # This could cause a stack overflow before: importing zlib.py
        # from a compressed archive would cause zlib to be imported
        # which would find zlib.py in the archive, which would... etc.
        #
        # This test *must* be executed first: it must be the first one
        # to trigger zipimport to import zlib (zipimport caches the
        # zlib.decompress function object, after which the problem being
        # tested here wouldn't be a problem anymore...
        # (Hence the 'A' in the test method name: to make it the first
        # item in a list sorted by name, like unittest.makeSuite() does.)
        #
        # This test fails on platforms on which the zlib module is
        # statically linked, but the problem it tests for can't
        # occur in that case (builtin modules are always found first),
        # so we'll simply skip it then. Bug #765456.
        #
        if "zlib" in sys.builtin_module_names:
            self.skipTest('zlib is a builtin module')
        if "zlib" in sys.modules:
            del sys.modules["zlib"]
        files = {"zlib.py": (NOW, test_src)}
        try:
            self.doTest(".py", files, "zlib")
        except ImportError:
            # Compressed archives are expected to fail here; stored ones not.
            if self.compression != ZIP_DEFLATED:
                self.fail("expected test to not raise ImportError")
        else:
            if self.compression != ZIP_STORED:
                self.fail("expected test to raise ImportError")
    def testPy(self):
        # A lone .py source file in the archive is importable.
        files = {TESTMOD + ".py": (NOW, test_src)}
        self.doTest(".py", files, TESTMOD)
    def testPyc(self):
        # A lone compiled (.pyc/.pyo) file in the archive is importable.
        files = {TESTMOD + pyc_ext: (NOW, test_pyc)}
        self.doTest(pyc_ext, files, TESTMOD)
    def testBoth(self):
        # With both source and a matching compiled file, the compiled one wins.
        files = {TESTMOD + ".py": (NOW, test_src),
                 TESTMOD + pyc_ext: (NOW, test_pyc)}
        self.doTest(pyc_ext, files, TESTMOD)
    def testEmptyPy(self):
        # An empty .py file imports cleanly (no extension check performed).
        files = {TESTMOD + ".py": (NOW, "")}
        self.doTest(None, files, TESTMOD)
    def testBadMagic(self):
        # make pyc magic word invalid, forcing loading from .py
        badmagic_pyc = bytearray(test_pyc)
        badmagic_pyc[0] ^= 0x04  # flip an arbitrary bit
        files = {TESTMOD + ".py": (NOW, test_src),
                 TESTMOD + pyc_ext: (NOW, badmagic_pyc)}
        self.doTest(".py", files, TESTMOD)
    def testBadMagic2(self):
        # make pyc magic word invalid, causing an ImportError
        # (no .py fallback is present this time)
        badmagic_pyc = bytearray(test_pyc)
        badmagic_pyc[0] ^= 0x04  # flip an arbitrary bit
        files = {TESTMOD + pyc_ext: (NOW, badmagic_pyc)}
        try:
            self.doTest(".py", files, TESTMOD)
        except ImportError:
            pass
        else:
            self.fail("expected ImportError; import from bad pyc")
    def testBadMTime(self):
        # A .pyc whose recorded mtime does not match the .py's archive mtime
        # is rejected, and the source is loaded instead.
        badtime_pyc = bytearray(test_pyc)
        # flip the second bit -- not the first as that one isn't stored in the
        # .py's mtime in the zip archive.
        badtime_pyc[7] ^= 0x02
        files = {TESTMOD + ".py": (NOW, test_src),
                 TESTMOD + pyc_ext: (NOW, badtime_pyc)}
        self.doTest(".py", files, TESTMOD)
    def testPackage(self):
        # A one-level package (pkg/__init__ + pkg/module) imports from the zip.
        packdir = TESTPACK + os.sep
        files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc),
                 packdir + TESTMOD + pyc_ext: (NOW, test_pyc)}
        self.doTest(pyc_ext, files, TESTPACK, TESTMOD)
def testDeepPackage(self):
packdir = TESTPACK + os.sep
packdir2 = packdir + TESTPACK2 + os.sep
files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc),
packdir2 + "__init__" + pyc_ext: (NOW, test_pyc),
packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)}
self.doTest(pyc_ext, files, TESTPACK, TESTPACK2, TESTMOD)
    def testZipImporterMethods(self):
        """Exercise the zipimporter API (is_package, load_module,
        get_source, get_filename, archive/prefix) on a nested package."""
        packdir = TESTPACK + os.sep
        packdir2 = packdir + TESTPACK2 + os.sep
        files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc),
                 packdir2 + "__init__" + pyc_ext: (NOW, test_pyc),
                 packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)}
        z = ZipFile(TEMP_ZIP, "w")
        try:
            for name, (mtime, data) in files.items():
                zinfo = ZipInfo(name, time.localtime(mtime))
                zinfo.compress_type = self.compression
                z.writestr(zinfo, data)
            z.close()
            zi = zipimport.zipimporter(TEMP_ZIP)
            self.assertEqual(zi.archive, TEMP_ZIP)
            self.assertEqual(zi.is_package(TESTPACK), True)
            mod = zi.load_module(TESTPACK)
            self.assertEqual(zi.get_filename(TESTPACK), mod.__file__)
            # A package imported from a zip reports a __path__ entry
            # inside the archive.
            existing_pack_path = __import__(TESTPACK).__path__[0]
            expected_path_path = os.path.join(TEMP_ZIP, TESTPACK)
            self.assertEqual(existing_pack_path, expected_path_path)
            self.assertEqual(zi.is_package(packdir + '__init__'), False)
            self.assertEqual(zi.is_package(packdir + TESTPACK2), True)
            self.assertEqual(zi.is_package(packdir2 + TESTMOD), False)
            mod_path = packdir2 + TESTMOD
            mod_name = module_path_to_dotted_name(mod_path)
            __import__(mod_name)
            mod = sys.modules[mod_name]
            # pyc-only entries have no retrievable source.
            self.assertEqual(zi.get_source(TESTPACK), None)
            self.assertEqual(zi.get_source(mod_path), None)
            self.assertEqual(zi.get_filename(mod_path), mod.__file__)
            # To pass in the module name instead of the path, we must use the
            # right importer
            loader = mod.__loader__
            self.assertEqual(loader.get_source(mod_name), None)
            self.assertEqual(loader.get_filename(mod_name), mod.__file__)
            # test prefix and archivepath members
            zi2 = zipimport.zipimporter(TEMP_ZIP + os.sep + TESTPACK)
            self.assertEqual(zi2.archive, TEMP_ZIP)
            self.assertEqual(zi2.prefix, TESTPACK + os.sep)
        finally:
            z.close()
            os.remove(TEMP_ZIP)
    def testZipImporterMethodsInSubDirectory(self):
        """Same zipimporter API checks, but with the importer rooted at a
        subdirectory of the archive (archive + os.sep + packdir)."""
        packdir = TESTPACK + os.sep
        packdir2 = packdir + TESTPACK2 + os.sep
        files = {packdir2 + "__init__" + pyc_ext: (NOW, test_pyc),
                 packdir2 + TESTMOD + pyc_ext: (NOW, test_pyc)}
        z = ZipFile(TEMP_ZIP, "w")
        try:
            for name, (mtime, data) in files.items():
                zinfo = ZipInfo(name, time.localtime(mtime))
                zinfo.compress_type = self.compression
                z.writestr(zinfo, data)
            z.close()
            # Rooting the importer inside the archive sets .prefix.
            zi = zipimport.zipimporter(TEMP_ZIP + os.sep + packdir)
            self.assertEqual(zi.archive, TEMP_ZIP)
            self.assertEqual(zi.prefix, packdir)
            self.assertEqual(zi.is_package(TESTPACK2), True)
            mod = zi.load_module(TESTPACK2)
            self.assertEqual(zi.get_filename(TESTPACK2), mod.__file__)
            self.assertEqual(
                zi.is_package(TESTPACK2 + os.sep + '__init__'), False)
            self.assertEqual(
                zi.is_package(TESTPACK2 + os.sep + TESTMOD), False)
            mod_path = TESTPACK2 + os.sep + TESTMOD
            mod_name = module_path_to_dotted_name(mod_path)
            __import__(mod_name)
            mod = sys.modules[mod_name]
            # pyc-only entries have no retrievable source.
            self.assertEqual(zi.get_source(TESTPACK2), None)
            self.assertEqual(zi.get_source(mod_path), None)
            self.assertEqual(zi.get_filename(mod_path), mod.__file__)
            # To pass in the module name instead of the path, we must use the
            # right importer
            loader = mod.__loader__
            self.assertEqual(loader.get_source(mod_name), None)
            self.assertEqual(loader.get_filename(mod_name), mod.__file__)
        finally:
            z.close()
            os.remove(TEMP_ZIP)
def testGetData(self):
z = ZipFile(TEMP_ZIP, "w")
z.compression = self.compression
try:
name = "testdata.dat"
data = bytes(x for x in range(256))
z.writestr(name, data)
z.close()
zi = zipimport.zipimporter(TEMP_ZIP)
self.assertEqual(data, zi.get_data(name))
self.assertIn('zipimporter object', repr(zi))
finally:
z.close()
os.remove(TEMP_ZIP)
    def testImporterAttr(self):
        # The embedded module reads a data file through its own __loader__,
        # proving zip-imported modules receive a working loader attribute.
        src = """if 1: # indent hack
        def get_file():
            return __file__
        if __loader__.get_data("some.data") != b"some data":
            raise AssertionError("bad data")\n"""
        pyc = make_pyc(compile(src, "<???>", "exec"), NOW, len(src))
        files = {TESTMOD + pyc_ext: (NOW, pyc),
                 "some.data": (NOW, "some data")}
        self.doTest(pyc_ext, files, TESTMOD)
def testImport_WithStuff(self):
# try importing from a zipfile which contains additional
# stuff at the beginning of the file
files = {TESTMOD + ".py": (NOW, test_src)}
self.doTest(".py", files, TESTMOD,
stuff=b"Some Stuff"*31)
def assertModuleSource(self, module):
self.assertEqual(inspect.getsource(module), test_src)
def testGetSource(self):
files = {TESTMOD + ".py": (NOW, test_src)}
self.doTest(".py", files, TESTMOD, call=self.assertModuleSource)
def testGetCompiledSource(self):
pyc = make_pyc(compile(test_src, "<???>", "exec"), NOW, len(test_src))
files = {TESTMOD + ".py": (NOW, test_src),
TESTMOD + pyc_ext: (NOW, pyc)}
self.doTest(pyc_ext, files, TESTMOD, call=self.assertModuleSource)
def runDoctest(self, callback):
files = {TESTMOD + ".py": (NOW, test_src),
"xyz.txt": (NOW, ">>> log.append(True)\n")}
self.doTest(".py", files, TESTMOD, call=callback)
    def doDoctestFile(self, module):
        # The doctest in xyz.txt appends True to 'log', which is exposed to
        # the test file through globs=locals().
        log = []
        # Reset doctest's global master so earlier runs don't accumulate;
        # restore it afterwards.
        old_master, doctest.master = doctest.master, None
        try:
            # module_relative=True makes doctest resolve xyz.txt relative to
            # *module*, which lives inside the zip -- the behavior under test.
            doctest.testfile(
                'xyz.txt', package=module, module_relative=True,
                globs=locals()
            )
        finally:
            doctest.master = old_master
        self.assertEqual(log,[True])
def testDoctestFile(self):
self.runDoctest(self.doDoctestFile)
    def doDoctestSuite(self, module):
        # Same scenario as doDoctestFile but driven through the unittest
        # integration (DocFileTest); the doctest appends True to 'log'.
        log = []
        doctest.DocFileTest(
            'xyz.txt', package=module, module_relative=True,
            globs=locals()
        ).run()
        self.assertEqual(log,[True])
def testDoctestSuite(self):
self.runDoctest(self.doDoctestSuite)
    def doTraceback(self, module):
        # Force an exception inside the zip-imported module and check that
        # the traceback machinery can recover the source line from the zip.
        try:
            module.do_raise()
        except:
            # Skip the frame of this method itself (tb_next).
            tb = sys.exc_info()[2].tb_next
            f,lno,n,line = extract_tb(tb, 1)[0]
            self.assertEqual(line, raise_src.strip())
            f,lno,n,line = extract_stack(tb.tb_frame, 1)[0]
            self.assertEqual(line, raise_src.strip())
            s = io.StringIO()
            print_tb(tb, 1, s)
            # print_tb's output must end with the exact source line.
            self.assertTrue(s.getvalue().endswith(raise_src))
        else:
            raise AssertionError("This ought to be impossible")
def testTraceback(self):
files = {TESTMOD + ".py": (NOW, raise_src)}
self.doTest(None, files, TESTMOD, call=self.doTraceback)
    @unittest.skipIf(support.TESTFN_UNENCODABLE is None,
                     "need an unencodable filename")
    def testUnencodable(self):
        # zipimporter must cope with an archive whose filename cannot be
        # encoded in the filesystem encoding.
        filename = support.TESTFN_UNENCODABLE + ".zip"
        z = ZipFile(filename, "w")
        zinfo = ZipInfo(TESTMOD + ".py", time.localtime(NOW))
        zinfo.compress_type = self.compression
        z.writestr(zinfo, test_src)
        z.close()
        try:
            zipimport.zipimporter(filename)
        finally:
            os.remove(filename)
@support.requires_zlib
class CompressedZipImportTestCase(UncompressedZipImportTestCase):
    # Re-run every inherited test with deflate-compressed archive entries.
    compression = ZIP_DEFLATED
class BadFileZipImportTestCase(unittest.TestCase):
    """Error handling for zipimporter on missing, empty or corrupt files."""
    def assertZipFailure(self, filename):
        # Constructing a zipimporter on a bad path must raise ZipImportError.
        self.assertRaises(zipimport.ZipImportError,
                          zipimport.zipimporter, filename)
    def testNoFile(self):
        self.assertZipFailure('AdfjdkFJKDFJjdklfjs')
    def testEmptyFilename(self):
        self.assertZipFailure('')
    def testBadArgs(self):
        self.assertRaises(TypeError, zipimport.zipimporter, None)
        self.assertRaises(TypeError, zipimport.zipimporter, TESTMOD, kwd=None)
    def testFilenameTooLong(self):
        self.assertZipFailure('A' * 33000)
    def testEmptyFile(self):
        support.unlink(TESTMOD)
        support.create_empty_file(TESTMOD)
        self.assertZipFailure(TESTMOD)
    def testFileUnreadable(self):
        support.unlink(TESTMOD)
        # Create the file with no permission bits set at all.
        fd = os.open(TESTMOD, os.O_CREAT, 000)
        try:
            os.close(fd)
            self.assertZipFailure(TESTMOD)
        finally:
            # If we leave "the read-only bit" set on Windows, nothing can
            # delete TESTMOD, and later tests suffer bogus failures.
            os.chmod(TESTMOD, 0o666)
            support.unlink(TESTMOD)
    def testNotZipFile(self):
        support.unlink(TESTMOD)
        # 22 junk bytes: present but not a valid zip structure.
        fp = open(TESTMOD, 'w+')
        fp.write('a' * 22)
        fp.close()
        self.assertZipFailure(TESTMOD)
    # XXX: disabled until this works on Big-endian machines
    def _testBogusZipFile(self):
        support.unlink(TESTMOD)
        # Write only the end-of-central-directory magic followed by junk.
        fp = open(TESTMOD, 'w+')
        fp.write(struct.pack('=I', 0x06054B50))
        fp.write('a' * 18)
        fp.close()
        z = zipimport.zipimporter(TESTMOD)
        try:
            self.assertRaises(TypeError, z.find_module, None)
            self.assertRaises(TypeError, z.load_module, None)
            self.assertRaises(TypeError, z.is_package, None)
            self.assertRaises(TypeError, z.get_code, None)
            self.assertRaises(TypeError, z.get_data, None)
            self.assertRaises(TypeError, z.get_source, None)
            error = zipimport.ZipImportError
            self.assertEqual(z.find_module('abc'), None)
            self.assertRaises(error, z.load_module, 'abc')
            self.assertRaises(error, z.get_code, 'abc')
            self.assertRaises(IOError, z.get_data, 'abc')
            self.assertRaises(error, z.get_source, 'abc')
            self.assertRaises(error, z.is_package, 'abc')
        finally:
            zipimport._zip_directory_cache.clear()
def test_main():
    """Run every zipimport test case, always cleaning up TESTMOD."""
    case_classes = (
        UncompressedZipImportTestCase,
        CompressedZipImportTestCase,
        BadFileZipImportTestCase,
    )
    try:
        support.run_unittest(*case_classes)
    finally:
        support.unlink(TESTMOD)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    test_main()
| |
#!/usr/bin/python
"""
Copyright 2019 The Ceph-CSI Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#pylint: disable=line-too-long
python tool to trace backend image name from pvc
Note: this script requires Python 3.x or later to work properly
sample input:
python -c oc -k /home/.kube/config -n default -rn rook-ceph -id admin -key
adminkey -cm ceph-csi-config
Sample output:
+------------------------------------------------------------------------------------------------------------------------------------------------------------+
| RBD |
+----------+------------------------------------------+----------------------------------------------+-----------------+--------------+------------------+
| PVC Name | PV Name | Image
Name | PV name in omap | Image ID in omap | Image in cluster |
+----------+------------------------------------------+----------------------------------------------+-----------------+--------------+------------------+
| rbd-pvc | pvc-f1a501dd-03f6-45c9-89f4-85eed7a13ef2 | csi-vol-1b00f5f8-b1c1-11e9-8421-9243c1f659f0 | True | True | False |
| rbd-pvcq | pvc-09a8bceb-0f60-4036-85b9-dc89912ae372 | csi-vol-b781b9b1-b1c5-11e9-8421-9243c1f659f0 | True | True | True |
+----------+------------------------------------------+----------------------------------------------+-----------------+--------------+------------------+
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| CephFS |
+----------------+------------------------------------------+----------------------------------------------+-----------------+----------------------+----------------------+
| PVC Name | PV Name | Subvolume Name | PV name in omap | Subvolume ID in omap | Subvolume in cluster |
+----------------+------------------------------------------+----------------------------------------------+-----------------+----------------------+----------------------+
| csi-cephfs-pvc | pvc-b3492186-73c0-4a4e-a810-0d0fa0daf709 | csi-vol-6f283b82-a09d-11ea-81a7-0242ac11000f | True | True | True |
+----------------+------------------------------------------+----------------------------------------------+-----------------+----------------------+----------------------+
"""
import argparse
import subprocess
import json
import sys
import re
import prettytable
# Command-line interface for the PVC -> backend-volume tracing tool.
# Example: -p pvc-test -k /home/.kube/config -n default -rn rook-ceph
PARSER = argparse.ArgumentParser()
PARSER.add_argument("-p", "--pvcname", default="", help="PVC name")
PARSER.add_argument("-c", "--command", default="oc",
                    help="kubectl or oc command")
PARSER.add_argument("-k", "--kubeconfig", default="",
                    help="kubernetes configuration")
PARSER.add_argument("-n", "--namespace", default="default",
                    help="namespace in which pvc created")
PARSER.add_argument("-t", "--toolboxdeployed", type=bool, default=True,
                    help="is rook toolbox deployed")
PARSER.add_argument("-d", "--debug", type=bool, default=False,
                    help="log commands output")
PARSER.add_argument("-rn", "--rooknamespace",
                    default="rook-ceph", help="rook namespace")
PARSER.add_argument("-id", "--userid",
                    default="admin", help="user ID to connect to ceph cluster")
PARSER.add_argument("-key", "--userkey",
                    default="", help="user password to connect to ceph cluster")
PARSER.add_argument("-cm", "--configmap", default="ceph-csi-config",
                    help="configmap name which holds the cephcsi configuration")
PARSER.add_argument("-cmn", "--configmapnamespace", default="default",
                    help="namespace where configmap exists")
def list_pvc_vol_name_mapping(arg):
    """
    List PVC-to-backend-volume mappings and print them as two tables,
    one for RBD images and one for CephFS subvolumes.
    Exits the process when kubectl/oc fails or returns invalid JSON.
    """
    table_rbd = prettytable.PrettyTable()
    table_rbd.title = "RBD"
    table_rbd.field_names = ["PVC Name", "PV Name", "Image Name", "PV name in omap",
                             "Image ID in omap", "Image in cluster"]
    table_cephfs = prettytable.PrettyTable()
    table_cephfs.title = "CephFS"
    table_cephfs.field_names = ["PVC Name", "PV Name", "Subvolume Name", "PV name in omap",
                                "Subvolume ID in omap", "Subvolume in cluster"]
    cmd = [arg.command]
    if arg.kubeconfig != "":
        # oc and kubectl spell the kubeconfig flag differently.
        if arg.command == "oc":
            cmd += ["--config", arg.kubeconfig]
        else:
            cmd += ["--kubeconfig", arg.kubeconfig]
    cmd += ["--namespace", arg.namespace]
    if arg.pvcname != "":
        cmd += ['get', 'pvc', arg.pvcname, '-o', 'json']
    # list all pvc and get mapping
    else:
        cmd += ['get', 'pvc', '-o', 'json']
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        if arg.debug:
            # Fixed: use %-formatting; passing stderr as a second positional
            # argument printed a tuple instead of an interpolated message.
            print("failed to list pvc %s" % stderr)
        sys.exit()
    try:
        pvcs = json.loads(stdout)
    except ValueError as err:
        print(err, stdout)
        sys.exit()
    format_and_print_tables(arg, pvcs, table_rbd, table_cephfs)
def format_and_print_tables(arg, pvcs, table_rbd, table_cephfs):
    """
    Route each PVC into the matching (RBD or CephFS) table, then print both.
    """
    # A named-PVC query returns a single object; a list query wraps the
    # objects in 'items'.  Normalise to a list so one loop handles both.
    pvc_list = [pvcs] if arg.pvcname != "" else pvcs['items']
    for pvc in pvc_list:
        pvname = pvc['spec']['volumeName']
        pvdata = get_pv_data(arg, pvname)
        if is_rbd_pv(arg, pvname, pvdata):
            format_table(arg, pvc, pvdata, table_rbd, True)
        else:
            format_table(arg, pvc, pvdata, table_cephfs, False)
    print(table_rbd)
    print(table_cephfs)
#pylint: disable=too-many-locals
def format_table(arg, pvc_data, pvdata, table, is_rbd):
    """
    format tables for pvc and image information

    Adds one row per PVC: [pvc, pv, image/subvolume name, pv-in-omap,
    uuid-in-omap, present-in-cluster].  Rows degrade to False columns at
    the first lookup that fails.
    """
    # pvc name
    pvcname = pvc_data['metadata']['name']
    # get pv name
    pvname = pvc_data['spec']['volumeName']
    # get volume handler from pv
    volume_name = get_volume_handler_from_pv(arg, pvname)
    # get volume handler
    if volume_name == "":
        # Without a volume handle nothing further can be resolved.
        table.add_row([pvcname, "", "", False,
                       False, False])
        return
    pool_name = get_pool_name(arg, volume_name, is_rbd)
    if pool_name == "":
        table.add_row([pvcname, pvname, "", False,
                       False, False])
        return
    # get image id
    image_id = get_image_uuid(volume_name)
    if image_id is None:
        table.add_row([pvcname, pvname, "", False,
                       False, False])
        return
    # get volname prefix
    volname_prefix = get_volname_prefix(arg, pvdata)
    # check image/subvolume details present rados omap
    pv_present, uuid_present = validate_volume_in_rados(arg, image_id, pvname, pool_name, is_rbd)
    present_in_cluster = False
    if is_rbd:
        present_in_cluster = check_image_in_cluster(arg, image_id, pool_name, volname_prefix)
    else:
        fsname = get_fsname_from_pvdata(arg, pvdata)
        subvolname = volname_prefix + image_id
        present_in_cluster = check_subvol_in_cluster(arg, subvolname, fsname)
    image_name = volname_prefix + image_id
    table.add_row([pvcname, pvname, image_name, pv_present,
                   uuid_present, present_in_cluster])
def validate_volume_in_rados(arg, image_id, pvc_name, pool_name, is_rbd):
    """
    Check both omap directions: pv-name -> uuid and uuid -> pv-name.
    Returns a (pv_present, uuid_present) tuple of booleans.
    """
    return (
        check_pv_name_in_rados(arg, image_id, pvc_name, pool_name, is_rbd),
        check_image_uuid_in_rados(arg, image_id, pvc_name, pool_name, is_rbd),
    )
def check_pv_name_in_rados(arg, image_id, pvc_name, pool_name, is_rbd):
    """
    Verify the csi.volumes.default omap maps the PV name to the expected
    image UUID.  Returns True when the stored UUID equals image_id.
    """
    omapkey = 'csi.volume.%s' % pvc_name
    cmd = ['rados', 'getomapval', 'csi.volumes.default',
           omapkey, "--pool", pool_name]
    # Fixed: credentials must be passed when a key IS supplied; the original
    # inverted this test and sent an empty --key when no key was given.
    if arg.userkey:
        cmd += ["--id", arg.userid, "--key", arg.userkey]
    if not is_rbd:
        # CephFS omap objects live in the "csi" rados namespace.
        cmd += ["--namespace", "csi"]
    if arg.toolboxdeployed is True:
        kube = get_cmd_prefix(arg)
        cmd = kube + cmd
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        return False
    # Reassemble the omap value from rados' hex-dump style output: take the
    # last token of every data line, skipping the "value (N bytes)" header.
    name = b''
    lines = [x.strip() for x in stdout.split(b"\n")]
    for line in lines:
        if b' ' not in line:
            continue
        if b'value' in line and b'bytes' in line:
            continue
        part = re.findall(br'[A-Za-z0-9\-]+', line)
        if part:
            name += part[-1]
    if name.decode() != image_id:
        if arg.debug:
            print("expected image Id %s found Id in rados %s" %
                  (image_id, name.decode()))
        return False
    return True
def check_image_in_cluster(arg, image_uuid, pool_name, volname_prefix):
    """
    Return True when the rbd image (prefix + uuid) exists in *pool_name*.
    """
    image = volname_prefix + image_uuid
    cmd = ['rbd', 'info', image, "--pool", pool_name]
    # Fixed: pass credentials only when a key was actually supplied
    # (the original condition was inverted).
    if arg.userkey:
        cmd += ["--id", arg.userid, "--key", arg.userkey]
    if arg.toolboxdeployed is True:
        kube = get_cmd_prefix(arg)
        cmd = kube + cmd
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        if arg.debug:
            # Fixed: drop the stray bytes-literal and use %-formatting
            # (was printing a tuple containing a b"" string).
            print("failed to toolbox %s" % stderr)
        return False
    if b"No such file or directory" in stdout:
        if arg.debug:
            print("image not found in cluster")
        return False
    return True
def check_image_uuid_in_rados(arg, image_id, pvc_name, pool_name, is_rbd):
    """
    Verify the per-image omap (csi.volume.<uuid> / csi.volname) points
    back to the expected PV name.  Returns True on a match.
    """
    omapkey = 'csi.volume.%s' % image_id
    cmd = ['rados', 'getomapval', omapkey, "csi.volname", "--pool", pool_name]
    # Fixed: credentials must be passed when a key IS supplied; the original
    # inverted this test and sent an empty --key when no key was given.
    if arg.userkey:
        cmd += ["--id", arg.userid, "--key", arg.userkey]
    if not is_rbd:
        # CephFS omap objects live in the "csi" rados namespace.
        cmd += ["--namespace", "csi"]
    if arg.toolboxdeployed is True:
        kube = get_cmd_prefix(arg)
        cmd = kube + cmd
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        if arg.debug:
            # Fixed: %-format the message (was printing a tuple).
            print("failed to get toolbox %s" % stderr)
        return False
    # Reassemble the omap value from rados' hex-dump style output.
    name = b''
    lines = [x.strip() for x in stdout.split(b"\n")]
    for line in lines:
        if b' ' not in line:
            continue
        if b'value' in line and b'bytes' in line:
            continue
        part = re.findall(br'[A-Za-z0-9\-]+', line)
        if part:
            name += part[-1]
    if name.decode() != pvc_name:
        if arg.debug:
            print("expected image Id %s found Id in rados %s" %
                  (pvc_name, name.decode()))
        return False
    return True
def get_cmd_prefix(arg):
    """
    Build the kubectl/oc prefix used to exec ceph commands inside the
    rook toolbox pod.
    """
    prefix = [arg.command]
    if arg.kubeconfig != "":
        # oc and kubectl spell the kubeconfig flag differently.
        flag = "--config" if arg.command == "oc" else "--kubeconfig"
        prefix += [flag, arg.kubeconfig]
    prefix += ['exec', '-it', get_tool_box_pod_name(arg),
               '-n', arg.rooknamespace, '--']
    return prefix
def get_image_uuid(volume_handler):
    """
    Extract the trailing image UUID (the last five dash-separated fields)
    from a CSI volume handler; returns None for malformed handlers.
    """
    parts = volume_handler.split('-')
    if len(parts) < 9:
        return None
    return '-'.join(parts[-5:])
def get_volume_handler_from_pv(arg, pvname):
    """
    Return the CSI volumeHandle recorded on the PV, or "" on failure.
    """
    cmd = [arg.command]
    if arg.kubeconfig != "":
        if arg.command == "oc":
            cmd += ["--config", arg.kubeconfig]
        else:
            cmd += ["--kubeconfig", arg.kubeconfig]
    cmd += ['get', 'pv', pvname, '-o', 'json']
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        if arg.debug:
            # Fixed: %-format the message (was printing a tuple).
            print("failed to pv %s" % stderr)
        return ""
    try:
        vol = json.loads(stdout)
        return vol['spec']['csi']['volumeHandle']
    # KeyError added: a non-CSI PV has no 'csi' section and used to crash.
    except (ValueError, KeyError) as err:
        if arg.debug:
            print("failed to pv %s" % err)
        return ""
def get_tool_box_pod_name(arg):
    """
    Return the name of the first pod labelled app=rook-ceph-tools in the
    rook namespace, or "" on failure.
    """
    cmd = [arg.command]
    if arg.kubeconfig != "":
        if arg.command == "oc":
            cmd += ["--config", arg.kubeconfig]
        else:
            cmd += ["--kubeconfig", arg.kubeconfig]
    cmd += ['get', 'po', '-l=app=rook-ceph-tools',
            '-n', arg.rooknamespace, '-o', 'json']
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        if arg.debug:
            # Fixed: %-format the message (was printing a tuple).
            print("failed to get toolbox pod name %s" % stderr)
        return ""
    try:
        pod_name = json.loads(stdout)
        return pod_name['items'][0]['metadata']['name']
    # IndexError/KeyError added: no pod matching the selector used to crash.
    except (ValueError, KeyError, IndexError) as err:
        if arg.debug:
            print("failed to pod %s" % err)
        return ""
#pylint: disable=too-many-branches
def get_pool_name(arg, vol_id, is_rbd):
    """
    Resolve the pool name (RBD) or metadata pool name (CephFS) encoded in
    the CSI volume ID.  Returns "" on failure.
    """
    if is_rbd:
        cmd = ['ceph', 'osd', 'lspools', '--format=json']
    else:
        cmd = ['ceph', 'fs', 'ls', '--format=json']
    # Fixed: credentials are added when a key IS provided (was inverted,
    # sending an empty --key when none was given).
    if arg.userkey:
        cmd += ["--id", arg.userid, "--key", arg.userkey]
    if arg.toolboxdeployed is True:
        kube = get_cmd_prefix(arg)
        cmd = kube + cmd
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        if arg.debug:
            # Fixed: %-format the message (was printing a tuple).
            print("failed to get the pool name %s" % stderr)
        return ""
    try:
        pools = json.loads(stdout)
    except ValueError as err:
        if arg.debug:
            print("failed to get the pool name %s" % err)
        return ""
    if is_rbd:
        # The pool id is the 4th dash-separated field of the volume id, or
        # the 5th when the rook namespace itself shifts the split.
        pool_id = vol_id.split('-')
        if len(pool_id) < 4:
            raise Exception("pool id not in the proper format")
        if pool_id[3] in arg.rooknamespace:
            pool_id = pool_id[4]
        else:
            pool_id = pool_id[3]
        for pool in pools:
            # Fixed: compare ids with '==' instead of 'is'; identity
            # comparison of ints only works via CPython's small-int cache.
            if int(pool_id) == int(pool['poolnum']):
                return pool['poolname']
    else:
        for pool in pools:
            return pool['metadata_pool']
    return ""
def check_subvol_in_cluster(arg, subvol_name, fsname):
    """
    Return True when the named subvolume exists in the cluster.
    """
    # The subvolume group may be customised via the cephcsi configmap.
    return check_subvol_path(arg, subvol_name, get_subvol_group(arg), fsname)
def check_subvol_path(arg, subvol_name, subvol_group, fsname):
    """
    Returns True if subvolume path exists in the cluster.
    """
    cmd = ['ceph', 'fs', 'subvolume', 'getpath',
           fsname, subvol_name, subvol_group]
    # Fixed: credentials are added when a key IS provided (was inverted).
    if arg.userkey:
        cmd += ["--id", arg.userid, "--key", arg.userkey]
    if arg.toolboxdeployed is True:
        kube = get_cmd_prefix(arg)
        cmd = kube + cmd
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        if arg.debug:
            # Fixed: %-format the message (was printing a tuple).
            print("failed to get toolbox %s" % stderr)
        return False
    if b"Error" in stdout:
        if arg.debug:
            print("subvolume not found in cluster", stdout)
        return False
    return True
def get_subvol_group(arg):
    """
    Returns sub volume group from configmap.

    Reads the cephcsi configmap and extracts cephFS.subvolumeGroup from
    its config.json; falls back to the default group "csi".  Exits the
    process when the configmap cannot be fetched or parsed.
    """
    cmd = [arg.command]
    if arg.kubeconfig != "":
        if arg.command == "oc":
            cmd += ["--config", arg.kubeconfig]
        else:
            cmd += ["--kubeconfig", arg.kubeconfig]
    cmd += ['get', 'cm', arg.configmap, '-o', 'json']
    cmd += ['--namespace', arg.configmapnamespace]
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        if arg.debug:
            print("failed to get configmap %s", stderr)
        sys.exit()
    try:
        config_map = json.loads(stdout)
    except ValueError as err:
        print(err, stdout)
        sys.exit()
    # default subvolumeGroup
    subvol_group = "csi"
    cm_data = config_map['data'].get('config.json')
    # Absence of 'config.json' means that the configmap
    # is created by Rook and there won't be any provision to
    # specify subvolumeGroup
    if cm_data:
        if "subvolumeGroup" in cm_data:
            try:
                cm_data_list = json.loads(cm_data)
            except ValueError as err:
                print(err, stdout)
                sys.exit()
            # config.json is a list of cluster entries; the group is read
            # from the first entry only.
            subvol_group = cm_data_list[0]['cephFS']['subvolumeGroup']
    return subvol_group
def is_rbd_pv(arg, pvname, pvdata):
    """
    Return True for RBD-backed PVs.  CephFS PVs carry an 'fsName' volume
    attribute; RBD PVs do not.  Exits when pvdata is empty.
    """
    if not pvdata:
        if arg.debug:
            print("failed to get pvdata for %s", pvname)
        sys.exit()
    attributes = pvdata['spec']['csi']['volumeAttributes']
    return 'fsName' not in attributes
def get_pv_data(arg, pvname):
    """
    Returns pv data for a given pvname; exits the process on failure.
    """
    pvdata = {}
    cmd = [arg.command]
    if arg.kubeconfig != "":
        if arg.command == "oc":
            cmd += ["--config", arg.kubeconfig]
        else:
            cmd += ["--kubeconfig", arg.kubeconfig]
    cmd += ['get', 'pv', pvname, '-o', 'json']
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) as out:
        stdout, stderr = out.communicate()
    if stderr is not None:
        if arg.debug:
            # Fixed: %-format the message (was printing a tuple).
            print("failed to get pv %s" % stderr)
        sys.exit()
    try:
        pvdata = json.loads(stdout)
    except ValueError as err:
        if arg.debug:
            print("failed to get pv %s" % err)
        sys.exit()
    return pvdata
def get_volname_prefix(arg, pvdata):
    """
    Return the volume-name prefix recorded on the PV, defaulting to
    "csi-vol-" when the storage class did not override it.
    """
    if not pvdata:
        if arg.debug:
            print("failed to get pv data")
        sys.exit()
    attributes = pvdata['spec']['csi']['volumeAttributes']
    return attributes.get('volumeNamePrefix', "csi-vol-")
def get_fsname_from_pvdata(arg, pvdata):
    """
    Return the CephFS filesystem name ('fsName') stored on the PV.
    Exits the process when pvdata is empty or the attribute is absent.
    """
    if not pvdata:
        if arg.debug:
            print("failed to get pv data")
        sys.exit()
    attributes = pvdata['spec']['csi']['volumeAttributes']
    if 'fsName' not in attributes:
        if arg.debug:
            print("fsname is not set in storageclass/pv")
        sys.exit()
    return attributes['fsName']
# Command-line entry point: validate arguments, then print the mapping.
if __name__ == "__main__":
    ARGS = PARSER.parse_args()
    # Only kubectl and oc are supported cluster clients.
    if ARGS.command not in ["kubectl", "oc"]:
        print("%s command not supported" % ARGS.command)
        sys.exit(1)
    # NOTE(review): this Python-2 guard runs after argument parsing; it
    # assumes argparse itself works under Python 2 long enough to get here.
    if sys.version_info[0] < 3:
        print("python version less than 3 is not supported.")
        sys.exit(1)
    list_pvc_vol_name_mapping(ARGS)
| |
# -*- coding: utf-8 -*-
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.renderers import JSONRenderer
from sita.api.v1.routers import router
from sita.utils import conekta_sita
from sita.users.models import User, Device, Subscription
from sita.subscriptions.models import Subscription as Subscriptions
from sita.cards.models import Card
from sita.users.serializers import UserSerializer
from sita.authentication.serializers import (LoginSerializer,
RecoveryPasswordSerializer,
LoginResponseSerializer,
ResetPasswordWithCodeSerializer,
SignUpSerializer)
from sita.core.api.routers.single import SingleObjectRouter
from datetime import datetime, timedelta
class LoginViewSet(viewsets.GenericViewSet):
    # Login must be reachable by anonymous users.
    permission_classes = (AllowAny, )
    serializer_class = LoginSerializer
    @detail_route(methods=['POST'])
    def signin(self, request, *args, **kwards):
        """
        User login.
        ---
        type:
          token:
            type: string
          user:
            pytype: UserSerializer
        omit_parameters:
            - form
        parameters:
            - name: body
              pytype: LoginSerializer
              paramType: body
              description:
              'email: <b>required</b> <br>
              password: <b>required</b> <br>
              deviceOs: NOT required <br>
              deviceToken: NOT required'
        responseMessages:
            - code: 400
              message: BAD REQUEST
            - code: 200
              message: OK
            - code: 500
              message: INTERNAL SERVER ERROR
        consumes:
            - application/json
        produces:
            - application/json
        """
        serializer = self.get_serializer(data=request.data)
        if serializer.is_valid():
            user = serializer.get_user(serializer.data)
            response_serializer = LoginResponseSerializer()
            # Register the caller's device for later use when both the
            # token and OS were supplied in the payload.
            device_token=request.data.get("device_token")
            device_os=request.data.get("device_os")
            if device_token and device_os:
                device = Device.objects.register(
                    device_token=device_token,
                    device_os=device_os,
                    user=user)
            # Respond with the auth token built for this user.
            return Response(response_serializer.get_token(user))
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class RecoveryPasswordViewSet(viewsets.GenericViewSet):
    # Password recovery must be reachable by anonymous users.
    permission_classes = (AllowAny, )
    serializer_class = RecoveryPasswordSerializer
    @detail_route(methods=['POST'])
    def recovery_password(self, request, *args, **kwards):
        """
        Recovery Password.
        ---
        omit_parameters:
            - form
        parameters:
            - name: body
              type: RecoveryPasswordSerializer
              paramType: body
              description:
              'email: <b>required</b>'
        responseMessages:
            - code: 400
              message: BAD REQUEST
            - code: 200
              message: OK
            - code: 500
              message: INTERNAL SERVER ERROR
        consumes:
            - application/json
        produces:
            - application/json
        """
        serializer = self.get_serializer(data=request.data)
        if serializer.is_valid():
            # Generate a recovery token for the given email; the success
            # response body is empty.
            serializer.generate_recovery_token(serializer.data)
            return Response()
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ResetPasswordWithCodeViewSet(viewsets.GenericViewSet):
    # Password reset via recovery code must be reachable anonymously.
    permission_classes = (AllowAny, )
    serializer_class = ResetPasswordWithCodeSerializer
    @detail_route(methods=['POST'])
    def reset_password_code(self, request, *args, **kwards):
        """
        Reset Password.
        ---
        omit_parameters:
            - form
        parameters:
            - name: body
              type: ResetPasswordWithCodeSerializer
              paramType: body
              description:
              'password: <b>required</b> <br>
              passwordConfim: <b>required</b> <br>
              recoveryCode: <b>required</b> <br>'
        responseMessages:
            - code: 400
              message: BAD REQUEST
            - code: 200
              message: OK
            - code: 500
              message: INTERNAL SERVER ERROR
        consumes:
            - application/json
        produces:
            - application/json
        """
        serializer = self.get_serializer(data=request.data)
        if serializer.is_valid():
            # Apply the new password; the success response body is empty.
            serializer.update_password(data=request.data)
            return Response()
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class SignUpViewSet(viewsets.GenericViewSet):
    # Sign-up must be reachable by anonymous users.
    permission_classes = (AllowAny, )
    serializer_class = SignUpSerializer
    @detail_route(methods=['POST'])
    def signup(self, request, *args, **kwards):
        """
        User login.
        ---
        omit_parameters:
            - form
        parameters:
            - name: body
              type: SignUpSerializer
              paramType: body
              description:
              'email: <b>required</b> <br>
              password: <b>required</b> <br>
              name:NOT required <br>
              firstName: NOT required <br>
              mothersName: NOT required <br>
              phone: NOT required<br>
              deviceOs: NOT required<br>
              deviceToken: NOT required<br>
              conektaCard: NOT required'
        responseMessages:
            - code: 400
              message: BAD REQUEST
            - code: 200
              message: OK
            - code: 500
              message: INTERNAL SERVER ERROR
        consumes:
            - application/json
        produces:
            - application/json
        """
        serializer = self.get_serializer(data=request.data)
        if serializer.is_valid():
            conekta_customer = ""
            # Create a Conekta customer only when a tokenised card was sent.
            if request.data.get("conekta_card"):
                customer = conekta_sita.create_customer(data=request.data)
                if customer is not None:
                    conekta_customer = customer.id
            # Forward only 'name' and 'phone' as extra create_user kwargs.
            # NOTE(review): firstName/mothersName from the API description
            # are not forwarded here -- confirm whether that is intentional.
            for key in request.data:
                if key == "name" or key == "phone":
                    kwards.setdefault(key,request.data.get(key))
            user = User.objects.create_user(
                email=request.data.get("email"),
                password=request.data.get("password"),
                time_zone=request.data.get("time_zone"),
                conekta_customer=conekta_customer,
                automatic_payment=False,
                **kwards
            )
            if conekta_customer != "":
                # Persist the card Conekta stored for the new customer.
                card_data = {
                    "last_four":customer.payment_sources[0].last4,
                    "is_default":True,
                    "conekta_card":customer.payment_sources[0].id,
                    "brand_card":customer.payment_sources[0].brand,
                }
                fields = Card().get_fields()
                Card.objects.register(
                    data=card_data, fields=fields, user=user)
            subscription = Subscriptions.objects.get(id=request.data.get("subscription_id"))
            # next_time_expirate = datetime.now() + timedelta(minutes=43200)
            # NOTE(review): a 1-minute trial expiration looks like a debug
            # value; the commented line suggests 30 days was intended.
            next_time_expirate = datetime.now() + timedelta(minutes=1)
            subscription_user = Subscription(
                user_id=user.id,
                time_in_minutes=43200,
                is_test=True,
                is_current=True,
                next_time_in_minutes=subscription.time_in_minutes,
                next_mount_pay=subscription.amount,
                expiration_date=next_time_expirate,
                next_pay_date=next_time_expirate,
                title=subscription.title
            )
            subscription_user.save()
            user.has_subscription = True
            user.automatic_payment = True
            user.save()
            # Register the caller's device when token and OS were supplied.
            device_token=request.data.get("device_token")
            device_os=request.data.get("device_os")
            if device_token and device_os:
                device = Device.objects.register(
                    device_token=device_token,
                    device_os=device_os,
                    user=user)
            response_serializer = LoginResponseSerializer()
            return Response(response_serializer.get_token(user))
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# All four auth viewsets share the 'auth' URL prefix; each @detail_route
# action is exposed through the project's SingleObjectRouter.
router.register(
    r'auth',
    LoginViewSet,
    base_name="signin",
    router_class=SingleObjectRouter
)
router.register(
    r'auth',
    RecoveryPasswordViewSet,
    base_name="recovery-password",
    router_class=SingleObjectRouter
)
router.register(
    r'auth',
    ResetPasswordWithCodeViewSet,
    base_name="reset-password-code",
    router_class=SingleObjectRouter
)
router.register(
    r'auth',
    SignUpViewSet,
    base_name="signup",
    router_class=SingleObjectRouter
)
| |
"""
test_import
------------------
Tests for `alpenhorn.auto_import` module.
"""
import os
import pytest
import yaml
import alpenhorn.acquisition as ac
import alpenhorn.archive as ar
import alpenhorn.auto_import as auto_import
import alpenhorn.db as db
import alpenhorn.generic as ge
import alpenhorn.storage as st
import test_archive_model as ta
tests_path = os.path.abspath(os.path.dirname(__file__))
# Create handlers for the acquisition and file types
class ZabInfo(ge.GenericAcqInfo):
    """Acquisition-type handler for 'zab' acquisitions.

    Matches directory names via ``patterns`` and tracks files of the
    'zxc' and 'log' file types.
    """
    _acq_type = "zab"
    _file_types = ["zxc", "log"]
    patterns = ["**zab"]
class QuuxInfo(ge.GenericAcqInfo):
    """Acquisition-type handler for 'quux' acquisitions.

    Tracks the same file types as ZabInfo but matches '*quux' and the
    literal directory name 'x'.
    """
    _acq_type = "quux"
    _file_types = ["zxc", "log"]
    patterns = ["*quux", "x"]
class ZxcInfo(ge.GenericFileInfo):
    """File-type handler for 'zxc' files ('**.zxc', 'jim*', 'sheila')."""
    _file_type = "zxc"
    patterns = ["**.zxc", "jim*", "sheila"]
class SpqrInfo(ge.GenericFileInfo):
    """File-type handler for 'spqr' files (any name containing 'spqr')."""
    _file_type = "spqr"
    patterns = ["*spqr*"]
class LogInfo(ge.GenericFileInfo):
    """File-type handler for log files ('*.log')."""
    _file_type = "log"
    patterns = ["*.log"]
def load_fixtures(tmpdir):
    """Loads data from tests/fixtures into the connected database.

    Registers the handler classes defined above, creates their tables,
    materialises the file tree described by fixtures/files.yml under
    ``tmpdir/ROOT``, and back-fills size/md5 on matching ArchiveFile rows.

    Returns a dict with the root py.path object and the parsed fixture tree.
    """
    # Called for its side effects (populates the base tables); the return
    # value itself is not used here.
    fs = ta.load_fixtures()
    p = tmpdir.join("ROOT")
    # Point storage node "x" at the temporary root directory.
    (st.StorageNode.update(root=str(p)).where(st.StorageNode.name == "x").execute())
    # Register new handlers
    ac.AcqType.register_type(ZabInfo)
    ac.AcqType.register_type(QuuxInfo)
    ac.FileType.register_type(ZxcInfo)
    ac.FileType.register_type(SpqrInfo)
    ac.FileType.register_type(LogInfo)
    db.database_proxy.create_tables([ZabInfo, QuuxInfo, ZxcInfo, SpqrInfo, LogInfo])
    with open(os.path.join(tests_path, "fixtures/files.yml")) as f:
        fixtures = yaml.safe_load(f)

    def make_files(dir_name, files, root):
        # Recursively create the directory tree; a node with an "md5" key is
        # a file spec, anything else is a subdirectory.
        d = root.mkdir(dir_name)
        rel_path = os.path.relpath(str(d), str(p))
        for file_name, file_data in files.items():
            if "md5" in file_data:
                f = d.join(file_name)
                f.write(file_data["contents"])
                # Sync size/md5 onto the matching ArchiveFile row, if any.
                for archive_file in (
                    ac.ArchiveFile.select()
                    .join(ac.ArchiveAcq)
                    .where(
                        ac.ArchiveAcq.name + "/" + ac.ArchiveFile.name
                        == rel_path + "/" + file_name
                    )
                ):
                    archive_file.size_b = len(file_data["contents"])
                    archive_file.md5sum = file_data["md5"]
                    archive_file.save()
                    break  # only the first match is updated
            else:  # it's really a directory, recurse!
                make_files(file_name, file_data, d)

    make_files(p.basename, fixtures, tmpdir)
    return {"root": p, "files": fixtures}
@pytest.fixture
def fixtures(tmpdir):
    """Initializes an in-memory Sqlite database with data in tests/fixtures.

    Yields the dict produced by ``load_fixtures`` and closes the database
    connection during teardown.
    """
    db._connect()
    try:
        yield load_fixtures(tmpdir)
    finally:
        # try/finally guarantees the connection is closed even when the test
        # body raises; otherwise pytest throws the exception into the
        # generator at the yield and the close() call would be skipped.
        db.database_proxy.close()
def test_schema(fixtures):
    """Basic sanity test of fixtures used"""
    # All base tables plus the per-handler info tables must exist.
    expected_tables = {
        u"storagegroup",
        u"storagenode",
        u"acqtype",
        u"archiveacq",
        u"filetype",
        u"archivefile",
        u"archivefilecopyrequest",
        u"archivefilecopy",
        u"zabinfo",
        u"quuxinfo",
        u"zxcinfo",
        u"spqrinfo",
        u"loginfo",
    }
    assert set(db.database_proxy.get_tables()) == expected_tables

    root = fixtures["root"]
    assert root.basename == "ROOT"
    assert st.StorageNode.get(st.StorageNode.name == "x").root == root
    # The fixture tree has three top-level entries and four files in the
    # 'zab' acquisition directory.
    assert len(root.listdir()) == 3
    acq_dir = root.join("12345678T000000Z_inst_zab")
    assert len(acq_dir.listdir()) == 4
def test_import(fixtures):
    """Import files into a fresh 'zab' acquisition and verify DB records."""
    tmpdir = fixtures["root"]
    acq_dir = tmpdir.join("12345678T000000Z_inst_zab")
    node = st.StorageNode.get(st.StorageNode.name == "x")
    # import for hello.txt should be ignored while creating the acquisition
    # because 'zab' acq type only tracks *.zxc and *.log files
    auto_import.import_file(node, acq_dir.join("hello.txt").relto(tmpdir))
    assert ac.AcqType.get(ac.AcqType.name == "zab") is not None
    # the acquisition is still created
    acq = ac.ArchiveAcq.get(ac.ArchiveAcq.name == acq_dir.basename)
    assert acq is not None
    assert acq.name == acq_dir.basename
    assert acq.type.name == "zab"
    # while no file has been imported yet
    assert (ac.ArchiveFile.select().where(ac.ArchiveFile.acq == acq).count()) == 0
    # now import 'ch_master.log', which should succeed
    auto_import.import_file(node, acq_dir.join("ch_master.log").relto(tmpdir))
    file = ac.ArchiveFile.get(ac.ArchiveFile.name == "ch_master.log")
    assert file is not None
    assert file.acq == acq
    assert file.type.name == "log"
    # size/md5 must match the fixture description of the file on disk
    assert file.size_b == len(
        fixtures["files"][acq_dir.basename][file.name]["contents"]
    )
    assert file.md5sum == fixtures["files"][acq_dir.basename][file.name]["md5"]
    file_copy = ar.ArchiveFileCopy.get(
        ar.ArchiveFileCopy.file == file, ar.ArchiveFileCopy.node == node
    )
    assert file_copy is not None
    assert file_copy.file == file
    assert file_copy.has_file == "Y"
    assert file_copy.wants_file == "Y"
    # re-importing ch_master.log should be a no-op
    auto_import.import_file(node, acq_dir.join("ch_master.log").relto(tmpdir))
    assert list(
        ac.ArchiveFile.select().where(ac.ArchiveFile.name == "ch_master.log")
    ) == [file]
    assert list(
        ar.ArchiveFileCopy.select().where(
            ar.ArchiveFileCopy.file == file, ar.ArchiveFileCopy.node == node
        )
    ) == [file_copy]
def test_import_existing(fixtures):
    """Checks for importing from an acquisition that is already in the archive"""
    root = fixtures["root"]
    acq_dir = root.join("x")
    node = st.StorageNode.get(st.StorageNode.name == "x")

    def copy_count(name):
        # Number of ArchiveFileCopy rows whose ArchiveFile has this name.
        return (
            ar.ArchiveFileCopy.select()
            .join(ac.ArchiveFile)
            .where(ac.ArchiveFile.name == name)
            .count()
        )

    assert ac.ArchiveAcq.select().where(ac.ArchiveAcq.name == "x").count() == 1

    ## import an unknown file
    auto_import.import_file(node, acq_dir.join("foo.log").relto(root))
    assert copy_count("foo.log") == 1

    ## import file for which ArchiveFile entry exists but not ArchiveFileCopy
    assert copy_count("jim") == 0  # no ArchiveFileCopy for 'jim' yet
    auto_import.import_file(node, acq_dir.join("jim").relto(root))
    assert copy_count("jim") == 1  # now we have an ArchiveFileCopy for 'jim'
def test_import_locked(fixtures):
    """A file with a companion .lock file must not be imported until the
    lock is removed."""
    tmpdir = fixtures["root"]
    acq_dir = tmpdir.join("12345678T000000Z_inst_zab")
    node = st.StorageNode.get(st.StorageNode.name == "x")
    # import for foo.zxc should be ignored because there is also the
    # foo.zxc.lock file
    auto_import.import_file(node, acq_dir.join("foo.zxc").relto(tmpdir))
    assert (
        ac.ArchiveFile.select().where(ac.ArchiveFile.name == "foo.zxc").count()
    ) == 0
    # now delete the lock and try reimport, which should succeed
    acq_dir.join(".foo.zxc.lock").remove()
    auto_import.import_file(node, acq_dir.join("foo.zxc").relto(tmpdir))
    file = ac.ArchiveFile.get(ac.ArchiveFile.name == "foo.zxc")
    assert file.acq.name == acq_dir.basename
    assert file.type.name == "zxc"
    # imported metadata must match the fixture description
    assert file.size_b == len(
        fixtures["files"][acq_dir.basename][file.name]["contents"]
    )
    assert file.md5sum == fixtures["files"][acq_dir.basename][file.name]["md5"]
    file_copy = ar.ArchiveFileCopy.get(
        ar.ArchiveFileCopy.file == file, ar.ArchiveFileCopy.node == node
    )
    assert file_copy.file == file
    assert file_copy.has_file == "Y"
    assert file_copy.wants_file == "Y"
def test_import_corrupted(fixtures):
    """Checks for importing from an acquisition that is already in the archive"""
    root = fixtures["root"]
    acq_dir = root.join("x")
    node = st.StorageNode.get(st.StorageNode.name == "x")

    def sheila_copy_state():
        # (has_file, wants_file) dicts for every copy of 'sheila'.
        return list(
            ar.ArchiveFileCopy.select(
                ar.ArchiveFileCopy.has_file, ar.ArchiveFileCopy.wants_file
            )
            .join(ac.ArchiveFile)
            .where(ac.ArchiveFile.name == "sheila")
            .dicts()
        )

    ## reimport a file for which we have a copy that is corrupted
    assert sheila_copy_state() == [{"has_file": "X", "wants_file": "M"}]
    auto_import.import_file(node, acq_dir.join("sheila").relto(root))
    assert sheila_copy_state() == [{"has_file": "M", "wants_file": "Y"}]
def test_watchdog(fixtures):
    """Checks that the file system observer triggers imports on new/changed files"""
    tmpdir = fixtures["root"]
    acq_dir = tmpdir.join("12345678T000000Z_inst_zab")
    node = st.StorageNode.get(st.StorageNode.name == "x")
    # watchdog is only needed by this test, hence the local import
    import watchdog.events as ev

    watchdog_handler = auto_import.RegisterFile(node)
    # new acquisition file
    f = acq_dir.join("new_file.log")
    f.write("")
    # file_copy_count is the module-level helper defined at the bottom of
    # this file
    assert file_copy_count("new_file.log") == 0
    watchdog_handler.on_created(ev.FileCreatedEvent(str(f)))
    assert file_copy_count("new_file.log") == 1
    # this file is outside any acqs and should be ignored
    g = tmpdir.join("some_file.log")
    g.write("Where is my acq?!")
    assert file_copy_count("some_file.log") == 0
    watchdog_handler.on_created(ev.FileCreatedEvent(str(g)))
    assert file_copy_count("some_file.log") == 0
    # now delete the lock and try reimport, which should succeed
    lock = acq_dir.join(".foo.zxc.lock")
    lock.remove()
    assert file_copy_count("foo.zxc") == 0
    watchdog_handler.on_deleted(ev.FileDeletedEvent(str(lock)))
    assert file_copy_count("foo.zxc") == 1
def test_import_nested(fixtures):
    """Import into an acquisition nested several directories below the root."""
    root = fixtures["root"]
    acq_dir = root.join("alp_root/2017/03/21/acq_xy1_45678901T000000Z_inst_zab")
    node = st.StorageNode.get(st.StorageNode.name == "x")

    # import for summary.txt should be ignored while creating the acquisition
    # because 'zab' acq type only tracks *.zxc and *.log files
    auto_import.import_file(node, acq_dir.join("summary.txt").relto(root))
    assert ac.AcqType.get(ac.AcqType.name == "zab") is not None

    # the acquisition is still created, named by its path relative to root
    acq = ac.ArchiveAcq.get(ac.ArchiveAcq.name == acq_dir.relto(root))
    assert acq is not None
    assert acq.name == acq_dir.relto(root)
    assert acq.type.name == "zab"

    # while no file has been imported yet
    assert (ac.ArchiveFile.select().where(ac.ArchiveFile.acq == acq).count()) == 0

    # now import 'acq_123_1.zxc', which should succeed
    nested_name = "acq_data/x_123_1_data/raw/acq_123_1.zxc"
    auto_import.import_file(node, acq_dir.join(nested_name).relto(root))
    file = ac.ArchiveFile.get(ac.ArchiveFile.name == nested_name)
    assert file.acq.name == acq_dir.relto(root)
    assert file.type.name == "zxc"

    # walk the fixture tree once instead of repeating the deep lookup
    meta = fixtures["files"]["alp_root"]["2017"]["03"]["21"][
        "acq_xy1_45678901T000000Z_inst_zab"
    ]["acq_data"]["x_123_1_data"]["raw"]["acq_123_1.zxc"]
    assert file.size_b == len(meta["contents"])
    assert file.md5sum == meta["md5"]

    file_copy = ar.ArchiveFileCopy.get(
        ar.ArchiveFileCopy.file == file, ar.ArchiveFileCopy.node == node
    )
    assert file_copy.file == file
    assert file_copy.has_file == "Y"
    assert file_copy.wants_file == "Y"
def file_copy_count(file_name):
    """Return the number of ArchiveFileCopy rows whose ArchiveFile is named
    *file_name*."""
    query = ar.ArchiveFileCopy.select().join(ac.ArchiveFile)
    query = query.where(ac.ArchiveFile.name == file_name)
    return query.count()
| |
import itertools
import os
from gitflow import const
from gitflow.properties import PropertyIO
from test.integration.base import TestFlowBase
class TestFlow(TestFlowBase):
    """Integration tests for the gitflow CLI using the 'semverWithSeq'
    versioning scheme with an empty version tag prefix.

    Each test keeps the expected set of git refs in a local ``refs``
    collection and re-checks it against the repository after every flow
    command via ``assert_refs``.
    """

    # Assigned in setup_method from the written config; None only before setup.
    version_tag_prefix: str = None

    def setup_method(self, method):
        """Write the gitflow config file and push it as the initial commit."""
        TestFlowBase.setup_method(self, method)
        # create the config file
        self.project_property_file = 'project.properties'
        config_file = os.path.join(self.git_working_copy, const.DEFAULT_CONFIG_FILE)
        config = {
            const.CONFIG_VERSIONING_SCHEME: 'semverWithSeq',
            const.CONFIG_PROJECT_PROPERTY_FILE: self.project_property_file,
            const.CONFIG_VERSION_PROPERTY: 'version',
            const.CONFIG_SEQUENCE_NUMBER_PROPERTY: 'seq',
            const.CONFIG_VERSION_TAG_PREFIX: ''
        }
        PropertyIO.write_file(config_file, config)
        self.version_tag_prefix = config.get(const.CONFIG_VERSION_TAG_PREFIX,
                                             const.DEFAULT_VERSION_TAG_PREFIX) or ''
        # create & push the initial commit
        self.add(config_file)
        self.commit('initial commit: gitflow config file')
        self.push()
        self.assert_refs({
            'refs/heads/master',
            'refs/remotes/origin/master'
        })

    def test_status(self):
        """'status' succeeds on a fresh repo."""
        exit_code = self.git_flow('status')
        assert exit_code == os.EX_OK

    def test_log(self):
        """'log' succeeds on a fresh repo."""
        exit_code = self.git_flow('log')
        assert exit_code == os.EX_OK

    def test_bump_major(self):
        """bump-major creates release/1.0 and tag 1.0.0-1; a second bump on
        the same head is rejected."""
        refs = dict()
        self.assert_refs(refs, added={
            'refs/heads/master',
            'refs/remotes/origin/master'
        })
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0': None,
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1': 'refs/remotes/origin/release/1.0'
        })
        self.assert_first_parent('refs/remotes/origin/release/1.0', 'refs/heads/master')
        # NOTE(review): checked with an empty dict — on master the property
        # file has no version entries yet; confirm this is intentional.
        self.assert_project_properties_contain({
        })
        # the head commit is the base of a release branch, further bumps shall not be possible
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.assert_refs(refs)
        self.checkout("release/1.0")
        self.assert_refs(refs, added={
            'refs/heads/release/1.0': 'refs/remotes/origin/release/1.0'
        })
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1',
        })

    def test_bump_minor(self):
        """First minor bump from a fresh repo yields 1.0.0-1; after a new
        commit on master a second bump yields 1.1.0-2."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        exit_code = self.git_flow('bump-minor', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        # the head commit is the base of a release branch, further bumps shall not be possible
        exit_code = self.git_flow('bump-minor', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.assert_refs(refs)
        self.checkout("release/1.0")
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })
        self.checkout("master")
        self.commit()
        self.push()
        exit_code = self.git_flow('bump-minor', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/heads/release/1.0',  # local branch
            'refs/remotes/origin/release/1.1',
            'refs/tags/' + self.version_tag_prefix + '1.1.0-2'
        })
        self.checkout("release/1.1")
        self.assert_refs(refs, added={
            'refs/heads/release/1.1'  # local branch
        })
        self.assert_project_properties_contain({
            'seq': '2',
            'version': '1.1.0-2'
        })

    def test_bump_patch(self):
        """bump-patch requires the release-branch commit to be pushed; once
        pushed it produces 1.0.1-2."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.checkout('release/1.0')
        self.assert_refs(refs, added={
            'refs/heads/release/1.0'  # local branch
        })
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.assert_refs(refs)
        self.commit()
        # unpushed commit: bump-patch is refused
        exit_code = self.git_flow('bump-patch', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.assert_refs(refs)
        self.push()
        exit_code = self.git_flow('bump-patch', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/tags/' + self.version_tag_prefix + '1.0.1-2'
        })
        self.checkout("release/1.0")
        self.assert_project_properties_contain({
            'seq': '2',
            'version': '1.0.1-2'
        })
        self.assert_refs(refs)

    def test_bump_patch_on_untagged_branch(self):
        """bump-patch on a manually created, untagged release branch assigns
        the branch's first version 1.1.0-2."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master'
        }
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.commit('dummy')
        self.push()
        # create release/1.1 by hand, without a version tag
        self.git('checkout', '-b', 'release/1.1', 'master')
        self.assert_refs(refs, added={
            'refs/heads/release/1.1'  # local branch
        })
        self.push('-u')
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.1'  # remote branch
        })
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.assert_refs(refs)
        self.commit()
        # unpushed commit: bump-patch is refused
        exit_code = self.git_flow('bump-patch', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.push()
        self.assert_refs(refs)
        exit_code = self.git_flow('bump-patch', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/tags/' + self.version_tag_prefix + '1.1.0-2'
        })
        self.assert_project_properties_contain({
            'seq': '2',
            'version': '1.1.0-2'
        })

    def test_bump_prerelease_type(self):
        """bump-prerelease-type is rejected on a freshly bumped release
        branch; version stays 1.0.0-1."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.checkout('release/1.0')
        self.assert_refs(refs, added={
            'refs/heads/release/1.0',  # local branch
        })
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_USAGE
        exit_code = self.git_flow('bump-prerelease-type', '--assume-yes')
        assert exit_code != os.EX_OK
        exit_code = self.git_flow('bump-prerelease-type', '--assume-yes')
        assert exit_code != os.EX_OK
        self.assert_refs(refs)
        self.checkout("release/1.0")
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })
        self.assert_refs(refs)

    def test_bump_to_release(self):
        """bump-prerelease-type straight after a major bump is a usage error;
        no refs or properties change."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.checkout('release/1.0')
        self.assert_refs(refs, added={
            'refs/heads/release/1.0',  # local branch
        })
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.assert_refs(refs)
        exit_code = self.git_flow('bump-prerelease-type', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.assert_refs(refs)
        self.checkout("release/1.0")
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })
        self.assert_refs(refs)

    def test_bump_prerelease(self):
        """bump-prerelease needs a pushed commit; it then advances the
        prerelease to 1.0.0-2."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.checkout("release/1.0")
        self.assert_refs(refs, added={
            'refs/heads/release/1.0'  # local branch
        })
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })
        self.commit()
        # unpushed commit: bump-prerelease is refused
        exit_code = self.git_flow('bump-prerelease', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.push()
        exit_code = self.git_flow('bump-prerelease', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/tags/' + self.version_tag_prefix + '1.0.0-2'
        })
        self.checkout("release/1.0")
        self.assert_project_properties_contain({
            'seq': '2',
            'version': '1.0.0-2'
        })
        self.assert_refs(refs)

    def test_bump_prerelease_type_behind_branch_tip(self):
        """bump-prerelease-type aimed at a commit behind the branch tip must
        fail and leave everything unchanged."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.checkout("release/1.0")
        self.assert_refs(refs, added={
            'refs/heads/release/1.0'  # local branch
        })
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })
        tagged_commit = self.current_head_commit()
        self.commit()
        self.push()
        # target the now-superseded commit explicitly
        exit_code = self.git_flow('bump-prerelease-type', '--assume-yes', tagged_commit)
        assert exit_code != os.EX_OK
        self.assert_refs(refs)
        self.checkout("release/1.0")
        self.commit()
        self.push()
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })
        self.assert_refs(refs)

    def test_bump_prerelease_type_on_superseded_version_tag(self):
        """bump-prerelease-type on a commit whose version tag has been
        superseded by a later patch must fail."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.checkout("release/1.0")
        self.assert_refs(refs, added={
            'refs/heads/release/1.0'  # local branch
        })
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })
        tagged_commit = self.current_head_commit()
        self.commit()
        self.push()
        exit_code = self.git_flow('bump-patch', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/tags/' + self.version_tag_prefix + '1.0.1-2'
        })
        self.commit()
        self.push()
        # 1.0.0-1 on tagged_commit is now superseded by 1.0.1-2
        exit_code = self.git_flow('bump-prerelease-type', '--assume-yes', tagged_commit)
        assert exit_code != os.EX_OK
        self.assert_refs(refs)
        self.checkout("release/1.0")
        self.assert_project_properties_contain({
            'seq': '2',
            'version': '1.0.1-2'
        })
        self.assert_refs(refs)

    def test_discontinue_implicitly(self):
        """'discontinue' on the current release branch tags it as
        discontinued; a second call is a usage error."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.checkout("release/1.0")
        self.assert_refs(refs, added={
            'refs/heads/release/1.0'  # local branch
        })
        exit_code = self.git_flow('discontinue', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/tags/discontinued/1.0'
        })
        exit_code = self.git_flow('discontinue', '--assume-yes')
        assert exit_code == os.EX_USAGE
        self.assert_refs(refs)
        self.checkout("release/1.0")
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })
        self.assert_refs(refs)

    def test_discontinue_explicitly(self):
        """'discontinue 1.0' works from master by naming the version."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        exit_code = self.git_flow('discontinue', '--assume-yes', '1.0')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/tags/discontinued/1.0'
        })
        exit_code = self.git_flow('discontinue', '--assume-yes', '1.0')
        assert exit_code == os.EX_USAGE
        self.assert_refs(refs)
        self.checkout("release/1.0")
        self.assert_refs(refs, added={
            'refs/heads/release/1.0'
        })
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })

    def test_begin_end_dev_feature(self):
        """Start a dev feature, commit and push on it, then finish it back
        onto master."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        self.assert_head('refs/heads/master')
        exit_code = self.git_flow('start', 'dev', 'feature', 'test-feature')
        assert exit_code == os.EX_OK
        self.assert_head('refs/heads/dev/feature/test-feature')
        self.assert_refs(refs, added={
            'refs/heads/dev/feature/test-feature'
        })
        for _ in itertools.repeat(None, 3):
            self.commit()
        self.push('-u', 'origin', 'dev/feature/test-feature')
        self.assert_refs(refs, added={
            'refs/remotes/origin/dev/feature/test-feature'
        })
        exit_code = self.git_flow('finish', 'dev', 'feature', 'test-feature')
        assert exit_code == os.EX_OK
        self.assert_head('refs/heads/master')
        self.assert_refs(refs)

    def test_begin_end_dev_feature_from_another_branch(self):
        """'finish' works even when the feature branch is not checked out."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        self.assert_head('refs/heads/master')
        exit_code = self.git_flow('start', 'dev', 'feature', 'test-feature')
        assert exit_code == os.EX_OK
        self.assert_head('refs/heads/dev/feature/test-feature')
        self.assert_refs(refs, added={
            'refs/heads/dev/feature/test-feature',
        })
        for _ in itertools.repeat(None, 3):
            self.commit()
        self.push('-u', 'origin', 'dev/feature/test-feature')
        self.assert_refs(refs, added={
            'refs/remotes/origin/dev/feature/test-feature',
        })
        self.checkout("master")
        self.assert_head('refs/heads/master')
        exit_code = self.git_flow('finish', 'dev', 'feature', 'test-feature')
        assert exit_code == os.EX_OK
        self.assert_head('refs/heads/master')
        self.assert_refs(refs)

    def test_error_begin_dev_feature_off_a_release_branch(self):
        """Starting a dev feature based on a release branch is rejected."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        self.assert_head('refs/heads/master')
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.assert_head('refs/heads/master')
        self.checkout('release/1.0')
        self.assert_head('refs/heads/release/1.0')
        self.assert_refs(refs, added={
            'refs/heads/release/1.0'  # local branch
        })
        exit_code = self.git_flow('start', 'dev', 'feature', 'test-feature', 'release/1.0')
        assert exit_code == os.EX_USAGE
        self.assert_head('refs/heads/release/1.0')
        self.assert_refs(refs)

    def test_begin_end_prod_fix(self):
        """Start a prod fix off a release branch and finish it back onto
        that release branch."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        self.assert_head('refs/heads/master')
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.assert_head('refs/heads/master')
        self.checkout('release/1.0')
        self.assert_head('refs/heads/release/1.0')
        self.assert_refs(refs, added={
            'refs/heads/release/1.0'  # local branch
        })
        exit_code = self.git_flow('start', 'prod', 'fix', 'test-fix')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/heads/prod/fix/test-fix'
        })
        self.assert_head('refs/heads/prod/fix/test-fix')
        for _ in itertools.repeat(None, 3):
            self.commit()
        self.push('-u')
        self.assert_refs(refs, added={
            'refs/remotes/origin/prod/fix/test-fix'
        })
        exit_code = self.git_flow('finish', 'prod', 'fix', 'test-fix', '1.0')
        assert exit_code == os.EX_OK
        self.assert_head('refs/heads/release/1.0')
        self.assert_refs(refs)

    def test_misc(self):
        """End-to-end flow: hotfixes, a GA patch release, a dev feature and
        a second major version."""
        refs = {
            'refs/heads/master',
            'refs/remotes/origin/master',
        }
        self.assert_head('refs/heads/master')
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/1.0',
            'refs/tags/' + self.version_tag_prefix + '1.0.0-1'
        })
        self.assert_head('refs/heads/master')
        self.checkout('release/1.0')
        self.assert_head('refs/heads/release/1.0')
        self.assert_refs(refs, added={
            'refs/heads/release/1.0'  # local branch
        })
        self.assert_project_properties_contain({
            'seq': '1',
            'version': '1.0.0-1'
        })
        # hotfix
        exit_code = self.git_flow('start', 'prod', 'fix', 'test-fix')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/heads/prod/fix/test-fix'
        })
        self.assert_head('refs/heads/prod/fix/test-fix')
        for _ in itertools.repeat(None, 3):
            self.commit()
        self.push('-u')
        self.assert_refs(refs, added={
            'refs/remotes/origin/prod/fix/test-fix'
        })
        exit_code = self.git_flow('finish', 'prod', 'fix', 'test-fix', '1.0')
        assert exit_code == os.EX_OK
        self.assert_head('refs/heads/release/1.0')
        self.assert_refs(refs)
        # hotfix 2 with implicit finish on work branch
        exit_code = self.git_flow('start', 'prod', 'fix', 'test-fix2')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/heads/prod/fix/test-fix2'
        })
        self.assert_head('refs/heads/prod/fix/test-fix2')
        for _ in itertools.repeat(None, 3):
            self.commit()
        self.push('-u')
        self.assert_refs(refs, added={
            'refs/remotes/origin/prod/fix/test-fix2',
        })
        exit_code = self.git_flow('finish')
        assert exit_code == os.EX_OK
        self.assert_head('refs/heads/release/1.0')
        # GA release
        exit_code = self.git_flow('bump-patch', '--assume-yes', '1.0')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/tags/' + self.version_tag_prefix + '1.0.1-2',
        })
        exit_code = self.git_flow('bump-prerelease-type', '--assume-yes', '1.0')
        assert exit_code == os.EX_USAGE
        exit_code = self.git_flow('bump-to-release', '--assume-yes', '1.0')
        assert exit_code == os.EX_USAGE
        self.checkout('release/1.0')
        self.assert_project_properties_contain({
            'seq': '2',
            'version': '1.0.1-2'
        })
        # new feature
        self.checkout('master')
        self.assert_head('refs/heads/master')
        exit_code = self.git_flow('start', 'dev', 'feature', 'test-feature')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/heads/dev/feature/test-feature'
        })
        self.assert_head('refs/heads/dev/feature/test-feature')
        for _ in itertools.repeat(None, 3):
            self.commit()
        self.push('-u')
        self.assert_refs(refs, added={
            'refs/remotes/origin/dev/feature/test-feature'
        })
        exit_code = self.git_flow('finish', 'dev', 'feature', 'test-feature')
        assert exit_code == os.EX_OK
        self.assert_refs(refs)
        # new major version
        exit_code = self.git_flow('bump-major', '--assume-yes')
        assert exit_code == os.EX_OK
        self.assert_refs(refs, added={
            'refs/remotes/origin/release/2.0',
            'refs/tags/' + self.version_tag_prefix + '2.0.0-3',
        })
        self.checkout('release/2.0')
        self.assert_refs(refs, added={
            'refs/heads/release/2.0'  # local branch
        })
        self.assert_project_properties_contain({
            'seq': '3',
            'version': '2.0.0-3'
        })
| |
'''Arsenal API physical_devices.'''
# Copyright 2015 CityGrid Media, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from datetime import datetime
from pyramid.view import view_config
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.exc import IntegrityError
from arsenalweb.views import (
get_authenticated_user,
)
from arsenalweb.views.api.common import (
api_200,
api_400,
api_404,
api_500,
api_501,
collect_params,
)
from arsenalweb.views.api.hardware_profiles import (
get_hardware_profile,
)
from arsenalweb.views.api.physical_locations import (
find_physical_location_by_name,
)
from arsenalweb.views.api.physical_racks import (
find_physical_rack_by_name_loc,
)
from arsenalweb.views.api.physical_elevations import (
find_physical_elevation_by_elevation,
)
from arsenalweb.models.common import (
DBSession,
)
from arsenalweb.models.physical_devices import (
PhysicalDevice,
PhysicalDeviceAudit,
)
from arsenalweb.models.statuses import (
Status,
)
LOG = logging.getLogger(__name__)
# Functions
def find_status_by_name(status_name):
    '''Find a status by name. Raises NoResultFound when no such status exists.'''
    return DBSession.query(Status).filter(Status.name == status_name).one()
def find_physical_device_by_serial(serial_number):
    '''Find a physical_device by serial_number. Returns a physical_device object if found,
    raises NoResultFound otherwise.'''
    # Lazy %-style args: the message is only rendered when DEBUG is enabled.
    LOG.debug('Searching for physical_device by serial_number: %s', serial_number)
    physical_device = DBSession.query(PhysicalDevice)
    physical_device = physical_device.filter(PhysicalDevice.serial_number == serial_number)
    return physical_device.one()
def find_physical_device_by_id(physical_device_id):
    '''Find a physical_device by id. Returns a physical_device object if found,
    raises NoResultFound otherwise.'''
    # Lazy %-style args: the message is only rendered when DEBUG is enabled.
    LOG.debug('Searching for physical_device by id: %s', physical_device_id)
    physical_device = DBSession.query(PhysicalDevice)
    physical_device = physical_device.filter(PhysicalDevice.id == physical_device_id)
    return physical_device.one()
def create_physical_device(serial_number=None,
                           mac_address_1=None,
                           physical_location_id=None,
                           physical_rack_id=None,
                           physical_elevation_id=None,
                           status_id=None,
                           updated_by=None,
                           **kwargs):
    '''Create a new physical_device and its "created" audit entry.

    Required params:

    mac_address_1: A string representing the MAC address of the first interface.
    physical_location_id : An integer representing the physical_location_id
        from the physical_locations table.
    physical_rack_id : An integer representing the physical_rack_id
        from the physical_racks table.
    physical_elevation_id: An integer representing the physical_elevation_id
        from the physical_elevations table.
    serial_number : A string that is the serial_number of the physical_device.
    status_id : An integer representing the status_id from the statuses table.
    updated_by : A string that is the user making the update.

    Optional kwargs (passed straight through to the PhysicalDevice model):

    hardware_profile_id: An integer representing the hardware_profile_id from
        the hardware_profiles table.
    mac_address_2: A string representing the MAC address of the second interface.
    oob_ip_address: A string representing the out of band IP address.
    oob_mac_address: A string representing the out of band MAC address.

    Returns api_200 wrapping the new physical_device on success, api_500 on
    unexpected errors. Raises Exception when the target physical elevation is
    already occupied (unique-constraint IntegrityError).
    '''
    try:
        LOG.info('Creating new physical_device serial_number: %s', serial_number)
        utcnow = datetime.utcnow()
        physical_device = PhysicalDevice(serial_number=serial_number,
                                         mac_address_1=mac_address_1,
                                         physical_location_id=physical_location_id,
                                         physical_rack_id=physical_rack_id,
                                         physical_elevation_id=physical_elevation_id,
                                         status_id=status_id,
                                         updated_by=updated_by,
                                         created=utcnow,
                                         updated=utcnow,
                                         **kwargs)
        DBSession.add(physical_device)
        # flush so physical_device.id is assigned for the audit row
        DBSession.flush()
        audit = PhysicalDeviceAudit(object_id=physical_device.id,
                                    field='serial_number',
                                    old_value='created',
                                    new_value=physical_device.serial_number,
                                    updated_by=updated_by,
                                    created=utcnow)
        DBSession.add(audit)
        DBSession.flush()
        return api_200(results=physical_device)
    except IntegrityError:
        msg = 'Physical elevation is already occupied, move the existing ' \
              'physical_device first.'
        LOG.error(msg)
        # Deliberately propagates: this is NOT caught by the generic handler
        # below (sibling except clauses do not catch each other's raises).
        raise Exception(msg)
    except Exception as ex:
        msg = 'Error creating new physical_device serial_number: {0} exception: ' \
              '{1}'.format(serial_number, ex)
        LOG.error(msg)
        return api_500(msg=msg)
def update_physical_device(physical_device, **kwargs):
    '''Update an existing physical_device, writing one audit row per changed
    attribute, and return an api_200 response wrapping the device.

    Required params:

    physical_device : A physical_device object.
    updated_by      : A string that is the user making the update.

    Optional kwargs:

    hardware_profile_id: An integer representing the hardware_profile_id from
        the hardware_profiles table.
    mac_address_1: A string representing the MAC address of the first interface.
    mac_address_2: A string representing the MAC address of the second interface.
    oob_ip_address: A string representing the out of band IP address.
    oob_mac_address: A string representing the out of band MAC address.
    physical_location_id : An integer representing the physical_location_id
        from the physical_locations table.
    physical_rack_id : An integer representing the physical_rack_id
        from the physical_racks table.
    physical_elevation_id: An integer representing the physical_elevation_id
        from the physical_elevations table.
    status_id: An integer representing the status_id from the statuses table.

    Raises on IntegrityError (elevation already occupied) and re-raises any
    other exception after logging it.
    '''

    try:
        updates = dict(kwargs)
        LOG.info('Updating physical_device: {0}'.format(physical_device.serial_number))
        timestamp = datetime.utcnow()

        for field_name, proposed in updates.items():
            # The serial number is the device's identity; never overwrite it.
            if field_name == 'serial_number':
                LOG.debug('Skipping update to physical_device.serial_number')
                continue

            current = getattr(physical_device, field_name)

            # Only act on genuine changes; falsy proposed values are ignored.
            if not proposed or current == proposed:
                continue

            # Audit rows store 'None' (string) rather than an empty old value.
            if not current:
                current = 'None'

            LOG.debug('Updating physical_device: {0} attribute: '
                      '{1} new_value: {2}'.format(physical_device.serial_number,
                                                  field_name,
                                                  proposed))
            DBSession.add(PhysicalDeviceAudit(object_id=physical_device.id,
                                              field=field_name,
                                              old_value=current,
                                              new_value=proposed,
                                              updated_by=updates['updated_by'],
                                              created=timestamp))
            setattr(physical_device, field_name, proposed)

        DBSession.flush()
        return api_200(results=physical_device)
    except IntegrityError:
        msg = 'Physical elevation is already occupied, move the existing ' \
              'physical_device first.'
        LOG.error(msg)
        raise Exception(msg)
    except Exception as ex:
        msg = 'Error updating physical_device serial_number: {0} updated_by: {1} exception: ' \
              '{2}'.format(physical_device.serial_number,
                           updates['updated_by'],
                           repr(ex))
        LOG.error(msg)
        raise
def convert_names_to_ids(params):
    '''Converts nice names to ids for creating/updating a physical_device.

    Mutates ``params`` in place, replacing each of the keys
    ``physical_location``, ``physical_rack``, ``physical_elevation``,
    ``status`` and ``hardware_profile`` with the corresponding ``*_id`` key,
    and returns the same dict.

    Raises NoResultFound when a referenced object cannot be resolved; any
    exception is logged before being re-raised.
    '''

    try:
        # Order matters: rack lookup needs physical_location_id, and
        # elevation lookup needs physical_rack_id.
        _convert_physical_location(params)
        _convert_physical_rack(params)
        _convert_physical_elevation(params)
        _convert_status(params)
        _convert_hardware_profile(params)
    except Exception as ex:
        LOG.error(repr(ex))
        raise
    return params


def _convert_physical_location(params):
    '''Replace params['physical_location'] with params['physical_location_id'].'''

    try:
        try:
            # Accept either a serialized object ({'name': ...}) or a bare name.
            physical_location = params['physical_location']['name']
        except TypeError:
            physical_location = params['physical_location']
        physical_location = find_physical_location_by_name(physical_location)
        params['physical_location_id'] = physical_location.id
        LOG.debug('physical_location_id: {0}'.format(params['physical_location_id']))
        del params['physical_location']
    except NoResultFound:
        msg = 'physical_location not found: {0}'.format(params['physical_location'])
        LOG.error(msg)
        raise NoResultFound(msg)


def _convert_physical_rack(params):
    '''Replace params['physical_rack'] with params['physical_rack_id'].

    Requires params['physical_location_id'] to already be resolved.
    '''

    try:
        try:
            physical_rack_name = params['physical_rack']['name']
        except TypeError:
            physical_rack_name = params['physical_rack']
        physical_rack = find_physical_rack_by_name_loc(physical_rack_name,
                                                       params['physical_location_id'])
        params['physical_rack_id'] = physical_rack.id
        del params['physical_rack']
    except NoResultFound:
        msg = 'physical_rack not found: {0}'.format(params['physical_rack'])
        LOG.error(msg)
        raise NoResultFound(msg)


def _convert_physical_elevation(params):
    '''Replace params['physical_elevation'] with params['physical_elevation_id'].

    Requires params['physical_rack_id'] to already be resolved.
    '''

    try:
        try:
            physical_elevation_el = params['physical_elevation']['elevation']
        except TypeError:
            physical_elevation_el = params['physical_elevation']
        physical_elevation = find_physical_elevation_by_elevation(physical_elevation_el,
                                                                  params['physical_rack_id'])
        params['physical_elevation_id'] = physical_elevation.id
        del params['physical_elevation']
    except NoResultFound:
        msg = 'physical_elevation not found: {0}'.format(params['physical_elevation'])
        LOG.error(msg)
        raise NoResultFound(msg)


def _convert_status(params):
    '''Replace params['status'] with params['status_id'].

    A missing/falsy status name defaults to 'available'.
    '''

    try:
        try:
            status_name = params['status']['name']
        except TypeError:
            status_name = params['status']
        if not status_name:
            status_name = 'available'
        status = find_status_by_name(status_name)
        params['status_id'] = status.id
        try:
            del params['status']
        except KeyError:
            pass
    except NoResultFound:
        msg = 'Unable to determine status of physical_device.'
        LOG.error(msg)
        raise NoResultFound(msg)


def _convert_hardware_profile(params):
    '''Replace params['hardware_profile'] with params['hardware_profile_id'].'''

    if params['hardware_profile']:
        try:
            hw_profile_name = params['hardware_profile']['name']
        except TypeError:
            hw_profile_name = params['hardware_profile']
        try:
            hardware_profile = get_hardware_profile(hw_profile_name)
            params['hardware_profile_id'] = hardware_profile.id
            del params['hardware_profile']
        except AttributeError:
            # get_hardware_profile() returned None, i.e. the profile is unknown.
            msg = 'hardware_profile not found: {0}'.format(params['hardware_profile'])
            LOG.error(msg)
            raise NoResultFound(msg)
# Routes
@view_config(route_name='api_physical_devices', request_method='GET', request_param='schema=true', renderer='json')
def api_physical_devices_schema(request):
    '''Schema document for the physical_devices API.'''

    # No schema fields are published for this endpoint yet.
    return {}
@view_config(route_name='api_physical_devices', permission='physical_device_write', request_method='PUT', renderer='json')
def api_physical_devices_write(request):
    '''Process write requests for /api/physical_devices route.

    Resolves friendly names to ids, then updates the device when its serial
    number already exists, otherwise creates it. Returns api_404 when a
    referenced object is missing and api_500 on unexpected errors.
    '''

    try:
        required = [
            'hardware_profile',
            'mac_address_1',
            'physical_elevation',
            'physical_location',
            'physical_rack',
            'serial_number',
        ]
        optional = [
            'mac_address_2',
            'oob_ip_address',
            'oob_mac_address',
            'status',
        ]
        params = collect_params(request, required, optional)

        try:
            params = convert_names_to_ids(params)
            LOG.debug('params are: {0}'.format(params))
        except NoResultFound as ex:
            msg = 'Error writing to physical_devices API: {0}'.format(ex)
            LOG.error(msg)
            return api_404(msg=msg)

        # Upsert: update when the serial number is already known,
        # fall back to creation when the lookup finds nothing.
        try:
            physical_device = find_physical_device_by_serial(params['serial_number'])
            physical_device = update_physical_device(physical_device, **params)
        except NoResultFound:
            physical_device = create_physical_device(**params)
        return physical_device
    except Exception as ex:
        msg = 'Error writing to physical_devices API: {0} exception: {1}'.format(request.url, ex)
        LOG.error(msg)
        return api_500(msg=msg)
@view_config(route_name='api_physical_device_r', permission='physical_device_delete', request_method='DELETE', renderer='json')
@view_config(route_name='api_physical_device_r', permission='physical_device_write', request_method='PUT', renderer='json')
def api_physical_device_write_attrib(request):
    '''Process write requests for the /api/physical_devices/{id}/{resource} route.

    Returns api_501 for unsupported resources, api_400 when the payload is
    missing the resource key, api_500 on unexpected errors.

    Bug fix: the original fell through to ``return resp`` without ever
    assigning ``resp``, raising NameError for any resource in the allowed
    list; ``resp`` is now always assigned before it is returned.
    '''

    resource = request.matchdict['resource']
    payload = request.json_body
    auth_user = get_authenticated_user(request)  # currently unused; kept for future per-user auditing

    LOG.debug('Updating {0}'.format(request.url))

    # First get the physical_device, then figure out what to do to it.
    physical_device = find_physical_device_by_id(request.matchdict['id'])
    LOG.debug('physical_device is: {0}'.format(physical_device))

    # List of resources allowed.
    resources = [
        'nothing_yet',
    ]

    # There's nothing to do here yet. Maybe add updates to existing
    # physical_devices?
    if resource not in resources:
        return api_501()

    try:
        actionable = payload[resource]
        # No resource actions are implemented yet. Once they are, dispatch
        # on `resource` here and assign the api_* response to `resp`.
        resp = api_501()
    except KeyError:
        msg = 'Missing required parameter: {0}'.format(resource)
        return api_400(msg=msg)
    except Exception as ex:
        LOG.error('Error updating physical_devices: {0} exception: {1}'.format(request.url, ex))
        return api_500(msg=str(ex))

    return resp
| |
""" Compute heads of mentions. """
import logging
import re
from cort.core import spans
logger = logging.getLogger(__name__)
__author__ = 'smartschat'
class HeadFinder:
    """Compute heads of mentions.

    This class provides functions to compute heads of mentions via a modified
    version of the rules that can be found in Michael Collins' PhD thesis.

    The following changes were introduced:

    - handle NML as NP,
    - for coordinated phrases, take the coordination token as head,

    Furthermore, this class provides a function for adjusting heads for proper
    names to multi-token phrases via heuristics (see adjust_head_for_nam).
    """
    def __init__(self):
        # Phrase labels whose heads are computed recursively.
        self.__nonterminals = ["NP", "NML", "VP", "ADJP", "QP", "WHADVP", "S",
                               "ADVP", "WHNP", "SBAR", "SBARQ", "PP", "INTJ",
                               "SQ", "UCP", "X", "FRAG"]
        # label -> (head-candidate labels in priority order,
        #           whether children are traversed right-to-left)
        self.__nonterminal_rules = {
            "VP": (["TO", "VBD", "VBN", "MD", "VBZ", "VB", "VBG", "VBP", "VP",
                    "ADJP", "NN", "NNS", "NP"], False),
            "ADJP": (["NNS", "QP", "NN", "\$", "ADVP", "JJ", "VBN", "VBG", "ADJP",
                      "JJR", "NP", "JJS", "DT", "FW", "RBR", "RBS", "SBAR", "RB"],
                     False),
            "QP": (["\$", "NNS", "NN", "IN", "JJ", "RB", "DT", "CD", "NCD",
                    "QP", "JJR", "JJS"], False),
            "WHADVP": (["CC", "WRB"], True),
            "S": (["TO", "IN", "VP", "S", "SBAR", "ADJP", "UCP", "NP"], False),
            "SBAR": (["WHNP", "WHPP", "WHADVP", "WHADJP", "IN", "DT", "S", "SQ",
                      "SINV", "SBAR", "FRAG"], False),
            "SBARQ": (["SQ", "S", "SINV", "SBARQ", "FRAG"], False),
            "SQ": (["VBZ", "VBD", "VBP", "VB", "MD", "VP", "SQ"], False),
            "ADVP": (["RB", "RBR", "RBS", "FW", "ADVP", "TO", "CD", "JJR", "JJ",
                      "IN", "NP", "JJS", "NN"], True),
            "WHNP": (["WDT", "WP", "WP$", "WHADJP", "WHPP", "WHNP"], True),
            "PP": (["IN", "TO", "VBG", "VBN", "RP", "FW"], True),
            "X": (["S", "VP", "ADJP", "JJP", "NP", "SBAR", "PP", "X"], True),
            "FRAG": (["*"], True),
            "INTJ": (["*"], False),
            "UCP": (["*"], True),
        }

    def get_head(self, tree):
        """
        Compute the head of a mention, which is represented by its parse tree.

        Args:
            tree (nltk.ParentedTree): The parse tree of a mention.

        Returns:
            nltk.ParentedTree: The subtree of the input tree which corresponds
            to the head of the mention.
        """
        head = None
        label = tree.label()

        if len(tree) == 1:
            if tree.height() == 3:
                head = tree[0]
            elif tree.height() == 2:
                head = tree
        elif label in ["NP", "NML"]:
            head = self.__get_head_for_np(tree)
        elif label in self.__nonterminals:
            head = self.__get_head_for_nonterminal(tree)

        # Fallback: no rule fired; recurse into the rightmost child.
        if head is None:
            head = self.get_head(tree[-1])

        return head

    def __get_head_for_np(self, tree):
        # Modified Collins rules for NP/NML, tried in priority order.
        if self.__rule_cc(tree) is not None:
            return self.__rule_cc(tree)
        elif self.__collins_rule_nn(tree) is not None:
            return self.__collins_rule_nn(tree)
        elif self.__collins_rule_np(tree) is not None:
            return self.get_head(self.__collins_rule_np(tree))
        elif self.__collins_rule_nml(tree) is not None:
            return self.get_head(self.__collins_rule_nml(tree))
        elif self.__collins_rule_prn(tree) is not None:
            return self.__collins_rule_prn(tree)
        elif self.__collins_rule_cd(tree) is not None:
            return self.__collins_rule_cd(tree)
        elif self.__collins_rule_jj(tree) is not None:
            return self.__collins_rule_jj(tree)
        elif self.__collins_rule_last_word(tree) is not None:
            return self.__collins_rule_last_word(tree)

    def __get_head_for_nonterminal(self, tree):
        label = tree.label()
        values, traverse_reversed = self.__nonterminal_rules[label]
        if traverse_reversed:
            # Bug fix: materialize the reversed sequence. A bare `reversed`
            # iterator is exhausted after the first candidate label in
            # `values`, which silently skipped all later head candidates.
            to_traverse = list(reversed(tree))
        else:
            to_traverse = tree
        for val in values:
            for child in to_traverse:
                child_label = child.label()
                if val == "*" or child_label == val:
                    if child_label in self.__nonterminals:
                        return self.get_head(child)
                    else:
                        return child

    def __rule_cc(self, tree):
        # Coordinated phrase: take the coordination token as head.
        if tree.label() == "NP":
            for child in tree:
                if child.label() == "CC":
                    return child

    def __collins_rule_pos(self, tree):
        # NOTE(review): part of the original Collins rule set but currently
        # not invoked by __get_head_for_np.
        if tree.pos()[-1][1] == "POS":
            return tree[-1]

    def __collins_rule_nn(self, tree):
        # Rightmost nominal (or JJR) child; recurse into NX.
        for i in range(len(tree)-1, -1, -1):
            if re.match("NN|NNP|NNPS|JJR", tree[i].label()):
                return tree[i]
            elif tree[i].label() == "NX":
                return self.get_head(tree[i])

    def __collins_rule_np(self, tree):
        for child in tree:
            if child.label() == "NP":
                return child

    def __collins_rule_nml(self, tree):
        for child in tree:
            if child.label() == "NML":
                return child

    def __collins_rule_prn(self, tree):
        # Parenthetical: head of its first child.
        for child in tree:
            if child.label() == "PRN":
                return self.get_head(child[0])

    def __collins_rule_cd(self, tree):
        for i in range(len(tree)-1, -1, -1):
            if re.match("CD", tree[i].label()):
                return tree[i]

    def __collins_rule_jj(self, tree):
        for i in range(len(tree)-1, -1, -1):
            if re.match("JJ|JJS|RB", tree[i].label()):
                return tree[i]
            elif tree[i].label() == "QP":
                return self.get_head(tree[i])

    def __collins_rule_last_word(self, tree):
        # Last resort: descend along last children to the last preterminal.
        current_tree = tree[-1]
        while current_tree.height() > 2:
            current_tree = current_tree[-1]
        # Bug fix: the original never returned `current_tree`, so this rule
        # always produced None and get_head fell through to its fallback.
        return current_tree

    @staticmethod
    def adjust_head_for_nam(tokens, pos, ner_type, in_mention_span_old_head,
                            old_head):
        """
        Adjust head for proper names via heuristics.

        Based on heuristics depending on the named entity type (person,
        organization, ...) and part-of-speech tags, adjust the head of a
        named entity mention to a meaningful extent useful for coreference
        resolution.

        For example, for the mention "Khan Younes in Southern Gaza Strip",
        this function will compute "Khan Younes" as the head.

        Args:
            tokens (list(str)): The tokens of the mention.
            pos (list(str)): The part-of-speech tags of the mention.
            ner_type (str): The named entity type of the mention. Should be
                one of PERSON, ORG, GPE, FAC, NORP, PRODUCT, EVENT, MONEY,
                WORK_OF_ART, LOC, LAW, LANGUAGE, DATE, TIME, ORDINAL,
                CARDINAL, QUANTITY, PERCENT or NONE.
            in_mention_span_old_head (spans.Span): The in-mention span of the
                old head.
            old_head (list(str)): The tokens of the old head.

        Returns:
            (Span, list(str)): The in-mention span of the adjusted head and
            the tokens of the adjusted head.
        """
        # TODO: get rid of this ugly hack
        if len(pos) == 0:
            return spans.Span(0, 0), "NOHEAD"

        stop_regex = re.compile("CC|,|\.|:|;|V.*|IN|W.*|ADVP|NN$")

        # Per-NER-type start/stop tag patterns delimiting the adjusted head.
        if re.match("ORG.*|GPE.*|FAC.*|NORP.*|PRODUCT|EVENT|MONEY|" +
                    "WORK_OF_ART|LOC.*|LAW|LANGUAGE", ner_type):
            start_regex = re.compile("NN(S)?|NNP(S)?")
            stop_regex = re.compile("V.*|IN|W.*|ADVP|,|-LRB-")
        elif ner_type == "PERSON":
            start_regex = re.compile("NN(S)?|NNP(S)?")
            stop_regex = re.compile("IN|CC|,|\.|:|;|V.*|W.*|-LRB-")
        elif re.match("DATE|TIME", ner_type):
            start_regex = re.compile("NN(S)?|NNP(S)?|CD")
        elif re.match("ORDINAL", ner_type):
            start_regex = re.compile("NN|JJ|RB")
        elif re.match("CARDINAL", ner_type):
            start_regex = re.compile("CD")
        elif re.match("QUANTITY|PERCENT", ner_type):
            start_regex = re.compile("CD|JJ|NN")
        elif ner_type == "NONE":
            start_regex = re.compile("NN(S)?|NNP(S)?|CD")
        else:
            logger.warning("No head adjustment rule defined for NER class " +
                           ner_type + ".")
            return in_mention_span_old_head, old_head

        head_start = -1
        position = 0
        for i in range(0, len(tokens)):
            position = i
            if head_start == -1 and start_regex.match(pos[i]):
                head_start = i
            elif head_start >= 0 and stop_regex.match(pos[i]):
                return spans.Span(head_start, i-1), tokens[head_start:i]

        if head_start == -1:
            head_start = 0

        # Drop a trailing possessive 's from the head.
        if pos[position] == "POS" and position == len(pos) - 1:
            position -= 1

        return spans.Span(head_start, position), tokens[head_start:position+1]
| |
"""
Tests related specifically to integration with Morango.
"""
import os
import sys
import unittest
import uuid
import requests
from django.test import TestCase
from morango.controller import MorangoProfileController
from morango.models import InstanceIDModel
from morango.models import Store
from morango.utils.register_models import _profile_models
from rest_framework import status
from six.moves.urllib.parse import urljoin
from ..models import Classroom
from ..models import Facility
from ..models import FacilityDataset
from ..models import FacilityUser
from ..models import LearnerGroup
from ..models import Role
from .helpers import DUMMY_PASSWORD
from .sync_utils import multiple_kolibri_servers
class FacilityDatasetCertificateTestCase(TestCase):
    """Checks that facility/dataset creation wires up Morango certificates."""

    def test_creating_facility_creates_dataset(self):
        # A dataset must be attached automatically when a facility is created.
        self.assertIsNotNone(Facility.objects.create(name="hallo").dataset)

    def test_creating_facilitydataset_creates_certificate(self):
        # Creating a dataset must mint a root certificate for it.
        self.assertIsNotNone(FacilityDataset.objects.create().get_root_certificate())

    def test_partition_and_id_values(self):
        facility = Facility.objects.create(name="hallo")
        dataset = facility.dataset
        dataset_id = dataset.id
        # The dataset id doubles as the root certificate id and the Morango
        # source id, and prefixes every partition in the cert's scope.
        self.assertEqual(dataset_id, dataset.get_root_certificate().id)
        self.assertEqual(dataset_id, dataset._morango_source_id)
        self.assertTrue(dataset._morango_partition.startswith(dataset_id))
        scope = dataset.get_root_certificate().get_scope()
        for partition in scope.read_filter + scope.write_filter:
            self.assertTrue(partition.startswith(dataset_id))
class DateTimeTZFieldTestCase(TestCase):
    """Regression test: datetime fields must round-trip through the Morango store."""

    def setUp(self):
        # Controller for the 'facilitydata' Morango profile.
        self.controller = MorangoProfileController("facilitydata")
        InstanceIDModel.get_or_create_current_instance()

    def test_deserializing_field(self):
        facility = Facility.objects.create(name="hallo")
        FacilityUser.objects.create(username="jamie", facility=facility)
        self.controller.serialize_into_store()
        # Force every store record to be deserialized again.
        Store.objects.update(dirty_bit=True)
        try:
            self.controller.deserialize_from_store()
        except AttributeError as e:
            # Bug fix: `e.message` does not exist in Python 3 (removed from
            # BaseException), which masked the real failure with a second
            # AttributeError; str(e) is portable.
            self.fail(str(e))
@unittest.skipIf(sys.platform.startswith("win"), "can't run on Windows")
@unittest.skipIf(
    not os.environ.get("TRAVIS_TAG"), "This test will only be run during tagged builds."
)
class EcosystemTestCase(TestCase):
    """End-to-end sync tests across multiple live Kolibri servers.

    Each test spins up several Kolibri instances (via the
    ``multiple_kolibri_servers`` decorator), mutates data on some of them
    through ORM and HTTP API calls, syncs with ``fullfacilitysync``, and
    asserts that all databases converge.
    """

    def _data(self, *args, **kwargs):
        """Shorthand: pack keyword arguments into a request payload dict."""
        return kwargs

    def _create_objects(self, server):
        """Create a small object graph (two users, a classroom, a learner
        group, memberships and an admin role) on ``server``."""
        fac = Facility.objects.using(server.db_alias).first()
        admin = FacilityUser(
            username=uuid.uuid4().hex[:30], password=DUMMY_PASSWORD, facility=fac
        )
        admin.save(using=server.db_alias)
        learner = FacilityUser(
            username=uuid.uuid4().hex[:30], password=DUMMY_PASSWORD, facility=fac
        )
        learner.save(using=server.db_alias)

        class_resp = self.request_server(
            server, "classroom", data=self._data(parent=fac.id, name=uuid.uuid4().hex)
        )
        lg_resp = self.request_server(
            server,
            "learnergroup",
            data=self._data(parent=class_resp.json()["id"], name=uuid.uuid4().hex),
        )
        self.request_server(
            server,
            "membership",
            data=self._data(user=learner.id, collection=class_resp.json()["id"]),
        )
        self.request_server(
            server,
            "membership",
            data=self._data(user=learner.id, collection=lg_resp.json()["id"]),
        )
        self.request_server(
            server,
            "role",
            data=self._data(collection=fac.id, user=admin.id, kind="admin"),
        )

    def request_server(
        self, server, endpoint, method="POST", lookup=None, data=None, params=None
    ):
        """
        :param server: kolibri instance we are querying
        :param endpoint: constant representing which kolibri endpoint we are querying
        :param method: HTTP verb/method for request
        :param lookup: the pk value for the specific object we are querying
        :param data: dict that will be form-encoded in request
        :param params: dict to be sent as part of URL's query string
        :return: ``Response`` object from request
        """
        # Bug fix: the defaults were mutable dicts (data={}, params={}),
        # which are shared across calls; normalize None to a fresh dict so
        # the request body/query behavior is unchanged.
        if data is None:
            data = {}
        if params is None:
            params = {}
        # build up url and send request
        if lookup:
            lookup = lookup + "/"
        url = urljoin(urljoin(server.base_url, "api/auth/" + endpoint + "/"), lookup)
        auth = ("superuser", "password")
        resp = requests.request(method, url, json=data, params=params, auth=auth)
        resp.raise_for_status()
        return resp

    def assertServerQuerysetEqual(self, s1, s2, dataset_id):
        """Assert that every syncable model (and the Morango Store rows) for
        ``dataset_id`` holds the same records on servers ``s1`` and ``s2``."""
        syncable_models = list(_profile_models["facilitydata"].values())
        syncable_models.pop(
            0
        )  # remove FacilityDataset because __str__() does not point to correct db alias
        for klass in syncable_models:
            self.assertQuerysetEqual(
                klass.objects.using(s1.db_alias).filter(dataset_id=dataset_id),
                [
                    repr(u)
                    for u in klass.objects.using(s2.db_alias).filter(
                        dataset_id=dataset_id
                    )
                ],
                ordered=False,
            )
        # morango models
        self.assertQuerysetEqual(
            Store.objects.using(s1.db_alias).filter(partition__startswith=dataset_id),
            [
                repr(u)
                for u in Store.objects.using(s2.db_alias).filter(
                    partition__startswith=dataset_id
                )
            ],
            ordered=False,
        )

    @multiple_kolibri_servers(3)
    def test_scenarios(self, servers):
        """Walk through create/conflict/delete/password-change sync scenarios
        across three servers and check full convergence at the end."""
        servers_len = len(servers)
        self.maxDiff = None
        s0_alias = servers[0].db_alias
        s0_url = servers[0].base_url
        s1_alias = servers[1].db_alias
        s1_url = servers[1].base_url
        s2_alias = servers[2].db_alias
        s2_url = servers[2].base_url
        servers[0].manage("loaddata", "content_test")
        servers[0].manage("generateuserdata", no_onboarding=True, num_content_items=1)
        servers[1].manage(
            "fullfacilitysync",
            base_url=s0_url,
            username="superuser",
            password="password",
        )
        servers[2].manage(
            "fullfacilitysync",
            base_url=s1_url,
            username="superuser",
            password="password",
        )

        # assert that all kolibri instances start off equal
        for i in range(servers_len):
            self.assertServerQuerysetEqual(
                servers[i],
                servers[(i + 1) % servers_len],
                FacilityDataset.objects.using(servers[0].db_alias).first().id,
            )

        # assert created user is synced
        FacilityUser(
            username="user",
            password=DUMMY_PASSWORD,
            facility=Facility.objects.using(s0_alias).first(),
        ).save(using=s0_alias)
        servers[1].manage(
            "fullfacilitysync",
            base_url=s0_url,
            username="superuser",
            password="password",
        )
        self.assertTrue(
            FacilityUser.objects.using(s1_alias).filter(username="user").exists()
        )

        # create user with same username on two servers and check they both exist
        FacilityUser(
            username="copycat",
            password=DUMMY_PASSWORD,
            facility=Facility.objects.using(s0_alias).first(),
        ).save(using=s0_alias)
        FacilityUser(
            username="copycat",
            password=DUMMY_PASSWORD,
            facility=Facility.objects.using(s1_alias).first(),
        ).save(using=s1_alias)
        servers[1].manage(
            "fullfacilitysync",
            base_url=s0_url,
            username="superuser",
            password="password",
        )
        self.assertEqual(
            FacilityUser.objects.using(s0_alias).filter(username="copycat").count(), 2
        )
        self.assertEqual(
            FacilityUser.objects.using(s1_alias).filter(username="copycat").count(), 2
        )

        # Add a classroom
        self.request_server(
            servers[0],
            "classroom",
            data=self._data(
                name="classroom", parent=Facility.objects.using(s0_alias).first().id
            ),
        )
        servers[1].manage(
            "fullfacilitysync",
            base_url=s0_url,
            username="superuser",
            password="password",
        )
        self.assertTrue(
            Classroom.objects.using(s1_alias).filter(name="classroom").exists()
        )

        # Add a learnergroup
        self.request_server(
            servers[0],
            "learnergroup",
            data=self._data(
                name="learnergroup", parent=Classroom.objects.using(s0_alias).first().id
            ),
        )
        servers[1].manage(
            "fullfacilitysync",
            base_url=s0_url,
            username="superuser",
            password="password",
        )
        self.assertTrue(
            LearnerGroup.objects.using(s1_alias).filter(name="learnergroup").exists()
        )

        # assert conflicting serialized data is appended after same role is created on different device
        fac = Facility.objects.using(s1_alias).get()
        alk_user = FacilityUser.objects.using(s0_alias).get(username="Antemblowind")
        self.request_server(
            servers[1],
            "role",
            data=self._data(collection=fac.id, user=alk_user.id, kind="admin"),
        )
        self.request_server(
            servers[0],
            "role",
            data=self._data(collection=fac.id, user=alk_user.id, kind="admin"),
        )
        servers[1].manage(
            "fullfacilitysync",
            base_url=s0_url,
            username="superuser",
            password="password",
        )
        role = Role.objects.using(s1_alias).get(user=alk_user)
        admin_role = Store.objects.using(s1_alias).get(id=role.id)
        self.assertTrue(admin_role.conflicting_serialized_data)

        # assert deleted object is propagated
        self.request_server(
            servers[0], "facilityuser", method="DELETE", lookup=alk_user.id
        )
        servers[1].manage(
            "fullfacilitysync",
            base_url=s0_url,
            username="superuser",
            password="password",
        )
        self.assertFalse(
            FacilityUser.objects.using(s1_alias)
            .filter(username="Antemblowind")
            .exists()
        )
        self.assertTrue(Store.objects.using(s1_alias).get(id=alk_user.id).deleted)

        # # role deletion and re-creation
        # Change roles for users
        alto_user = FacilityUser.objects.using(s1_alias).get(username="Altobjews1977")
        resp = self.request_server(
            servers[1],
            "role",
            data=self._data(collection=fac.id, user=alto_user.id, kind="admin"),
        )
        servers[1].manage(
            "fullfacilitysync",
            base_url=s2_url,
            username="superuser",
            password="password",
        )
        self.assertEqual(
            FacilityUser.objects.using(s2_alias)
            .get(username="Altobjews1977")
            .roles.all()
            .first()
            .kind,
            "admin",
        )
        # delete admin role and sync
        self.request_server(
            servers[2], "role", method="DELETE", lookup=resp.json()["id"]
        )
        servers[1].manage(
            "fullfacilitysync",
            base_url=s2_url,
            username="superuser",
            password="password",
        )
        # create admin role and sync
        resp = self.request_server(
            servers[1],
            "role",
            data=self._data(collection=fac.id, user=alto_user.id, kind="admin"),
        )
        servers[1].manage(
            "fullfacilitysync",
            base_url=s2_url,
            username="superuser",
            password="password",
        )
        self.assertFalse(
            Store.objects.using(s2_alias).get(id=resp.json()["id"]).deleted
        )

        # Change password for a user, check if you can log in on other device
        self.request_server(
            servers[1],
            "facilityuser",
            method="PATCH",
            lookup=alto_user.id,
            data=self._data(password="syncing"),
        )
        servers[1].manage(
            "fullfacilitysync",
            base_url=s0_url,
            username="superuser",
            password="password",
        )
        resp = self.request_server(
            servers[0],
            "session",
            data=self._data(
                username=alto_user.username, password="syncing", facility=fac.id
            ),
        )
        self.assertEqual(resp.status_code, status.HTTP_200_OK)

        # sync in a circle node twice to ensure full consistency
        for i in range(2):
            for j in range(servers_len):
                servers[j].manage(
                    "fullfacilitysync",
                    base_url=servers[(j + 1) % servers_len].base_url,
                    username="superuser",
                    password="password",
                )

        # assert that the data of specific models match up
        for i in range(servers_len):
            self.assertServerQuerysetEqual(
                servers[i],
                servers[(i + 1) % servers_len],
                FacilityDataset.objects.using(servers[0].db_alias).first().id,
            )

    @multiple_kolibri_servers(5)
    def test_chaos_sync(self, servers):
        """Interleave object creation on two of five servers with pairwise
        syncs, then sync around the ring and check convergence."""
        servers_len = len(servers)

        # consistent state for all servers
        servers[0].manage("generateuserdata", no_onboarding=True)
        for i in range(servers_len - 1):
            servers[i + 1].manage(
                "fullfacilitysync",
                base_url=servers[0].base_url,
                username="superuser",
                password="password",
            )

        # randomly create objects on two servers and sync with each other
        for i in range(10):
            if (i % 2) == 0:
                self._create_objects(servers[2])
            else:
                self._create_objects(servers[4])
            servers[2].manage(
                "fullfacilitysync",
                base_url=servers[4].base_url,
                username="superuser",
                password="password",
            )

        # sync in a circle node twice to ensure full consistency
        for i in range(2):
            for j in range(servers_len):
                servers[j].manage(
                    "fullfacilitysync",
                    base_url=servers[(j + 1) % servers_len].base_url,
                    username="superuser",
                    password="password",
                )

        # assert that the data of specific models match up
        for i in range(servers_len):
            self.assertServerQuerysetEqual(
                servers[i],
                servers[(i + 1) % servers_len],
                FacilityDataset.objects.using(servers[0].db_alias).first().id,
            )
| |
#!/usr/bin/env python
# Copyright (C) 2014 Teague Sterling, Regents of the University of California
# From http://www.sqlalchemy.org/trac/wiki/UsageRecipes/PartitionTable
from collections import OrderedDict
import itertools
import sys
from pprint import pprint
from sqlalchemy.schema import Table, Column, CreateTable
from sqlalchemy.sql.expression import Alias
from sqlalchemy.ext.compiler import compiles
from sqlalchemy import *
from sqlalchemy.schema import CreateTable
from sqlalchemy.orm import relationship, backref
from sqlalchemy.orm.session import Session
from sqlalchemy.ext.declarative import *
def replace(old, new):
    """Build a Query transformation substituting mapped class ``new`` for ``old``.

    Returns a callable suitable for ``Query.with_transformation``: it copies
    the query and rewrites its criterion, FROM objects, join entities and
    joinpoint so references to ``old`` (and its table/columns) point at
    ``new`` instead.

    NOTE(review): mutates private ``Query.__dict__`` slots (_criterion,
    _from_obj, _join_entities, _joinpoint) of an older SQLAlchemy version —
    verify these internals still exist for the installed release.
    """
    # Column/table replacement visitor for old's table -> new's table.
    replacer = PartitionReplacer(old.__table__).set_replacement(new.__table__)
    old_mapper = old.__mapper__
    new_mapper = new.__mapper__
    def transform(original):
        # filter() with no arguments yields a copy of the query.
        query = original.filter()
        query.__dict__.update({
            '_criterion': replacement_traverse(query.__dict__['_criterion'], {}, replacer),
            '_from_obj': tuple(replacement_traverse(fo, {}, replacer) for fo in query.__dict__['_from_obj']),
            '_join_entities': tuple(new_mapper if ent is old_mapper else ent for ent in query.__dict__['_join_entities']),
            '_joinpoint': {k: new if v is old else v for k,v in query.__dict__['_joinpoint'].items()},
        })
        return query
    return transform
def replace_table(old_table, new_table):
    """Build a Query transformation that swaps ``old_table`` for ``new_table``.

    Returns a callable for ``Query.with_transformation`` which copies the
    query and rewrites its criterion and FROM objects in place.
    """
    replacer = PartitionReplacer(old_table, new_table)

    def transform(original):
        # filter() with no arguments produces a copy of the query.
        rewritten = original.filter()
        # Compute the rewritten internals first...
        new_criterion = replacer.apply(rewritten._criterion)
        new_from = tuple(replacer.apply(obj) for obj in rewritten._from_obj)
        # ...then poke them into the query's private state.
        rewritten.__dict__.update(_criterion=new_criterion,
                                  _from_obj=new_from)
        return rewritten

    return transform
def replace_entity(old_cls, new_cls):
    """Build a Query transformation replacing mapped class ``old_cls`` with
    ``new_cls`` — tables, columns, join entities and joinpoint alike."""
    old_table, new_table = old_cls.__table__, new_cls.__table__
    old_map, new_map = old_cls.__mapper__, new_cls.__mapper__

    def transform(original):
        # Rewrite table/column references first, then fix entity bookkeeping.
        rewritten = original.with_transformation(replace_table(old_table, new_table))
        entities = tuple(new_map if ent is old_map else ent
                         for ent in rewritten._join_entities)
        joinpoint = {k: (new_cls if v is old_cls else v)
                     for k, v in rewritten._joinpoint.items()}
        # Apply replacements to internal query structure.
        rewritten.__dict__.update(_join_entities=entities,
                                  _joinpoint=joinpoint)
        return rewritten

    return transform
class PartitionReplacer(object):
    """Visitor replacing one table (and its columns) in an expression tree.

    Instances are callables compatible with sqlalchemy's
    ``replacement_traverse``: the traversal invokes the instance for each
    element and substitutes any non-None return value.
    """

    def __init__(self, search, replacement=None):
        self.search = search
        self.replacement = replacement

    def set_replacement(self, replacement):
        """Set the replacement table; returns self so calls can be chained."""
        self.replacement = replacement
        return self

    def __call__(self, elem):
        """Return the substitute for ``elem``, or None to leave it untouched."""
        # Column case: an element bound to the searched table maps to the
        # identically-named column on the replacement table.
        try:
            owning_table = elem.table
            column_name = elem.name
            if owning_table is self.search:
                return getattr(self.replacement.columns, column_name)
        except AttributeError:
            # Not column-like (or no replacement set) — fall through.
            pass
        # Table case: the searched table itself.
        if elem is self.search:
            return self.replacement
        return None

    def apply(self, target, options={}):
        """Run ``replacement_traverse`` over ``target`` using this replacer."""
        return replacement_traverse(target, options, self)
def _find_subelement_replacements(element, tables_with_new=True, parameters_with_values=True):
names = set()
if hasattr(element, 'params') and parameters_with_values:
parameters_with_values = False
for name, value in element.compile().params.items():
replacement = (":{}".format(name), " {!r} ".format(value))
names.add(replacement)
if len(names) == 0:
parameters_with_values = True # No parameters found
if hasattr(element, 'clauses'):
for clause in element.clauses:
names.update(_find_subelement_replacements(clause, tables_with_new, parameters_with_values))
if hasattr(element, 'left'):
names.update(_find_subelement_replacements(element.left, tables_with_new, parameters_with_values))
if hasattr(element, 'right'):
names.update(_find_subelement_replacements(element.right, tables_with_new, parameters_with_values))
if hasattr(element, 'table') and tables_with_new:
old = str(element.compile())
new = "NEW.{}".format(element.name)
names.add((old, new))
return names
class Partition(object):
    """Represents a 'table partition'."""

    def mock_parent(self):
        """Return a PartitionAlias of this partition named after its parent table.

        Bug fix: ``__parenttable__`` was referenced as a bare name, which is
        a global lookup (names with two trailing underscores are not mangled)
        and raised NameError; it is an attribute set on each generated
        partition class, so it must be read via ``self``.
        """
        return PartitionAlias(self, self.__parenttable__.fullname)
class PartitionAlias(Alias):
    """Alias subclass rendered as a bare table name in FROM clauses."""

    def alias(self, name):
        """Return a plain Alias over this one, keeping a backref in .original."""
        wrapped = Alias(self, name)
        wrapped.original = self
        return wrapped
@compiles(CreateTable, 'postgresql')
def create_partition_table(create, compiler, **kwargs):
    """Compile CREATE TABLE, appending INHERITS (<parent>) for partition tables.

    Tables carrying a ``__parenttable__`` attribute (set by the partition
    machinery) get the PostgreSQL inheritance clause; others compile normally.
    """
    ddl = compiler.visit_create_table(create, **kwargs)
    parent_table = getattr(create.element, '__parenttable__', None)
    if parent_table is None:
        return ddl
    return "{create} INHERITS ({parent})".format(create=ddl,
                                                 parent=parent_table.fullname)
@compiles(PartitionAlias)
def visit_partition(element, compiler, **kw):
    """Compile a PartitionAlias: bare name in FROM position, normal alias otherwise."""
    return element.name if kw.get('asfrom') else compiler.visit_alias(element)
def copy_model(source, new_table, new_class=None, new_bases=None):
    """Create a new model class duplicating ``source``'s columns.

    Args:
        source: declarative model class whose columns are copied.
        new_table: table name for the copy; also used (capitalized) as the
            class name when ``new_class`` is not given.
        new_class: optional explicit name for the generated class.
        new_bases: optional base-class tuple; defaults to ``source``'s bases.

    Returns:
        The newly generated model class.
    """
    class_name = new_table.capitalize() if new_class is None else new_class
    bases = source.__bases__ if new_bases is None else new_bases
    # Copy each column under its own name, then attach the table name.
    attrs = {col.name: col.copy() for col in source.__table__.columns}
    attrs['__tablename__'] = new_table
    return type(class_name, bases, attrs)
class Partitioned(object):
    """Mixin adding PostgreSQL-style table partitioning to a declarative model.

    Subclasses may configure:
      - ``__partitionprefix__``: explicit table-name prefix for partitions;
      - ``__partitioncolumn__``: column (or callable returning one) used to
        build a default prefix of the form ``<table>_<column>_``;
      - ``__generate_partitions__``: classmethod hook that defines all
        partitions on demand (run once by ``load_partitions``).
    Partitions are registered in ``__partitions__`` (an OrderedDict of
    dynamically created declarative classes inheriting from ``Partition``).
    """
    __partitionprefix__ = None
    __partitioncolumn__ = None
    __partitionconstraint__ = None
    __generate_partitions__ = None
    __partitions_loaded__ = False
    __partitions__ = None

    @classmethod
    def get_partition(cls, partition_name, constraint=None, **definition):
        """Return the named partition, defining it first if necessary."""
        if cls.__partitions__ is None:
            # Robustness fix: a lookup before any definition used to raise
            # TypeError (None is not subscriptable) instead of defining.
            cls.__partitions__ = OrderedDict()
        try:
            return cls.__partitions__[partition_name]
        except KeyError:
            return cls.define_partition(partition_name, constraint, **definition)

    @classmethod
    def define_partition(cls, partition_name, constraint=None, **definition):
        """Create and register a new partition class.

        ``constraint`` may be raw SQL text or a callable taking the new
        partition class; it becomes a CHECK constraint on the partition
        table.  Extra keyword arguments are injected into the generated
        class namespace.
        """
        if cls.__partitions__ is None:
            cls.__partitions__ = OrderedDict()
        # Bug fix: the original used ``is`` (identity of a name against the
        # dict itself), so redefinition was never detected.
        if partition_name in cls.__partitions__:
            raise ValueError("Cannot redefine partition: {}".format(partition_name))
        if cls.__partitionprefix__ is not None:
            prefix = cls.__partitionprefix__
        elif cls.__partitioncolumn__ is not None:
            prefix_column = cls.__partitioncolumn__
            if callable(prefix_column):
                prefix_column = prefix_column()
            prefix = "{0}_{1}_".format(cls.__tablename__,
                                       prefix_column.name)
        else:
            prefix = cls.__tablename__ + "_"
        partition_table = prefix + partition_name
        partition_cls = partition_table.capitalize()
        # Make sure we don't attempt to redefine the partition in metadata
        if partition_table in cls.metadata.tables:
            return cls._decl_class_registry[partition_cls]
        bases = tuple(base for base in cls.__bases__ if base is not Partitioned) + (Partition,)
        definition.update({
            '__tablename__': partition_table,
            '__parenttable__': cls.__table__,
        })
        # Defer constraint evaluation until the partition class exists.
        definition['__raw_partition_constraint__'] = constraint
        partition = type(partition_cls, bases, definition)
        cls.__partitions__[partition_name] = partition
        if partition.__raw_partition_constraint__ is not None:
            raw_constraint = partition.__raw_partition_constraint__
            if hasattr(raw_constraint, '__call__'):
                try:
                    raw_constraint = raw_constraint(partition)
                except TypeError:
                    raw_constraint = raw_constraint.im_func(partition)  # Force raw call
            constraint_name = "cst_" + partition_table + "_partition"
            constraint_clause = CheckConstraint(raw_constraint, name=constraint_name)
        else:
            constraint_clause = None
        setattr(partition, '__partitionconstraint__', constraint_clause)
        setattr(partition.__table__, '__parenttable__', partition.__parenttable__)
        return partition

    @classmethod
    def partition_defined(cls, name):
        """Return True if ``name`` has already been registered."""
        if cls.__partitions__ is None:
            cls.__partitions__ = OrderedDict()
        return name in cls.__partitions__

    @classmethod
    def load_partitions(cls):
        """Run the deferred ``__generate_partitions__`` hook exactly once."""
        if getattr(cls, '__partitions__', None) is None:
            cls.__partitions__ = OrderedDict()
        if getattr(cls, '__generate_partitions__', None) is not None and not cls.__partitions_loaded__:
            cls.__generate_partitions__()
            cls.__partitions_loaded__ = True

    @classmethod
    def partitions(cls):
        """Return all partition classes, generating them if needed."""
        cls.load_partitions()
        return cls.__partitions__.values()

    # TODO: Replace with custom DDL hooks
    @classmethod
    def create_insert_trigger_ddl(cls):
        """Build the plpgsql trigger DDL that routes INSERTs to partitions.

        Returns None when no partition declares a CHECK constraint (there
        is nothing to route on).
        """
        cls.load_partitions()
        if all(part.__partitionconstraint__ is None for part in cls.__partitions__.values()):
            return None
        parent_table = cls.__tablename__
        function_name = parent_table + "_insert_function"
        trigger_name = parent_table + "_insert_trigger"
        trigger_start = """
            CREATE OR REPLACE FUNCTION {fn}()
            RETURNS TRIGGER AS $$
            BEGIN
        """.format(fn=function_name)
        trigger_checks = []
        first = True
        for partition in cls.__partitions__.values():
            if partition.__partitionconstraint__ is None:
                continue
            if first:
                check_tpl = "IF ({test}) THEN"
                first = False
            else:
                check_tpl = "ELSIF ({test}) THEN"
            check_tpl += """
                INSERT INTO {partition_name} VALUES (NEW.*);
            """
            test_structure = partition.__partitionconstraint__.sqltext
            # Bug fix: ``key=len`` on (old, new) pairs is a constant (2).
            # Sort longest original fragment first so a shorter replacement
            # never clobbers part of a longer one.
            replacements = sorted(_find_subelement_replacements(test_structure,
                                                                tables_with_new=True,  # Coerce table to NEW row
                                                                parameters_with_values=True),  # Hardcode parameters
                                  key=lambda pair: len(pair[0]), reverse=True)
            test = str(test_structure)
            for old, new in replacements:
                test = test.replace(old, new)
            check = check_tpl.format(partition_name=partition.__tablename__,
                                     test=test)
            trigger_checks.append(check)
        # Bug fix: PostgreSQL requires parentheses after the function name
        # in ``EXECUTE PROCEDURE`` (``{fn}()`` rather than ``{fn}``).
        trigger_end = """
            ELSE
                RAISE EXCEPTION 'Insert error on {parent}. No child defined. Consider updating {fn}()';
            END IF;
            RETURN NULL;
        END;
        $$
        LANGUAGE plpgsql;
        CREATE TRIGGER {trigger}
        BEFORE INSERT ON {parent}
        FOR EACH ROW EXECUTE PROCEDURE {fn}();
        """.format(parent=parent_table,
                   trigger=trigger_name,
                   fn=function_name)
        sql = trigger_start + "\n".join(trigger_checks) + trigger_end
        return sql
def test():
    # Smoke test / demonstration: builds a partitioned Fingerprint model,
    # prints the per-partition CREATE DDL, example join queries and the
    # routing-trigger DDL.  Python 2 syntax (print statements).
    metadata = MetaData()
    # t1 = Table('sometable', metadata,
    #      Column('id', Integer, primary_key=True),
    #      Column('data', String(50))
    # )
    #
    # print select([t1]).where(t1.c.data == 'foo')
    #
    # print
    #
    # t1_partition_a = Partition(t1, "partition_a")
    # print select([t1_partition_a]).where(t1_partition_a.c.data=='foo')
    #
    # print
    #
    # t1_p_alias = t1_partition_a.alias()
    # print select([t1_p_alias]).where(t1_p_alias.c.data=='foo')
    #
    # print "-" * 80
    #
    Base = declarative_base()
    class FingerprintId(Base):
        # Link table mapping fingerprint ids to substances.
        __tablename__ = 'fpid'
        id = Column('fpid', Integer, primary_key=True)
        substances = relationship('Substance', backref="fingerprints")
    class Substance(Base):
        # A chemical substance with a SMILES string and a fingerprint ref.
        __tablename__ = 'substance'
        id = Column('sub_id', Integer, primary_key=True)
        smiles = Column('smiles', String)
        fingerprint = Column('fp', ForeignKey(FingerprintId.id))
    class FingerprintTable(object):
        # Reusable declarative mixin for fingerprint tables; the partition
        # prefix is derived from the id column.
        __partitioncolumn__ = classmethod(lambda cls: cls.id)
        @declared_attr
        def id(cls):
            return Column('fp_id', ForeignKey('fpid.fpid'), primary_key=True)
        @declared_attr
        def ecfp4(cls):
            return Column('ecfp4_fp', String)
        @declared_attr
        def substances(cls):
            return relationship(Substance,
                                secondary="fpid",
                                primaryjoin="{}.id==FingerprintId.id".format(cls.__name__),
                                secondaryjoin=FingerprintId.id==Substance.fingerprint)
    class Fingerprint(Base, FingerprintTable, Partitioned):
        __tablename__ = 'fingerprints'
        # NOTE(review): the third range (1000001, 150000) has its upper bound
        # below its lower bound -- probably meant 1500000; confirm.
        test_parts = [(1, 500000), (500001, 1000000), (1000001, 150000)]
        @classmethod
        def __generate_partitions__(cls):
            for low, high in cls.test_parts:
                name = "{0}_{1}".format(low, high)
                # The lambda captures low/high late, but define_partition
                # evaluates it immediately (same loop iteration), so the
                # usual late-binding pitfall does not bite here.
                check = lambda cls: (cls.id >= low) & (cls.id < high)
                part = cls.define_partition(name, constraint=check, ID_LOWER_BOUND=low, ID_UPPER_BOUND=high)
    for part in Fingerprint.partitions():
        print part.get_create_ddl()
        print
    session = Session()
    for partition in Fingerprint.partitions():
        q = session.query(Substance)\
            .join(FingerprintId)\
            .join(partition)\
            .filter(partition.ecfp4 % 'bla')
        print q
        print
    print Fingerprint.create_insert_trigger_ddl()
    return Substance, FingerprintId, Fingerprint
# Run the demonstration when executed as a script.
if __name__ == '__main__':
    test()
| |
#!/usr/bin/env python
class Font:
    """8x8 monospace bitmap font covering the full 256-glyph CP437 code page.

    Each glyph is a list of 8 bytes; the exact bit orientation
    (column-major vs row-major) depends on the consumer -- TODO confirm
    with the renderer that uses this table.

    Fix: removed the stray C-style ``];`` terminator on the glyph table.
    """
    name = 'CP437'

    def getCharSize(self, char):
        """Return the advance width in pixels (constant 8: monospace)."""
        return 8

    def getLetterSpace(self, char):
        """Return extra spacing between letters (always 0 for this font)."""
        return 0

    def getChar(self, char):
        """Return the 8-byte glyph for ``char``; code point is taken mod 256."""
        return self.__glyph[ord(char) % 256]

    # Glyph table: one 8-byte entry per CP437 code point (0x00-0xFF).
    __glyph = [
        [ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 ],  # 0x00
        [ 0x7E, 0x81, 0x95, 0xB1, 0xB1, 0x95, 0x81, 0x7E ],  # 0x01
        [ 0x7E, 0xFF, 0xEB, 0xCF, 0xCF, 0xEB, 0xFF, 0x7E ],  # 0x02
        [ 0x0E, 0x1F, 0x3F, 0x7E, 0x3F, 0x1F, 0x0E, 0x00 ],  # 0x03
        [ 0x08, 0x1C, 0x3E, 0x7F, 0x3E, 0x1C, 0x08, 0x00 ],  # 0x04
        [ 0x18, 0xBA, 0xFF, 0xFF, 0xFF, 0xBA, 0x18, 0x00 ],  # 0x05
        [ 0x10, 0xB8, 0xFC, 0xFF, 0xFC, 0xB8, 0x10, 0x00 ],  # 0x06
        [ 0x00, 0x00, 0x18, 0x3C, 0x3C, 0x18, 0x00, 0x00 ],  # 0x07
        [ 0xFF, 0xFF, 0xE7, 0xC3, 0xC3, 0xE7, 0xFF, 0xFF ],  # 0x08
        [ 0x00, 0x3C, 0x66, 0x42, 0x42, 0x66, 0x3C, 0x00 ],  # 0x09
        [ 0xFF, 0xC3, 0x99, 0xBD, 0xBD, 0x99, 0xC3, 0xFF ],  # 0x0A
        [ 0x70, 0xF8, 0x88, 0x88, 0xFD, 0x7F, 0x07, 0x0F ],  # 0x0B
        [ 0x00, 0x4E, 0x5F, 0xF1, 0xF1, 0x5F, 0x4E, 0x00 ],  # 0x0C
        [ 0xC0, 0xE0, 0xFF, 0x7F, 0x05, 0x05, 0x07, 0x07 ],  # 0x0D
        [ 0xC0, 0xFF, 0x7F, 0x05, 0x05, 0x65, 0x7F, 0x3F ],  # 0x0E
        [ 0x99, 0x5A, 0x3C, 0xE7, 0xE7, 0x3C, 0x5A, 0x99 ],  # 0x0F
        [ 0x7F, 0x3E, 0x3E, 0x1C, 0x1C, 0x08, 0x08, 0x00 ],  # 0x10
        [ 0x08, 0x08, 0x1C, 0x1C, 0x3E, 0x3E, 0x7F, 0x00 ],  # 0x11
        [ 0x00, 0x24, 0x66, 0xFF, 0xFF, 0x66, 0x24, 0x00 ],  # 0x12
        [ 0x00, 0x5F, 0x5F, 0x00, 0x00, 0x5F, 0x5F, 0x00 ],  # 0x13
        [ 0x06, 0x0F, 0x09, 0x7F, 0x7F, 0x01, 0x7F, 0x7F ],  # 0x14
        [ 0x40, 0xDA, 0xBF, 0xA5, 0xFD, 0x59, 0x03, 0x02 ],  # 0x15
        [ 0x00, 0x70, 0x70, 0x70, 0x70, 0x70, 0x70, 0x00 ],  # 0x16
        [ 0x80, 0x94, 0xB6, 0xFF, 0xFF, 0xB6, 0x94, 0x80 ],  # 0x17
        [ 0x00, 0x04, 0x06, 0x7F, 0x7F, 0x06, 0x04, 0x00 ],  # 0x18
        [ 0x00, 0x10, 0x30, 0x7F, 0x7F, 0x30, 0x10, 0x00 ],  # 0x19
        [ 0x08, 0x08, 0x08, 0x2A, 0x3E, 0x1C, 0x08, 0x00 ],  # 0x1A
        [ 0x08, 0x1C, 0x3E, 0x2A, 0x08, 0x08, 0x08, 0x00 ],  # 0x1B
        [ 0x3C, 0x3C, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00 ],  # 0x1C
        [ 0x08, 0x1C, 0x3E, 0x08, 0x08, 0x3E, 0x1C, 0x08 ],  # 0x1D
        [ 0x30, 0x38, 0x3C, 0x3E, 0x3E, 0x3C, 0x38, 0x30 ],  # 0x1E
        [ 0x06, 0x0E, 0x1E, 0x3E, 0x3E, 0x1E, 0x0E, 0x06 ],  # 0x1F
        [ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 ],  # ' '
        [ 0x00, 0x06, 0x5F, 0x5F, 0x06, 0x00, 0x00, 0x00 ],  # '!'
        [ 0x00, 0x07, 0x07, 0x00, 0x07, 0x07, 0x00, 0x00 ],  # '"'
        [ 0x14, 0x7F, 0x7F, 0x14, 0x7F, 0x7F, 0x14, 0x00 ],  # '#'
        [ 0x24, 0x2E, 0x6B, 0x6B, 0x3A, 0x12, 0x00, 0x00 ],  # '$'
        [ 0x46, 0x66, 0x30, 0x18, 0x0C, 0x66, 0x62, 0x00 ],  # '%'
        [ 0x30, 0x7A, 0x4F, 0x5D, 0x37, 0x7A, 0x48, 0x00 ],  # '&'
        [ 0x04, 0x07, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00 ],  # '''
        [ 0x00, 0x1C, 0x3E, 0x63, 0x41, 0x00, 0x00, 0x00 ],  # '('
        [ 0x00, 0x41, 0x63, 0x3E, 0x1C, 0x00, 0x00, 0x00 ],  # ')'
        [ 0x08, 0x2A, 0x3E, 0x1C, 0x1C, 0x3E, 0x2A, 0x08 ],  # '*'
        [ 0x08, 0x08, 0x3E, 0x3E, 0x08, 0x08, 0x00, 0x00 ],  # '+'
        [ 0x00, 0x80, 0xE0, 0x60, 0x00, 0x00, 0x00, 0x00 ],  # ','
        [ 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x00, 0x00 ],  # '-'
        [ 0x00, 0x00, 0x60, 0x60, 0x00, 0x00, 0x00, 0x00 ],  # '.'
        [ 0x60, 0x30, 0x18, 0x0C, 0x06, 0x03, 0x01, 0x00 ],  # '/'
        [ 0x3E, 0x7F, 0x71, 0x59, 0x4D, 0x7F, 0x3E, 0x00 ],  # '0'
        [ 0x40, 0x42, 0x7F, 0x7F, 0x40, 0x40, 0x00, 0x00 ],  # '1'
        [ 0x62, 0x73, 0x59, 0x49, 0x6F, 0x66, 0x00, 0x00 ],  # '2'
        [ 0x22, 0x63, 0x49, 0x49, 0x7F, 0x36, 0x00, 0x00 ],  # '3'
        [ 0x18, 0x1C, 0x16, 0x53, 0x7F, 0x7F, 0x50, 0x00 ],  # '4'
        [ 0x27, 0x67, 0x45, 0x45, 0x7D, 0x39, 0x00, 0x00 ],  # '5'
        [ 0x3C, 0x7E, 0x4B, 0x49, 0x79, 0x30, 0x00, 0x00 ],  # '6'
        [ 0x03, 0x03, 0x71, 0x79, 0x0F, 0x07, 0x00, 0x00 ],  # '7'
        [ 0x36, 0x7F, 0x49, 0x49, 0x7F, 0x36, 0x00, 0x00 ],  # '8'
        [ 0x06, 0x4F, 0x49, 0x69, 0x3F, 0x1E, 0x00, 0x00 ],  # '9'
        [ 0x00, 0x00, 0x66, 0x66, 0x00, 0x00, 0x00, 0x00 ],  # ':'
        [ 0x00, 0x80, 0xE6, 0x66, 0x00, 0x00, 0x00, 0x00 ],  # ';'
        [ 0x08, 0x1C, 0x36, 0x63, 0x41, 0x00, 0x00, 0x00 ],  # '<'
        [ 0x24, 0x24, 0x24, 0x24, 0x24, 0x24, 0x00, 0x00 ],  # '='
        [ 0x00, 0x41, 0x63, 0x36, 0x1C, 0x08, 0x00, 0x00 ],  # '>'
        [ 0x02, 0x03, 0x51, 0x59, 0x0F, 0x06, 0x00, 0x00 ],  # '?'
        [ 0x3E, 0x7F, 0x41, 0x5D, 0x5D, 0x1F, 0x1E, 0x00 ],  # '@'
        [ 0x7C, 0x7E, 0x13, 0x13, 0x7E, 0x7C, 0x00, 0x00 ],  # 'A'
        [ 0x41, 0x7F, 0x7F, 0x49, 0x49, 0x7F, 0x36, 0x00 ],  # 'B'
        [ 0x1C, 0x3E, 0x63, 0x41, 0x41, 0x63, 0x22, 0x00 ],  # 'C'
        [ 0x41, 0x7F, 0x7F, 0x41, 0x63, 0x3E, 0x1C, 0x00 ],  # 'D'
        [ 0x41, 0x7F, 0x7F, 0x49, 0x5D, 0x41, 0x63, 0x00 ],  # 'E'
        [ 0x41, 0x7F, 0x7F, 0x49, 0x1D, 0x01, 0x03, 0x00 ],  # 'F'
        [ 0x1C, 0x3E, 0x63, 0x41, 0x51, 0x73, 0x72, 0x00 ],  # 'G'
        [ 0x7F, 0x7F, 0x08, 0x08, 0x7F, 0x7F, 0x00, 0x00 ],  # 'H'
        [ 0x00, 0x41, 0x7F, 0x7F, 0x41, 0x00, 0x00, 0x00 ],  # 'I'
        [ 0x30, 0x70, 0x40, 0x41, 0x7F, 0x3F, 0x01, 0x00 ],  # 'J'
        [ 0x41, 0x7F, 0x7F, 0x08, 0x1C, 0x77, 0x63, 0x00 ],  # 'K'
        [ 0x41, 0x7F, 0x7F, 0x41, 0x40, 0x60, 0x70, 0x00 ],  # 'L'
        [ 0x7F, 0x7F, 0x0E, 0x1C, 0x0E, 0x7F, 0x7F, 0x00 ],  # 'M'
        [ 0x7F, 0x7F, 0x06, 0x0C, 0x18, 0x7F, 0x7F, 0x00 ],  # 'N'
        [ 0x1C, 0x3E, 0x63, 0x41, 0x63, 0x3E, 0x1C, 0x00 ],  # 'O'
        [ 0x41, 0x7F, 0x7F, 0x49, 0x09, 0x0F, 0x06, 0x00 ],  # 'P'
        [ 0x1E, 0x3F, 0x21, 0x71, 0x7F, 0x5E, 0x00, 0x00 ],  # 'Q'
        [ 0x41, 0x7F, 0x7F, 0x09, 0x19, 0x7F, 0x66, 0x00 ],  # 'R'
        [ 0x26, 0x6F, 0x4D, 0x59, 0x73, 0x32, 0x00, 0x00 ],  # 'S'
        [ 0x03, 0x41, 0x7F, 0x7F, 0x41, 0x03, 0x00, 0x00 ],  # 'T'
        [ 0x7F, 0x7F, 0x40, 0x40, 0x7F, 0x7F, 0x00, 0x00 ],  # 'U'
        [ 0x1F, 0x3F, 0x60, 0x60, 0x3F, 0x1F, 0x00, 0x00 ],  # 'V'
        [ 0x7F, 0x7F, 0x30, 0x18, 0x30, 0x7F, 0x7F, 0x00 ],  # 'W'
        [ 0x43, 0x67, 0x3C, 0x18, 0x3C, 0x67, 0x43, 0x00 ],  # 'X'
        [ 0x07, 0x4F, 0x78, 0x78, 0x4F, 0x07, 0x00, 0x00 ],  # 'Y'
        [ 0x47, 0x63, 0x71, 0x59, 0x4D, 0x67, 0x73, 0x00 ],  # 'Z'
        [ 0x00, 0x7F, 0x7F, 0x41, 0x41, 0x00, 0x00, 0x00 ],  # '['
        [ 0x01, 0x03, 0x06, 0x0C, 0x18, 0x30, 0x60, 0x00 ],  # backslash
        [ 0x00, 0x41, 0x41, 0x7F, 0x7F, 0x00, 0x00, 0x00 ],  # ']'
        [ 0x08, 0x0C, 0x06, 0x03, 0x06, 0x0C, 0x08, 0x00 ],  # '^'
        [ 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80 ],  # '_'
        [ 0x00, 0x00, 0x03, 0x07, 0x04, 0x00, 0x00, 0x00 ],  # '`'
        [ 0x20, 0x74, 0x54, 0x54, 0x3C, 0x78, 0x40, 0x00 ],  # 'a'
        [ 0x41, 0x7F, 0x3F, 0x48, 0x48, 0x78, 0x30, 0x00 ],  # 'b'
        [ 0x38, 0x7C, 0x44, 0x44, 0x6C, 0x28, 0x00, 0x00 ],  # 'c'
        [ 0x30, 0x78, 0x48, 0x49, 0x3F, 0x7F, 0x40, 0x00 ],  # 'd'
        [ 0x38, 0x7C, 0x54, 0x54, 0x5C, 0x18, 0x00, 0x00 ],  # 'e'
        [ 0x48, 0x7E, 0x7F, 0x49, 0x03, 0x02, 0x00, 0x00 ],  # 'f'
        [ 0x98, 0xBC, 0xA4, 0xA4, 0xF8, 0x7C, 0x04, 0x00 ],  # 'g'
        [ 0x41, 0x7F, 0x7F, 0x08, 0x04, 0x7C, 0x78, 0x00 ],  # 'h'
        [ 0x00, 0x44, 0x7D, 0x7D, 0x40, 0x00, 0x00, 0x00 ],  # 'i'
        [ 0x60, 0xE0, 0x80, 0x80, 0xFD, 0x7D, 0x00, 0x00 ],  # 'j'
        [ 0x41, 0x7F, 0x7F, 0x10, 0x38, 0x6C, 0x44, 0x00 ],  # 'k'
        [ 0x00, 0x41, 0x7F, 0x7F, 0x40, 0x00, 0x00, 0x00 ],  # 'l'
        [ 0x7C, 0x7C, 0x18, 0x38, 0x1C, 0x7C, 0x78, 0x00 ],  # 'm'
        [ 0x7C, 0x7C, 0x04, 0x04, 0x7C, 0x78, 0x00, 0x00 ],  # 'n'
        [ 0x38, 0x7C, 0x44, 0x44, 0x7C, 0x38, 0x00, 0x00 ],  # 'o'
        [ 0x84, 0xFC, 0xF8, 0xA4, 0x24, 0x3C, 0x18, 0x00 ],  # 'p'
        [ 0x18, 0x3C, 0x24, 0xA4, 0xF8, 0xFC, 0x84, 0x00 ],  # 'q'
        [ 0x44, 0x7C, 0x78, 0x4C, 0x04, 0x1C, 0x18, 0x00 ],  # 'r'
        [ 0x48, 0x5C, 0x54, 0x54, 0x74, 0x24, 0x00, 0x00 ],  # 's'
        [ 0x00, 0x04, 0x3E, 0x7F, 0x44, 0x24, 0x00, 0x00 ],  # 't'
        [ 0x3C, 0x7C, 0x40, 0x40, 0x3C, 0x7C, 0x40, 0x00 ],  # 'u'
        [ 0x1C, 0x3C, 0x60, 0x60, 0x3C, 0x1C, 0x00, 0x00 ],  # 'v'
        [ 0x3C, 0x7C, 0x70, 0x38, 0x70, 0x7C, 0x3C, 0x00 ],  # 'w'
        [ 0x44, 0x6C, 0x38, 0x10, 0x38, 0x6C, 0x44, 0x00 ],  # 'x'
        [ 0x9C, 0xBC, 0xA0, 0xA0, 0xFC, 0x7C, 0x00, 0x00 ],  # 'y'
        [ 0x4C, 0x64, 0x74, 0x5C, 0x4C, 0x64, 0x00, 0x00 ],  # 'z'
        [ 0x08, 0x08, 0x3E, 0x77, 0x41, 0x41, 0x00, 0x00 ],  # '{'
        [ 0x00, 0x00, 0x00, 0x77, 0x77, 0x00, 0x00, 0x00 ],  # '|'
        [ 0x41, 0x41, 0x77, 0x3E, 0x08, 0x08, 0x00, 0x00 ],  # '}'
        [ 0x02, 0x03, 0x01, 0x03, 0x02, 0x03, 0x01, 0x00 ],  # '~'
        [ 0x70, 0x78, 0x4C, 0x46, 0x4C, 0x78, 0x70, 0x00 ],  # 0x7F
        [ 0x0E, 0x9F, 0x91, 0xB1, 0xFB, 0x4A, 0x00, 0x00 ],  # 0x80
        [ 0x3A, 0x7A, 0x40, 0x40, 0x7A, 0x7A, 0x40, 0x00 ],  # 0x81
        [ 0x38, 0x7C, 0x54, 0x55, 0x5D, 0x19, 0x00, 0x00 ],  # 0x82
        [ 0x02, 0x23, 0x75, 0x55, 0x55, 0x7D, 0x7B, 0x42 ],  # 0x83
        [ 0x21, 0x75, 0x54, 0x54, 0x7D, 0x79, 0x40, 0x00 ],  # 0x84
        [ 0x21, 0x75, 0x55, 0x54, 0x7C, 0x78, 0x40, 0x00 ],  # 0x85
        [ 0x20, 0x74, 0x57, 0x57, 0x7C, 0x78, 0x40, 0x00 ],  # 0x86
        [ 0x18, 0x3C, 0xA4, 0xA4, 0xE4, 0x40, 0x00, 0x00 ],  # 0x87
        [ 0x02, 0x3B, 0x7D, 0x55, 0x55, 0x5D, 0x1B, 0x02 ],  # 0x88
        [ 0x39, 0x7D, 0x54, 0x54, 0x5D, 0x19, 0x00, 0x00 ],  # 0x89
        [ 0x39, 0x7D, 0x55, 0x54, 0x5C, 0x18, 0x00, 0x00 ],  # 0x8A
        [ 0x01, 0x45, 0x7C, 0x7C, 0x41, 0x01, 0x00, 0x00 ],  # 0x8B
        [ 0x02, 0x03, 0x45, 0x7D, 0x7D, 0x43, 0x02, 0x00 ],  # 0x8C
        [ 0x01, 0x45, 0x7D, 0x7C, 0x40, 0x00, 0x00, 0x00 ],  # 0x8D
        [ 0x79, 0x7D, 0x16, 0x12, 0x16, 0x7D, 0x79, 0x00 ],  # 0x8E
        [ 0x70, 0x78, 0x2B, 0x2B, 0x78, 0x70, 0x00, 0x00 ],  # 0x8F
        [ 0x44, 0x7C, 0x7C, 0x55, 0x55, 0x45, 0x00, 0x00 ],  # 0x90
        [ 0x20, 0x74, 0x54, 0x54, 0x7C, 0x7C, 0x54, 0x54 ],  # 0x91
        [ 0x7C, 0x7E, 0x0B, 0x09, 0x7F, 0x7F, 0x49, 0x00 ],  # 0x92
        [ 0x32, 0x7B, 0x49, 0x49, 0x7B, 0x32, 0x00, 0x00 ],  # 0x93
        [ 0x32, 0x7A, 0x48, 0x48, 0x7A, 0x32, 0x00, 0x00 ],  # 0x94
        [ 0x32, 0x7A, 0x4A, 0x48, 0x78, 0x30, 0x00, 0x00 ],  # 0x95
        [ 0x3A, 0x7B, 0x41, 0x41, 0x7B, 0x7A, 0x40, 0x00 ],  # 0x96
        [ 0x3A, 0x7A, 0x42, 0x40, 0x78, 0x78, 0x40, 0x00 ],  # 0x97
        [ 0x9A, 0xBA, 0xA0, 0xA0, 0xFA, 0x7A, 0x00, 0x00 ],  # 0x98
        [ 0x01, 0x19, 0x3C, 0x66, 0x66, 0x3C, 0x19, 0x01 ],  # 0x99
        [ 0x3D, 0x7D, 0x40, 0x40, 0x7D, 0x3D, 0x00, 0x00 ],  # 0x9A
        [ 0x18, 0x3C, 0x24, 0xE7, 0xE7, 0x24, 0x24, 0x00 ],  # 0x9B
        [ 0x68, 0x7E, 0x7F, 0x49, 0x43, 0x66, 0x20, 0x00 ],  # 0x9C
        [ 0x2B, 0x2F, 0xFC, 0xFC, 0x2F, 0x2B, 0x00, 0x00 ],  # 0x9D
        [ 0xFF, 0xFF, 0x09, 0x09, 0x2F, 0xF6, 0xF8, 0xA0 ],  # 0x9E
        [ 0x40, 0xC0, 0x88, 0xFE, 0x7F, 0x09, 0x03, 0x02 ],  # 0x9F
        [ 0x20, 0x74, 0x54, 0x55, 0x7D, 0x79, 0x40, 0x00 ],  # 0xA0
        [ 0x00, 0x44, 0x7D, 0x7D, 0x41, 0x00, 0x00, 0x00 ],  # 0xA1
        [ 0x30, 0x78, 0x48, 0x4A, 0x7A, 0x32, 0x00, 0x00 ],  # 0xA2
        [ 0x38, 0x78, 0x40, 0x42, 0x7A, 0x7A, 0x40, 0x00 ],  # 0xA3
        [ 0x7A, 0x7A, 0x0A, 0x0A, 0x7A, 0x70, 0x00, 0x00 ],  # 0xA4
        [ 0x7D, 0x7D, 0x19, 0x31, 0x7D, 0x7D, 0x00, 0x00 ],  # 0xA5
        [ 0x00, 0x26, 0x2F, 0x29, 0x2F, 0x2F, 0x28, 0x00 ],  # 0xA6
        [ 0x00, 0x26, 0x2F, 0x29, 0x2F, 0x26, 0x00, 0x00 ],  # 0xA7
        [ 0x30, 0x78, 0x4D, 0x45, 0x60, 0x20, 0x00, 0x00 ],  # 0xA8
        [ 0x38, 0x38, 0x08, 0x08, 0x08, 0x08, 0x00, 0x00 ],  # 0xA9
        [ 0x08, 0x08, 0x08, 0x08, 0x38, 0x38, 0x00, 0x00 ],  # 0xAA
        [ 0x4F, 0x6F, 0x30, 0x18, 0xCC, 0xEE, 0xBB, 0x91 ],  # 0xAB
        [ 0x4F, 0x6F, 0x30, 0x18, 0x6C, 0x76, 0xFB, 0xF9 ],  # 0xAC
        [ 0x00, 0x00, 0x00, 0x7B, 0x7B, 0x00, 0x00, 0x00 ],  # 0xAD
        [ 0x08, 0x1C, 0x36, 0x22, 0x08, 0x1C, 0x36, 0x22 ],  # 0xAE
        [ 0x22, 0x36, 0x1C, 0x08, 0x22, 0x36, 0x1C, 0x08 ],  # 0xAF
        [ 0xAA, 0x00, 0x55, 0x00, 0xAA, 0x00, 0x55, 0x00 ],  # 0xB0
        [ 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55 ],  # 0xB1
        [ 0xDD, 0xFF, 0xAA, 0x77, 0xDD, 0xAA, 0xFF, 0x77 ],  # 0xB2
        [ 0x00, 0x00, 0x00, 0xFF, 0xFF, 0x00, 0x00, 0x00 ],  # 0xB3
        [ 0x10, 0x10, 0x10, 0xFF, 0xFF, 0x00, 0x00, 0x00 ],  # 0xB4
        [ 0x14, 0x14, 0x14, 0xFF, 0xFF, 0x00, 0x00, 0x00 ],  # 0xB5
        [ 0x10, 0x10, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0x00 ],  # 0xB6
        [ 0x10, 0x10, 0xF0, 0xF0, 0x10, 0xF0, 0xF0, 0x00 ],  # 0xB7
        [ 0x14, 0x14, 0x14, 0xFC, 0xFC, 0x00, 0x00, 0x00 ],  # 0xB8
        [ 0x14, 0x14, 0xF7, 0xF7, 0x00, 0xFF, 0xFF, 0x00 ],  # 0xB9
        [ 0x00, 0x00, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0x00 ],  # 0xBA
        [ 0x14, 0x14, 0xF4, 0xF4, 0x04, 0xFC, 0xFC, 0x00 ],  # 0xBB
        [ 0x14, 0x14, 0x17, 0x17, 0x10, 0x1F, 0x1F, 0x00 ],  # 0xBC
        [ 0x10, 0x10, 0x1F, 0x1F, 0x10, 0x1F, 0x1F, 0x00 ],  # 0xBD
        [ 0x14, 0x14, 0x14, 0x1F, 0x1F, 0x00, 0x00, 0x00 ],  # 0xBE
        [ 0x10, 0x10, 0x10, 0xF0, 0xF0, 0x00, 0x00, 0x00 ],  # 0xBF
        [ 0x00, 0x00, 0x00, 0x1F, 0x1F, 0x10, 0x10, 0x10 ],  # 0xC0
        [ 0x10, 0x10, 0x10, 0x1F, 0x1F, 0x10, 0x10, 0x10 ],  # 0xC1
        [ 0x10, 0x10, 0x10, 0xF0, 0xF0, 0x10, 0x10, 0x10 ],  # 0xC2
        [ 0x00, 0x00, 0x00, 0xFF, 0xFF, 0x10, 0x10, 0x10 ],  # 0xC3
        [ 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10, 0x10 ],  # 0xC4
        [ 0x10, 0x10, 0x10, 0xFF, 0xFF, 0x10, 0x10, 0x10 ],  # 0xC5
        [ 0x00, 0x00, 0x00, 0xFF, 0xFF, 0x14, 0x14, 0x14 ],  # 0xC6
        [ 0x00, 0x00, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0x10 ],  # 0xC7
        [ 0x00, 0x00, 0x1F, 0x1F, 0x10, 0x17, 0x17, 0x14 ],  # 0xC8
        [ 0x00, 0x00, 0xFC, 0xFC, 0x04, 0xF4, 0xF4, 0x14 ],  # 0xC9
        [ 0x14, 0x14, 0x17, 0x17, 0x10, 0x17, 0x17, 0x14 ],  # 0xCA
        [ 0x14, 0x14, 0xF4, 0xF4, 0x04, 0xF4, 0xF4, 0x14 ],  # 0xCB
        [ 0x00, 0x00, 0xFF, 0xFF, 0x00, 0xF7, 0xF7, 0x14 ],  # 0xCC
        [ 0x14, 0x14, 0x14, 0x14, 0x14, 0x14, 0x14, 0x14 ],  # 0xCD
        [ 0x14, 0x14, 0xF7, 0xF7, 0x00, 0xF7, 0xF7, 0x14 ],  # 0xCE
        [ 0x14, 0x14, 0x14, 0x17, 0x17, 0x14, 0x14, 0x14 ],  # 0xCF
        [ 0x10, 0x10, 0x1F, 0x1F, 0x10, 0x1F, 0x1F, 0x10 ],  # 0xD0
        [ 0x14, 0x14, 0x14, 0xF4, 0xF4, 0x14, 0x14, 0x14 ],  # 0xD1
        [ 0x10, 0x10, 0xF0, 0xF0, 0x10, 0xF0, 0xF0, 0x10 ],  # 0xD2
        [ 0x00, 0x00, 0x1F, 0x1F, 0x10, 0x1F, 0x1F, 0x10 ],  # 0xD3
        [ 0x00, 0x00, 0x00, 0x1F, 0x1F, 0x14, 0x14, 0x14 ],  # 0xD4
        [ 0x00, 0x00, 0x00, 0xFC, 0xFC, 0x14, 0x14, 0x14 ],  # 0xD5
        [ 0x00, 0x00, 0xF0, 0xF0, 0x10, 0xF0, 0xF0, 0x10 ],  # 0xD6
        [ 0x10, 0x10, 0xFF, 0xFF, 0x10, 0xFF, 0xFF, 0x10 ],  # 0xD7
        [ 0x14, 0x14, 0x14, 0xFF, 0xFF, 0x14, 0x14, 0x14 ],  # 0xD8
        [ 0x10, 0x10, 0x10, 0x1F, 0x1F, 0x00, 0x00, 0x00 ],  # 0xD9
        [ 0x00, 0x00, 0x00, 0xF0, 0xF0, 0x10, 0x10, 0x10 ],  # 0xDA
        [ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF ],  # 0xDB
        [ 0xF0, 0xF0, 0xF0, 0xF0, 0xF0, 0xF0, 0xF0, 0xF0 ],  # 0xDC
        [ 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00 ],  # 0xDD
        [ 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF ],  # 0xDE
        [ 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F, 0x0F ],  # 0xDF
        [ 0x38, 0x7C, 0x44, 0x6C, 0x38, 0x6C, 0x44, 0x00 ],  # 0xE0
        [ 0xFC, 0xFE, 0x2A, 0x2A, 0x3E, 0x14, 0x00, 0x00 ],  # 0xE1
        [ 0x7E, 0x7E, 0x02, 0x02, 0x06, 0x06, 0x00, 0x00 ],  # 0xE2
        [ 0x02, 0x7E, 0x7E, 0x02, 0x7E, 0x7E, 0x02, 0x00 ],  # 0xE3
        [ 0x63, 0x77, 0x5D, 0x49, 0x63, 0x63, 0x00, 0x00 ],  # 0xE4
        [ 0x38, 0x7C, 0x44, 0x7C, 0x3C, 0x04, 0x04, 0x00 ],  # 0xE5
        [ 0x80, 0xFE, 0x7E, 0x20, 0x20, 0x3E, 0x1E, 0x00 ],  # 0xE6
        [ 0x04, 0x06, 0x02, 0x7E, 0x7C, 0x06, 0x02, 0x00 ],  # 0xE7
        [ 0x99, 0xBD, 0xE7, 0xE7, 0xBD, 0x99, 0x00, 0x00 ],  # 0xE8
        [ 0x1C, 0x3E, 0x6B, 0x49, 0x6B, 0x3E, 0x1C, 0x00 ],  # 0xE9
        [ 0x4C, 0x7E, 0x73, 0x01, 0x73, 0x7E, 0x4C, 0x00 ],  # 0xEA
        [ 0x30, 0x78, 0x4A, 0x4F, 0x7D, 0x39, 0x00, 0x00 ],  # 0xEB
        [ 0x18, 0x3C, 0x24, 0x3C, 0x3C, 0x24, 0x3C, 0x18 ],  # 0xEC
        [ 0x98, 0xFC, 0x64, 0x3C, 0x3E, 0x27, 0x3D, 0x18 ],  # 0xED
        [ 0x1C, 0x3E, 0x6B, 0x49, 0x49, 0x00, 0x00, 0x00 ],  # 0xEE
        [ 0x7E, 0x7F, 0x01, 0x01, 0x7F, 0x7E, 0x00, 0x00 ],  # 0xEF
        [ 0x2A, 0x2A, 0x2A, 0x2A, 0x2A, 0x2A, 0x00, 0x00 ],  # 0xF0
        [ 0x44, 0x44, 0x5F, 0x5F, 0x44, 0x44, 0x00, 0x00 ],  # 0xF1
        [ 0x40, 0x51, 0x5B, 0x4E, 0x44, 0x40, 0x00, 0x00 ],  # 0xF2
        [ 0x40, 0x44, 0x4E, 0x5B, 0x51, 0x40, 0x00, 0x00 ],  # 0xF3
        [ 0x00, 0x00, 0x00, 0xFE, 0xFF, 0x01, 0x07, 0x06 ],  # 0xF4
        [ 0x60, 0xE0, 0x80, 0xFF, 0x7F, 0x00, 0x00, 0x00 ],  # 0xF5
        [ 0x08, 0x08, 0x6B, 0x6B, 0x08, 0x08, 0x00, 0x00 ],  # 0xF6
        [ 0x24, 0x36, 0x12, 0x36, 0x24, 0x36, 0x12, 0x00 ],  # 0xF7
        [ 0x00, 0x06, 0x0F, 0x09, 0x0F, 0x06, 0x00, 0x00 ],  # 0xF8
        [ 0x00, 0x00, 0x00, 0x18, 0x18, 0x00, 0x00, 0x00 ],  # 0xF9
        [ 0x00, 0x00, 0x00, 0x10, 0x10, 0x00, 0x00, 0x00 ],  # 0xFA
        [ 0x10, 0x30, 0x70, 0xC0, 0xFF, 0xFF, 0x01, 0x01 ],  # 0xFB
        [ 0x00, 0x1F, 0x1F, 0x01, 0x1F, 0x1E, 0x00, 0x00 ],  # 0xFC
        [ 0x00, 0x19, 0x1D, 0x17, 0x12, 0x00, 0x00, 0x00 ],  # 0xFD
        [ 0x00, 0x00, 0x3C, 0x3C, 0x3C, 0x3C, 0x00, 0x00 ],  # 0xFE
        [ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 ],  # 0xFF
    ]
| |
import sys
import os
import bpy
import re
import shutil
import mathutils
from math import radians
from ..rfb_utils import filepath_utils
from ..rfb_utils.envconfig_utils import envconfig
from ..rfb_utils import string_utils
from ..rfb_utils import shadergraph_utils
from ..rfb_utils import object_utils
from ..rfb_utils import transform_utils
from ..rfb_utils import texture_utils
from ..rfb_utils import color_manager_blender as clr_mgr
from ..rfb_utils.prefs_utils import get_pref, get_addon_prefs
from ..rfb_utils.property_utils import __GAINS_TO_ENABLE__, __LOBES_ENABLE_PARAMS__, is_vstruct_and_linked, BlPropInfo
from ..rfb_logger import rfb_log
from ..rman_bl_nodes import __BL_NODES_MAP__, __RMAN_NODE_TYPES__
from ..rman_constants import RMAN_STYLIZED_FILTERS, RFB_FLOAT3, CYCLES_NODE_MAP, RMAN_SUPPORTED_VERSION_STRING
from ..rfb_utils.shadergraph_utils import RmanConvertNode
import rman_utils.rman_assets.lib as ral
from rman_utils.rman_assets.core import RmanAsset, FilePath
from rman_utils.rman_assets.core import TrMode, TrStorage, TrSpace, TrType
from rman_utils.rman_assets.common.external_files import ExternalFile
__BLENDER_PRESETS_HOST_PREFS__ = None  # lazily created singleton


def get_host_prefs():
    """Return the singleton BlenderHostPrefs, creating it on first use.

    On first creation the last-selected library is restored; if it can no
    longer be found, we fall back to the first available library and
    update the client's prefs.
    """
    global __BLENDER_PRESETS_HOST_PREFS__
    if not __BLENDER_PRESETS_HOST_PREFS__:
        __BLENDER_PRESETS_HOST_PREFS__ = BlenderHostPrefs()
        __BLENDER_PRESETS_HOST_PREFS__.initConfig()
        # restore the last library selection
        try:
            __BLENDER_PRESETS_HOST_PREFS__.cfg.setCurrentLibraryByPath(
                __BLENDER_PRESETS_HOST_PREFS__.getSelectedLibrary())
        # Bug fix: was ``except BaseException``, which also swallows
        # KeyboardInterrupt/SystemExit; ``Exception`` is sufficient for a
        # missing-library fallback.
        except Exception:
            # the last library selected by the client app can not be found.
            # we fallback to the first available library and update the
            # client's prefs.
            __BLENDER_PRESETS_HOST_PREFS__.cfg.setCurrentLibraryByName(None)
            __BLENDER_PRESETS_HOST_PREFS__.setSelectedLibrary(
                __BLENDER_PRESETS_HOST_PREFS__.cfg.getCurrentLibraryPath())
    return __BLENDER_PRESETS_HOST_PREFS__
##
# @brief Exception class to tell the world about our miserable failings.
#
class RmanAssetBlenderError(Exception):
    """Exception signalling a RenderMan asset operation failure in Blender."""

    def __init__(self, value):
        # Prefix the message so log output identifies the subsystem.
        self.value = "RmanAssetBlender Error: {}".format(value)

    def __str__(self):
        return repr(self.value)
def default_label_from_file_name(filename):
    """Derive a user-friendly label from a file name.

    Strips the directory and extension, inserts a space before runs of
    upper-case letters (CamelCase -> separate words), converts
    underscores to spaces and capitalizes the result.
    (Removed dead commented-out debug prints.)
    """
    label = os.path.splitext(os.path.basename(filename))[0]
    label = re.sub('([A-Z]+)', r' \1', label)
    label = label.replace('_', ' ')
    return label.strip().capitalize()
def asset_name_from_label(label):
    """Builds a filename from the asset label string.

    Args:
    - label (str): User-friendly label

    Returns:
    - the asset file name (label with spaces as underscores, other
      non-word characters stripped, plus the '.rma' extension)
    """
    underscored = label.replace(' ', '_')
    cleaned = re.sub(r'[^\w]', '', underscored)
    return cleaned + '.rma'
class BlenderProgress:
    """Thin adapter exposing Blender's window-manager progress bar through
    the Start/Update/End interface expected by the preset library."""

    def __init__(self):
        self._val = -1
        self._pbar = bpy.context.window_manager

    def Start(self):
        """Open a progress report running from 0 to 100."""
        self._pbar.progress_begin(0, 100)

    def Update(self, val, msg=None):
        """Advance the bar to ``val``; ``msg`` is accepted but unused."""
        self._pbar.progress_update(val)

    def End(self):
        """Close the progress report."""
        self._pbar.progress_end()
class BlenderHostPrefs(ral.HostPrefs):
    """Blender-side implementation of the preset browser's HostPrefs API.

    Persists library/UI/storage preferences in the addon preferences and
    gathers the nodes to export for each asset mode ('material',
    'lightrigs', 'envmap').
    """

    def __init__(self):
        super(BlenderHostPrefs, self).__init__(RMAN_SUPPORTED_VERSION_STRING)
        self.debug = False
        # === Library Prefs ===
        #
        self.rpbConfigFile = FilePath(self.getHostPref('rpbConfigFile', u''))
        # the list of user libraries from the addon prefs
        # (comment fix: this is Blender, not Maya)
        self.rpbUserLibraries = self.getHostPref('rpbUserLibraries', [])
        # We don't initialize the library configuration just yet. We want
        # to do it only once the prefs objects has been fully constructed.
        # This is currently done in rman_assets.ui.Ui.__init__()
        self.cfg = None
        # === UI Prefs ===
        #
        # our preferred swatch size in the UI.
        self.rpbSwatchSize = self.getHostPref('rpbSwatchSize', 64)
        # the last selected preview type
        self.rpbSelectedPreviewEnv = self.getHostPref(
            'rpbSelectedPreviewEnv', 0)
        # the last selected category
        self.rpbSelectedCategory = self.getHostPref('rpbSelectedCategory', u'')
        # the last selected preset
        self.rpbSelectedPreset = self.getHostPref('rpbSelectedPreset', u'')
        # the last selected library
        self.rpbSelectedLibrary = FilePath(self.getHostPref(
            'rpbSelectedLibrary', u''))
        storagePrefs = (
            ('rpbStorageMode', 0),
            ('rpbStorageKey', ''),
            ('rpbStoragePath', ''),
            ('rpbConvertToTex', 1))
        for name, default in storagePrefs:
            setattr(self, name, self.getHostPref(name, default))
        # store these to make sure we render the previews with the same version.
        self.hostTree = os.environ.get('RFMTREE', '')
        self.rmanTree = os.environ.get('RMANTREE', '')
        # === User Prefs ===
        #
        # render all HDR environments ?
        self.rpbRenderAllHDRs = self.getHostPref('rpbRenderAllHDRs', 0)
        self.rpbHideFactoryLib = self.getHostPref('rpbHideFactoryLib', 0)
        self._nodesToExport = dict()
        self.renderman_output_node = None
        self.blender_material = None
        self.bl_world = None
        self.progress = BlenderProgress()

    def getHostPref(self, prefName, defaultValue):  # pylint: disable=unused-argument
        """Read one preference from the addon preferences.

        'rpbUserLibraries' is stored as a collection property and is
        flattened to a de-duplicated list of paths without trailing
        slashes; everything else goes through get_pref().
        """
        if prefName == 'rpbUserLibraries':
            val = list()
            prefs = get_addon_prefs()
            for p in prefs.rpbUserLibraries:
                path = p.path
                if path.endswith('/'):
                    path = path[:-1]
                if path not in val:
                    val.append(path)
        else:
            val = get_pref(pref_name=prefName, default=defaultValue)
        return val

    def setHostPref(self, prefName, value):  # pylint: disable=unused-argument
        """Save the given value in the host's preferences.

        First look at the value's type and call the matching host API;
        unsupported types are logged (ints in lists are ignored by
        design). The user preferences file is saved afterwards.

        Args:
            prefName (str): The class attribute name for that pref.
            value (any): The value we should pass to the delegate.
        """
        isArray = isinstance(value, list)
        tvalue = value
        prefs = get_addon_prefs()
        if isArray:
            tvalue = value[0]
        if isinstance(tvalue, int):
            if isArray:
                # int arrays are not stored by the Blender host.
                pass
            else:
                setattr(prefs, prefName, value)
        elif isinstance(tvalue, str):
            if isArray:
                if prefName == 'rpbUserLibraries':
                    prefs = get_addon_prefs()
                    prefs.rpbUserLibraries.clear()
                    # de-duplicate while preserving order; skip dead paths
                    for val in list(dict.fromkeys(value)):
                        if not os.path.exists(val):
                            continue
                        p = prefs.rpbUserLibraries.add()
                        p.path = val
            else:
                setattr(prefs, prefName, value)
        else:
            arrayStr = ''
            if isArray:
                arrayStr = ' array'
            msg = ('HostPrefs.setHostPref: %s%s NOT supported !' %
                   (type(tvalue), arrayStr))
            # Bug fix: this message used to be constructed and then
            # discarded by a bare ``pass``; log it so unsupported types
            # are visible instead of silently dropped.
            rfb_log().warning(msg)
        bpy.ops.wm.save_userpref()

    def saveAllPrefs(self):
        """Write every tracked preference back to the addon preferences."""
        self.setHostPref('rpbUserLibraries', self.rpbUserLibraries)
        self.setHostPref('rpbSelectedLibrary', self.rpbSelectedLibrary)
        self.setHostPref('rpbSelectedCategory', self.rpbSelectedCategory)
        self.setHostPref('rpbSelectedPreset', self.rpbSelectedPreset)
        self.setHostPref('rpbStorageMode', self.rpbStorageMode)
        self.setHostPref('rpbStorageKey', self.rpbStorageKey)
        self.setHostPref('rpbStoragePath', self.rpbStoragePath)
        self.setHostPref('rpbConvertToTex', self.rpbConvertToTex)
        self.setHostPref('rpbSwatchSize', self.rpbSwatchSize)

    def updateLibraryConfig(self):
        """Rebuild the library list from the stored preferences."""
        self.cfg.buildLibraryList(updateFromPrefs=True)

    def getSelectedCategory(self):
        return self.rpbSelectedCategory

    def setSelectedCategory(self, val):
        self.rpbSelectedCategory = val

    def getSelectedPreset(self):
        return self.rpbSelectedPreset

    def setSelectedPreset(self, val):
        self.rpbSelectedPreset = val

    def getSelectedLibrary(self):
        return self.rpbSelectedLibrary

    def setSelectedLibrary(self, path):
        self.rpbSelectedLibrary = path

    def getSwatchSize(self):
        return self.rpbSwatchSize

    def setSwatchSize(self, value):
        # clamp to the supported 64..128 range
        self.rpbSwatchSize = min(max(value, 64), 128)

    def doAssign(self):
        """Imported assets are always assigned to the current selection."""
        return True

    def gather_material_nodes(self, mat):
        """Collect the shading nodes of ``mat`` for export under 'material'."""
        lst = list()
        out = shadergraph_utils.is_renderman_nodetree(mat)
        self.renderman_output_node = out
        nodes = shadergraph_utils.gather_nodes(out)
        lst.extend(nodes)
        self._nodesToExport['material'] = lst

    def gather_displayfilter_nodes(self, context):
        """Collect the world's display-filter nodes for export."""
        self.bl_world = context.scene.world
        nodes = shadergraph_utils.find_displayfilter_nodes(self.bl_world)
        self._nodesToExport['displayfilter'] = nodes

    def preExportCheck(self, mode, hdr=None):  # pylint: disable=unused-argument
        """Validate the current selection for ``mode`` and stage the export.

        Populates ``self._nodesToExport`` and ``self._defaultLabel``.
        Returns True when an export can proceed, False otherwise.
        """
        context = bpy.context
        if mode == 'material':
            ob = getattr(context, 'active_object', None)
            if not ob:
                # NOTE(review): assumes at least one selected object exists
                # here; an empty selection would raise IndexError -- confirm.
                if hasattr(context, 'selected_objects'):
                    ob = context.selected_objects[0]
                else:
                    scene = context.scene
                    ob = scene.view_layers[0].objects.active
            mat = ob.active_material
            self.blender_material = mat
            self.gather_material_nodes(mat)
            self._nodesToExport['displayfilter'] = list()
            self.gather_displayfilter_nodes(context)
            self._defaultLabel = mat.name
        elif mode == 'lightrigs':
            lst = list()
            selected_light_objects = []
            selected_objects = getattr(context, 'selected_objects', None)
            if not selected_objects:
                scene = context.scene
                selected_objects = scene.view_layers[0].objects.selected
            if selected_objects:
                for obj in selected_objects:
                    if object_utils._detect_primitive_(obj) == 'LIGHT':
                        selected_light_objects.append(obj)
            if not selected_light_objects:
                return False
            lst.extend(selected_light_objects)
            self._nodesToExport['lightrigs'] = lst
            self._defaultLabel = lst[0].name
        elif mode == 'envmap':
            if not hdr.exists():
                rfb_log().warning('hdr file does not exist: %s', hdr)
                return False
            self._nodesToExport['envmap'] = [hdr]
            self._defaultLabel = default_label_from_file_name(hdr)
            return True
        else:
            rfb_log().error('preExportCheck: unknown mode: %s', repr(mode))
            return False
        return True

    def exportMaterial(self, categorypath, infodict, previewtype):  # pylint: disable=unused-argument
        """Export the staged material nodes as a nodeGraph asset."""
        return export_asset(self._nodesToExport, 'nodeGraph', infodict, categorypath,
                            self.cfg, previewtype, isLightRig=False)

    def exportLightRig(self, categorypath, infodict):  # pylint: disable=unused-argument
        """Export the staged light objects as a nodeGraph asset."""
        return export_asset(self._nodesToExport, 'nodeGraph', infodict, categorypath,
                            self.cfg, renderPreview='std', isLightRig=True)

    def exportEnvMap(self, categorypath, infodict):  # pylint: disable=unused-argument
        """Export the staged HDR file as an envMap asset."""
        return export_asset(self._nodesToExport, 'envMap', infodict, categorypath,
                            self.cfg)

    def importAsset(self, asset, assignToSelected=False):  # pylint: disable=unused-argument
        """Import ``asset`` into the scene, optionally assigning it."""
        import_asset(asset, assignToSelected)

    def getAllCategories(self, asDict=False):
        return sorted(ral.getAllCategories(self.cfg, asDict=asDict))

    def getAssetList(self, relpath):
        return ral.getAssetList(self.cfg, relpath)
def fix_blender_name(name):
    """Strip spaces and dots from a Blender datablock name so it can be
    used as an identifier in exported assets."""
    return name.translate(str.maketrans('', '', ' .'))
def set_asset_params(ob, node, nodeName, Asset):
    """Record the parameter values of one shading node on *Asset*.

    Args:
        ob -- owning Blender object/material (used to build texture ids)
        node -- the Blender shading node to read
        nodeName -- the name the node was registered under in the asset
        Asset {RmanAsset} -- asset receiving the params via addParam()
    """
    node_type = node.bl_label
    # If node is OSL node get properties from dynamic location.
    if node.bl_idname == "PxrOSLPatternNode":
        for input_name, input in node.inputs.items():
            prop_type = input.renderman_type
            if input.is_linked:
                # linked input: export a typed reference with no value
                to_socket = input
                from_socket = input.links[0].from_socket
                param_type = 'reference %s' % prop_type
                param_name = input_name
                val = None
            elif type(input).__name__ != 'RendermanNodeSocketStruct':
                param_type = prop_type
                param_name = input_name
                val = string_utils.convert_val(input.default_value, type_hint=prop_type)
            # NOTE(review): if the first input is an unlinked struct socket,
            # param_type/param_name/val are unbound (or stale from the
            # previous iteration) here -- verify this can't happen.
            pdict = {'type': param_type, 'value': val}
            Asset.addParam(nodeName, node_type, param_name, pdict)
        return
    # Regular RenderMan node: walk the node's property metadata.
    for prop_name, meta in node.prop_meta.items():
        bl_prop_info = BlPropInfo(node, prop_name, meta)
        if not bl_prop_info.do_export:
            continue
        param_widget = bl_prop_info.widget
        prop = bl_prop_info.prop
        param_name = bl_prop_info.renderman_name
        param_type = bl_prop_info.renderman_type
        is_linked = bl_prop_info.is_linked
        if is_linked:
            # connected param: export a typed reference with value None
            param_type = 'reference %s' % meta['renderman_type']
            param_name = meta['renderman_name']
            pdict = {'type': param_type, 'value': None}
            Asset.addParam(nodeName, node_type, param_name, pdict)
        # see if vstruct linked
        elif is_vstruct_and_linked(node, prop_name):
            val = None
            vstruct_name, vstruct_member = bl_prop_info.vstructmember.split('.')
            from_socket = node.inputs[
                vstruct_name].links[0].from_socket
            vstruct_from_param = "%s_%s" % (
                from_socket.identifier, vstruct_member)
            if vstruct_from_param in from_socket.node.output_meta:
                node_meta = getattr(
                    node, 'shader_meta') if node.bl_idname == "PxrOSLPatternNode" else node.output_meta
                node_meta = node_meta.get(vstruct_from_param)
                if node_meta:
                    expr = node_meta.get('vstructConditionalExpr')
                    # check if we should connect or just set a value
                    if expr:
                        if expr.split(' ')[0] == 'set':
                            val = 1
                param_type = meta['renderman_type']
                pdict = {'type': param_type, 'value': val}
                Asset.addParam(nodeName, node_type, param_name, pdict)
            else:
                rfb_log().warning('Warning! %s not found on %s' %
                                  (vstruct_from_param, from_socket.node.name))
        # else output rib
        else:
            val = None
            # if this is a gain on PxrSurface and the lobe isn't
            # enabled
            if node.bl_idname == 'PxrSurfaceBxdfNode' and \
                    prop_name in __GAINS_TO_ENABLE__ and \
                    not getattr(node, __GAINS_TO_ENABLE__[prop_name]):
                val = [0, 0, 0] if meta[
                    'renderman_type'] == 'color' else 0
            elif param_type == 'string':
                val = string_utils.expand_string(prop)
                options = meta['options']
                if bl_prop_info.is_texture:
                    # prefer the texture manager's converted output, if any
                    tx_node_id = texture_utils.generate_node_id(node, param_name, ob=ob)
                    tx_val = texture_utils.get_txmanager().get_output_tex_from_id(tx_node_id)
                    val = tx_val if tx_val != '' else val
                elif param_widget == 'assetidoutput':
                    display = 'openexr'
                    if 'texture' in options:
                        display = 'texture'
                    val = string_utils.expand_string(val, display=display, asFilePath=True)
            elif param_type == 'array':
                # array params are stored in a '<name>_collection' property
                val_array = []
                val_ref_array = []
                coll_nm = '%s_collection' % prop_name
                collection = getattr(node, coll_nm)
                array_len = len(collection)
                param_array_type = bl_prop_info.renderman_array_type
                param_type = '%s[%d]' % (param_array_type, array_len)
                for elem in collection:
                    nm = elem.name
                    if hasattr(node, 'inputs') and nm in node.inputs and \
                            node.inputs[nm].is_linked:
                        # linked element: placeholder in the reference array
                        val_ref_array.append('')
                    else:
                        prop = getattr(elem, 'value_%s' % param_array_type)
                        val = string_utils.convert_val(prop, type_hint=param_array_type)
                        if param_array_type in RFB_FLOAT3:
                            val_array.extend(val)
                        else:
                            val_array.append(val)
                if val_ref_array:
                    pdict = {'type': '%s [%d]' % (param_array_type, len(val_ref_array)), 'value': None}
                    Asset.addParam(nodeName, node_type, param_name, pdict)
                else:
                    pdict = {'type': param_array_type, 'value': val_array}
                    Asset.addParam(nodeName, node_type, param_name, pdict)
                continue
            elif param_type == 'colorramp':
                # color ramps live on a hidden node group; export as
                # <name> (size), <name>_Knots, <name>_Colors, <name>_Interpolation
                nt = bpy.data.node_groups[node.rman_fake_node_group]
                if nt:
                    ramp_name = prop
                    color_ramp_node = nt.nodes[ramp_name]
                    colors = []
                    positions = []
                    # double the start and end points
                    positions.append(float(color_ramp_node.color_ramp.elements[0].position))
                    colors.append(color_ramp_node.color_ramp.elements[0].color[:3])
                    for e in color_ramp_node.color_ramp.elements:
                        positions.append(float(e.position))
                        colors.append(e.color[:3])
                    positions.append(
                        float(color_ramp_node.color_ramp.elements[-1].position))
                    colors.append(color_ramp_node.color_ramp.elements[-1].color[:3])
                    array_size = len(positions)
                    pdict = {'type': 'int', 'value': array_size}
                    Asset.addParam(nodeName, node_type, prop_name, pdict)
                    pdict = {'type': 'float[%d]' % array_size, 'value': positions}
                    Asset.addParam(nodeName, node_type, "%s_Knots" % prop_name, pdict)
                    pdict = {'type': 'color[%d]' % array_size, 'value': colors}
                    Asset.addParam(nodeName, node_type, "%s_Colors" % prop_name, pdict)
                    rman_interp_map = { 'LINEAR': 'linear', 'CONSTANT': 'constant'}
                    interp = rman_interp_map.get(color_ramp_node.color_ramp.interpolation,'catmull-rom')
                    pdict = {'type': 'string', 'value': interp}
                    Asset.addParam(nodeName, node_type, "%s_Interpolation" % prop_name, pdict)
                continue
            elif param_type == 'floatramp':
                # float ramps are stored as a curve mapping; same four-param
                # encoding as color ramps, but values go in <name>_Floats
                nt = bpy.data.node_groups[node.rman_fake_node_group]
                if nt:
                    ramp_name = prop
                    float_ramp_node = nt.nodes[ramp_name]
                    curve = float_ramp_node.mapping.curves[0]
                    knots = []
                    vals = []
                    # double the start and end points
                    knots.append(curve.points[0].location[0])
                    vals.append(curve.points[0].location[1])
                    for p in curve.points:
                        knots.append(p.location[0])
                        vals.append(p.location[1])
                    knots.append(curve.points[-1].location[0])
                    vals.append(curve.points[-1].location[1])
                    array_size = len(knots)
                    pdict = {'type': 'int', 'value': array_size}
                    Asset.addParam(nodeName, node_type, prop_name, pdict)
                    pdict = {'type': 'float[%d]' % array_size, 'value': knots}
                    Asset.addParam(nodeName, node_type, "%s_Knots" % prop_name, pdict)
                    pdict = {'type': 'float[%d]' % array_size, 'value': vals}
                    Asset.addParam(nodeName, node_type, "%s_Floats" % prop_name, pdict)
                    pdict = {'type': 'string', 'value': 'catmull-rom'}
                    Asset.addParam(nodeName, node_type, "%s_Interpolation" % prop_name, pdict)
                continue
            else:
                val = string_utils.convert_val(prop, type_hint=param_type)
            pdict = {'type': param_type, 'value': val}
            Asset.addParam(nodeName, node_type, param_name, pdict)
def set_asset_connections(nodes_list, Asset):
    """Register every node-to-node link among *nodes_list* on *Asset*.

    Skips RmanConvertNode placeholders, links whose endpoints have no
    Blender->RenderMan mapping, and links that need a conversion node
    (those are handled by export_material_preset).
    """
    for node in nodes_list:
        if type(node) == RmanConvertNode:
            # conversion node, skip
            continue
        cnx = [l for inp in node.inputs for l in inp.links ]
        if not cnx:
            continue
        for l in cnx:
            # only export links whose both ends map to known node types
            ignoreDst = l.to_node.bl_label not in __BL_NODES_MAP__
            ignoreSrc = l.from_node.bl_label not in __BL_NODES_MAP__
            if ignoreDst or ignoreSrc:
                rfb_log().debug("Ignoring connection %s -> %s" % (l.from_node.name, l.to_node.name))
                continue
            if shadergraph_utils.do_convert_socket(l.from_socket, l.to_socket):
                # this needs a conversion node.
                # the connection should already have been dealt with earlier
                continue
            from_node = l.from_node
            to_node = l.to_node
            from_socket_name = l.from_socket.name
            to_socket_name = l.to_socket.name
            renderman_node_type = getattr(from_node, 'renderman_node_type', '')
            if renderman_node_type == 'bxdf':
                # for Bxdf nodes, use the same socket name as RfM
                from_socket_name = 'outColor'
            srcPlug = "%s.%s" % (fix_blender_name(l.from_node.name), from_socket_name)
            dstPlug = "%s.%s" % (fix_blender_name(l.to_node.name), to_socket_name)
            Asset.addConnection(srcPlug, dstPlug)
def export_material_preset(mat, nodes_to_convert, renderman_output_node, Asset):
    """Export a material node graph (plus a Maya-style shadingEngine root)
    into *Asset*.

    Args:
        mat -- the Blender material being exported
        nodes_to_convert -- nodes to export; may contain RmanConvertNode
            placeholders (conversion nodes to insert) and tuples (skipped)
        renderman_output_node -- the material's RendermanOutputNode
        Asset {RmanAsset} -- asset receiving nodes/params/connections
    """
    # first, create a Maya-like shadingEngine node for our output node
    nodeClass = 'root'
    rmanNode = 'shadingEngine'
    nodeType = 'shadingEngine'
    nodeName = '%s_SG' % Asset.label()
    Asset.addNode(nodeName, nodeType,
                  nodeClass, rmanNode,
                  externalosl=False)
    if renderman_output_node.inputs['Bxdf'].is_linked:
        # surface connection: shadingEngine.rman__surface <- bxdf.outColor
        infodict = {}
        infodict['name'] = 'rman__surface'
        infodict['type'] = 'reference float3'
        infodict['value'] = None
        Asset.addParam(nodeName, nodeType, 'rman__surface', infodict)
        from_node = renderman_output_node.inputs['Bxdf'].links[0].from_node
        srcPlug = "%s.%s" % (fix_blender_name(from_node.name), 'outColor')
        dstPlug = "%s.%s" % (nodeName, 'rman__surface')
        Asset.addConnection(srcPlug, dstPlug)
    if renderman_output_node.inputs['Displacement'].is_linked:
        # displacement connection: shadingEngine.rman__displacement
        infodict = {}
        infodict['name'] = 'rman__displacement'
        infodict['type'] = 'reference float3'
        infodict['value'] = None
        Asset.addParam(nodeName, nodeType, 'rman__displacement', infodict)
        from_node = renderman_output_node.inputs['Displacement'].links[0].from_node
        srcPlug = "%s.%s" % (fix_blender_name(from_node.name), 'outColor')
        dstPlug = "%s.%s" % (nodeName, 'rman__displacement')
        Asset.addConnection(srcPlug, dstPlug)
    for node in nodes_to_convert:
        if type(node) == RmanConvertNode:
            # insert conversion node
            node_type = node.node_type
            from_node = node.from_node
            from_socket = node.from_socket
            to_node = node.to_node
            to_socket = node.to_socket
            output_name = 'resultRGB' if node_type == 'PxrToFloat3' else 'resultF'
            node_name = 'convert_%s_%s' % (fix_blender_name(from_node.name), from_socket.name)
            Asset.addNode(
                node_name, node_type,
                'pattern', node_type, False)
            # from node to convert node
            srcPlug = "%s.%s" % (fix_blender_name(from_node.name), from_socket.name)
            dstPlug = "%s.%s" % (node_name, 'input')
            Asset.addConnection(srcPlug, dstPlug)
            # from convert node to destination node
            srcPlug = "%s.%s" % (node_name, output_name)
            dstPlug = "%s.%s" % (fix_blender_name(to_node.name), to_socket.name)
            Asset.addConnection(srcPlug, dstPlug)
        elif type(node) != type((1,2,3)):
            # skip tuples; everything else is a real shading node
            externalosl = False
            renderman_node_type = getattr(node, 'renderman_node_type', '')
            if node.bl_idname == "PxrOSLPatternNode":
                # external OSL shader: copy the .oso next to the asset
                if getattr(node, "codetypeswitch") == "EXT":
                    osl_path = string_utils.expand_string(getattr(node, 'shadercode'))
                    FileName = os.path.basename(osl_path)
                    FileNameNoEXT,ext = os.path.splitext(FileName)
                    shaders_path = os.path.join(string_utils.expand_string('<OUT>'), "shaders")
                    out_file = os.path.join(shaders_path, FileName)
                    if ext == ".oso":
                        if not os.path.exists(out_file) or not os.path.samefile(osl_path, out_file):
                            if not os.path.exists(shaders_path):
                                os.mkdir(shaders_path)
                            shutil.copy(osl_path, out_file)
                    externalosl = True
                    Asset.processExternalFile(None, ExternalFile.k_osl, out_file)
            elif renderman_node_type == '':
                # check if a cycles node
                if node.bl_idname not in CYCLES_NODE_MAP.keys():
                    rfb_log().debug('No translation for node of type %s named %s' % (node.bl_idname, node.name))
                    continue
                mapping = CYCLES_NODE_MAP[node.bl_idname]
                cycles_shader_dir = filepath_utils.get_cycles_shader_path()
                # FIX: build the .oso filename from the mapped shader name;
                # it was previously built from cycles_shader_dir itself.
                out_file = os.path.join(cycles_shader_dir, '%s.oso' % mapping)
                externalosl = True
                Asset.processExternalFile(None, ExternalFile.k_osl, out_file)
            node_name = fix_blender_name(node.name)
            shader_name = node.bl_label
            Asset.addNode(
                node_name, shader_name,
                renderman_node_type, shader_name, externalosl)
            set_asset_params(mat, node, node_name, Asset)
    set_asset_connections(nodes_to_convert, Asset)
def find_portal_dome_parent(portal):
    """Walk up the parent chain of *portal* and return the first ancestor
    that is a PxrDomeLight, or None when there isn't one."""
    ancestor = portal.parent
    while ancestor:
        if ancestor.type == 'LIGHT' and hasattr(ancestor.data, 'renderman'):
            rm = ancestor.data.renderman
            if (rm.renderman_light_role == 'RMAN_LIGHT'
                    and rm.get_light_node_name() == 'PxrDomeLight'):
                return ancestor
        ancestor = ancestor.parent
    return None
def export_light_rig(obs, Asset):
    """Export the light objects in *obs* (plus their transforms, portal->dome
    relationships and linked light filters) into *Asset*."""
    dome_to_portals = dict()
    for ob in obs:
        bl_node = shadergraph_utils.get_light_node(ob)
        nodeName = fix_blender_name(bl_node.name)
        nodeType = bl_node.bl_label
        nodeClass = 'light'
        rmanNodeName = bl_node.bl_label
        Asset.addNode(nodeName, nodeType,
                      nodeClass, rmanNodeName,
                      externalosl=False)
        mtx = ob.matrix_world
        floatVals = list()
        floatVals = transform_utils.convert_matrix(mtx)
        Asset.addNodeTransform(nodeName, floatVals )
        set_asset_params(ob, bl_node, nodeName, Asset)
        # FIX: was misspelled "PxrPortaLight", so portals were never
        # matched and dome connections were never exported.
        if nodeType == "PxrPortalLight":
            # if a portal light, find the associated PxrDomeLight
            dome = find_portal_dome_parent(ob)
            if not dome:
                continue
            dome_name = dome.name
            portals = dome_to_portals.get(dome_name, list())
            portals.append(nodeName)
            dome_to_portals[dome_name] = portals
    # do portal connections
    for dome,portals in dome_to_portals.items():
        for i, portal in enumerate(portals):
            dst = '%s.rman__portals[%d]' % (dome, i)
            src = '%s.message' % (portal)
            Asset.addConnection(src, dst)
    # light filters
    for ob in obs:
        light = ob.data
        rm = light.renderman
        for i, lf in enumerate(rm.light_filters):
            light_filter = lf.linked_filter_ob
            if not light_filter:
                continue
            bl_node = shadergraph_utils.get_light_node(light_filter)
            nodeName = fix_blender_name(bl_node.name)
            nodeType = bl_node.bl_label
            nodeClass = 'lightfilter'
            rmanNodeName = bl_node.bl_label
            Asset.addNode(nodeName, nodeType,
                          nodeClass, rmanNodeName,
                          externalosl=False)
            # NOTE(review): this records the *light's* matrix for the filter
            # node, not light_filter.matrix_world -- confirm intended.
            mtx = ob.matrix_world
            floatVals = list()
            floatVals = transform_utils.convert_matrix(mtx)
            Asset.addNodeTransform(nodeName, floatVals )
            set_asset_params(ob, bl_node, nodeName, Asset)
            srcPlug = "%s.outColor" % fix_blender_name(light_filter.name)
            dstPlug = "%s.rman__lightfilters[%d]" % (fix_blender_name(ob.name), i)
            Asset.addConnection(srcPlug, dstPlug)
def export_displayfilter_nodes(world, nodes, Asset):
    """Export the world's display filter nodes into *Asset*.

    If any filter is a stylized-looks filter, also add the stylized
    display channels the filters depend on.
    """
    any_stylized = False
    for node in nodes:
        nodeName = fix_blender_name(node.name)
        shaderName = node.bl_label
        externalosl = False
        Asset.addNode(
            nodeName, shaderName,
            'displayfilter', shaderName, externalosl)
        set_asset_params(world, node, nodeName, Asset)
        if not any_stylized and shaderName in RMAN_STYLIZED_FILTERS:
            any_stylized = True
    if any_stylized:
        # add stylized channels to Asset
        from .. import rman_config
        stylized_tmplt = rman_config.__RMAN_DISPLAY_TEMPLATES__.get('Stylized', None)
        rman_dspy_channels = rman_config.__RMAN_DISPLAY_CHANNELS__
        for chan in stylized_tmplt['channels']:
            settings = rman_dspy_channels[chan]
            chan_src = settings['channelSource']
            chan_type = settings['channelType']
            Asset.addNode(chan, chan,
                          'displaychannel', 'DisplayChannel',
                          datatype=chan_type)
            pdict = dict()
            pdict['value'] = chan_src
            pdict['name'] = 'source'
            pdict['type'] = 'string'
            # lpe sources need an explicit color type prefix
            if pdict['value'].startswith('lpe:'):
                pdict['value'] = 'color ' + pdict['value']
            Asset.addParam(chan, 'rmanDisplayChannel', 'source', pdict)
def parse_texture(imagePath, Asset):
    """Gathers infos from the image header
    Args:
        imagePath {str} -- path to a single texture/environment-map file.
        Asset {RmanAsset} -- the asset in which the infos will be stored.
    """
    img = FilePath(imagePath)
    # gather info on the envMap
    #
    Asset.addTextureInfos(img)
def setParams(Asset, node, paramsList):
    '''Set param values.
    Note: we are only handling a subset of maya attribute types.

    Rebuilds ramps (color and float), array/collection params and plain
    properties on *node* from the asset's stored params.
    '''
    float3 = ['color', 'point', 'vector', 'normal']
    ramp_names = []
    rman_ramp_size = dict()
    rman_ramps = dict()
    rman_color_ramps = dict()
    # Look for ramps
    for param in paramsList:
        pname = param.name()
        if pname in node.outputs:
            continue
        if pname not in node.prop_meta:
            continue
        prop_meta = node.prop_meta[pname]
        param_widget = prop_meta.get('widget', 'default')
        if prop_meta['renderman_type'] == 'colorramp':
            prop = getattr(node, pname)
            nt = bpy.data.node_groups[node.rman_fake_node_group]
            if nt:
                ramp_name = prop
                color_ramp_node = nt.nodes[ramp_name]
                rman_color_ramps[pname] = color_ramp_node
                rman_ramp_size[pname] = param.value()
                ramp_names.append(pname)
            continue
        elif prop_meta['renderman_type'] == 'floatramp':
            prop = getattr(node, pname)
            nt = bpy.data.node_groups[node.rman_fake_node_group]
            if nt:
                ramp_name = prop
                float_ramp_node = nt.nodes[ramp_name]
                rman_ramps[pname] = float_ramp_node
                rman_ramp_size[pname] = param.value()
                ramp_names.append(pname)
            continue
    # set ramp params
    for nm in ramp_names:
        knots_param = None
        colors_param = None
        floats_param = None
        interpolation_param = None
        if (nm not in rman_ramps) and (nm not in rman_color_ramps):
            continue
        # collect the companion _Knots/_Colors/_Floats/_Interpolation params
        for param in paramsList:
            pname = param.name()
            if pname in node.outputs:
                continue
            if pname.startswith(nm):
                if '_Knots' in pname:
                    knots_param = param
                elif '_Colors' in pname:
                    colors_param = param
                elif '_Floats' in pname:
                    floats_param = param
                elif '_Interpolation' in pname:
                    interpolation_param = param
        if colors_param:
            n = rman_color_ramps[nm]
            elements = n.color_ramp.elements
            size = rman_ramp_size[nm]
            knots_vals = knots_param.value()
            colors_vals = colors_param.value()
            rman_interp = interpolation_param.value()
            rman_interp_map = { 'bspline':'B_SPLINE' , 'linear': 'LINEAR', 'constant': 'CONSTANT'}
            interp = rman_interp_map.get(rman_interp, 'LINEAR')
            n.color_ramp.interpolation = interp
            if len(colors_vals) == size:
                # colors stored as a list of 3-tuples
                for i in range(0, size):
                    if i == 0:
                        elem = elements[0]
                        elem.position = knots_vals[i]
                    elif i == 1:
                        elem = elements[1]
                        elem.position = knots_vals[i]
                    else:
                        elem = elements.new(knots_vals[i])
                    elem.color = (colors_vals[i][0], colors_vals[i][1], colors_vals[i][2], 1.0)
            else:
                # colors stored as a flat float list; step through in threes
                j = 0
                for i in range(0, size):
                    if i == 0:
                        elem = elements[0]
                        elem.position = knots_vals[i]
                    elif i == 1:
                        elem = elements[1]
                        elem.position = knots_vals[i]
                    else:
                        elem = elements.new(knots_vals[i])
                    elem.color = (colors_vals[j], colors_vals[j+1], colors_vals[j+2], 1.0)
                    j += 3
        elif floats_param:
            n = rman_ramps[nm]
            curve = n.mapping.curves[0]
            points = curve.points
            size = rman_ramp_size[nm]
            knots_vals = knots_param.value()
            floats_vals = floats_param.value()
            for i in range(0, size):
                if i == 0:
                    point = points[0]
                    point.location[0] = knots_vals[i]
                    # FIX: the value belongs in location[1] (y); it was
                    # previously written to location[0], clobbering the knot.
                    point.location[1] = floats_vals[i]
                elif i == 1:
                    point = points[1]
                    point.location[0] = knots_vals[i]
                    point.location[1] = floats_vals[i]
                else:
                    points.new(knots_vals[i], floats_vals[i])
    for param in paramsList:
        pname = param.name()
        if pname in node.outputs:
            continue
        ptype = param.type()
        prop_meta = node.prop_meta.get(pname, dict())
        param_widget = prop_meta.get('widget', 'default')
        if pname in ramp_names:
            continue
        # skip the ramp companion params handled above
        is_ramp_param = False
        for nm in ramp_names:
            if pname.startswith(nm):
                is_ramp_param = True
                break
        if is_ramp_param:
            continue
        # arrays
        elif '[' in ptype:
            # always set the array length
            # try to get array length
            rman_type = ptype.split('[')[0]
            array_len = ptype.split('[')[1].split(']')[0]
            if array_len == '':
                continue
            array_len = int(array_len)
            coll_nm = '%s_collection' % pname
            coll_idx_nm = '%s_collection_index' % pname
            collection = getattr(node, coll_nm, None)
            if collection is None:
                continue
            elem_type = rman_type
            if 'reference' in elem_type:
                elem_type = elem_type.replace('reference ', '')
            if elem_type == 'integer':
                elem_type = 'int'
            # grow the collection to the stored array length
            for i in range(array_len):
                override = {'node': node}
                bpy.ops.renderman.add_remove_array_elem(override,
                                                        'EXEC_DEFAULT',
                                                        action='ADD',
                                                        param_name=pname,
                                                        collection=coll_nm,
                                                        collection_index=coll_idx_nm,
                                                        elem_type=elem_type)
            pval = param.value()
            if pval is None or pval == []:
                # connected param
                continue
            plen = len(pval)
            if rman_type in ['integer', 'float', 'string']:
                for i in range(0, plen):
                    val = pval[i]
                    elem = collection[i]
                    elem.type = elem_type
                    # FIX: the value lives on the collection element
                    # (export reads elem.value_<type>); it was previously
                    # written onto the node.
                    setattr(elem, 'value_%s' % elem.type, val)
            # float3 types
            elif rman_type in float3:
                # FIX: start at 0 -- starting at 1 skipped the first
                # element and overran the collection on the last.
                j = 0
                if isinstance(pval[0], list):
                    for i in range(0, plen):
                        elem = collection[j]
                        # FIX: use all three components (was [0] three times)
                        val = (pval[i][0], pval[i][1], pval[i][2])
                        elem.type = elem_type
                        setattr(elem, 'value_%s' % elem.type, val)
                        j +=1
                else:
                    for i in range(0, plen, 3):
                        elem = collection[j]
                        val = (pval[i], pval[i+1], pval[i+2])
                        elem.type = elem_type
                        setattr(elem, 'value_%s' % elem.type, val)
                        j = j+1
        elif pname in node.bl_rna.properties.keys():
            if ptype is None or ptype in ['vstruct', 'struct']:
                # skip vstruct and struct params : they are only useful when connected.
                continue
            pval = param.value()
            if pval is None or pval == []:
                # connected param
                continue
            if pname == "placementMatrix":
                # this param is always connected.
                continue
            if 'string' in ptype:
                if pval != '':
                    # remap stored file paths to the asset's dependencies
                    depfile = Asset.getDependencyPath(pname, pval)
                    if depfile:
                        pval = depfile
                setattr(node, pname, pval)
            elif ptype in float3:
                try:
                    setattr(node, pname, pval)
                except:
                    rfb_log().error('setParams float3 FAILED: %s ptype: %s pval: %s' %
                                    (pname, ptype, repr(pval)))
            else:
                try:
                    if type(getattr(node,pname)) == type(""):
                        setattr(node, pname, str(pval))
                    else:
                        setattr(node, pname, pval)
                except:
                    if type(getattr(node, pname)) == bpy.types.EnumProperty:
                        setattr(node, pname, str(pval))
    # if this is a PxrSurface, turn on all of the enable gains.
    if hasattr(node, 'plugin_name') and node.plugin_name in ['PxrLayer', 'PxrSurface']:
        for enable in __LOBES_ENABLE_PARAMS__:
            setattr(node, enable, True)
def createNodes(Asset):
    """Create a new Blender material and populate its node tree from *Asset*.

    Returns:
        (mat, nt, nodeDict) -- the new material, its node tree, and a map
        of asset node ids to created Blender node names.
    """
    nodeDict = {}
    nt = None
    mat = bpy.data.materials.new(Asset.label())
    mat.use_nodes = True
    nt = mat.node_tree
    # create output node
    output_node = nt.nodes.new('RendermanOutputNode')
    curr_x = 250
    for node in Asset.nodeList():
        # FIX: reset per iteration -- previously an unhandled nodeClass
        # raised NameError on the first node, or silently reused the node
        # created in the previous iteration.
        created_node = None
        nodeId = node.name()
        nodeType = node.type()
        nodeClass = node.nodeClass()
        # print('%s %s: %s' % (nodeId, nodeType, nodeClass))
        fmt, vals, ttype = node.transforms()
        # print('+ %s %s: %s' % (fmt, vals, ttype))
        if nodeClass == 'bxdf':
            bl_node_name = __BL_NODES_MAP__.get(nodeType, None)
            if not bl_node_name:
                continue
            created_node = nt.nodes.new(bl_node_name)
            created_node.location[0] = -curr_x
            curr_x = curr_x + 250
            created_node.name = nodeId
            created_node.label = nodeId
        elif nodeClass == 'displace':
            bl_node_name = __BL_NODES_MAP__.get(nodeType, None)
            if not bl_node_name:
                continue
            created_node = nt.nodes.new(bl_node_name)
            created_node.location[0] = -curr_x
            curr_x = curr_x + 250
            created_node.name = nodeId
            created_node.label = nodeId
        elif nodeClass == 'pattern':
            if nodeType == 'PxrDisplace':
                # Temporary. RfM presets seem to be setting PxrDisplace as a pattern node
                bl_node_name = __BL_NODES_MAP__.get(nodeType, None)
                if not bl_node_name:
                    continue
                created_node = nt.nodes.new(bl_node_name)
                created_node.location[0] = -curr_x
                curr_x = curr_x + 250
                created_node.name = nodeId
                created_node.label = nodeId
            elif node.externalOSL():
                # if externalOSL() is True, it is a dynamic OSL node i.e. one
                # loaded through a PxrOSL node.
                # if PxrOSL is used, we need to find the oso in the asset to
                # use it in a PxrOSL node.
                oso = Asset.getDependencyPath(ExternalFile.k_osl, nodeType + '.oso')
                if oso is None:
                    err = ('createNodes: OSL file is missing "%s"'
                           % nodeType)
                    raise RmanAssetBlenderError(err)
                created_node = nt.nodes.new('PxrOSLPatternNode')
                created_node.location[0] = -curr_x
                curr_x = curr_x + 250
                created_node.codetypeswitch = 'EXT'
                created_node.shadercode = oso
                created_node.RefreshNodes({}, nodeOR=created_node)
            else:
                bl_node_name = __BL_NODES_MAP__.get(nodeType, None)
                if not bl_node_name:
                    continue
                created_node = nt.nodes.new(bl_node_name)
                created_node.location[0] = -curr_x
                curr_x = curr_x + 250
                created_node.name = nodeId
                created_node.label = nodeId
        elif nodeClass == 'root':
            # reuse the RendermanOutputNode created above as the root
            output_node.name = nodeId
            nodeDict[nodeId] = output_node.name
            continue
        if created_node:
            nodeDict[nodeId] = created_node.name
            setParams(Asset, created_node, node.paramsDict())
            if nodeClass == 'bxdf':
                created_node.update_mat(mat)
    return mat,nt,nodeDict
def import_light_rig(Asset):
    """Recreate the lights and light filters stored in *Asset* in the
    current scene, restore their transforms, and rebuild portal->dome
    parenting and light->filter links.

    Returns:
        dict -- map of asset node ids to created Blender node names.
    """
    nodeDict = {}
    filter_nodes = dict()
    light_nodes = dict()
    domelight_nodes = dict()
    portallight_nodes = dict()
    curr_x = 250
    for node in Asset.nodeList():
        nodeId = node.name()
        nodeType = node.type()
        nodeClass = node.nodeClass()
        if nodeClass not in ['light', 'lightfilter']:
            continue
        # print('%s %s: %s' % (nodeId, nodeType, nodeClass))
        fmt, vals, ttype = node.transforms()
        # print('+ %s %s: %s' % (fmt, vals, ttype))
        created_node = None
        light = None
        if nodeClass == 'light':
            # we don't deal with mesh lights
            if nodeType == 'PxrMeshLight':
                continue
            bpy.ops.object.rman_add_light(rman_light_name=nodeType)
        elif nodeClass == 'lightfilter':
            bpy.ops.object.rman_add_light_filter(rman_lightfilter_name=nodeType, add_to_selected=False)
        # the add operators make the new object active
        light = bpy.context.active_object
        nt = light.data.node_tree
        light.name = nodeId
        light.data.name = nodeId
        created_node = light.data.renderman.get_light_node()
        if created_node:
            nodeDict[nodeId] = created_node.name
            setParams(Asset, created_node, node.paramsDict())
        if nodeClass == 'light':
            light_nodes[nodeId] = light
        elif nodeClass == 'lightfilter':
            filter_nodes[nodeId] = light
        if nodeType == "PxrDomeLight":
            domelight_nodes[nodeId] = light
        elif nodeType == "PxrPortalLight":
            portallight_nodes[nodeId] = light
        # restore the stored transform
        if fmt[2] == TrMode.k_flat:
            if fmt[0] == TrStorage.k_matrix:
                # stored row-major; assign rows then transpose
                light.matrix_world[0] = vals[0:4]
                light.matrix_world[1] = vals[4:8]
                light.matrix_world[2] = vals[8:12]
                light.matrix_world[3] = vals[12:]
                light.matrix_world.transpose()
            elif fmt[0] == TrStorage.k_TRS:
                light.location = vals[0:3]
                light.scale = vals[6:9]
                # rotation
                light.rotation_euler = (radians(vals[3]), radians(vals[4]), radians(vals[5]))
        try:
            cdata = Asset._assetData['compatibility']
            if cdata['host']['name'] != 'Blender':
                if nodeType not in ['PxrDomeLight', 'PxrEnvDayLight']:
                    # assume that if a lightrig did not come from Blender,
                    # we need convert from Y-up to Z-up
                    yup_to_zup = mathutils.Matrix.Rotation(radians(90.0), 4, 'X')
                    light.matrix_world = yup_to_zup @ light.matrix_world
                else:
                    # for dome and envdaylight, flip the Y and Z rotation axes
                    # and ignore scale and translations
                    euler = light.matrix_world.to_euler('XYZ')
                    tmp = euler.y
                    euler.y = euler.z
                    euler.z = tmp
                    light.matrix_world = mathutils.Matrix.Identity(4) @ euler.to_matrix().to_4x4()
        except:
            # NOTE(review): bare except silently ignores any failure in the
            # compatibility conversion -- presumably assets without
            # compatibility data; consider narrowing to KeyError.
            pass
    if bpy.context.view_layer.objects.active:
        bpy.context.view_layer.objects.active.select_set(False)
    lights_to_filters = dict()
    # loop over connections, and map each light to filters
    for con in Asset.connectionList():
        srcNode = con.srcNode()
        dstNode = con.dstNode()
        # check if this is portal light/dome light connection
        # if so, let's do it, here
        if (srcNode in portallight_nodes) and (dstNode in domelight_nodes):
            portal = portallight_nodes[srcNode]
            dome = domelight_nodes[dstNode]
            portal.parent = dome
            continue
        if dstNode not in lights_to_filters:
            lights_to_filters[dstNode] = [srcNode]
        else:
            lights_to_filters[dstNode].append(srcNode)
    for light,filters in lights_to_filters.items():
        if light not in light_nodes:
            continue
        light_node = light_nodes[light]
        for i,f in enumerate(filters):
            # NOTE(review): KeyError here if a connection references a
            # filter that was not created above -- verify assets guarantee it.
            filter_node = filter_nodes[f]
            light_filter_item = light_node.data.renderman.light_filters.add()
            light_filter_item.linked_filter_ob = filter_node
    return nodeDict
def connectNodes(Asset, nt, nodeDict):
    """Recreate the asset's connections inside node tree *nt*.

    Args:
        Asset {RmanAsset} -- asset providing connectionList()
        nt -- the Blender node tree the nodes were created in
        nodeDict {dict} -- asset node id -> created Blender node name

    Falls back to auto-connecting an unconnected bxdf/displace node to the
    output node when the asset did not provide a root connection.
    """
    output = shadergraph_utils.find_node_from_nodetree(nt, 'RendermanOutputNode')
    bxdf_socket = output.inputs['Bxdf']
    displace_socket = output.inputs['Displacement']
    for con in Asset.connectionList():
        #print('+ %s.%s -> %s.%s' % (nodeDict[con.srcNode()](), con.srcParam(),
        #                            nodeDict[con.dstNode()](), con.dstParam()))
        srcName = nodeDict.get(con.srcNode(), '')
        dstName = nodeDict.get(con.dstNode(), '')
        if srcName == '' or dstName == '':
            continue
        srcNode = nt.nodes.get(srcName, None)
        dstNode = nt.nodes.get(dstName, None)
        if srcNode == None or dstNode == None:
            continue
        srcSocket = con.srcParam()
        dstSocket = con.dstParam()
        renderman_node_type = getattr(srcNode, 'renderman_node_type', '')
        if srcSocket in srcNode.outputs and dstSocket in dstNode.inputs:
            # direct socket-to-socket link
            nt.links.new(srcNode.outputs[srcSocket], dstNode.inputs[dstSocket])
        elif output == dstNode:
            # check if this is a root node connection
            if dstSocket == 'surfaceShader' or dstSocket == 'rman__surface':
                nt.links.new(srcNode.outputs['Bxdf'], output.inputs['Bxdf'])
            elif dstSocket == 'displacementShader' or dstSocket == 'rman__displacement':
                nt.links.new(srcNode.outputs['Displacement'], output.inputs['Displacement'])
        elif renderman_node_type == 'bxdf':
            # this is a regular upstream bxdf connection
            nt.links.new(srcNode.outputs['Bxdf'], dstNode.inputs[dstSocket])
        else:
            rfb_log().debug('error connecting %s.%s to %s.%s' % (srcNode.name,srcSocket, dstNode.name, dstSocket))
    if not bxdf_socket.is_linked:
        # Our RenderManOutputNode still does not have a bxdf connected
        # look for all bxdf nodes and find one that does not have a connected output
        bxdf_candidate = None
        displace_candidate = None
        for node in nt.nodes:
            renderman_node_type = getattr(node, 'renderman_node_type', '')
            if renderman_node_type == 'bxdf':
                if not node.outputs['Bxdf'].is_linked:
                    bxdf_candidate = node
            elif renderman_node_type == 'displace':
                displace_candidate = node
        if bxdf_candidate:
            nt.links.new(bxdf_candidate.outputs['Bxdf'], output.inputs['Bxdf'])
        if not displace_socket.is_linked and displace_candidate:
            nt.links.new(displace_candidate.outputs['Displacement'], output.inputs['Displacement'])
def create_displayfilter_nodes(Asset):
    """Recreate the asset's display filter nodes on the scene's world,
    enabling the stylized-looks pipeline if any filter requires it."""
    has_stylized = False
    df_list = Asset.displayFilterList()
    world = bpy.context.scene.world
    if not world.renderman.use_renderman_node:
        # make sure the world has a RenderMan node tree first
        bpy.ops.material.rman_add_rman_nodetree('EXEC_DEFAULT', idtype='world')
    output = shadergraph_utils.find_node(world, 'RendermanDisplayfiltersOutputNode')
    nt = world.node_tree
    nodeDict = {}
    for df_node in df_list:
        node_id = df_node.name()
        node_type = df_node.rmanNode()
        bl_node_name = __BL_NODES_MAP__.get(node_type, None)
        if not bl_node_name:
            continue
        created_node = nt.nodes.new(bl_node_name)
        created_node.name = node_id
        created_node.label = node_id
        # append a new input on the output node and link the filter to it
        output.add_input()
        nt.links.new(created_node.outputs['DisplayFilter'], output.inputs[-1])
        nodeDict[node_id] = created_node.name
        setParams(Asset, created_node, df_node.paramsDict())
        if not has_stylized and node_type in RMAN_STYLIZED_FILTERS:
            bpy.ops.scene.rman_enable_stylized_looks('EXEC_DEFAULT')
            has_stylized = True
def import_asset(Asset, assignToSelected):
    """Import *Asset* into the current scene.

    'nodeGraph' assets become a material or a light rig (decided from the
    asset's path); 'envMap' assets are applied to a PxrDomeLight.

    Args:
        Asset {RmanAsset} -- the asset to import
        assignToSelected {bool} -- assign the new material to selection

    Raises:
        RmanAssetBlenderError -- on an unknown asset type.
    """
    assetType = Asset.type()
    if assetType == "nodeGraph":
        mat = None
        if Asset.displayFilterList():
            create_displayfilter_nodes(Asset)
        if Asset.nodeList():
            # NOTE(review): splitting on '/' assumes POSIX-style asset
            # paths -- confirm on Windows.
            path = os.path.dirname(Asset.path())
            paths = path.split('/')
            if 'Materials' in paths:
                mat,nt,newNodes = createNodes(Asset)
                connectNodes(Asset, nt, newNodes)
            elif 'LightRigs' in paths:
                newNodes = import_light_rig(Asset)
        if mat and assignToSelected:
            scene = bpy.context.scene
            for ob in scene.view_layers[0].objects.selected:
                if ob.type == 'EMPTY':
                    ob.renderman.rman_material_override = mat
                    ob.update_tag(refresh={'OBJECT'})
                elif hasattr(ob, 'active_material'):
                    ob.active_material = mat
    elif assetType == "envMap":
        scene = bpy.context.scene
        dome_lights = [ob for ob in scene.objects if ob.type == 'LIGHT' \
                       and ob.data.renderman.get_light_node_name() == 'PxrDomeLight']
        selected_dome_lights = [ob for ob in dome_lights if ob.select_get()]
        env_map_path = Asset.envMapPath()
        if not selected_dome_lights:
            if not dome_lights:
                # create a new dome light
                bpy.ops.object.rman_add_light(rman_light_name='PxrDomeLight')
                ob = bpy.context.view_layer.objects.active
                plugin_node = ob.data.renderman.get_light_node()
                plugin_node.lightColorMap = env_map_path
            elif len(dome_lights) == 1:
                light = dome_lights[0].data
                plugin_node = light.renderman.get_light_node()
                plugin_node.lightColorMap = env_map_path
            else:
                rfb_log().error('More than one dome in scene. Not sure which to use')
        else:
            # FIX: apply the map to each *selected* dome light; the loop
            # previously ignored its variable and always used dome_lights[0].
            for ob in selected_dome_lights:
                light = ob.data
                plugin_node = light.renderman.get_light_node()
                plugin_node.lightColorMap = env_map_path
    else:
        raise RmanAssetBlenderError("Unknown asset type : %s" % assetType)
def export_asset(nodes, atype, infodict, category, cfg, renderPreview='std',
                 alwaysOverwrite=False, isLightRig=False):
    """Exports a nodeGraph or envMap as a RenderManAsset.

    Args:
        nodes (dict) -- dictionary containing the nodes to export, keyed by
            role ('material', 'displayfilter', 'lightrigs', 'envmap')
        atype (str) -- Asset type : 'nodeGraph' or 'envMap'
        infodict (dict) -- dict with 'label', 'author' & 'version'; may also
            carry 'storage', 'convert_to_tex' and a 'metadata' dict
        category (str) -- Category as a path, i.e.: "/Lights/LookDev"
        cfg -- asset-library configuration object, passed through to
            ral.getAbsCategoryPath() to resolve the destination directory

    Kwargs:
        renderPreview (str) -- Render an asset preview ('std', 'fur', None).
            Render the standard preview swatch by default. (default: {'std'})
        alwaysOverwrite (bool) -- Overwrite an existing asset without asking
            when not in batch mode. (default: {False})
        isLightRig (bool) -- Export the 'nodeGraph' nodes as a light rig
            instead of a material preset. (default: {False})

    Returns:
        bool -- True on success; False when the asset already exists and
        alwaysOverwrite is False.

    Raises:
        RmanAssetBlenderError: if atype is not a known asset type.
    """
    label = infodict['label']
    Asset = RmanAsset(assetType=atype, label=label, previewType=renderPreview,
                      storage=infodict.get('storage', None),
                      convert_to_tex=infodict.get('convert_to_tex', True)
                      )
    # On save, we can get the current color manager to store the config.
    color_mgr = clr_mgr.color_manager()
    ocio_config = {
        'config': color_mgr.cfg_name,
        'path': color_mgr.config_file_path(),
        'rules': color_mgr.conversion_rules,
        'aliases': color_mgr.aliases
    }
    # NOTE(review): 'aliases' is stored in ocio_config but not logged below —
    # presumably intentional (can be large); confirm if it should be traced.
    rfb_log().debug('ocio_config %s', '=' * 80)
    rfb_log().debug('    config = %s', ocio_config['config'])
    rfb_log().debug('    path = %s', ocio_config['path'])
    rfb_log().debug('    rules = %s', ocio_config['rules'])
    Asset.ocio = ocio_config
    # Add user metadata; 'label' is reserved by the asset itself and skipped.
    #
    metadata = infodict.get('metadata', dict())
    for k, v in metadata.items():
        if k == 'label':
            continue
        Asset.addMetadata(k, v)
    # Compatibility data
    # This will help other application decide if they can use this asset.
    #
    prmanversion = envconfig().build_info.version()
    Asset.setCompatibility(hostName='Blender',
                           hostVersion=bpy.app.version,
                           rendererVersion=prmanversion)
    # parse scene
    hostPrefs = get_host_prefs()
    if atype == "nodeGraph":
        if not isLightRig:
            export_material_preset(hostPrefs.blender_material, nodes['material'], hostPrefs.renderman_output_node, Asset)
            Asset.registerUsedNodeTypes()
            # Stylized looks need their display filters exported alongside
            # the material so the preset re-creates the full effect.
            is_stylized = 'PxrStylized' in Asset.getUsedNodeTypes(asString=True)
            if is_stylized and nodes['displayfilter']:
                export_displayfilter_nodes(hostPrefs.bl_world, nodes['displayfilter'], Asset)
        else:
            export_light_rig(nodes['lightrigs'], Asset)
    elif atype == "envMap":
        parse_texture(nodes['envmap'][0], Asset)
    else:
        raise RmanAssetBlenderError("%s is not a known asset type !" % atype)
    # Get path to our library
    #
    assetPath = ral.getAbsCategoryPath(cfg, category)
    # Create our directory
    #
    # NOTE(review): assetPath appears to be a path-like object exposing
    # .join()/.exists() (not a plain str) — confirm against ral's FilePath.
    assetDir = asset_name_from_label(str(label))
    dirPath = assetPath.join(assetDir)
    if not dirPath.exists():
        os.mkdir(dirPath)
    # Check if we are overwriting an existing asset
    #
    jsonfile = dirPath.join("asset.json")
    if jsonfile.exists():
        if alwaysOverwrite:
            print('Replacing existing file : %s' % jsonfile)
        else:
            return False
    # Save our json file
    #
    # print("exportAsset: %s..." % dirPath)
    Asset.save(jsonfile, compact=False)
    return True
| |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Add a database index on ``MessageCountByMinute.date``.

    South schema migration: ``forwards`` creates the index on the
    ``sentry_messagecountbyminute`` table and ``backwards`` drops it.
    """

    def forwards(self, orm):
        """Apply the migration: create the index."""
        # Adding index on 'MessageCountByMinute', fields ['date']
        db.create_index('sentry_messagecountbyminute', ['date'])

    def backwards(self, orm):
        """Revert the migration: drop the index."""
        # Removing index on 'MessageCountByMinute', fields ['date']
        db.delete_index('sentry_messagecountbyminute', ['date'])

    # Frozen ORM state auto-generated by South at the time this migration
    # was created; it is used to build the fake `orm` object passed to
    # forwards()/backwards().  Do not edit by hand.
    models = {
        'sentry.user': {
            'Meta': {
                'object_name': 'User',
                'db_table': "'auth_user'"
            },
            'date_joined':
            ('django.db.models.fields.DateTimeField', [], {
                'default': 'datetime.datetime.now'
            }),
            'email':
            ('django.db.models.fields.EmailField', [], {
                'max_length': '75',
                'blank': 'True'
            }),
            'first_name':
            ('django.db.models.fields.CharField', [], {
                'max_length': '30',
                'blank': 'True'
            }),
            'id': ('django.db.models.fields.AutoField', [], {
                'primary_key': 'True'
            }),
            'is_active': ('django.db.models.fields.BooleanField', [], {
                'default': 'True'
            }),
            'is_staff': ('django.db.models.fields.BooleanField', [], {
                'default': 'False'
            }),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {
                'default': 'False'
            }),
            'last_login':
            ('django.db.models.fields.DateTimeField', [], {
                'default': 'datetime.datetime.now'
            }),
            'last_name':
            ('django.db.models.fields.CharField', [], {
                'max_length': '30',
                'blank': 'True'
            }),
            'password': ('django.db.models.fields.CharField', [], {
                'max_length': '128'
            }),
            'username':
            ('django.db.models.fields.CharField', [], {
                'unique': 'True',
                'max_length': '30'
            })
        },
        'contenttypes.contenttype': {
            'Meta': {
                'ordering': "('name',)",
                'unique_together': "(('app_label', 'model'),)",
                'object_name': 'ContentType',
                'db_table': "'django_content_type'"
            },
            'app_label': ('django.db.models.fields.CharField', [], {
                'max_length': '100'
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'model': ('django.db.models.fields.CharField', [], {
                'max_length': '100'
            }),
            'name': ('django.db.models.fields.CharField', [], {
                'max_length': '100'
            })
        },
        'sentry.event': {
            'Meta': {
                'unique_together': "(('project', 'event_id'),)",
                'object_name': 'Event',
                'db_table': "'sentry_message'"
            },
            'checksum':
            ('django.db.models.fields.CharField', [], {
                'max_length': '32',
                'db_index': 'True'
            }),
            'culprit': (
                'django.db.models.fields.CharField', [], {
                    'max_length': '200',
                    'null': 'True',
                    'db_column': "'view'",
                    'blank': 'True'
                }
            ),
            'data': ('django.db.models.fields.TextField', [], {
                'null': 'True',
                'blank': 'True'
            }),
            'datetime': (
                'django.db.models.fields.DateTimeField', [], {
                    'default': 'datetime.datetime.now',
                    'db_index': 'True'
                }
            ),
            'event_id': (
                'django.db.models.fields.CharField', [], {
                    'max_length': '32',
                    'null': 'True',
                    'db_column': "'message_id'"
                }
            ),
            'group': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'blank': 'True',
                    'related_name': "'event_set'",
                    'null': 'True',
                    'to': "orm['sentry.Group']"
                }
            ),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'level': (
                'django.db.models.fields.PositiveIntegerField', [], {
                    'default': '40',
                    'db_index': 'True',
                    'blank': 'True'
                }
            ),
            'logger': (
                'django.db.models.fields.CharField', [], {
                    'default': "'root'",
                    'max_length': '64',
                    'db_index': 'True',
                    'blank': 'True'
                }
            ),
            'message': ('django.db.models.fields.TextField', [], {}),
            'project': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'to': "orm['sentry.Project']",
                    'null': 'True'
                }
            ),
            'server_name': (
                'django.db.models.fields.CharField', [], {
                    'max_length': '128',
                    'null': 'True',
                    'db_index': 'True'
                }
            ),
            'site': (
                'django.db.models.fields.CharField', [], {
                    'max_length': '128',
                    'null': 'True',
                    'db_index': 'True'
                }
            ),
            'time_spent': ('django.db.models.fields.FloatField', [], {
                'null': 'True'
            })
        },
        'sentry.filterkey': {
            'Meta': {
                'unique_together': "(('project', 'key'),)",
                'object_name': 'FilterKey'
            },
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'key': ('django.db.models.fields.CharField', [], {
                'max_length': '32'
            }),
            'project':
            ('sentry.db.models.fields.FlexibleForeignKey', [], {
                'to': "orm['sentry.Project']"
            })
        },
        'sentry.filtervalue': {
            'Meta': {
                'unique_together': "(('project', 'key', 'value'),)",
                'object_name': 'FilterValue'
            },
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'key': ('django.db.models.fields.CharField', [], {
                'max_length': '32'
            }),
            'project': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'to': "orm['sentry.Project']",
                    'null': 'True'
                }
            ),
            'value': ('django.db.models.fields.CharField', [], {
                'max_length': '200'
            })
        },
        'sentry.group': {
            'Meta': {
                'unique_together': "(('project', 'logger', 'culprit', 'checksum'),)",
                'object_name': 'Group',
                'db_table': "'sentry_groupedmessage'"
            },
            'active_at':
            ('django.db.models.fields.DateTimeField', [], {
                'null': 'True',
                'db_index': 'True'
            }),
            'checksum':
            ('django.db.models.fields.CharField', [], {
                'max_length': '32',
                'db_index': 'True'
            }),
            'culprit': (
                'django.db.models.fields.CharField', [], {
                    'max_length': '200',
                    'null': 'True',
                    'db_column': "'view'",
                    'blank': 'True'
                }
            ),
            'data': ('django.db.models.fields.TextField', [], {
                'null': 'True',
                'blank': 'True'
            }),
            'first_seen': (
                'django.db.models.fields.DateTimeField', [], {
                    'default': 'datetime.datetime.now',
                    'db_index': 'True'
                }
            ),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'is_public': (
                'django.db.models.fields.NullBooleanField', [], {
                    'default': 'False',
                    'null': 'True',
                    'blank': 'True'
                }
            ),
            'last_seen': (
                'django.db.models.fields.DateTimeField', [], {
                    'default': 'datetime.datetime.now',
                    'db_index': 'True'
                }
            ),
            'level': (
                'django.db.models.fields.PositiveIntegerField', [], {
                    'default': '40',
                    'db_index': 'True',
                    'blank': 'True'
                }
            ),
            'logger': (
                'django.db.models.fields.CharField', [], {
                    'default': "'root'",
                    'max_length': '64',
                    'db_index': 'True',
                    'blank': 'True'
                }
            ),
            'message': ('django.db.models.fields.TextField', [], {}),
            'project': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'to': "orm['sentry.Project']",
                    'null': 'True'
                }
            ),
            'resolved_at':
            ('django.db.models.fields.DateTimeField', [], {
                'null': 'True',
                'db_index': 'True'
            }),
            'score': ('django.db.models.fields.IntegerField', [], {
                'default': '0'
            }),
            'status': (
                'django.db.models.fields.PositiveIntegerField', [], {
                    'default': '0',
                    'db_index': 'True'
                }
            ),
            'time_spent_count': ('django.db.models.fields.IntegerField', [], {
                'default': '0'
            }),
            'time_spent_total': ('django.db.models.fields.FloatField', [], {
                'default': '0'
            }),
            'times_seen': (
                'django.db.models.fields.PositiveIntegerField', [], {
                    'default': '1',
                    'db_index': 'True'
                }
            ),
            'views': (
                'django.db.models.fields.related.ManyToManyField', [], {
                    'to': "orm['sentry.View']",
                    'symmetrical': 'False',
                    'blank': 'True'
                }
            )
        },
        'sentry.groupbookmark': {
            'Meta': {
                'unique_together': "(('project', 'user', 'group'),)",
                'object_name': 'GroupBookmark'
            },
            'group': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'related_name': "'bookmark_set'",
                    'to': "orm['sentry.Group']"
                }
            ),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'project': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'related_name': "'bookmark_set'",
                    'to': "orm['sentry.Project']"
                }
            ),
            'user': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'related_name': "'sentry_bookmark_set'",
                    'to': "orm['sentry.User']"
                }
            )
        },
        'sentry.groupmeta': {
            'Meta': {
                'unique_together': "(('group', 'key'),)",
                'object_name': 'GroupMeta'
            },
            'group':
            ('sentry.db.models.fields.FlexibleForeignKey', [], {
                'to': "orm['sentry.Group']"
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'key': ('django.db.models.fields.CharField', [], {
                'max_length': '64'
            }),
            'value': ('django.db.models.fields.TextField', [], {})
        },
        'sentry.messagecountbyminute': {
            'Meta': {
                'unique_together': "(('project', 'group', 'date'),)",
                'object_name': 'MessageCountByMinute'
            },
            'date': ('django.db.models.fields.DateTimeField', [], {
                'db_index': 'True'
            }),
            'group':
            ('sentry.db.models.fields.FlexibleForeignKey', [], {
                'to': "orm['sentry.Group']"
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'project': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'to': "orm['sentry.Project']",
                    'null': 'True'
                }
            ),
            'time_spent_count': ('django.db.models.fields.IntegerField', [], {
                'default': '0'
            }),
            'time_spent_total': ('django.db.models.fields.FloatField', [], {
                'default': '0'
            }),
            'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
                'default': '0'
            })
        },
        'sentry.messagefiltervalue': {
            'Meta': {
                'unique_together': "(('project', 'key', 'value', 'group'),)",
                'object_name': 'MessageFilterValue'
            },
            'first_seen': (
                'django.db.models.fields.DateTimeField', [], {
                    'default': 'datetime.datetime.now',
                    'null': 'True',
                    'db_index': 'True'
                }
            ),
            'group':
            ('sentry.db.models.fields.FlexibleForeignKey', [], {
                'to': "orm['sentry.Group']"
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'key': ('django.db.models.fields.CharField', [], {
                'max_length': '32'
            }),
            'last_seen': (
                'django.db.models.fields.DateTimeField', [], {
                    'default': 'datetime.datetime.now',
                    'null': 'True',
                    'db_index': 'True'
                }
            ),
            'project': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'to': "orm['sentry.Project']",
                    'null': 'True'
                }
            ),
            'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
                'default': '0'
            }),
            'value': ('django.db.models.fields.CharField', [], {
                'max_length': '200'
            })
        },
        'sentry.messageindex': {
            'Meta': {
                'unique_together': "(('column', 'value', 'object_id'),)",
                'object_name': 'MessageIndex'
            },
            'column': ('django.db.models.fields.CharField', [], {
                'max_length': '32'
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'value': ('django.db.models.fields.CharField', [], {
                'max_length': '128'
            })
        },
        'sentry.option': {
            'Meta': {
                'object_name': 'Option'
            },
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'key':
            ('django.db.models.fields.CharField', [], {
                'unique': 'True',
                'max_length': '64'
            }),
            'value': ('picklefield.fields.PickledObjectField', [], {})
        },
        'sentry.pendingteammember': {
            'Meta': {
                'unique_together': "(('team', 'email'),)",
                'object_name': 'PendingTeamMember'
            },
            'date_added':
            ('django.db.models.fields.DateTimeField', [], {
                'default': 'datetime.datetime.now'
            }),
            'email': ('django.db.models.fields.EmailField', [], {
                'max_length': '75'
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'team': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'related_name': "'pending_member_set'",
                    'to': "orm['sentry.Team']"
                }
            ),
            'type': ('django.db.models.fields.IntegerField', [], {
                'default': '0'
            })
        },
        'sentry.project': {
            'Meta': {
                'object_name': 'Project'
            },
            'date_added':
            ('django.db.models.fields.DateTimeField', [], {
                'default': 'datetime.datetime.now'
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'name': ('django.db.models.fields.CharField', [], {
                'max_length': '200'
            }),
            'owner': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'related_name': "'sentry_owned_project_set'",
                    'null': 'True',
                    'to': "orm['sentry.User']"
                }
            ),
            'public': ('django.db.models.fields.BooleanField', [], {
                'default': 'True'
            }),
            'slug': (
                'django.db.models.fields.SlugField', [], {
                    'max_length': '50',
                    'unique': 'True',
                    'null': 'True'
                }
            ),
            'status': (
                'django.db.models.fields.PositiveIntegerField', [], {
                    'default': '0',
                    'db_index': 'True'
                }
            ),
            'team': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'to': "orm['sentry.Team']",
                    'null': 'True'
                }
            )
        },
        'sentry.projectcountbyminute': {
            'Meta': {
                'unique_together': "(('project', 'date'),)",
                'object_name': 'ProjectCountByMinute'
            },
            'date': ('django.db.models.fields.DateTimeField', [], {}),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'project': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'to': "orm['sentry.Project']",
                    'null': 'True'
                }
            ),
            'time_spent_count': ('django.db.models.fields.IntegerField', [], {
                'default': '0'
            }),
            'time_spent_total': ('django.db.models.fields.FloatField', [], {
                'default': '0'
            }),
            'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
                'default': '0'
            })
        },
        'sentry.projectkey': {
            'Meta': {
                'object_name': 'ProjectKey'
            },
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'project': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'related_name': "'key_set'",
                    'to': "orm['sentry.Project']"
                }
            ),
            'public_key': (
                'django.db.models.fields.CharField', [], {
                    'max_length': '32',
                    'unique': 'True',
                    'null': 'True'
                }
            ),
            'secret_key': (
                'django.db.models.fields.CharField', [], {
                    'max_length': '32',
                    'unique': 'True',
                    'null': 'True'
                }
            ),
            'user': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'to': "orm['sentry.User']",
                    'null': 'True'
                }
            )
        },
        'sentry.projectoption': {
            'Meta': {
                'unique_together': "(('project', 'key'),)",
                'object_name': 'ProjectOption',
                'db_table': "'sentry_projectoptions'"
            },
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'key': ('django.db.models.fields.CharField', [], {
                'max_length': '64'
            }),
            'project':
            ('sentry.db.models.fields.FlexibleForeignKey', [], {
                'to': "orm['sentry.Project']"
            }),
            'value': ('picklefield.fields.PickledObjectField', [], {})
        },
        'sentry.searchdocument': {
            'Meta': {
                'unique_together': "(('project', 'group'),)",
                'object_name': 'SearchDocument'
            },
            'date_added':
            ('django.db.models.fields.DateTimeField', [], {
                'default': 'datetime.datetime.now'
            }),
            'date_changed':
            ('django.db.models.fields.DateTimeField', [], {
                'default': 'datetime.datetime.now'
            }),
            'group':
            ('sentry.db.models.fields.FlexibleForeignKey', [], {
                'to': "orm['sentry.Group']"
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'project':
            ('sentry.db.models.fields.FlexibleForeignKey', [], {
                'to': "orm['sentry.Project']"
            }),
            'status': ('django.db.models.fields.PositiveIntegerField', [], {
                'default': '0'
            }),
            'total_events': ('django.db.models.fields.PositiveIntegerField', [], {
                'default': '1'
            })
        },
        'sentry.searchtoken': {
            'Meta': {
                'unique_together': "(('document', 'field', 'token'),)",
                'object_name': 'SearchToken'
            },
            'document': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'related_name': "'token_set'",
                    'to': "orm['sentry.SearchDocument']"
                }
            ),
            'field':
            ('django.db.models.fields.CharField', [], {
                'default': "'text'",
                'max_length': '64'
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
                'default': '1'
            }),
            'token': ('django.db.models.fields.CharField', [], {
                'max_length': '128'
            })
        },
        'sentry.team': {
            'Meta': {
                'object_name': 'Team'
            },
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'name': ('django.db.models.fields.CharField', [], {
                'max_length': '64'
            }),
            'owner':
            ('sentry.db.models.fields.FlexibleForeignKey', [], {
                'to': "orm['sentry.User']"
            }),
            'slug':
            ('django.db.models.fields.SlugField', [], {
                'unique': 'True',
                'max_length': '50'
            })
        },
        'sentry.teammember': {
            'Meta': {
                'unique_together': "(('team', 'user'),)",
                'object_name': 'TeamMember'
            },
            'date_added':
            ('django.db.models.fields.DateTimeField', [], {
                'default': 'datetime.datetime.now'
            }),
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'is_active': ('django.db.models.fields.BooleanField', [], {
                'default': 'True'
            }),
            'team': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'related_name': "'member_set'",
                    'to': "orm['sentry.Team']"
                }
            ),
            'type': ('django.db.models.fields.IntegerField', [], {
                'default': '0'
            }),
            'user': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'related_name': "'sentry_teammember_set'",
                    'to': "orm['sentry.User']"
                }
            )
        },
        'sentry.useroption': {
            'Meta': {
                'unique_together': "(('user', 'project', 'key'),)",
                'object_name': 'UserOption'
            },
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'key': ('django.db.models.fields.CharField', [], {
                'max_length': '64'
            }),
            'project': (
                'sentry.db.models.fields.FlexibleForeignKey', [], {
                    'to': "orm['sentry.Project']",
                    'null': 'True'
                }
            ),
            'user':
            ('sentry.db.models.fields.FlexibleForeignKey', [], {
                'to': "orm['sentry.User']"
            }),
            'value': ('picklefield.fields.PickledObjectField', [], {})
        },
        'sentry.view': {
            'Meta': {
                'object_name': 'View'
            },
            'id':
            ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
                'primary_key': 'True'
            }),
            'path':
            ('django.db.models.fields.CharField', [], {
                'unique': 'True',
                'max_length': '100'
            }),
            'verbose_name':
            ('django.db.models.fields.CharField', [], {
                'max_length': '200',
                'null': 'True'
            }),
            'verbose_name_plural':
            ('django.db.models.fields.CharField', [], {
                'max_length': '200',
                'null': 'True'
            })
        }
    }

    # Apps whose frozen models above are complete.
    complete_apps = ['sentry']
| |
# -*- coding: utf-8 -*-
import unittest
from openprocurement.api.tests.base import BaseTenderWebTest, test_tender_data, test_lots
class TenderComplaintResourceTest(BaseTenderWebTest):
def test_create_tender_complaint_invalid(self):
response = self.app.post_json('/tenders/some_id/complaints', {
'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"]}}, status=404)
self.assertEqual(response.status, '404 Not Found')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': u'Not Found', u'location': u'url', u'name': u'tender_id'}
])
request_path = '/tenders/{}/complaints'.format(self.tender_id)
response = self.app.post(request_path, 'data', status=415)
self.assertEqual(response.status, '415 Unsupported Media Type')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description':
u"Content-Type header should be one of ['application/json']", u'location': u'header', u'name': u'Content-Type'}
])
response = self.app.post(
request_path, 'data', content_type='application/json', status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': u'No JSON object could be decoded',
u'location': u'body', u'name': u'data'}
])
response = self.app.post_json(request_path, 'data', status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': u'Data not available',
u'location': u'body', u'name': u'data'}
])
response = self.app.post_json(
request_path, {'not_data': {}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': u'Data not available',
u'location': u'body', u'name': u'data'}
])
response = self.app.post_json(request_path, {'data': {}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': [u'This field is required.'], u'location': u'body', u'name': u'author'},
{u'description': [u'This field is required.'], u'location': u'body', u'name': u'title'},
])
response = self.app.post_json(request_path, {'data': {
'invalid_field': 'invalid_value'}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': u'Rogue field', u'location':
u'body', u'name': u'invalid_field'}
])
response = self.app.post_json(request_path, {
'data': {'author': {'identifier': 'invalid_value'}}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': {u'identifier': [
u'Please use a mapping for this field or Identifier instance instead of unicode.']}, u'location': u'body', u'name': u'author'}
])
response = self.app.post_json(request_path, {
'data': {'title': 'complaint title', 'description': 'complaint description', 'author': {'identifier': {}}}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': {u'contactPoint': [u'This field is required.'], u'identifier': {u'scheme': [u'This field is required.'], u'id': [u'This field is required.']}, u'name': [u'This field is required.'], u'address': [u'This field is required.']}, u'location': u'body', u'name': u'author'}
])
response = self.app.post_json(request_path, {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': {
'name': 'name', 'identifier': {'uri': 'invalid_value'}}}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': {u'contactPoint': [u'This field is required.'], u'identifier': {u'scheme': [u'This field is required.'], u'id': [u'This field is required.'], u'uri': [u'Not a well formed URL.']}, u'address': [u'This field is required.']}, u'location': u'body', u'name': u'author'}
])
response = self.app.post_json(request_path, {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"], 'relatedLot': '0' * 32}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': [u'relatedLot should be one of lots'], u'location': u'body', u'name': u'relatedLot'}
])
def test_create_tender_complaint(self):
response = self.app.post_json('/tenders/{}/complaints'.format(
self.tender_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"], 'status': 'claim'}})
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
complaint = response.json['data']
owner_token = response.json['access']['token']
self.assertEqual(complaint['author']['name'], test_tender_data["procuringEntity"]['name'])
self.assertIn('id', complaint)
self.assertIn(complaint['id'], response.headers['Location'])
tender = self.db.get(self.tender_id)
tender['status'] = 'active.awarded'
tender['awardPeriod'] = {'endDate': '2014-01-01'}
self.db.save(tender)
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], self.tender_token), {"data": {
"status": "answered"
}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': [u'This field is required.'], u'location': u'body', u'name': u'resolutionType'},
])
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], self.tender_token), {"data": {
"status": "answered",
"resolutionType": "invalid",
"resolution": "spam"
}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']["status"], "answered")
self.assertEqual(response.json['data']["resolutionType"], "invalid")
self.assertEqual(response.json['data']["resolution"], "spam")
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
"satisfied": True,
"status": "resolved"
}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']["status"], "resolved")
response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {"status": "cancelled", "cancellationReason": "reason"}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't update complaint in current (resolved) status")
response = self.app.get('/tenders/{}'.format(self.tender_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']["status"], 'active.awarded')
self.set_status('unsuccessful')
response = self.app.post_json('/tenders/{}/complaints'.format(
self.tender_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"]}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't add complaint in current (unsuccessful) tender status")
def test_patch_tender_complaint(self):
    """Drive a complaint through its PATCH lifecycle.

    Covers: forbidden update by the tender owner while the complaint is a
    draft, title edits, the draft -> claim -> answered -> pending ->
    cancelled transitions, validation of required fields, structured 404s
    for unknown ids, and the ban on updates once the tender is complete.
    """
    # Create a draft complaint; keep its owner token for later PATCHes.
    response = self.app.post_json('/tenders/{}/complaints'.format(
        self.tender_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"]}})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    complaint = response.json['data']
    owner_token = response.json['access']['token']
    # The tender owner may not modify a draft complaint.
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], self.tender_token), {"data": {
        "status": "cancelled",
        "cancellationReason": "reason"
    }}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], "Forbidden")
    # The complaint owner can still edit the title while it is a draft.
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
        "title": "claim title",
    }})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']["title"], "claim title")
    # Owner escalates the draft to a claim.
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
        "status": "claim",
    }})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']["status"], "claim")
    # Tender owner drafts a resolution text without changing status...
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], self.tender_token), {"data": {
        "resolution": "changing rules"
    }})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["resolution"], "changing rules")
    # ...and then answers the claim with a final resolution.
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], self.tender_token), {"data": {
        "status": "answered",
        "resolutionType": "resolved",
        "resolution": "resolution text"
    }})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["status"], "answered")
    self.assertEqual(response.json['data']["resolutionType"], "resolved")
    self.assertEqual(response.json['data']["resolution"], "resolution text")
    # Complaint owner records dissatisfaction with the answer.
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
        "satisfied": False
    }})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["satisfied"], False)
    # An unsatisfied owner cannot simply mark the complaint resolved.
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
        "status": "resolved"
    }}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], "Can't update complaint")
    # Escalate to review instead.
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
        "status": "pending"
    }})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["status"], "pending")
    # Cancelling without a cancellationReason is a validation error.
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
        "status": "cancelled"
    }}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'], [
        {u'description': [u'This field is required.'], u'location': u'body', u'name': u'cancellationReason'},
    ])
    # With a reason the cancellation succeeds.
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
        "status": "cancelled",
        "cancellationReason": "reason"
    }})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["status"], "cancelled")
    self.assertEqual(response.json['data']["cancellationReason"], "reason")
    # Unknown complaint/tender ids produce structured 404 errors.
    response = self.app.patch_json('/tenders/{}/complaints/some_id'.format(self.tender_id), {"data": {"status": "resolved", "resolution": "resolution text"}}, status=404)
    self.assertEqual(response.status, '404 Not Found')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Not Found', u'location':
            u'url', u'name': u'complaint_id'}
    ])
    response = self.app.patch_json('/tenders/some_id/complaints/some_id', {"data": {"status": "resolved", "resolution": "resolution text"}}, status=404)
    self.assertEqual(response.status, '404 Not Found')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Not Found', u'location':
            u'url', u'name': u'tender_id'}
    ])
    # GET reflects the final cancelled state; earlier fields persist.
    response = self.app.get('/tenders/{}/complaints/{}'.format(self.tender_id, complaint['id']))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["status"], "cancelled")
    self.assertEqual(response.json['data']["cancellationReason"], "reason")
    self.assertEqual(response.json['data']["resolutionType"], "resolved")
    self.assertEqual(response.json['data']["resolution"], "resolution text")
    # A fresh complaint cannot be escalated once the tender is complete.
    response = self.app.post_json('/tenders/{}/complaints'.format(
        self.tender_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"]}})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    complaint = response.json['data']
    owner_token = response.json['access']['token']
    self.set_status('complete')
    response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
        "status": "claim",
    }}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], "Can't update complaint in current (complete) tender status")
def test_review_tender_complaint(self):
    """Reviewer decisions on escalated complaints.

    Creates three claims, has the tender owner answer each, escalates
    them all to ``pending``, then acts as the ``reviewer`` user to record
    a decision and one of the final statuses (invalid / resolved /
    declined) on each complaint.
    """
    complaints = []
    for i in range(3):
        # Create a claim and remember it plus its owner token.
        response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), {'data': {
            'title': 'complaint title',
            'description': 'complaint description',
            'author': test_tender_data["procuringEntity"],
            'status': 'claim'
        }})
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        complaint = response.json['data']
        owner_token = response.json['access']['token']
        complaints.append(complaint)
        # Tender owner answers the claim.
        response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], self.tender_token), {"data": {
            "status": "answered",
            "resolutionType": "resolved",
            "resolution": "resolution text"
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "answered")
        self.assertEqual(response.json['data']["resolutionType"], "resolved")
        self.assertEqual(response.json['data']["resolution"], "resolution text")
        # Unsatisfied owner escalates the complaint for review.
        response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
            "satisfied": False,
            "status": "pending"
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "pending")
    # The reviewer PATCHes without an acc_token; switch basic-auth creds.
    self.app.authorization = ('Basic', ('reviewer', ''))
    for complaint, status in zip(complaints, ['invalid', 'resolved', 'declined']):
        # Record the decision text first, then the final status.
        response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint['id']), {"data": {
            "decision": '{} complaint'.format(status)
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["decision"], '{} complaint'.format(status))
        response = self.app.patch_json('/tenders/{}/complaints/{}'.format(self.tender_id, complaint['id']), {"data": {
            "status": status
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], status)
def test_get_tender_complaint(self):
    """GET a single complaint, plus structured 404s for unknown ids."""
    response = self.app.post_json('/tenders/{}/complaints'.format(
        self.tender_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"]}})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    complaint = response.json['data']
    # GET returns exactly the representation POST created.
    response = self.app.get('/tenders/{}/complaints/{}'.format(self.tender_id, complaint['id']))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data'], complaint)
    # Unknown complaint id on a real tender.
    response = self.app.get('/tenders/{}/complaints/some_id'.format(self.tender_id), status=404)
    self.assertEqual(response.status, '404 Not Found')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Not Found', u'location':
            u'url', u'name': u'complaint_id'}
    ])
    # Unknown tender id is reported against tender_id, not complaint_id.
    response = self.app.get('/tenders/some_id/complaints/some_id', status=404)
    self.assertEqual(response.status, '404 Not Found')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Not Found', u'location':
            u'url', u'name': u'tender_id'}
    ])
def test_get_tender_complaints(self):
    """GET the complaint collection and the 404 for an unknown tender."""
    response = self.app.post_json('/tenders/{}/complaints'.format(
        self.tender_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"]}})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    complaint = response.json['data']
    # The newly created complaint is first in the listing.
    response = self.app.get('/tenders/{}/complaints'.format(self.tender_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data'][0], complaint)
    response = self.app.get('/tenders/some_id/complaints', status=404)
    self.assertEqual(response.status, '404 Not Found')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Not Found', u'location':
            u'url', u'name': u'tender_id'}
    ])
class TenderLotAwardComplaintResourceTest(BaseTenderWebTest):
    """Complaint lifecycle tests for a tender that has lots.

    Same flow as the lot-less complaint tests, except the complaint is
    created with a ``relatedLot`` pointing at the first initial lot.
    """
    # Tender fixture will be created with these lots (consumed by the base class).
    initial_lots = test_lots

    def test_create_tender_complaint(self):
        """Create a lot-related claim and walk it to a resolved state."""
        # Create a claim tied to the first lot.
        response = self.app.post_json('/tenders/{}/complaints'.format(self.tender_id), {'data': {
            'title': 'complaint title',
            'description': 'complaint description',
            'author': test_tender_data["procuringEntity"],
            'relatedLot': self.initial_lots[0]['id'],
            'status': 'claim'
        }})
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        complaint = response.json['data']
        owner_token = response.json['access']['token']
        self.assertEqual(complaint['author']['name'], test_tender_data["procuringEntity"]['name'])
        self.assertIn('id', complaint)
        self.assertIn(complaint['id'], response.headers['Location'])
        # Force the tender into the awarded phase directly in the DB so the
        # claim can be answered (bypasses the API's state machine on purpose).
        tender = self.db.get(self.tender_id)
        tender['status'] = 'active.awarded'
        tender['awardPeriod'] = {'endDate': '2014-01-01'}
        self.db.save(tender)
        # Answering without a resolutionType is a validation error.
        response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], self.tender_token), {"data": {
            "status": "answered"
        }}, status=422)
        self.assertEqual(response.status, '422 Unprocessable Entity')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'], [
            {u'description': [u'This field is required.'], u'location': u'body', u'name': u'resolutionType'},
        ])
        # Tender owner answers the claim as invalid.
        response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], self.tender_token), {"data": {
            "status": "answered",
            "resolutionType": "invalid",
            "resolution": "spam"
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "answered")
        self.assertEqual(response.json['data']["resolutionType"], "invalid")
        self.assertEqual(response.json['data']["resolution"], "spam")
        # A satisfied complaint owner may close the complaint as resolved.
        response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {
            "satisfied": True,
            "status": "resolved"
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "resolved")
        # A resolved complaint can no longer be updated.
        response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, complaint['id'], owner_token), {"data": {"status": "cancelled", "cancellationReason": "reason"}}, status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can't update complaint in current (resolved) status")
        # Tender status is unaffected by the complaint flow.
        response = self.app.get('/tenders/{}'.format(self.tender_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], 'active.awarded')
        # No new complaints once the tender is unsuccessful.
        self.set_status('unsuccessful')
        response = self.app.post_json('/tenders/{}/complaints'.format(
            self.tender_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"]}}, status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can't add complaint in current (unsuccessful) tender status")
class TenderComplaintDocumentResourceTest(BaseTenderWebTest):
    """CRUD tests for documents attached to a tender complaint.

    ``setUp`` creates one draft complaint per test; the tests then cover
    404 handling, document creation/listing/download, PUT replacement and
    PATCH of metadata, including the permission and state restrictions.
    """

    def setUp(self):
        """Create one draft complaint for the document tests to target."""
        super(TenderComplaintDocumentResourceTest, self).setUp()
        # Create complaint
        response = self.app.post_json('/tenders/{}/complaints'.format(
            self.tender_id), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_tender_data["procuringEntity"]}})
        complaint = response.json['data']
        self.complaint_id = complaint['id']
        self.complaint_owner_token = response.json['access']['token']

    def test_not_found(self):
        """Structured 404 / body errors for every bad-id combination."""
        # POST with unknown tender id.
        response = self.app.post('/tenders/some_id/complaints/some_id/documents', status=404, upload_files=[
                                 ('file', 'name.doc', 'content')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'tender_id'}
        ])
        # POST with unknown complaint id.
        response = self.app.post('/tenders/{}/complaints/some_id/documents'.format(self.tender_id), status=404, upload_files=[('file', 'name.doc', 'content')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'complaint_id'}
        ])
        # POST with a wrong multipart field name is reported against the body.
        response = self.app.post('/tenders/{}/complaints/{}/documents'.format(self.tender_id, self.complaint_id), status=404, upload_files=[
                                 ('invalid_value', 'name.doc', 'content')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'body', u'name': u'file'}
        ])
        # GET collection with unknown tender / complaint ids.
        response = self.app.get('/tenders/some_id/complaints/some_id/documents', status=404)
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'tender_id'}
        ])
        response = self.app.get('/tenders/{}/complaints/some_id/documents'.format(self.tender_id), status=404)
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'complaint_id'}
        ])
        # GET single document with unknown ids at each level.
        response = self.app.get('/tenders/some_id/complaints/some_id/documents/some_id', status=404)
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'tender_id'}
        ])
        response = self.app.get('/tenders/{}/complaints/some_id/documents/some_id'.format(self.tender_id), status=404)
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'complaint_id'}
        ])
        response = self.app.get('/tenders/{}/complaints/{}/documents/some_id'.format(self.tender_id, self.complaint_id), status=404)
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'document_id'}
        ])
        # PUT with unknown ids at each level.
        response = self.app.put('/tenders/some_id/complaints/some_id/documents/some_id', status=404,
                                upload_files=[('file', 'name.doc', 'content2')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'tender_id'}
        ])
        response = self.app.put('/tenders/{}/complaints/some_id/documents/some_id'.format(self.tender_id), status=404, upload_files=[
                                ('file', 'name.doc', 'content2')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'complaint_id'}
        ])
        response = self.app.put('/tenders/{}/complaints/{}/documents/some_id'.format(
            self.tender_id, self.complaint_id), status=404, upload_files=[('file', 'name.doc', 'content2')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location': u'url', u'name': u'document_id'}
        ])

    def test_create_tender_complaint_document(self):
        """Upload, list and download a complaint document; check state limits."""
        # Uploading without the owner token is forbidden on a draft complaint.
        response = self.app.post('/tenders/{}/complaints/{}/documents'.format(
            self.tender_id, self.complaint_id), upload_files=[('file', 'name.doc', 'content')], status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can't add document in current (draft) complaint status")
        # The complaint owner can upload.
        response = self.app.post('/tenders/{}/complaints/{}/documents?acc_token={}'.format(
            self.tender_id, self.complaint_id, self.complaint_owner_token), upload_files=[('file', 'name.doc', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        self.assertIn(doc_id, response.headers['Location'])
        self.assertEqual('name.doc', response.json["data"]["title"])
        # The download key is the query string of the returned url.
        key = response.json["data"]["url"].split('?')[-1]
        # Document shows up in the listing (with and without ?all=true).
        response = self.app.get('/tenders/{}/complaints/{}/documents'.format(self.tender_id, self.complaint_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(doc_id, response.json["data"][0]["id"])
        self.assertEqual('name.doc', response.json["data"][0]["title"])
        response = self.app.get('/tenders/{}/complaints/{}/documents?all=true'.format(self.tender_id, self.complaint_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(doc_id, response.json["data"][0]["id"])
        self.assertEqual('name.doc', response.json["data"][0]["title"])
        # A bogus download key is a 404 on the 'download' parameter.
        response = self.app.get('/tenders/{}/complaints/{}/documents/{}?download=some_id'.format(
            self.tender_id, self.complaint_id, doc_id), status=404)
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location': u'url', u'name': u'download'}
        ])
        # The real key streams back the uploaded bytes.
        response = self.app.get('/tenders/{}/complaints/{}/documents/{}?{}'.format(
            self.tender_id, self.complaint_id, doc_id, key))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/msword')
        self.assertEqual(response.content_length, 7)
        self.assertEqual(response.body, 'content')
        # Without the key we get the JSON metadata instead.
        response = self.app.get('/tenders/{}/complaints/{}/documents/{}'.format(
            self.tender_id, self.complaint_id, doc_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(doc_id, response.json["data"]["id"])
        self.assertEqual('name.doc', response.json["data"]["title"])
        # No uploads once the tender is complete.
        self.set_status('complete')
        response = self.app.post('/tenders/{}/complaints/{}/documents'.format(
            self.tender_id, self.complaint_id), upload_files=[('file', 'name.doc', 'content')], status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can't add document in current (complete) tender status")

    def test_put_tender_complaint_document(self):
        """Replace a document via PUT, both multipart and raw body."""
        response = self.app.post('/tenders/{}/complaints/{}/documents?acc_token={}'.format(
            self.tender_id, self.complaint_id, self.complaint_owner_token), upload_files=[('file', 'name.doc', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        self.assertIn(doc_id, response.headers['Location'])
        # Wrong multipart field name -> body 404.
        response = self.app.put('/tenders/{}/complaints/{}/documents/{}'.format(self.tender_id, self.complaint_id, doc_id),
                                status=404,
                                upload_files=[('invalid_name', 'name.doc', 'content')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'body', u'name': u'file'}
        ])
        # Only the document author (token holder) may replace it.
        response = self.app.put('/tenders/{}/complaints/{}/documents/{}'.format(
            self.tender_id, self.complaint_id, doc_id), upload_files=[('file', 'name.doc', 'content2')], status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can update document only author")
        # Multipart PUT replaces the content.
        response = self.app.put('/tenders/{}/complaints/{}/documents/{}?acc_token={}'.format(
            self.tender_id, self.complaint_id, doc_id, self.complaint_owner_token), upload_files=[('file', 'name.doc', 'content2')])
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(doc_id, response.json["data"]["id"])
        key = response.json["data"]["url"].split('?')[-1]
        response = self.app.get('/tenders/{}/complaints/{}/documents/{}?{}'.format(
            self.tender_id, self.complaint_id, doc_id, key))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/msword')
        self.assertEqual(response.content_length, 8)
        self.assertEqual(response.body, 'content2')
        response = self.app.get('/tenders/{}/complaints/{}/documents/{}'.format(
            self.tender_id, self.complaint_id, doc_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(doc_id, response.json["data"]["id"])
        self.assertEqual('name.doc', response.json["data"]["title"])
        # Raw-body PUT (no multipart) also replaces the content.
        response = self.app.put('/tenders/{}/complaints/{}/documents/{}?acc_token={}'.format(
            self.tender_id, self.complaint_id, doc_id, self.complaint_owner_token), 'content3', content_type='application/msword')
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(doc_id, response.json["data"]["id"])
        key = response.json["data"]["url"].split('?')[-1]
        response = self.app.get('/tenders/{}/complaints/{}/documents/{}?{}'.format(
            self.tender_id, self.complaint_id, doc_id, key))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/msword')
        self.assertEqual(response.content_length, 8)
        self.assertEqual(response.body, 'content3')
        # Once the complaint becomes a claim, its documents freeze.
        response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, self.complaint_id, self.complaint_owner_token), {"data": {
            "status": "claim",
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.json['data']["status"], "claim")
        response = self.app.put('/tenders/{}/complaints/{}/documents/{}?acc_token={}'.format(self.tender_id, self.complaint_id, doc_id, self.complaint_owner_token), 'content', content_type='application/msword', status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can't update document in current (claim) complaint status")
        # Likewise once the tender itself is complete.
        self.set_status('complete')
        response = self.app.put('/tenders/{}/complaints/{}/documents/{}?acc_token={}'.format(
            self.tender_id, self.complaint_id, doc_id, self.complaint_owner_token), upload_files=[('file', 'name.doc', 'content3')], status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can't update document in current (complete) tender status")

    def test_patch_tender_complaint_document(self):
        """PATCH document metadata; verify permission and state limits."""
        response = self.app.post('/tenders/{}/complaints/{}/documents?acc_token={}'.format(
            self.tender_id, self.complaint_id, self.complaint_owner_token), upload_files=[('file', 'name.doc', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        self.assertIn(doc_id, response.headers['Location'])
        # Only the document author may patch it.
        response = self.app.patch_json('/tenders/{}/complaints/{}/documents/{}'.format(self.tender_id, self.complaint_id, doc_id), {"data": {"description": "document description"}}, status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can update document only author")
        response = self.app.patch_json('/tenders/{}/complaints/{}/documents/{}?acc_token={}'.format(self.tender_id, self.complaint_id, doc_id, self.complaint_owner_token), {"data": {"description": "document description"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(doc_id, response.json["data"]["id"])
        # The new description is visible on GET.
        response = self.app.get('/tenders/{}/complaints/{}/documents/{}'.format(
            self.tender_id, self.complaint_id, doc_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(doc_id, response.json["data"]["id"])
        self.assertEqual('document description', response.json["data"]["description"])
        # Documents freeze once the complaint becomes a claim...
        response = self.app.patch_json('/tenders/{}/complaints/{}?acc_token={}'.format(self.tender_id, self.complaint_id, self.complaint_owner_token), {"data": {
            "status": "claim",
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.json['data']["status"], "claim")
        response = self.app.patch_json('/tenders/{}/complaints/{}/documents/{}?acc_token={}'.format(self.tender_id, self.complaint_id, doc_id, self.complaint_owner_token), {"data": {"description": "document description"}}, status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can't update document in current (claim) complaint status")
        # ...and once the tender is complete.
        self.set_status('complete')
        response = self.app.patch_json('/tenders/{}/complaints/{}/documents/{}?acc_token={}'.format(self.tender_id, self.complaint_id, doc_id, self.complaint_owner_token), {"data": {"description": "document description"}}, status=403)
        self.assertEqual(response.status, '403 Forbidden')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]["description"], "Can't update document in current (complete) tender status")
def suite():
    """Assemble the test suite run by ``unittest.main`` below.

    NOTE(review): TenderLotAwardComplaintResourceTest is not added here;
    confirm whether its omission is intentional.
    """
    tests = unittest.TestSuite()
    for case in (TenderComplaintDocumentResourceTest,
                 TenderComplaintResourceTest):
        tests.addTest(unittest.makeSuite(case))
    return tests
if __name__ == '__main__':
    # Run only the explicitly assembled suite (no automatic discovery).
    unittest.main(defaultTest='suite')
# --- end of complaint tests; Django project-views module follows ---
import datetime
from django.shortcuts import render, redirect
from django.http import HttpResponse, HttpResponseRedirect
from django.views.generic import TemplateView, RedirectView, View
from django.core import serializers
from django.views.decorators.csrf import csrf_exempt
from models import Project, ProjectMember, Mvp, MvpRedaction, Workstream, Ticket
import json
from django.core.urlresolvers import reverse
class RootProjectView(View):
    """Project landing page: forwards to the first unfinished step."""

    def get(self, request, *args, **kwargs):
        slug = kwargs["slug"]
        project = Project.objects.get(slug=slug)
        # Pick the earliest stage that is still missing data.
        if not project.idea:
            target = "big_idea"
        elif not (project.validate_customer and project.validate_offering and project.validate_value_prop):
            target = "validate"
        elif (not project.has_mvp) or not project.mvp.original_statement:
            # Do more ifs once the data model is more complete
            target = "create_mvp"
        else:
            target = "minify_mvp"
        return redirect(target, slug=slug)
class CreateProjectView(TemplateView):
    """Render the project-creation form and handle its submission."""
    template_name = "create_project.html"

    def get_context_data(self, **kwargs):
        # Pre-fill the event field on the form.
        return {
            "event_default": "Hack Tennessee 7"
        }

    def post(self, request, *args, **kwargs):
        """Create the project, its owner and any extra members; redirect
        to the big-idea step."""
        project = Project.objects.create(name=request.POST.get("projectName", ""))
        # A project runs for two weeks from its creation timestamp.
        project.started = project.created
        project.ended = project.started + datetime.timedelta(days=14)
        event = request.POST.get("event")
        if event:
            project.event = event
        project.save()
        # objects.create() already persists the row, so the redundant
        # .save() calls after each create were dropped (they issued a
        # second, identical UPDATE per member).
        ProjectMember.objects.create(project=project,
                                     name=request.POST.get("ownerName", ""),
                                     email=request.POST.get("ownerEmail", ""),
                                     owner=True)
        member_names = request.POST.getlist("memberName")
        member_emails = request.POST.getlist("memberEmail")
        for name, email in zip(member_names, member_emails):
            if name:  # skip blank rows left in the form
                ProjectMember.objects.create(project=project,
                                             name=name,
                                             email=email,
                                             owner=False)
        return redirect("big_idea", slug=project.slug)
class BigIdeaView(TemplateView):
    """Show and store the project's "big idea" statement."""
    template_name = "big_idea.html"

    def get_context_data(self, **kwargs):
        project = Project.objects.get(slug=kwargs["slug"])
        return {"project": project}

    def post(self, request, *args, **kwargs):
        slug = kwargs["slug"]
        project = Project.objects.get(slug=slug)
        # Persist the submitted idea and move on to validation.
        project.idea = request.POST.get("bigIdea", "")
        project.save()
        return redirect("validate", slug=slug)
class ValidateView(TemplateView):
    """Collect the three validation answers: offering, customer, value prop."""

    template_name = "validate.html"

    def get_context_data(self, **kwargs):
        project = Project.objects.get(slug=kwargs["slug"])
        return {"project": project}

    def post(self, request, *args, **kwargs):
        slug = kwargs["slug"]
        project = Project.objects.get(slug=slug)
        # Copy each answer straight off the form; blanks default to "".
        for field, param in (("validate_offering", "offering"),
                             ("validate_customer", "customer"),
                             ("validate_value_prop", "valueProp")):
            setattr(project, field, request.POST.get(param, ""))
        project.save()
        return redirect("create_mvp", slug=slug)
class CreateMvpView(TemplateView):
    """Collect the original (unminified) MVP statement for a project."""

    template_name = "create_mvp.html"

    def get_context_data(self, **kwargs):
        return {
            "project": Project.objects.get(slug=kwargs["slug"]),
        }

    def post(self, request, *args, **kwargs):
        slug = kwargs["slug"]
        project = Project.objects.get(slug=slug)
        # Reuse the project's existing MVP record when there is one.
        mvp = project.mvp if project.has_mvp else Mvp.objects.create(project=project)
        mvp.original_statement = request.POST.get("original_statement", "")
        mvp.save()
        return redirect("minify_mvp", slug=slug)
class MinifyMvpView(TemplateView):
    """Let the user redact ("minify") spans of the MVP statement.

    GET renders the stored redactions as JSON for the front-end editor;
    POST replaces them wholesale with the submitted set.
    """

    template_name = "minify_mvp.html"

    def get_context_data(self, **kwargs):
        project = Project.objects.get(slug=kwargs["slug"])
        if project.has_mvp:
            current_selection = [
                {"line": r.line,
                 "statement_start": r.statement_start,
                 "statement_end": r.statement_end}
                for r in project.mvp.mvpredaction_set.all()]
        else:
            current_selection = []
        return {
            "project": project,
            "selectionJson": json.dumps(current_selection)
        }

    def post(self, request, *args, **kwargs):
        project_slug = kwargs["slug"]
        project = Project.objects.get(slug=project_slug)
        if project.has_mvp:
            mvp = project.mvp
        else:
            mvp = Mvp.objects.create(project=project)
        # Replace, rather than merge: one bulk queryset delete instead of a
        # per-row loop (single DELETE statement).
        mvp.mvpredaction_set.all().delete()
        raw_redactions = json.loads(request.POST.get("redactions", "[]"))
        # Deduplicate; sorting the items makes equal dicts collapse to the
        # same tuple regardless of key iteration order.
        redactions = [dict(t) for t in set(
            tuple(sorted(d.items())) for d in raw_redactions)]
        for add_redaction in redactions:
            # create() already persists the row; no follow-up save() needed.
            MvpRedaction.objects.create(
                mvp=mvp,
                line=add_redaction["line"],
                statement_start=add_redaction["statement_start"],
                statement_end=add_redaction["statement_end"])
        return redirect("breakdown_mvp", slug=project_slug)
class BreakdownMvpView(TemplateView):
    """Break the minified MVP statement into named workstreams.

    GET renders the stored workstream spans as JSON for the editor; POST
    replaces them with the submitted spans, plus a synthetic
    "Tools & Technology" workstream used by SelectToolsView.
    """

    template_name = "breakdown_mvp.html"

    def get_context_data(self, **kwargs):
        project = Project.objects.get(slug=kwargs["slug"])
        if project.has_mvp:
            current_selection = [
                {"line": w.line,
                 "statement_start": w.statement_start,
                 "statement_end": w.statement_end}
                for w in project.mvp.workstream_set.all()]
        else:
            current_selection = []
        return {
            "project": project,
            "selectionJson": json.dumps(current_selection)
        }

    def post(self, request, **kwargs):
        project_slug = kwargs["slug"]
        project = Project.objects.get(slug=project_slug)
        if project.has_mvp:
            mvp = project.mvp
        else:
            mvp = Mvp.objects.create(project=project)
        # Replace all stored workstreams with one bulk queryset delete.
        mvp.workstream_set.all().delete()
        raw_workstreams = json.loads(request.POST.get("workstreams", "[]"))
        # Deduplicate; sorting the items makes equal dicts collapse to the
        # same tuple regardless of key iteration order.
        workstreams = [dict(t) for t in set(
            tuple(sorted(d.items())) for d in raw_workstreams)]
        for add_workstream in workstreams:
            line = add_workstream["line"]
            start = add_workstream["statement_start"]
            end = add_workstream["statement_end"]
            # The workstream is named by the selected slice of the statement.
            name = mvp.statement.split("\n")[line][start:end]
            # create() already persists; the old follow-up save() was redundant.
            Workstream.objects.create(mvp=mvp, name=name, line=line,
                                      statement_start=start, statement_end=end)
        # Add the fixed "select tech" workstream consumed by the tools page.
        Workstream.objects.create(mvp=mvp, name="Tools & Technology",
                                  line=0, statement_start=0, statement_end=0)
        return redirect("select_tools", slug=project_slug)
class SelectToolsView(TemplateView):
    """Record the tools/technologies chosen for the project.

    Each non-empty tool becomes a 'ready' Ticket on the dedicated
    "Tools & Technology" workstream, and the project's ``tools`` field is
    set to the newline-joined list of tools.
    """

    template_name = "select_tools.html"

    def get_context_data(self, **kwargs):
        return {
            "project": Project.objects.get(slug=kwargs["slug"])
        }

    def post(self, request, *args, **kwargs):
        project = Project.objects.get(slug=kwargs["slug"])
        selected = [t for t in request.POST.getlist("tool") if t]
        ws = Workstream.objects.get(mvp=project.mvp, name="Tools & Technology")
        for tool in selected:
            ticket = Ticket.objects.create(mvp=project.mvp, workstream=ws,
                                           content=tool, status='ready')
            ticket.save()
        if selected:
            project.tools = "\n".join(selected)
            project.save()
        return create_gravity_board(request, project)
def create_gravity_board(request, project):
    """Email the project's gravity-board link to its members, then redirect
    the browser to that board."""
    project.send_created_email(request.build_absolute_uri(reverse("gravity_board", kwargs={"slug": project.slug})))
    return redirect("gravity_board", slug=project.slug)
class GravityBoardView(TemplateView):
    """Render the project's kanban-style "gravity board" page."""

    template_name = "gravity_board.html"

    def get_context_data(self, **kwargs):
        return {
            "project": Project.objects.get(slug=kwargs["slug"]),
        }
class TicketView(View):
    """JSON API for gravity-board tickets (GET/POST/PATCH/DELETE).

    CSRF is exempted because the board's JavaScript client sends raw JSON
    bodies without a CSRF token.
    """

    def get_context_data(self, **kwargs):
        project = Project.objects.get(slug=kwargs["slug"])
        return {
            "project": project,
        }

    def _get_mvp(self, slug):
        # Every verb needs the project's MVP; create an empty one when the
        # project somehow has none yet (preserves the original side effect).
        project = Project.objects.get(slug=slug)
        if project.has_mvp:
            return project.mvp
        return Mvp.objects.create(project=project)

    def get(self, request, **kwargs):
        """Return every workstream with its tickets grouped by status."""
        mvp = self._get_mvp(kwargs["slug"])
        workstreams = mvp.workstream_set.all()
        # content_type, not the long-removed ``mimetype`` kwarg -- and now
        # consistent with post() below.
        return HttpResponse(
            json.dumps(self._convert_workstreams_to_json(workstreams)),
            content_type='application/json')

    def _convert_ticket_to_json(self, t):
        # Minimal ticket representation consumed by the board client.
        return {"id": t.id, "text": t.content}

    def _convert_workstreams_to_json(self, workstreams):
        result = []  # renamed from ``all`` to stop shadowing the builtin
        for w in workstreams:
            w_out = {"name": w.name, "ready": [], "doing": [], "done": []}
            for t in w.ticket_set.all():
                # t.status is one of 'ready'/'doing'/'done' (the buckets above).
                w_out[t.status].append(self._convert_ticket_to_json(t))
            result.append(w_out)
        return result

    def post(self, request, **kwargs):
        """Create a ticket from the JSON body and echo it back as JSON."""
        mvp = self._get_mvp(kwargs["slug"])
        create_ticket = json.loads(request.body)
        workstream = mvp.workstream_set.filter(
            name=create_ticket['workstream'])[0]
        # create() already persists; the old follow-up save() was redundant.
        ticket = Ticket.objects.create(mvp=mvp,
                                       content=create_ticket['content'],
                                       status=create_ticket['status'],
                                       workstream=workstream)
        return HttpResponse(json.dumps(self._convert_ticket_to_json(ticket)),
                            content_type='application/json')

    def patch(self, request, **kwargs):
        """Update an existing ticket's workstream, content and status."""
        mvp = self._get_mvp(kwargs["slug"])
        update_ticket = json.loads(request.body)
        workstream = mvp.workstream_set.filter(
            name=update_ticket['workstream'])[0]
        ticket = Ticket.objects.get(pk=update_ticket['id'])
        ticket.workstream = workstream
        ticket.content = update_ticket['content']
        ticket.status = update_ticket['status']
        ticket.save()
        return HttpResponse("ok")

    def delete(self, request, **kwargs):
        """Delete the ticket named by ``id`` in the JSON body."""
        self._get_mvp(kwargs["slug"])  # keeps the implicit MVP-creation side effect
        delete_ticket = json.loads(request.body)
        Ticket.objects.get(pk=delete_ticket['id']).delete()
        # NOTE(review): returns the literal string "id"; "ok" (as in patch())
        # looks intended -- confirm with the client before changing it.
        return HttpResponse("id")

    @csrf_exempt
    def dispatch(self, *args, **kwargs):
        return super(TicketView, self).dispatch(*args, **kwargs)
| |
import logging
import errno
import stat
import socket
from plumbum.machines.base import PopenAddons
from plumbum.machines.remote import BaseRemoteMachine
from plumbum.machines.session import ShellSession
from plumbum.lib import _setdoc, six
from plumbum.path.local import LocalPath
from plumbum.path.remote import RemotePath, StatRes
from plumbum.commands.processes import iter_lines
try:
    # Sigh... we need to gracefully-import paramiko for Sphinx builds, etc
    import paramiko
except ImportError:
    # Stand-in used when paramiko is absent: it is falsy (so capability
    # checks fail cleanly) and raises ImportError lazily on first use.
    class paramiko(object):
        def __nonzero__(self):
            return False
        __bool__ = __nonzero__
        def __getattr__(self, name):
            raise ImportError("No module named paramiko")
    paramiko = paramiko()
# Logger shared by all paramiko-backed machinery in this module.
logger = logging.getLogger("plumbum.paramiko")
class ParamikoPopen(PopenAddons):
    """A Popen-like adapter over a paramiko exec/shell channel.

    Mimics the parts of ``subprocess.Popen`` that plumbum relies on
    (``poll``/``wait``/``communicate``/``iter_lines``). ``pid`` is always
    ``None`` -- paramiko does not expose the remote process id -- so
    ``kill``/``terminate`` cannot work and raise instead.
    """

    def __init__(self, argv, stdin, stdout, stderr, encoding, stdin_file = None,
                 stdout_file = None, stderr_file = None):
        self.argv = argv
        # All three streams share one underlying paramiko Channel.
        self.channel = stdout.channel
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.encoding = encoding
        self.returncode = None
        # paramiko does not report the remote PID.
        self.pid = None
        # Optional local files to feed stdin from / copy stdout+stderr into.
        self.stdin_file = stdin_file
        self.stdout_file = stdout_file
        self.stderr_file = stderr_file

    def poll(self):
        """Return the exit code if the remote command finished, else None."""
        if self.returncode is None:
            if self.channel.exit_status_ready():
                return self.wait()
        return self.returncode

    def wait(self):
        """Block until the remote command exits; return its exit code."""
        if self.returncode is None:
            self.channel.recv_exit_status()
            self.returncode = self.channel.exit_status
            self.close()
        return self.returncode

    def close(self):
        # Shut both directions down before closing the channel itself.
        self.channel.shutdown_read()
        self.channel.shutdown_write()
        self.channel.close()

    def kill(self):
        # possible way to obtain pid:
        # "(cmd ; echo $?) & echo ?!"
        # and then client.exec_command("kill -9 %s" % (pid,))
        raise EnvironmentError("Cannot kill remote processes, we don't have their PIDs")
    terminate = kill

    def send_signal(self, sig):
        # See kill(): without a remote PID there is nothing to signal.
        raise NotImplementedError()

    def communicate(self):
        """Pump ``stdin_file`` into the remote stdin while draining output.

        Returns ``(stdout_bytes, stderr_bytes)``; output redirected to
        ``stdout_file``/``stderr_file`` is written there instead of being
        collected in the returned byte strings.
        """
        stdout = []
        stderr = []
        infile = self.stdin_file
        # Each source: (label, collected lines, remote pipe, optional file).
        sources = [("1", stdout, self.stdout, self.stdout_file), ("2", stderr, self.stderr, self.stderr_file)]
        i = 0
        while sources:
            if infile:
                try:
                    line = infile.readline()
                except (ValueError, IOError):
                    line = None
                logger.debug("communicate: %r", line)
                if not line:
                    # Input exhausted: close our file and the remote stdin.
                    infile.close()
                    infile = None
                    self.stdin.close()
                else:
                    self.stdin.write(line)
                    self.stdin.flush()
            # Round-robin between stdout and stderr so neither can stall us.
            i = (i + 1) % len(sources)
            name, coll, pipe, outfile = sources[i]
            line = pipe.readline()
            # logger.debug("%s> %r", name, line)
            if not line:
                # EOF on this stream: stop polling it.
                del sources[i]
            elif outfile:
                outfile.write(line)
                outfile.flush()
            else:
                coll.append(line)
        self.wait()
        stdout = six.b("").join(six.b(s) for s in stdout)
        stderr = six.b("").join(six.b(s) for s in stderr)
        return stdout, stderr

    def iter_lines(self, timeout=None, **kwargs):
        """Iterate over output lines; ``timeout`` is not supported here."""
        if timeout is not None:
            raise NotImplementedError("The 'timeout' parameter is not supported with ParamikoMachine")
        return iter_lines(self, _iter_lines=_iter_lines, **kwargs)
    __iter__ = iter_lines
class ParamikoMachine(BaseRemoteMachine):
    """
    An implementation of :class:`remote machine <plumbum.machines.remote.BaseRemoteMachine>`
    over Paramiko (a Python implementation of openSSH2 client/server). Invoking a remote command
    translates to invoking it over SSH ::

        with ParamikoMachine("yourhostname") as rem:
            r_ls = rem["ls"]
            # r_ls is the remote `ls`
            # executing r_ls() is equivalent to `ssh yourhostname ls`, only without
            # spawning a new ssh client

    :param host: the host name to connect to (SSH server)
    :param user: the user to connect as (if ``None``, the default will be used)
    :param port: the server's port (if ``None``, the default will be used)
    :param password: the user's password (if a password-based authentication is to be performed)
                     (if ``None``, key-based authentication will be used)
    :param keyfile: the path to the identity file (if ``None``, the default will be used)
    :param load_system_host_keys: whether or not to load the system's host keys (from ``/etc/ssh``
                                  and ``~/.ssh``). The default is ``True``, which means Paramiko
                                  behaves much like the ``ssh`` command-line client
    :param missing_host_policy: the value passed to the underlying ``set_missing_host_key_policy``
                                of the client. The default is ``None``, which means
                                ``set_missing_host_key_policy`` is not invoked and paramiko's
                                default behavior (reject) is employed
    :param encoding: the remote machine's encoding (defaults to UTF8)
    :param look_for_keys: set to False to disable searching for discoverable
                          private key files in ``~/.ssh``
    :param connect_timeout: timeout for TCP connection
    """

    class RemoteCommand(BaseRemoteMachine.RemoteCommand):
        # Stream piping/redirection needs a local shell to do the plumbing,
        # which a paramiko channel cannot provide -- fail loudly instead.
        def __or__(self, *_):
            raise NotImplementedError("Not supported with ParamikoMachine")
        def __gt__(self, *_):
            raise NotImplementedError("Not supported with ParamikoMachine")
        def __rshift__(self, *_):
            raise NotImplementedError("Not supported with ParamikoMachine")
        def __ge__(self, *_):
            raise NotImplementedError("Not supported with ParamikoMachine")
        def __lt__(self, *_):
            raise NotImplementedError("Not supported with ParamikoMachine")
        def __lshift__(self, *_):
            raise NotImplementedError("Not supported with ParamikoMachine")

    def __init__(self, host, user = None, port = None, password = None, keyfile = None,
                 load_system_host_keys = True, missing_host_policy = None, encoding = "utf8",
                 look_for_keys = None, connect_timeout = None, keep_alive = 0):
        self.host = host
        kwargs = {}
        if user:
            self._fqhost = "%s@%s" % (user, host)
            kwargs['username'] = user
        else:
            self._fqhost = host
        self._client = paramiko.SSHClient()
        if load_system_host_keys:
            self._client.load_system_host_keys()
        # Forward only the options the caller actually supplied, so
        # paramiko's own defaults apply for everything else.
        if port is not None:
            kwargs["port"] = port
        if keyfile is not None:
            kwargs["key_filename"] = keyfile
        if password is not None:
            kwargs["password"] = password
        if missing_host_policy is not None:
            self._client.set_missing_host_key_policy(missing_host_policy)
        if look_for_keys is not None:
            kwargs["look_for_keys"] = look_for_keys
        if connect_timeout is not None:
            kwargs["timeout"] = connect_timeout
        self._client.connect(host, **kwargs)
        self._keep_alive = keep_alive
        # The SFTP session is opened lazily by the ``sftp`` property.
        self._sftp = None
        BaseRemoteMachine.__init__(self, encoding, connect_timeout)

    def __str__(self):
        return "paramiko://%s" % (self._fqhost,)

    def close(self):
        BaseRemoteMachine.close(self)
        self._client.close()

    @property
    def sftp(self):
        """
        Returns an SFTP client on top of the current SSH connection; it can be used to manipulate
        files directly, much like an interactive FTP/SFTP session
        """
        if not self._sftp:
            self._sftp = self._client.open_sftp()
        return self._sftp

    @_setdoc(BaseRemoteMachine)
    def session(self, isatty = False, term = "vt100", width = 80, height = 24, new_session = False):
        # new_session is ignored for ParamikoMachine
        trans = self._client.get_transport()
        trans.set_keepalive(self._keep_alive)
        chan = trans.open_session()
        if isatty:
            chan.get_pty(term, width, height)
            chan.set_combine_stderr()
        chan.invoke_shell()
        stdin = chan.makefile('wb', -1)
        stdout = chan.makefile('rb', -1)
        stderr = chan.makefile_stderr('rb', -1)
        proc = ParamikoPopen(["<shell>"], stdin, stdout, stderr, self.encoding)
        return ShellSession(proc, self.encoding, isatty)

    @_setdoc(BaseRemoteMachine)
    def popen(self, args, stdin = None, stdout = None, stderr = None, new_session = False, cwd = None):
        # new_session is ignored for ParamikoMachine
        argv = []
        envdelta = self.env.getdelta()
        # Emulate a working directory and environment on the command line,
        # since each exec_command starts from a fresh login state.
        argv.extend(["cd", str(cwd or self.cwd), "&&"])
        if envdelta:
            argv.append("env")
            argv.extend("%s=%s" % (k, v) for k, v in envdelta.items())
        argv.extend(args.formulate())
        cmdline = " ".join(argv)
        logger.debug(cmdline)
        # The positional ``1`` is paramiko's bufsize (line-buffered).
        si, so, se = streams = self._client.exec_command(cmdline, 1)
        return ParamikoPopen(argv, si, so, se, self.encoding, stdin_file = stdin,
                             stdout_file = stdout, stderr_file = stderr)

    @_setdoc(BaseRemoteMachine)
    def download(self, src, dst):
        if isinstance(src, LocalPath):
            raise TypeError("src of download cannot be %r" % (src,))
        if isinstance(src, RemotePath) and src.remote != self:
            raise TypeError("src %r points to a different remote machine" % (src,))
        if isinstance(dst, RemotePath):
            raise TypeError("dst of download cannot be %r" % (dst,))
        return self._download(src if isinstance(src, RemotePath) else self.path(src),
                              dst if isinstance(dst, LocalPath) else LocalPath(dst))

    def _download(self, src, dst):
        # Recursively copy directories; plain files go through sftp.get.
        if src.is_dir():
            if not dst.exists():
                self.sftp.mkdir(str(dst))
            for fn in src:
                self._download(fn, dst / fn.name)
        elif dst.is_dir():
            self.sftp.get(str(src), str(dst / src.name))
        else:
            self.sftp.get(str(src), str(dst))

    @_setdoc(BaseRemoteMachine)
    def upload(self, src, dst):
        if isinstance(src, RemotePath):
            raise TypeError("src of upload cannot be %r" % (src,))
        if isinstance(dst, LocalPath):
            raise TypeError("dst of upload cannot be %r" % (dst,))
        if isinstance(dst, RemotePath) and dst.remote != self:
            raise TypeError("dst %r points to a different remote machine" % (dst,))
        return self._upload(src if isinstance(src, LocalPath) else LocalPath(src),
                            dst if isinstance(dst, RemotePath) else self.path(dst))

    def _upload(self, src, dst):
        # Mirror image of _download, using sftp.put.
        if src.is_dir():
            if not dst.exists():
                self.sftp.mkdir(str(dst))
            for fn in src:
                self._upload(fn, dst / fn.name)
        elif dst.is_dir():
            self.sftp.put(str(src), str(dst / src.name))
        else:
            self.sftp.put(str(src), str(dst))

    def connect_sock(self, dport, dhost = "localhost", ipv6 = False):
        """Returns a Paramiko ``Channel``, connected to dhost:dport on the remote machine.
        The ``Channel`` behaves like a regular socket; you can ``send`` and ``recv`` on it
        and the data will pass encrypted over SSH. Usage::

            mach = ParamikoMachine("myhost")
            sock = mach.connect_sock(12345)
            data = sock.recv(100)
            sock.send("foobar")
            sock.close()
        """
        if ipv6 and dhost == "localhost":
            dhost = "::1"
        srcaddr = ("::1", 0, 0, 0) if ipv6 else ("127.0.0.1", 0)
        trans = self._client.get_transport()
        trans.set_keepalive(self._keep_alive)
        chan = trans.open_channel('direct-tcpip', (dhost, dport), srcaddr)
        return SocketCompatibleChannel(chan)

    #
    # Path implementation
    #
    def _path_listdir(self, fn):
        return self.sftp.listdir(str(fn))

    def _path_read(self, fn):
        f = self.sftp.open(str(fn), 'rb')
        data = f.read()
        f.close()
        return data

    def _path_write(self, fn, data):
        # Encode text to the machine's encoding before writing bytes.
        if self.encoding and isinstance(data, six.unicode_type):
            data = data.encode(self.encoding)
        f = self.sftp.open(str(fn), 'wb')
        f.write(data)
        f.close()

    def _path_stat(self, fn):
        """Stat *fn* via SFTP; returns None when the path does not exist."""
        try:
            st = self.sftp.stat(str(fn))
        except IOError as e:
            if e.errno == errno.ENOENT:
                return None
            raise OSError(e.errno)
        res = StatRes((st.st_mode, 0, 0, 0, st.st_uid, st.st_gid,
                       st.st_size, st.st_atime, st.st_mtime, 0))
        # SFTP gives no textual file type; derive it from the mode bits.
        if stat.S_ISDIR(st.st_mode):
            res.text_mode = 'directory'
        if stat.S_ISREG(st.st_mode):
            res.text_mode = 'regular file'
        return res
###################################################################################################
# Make paramiko.Channel adhere to the socket protocol, namely, send and recv should fail
# when the socket has been closed
###################################################################################################
class SocketCompatibleChannel(object):
    """Proxy a paramiko ``Channel`` so it honors socket close semantics.

    A real socket raises EBADF when used after being closed; paramiko
    channels do not, so ``send``/``recv`` are guarded here while every
    other attribute is delegated to the wrapped channel untouched.
    """

    def __init__(self, chan):
        self._chan = chan

    def __getattr__(self, name):
        return getattr(self._chan, name)

    def _ensure_open(self):
        # Mimic the OS-level failure a closed socket would produce.
        if self._chan.closed:
            raise socket.error(errno.EBADF, 'Bad file descriptor')

    def send(self, s):
        self._ensure_open()
        return self._chan.send(s)

    def recv(self, count):
        self._ensure_open()
        return self._chan.recv(count)
###################################################################################################
# Custom iter_lines for paramiko.Channel
###################################################################################################
def _iter_lines(proc, decode, linesize):
    """Yield ``(fd, line)`` pairs from a ParamikoPopen as output arrives.

    ``fd`` is 0 for stdout and 1 for stderr. Waits on the single shared
    paramiko channel (selectors on 3.4+, select() before that) instead of
    on two separate pipes like the generic implementation does.
    """
    try:
        from selectors import DefaultSelector, EVENT_READ
    except ImportError:
        # Pre Python 3.4 implementation
        def selector():
            while True:
                rlist, _, _ = select([proc.stdout.channel], [], [])
                for _ in rlist:
                    yield
        from select import select
    else:
        # Python 3.4 implementation
        def selector():
            sel = DefaultSelector()
            sel.register(proc.stdout.channel, EVENT_READ)
            while True:
                for key, mask in sel.select():
                    yield
    for _ in selector():
        # One readable event may carry stdout data, stderr data, or both.
        if proc.stdout.channel.recv_ready():
            yield 0, decode(six.b(proc.stdout.readline(linesize)))
        if proc.stdout.channel.recv_stderr_ready():
            yield 1, decode(six.b(proc.stderr.readline(linesize)))
        if proc.poll() is not None:
            # Remote command finished: drain what remains, then stop.
            break
    for line in proc.stdout:
        yield 0, decode(six.b(line))
    for line in proc.stderr:
        yield 1, decode(six.b(line))
| |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Version-independent api tests"""
import httplib2
from oslo_serialization import jsonutils
from six.moves import http_client
from glance.tests import functional
class TestApiVersions(functional.FunctionalTest):
    """Verify the version document advertised at the API root.

    Each test starts live API servers with a different v1/v2 enablement
    combination and asserts that GET / answers 300 Multiple Choices with
    exactly the expected version entries. The expected document was
    previously triplicated as literals; it is now built from one table.
    """

    # (id, status) pairs in the exact order the server advertises them.
    _V2_VERSIONS = [
        ('v2.5', 'CURRENT'),
        ('v2.4', 'SUPPORTED'),
        ('v2.3', 'SUPPORTED'),
        ('v2.2', 'SUPPORTED'),
        ('v2.1', 'SUPPORTED'),
        ('v2.0', 'SUPPORTED'),
    ]
    _V1_VERSIONS = [
        ('v1.1', 'DEPRECATED'),
        ('v1.0', 'DEPRECATED'),
    ]

    def _expected_versions(self, entries):
        """Build the expected 'versions' document for *entries*."""
        url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
        return {'versions': [
            {
                'id': version_id,
                'status': status,
                # The self link points at the major version, e.g. /v2/.
                'links': [{'rel': 'self',
                           'href': url % version_id[1:].split('.')[0]}],
            }
            for version_id, status in entries
        ]}

    def _assert_root_versions(self, versions):
        """GET / and assert a 300 response carrying exactly *versions*."""
        path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
        http = httplib2.Http()
        response, content_json = http.request(path, 'GET')
        self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
        content = jsonutils.loads(content_json.decode())
        self.assertEqual(versions, content)

    def test_version_configurations(self):
        """Test that versioning is handled properly through all channels"""
        # v1 and v2 api enabled
        self.start_servers(**self.__dict__.copy())
        self._assert_root_versions(
            self._expected_versions(self._V2_VERSIONS + self._V1_VERSIONS))

    def test_v2_api_configuration(self):
        self.api_server.enable_v1_api = False
        self.api_server.enable_v2_api = True
        self.start_servers(**self.__dict__.copy())
        self._assert_root_versions(self._expected_versions(self._V2_VERSIONS))

    def test_v1_api_configuration(self):
        self.api_server.enable_v1_api = True
        self.api_server.enable_v2_api = False
        self.start_servers(**self.__dict__.copy())
        self._assert_root_versions(self._expected_versions(self._V1_VERSIONS))
class TestApiPaths(functional.FunctionalTest):
    """Exercise version negotiation across the various URL path forms.

    Fixes a copy-paste bug: test_get_v10_images_path previously requested
    /v1.a/images (duplicating test_get_v1a_images_path) instead of the
    /v1.0/images path its docstring describes.
    """

    def setUp(self):
        super(TestApiPaths, self).setUp()
        self.start_servers(**self.__dict__.copy())
        url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
        # (id, status, major) rows in the exact order the server returns.
        entries = [
            ('v2.5', 'CURRENT', '2'),
            ('v2.4', 'SUPPORTED', '2'),
            ('v2.3', 'SUPPORTED', '2'),
            ('v2.2', 'SUPPORTED', '2'),
            ('v2.1', 'SUPPORTED', '2'),
            ('v2.0', 'SUPPORTED', '2'),
            ('v1.1', 'DEPRECATED', '1'),
            ('v1.0', 'DEPRECATED', '1'),
        ]
        self.versions = {'versions': [
            {
                'id': vid,
                'status': status,
                'links': [{'rel': 'self', 'href': url % major}],
            }
            for vid, status, major in entries
        ]}
        images = {'images': []}
        self.images_json = jsonutils.dumps(images)

    def _get(self, suffix='', headers=None):
        """GET http://127.0.0.1:<api_port><suffix>; return (response, body)."""
        path = 'http://%s:%d%s' % ('127.0.0.1', self.api_port, suffix)
        http = httplib2.Http()
        return http.request(path, 'GET', headers=headers)

    def _assert_versions_returned(self, suffix='', headers=None):
        """Assert GET *suffix* yields 300 plus the version-choices document."""
        response, content_json = self._get(suffix, headers)
        self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
        content = jsonutils.loads(content_json.decode())
        self.assertEqual(self.versions, content)

    def test_get_root_path(self):
        """Assert GET / with `no Accept:` header.
        Verify version choices returned.
        Bug lp:803260 no Accept header causes a 500 in glance-api
        """
        self._assert_versions_returned()

    def test_get_images_path(self):
        """Assert GET /images with `no Accept:` header.
        Verify version choices returned.
        """
        self._assert_versions_returned('/images')

    def test_get_v1_images_path(self):
        """GET /v1/images with `no Accept:` header.
        Verify empty images list returned.
        """
        response, content = self._get('/v1/images')
        self.assertEqual(http_client.OK, response.status)

    def test_get_root_path_with_unknown_header(self):
        """Assert GET / with Accept: unknown header
        Verify version choices returned. Verify message in API log about
        unknown accept header.
        """
        self._assert_versions_returned('/', {'Accept': 'unknown'})

    def test_get_root_path_with_openstack_header(self):
        """Assert GET / with an Accept: application/vnd.openstack.images-v1
        Verify empty image list returned
        """
        response, content = self._get(
            '/images', {'Accept': 'application/vnd.openstack.images-v1'})
        self.assertEqual(http_client.OK, response.status)
        self.assertEqual(self.images_json, content.decode())

    def test_get_images_path_with_openstack_header(self):
        """Assert GET /images with a
        `Accept: application/vnd.openstack.compute-v1` header.
        Verify version choices returned. Verify message in API log
        about unknown accept header.
        """
        self._assert_versions_returned(
            '/images', {'Accept': 'application/vnd.openstack.compute-v1'})

    def test_get_v10_images_path(self):
        """Assert GET /v1.0/images with no Accept: header
        Verify version choices returned
        """
        # Fixed: previously requested /v1.a/images (copy-paste from the
        # test below), which never exercised the documented /v1.0 path.
        response, content = self._get('/v1.0/images')
        self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)

    def test_get_v1a_images_path(self):
        """Assert GET /v1.a/images with no Accept: header
        Verify version choices returned
        """
        response, content = self._get('/v1.a/images')
        self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)

    def test_get_va1_images_path(self):
        """Assert GET /va.1/images with no Accept: header
        Verify version choices returned
        """
        self._assert_versions_returned('/va.1/images')

    def test_get_versions_path(self):
        """Assert GET /versions with no Accept: header
        Verify version choices returned
        """
        response, content_json = self._get('/versions')
        self.assertEqual(http_client.OK, response.status)
        content = jsonutils.loads(content_json.decode())
        self.assertEqual(self.versions, content)

    def test_get_versions_path_with_openstack_header(self):
        """Assert GET /versions with the
        `Accept: application/vnd.openstack.images-v1` header.
        Verify version choices returned.
        """
        response, content_json = self._get(
            '/versions', {'Accept': 'application/vnd.openstack.images-v1'})
        self.assertEqual(http_client.OK, response.status)
        content = jsonutils.loads(content_json.decode())
        self.assertEqual(self.versions, content)

    def test_get_v1_versions_path(self):
        """Assert GET /v1/versions with `no Accept:` header
        Verify 404 returned
        """
        response, content = self._get('/v1/versions')
        self.assertEqual(http_client.NOT_FOUND, response.status)

    def test_get_versions_choices(self):
        """Verify version choices returned"""
        self._assert_versions_returned('/v10')

    def test_get_images_path_with_openstack_v2_header(self):
        """Assert GET /images with a
        `Accept: application/vnd.openstack.compute-v2` header.
        Verify version choices returned. Verify message in API log
        about unknown version in accept header.
        """
        self._assert_versions_returned(
            '/images', {'Accept': 'application/vnd.openstack.images-v10'})

    def test_get_v12_images_path(self):
        """Assert GET /v1.2/images with `no Accept:` header
        Verify version choices returned
        """
        self._assert_versions_returned('/v1.2/images')
| |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import os
import core
import data_feeder
import executor
import framework
import io
# optimizer is same as the parameter of Trainer.__init__. Rename it to opt_module
import optimizer as opt_module
import parallel_executor
from transpiler import distribute_transpiler
# Public API of this module: the trainer itself plus the event objects
# delivered to user callbacks around epochs and steps.
__all__ = [
    'Trainer',
    'BeginEpochEvent',
    'EndEpochEvent',
    'BeginStepEvent',
    'EndStepEvent',
]
class BeginEpochEvent(object):
    """Event delivered to callbacks just before an epoch starts."""

    def __init__(self, epoch_id):
        # Index of the epoch that is about to begin.
        self.epoch = epoch_id
class EndEpochEvent(object):
    """Event delivered to callbacks right after an epoch finishes."""

    def __init__(self, epoch_id):
        # Index of the epoch that just completed.
        self.epoch = epoch_id
class BeginStepEvent(object):
    """Event delivered to callbacks just before a training step runs."""

    def __init__(self, epoch_id, step_id):
        self.epoch = epoch_id
        self.step = step_id
        # A callback may set this to False to skip metric fetching for
        # this step.
        self.fetch_metrics = True
class EndStepEvent(object):
    """Event delivered after each training step, carrying fetched metrics."""

    def __init__(self, epoch_id, step_id, metrics):
        self.epoch = epoch_id
        self.step = step_id
        # Values fetched during the step ([] when the handler disabled
        # metric fetching).
        self.metrics = metrics
def check_and_get_place(place):
    """
    Check the type of place or get the default place.

    Args:
        place(None|core.CUDAPlace|core.CPUPlace): the place that trainer will be executed on.

    Raises:
        TypeError: if place is neither a CUDAPlace nor a CPUPlace.

    Returns:
        the original place if it is not None.
        if fluid is compiled with CUDA, returns CUDAPlace(0) by default.
        Otherwise returns CPUPlace by default.
    """
    if place is None:
        # Prefer GPU 0 when this build of fluid supports CUDA.
        if core.is_compiled_with_cuda():
            return core.CUDAPlace(0)
        return core.CPUPlace()
    # One isinstance call with a tuple replaces the chained negated checks.
    if not isinstance(place, (core.CUDAPlace, core.CPUPlace)):
        raise TypeError("Place should be either CUDAPlace or CPUPlace")
    return place
class Trainer(object):
    """
    Runs a training loop over a program built by ``train_func``.

    Args:
        train_func(callable): A function which will return loss. The loss must be a scalar.
        optimizer(optimizer.Optimizer): The optimizer should be an instance of Optimizer
        param_path(str|None): if given, persistable parameters are loaded
            from this directory after the startup program runs.
        place: The device place of this trainer.
        parallel(bool): train with a ParallelExecutor instead of a plain
            Executor.
    """

    def __init__(self,
                 train_func,
                 optimizer,
                 param_path=None,
                 place=None,
                 parallel=False):
        self.__stop = False
        self.parallel = parallel
        # 1. we need to generate a framework.Program by calling
        # program_func. Reference: fluid.program_guard in
        # test_word2vec.py
        if not isinstance(optimizer, opt_module.Optimizer):
            raise TypeError("The optimizer should be an instance of Optimizer")
        self.scope = core.Scope()
        self.startup_program = framework.Program()
        self.train_program = framework.Program()
        with framework.program_guard(self.train_program, self.startup_program):
            program_func_outs = train_func()
            self.train_func_outputs = program_func_outs if isinstance(
                program_func_outs, list) else [program_func_outs]
            self.test_program = self.train_program.clone()
            # (duplicate optimizer type-check removed; it is validated above)
            # The first element of program_func_outs is loss.
            loss = self.train_func_outputs[0]
            optimize_ops, params_grads = optimizer.minimize(loss)
        self.place = check_and_get_place(place)
        self._dist_transpile_if_necessary(optimize_ops, params_grads)
        # 2. move the default_main_program to self.program and run the
        # default_startup program on an empty core.Scope()
        # Run startup program
        with self._prog_and_scope_guard():
            # Use the resolved self.place: the raw `place` argument may be
            # None, which Executor cannot handle.
            exe = executor.Executor(self.place)
            exe.run(self.startup_program)
        if param_path:
            # load params from param_path into scope
            io.load_persistables(exe, dirname=param_path)

    def _transpile_nccl2_dist(self):
        # NCCL2 mode is requested via PADDLE_TRAINER_IPS; when absent,
        # nccl_id_var stays None as a "not NCCL2" marker.
        if "PADDLE_TRAINER_IPS" not in os.environ:
            self.nccl_id_var = None
        else:
            self.trainer_id = int(os.getenv("PADDLE_TRAINER_ID"))
            port = os.getenv("PADDLE_PSERVER_PORT")
            worker_ips = os.getenv("PADDLE_TRAINER_IPS")
            worker_endpoints = []
            for ip in worker_ips.split(","):
                worker_endpoints.append(':'.join([ip, port]))
            self.num_trainers = len(worker_endpoints)
            current_endpoint = os.getenv("POD_IP") + ":" + port
            worker_endpoints.remove(current_endpoint)
            # TODO(wuyi): use self.nccl_id_var, self.num_trainers and self.trainer_id
            # in ParallelExecutor to start
            # distributed training using NCCL2
            self.nccl_id_var = self.startup_program.global_block().create_var(
                name="NCCLID", persistable=True, type=core.VarDesc.VarType.RAW)
            self.startup_program.global_block().append_op(
                type="gen_nccl_id",
                inputs={},
                outputs={"NCCLID": self.nccl_id_var},
                attrs={
                    "endpoint": current_endpoint,
                    "endpoint_list": worker_endpoints,
                    "trainer_id": self.trainer_id
                })

    def _dist_transpile_if_necessary(self, optimize_ops, params_grads):
        """Rewrite the programs for distributed execution when the
        environment requests it (NCCL2 or parameter-server mode)."""
        self._transpile_nccl2_dist()
        # `is not None` instead of `!= None`: identity, not equality.
        if self.nccl_id_var is not None:
            return
        if "PADDLE_TRAINING_ROLE" not in os.environ:
            return
        # the port of all pservers, needed by both trainer and pserver
        port = os.getenv("PADDLE_PSERVER_PORT", "6174")
        # comma separated ips of all pservers, needed by trainer and
        # pserver
        pserver_ips = os.getenv("PADDLE_PSERVER_IPS", "")
        eplist = []
        for ip in pserver_ips.split(","):
            eplist.append(':'.join([ip, port]))
        pserver_endpoints = ",".join(eplist)
        # total number of workers/trainers in the job, needed by
        # trainer and pserver
        trainers = int(os.getenv("PADDLE_TRAINERS"))
        # the IP of the local machine, needed by pserver only
        current_endpoint = os.getenv("PADDLE_CURRENT_IP", "") + ":" + port
        # the unique trainer id, starting from 0, needed by trainer
        # only
        trainer_id = int(os.getenv("PADDLE_TRAINER_ID", "0"))
        # the role, should be either PSERVER or TRAINER
        training_role = os.getenv("PADDLE_TRAINING_ROLE")
        with self._prog_and_scope_guard():
            t = distribute_transpiler.DistributeTranspiler()
            t.transpile(
                trainer_id, pservers=pserver_endpoints, trainers=trainers)
            if training_role == "PSERVER":
                self.train_program = t.get_pserver_program(current_endpoint)
                self.startup_program = t.get_startup_program(current_endpoint,
                                                             self.train_program)
            elif training_role == "TRAINER":
                self.train_program = t.get_trainer_program()
            else:
                raise ValueError(
                    'TRAINING_ROLE environment variable must be either TRAINER or PSERVER'
                )

    def stop(self):
        """
        stop training
        """
        self.__stop = True

    def train(self, num_epochs, event_handler, reader=None, feed_order=None):
        """
        Train the model.

        Args:
            num_epochs: The number of epoch. An epoch will process all data in reader
            event_handler: The event handler. A function with type (ev:Event)->void
            reader: a reader yielding training data.
            feed_order: Feeding order of reader. None will following the defining
                order in program

        Returns:
        """
        training_role = os.getenv("PADDLE_TRAINING_ROLE", "")
        if training_role == "PSERVER":
            # A parameter server only runs its transpiled program; it never
            # enters the epoch loop.
            with self._prog_and_scope_guard():
                exe = executor.Executor(self.place)
                exe.run()
                return
        if self.parallel:
            self._train_by_parallel_executor(num_epochs, event_handler, reader,
                                             feed_order)
        else:
            self._train_by_executor(num_epochs, event_handler, reader,
                                    feed_order)

    def test(self, reader, feed_order):
        """
        Test the model on given test data

        Args:
            reader: The reader that yields test data.
            feed_order: Feeding order of reader. None will following the defining
                order in program
        """
        return self._test_by_executor(reader, feed_order,
                                      self.train_func_outputs)

    def save_params(self, param_path):
        # reference: save_persistables in io.py
        """Persist all persistable parameters under ``param_path``."""
        with self._prog_and_scope_guard():
            exe = executor.Executor(self.place)
            io.save_persistables(exe, dirname=param_path)

    @contextlib.contextmanager
    def _prog_and_scope_guard(self):
        """Make self.train_program/self.startup_program and self.scope the
        defaults for the enclosed block."""
        with framework.program_guard(
                main_program=self.train_program,
                startup_program=self.startup_program):
            with executor.scope_guard(self.scope):
                yield

    def _train_by_executor(self, num_epochs, event_handler, reader, feed_order):
        """
        Train by Executor and single device.

        Args:
            num_epochs: number of epochs to run.
            event_handler: callback receiving Begin/End epoch/step events.
            reader: raw data reader; decorated for single-device feeding.
            feed_order: see build_feed_var_list.

        Returns:
        """
        with self._prog_and_scope_guard():
            feed_var_list = build_feed_var_list(self.train_program, feed_order)
            feeder = data_feeder.DataFeeder(
                feed_list=feed_var_list, place=self.place)
            exe = executor.Executor(self.place)
            reader = feeder.decorate_reader(reader, multi_devices=False)
            self._train_by_any_executor(event_handler, exe, num_epochs, reader)

    def _train_by_any_executor(self, event_handler, exe, num_epochs, reader):
        # Shared epoch/step loop for both Executor and ParallelExecutor.
        for epoch_id in range(num_epochs):
            event_handler(BeginEpochEvent(epoch_id))
            for step_id, data in enumerate(reader()):
                if self.__stop:
                    # stop() was called from an event handler.
                    return
                begin_event = BeginStepEvent(epoch_id, step_id)
                event_handler(begin_event)
                if begin_event.fetch_metrics:
                    metrics = exe.run(feed=data,
                                      fetch_list=[
                                          var.name
                                          for var in self.train_func_outputs
                                      ])
                else:
                    metrics = exe.run(feed=data, fetch_list=[])
                event_handler(EndStepEvent(epoch_id, step_id, metrics))
            event_handler(EndEpochEvent(epoch_id))

    def _test_by_executor(self, reader, feed_order, fetch_list):
        # Average each fetched metric over all test batches.
        with executor.scope_guard(self.scope):
            feed_var_list = build_feed_var_list(self.test_program, feed_order)
            feeder = data_feeder.DataFeeder(
                feed_list=feed_var_list, place=self.place)
            exe = executor.Executor(self.place)
            accumulated = len(fetch_list) * [0]
            count = 0
            for data in reader():
                outs = exe.run(program=self.test_program,
                               feed=feeder.feed(data),
                               fetch_list=fetch_list)
                accumulated = [x[0] + x[1][0] for x in zip(accumulated, outs)]
                count += 1
            return [x / count for x in accumulated]

    def _train_by_parallel_executor(self, num_epochs, event_handler, reader,
                                    feed_order):
        with self._prog_and_scope_guard():
            pe = self._get_or_create_parallel_executor()
            feed_var_list = build_feed_var_list(self.train_program, feed_order)
            feeder = data_feeder.DataFeeder(
                feed_list=feed_var_list, place=self.place)
            reader = feeder.decorate_reader(reader, multi_devices=True)
            self._train_by_any_executor(event_handler, pe, num_epochs, reader)

    def _get_parallel_executor(self):
        # None until _get_or_create_parallel_executor() has run once.
        return getattr(self, 'parallel_executor', None)

    def _get_or_create_parallel_executor(self):
        # Lazily build the ParallelExecutor so single-device training never
        # pays for it.
        if self._get_parallel_executor() is None:
            self.parallel_executor = parallel_executor.ParallelExecutor(
                use_cuda=isinstance(self.place, core.CUDAPlace),
                loss_name=self.train_func_outputs[0].name)
        return self._get_parallel_executor()
def build_feed_var_list(program, feed_order):
    """Resolve the feed variables of ``program`` named by ``feed_order``.

    Args:
        program(framework.Program): the program owning the feed variables.
        feed_order(list|dict): either a list of variable names, or a dict
            mapping variable name -> feed position, where the positions
            must be a permutation of [0, len(feed_order)).

    Returns:
        list of variables from the program's global block, in feed order.

    Raises:
        TypeError: if ``program`` or ``feed_order`` has an unsupported type.
        ValueError: if the dict values are not a valid permutation.
    """
    if not isinstance(program, framework.Program):
        raise TypeError("The 'program' should be an object of Program")
    if isinstance(feed_order, list):
        feed_var_list = [
            program.global_block().var(var_name) for var_name in feed_order
        ]
    else:
        # NOTE(review): the error message mentions None, but a None
        # feed_order currently falls through to this TypeError — confirm
        # whether None was meant to be supported.
        if not isinstance(feed_order, dict):
            raise TypeError(
                "The 'feed_order' should be either None, list or dict.")
        # list(range(...)) keeps the comparison correct on Python 3, where
        # range() no longer returns a list.
        if not sorted(feed_order.values()) == list(range(len(feed_order))):
            raise ValueError(
                "The values of 'feed_order' should be a permutation of [0, len(feed_order))"
            )
        sorted_pair_list = sorted(feed_order.items(), key=lambda item: item[1])
        feed_var_list = [
            program.global_block().var(pair[0]) for pair in sorted_pair_list
        ]
    return feed_var_list
| |
from itertools import product
import numpy as np
from scipy.sparse import (bsr_matrix, coo_matrix, csc_matrix, csr_matrix,
dok_matrix, lil_matrix)
from sklearn import metrics
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import assert_greater
from sklearn.utils.validation import check_random_state
from sklearn.metrics.pairwise import pairwise_distances
from sklearn import neighbors, datasets
from sklearn.exceptions import DataConversionWarning
# Shared module-level RNG: the fixtures below consume it in a fixed order,
# so every test that reads `rng`, `iris` or `digits` depends on this exact
# statement sequence.
rng = np.random.RandomState(0)
# load and shuffle iris dataset
iris = datasets.load_iris()
perm = rng.permutation(iris.target.size)
iris.data = iris.data[perm]
iris.target = iris.target[perm]
# load and shuffle digits
digits = datasets.load_digits()
perm = rng.permutation(digits.target.size)
digits.data = digits.data[perm]
digits.target = digits.target[perm]
# Sparse container types exercised by the sparse-input tests, plus a dense
# variant for the combined list.
SPARSE_TYPES = (bsr_matrix, coo_matrix, csc_matrix, csr_matrix, dok_matrix,
                lil_matrix)
SPARSE_OR_DENSE = SPARSE_TYPES + (np.asarray,)
# Neighbor-search backends and Minkowski p values covered by the tests.
ALGORITHMS = ('ball_tree', 'brute', 'kd_tree', 'auto')
P = (1, 2, 3, 4, np.inf)
# Filter deprecation warnings.
neighbors.kneighbors_graph = ignore_warnings(neighbors.kneighbors_graph)
neighbors.radius_neighbors_graph = ignore_warnings(
    neighbors.radius_neighbors_graph)
def _weight_func(dist):
""" Weight function to replace lambda d: d ** -2.
The lambda function is not valid because:
if d==0 then 0^-2 is not valid. """
# Dist could be multidimensional, flatten it so all values
# can be looped
with np.errstate(divide='ignore'):
retval = 1. / dist
return retval ** 2
def test_unsupervised_kneighbors(n_samples=20, n_features=5,
                                 n_query_pts=2, n_neighbors=5):
    # Test unsupervised neighbors methods: all algorithm backends must
    # return identical neighbors and distances for each Minkowski p.
    # NOTE: draws from the shared module-level `rng`, so results depend on
    # the fixture execution order at import time.
    X = rng.rand(n_samples, n_features)
    test = rng.rand(n_query_pts, n_features)
    for p in P:
        results_nodist = []
        results = []
        for algorithm in ALGORITHMS:
            neigh = neighbors.NearestNeighbors(n_neighbors=n_neighbors,
                                               algorithm=algorithm,
                                               p=p)
            neigh.fit(X)
            results_nodist.append(neigh.kneighbors(test,
                                                   return_distance=False))
            results.append(neigh.kneighbors(test, return_distance=True))
        for i in range(len(results) - 1):
            # return_distance=False must agree with the index half of the
            # (dist, ind) tuple, and consecutive algorithms must agree.
            assert_array_almost_equal(results_nodist[i], results[i][1])
            assert_array_almost_equal(results[i][0], results[i + 1][0])
            assert_array_almost_equal(results[i][1], results[i + 1][1])
def test_unsupervised_inputs():
    # test the types of valid input into NearestNeighbors: a fitted
    # estimator, a BallTree and a KDTree must all be accepted by fit()
    # and produce the same neighbors as fitting on the raw data.
    # NOTE: consumes the shared module-level `rng`.
    X = rng.random_sample((10, 3))
    nbrs_fid = neighbors.NearestNeighbors(n_neighbors=1)
    nbrs_fid.fit(X)
    dist1, ind1 = nbrs_fid.kneighbors(X)
    nbrs = neighbors.NearestNeighbors(n_neighbors=1)
    for input in (nbrs_fid, neighbors.BallTree(X), neighbors.KDTree(X)):
        nbrs.fit(input)
        dist2, ind2 = nbrs.kneighbors(X)
        assert_array_almost_equal(dist1, dist2)
        assert_array_almost_equal(ind1, ind2)
def test_precomputed(random_state=42):
    """Tests unsupervised NearestNeighbors with a distance matrix."""
    # Note: smaller samples may result in spurious test success
    rng = np.random.RandomState(random_state)
    X = rng.random_sample((10, 4))
    Y = rng.random_sample((3, 4))
    DXX = metrics.pairwise_distances(X, metric='euclidean')
    DYX = metrics.pairwise_distances(Y, X, metric='euclidean')
    for method in ['kneighbors']:
        # TODO: also test radius_neighbors, but requires different assertion
        # As a feature matrix (n_samples by n_features)
        nbrs_X = neighbors.NearestNeighbors(n_neighbors=3)
        nbrs_X.fit(X)
        dist_X, ind_X = getattr(nbrs_X, method)(Y)
        # As a dense distance matrix (n_samples by n_samples)
        nbrs_D = neighbors.NearestNeighbors(n_neighbors=3, algorithm='brute',
                                            metric='precomputed')
        nbrs_D.fit(DXX)
        dist_D, ind_D = getattr(nbrs_D, method)(DYX)
        assert_array_almost_equal(dist_X, dist_D)
        assert_array_almost_equal(ind_X, ind_D)
        # Check auto works too
        nbrs_D = neighbors.NearestNeighbors(n_neighbors=3, algorithm='auto',
                                            metric='precomputed')
        nbrs_D.fit(DXX)
        dist_D, ind_D = getattr(nbrs_D, method)(DYX)
        assert_array_almost_equal(dist_X, dist_D)
        assert_array_almost_equal(ind_X, ind_D)
        # Check X=None in prediction
        dist_X, ind_X = getattr(nbrs_X, method)(None)
        dist_D, ind_D = getattr(nbrs_D, method)(None)
        assert_array_almost_equal(dist_X, dist_D)
        assert_array_almost_equal(ind_X, ind_D)
        # Must raise a ValueError if the matrix is not of correct shape
        assert_raises(ValueError, getattr(nbrs_D, method), X)
    target = np.arange(X.shape[0])
    for Est in (neighbors.KNeighborsClassifier,
                neighbors.RadiusNeighborsClassifier,
                neighbors.KNeighborsRegressor,
                neighbors.RadiusNeighborsRegressor):
        # (stray debug `print(Est)` removed) precomputed distances must
        # yield the same predictions as euclidean on raw features.
        est = Est(metric='euclidean')
        est.radius = est.n_neighbors = 1
        pred_X = est.fit(X, target).predict(Y)
        est.metric = 'precomputed'
        pred_D = est.fit(DXX, target).predict(DYX)
        assert_array_almost_equal(pred_X, pred_D)
def test_precomputed_cross_validation():
    # Cross-validation must split a precomputed distance matrix the same
    # way it splits the raw feature matrix.
    local_rng = np.random.RandomState(0)
    X = local_rng.rand(20, 2)
    D = pairwise_distances(X, metric='euclidean')
    y = local_rng.randint(3, size=20)
    estimators = (neighbors.KNeighborsClassifier,
                  neighbors.RadiusNeighborsClassifier,
                  neighbors.KNeighborsRegressor,
                  neighbors.RadiusNeighborsRegressor)
    for Est in estimators:
        score_on_features = cross_val_score(Est(), X, y)
        score_on_distances = cross_val_score(Est(metric='precomputed'), D, y)
        assert_array_equal(score_on_features, score_on_distances)
def test_unsupervised_radius_neighbors(n_samples=20, n_features=5,
                                       n_query_pts=2, radius=0.5,
                                       random_state=0):
    # Unsupervised radius-based queries must agree across all algorithm
    # backends for every Minkowski p.
    rng = np.random.RandomState(random_state)
    X = rng.rand(n_samples, n_features)
    test = rng.rand(n_query_pts, n_features)
    for p in P:
        results = []
        for algorithm in ALGORITHMS:
            neigh = neighbors.NearestNeighbors(radius=radius,
                                               algorithm=algorithm,
                                               p=p)
            neigh.fit(X)
            ind1 = neigh.radius_neighbors(test, return_distance=False)
            # sort the results: this is not done automatically for
            # radius searches
            dist, ind = neigh.radius_neighbors(test, return_distance=True)
            for (d, i, i1) in zip(dist, ind, ind1):
                j = d.argsort()
                d[:] = d[j]
                i[:] = i[j]
                i1[:] = i1[j]
            results.append((dist, ind))
            assert_array_almost_equal(np.concatenate(list(ind)),
                                      np.concatenate(list(ind1)))
        for i in range(len(results) - 1):
            # (stray trailing comma removed: it wrapped the first
            # assertion's None result in a discarded 1-tuple)
            assert_array_almost_equal(np.concatenate(list(results[i][0])),
                                      np.concatenate(list(results[i + 1][0])))
            assert_array_almost_equal(np.concatenate(list(results[i][1])),
                                      np.concatenate(list(results[i + 1][1])))
def test_kneighbors_classifier(n_samples=40,
                               n_features=5,
                               n_test_pts=10,
                               n_neighbors=5,
                               random_state=0):
    # k-neighbors classification: slightly perturbed training points must
    # keep their labels, for int and string targets alike.
    rng = np.random.RandomState(random_state)
    X = 2 * rng.rand(n_samples, n_features) - 1
    # builtin `int` instead of the `np.int` alias removed in NumPy 1.24.
    y = ((X ** 2).sum(axis=1) < .5).astype(int)
    y_str = y.astype(str)
    weight_func = _weight_func
    for algorithm in ALGORITHMS:
        for weights in ['uniform', 'distance', weight_func]:
            knn = neighbors.KNeighborsClassifier(n_neighbors=n_neighbors,
                                                 weights=weights,
                                                 algorithm=algorithm)
            knn.fit(X, y)
            epsilon = 1e-5 * (2 * rng.rand(1, n_features) - 1)
            y_pred = knn.predict(X[:n_test_pts] + epsilon)
            assert_array_equal(y_pred, y[:n_test_pts])
            # Test prediction with y_str
            knn.fit(X, y_str)
            y_pred = knn.predict(X[:n_test_pts] + epsilon)
            assert_array_equal(y_pred, y_str[:n_test_pts])
def test_kneighbors_classifier_float_labels(n_samples=40, n_features=5,
                                            n_test_pts=10, n_neighbors=5,
                                            random_state=0):
    # k-neighbors classification must also accept float-typed labels.
    rng = np.random.RandomState(random_state)
    X = 2 * rng.rand(n_samples, n_features) - 1
    # builtin int/float instead of the np.int/np.float aliases removed in
    # NumPy 1.24.
    y = ((X ** 2).sum(axis=1) < .5).astype(int)
    knn = neighbors.KNeighborsClassifier(n_neighbors=n_neighbors)
    knn.fit(X, y.astype(float))
    epsilon = 1e-5 * (2 * rng.rand(1, n_features) - 1)
    y_pred = knn.predict(X[:n_test_pts] + epsilon)
    assert_array_equal(y_pred, y[:n_test_pts])
def test_kneighbors_classifier_predict_proba():
    # KNeighborsClassifier.predict_proba() must return the exact class
    # frequencies among the k nearest neighbors.
    X = np.array([[0, 2, 0],
                  [0, 2, 1],
                  [2, 0, 0],
                  [2, 2, 0],
                  [0, 0, 2],
                  [0, 0, 1]])
    y = np.array([4, 4, 5, 5, 1, 1])
    clf = neighbors.KNeighborsClassifier(n_neighbors=3, p=1)  # cityblock dist
    clf.fit(X, y)
    expected = np.array([[0, 2. / 3, 1. / 3],
                         [1. / 3, 2. / 3, 0],
                         [1. / 3, 0, 2. / 3],
                         [0, 1. / 3, 2. / 3],
                         [2. / 3, 1. / 3, 0],
                         [2. / 3, 1. / 3, 0]])
    assert_array_equal(expected, clf.predict_proba(X))
    # String labels must yield the same probabilities.
    clf.fit(X, y.astype(str))
    assert_array_equal(expected, clf.predict_proba(X))
    # Distance weighting reweights the neighbor votes accordingly.
    clf = neighbors.KNeighborsClassifier(
        n_neighbors=2, p=1, weights='distance')
    clf.fit(X, y)
    proba = clf.predict_proba(np.array([[0, 2, 0], [2, 2, 2]]))
    assert_array_almost_equal(np.array([[0, 1, 0], [0, 0.4, 0.6]]), proba)
def test_radius_neighbors_classifier(n_samples=40,
                                     n_features=5,
                                     n_test_pts=10,
                                     radius=0.5,
                                     random_state=0):
    # Radius-based classification: slightly perturbed training points must
    # keep their labels, for int and string targets alike.
    rng = np.random.RandomState(random_state)
    X = 2 * rng.rand(n_samples, n_features) - 1
    # builtin `int` instead of the `np.int` alias removed in NumPy 1.24.
    y = ((X ** 2).sum(axis=1) < .5).astype(int)
    y_str = y.astype(str)
    weight_func = _weight_func
    for algorithm in ALGORITHMS:
        for weights in ['uniform', 'distance', weight_func]:
            neigh = neighbors.RadiusNeighborsClassifier(radius=radius,
                                                        weights=weights,
                                                        algorithm=algorithm)
            neigh.fit(X, y)
            epsilon = 1e-5 * (2 * rng.rand(1, n_features) - 1)
            y_pred = neigh.predict(X[:n_test_pts] + epsilon)
            assert_array_equal(y_pred, y[:n_test_pts])
            neigh.fit(X, y_str)
            y_pred = neigh.predict(X[:n_test_pts] + epsilon)
            assert_array_equal(y_pred, y_str[:n_test_pts])
def test_radius_neighbors_classifier_when_no_neighbors():
    # Radius-based classifier when a query has no neighbors:
    # with outlier_label=None prediction must raise an informative error,
    # otherwise outliers must receive the configured label.
    X = np.array([[1.0, 1.0], [2.0, 2.0]])
    y = np.array([1, 2])
    radius = 0.1
    z1 = np.array([[1.01, 1.01], [2.01, 2.01]])  # no outliers
    z2 = np.array([[1.01, 1.01], [1.4, 1.4]])  # one outlier
    weight_func = _weight_func
    for outlier_label in [0, -1, None]:
        for algorithm in ALGORITHMS:
            for weights in ['uniform', 'distance', weight_func]:
                rnc = neighbors.RadiusNeighborsClassifier
                clf = rnc(radius=radius, weights=weights, algorithm=algorithm,
                          outlier_label=outlier_label)
                clf.fit(X, y)
                assert_array_equal(np.array([1, 2]),
                                   clf.predict(z1))
                if outlier_label is None:
                    assert_raises(ValueError, clf.predict, z2)
                else:
                    # Was unreachable (`elif False:`); the outlier-labeling
                    # branch is now actually exercised.
                    assert_array_equal(np.array([1, outlier_label]),
                                       clf.predict(z2))
def test_radius_neighbors_classifier_outlier_labeling():
    # A query point with no neighbors inside `radius` must receive the
    # configured outlier_label instead of raising.
    X = np.array([[1.0, 1.0], [2.0, 2.0]])
    y = np.array([1, 2])
    radius = 0.1
    z1 = np.array([[1.01, 1.01], [2.01, 2.01]])  # no outliers
    z2 = np.array([[1.01, 1.01], [1.4, 1.4]])  # one outlier
    correct_labels1 = np.array([1, 2])
    correct_labels2 = np.array([1, -1])
    for algorithm, weights in product(ALGORITHMS,
                                      ['uniform', 'distance', _weight_func]):
        clf = neighbors.RadiusNeighborsClassifier(radius=radius,
                                                  weights=weights,
                                                  algorithm=algorithm,
                                                  outlier_label=-1)
        clf.fit(X, y)
        assert_array_equal(correct_labels1, clf.predict(z1))
        assert_array_equal(correct_labels2, clf.predict(z2))
def test_radius_neighbors_classifier_zero_distance():
    # A training sample at exactly zero distance from the query must still
    # be classified correctly (relevant for 'distance' weighting).
    X = np.array([[1.0, 1.0], [2.0, 2.0]])
    y = np.array([1, 2])
    radius = 0.1
    queries = np.array([[1.01, 1.01], [2.0, 2.0]])
    expected = np.array([1, 2])
    for algorithm, weights in product(ALGORITHMS,
                                      ['uniform', 'distance', _weight_func]):
        clf = neighbors.RadiusNeighborsClassifier(radius=radius,
                                                  weights=weights,
                                                  algorithm=algorithm)
        clf.fit(X, y)
        assert_array_equal(expected, clf.predict(queries))
def test_neighbors_regressors_zero_distance():
    # Regressors must handle queries at zero distance from training
    # samples, for both radius-based and k-based variants.
    X = np.array([[1.0, 1.0], [1.0, 1.0], [2.0, 2.0], [2.5, 2.5]])
    y = np.array([1.0, 1.5, 2.0, 0.0])
    radius = 0.2
    z = np.array([[1.1, 1.1], [2.0, 2.0]])
    rnn_correct_labels = np.array([1.25, 2.0])
    knn_correct_unif = np.array([1.25, 1.0])
    knn_correct_dist = np.array([1.25, 2.0])
    for algorithm in ALGORITHMS:
        # weights=_weight_func is deliberately not covered: users are
        # expected to handle zero distances themselves in custom functions.
        for w in ['uniform', 'distance']:
            reg = neighbors.RadiusNeighborsRegressor(radius=radius,
                                                     weights=w,
                                                     algorithm=algorithm)
            reg.fit(X, y)
            assert_array_almost_equal(rnn_correct_labels, reg.predict(z))
        for w, expected in zip(['uniform', 'distance'],
                               [knn_correct_unif, knn_correct_dist]):
            reg = neighbors.KNeighborsRegressor(n_neighbors=2,
                                                weights=w,
                                                algorithm=algorithm)
            reg.fit(X, y)
            assert_array_almost_equal(expected, reg.predict(z))
def test_radius_neighbors_boundary_handling():
    """Points lying exactly on the radius boundary are handled the same by
    every algorithm, and a single query still yields an object array
    rather than a 2d array."""
    X = np.array([[1.5], [3.0], [3.01]])
    radius = 3.0
    for algorithm in ALGORITHMS:
        model = neighbors.NearestNeighbors(radius=radius,
                                           algorithm=algorithm).fit(X)
        found = model.radius_neighbors([[0.0]], return_distance=False)
        assert_equal(found.shape, (1,))
        assert_equal(found.dtype, object)
        # 3.0 is included (distance <= radius); 3.01 is not.
        assert_array_equal(found[0], [0, 1])
def test_RadiusNeighborsClassifier_multioutput():
    # Test k-NN classifier on multioutput data: a single multi-output
    # predict() must match per-column single-output predictions stacked.
    rng = check_random_state(0)
    n_features = 2
    n_samples = 40
    n_output = 3
    X = rng.rand(n_samples, n_features)
    y = rng.randint(0, 3, (n_samples, n_output))
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    # NOTE: `weights` is first the option list, then rebound as the loop
    # variable iterating over that same list.
    weights = [None, 'uniform', 'distance', _weight_func]
    for algorithm, weights in product(ALGORITHMS, weights):
        # Stack single output prediction
        y_pred_so = []
        for o in range(n_output):
            rnn = neighbors.RadiusNeighborsClassifier(weights=weights,
                                                      algorithm=algorithm)
            rnn.fit(X_train, y_train[:, o])
            y_pred_so.append(rnn.predict(X_test))
        y_pred_so = np.vstack(y_pred_so).T
        assert_equal(y_pred_so.shape, y_test.shape)
        # Multioutput prediction
        rnn_mo = neighbors.RadiusNeighborsClassifier(weights=weights,
                                                     algorithm=algorithm)
        rnn_mo.fit(X_train, y_train)
        y_pred_mo = rnn_mo.predict(X_test)
        assert_equal(y_pred_mo.shape, y_test.shape)
        assert_array_almost_equal(y_pred_mo, y_pred_so)
def test_kneighbors_classifier_sparse(n_samples=40,
                                      n_features=5,
                                      n_test_pts=10,
                                      n_neighbors=5,
                                      random_state=0):
    # k-NN classification on sparse matrices: every sparse training format
    # must accept every sparse/dense prediction format.
    rng = np.random.RandomState(random_state)
    X = 2 * rng.rand(n_samples, n_features) - 1
    X *= X > .2
    # builtin `int` instead of the `np.int` alias removed in NumPy 1.24.
    y = ((X ** 2).sum(axis=1) < .5).astype(int)
    for sparsemat in SPARSE_TYPES:
        knn = neighbors.KNeighborsClassifier(n_neighbors=n_neighbors,
                                             algorithm='auto')
        knn.fit(sparsemat(X), y)
        epsilon = 1e-5 * (2 * rng.rand(1, n_features) - 1)
        for sparsev in SPARSE_TYPES + (np.asarray,):
            X_eps = sparsev(X[:n_test_pts] + epsilon)
            y_pred = knn.predict(X_eps)
            assert_array_equal(y_pred, y[:n_test_pts])
def test_KNeighborsClassifier_multioutput():
    # Test k-NN classifier on multioutput data: multi-output predict() and
    # predict_proba() must match per-column single-output results.
    rng = check_random_state(0)
    n_features = 5
    n_samples = 50
    n_output = 3
    X = rng.rand(n_samples, n_features)
    y = rng.randint(0, 3, (n_samples, n_output))
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    # NOTE: `weights` is first the option list, then rebound as the loop
    # variable iterating over that same list.
    weights = [None, 'uniform', 'distance', _weight_func]
    for algorithm, weights in product(ALGORITHMS, weights):
        # Stack single output prediction
        y_pred_so = []
        y_pred_proba_so = []
        for o in range(n_output):
            knn = neighbors.KNeighborsClassifier(weights=weights,
                                                 algorithm=algorithm)
            knn.fit(X_train, y_train[:, o])
            y_pred_so.append(knn.predict(X_test))
            y_pred_proba_so.append(knn.predict_proba(X_test))
        y_pred_so = np.vstack(y_pred_so).T
        assert_equal(y_pred_so.shape, y_test.shape)
        assert_equal(len(y_pred_proba_so), n_output)
        # Multioutput prediction
        knn_mo = neighbors.KNeighborsClassifier(weights=weights,
                                                algorithm=algorithm)
        knn_mo.fit(X_train, y_train)
        y_pred_mo = knn_mo.predict(X_test)
        assert_equal(y_pred_mo.shape, y_test.shape)
        assert_array_almost_equal(y_pred_mo, y_pred_so)
        # Check proba: one probability array per output column.
        y_pred_proba_mo = knn_mo.predict_proba(X_test)
        assert_equal(len(y_pred_proba_mo), n_output)
        for proba_mo, proba_so in zip(y_pred_proba_mo, y_pred_proba_so):
            assert_array_almost_equal(proba_mo, proba_so)
def test_kneighbors_regressor(n_samples=40,
                              n_features=5,
                              n_test_pts=10,
                              n_neighbors=3,
                              random_state=0):
    # k-neighbors regression should reproduce a smooth target within 0.3
    # on slightly perturbed training points, for every backend/weighting.
    local_rng = np.random.RandomState(random_state)
    X = 2 * local_rng.rand(n_samples, n_features) - 1
    y = np.sqrt((X ** 2).sum(1))
    y /= y.max()
    y_target = y[:n_test_pts]
    for algorithm, weights in product(ALGORITHMS,
                                      ['uniform', 'distance', _weight_func]):
        knn = neighbors.KNeighborsRegressor(n_neighbors=n_neighbors,
                                            weights=weights,
                                            algorithm=algorithm)
        knn.fit(X, y)
        perturbation = 1E-5 * (2 * local_rng.rand(1, n_features) - 1)
        y_pred = knn.predict(X[:n_test_pts] + perturbation)
        assert_true(np.all(abs(y_pred - y_target) < 0.3))
def test_KNeighborsRegressor_multioutput_uniform_weight():
    # With uniform weights, multi-output k-NN regression must equal the
    # plain mean of the neighbors' target rows.
    local_rng = check_random_state(0)
    n_features = 5
    n_samples = 40
    n_output = 4
    X = local_rng.rand(n_samples, n_features)
    y = local_rng.rand(n_samples, n_output)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    for algorithm, weights in product(ALGORITHMS, [None, 'uniform']):
        knn = neighbors.KNeighborsRegressor(weights=weights,
                                            algorithm=algorithm)
        knn.fit(X_train, y_train)
        neighbor_rows = knn.kneighbors(X_test, return_distance=False)
        manual_mean = np.array([np.mean(y_train[idx], axis=0)
                                for idx in neighbor_rows])
        predicted = knn.predict(X_test)
        assert_equal(predicted.shape, y_test.shape)
        assert_equal(manual_mean.shape, y_test.shape)
        assert_array_almost_equal(predicted, manual_mean)
def test_kneighbors_regressor_multioutput(n_samples=40,
                                          n_features=5,
                                          n_test_pts=10,
                                          n_neighbors=3,
                                          random_state=0):
    # Multi-output k-NN regression: two stacked identical targets must be
    # predicted within tolerance for every backend/weighting.
    local_rng = np.random.RandomState(random_state)
    X = 2 * local_rng.rand(n_samples, n_features) - 1
    y = np.sqrt((X ** 2).sum(1))
    y /= y.max()
    y = np.vstack([y, y]).T
    y_target = y[:n_test_pts]
    for algorithm, weights in product(ALGORITHMS,
                                      ['uniform', 'distance', _weight_func]):
        knn = neighbors.KNeighborsRegressor(n_neighbors=n_neighbors,
                                            weights=weights,
                                            algorithm=algorithm)
        knn.fit(X, y)
        perturbation = 1E-5 * (2 * local_rng.rand(1, n_features) - 1)
        y_pred = knn.predict(X[:n_test_pts] + perturbation)
        assert_equal(y_pred.shape, y_target.shape)
        assert_true(np.all(np.abs(y_pred - y_target) < 0.3))
def test_radius_neighbors_regressor(n_samples=40,
                                    n_features=3,
                                    n_test_pts=10,
                                    radius=0.5,
                                    random_state=0):
    # Radius-based regression should reproduce a smooth target within
    # radius/2 on slightly perturbed training points.
    local_rng = np.random.RandomState(random_state)
    X = 2 * local_rng.rand(n_samples, n_features) - 1
    y = np.sqrt((X ** 2).sum(1))
    y /= y.max()
    y_target = y[:n_test_pts]
    for algorithm, weights in product(ALGORITHMS,
                                      ['uniform', 'distance', _weight_func]):
        reg = neighbors.RadiusNeighborsRegressor(radius=radius,
                                                 weights=weights,
                                                 algorithm=algorithm)
        reg.fit(X, y)
        perturbation = 1E-5 * (2 * local_rng.rand(1, n_features) - 1)
        y_pred = reg.predict(X[:n_test_pts] + perturbation)
        assert_true(np.all(abs(y_pred - y_target) < radius / 2))
def test_RadiusNeighborsRegressor_multioutput_with_uniform_weight():
    # Multi-output radius regression (uniform weights) must equal the plain
    # mean of the in-radius neighbors' target rows.
    rng = check_random_state(0)
    n_features = 5
    n_samples = 40
    n_output = 4
    X = rng.rand(n_samples, n_features)
    y = rng.rand(n_samples, n_output)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    for algorithm, weights in product(ALGORITHMS, [None, 'uniform']):
        # (stray space after `neighbors.` removed)
        rnn = neighbors.RadiusNeighborsRegressor(weights=weights,
                                                 algorithm=algorithm)
        rnn.fit(X_train, y_train)
        neigh_idx = rnn.radius_neighbors(X_test, return_distance=False)
        # Already an ndarray; the redundant second np.array() wrap is gone.
        y_pred_idx = np.array([np.mean(y_train[idx], axis=0)
                               for idx in neigh_idx])
        y_pred = rnn.predict(X_test)
        assert_equal(y_pred_idx.shape, y_test.shape)
        assert_equal(y_pred.shape, y_test.shape)
        assert_array_almost_equal(y_pred, y_pred_idx)
def test_RadiusNeighborsRegressor_multioutput(n_samples=40,
                                              n_features=5,
                                              n_test_pts=10,
                                              n_neighbors=3,
                                              random_state=0):
    # Multi-output radius regression with various weightings: two stacked
    # identical targets must be predicted within tolerance.
    local_rng = np.random.RandomState(random_state)
    X = 2 * local_rng.rand(n_samples, n_features) - 1
    y = np.sqrt((X ** 2).sum(1))
    y /= y.max()
    y = np.vstack([y, y]).T
    y_target = y[:n_test_pts]
    for algorithm, weights in product(ALGORITHMS,
                                      ['uniform', 'distance', _weight_func]):
        reg = neighbors.RadiusNeighborsRegressor(n_neighbors=n_neighbors,
                                                 weights=weights,
                                                 algorithm=algorithm)
        reg.fit(X, y)
        perturbation = 1E-5 * (2 * local_rng.rand(1, n_features) - 1)
        y_pred = reg.predict(X[:n_test_pts] + perturbation)
        assert_equal(y_pred.shape, y_target.shape)
        assert_true(np.all(np.abs(y_pred - y_target) < 0.3))
def test_kneighbors_regressor_sparse(n_samples=40,
                                     n_features=5,
                                     n_test_pts=10,
                                     n_neighbors=5,
                                     random_state=0):
    # k-NN regression on sparse matrices: every sparse training format
    # must accept every sparse/dense prediction format.
    rng = np.random.RandomState(random_state)
    X = 2 * rng.rand(n_samples, n_features) - 1
    # builtin `int` instead of the `np.int` alias removed in NumPy 1.24.
    y = ((X ** 2).sum(axis=1) < .25).astype(int)
    for sparsemat in SPARSE_TYPES:
        knn = neighbors.KNeighborsRegressor(n_neighbors=n_neighbors,
                                            algorithm='auto')
        knn.fit(sparsemat(X), y)
        for sparsev in SPARSE_OR_DENSE:
            X2 = sparsev(X)
            assert_true(np.mean(knn.predict(X2).round() == y) > 0.95)
def test_neighbors_iris():
    # Sanity checks on the iris dataset
    # Puts three points of each label in the plane and performs a
    # nearest neighbor query on points near the decision boundary.
    # Uses the module-level shuffled `iris` fixture.
    for algorithm in ALGORITHMS:
        clf = neighbors.KNeighborsClassifier(n_neighbors=1,
                                             algorithm=algorithm)
        clf.fit(iris.data, iris.target)
        # 1-NN memorizes the training set exactly.
        assert_array_equal(clf.predict(iris.data), iris.target)
        clf.set_params(n_neighbors=9, algorithm=algorithm)
        clf.fit(iris.data, iris.target)
        assert_true(np.mean(clf.predict(iris.data) == iris.target) > 0.95)
        # Regression on the integer labels, rounded back to classes.
        rgs = neighbors.KNeighborsRegressor(n_neighbors=5, algorithm=algorithm)
        rgs.fit(iris.data, iris.target)
        assert_greater(np.mean(rgs.predict(iris.data).round() == iris.target),
                       0.95)
def test_neighbors_digits():
    # Sanity check on the digits dataset: the 'brute' algorithm has been
    # observed to fail if the input dtype is uint8 due to overflow in the
    # distance computations, so scores on uint8 and float inputs must match.
    X = digits.data.astype('uint8')
    y = digits.target

    # 80/20 train/test split by position.
    split = int(X.shape[0] * 0.8)
    X_train, y_train = X[:split], y[:split]
    X_test, y_test = X[split:], y[split:]

    clf = neighbors.KNeighborsClassifier(n_neighbors=1, algorithm='brute')
    score_uint8 = clf.fit(X_train, y_train).score(X_test, y_test)
    score_float = clf.fit(X_train.astype(float), y_train).score(
        X_test.astype(float), y_test)
    assert_equal(score_uint8, score_float)
def test_kneighbors_graph():
    # Test kneighbors_graph to build the k-Nearest Neighbor graph.
    # Pairwise distances of X (hand-computed, matching the expected
    # matrices below): d(x0,x1)=1.01, d(x1,x2)~=1.40716026,
    # d(x0,x2)~=2.23606798.
    X = np.array([[0, 1], [1.01, 1.], [2, 0]])

    # n_neighbors = 1
    # With include_self=True each point's single nearest neighbor is
    # itself, so the connectivity graph is the identity matrix.
    A = neighbors.kneighbors_graph(X, 1, mode='connectivity',
                                   include_self=True)
    assert_array_equal(A.toarray(), np.eye(A.shape[0]))

    # In 'distance' mode (self not included, per the expected zero
    # diagonal) each row holds the distance to its nearest other point.
    A = neighbors.kneighbors_graph(X, 1, mode='distance')
    assert_array_almost_equal(
        A.toarray(),
        [[0.00, 1.01, 0.],
         [1.01, 0., 0.],
         [0.00, 1.40716026, 0.]])

    # n_neighbors = 2
    A = neighbors.kneighbors_graph(X, 2, mode='connectivity',
                                   include_self=True)
    assert_array_equal(
        A.toarray(),
        [[1., 1., 0.],
         [1., 1., 0.],
         [0., 1., 1.]])
    A = neighbors.kneighbors_graph(X, 2, mode='distance')
    assert_array_almost_equal(
        A.toarray(),
        [[0., 1.01, 2.23606798],
         [1.01, 0., 1.40716026],
         [2.23606798, 1.40716026, 0.]])

    # n_neighbors = 3: every point is connected to every point.
    A = neighbors.kneighbors_graph(X, 3, mode='connectivity',
                                   include_self=True)
    assert_array_almost_equal(
        A.toarray(),
        [[1, 1, 1], [1, 1, 1], [1, 1, 1]])
def test_kneighbors_graph_sparse(seed=36):
    # kneighbors_graph must give identical results for a dense matrix and
    # its CSR representation, for every tested mode and neighborhood size.
    rng = np.random.RandomState(seed)
    X = rng.randn(10, 10)
    Xcsr = csr_matrix(X)

    for n_neighbors, mode in product([1, 2, 3],
                                     ["connectivity", "distance"]):
        dense_graph = neighbors.kneighbors_graph(X, n_neighbors, mode=mode)
        sparse_graph = neighbors.kneighbors_graph(Xcsr, n_neighbors,
                                                  mode=mode)
        assert_array_almost_equal(dense_graph.toarray(),
                                  sparse_graph.toarray())
def test_radius_neighbors_graph():
    # Test radius_neighbors_graph to build the Nearest Neighbor graph.
    # With radius 1.5 only the pairs (x0,x1) and (x1,x2) are in range:
    # d(x0,x1)=1.01 and d(x1,x2)~=1.40716026, while d(x0,x2) exceeds 1.5
    # (hence the zero corners in the expected matrices).
    X = np.array([[0, 1], [1.01, 1.], [2, 0]])

    A = neighbors.radius_neighbors_graph(X, 1.5, mode='connectivity',
                                         include_self=True)
    assert_array_equal(
        A.toarray(),
        [[1., 1., 0.],
         [1., 1., 1.],
         [0., 1., 1.]])

    # 'distance' mode (self excluded, per the zero diagonal): entries
    # hold the actual pairwise distances of in-range neighbors.
    A = neighbors.radius_neighbors_graph(X, 1.5, mode='distance')
    assert_array_almost_equal(
        A.toarray(),
        [[0., 1.01, 0.],
         [1.01, 0., 1.40716026],
         [0., 1.40716026, 0.]])
def test_radius_neighbors_graph_sparse(seed=36):
    # radius_neighbors_graph must give identical results for a dense
    # matrix and its CSR representation, for every tested mode and radius.
    rng = np.random.RandomState(seed)
    X = rng.randn(10, 10)
    Xcsr = csr_matrix(X)

    # NOTE: the second positional argument of radius_neighbors_graph is a
    # radius; the original loop variable was misleadingly called
    # 'n_neighbors'.
    for radius in [1, 2, 3]:
        for mode in ["connectivity", "distance"]:
            dense_graph = neighbors.radius_neighbors_graph(
                X, radius, mode=mode)
            sparse_graph = neighbors.radius_neighbors_graph(
                Xcsr, radius, mode=mode)
            assert_array_almost_equal(dense_graph.toarray(),
                                      sparse_graph.toarray())
def test_neighbors_badargs():
    # Test bad argument values: these should all raise ValueErrors
    assert_raises(ValueError,
                  neighbors.NearestNeighbors,
                  algorithm='blah')

    X = rng.random_sample((10, 2))
    Xsparse = csr_matrix(X)
    y = np.ones(10)

    for cls in (neighbors.KNeighborsClassifier,
                neighbors.RadiusNeighborsClassifier,
                neighbors.KNeighborsRegressor,
                neighbors.RadiusNeighborsRegressor):
        assert_raises(ValueError,
                      cls,
                      weights='blah')
        assert_raises(ValueError,
                      cls, p=-1)
        assert_raises(ValueError,
                      cls, algorithm='blah')

        nbrs = cls(algorithm='ball_tree', metric='haversine')
        assert_raises(ValueError,
                      nbrs.predict,
                      X)
        assert_raises(ValueError,
                      ignore_warnings(nbrs.fit),
                      Xsparse, y)

        nbrs = cls()
        assert_raises(ValueError,
                      nbrs.fit,
                      np.ones((0, 2)), np.ones(0))
        assert_raises(ValueError,
                      nbrs.fit,
                      X[:, :, None], y)
        nbrs.fit(X, y)
        assert_raises(ValueError,
                      nbrs.predict,
                      [[]])
        # BUG FIX: the original used isinstance(cls, ...), which is always
        # False because cls is a class object, not an instance -- so the
        # negative-n_neighbors check below was never executed.
        if issubclass(cls, (neighbors.KNeighborsClassifier,
                            neighbors.KNeighborsRegressor)):
            nbrs = cls(n_neighbors=-1)
            assert_raises(ValueError, nbrs.fit, X, y)

    nbrs = neighbors.NearestNeighbors().fit(X)

    assert_raises(ValueError, nbrs.kneighbors_graph, X, mode='blah')
    assert_raises(ValueError, nbrs.radius_neighbors_graph, X, mode='blah')
def test_neighbors_metrics(n_samples=20, n_features=3,
                           n_query_pts=2, n_neighbors=5):
    # Test computing the neighbors for various metrics
    # create a symmetric matrix (V V^T is symmetric PSD, suitable as the
    # inverse-covariance parameter of the mahalanobis metric)
    V = rng.rand(n_features, n_features)
    VI = np.dot(V, V.T)

    # (metric name, extra metric parameters) pairs to exercise
    metrics = [('euclidean', {}),
               ('manhattan', {}),
               ('minkowski', dict(p=1)),
               ('minkowski', dict(p=2)),
               ('minkowski', dict(p=3)),
               ('minkowski', dict(p=np.inf)),
               ('chebyshev', {}),
               ('seuclidean', dict(V=rng.rand(n_features))),
               ('wminkowski', dict(p=3, w=rng.rand(n_features))),
               ('mahalanobis', dict(VI=VI))]
    algorithms = ['brute', 'ball_tree', 'kd_tree']
    X = rng.rand(n_samples, n_features)

    test = rng.rand(n_query_pts, n_features)

    for metric, metric_params in metrics:
        results = []
        # 'p' goes through the dedicated keyword, not metric_params
        p = metric_params.pop('p', 2)
        for algorithm in algorithms:
            # KD tree doesn't support all metrics
            if (algorithm == 'kd_tree' and
                    metric not in neighbors.KDTree.valid_metrics):
                assert_raises(ValueError,
                              neighbors.NearestNeighbors,
                              algorithm=algorithm,
                              metric=metric, metric_params=metric_params)
                continue
            neigh = neighbors.NearestNeighbors(n_neighbors=n_neighbors,
                                               algorithm=algorithm,
                                               metric=metric, p=p,
                                               metric_params=metric_params)
            neigh.fit(X)
            results.append(neigh.kneighbors(test, return_distance=True))
        # NOTE(review): only the first two collected results are compared;
        # if all three algorithms support the metric, the third result
        # goes unchecked -- confirm whether that is intentional.
        assert_array_almost_equal(results[0][0], results[1][0])
        assert_array_almost_equal(results[0][1], results[1][1])
def test_callable_metric():
    # A user-supplied callable metric must produce the same neighbor
    # distances under the 'auto' and 'brute' algorithms.
    def custom_metric(x1, x2):
        return np.sqrt(np.sum(x1 ** 2 + x2 ** 2))

    X = np.random.RandomState(42).rand(20, 2)
    model_auto = neighbors.NearestNeighbors(3, algorithm='auto',
                                            metric=custom_metric)
    model_brute = neighbors.NearestNeighbors(3, algorithm='brute',
                                             metric=custom_metric)
    model_auto.fit(X)
    model_brute.fit(X)

    dist_auto, _ = model_auto.kneighbors(X)
    dist_brute, _ = model_brute.kneighbors(X)
    assert_array_almost_equal(dist_auto, dist_brute)
def test_metric_params_interface():
    # Passing 'p' via metric_params (instead of the dedicated keyword)
    # must trigger a SyntaxWarning.
    assert_warns(SyntaxWarning,
                 neighbors.KNeighborsClassifier,
                 metric_params={'p': 3})
def test_predict_sparse_ball_kd_tree():
    # Tree-based algorithms cannot handle sparse queries: predict must
    # raise a ValueError when handed CSR input.
    rng = np.random.RandomState(0)
    X = rng.rand(5, 5)
    y = rng.randint(0, 2, 5)

    estimators = (neighbors.KNeighborsClassifier(1, algorithm='kd_tree'),
                  neighbors.KNeighborsRegressor(1, algorithm='ball_tree'))
    for estimator in estimators:
        estimator.fit(X, y)
        assert_raises(ValueError, estimator.predict, csr_matrix(X))
def test_non_euclidean_kneighbors():
    rng = np.random.RandomState(0)
    X = rng.rand(5, 5)

    # Find a reasonable radius.
    # BUG FIX: np.sort returns a sorted *copy*, and the original discarded
    # the return value, so the radius was taken from the unsorted distance
    # array. Sort in place so dist_array[15] is a meaningful quantile.
    dist_array = pairwise_distances(X).flatten()
    dist_array.sort()
    radius = dist_array[15]

    # Test kneighbors_graph
    for metric in ['manhattan', 'chebyshev']:
        nbrs_graph = neighbors.kneighbors_graph(
            X, 3, metric=metric, mode='connectivity',
            include_self=True).toarray()
        nbrs1 = neighbors.NearestNeighbors(3, metric=metric).fit(X)
        assert_array_equal(nbrs_graph, nbrs1.kneighbors_graph(X).toarray())

    # Test radiusneighbors_graph
    for metric in ['manhattan', 'chebyshev']:
        nbrs_graph = neighbors.radius_neighbors_graph(
            X, radius, metric=metric, mode='connectivity',
            include_self=True).toarray()
        nbrs1 = neighbors.NearestNeighbors(metric=metric, radius=radius).fit(X)
        assert_array_equal(nbrs_graph, nbrs1.radius_neighbors_graph(X).A)

    # Raise error when wrong parameters are supplied: graph functions must
    # reject a prefit estimator whose metric differs from the one requested.
    X_nbrs = neighbors.NearestNeighbors(3, metric='manhattan')
    X_nbrs.fit(X)
    assert_raises(ValueError, neighbors.kneighbors_graph, X_nbrs, 3,
                  metric='euclidean')
    X_nbrs = neighbors.NearestNeighbors(radius=radius, metric='manhattan')
    X_nbrs.fit(X)
    assert_raises(ValueError, neighbors.radius_neighbors_graph, X_nbrs,
                  radius, metric='euclidean')
def check_object_arrays(nparray, list_check):
    """Check each element of an object array against the expected list."""
    for idx, element in enumerate(nparray):
        expected = list_check[idx]
        assert_array_equal(element, expected)
def test_k_and_radius_neighbors_train_is_not_query():
    # Test kneighbors et.al when query is not training data
    for algorithm in ALGORITHMS:
        nn = neighbors.NearestNeighbors(n_neighbors=1, algorithm=algorithm)

        X = [[0], [1]]
        nn.fit(X)
        test_data = [[2], [1]]

        # Test neighbors.
        dist, ind = nn.kneighbors(test_data)
        assert_array_equal(dist, [[1], [0]])
        assert_array_equal(ind, [[1], [1]])
        # radius_neighbors returns ragged per-query results: query [2] has
        # one in-range training point, query [1] has both.
        dist, ind = nn.radius_neighbors([[2], [1]], radius=1.5)
        check_object_arrays(dist, [[1], [1, 0]])
        check_object_arrays(ind, [[1], [0, 1]])

        # Test the graph variants.
        assert_array_equal(
            nn.kneighbors_graph(test_data).A, [[0., 1.], [0., 1.]])
        # Distance-mode graph stores an explicit zero for the exact match
        # of query [1] with training point [1].
        assert_array_equal(
            nn.kneighbors_graph([[2], [1]], mode='distance').A,
            np.array([[0., 1.], [0., 0.]]))
        rng = nn.radius_neighbors_graph([[2], [1]], radius=1.5)
        assert_array_equal(rng.A, [[0, 1], [1, 1]])
def test_k_and_radius_neighbors_X_None():
    # Test kneighbors et.al when query is None
    for algorithm in ALGORITHMS:
        nn = neighbors.NearestNeighbors(n_neighbors=1, algorithm=algorithm)

        X = [[0], [1]]
        nn.fit(X)

        dist, ind = nn.kneighbors()
        assert_array_equal(dist, [[1], [1]])
        assert_array_equal(ind, [[1], [0]])
        dist, ind = nn.radius_neighbors(None, radius=1.5)
        check_object_arrays(dist, [[1], [1]])
        check_object_arrays(ind, [[1], [0]])

        # Test the graph variants.
        # BUG FIX: the original loop iterated over [rng, kng] but asserted
        # on `rng` each time, so `kng` was never actually checked.
        rng = nn.radius_neighbors_graph(None, radius=1.5)
        kng = nn.kneighbors_graph(None)
        for graph in [rng, kng]:
            assert_array_equal(graph.A, [[0, 1], [1, 0]])
            assert_array_equal(graph.data, [1, 1])
            assert_array_equal(graph.indices, [1, 0])

        X = [[0, 1], [0, 1], [1, 1]]
        nn = neighbors.NearestNeighbors(n_neighbors=2, algorithm=algorithm)
        nn.fit(X)
        assert_array_equal(
            nn.kneighbors_graph().A,
            np.array([[0., 1., 1.], [1., 0., 1.], [1., 1., 0]]))
def test_k_and_radius_neighbors_duplicates():
    # Test behavior of kneighbors when duplicates are present in query
    for algorithm in ALGORITHMS:
        nn = neighbors.NearestNeighbors(n_neighbors=1, algorithm=algorithm)
        nn.fit([[0], [1]])

        # Do not do anything special to duplicates.
        kng = nn.kneighbors_graph([[0], [1]], mode='distance')
        # Each query point coincides with a training point, so the stored
        # distances are all zero -- but the entries must still be explicit
        # (hence the checks on .data/.indices below).
        assert_array_equal(
            kng.A,
            np.array([[0., 0.], [0., 0.]]))
        assert_array_equal(kng.data, [0., 0.])
        assert_array_equal(kng.indices, [0, 1])

        dist, ind = nn.radius_neighbors([[0], [1]], radius=1.5)
        check_object_arrays(dist, [[0, 1], [1, 0]])
        check_object_arrays(ind, [[0, 1], [0, 1]])

        rng = nn.radius_neighbors_graph([[0], [1]], radius=1.5)
        assert_array_equal(rng.A, np.ones((2, 2)))

        rng = nn.radius_neighbors_graph([[0], [1]], radius=1.5,
                                        mode='distance')
        assert_array_equal(rng.A, [[0, 1], [1, 0]])
        assert_array_equal(rng.indices, [0, 1, 0, 1])
        assert_array_equal(rng.data, [0, 1, 1, 0])

        # Mask the first duplicates when n_duplicates > n_neighbors.
        X = np.ones((3, 1))
        nn = neighbors.NearestNeighbors(n_neighbors=1)
        nn.fit(X)
        dist, ind = nn.kneighbors()
        assert_array_equal(dist, np.zeros((3, 1)))
        assert_array_equal(ind, [[1], [0], [1]])

        # Test that zeros are explicitly marked in kneighbors_graph.
        kng = nn.kneighbors_graph(mode='distance')
        assert_array_equal(
            kng.A, np.zeros((3, 3)))
        assert_array_equal(kng.data, np.zeros(3))
        assert_array_equal(kng.indices, [1., 0., 1.])
        assert_array_equal(
            nn.kneighbors_graph().A,
            np.array([[0., 1., 0.], [1., 0., 0.], [0., 1., 0.]]))
def test_include_self_neighbors_graph():
    # With include_self=True every sample is its own (nearest) neighbor;
    # with include_self=False it must be excluded from its neighborhood.
    X = [[2, 3], [4, 5]]

    with_self = neighbors.kneighbors_graph(X, 1, include_self=True).A
    without_self = neighbors.kneighbors_graph(X, 1, include_self=False).A
    assert_array_equal(with_self, [[1., 0.], [0., 1.]])
    assert_array_equal(without_self, [[0., 1.], [1., 0.]])

    with_self = neighbors.radius_neighbors_graph(
        X, 5.0, include_self=True).A
    without_self = neighbors.radius_neighbors_graph(
        X, 5.0, include_self=False).A
    assert_array_equal(with_self, [[1., 1.], [1., 1.]])
    assert_array_equal(without_self, [[0., 1.], [1., 0.]])
def test_same_knn_parallel():
    # Predictions, neighbor distances/indices and the kneighbors graph
    # must be identical with the default (serial) n_jobs and n_jobs=3.
    X, y = datasets.make_classification(n_samples=30, n_features=5,
                                        n_redundant=0, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y)

    def check_same_knn_parallel(algorithm):
        clf = neighbors.KNeighborsClassifier(n_neighbors=3,
                                             algorithm=algorithm)

        # Reference run with the default (serial) settings.
        clf.fit(X_train, y_train)
        y = clf.predict(X_test)
        dist, ind = clf.kneighbors(X_test)
        graph = clf.kneighbors_graph(X_test, mode='distance').toarray()

        # Same estimator re-fit with parallel querying.
        clf.set_params(n_jobs=3)
        clf.fit(X_train, y_train)
        y_parallel = clf.predict(X_test)
        dist_parallel, ind_parallel = clf.kneighbors(X_test)
        graph_parallel = \
            clf.kneighbors_graph(X_test, mode='distance').toarray()

        assert_array_equal(y, y_parallel)
        assert_array_almost_equal(dist, dist_parallel)
        assert_array_equal(ind, ind_parallel)
        assert_array_almost_equal(graph, graph_parallel)

    # Nose-style generator test: yields one check per algorithm.
    for algorithm in ALGORITHMS:
        yield check_same_knn_parallel, algorithm
def test_dtype_convert():
    # String class labels must survive fit/predict unchanged (no
    # accidental dtype conversion of the target).
    classifier = neighbors.KNeighborsClassifier(n_neighbors=1)
    CLASSES = 15
    X = np.eye(CLASSES)
    y = list('ABCDEFGHIJKLMNOPQRSTU'[:CLASSES])

    result = classifier.fit(X, y).predict(X)
    assert_array_equal(result, y)
# ignore conversion to boolean in pairwise_distances
@ignore_warnings(category=DataConversionWarning)
def test_pairwise_boolean_distance():
    # Non-regression test for #4523: a boolean metric must give the same
    # neighbor distances under 'brute' (scipy.spatial.distance via
    # pairwise_distances) and 'ball_tree' (sklearn.neighbors.dist_metrics).
    rng = np.random.RandomState(0)
    X = rng.uniform(size=(6, 5))

    brute = neighbors.NearestNeighbors(
        metric="jaccard", algorithm='brute').fit(X)
    ball_tree = neighbors.NearestNeighbors(
        metric="jaccard", algorithm='ball_tree').fit(X)
    assert_array_equal(brute.kneighbors(X)[0], ball_tree.kneighbors(X)[0])
| |
# -*- coding: utf-8 -*-
"""
CMS
Simple Content Management System
"""
# web2py injects the current request globally; derive this controller's
# module and resource names from it.
module = request.controller
resourcename = request.function

# Fail fast with a 404 if the CMS module is disabled in the deployment
# settings.
if not deployment_settings.has_module(module):
    raise HTTP(404, body="Module disabled: %s" % module)
# =============================================================================
def index():
    """
    Application Home page
    """

    module_name = deployment_settings.modules[module].name_nice
    response.title = module_name

    # Use this module's CMS post body as the landing content when one
    # exists; otherwise fall back to a plain heading.
    table = s3db.cms_post
    record = db(table.module == module).select(table.body,
                                               limitby=(0, 1)).first()
    item = record.body if record else H2(module_name)

    # tbc
    report = ""

    return dict(item=item, report=report)
# -----------------------------------------------------------------------------
def series():
    """ RESTful CRUD controller """

    def prep(r):
        # Pre-process: when a component (post) is opened under a series,
        # hide the per-post settings -- they are defined at series level
        # and inherited via field defaults.
        if r.component:
            table = s3db.cms_post
            record = r.record
            table.module.readable = table.module.writable = False
            table.avatar.readable = table.avatar.writable = False
            table.avatar.default = record.avatar
            table.replies.readable = table.replies.writable = False
            table.replies.default = record.replies
            table.roles_permitted.readable = False
            table.roles_permitted.writable = False
            table.roles_permitted.default = record.roles_permitted
            # Titles do show up
            table.name.comment = ""
        return True
    response.s3.prep = prep

    return s3_rest_controller(rheader=s3db.cms_rheader)
# -----------------------------------------------------------------------------
def blog():
    """
    RESTful CRUD controller for display of a series of posts as a full-page
    read-only showing last 5 items in reverse time order
    """

    def prep(r):
        # Pre-process: read-only view, so suppress the list-add form.
        s3mgr.configure(r.tablename, listadd=False)
        return True
    response.s3.prep = prep

    def postp(r, output):
        # Post-process: render single records through the blog template.
        if r.record:
            response.view = "cms/blog.html"
        return output
    response.s3.postp = postp

    return s3_rest_controller("cms", "series")
# -----------------------------------------------------------------------------
def post():
    """ RESTful CRUD controller """

    # Custom method so comments can be managed via .../post/<id>/discuss
    s3mgr.model.set_method(module, resourcename,
                           method="discuss",
                           action=discuss)

    # Posts belonging to a series are managed through series(); show only
    # standalone posts here.
    tablename = "%s_%s" % (module, resourcename)
    table = s3db[tablename]
    response.s3.filter = (table.series_id == None)

    return s3_rest_controller(rheader=s3db.cms_rheader)
# -----------------------------------------------------------------------------
def page():
    """
    RESTful CRUD controller for display of a post as a full-page read-only
    - with optional Comments
    """

    # Pre-process
    def prep(r):
        # Read-only view: suppress the list-add form
        s3mgr.configure(r.tablename, listadd=False)
        return True
    response.s3.prep = prep

    # Post-process
    def postp(r, output):
        if r.record:
            # Render just the post body through the custom page view
            output = {"item": r.record.body}
            response.view = "cms/page.html"
            if r.record.replies:
                # Comments are enabled: load CKEditor (and its jQuery
                # adapter) and inject the comment-reply helper script.
                ckeditor = URL(c="static", f="ckeditor", args="ckeditor.js")
                response.s3.scripts.append(ckeditor)
                adapter = URL(c="static", f="ckeditor", args=["adapters",
                                                              "jquery.js"])
                response.s3.scripts.append(adapter)
                # Toolbar options: http://docs.cksource.com/CKEditor_3.x/Developers_Guide/Toolbar
                # NOTE(review): this JS block is duplicated in discuss() --
                # candidate for a shared helper.
                js = "".join(("""
S3.i18n.reply = '""", str(T("Reply")), """';
var img_path = S3.Ap.concat('/static/img/jCollapsible/');
var ck_config = {toolbar:[['Bold','Italic','-','NumberedList','BulletedList','-','Link','Unlink','-','Smiley','-','Source','Maximize']],toolbarCanCollapse:false,removePlugins:'elementspath'};
function comment_reply(id) {
$('#cms_comment_post_id__row').hide();
$('#cms_comment_post_id__row1').hide();
$('#comment-title').html(S3.i18n.reply);
var editor = $('#cms_comment_body').ckeditorGet();
editor.destroy();
$('#cms_comment_body').ckeditor(ck_config);
$('#comment-form').insertAfter($('#comment-' + id));
$('#cms_comment_parent').val(id);
var post_id = $('#comment-' + id).attr('post_id');
$('#cms_comment_post_id').val(post_id);
}"""))
                response.s3.js_global.append(js)
        return output
    response.s3.postp = postp

    output = s3_rest_controller("cms", "post")
    return output
# =============================================================================
# Comments
# =============================================================================
def discuss(r, **attr):
    """ Custom Method to manage the discussion of a Post """

    id = r.id

    # Add the RHeader to maintain consistency with the other pages
    rheader = s3db.cms_rheader(r)

    # Load CKEditor (and its jQuery adapter) for the comment form
    ckeditor = URL(c="static", f="ckeditor", args="ckeditor.js")
    response.s3.scripts.append(ckeditor)
    adapter = URL(c="static", f="ckeditor", args=["adapters",
                                                  "jquery.js"])
    response.s3.scripts.append(adapter)
    # Toolbar options: http://docs.cksource.com/CKEditor_3.x/Developers_Guide/Toolbar
    # NOTE(review): this JS block is duplicated in page() -- candidate for
    # a shared helper.
    js = "".join(("""
S3.i18n.reply = '""", str(T("Reply")), """';
var img_path = S3.Ap.concat('/static/img/jCollapsible/');
var ck_config = {toolbar:[['Bold','Italic','-','NumberedList','BulletedList','-','Link','Unlink','-','Smiley','-','Source','Maximize']],toolbarCanCollapse:false,removePlugins:'elementspath'};
function comment_reply(id) {
$('#cms_comment_post_id__row').hide();
$('#cms_comment_post_id__row1').hide();
$('#comment-title').html(S3.i18n.reply);
var editor = $('#cms_comment_body').ckeditorGet();
editor.destroy();
$('#cms_comment_body').ckeditor(ck_config);
$('#comment-form').insertAfter($('#comment-' + id));
$('#cms_comment_parent').val(id);
var post_id = $('#comment-' + id).attr('post_id');
$('#cms_comment_post_id').val(post_id);
}"""))
    response.s3.js_global.append(js)

    response.view = "cms/discuss.html"
    return dict(rheader=rheader,
                id=id)
# -----------------------------------------------------------------------------
def comment_parse(comment, comments, post_id=None):
    """
    Parse a Comment into a nested LI element (recursing into replies)

    @param: comment - a gluon.sql.Row: the current comment
    @param: comments - a gluon.sql.Rows: full list of comments
    @param: post_id - a reference ID: optional post commented on
    """

    # Resolve the author's display name: anonymous unless the auth user
    # can be joined to a person record.
    author = B(T("Anonymous"))
    if comment.created_by:
        utable = s3db.auth_user
        ptable = s3db.pr_person
        ltable = s3db.pr_person_user
        query = (utable.id == comment.created_by)
        left = [ltable.on(ltable.user_id == utable.id),
                ptable.on(ptable.pe_id == ltable.pe_id)]
        row = db(query).select(utable.email,
                               ptable.first_name,
                               ptable.middle_name,
                               ptable.last_name,
                               left=left, limitby=(0, 1)).first()
        if row:
            person = row.pr_person
            user = row[utable._tablename]
            username = s3_fullname(person)
            email = user.email.strip().lower()
            # Gravatar-style link keyed on the md5 of the email address.
            # NOTE(review): 'md5' is a Python-2-only module (hashlib is the
            # replacement), and the URL appears to lack the 'avatar/' path
            # segment -- verify against the Gravatar API.
            import md5
            hash = md5.new(email).hexdigest()
            url = "http://www.gravatar.com/%s" % hash
            author = B(A(username, _href=url, _target="top"))

    # Prefix the header with "re: <post name>" when this call isn't
    # already scoped to a post.
    if not post_id and comment.post_id:
        s3mgr.load("cms_post")
        post = "re: %s" % db.cms_post[comment.post_id].name
        header = DIV(author, " ", post)
        post_id = comment.post_id
    else:
        header = author
    # Build this comment's list node (avatar, body, date, reply action)
    thread = LI(DIV(s3_avatar_represent(comment.created_by),
                    DIV(DIV(header,
                            _class="comment-header"),
                        DIV(XML(comment.body)),
                        _class="comment-text"),
                        DIV(DIV(comment.created_on,
                                _class="comment-date"),
                            DIV(A(T("Reply"),
                                  _class="action-btn"),
                                _onclick="comment_reply(%i);" % comment.id,
                                _class="comment-reply"),
                            _class="fright"),
                    _id="comment-%i" % comment.id,
                    _post_id=post_id,
                    _class="comment-box"))

    # Add the children of this thread
    # NOTE(review): the loop variable 'comment' shadows the parameter of
    # the same name; 'id' was captured beforehand so the parent check
    # still works, but the shadowing is fragile.
    children = UL(_class="children")
    id = comment.id
    count = 0
    for comment in comments:
        if comment.parent == id:
            count = 1
            child = comment_parse(comment, comments, post_id=post_id)
            children.append(child)
    if count == 1:
        thread.append(children)

    return thread
# -----------------------------------------------------------------------------
def comments():
    """
    Function accessed by AJAX to handle Comments
    - for discuss() & page()
    """

    # The post being commented on arrives as the first request argument
    try:
        post_id = request.args[0]
    except:
        raise HTTP(400)

    table = s3db.cms_comment

    # Form to add a new Comment (post_id is fixed and hidden)
    table.post_id.default = post_id
    table.post_id.writable = table.post_id.readable = False
    form = crud.create(table)

    # List of existing Comments
    comments = db(table.post_id == post_id).select(table.id,
                                                   table.parent,
                                                   table.body,
                                                   table.created_by,
                                                   table.created_on)

    output = UL(_id="comments")
    for comment in comments:
        if not comment.parent:
            # Show top-level threads at top-level
            # (comment_parse recurses into the replies)
            thread = comment_parse(comment, comments, post_id=post_id)
            output.append(thread)

    # Also see the outer discuss()
    script = "".join(("""
$('#comments').collapsible({xoffset:'-5',yoffset:'50',imagehide:img_path+'arrow-down.png',imageshow:img_path+'arrow-right.png',defaulthide:false});
$('#cms_comment_parent__row1').hide();
$('#cms_comment_parent__row').hide();
$('#cms_comment_body').ckeditor(ck_config);
$('#submit_record__row input').click(function(){$('#comment-form').hide();$('#cms_comment_body').ckeditorGet().destroy();return true;});
"""))

    # No layout in this output!
    #response.s3.jquery_ready.append(script)

    output = DIV(output,
                 DIV(H4(T("New Post"),
                        _id="comment-title"),
                     form,
                     _id="comment-form",
                     _class="clear"),
                 SCRIPT(script))

    return XML(output)
# -----------------------------------------------------------------------------
def posts():
    """
    Function accessed by AJAX to handle a Series of Posts
    """

    # First request argument: the series; second (optional): how many
    # recent posts to return, defaulting to 5.
    try:
        series_id = request.args[0]
    except:
        raise HTTP(400)

    try:
        recent = request.args[1]
    except:
        recent = 5

    table = s3db.cms_post

    # List of Posts in this Series
    query = (table.series_id == series_id)
    posts = db(query).select(table.name,
                             table.body,
                             table.avatar,
                             table.created_by,
                             table.created_on,
                             limitby=(0, recent))

    output = UL(_id="comments")
    for post in posts:
        # Resolve the author's display name (anonymous unless the auth
        # user joins to a person record).
        # NOTE(review): this duplicates the author-resolution logic in
        # comment_parse() -- candidate for a shared helper; 'md5' is also
        # a Python-2-only module (hashlib replaces it).
        author = B(T("Anonymous"))
        if post.created_by:
            utable = s3db.auth_user
            ptable = s3db.pr_person
            ltable = s3db.pr_person_user
            query = (utable.id == post.created_by)
            left = [ltable.on(ltable.user_id == utable.id),
                    ptable.on(ptable.pe_id == ltable.pe_id)]
            row = db(query).select(utable.email,
                                   ptable.first_name,
                                   ptable.middle_name,
                                   ptable.last_name,
                                   left=left, limitby=(0, 1)).first()
            if row:
                person = row.pr_person
                user = row[utable._tablename]
                username = s3_fullname(person)
                email = user.email.strip().lower()
                import md5
                hash = md5.new(email).hexdigest()
                url = "http://www.gravatar.com/%s" % hash
                author = B(A(username, _href=url, _target="top"))

        header = H4(post.name)
        if post.avatar:
            avatar = s3_avatar_represent(post.created_by)
        else:
            avatar = ""

        # Render the post in the same comment-box layout used elsewhere
        row = LI(DIV(avatar,
                     DIV(DIV(header,
                             _class="comment-header"),
                         DIV(XML(post.body)),
                         _class="comment-text"),
                         DIV(DIV(post.created_on,
                                 _class="comment-date"),
                             _class="fright"),
                         DIV(author,
                             _class="comment-footer"),
                     _class="comment-box"))
        output.append(row)

    return XML(output)
# END =========================================================================
| |
#!/usr/bin/env python
#
# Azure Linux extension
#
# Linux Azure Diagnostic Extension (Current version is specified in manifest.xml)
# Copyright (c) Microsoft Corporation
# All rights reserved.
# MIT License
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import tempfile
import re
import string
import traceback
import xml.dom.minidom
import binascii
from Utils.WAAgentUtil import waagent
from Utils.lad_exceptions import LadLoggingConfigException
def get_extension_operation_type(command):
    """Map a command-line verb (optionally prefixed with '-' or '/') to the
    corresponding WALA extension operation type. Returns None when the
    command is not recognized."""
    # LAD-specific extension operation (invoked from "./diagnostic.py -enable")
    if re.match(r"^([-/]*)(daemon)", command):
        return "Daemon"
    # Standard operations map 1:1 onto WALAEventOperation attributes.
    # waagent is only touched on a match, as in the original if-chain.
    for verb in ('enable', 'install', 'disable', 'uninstall', 'update'):
        if re.match(r"^([-/]*)(" + verb + r")", command):
            return getattr(waagent.WALAEventOperation, verb.capitalize())
    return None
def wala_event_type_for_telemetry(ext_op_type):
    """Translate an extension operation type into the event type used for
    telemetry: the LAD-specific 'Daemon' operation is reported as
    'HeartBeat'; every other operation type is reported as-is."""
    if ext_op_type == "Daemon":
        return "HeartBeat"
    return ext_op_type
def get_storage_endpoint_with_account(account, endpoint_without_account):
    """Build the full table-storage endpoint URI for a storage account.

    The account name and the ".table." subdomain are spliced into the
    given endpoint; when no endpoint is supplied, the default public
    Azure endpoint is used.
    :param account: Storage account name.
    :param endpoint_without_account: Endpoint URI without the account part
                                     (may be None or empty).
    :return: Endpoint URI including the account and '.table.' subdomain.
    """
    if not endpoint_without_account:
        return 'https://' + account + '.table.core.windows.net'
    scheme, sep, host = endpoint_without_account.partition('//')
    if sep:
        # Scheme present: splice the account between scheme and host.
        return scheme + '//' + account + '.table.' + host
    # Bare host: assume https.
    return 'https://' + account + '.table.' + scheme
def check_suspected_memory_leak(pid, logger_err):
    """
    Check suspected memory leak of a process, by inspecting /proc/<pid>/status's VmRSS value.
    :param pid: ID of the process we are checking.
    :param logger_err: Error logging function (e.g., hutil.error)
    :return (bool, int): Bool indicating whether memory leak is suspected. Int for memory usage in KB in true case.
    """
    threshold_in_KB = 2000000  # Roughly 2GB. TODO: Make it configurable or automatically calculated
    usage_in_KB = 0
    leak_suspected = False

    try:
        # "VmRSS" is the usable signal here; "VmSize" for some reason
        # starts out very high (>2000000), so it can't be used.
        status_path = "/proc/{0}/status".format(pid)
        with open(status_path) as status_file:
            for line in status_file:
                if not line.startswith("VmRSS:"):
                    continue
                # Example line: "VmRSS:     33904 kB"
                usage_in_KB = int(line.split()[1])
                leak_suspected = usage_in_KB > threshold_in_KB
                break
    except Exception as e:
        # Not to throw in case any statement above fails (e.g., invalid pid). Just log.
        logger_err("Failed to check memory usage of pid={0}.\nError: {1}\nTrace:\n{2}".format(
            pid, e, traceback.format_exc()))
    return leak_suspected, usage_in_KB
class LadLogHelper(object):
    """
    Various LAD log helper functions encapsulated here, so that we don't have to tag along all the parameters.
    All dependencies (loggers, waagent event adder, status reporter) are injected as callables.
    """

    def __init__(self, logger_log, logger_error, waagent_event_adder, status_reporter, ext_name, ext_ver):
        """
        Constructor
        :param logger_log: Normal logging function (e.g., hutil.log)
        :param logger_error: Error logging function (e.g., hutil.error)
        :param waagent_event_adder: waagent event add function (waagent.AddExtensionEvent)
        :param status_reporter: waagent/extension status report function (hutil.do_status_report)
        :param ext_name: Extension name (hutil.get_name())
        :param ext_ver: Extension version (hutil.get_extension_version())
        """
        self._logger_log = logger_log
        self._logger_error = logger_error
        self._waagent_event_adder = waagent_event_adder
        self._status_reporter = status_reporter
        self._ext_name = ext_name
        self._ext_ver = ext_ver

    def log_suspected_memory_leak_and_kill_mdsd(self, memory_usage_in_KB, mdsd_process, ext_op):
        """
        Log suspected-memory-leak message both in ext logs and as a waagent event.
        :param memory_usage_in_KB: Memory usage in KB (to be included in the log)
        :param mdsd_process: Python Process object for the mdsd process to kill
        :param ext_op: Extension operation type to use for waagent event (waagent.WALAEventOperation.HeartBeat)
        :return: None
        """
        # NOTE(review): the message says "Virtual memory usage" but the KB
        # value comes from check_suspected_memory_leak's VmRSS (resident)
        # reading -- confirm the intended wording.
        memory_leak_msg = "Suspected mdsd memory leak (Virtual memory usage: {0}MB). " \
                          "Recycling mdsd to self-mitigate.".format(int((memory_usage_in_KB + 1023) / 1024))
        self._logger_log(memory_leak_msg)
        # Add a telemetry for a possible statistical analysis
        self._waagent_event_adder(name=self._ext_name,
                                  op=ext_op,
                                  isSuccess=True,
                                  version=self._ext_ver,
                                  message=memory_leak_msg)
        mdsd_process.kill()

    def report_mdsd_dependency_setup_failure(self, ext_event_type, failure_msg):
        """
        Report mdsd dependency setup failure to 3 destinations (ext log, status report, agent event)
        :param ext_event_type: Type of extension event being performed (e.g., 'HeartBeat')
        :param failure_msg: Dependency setup failure message to be added to the logs
        :return: None
        """
        dependencies_err_log_msg = "Failed to set up mdsd dependencies: {0}".format(failure_msg)
        self._logger_error(dependencies_err_log_msg)
        # status code '1' == error
        self._status_reporter(ext_event_type, 'error', '1', dependencies_err_log_msg)
        self._waagent_event_adder(name=self._ext_name,
                                  op=ext_event_type,
                                  isSuccess=False,
                                  version=self._ext_ver,
                                  message=dependencies_err_log_msg)

    def log_and_report_failed_config_generation(self, ext_event_type, config_invalid_reason, redacted_handler_settings):
        """
        Report failed config generation from configurator.generate_all_configs().
        :param str ext_event_type: Type of extension event being performed (most likely 'HeartBeat')
        :param str config_invalid_reason: Msg from configurator.generate_all_configs()
        :param str redacted_handler_settings: JSON string for the extension's protected/public settings after redacting
                    secrets in the protected settings. This is for logging to Geneva for diagnostic purposes.
        :return: None
        """
        config_invalid_log = "Invalid config settings given: " + config_invalid_reason + \
                             ". Can't proceed, although this install/enable operation is reported as successful so " \
                             "the VM can complete successful startup."
        self._logger_log(config_invalid_log)
        # Reported as 'success' deliberately so VM provisioning completes.
        self._status_reporter(ext_event_type, 'success', '0', config_invalid_log)
        self._waagent_event_adder(name=self._ext_name,
                                  op=ext_event_type,
                                  isSuccess=True,  # Note this is True, because it is a user error.
                                  version=self._ext_ver,
                                  message="Invalid handler settings encountered: {0}".format(redacted_handler_settings))

    def log_and_report_invalid_mdsd_cfg(self, ext_event_type, config_validate_cmd_msg, mdsd_cfg_xml):
        """
        Report invalid result from 'mdsd -v -c xmlCfg.xml'
        :param ext_event_type: Type of extension event being performed (most likely 'HeartBeat')
        :param config_validate_cmd_msg: Output of 'mdsd -v -c xmlCfg.xml'
        :param mdsd_cfg_xml: Content of xmlCfg.xml to be sent to Geneva
        :return: None
        """
        message = "Problem(s) detected in generated mdsd configuration. Can't enable, although this install/enable " \
                  "operation is reported as successful so the VM can complete successful startup. Linux Diagnostic " \
                  "Extension will exit. Config validation message: {0}".format(config_validate_cmd_msg)
        self._logger_log(message)
        # Reported as 'success' deliberately so VM provisioning completes.
        self._status_reporter(ext_event_type, 'success', '0', message)
        self._waagent_event_adder(name=self._ext_name,
                                  op=ext_event_type,
                                  isSuccess=True,  # Note this is True, because it is a user error.
                                  version=self._ext_ver,
                                  message="Problem(s) detected in generated mdsd configuration: {0}".format(mdsd_cfg_xml))
def read_uuid():
    """
    Read the VM's product UUID from sysfs (DMI).

    :rtype: str
    :return: Non-empty UUID string read from /sys/class/dmi/id/product_uuid.
    :raises LadLoggingConfigException: If the uuid file can't be opened/read or is empty.
    """
    uuid_file_path = '/sys/class/dmi/id/product_uuid'
    try:
        with open(uuid_file_path) as f:
            uuid = f.readline().strip()
    except Exception as e:
        # Include the underlying error; previously 'e' was caught but silently discarded,
        # which made read failures impossible to diagnose from logs.
        raise LadLoggingConfigException(
            'read_uuid() failed: Unable to open uuid file {0}: {1}'.format(uuid_file_path, e))
    if not uuid:
        raise LadLoggingConfigException('read_uuid() failed: Empty content in uuid file {0}'.format(uuid_file_path))
    return uuid
def encrypt_secret_with_cert(run_command, logger, cert_path, secret):
    """
    update_account_settings() helper. Encrypts a secret with the given cert via 'openssl smime'.

    :param run_command: Function to run an arbitrary command; returns (exit_status, output_msg)
    :param logger: Function to log error messages
    :param cert_path: Cert file path
    :param secret: Secret to encrypt
    :return: Encrypted secret as an uppercase hex (bytes) string. None if openssl command exec fails.
    """
    f = tempfile.NamedTemporaryFile(suffix='mdsd', delete=True)
    try:
        # Have openssl write to our temporary file (on Linux we don't have an exclusive lock on the temp file).
        # openssl smime, when asked to put output in a file, simply overwrites the file; it does not unlink/creat or
        # creat/rename, so reading back through our own handle sees openssl's output.
        cmd = "echo -n '{0}' | openssl smime -encrypt -outform DER -out {1} {2}"
        cmd_to_run = cmd.format(secret, f.name, cert_path)
        ret_status, ret_msg = run_command(cmd_to_run, should_log=False)
        if ret_status != 0:  # was 'is not 0': identity comparison on an int is unreliable
            logger("Encrypting storage secret failed with the following message: " + ret_msg)
            return None
        encrypted_secret = f.read()
    finally:
        f.close()  # Deletes the temp file (also on the failure path, which previously leaked it)
    return binascii.b2a_hex(encrypted_secret).upper()
def tail(log_file, output_size=1024):
    """
    Return up to the last 'output_size' bytes of a file, decoded as ASCII with
    non-printable/non-ASCII characters dropped.

    :param str log_file: Path of the file to tail
    :param int output_size: Maximum number of bytes to return from the end of the file
    :rtype: str
    :return: Tail of the file, or "" if the file does not exist.
    """
    if not os.path.exists(log_file):
        return ""
    pos = min(output_size, os.path.getsize(log_file))
    # Open in binary mode: a relative seek from the end is only valid on binary
    # files in Python 3, and the previous text-mode seek/str.decode combination
    # broke there (filter() is also lazy on Python 3, so decode never ran on str).
    with open(log_file, "rb") as log:
        log.seek(-pos, os.SEEK_END)
        buf = log.read(pos)
    text = buf.decode("ascii", "ignore")
    return ''.join(ch for ch in text if ch in string.printable)
def update_selinux_settings_for_rsyslogomazuremds(run_command, ext_dir):
    """
    Compile and install/upgrade the bundled 'lad_mdsd' SELinux policy module when
    SELinux tooling is present on the machine; no-op otherwise.

    :param run_command: Function to run an arbitrary shell command
    :param ext_dir: Extension directory containing the bundled lad_mdsd.te module source
    :return: None
    """
    # This is still needed for Redhat-based distros, which still require SELinux to be allowed
    # for even Unix domain sockets.
    # Anyway, we no longer use 'semanage' (so no need to install policycoreutils-python).
    # We instead compile from the bundled SELinux module def for lad_mdsd
    has_semodule = os.path.exists("/usr/sbin/semodule") or os.path.exists("/sbin/semodule")
    if not has_semodule:
        return
    # TODO Either check the output of these commands or run without capturing output
    for template in ('checkmodule -M -m -o {0}/lad_mdsd.mod {0}/lad_mdsd.te',
                     'semodule_package -o {0}/lad_mdsd.pp -m {0}/lad_mdsd.mod',
                     'semodule -u {0}/lad_mdsd.pp'):
        run_command(template.format(ext_dir))
def get_mdsd_proxy_config(waagent_setting, ext_settings, logger):
    """
    Get the 'mdsdHttpProxy' setting to pass to mdsd, consulting sources in priority order:
    waagent.conf, then the extension's protected settings, then its public settings.

    :param waagent_setting: waagent.HttpProxyConfigString from /etc/waagent.conf (highest priority)
    :param ext_settings: Extension settings object exposing read_protected_config/read_public_config
    :param logger: Function to log messages
    :rtype: str
    :return: Stripped proxy setting string, or '' if not configured or not a valid string.
    """
    # mdsd http proxy setting
    proxy_setting_name = 'mdsdHttpProxy'
    proxy_config = waagent_setting  # waagent.HttpProxyConfigString from /etc/waagent.conf has highest priority
    if not proxy_config:
        proxy_config = ext_settings.read_protected_config(proxy_setting_name)  # Protected setting has next priority
    if not proxy_config:
        proxy_config = ext_settings.read_public_config(proxy_setting_name)
    # Validate/strip/return regardless of which source supplied the value. Previously this
    # whole branch was nested under the public-config lookup, so a proxy coming from
    # waagent.conf or from protected settings was read but silently dropped (fell through
    # to the final "return ''").
    if not isinstance(proxy_config, basestring):
        logger('Error: mdsdHttpProxy config is not a string. Ignored.')
    else:
        proxy_config = proxy_config.strip()
        if proxy_config:
            logger("mdsdHttpProxy setting was given and will be passed to mdsd, "
                   "but not logged here in case there's a password in it")
            return proxy_config
    return ''
def escape_nonalphanumerics(data):
    """
    Escape every non-alphanumeric character in a string as ':' followed by its
    4-digit uppercase hex code point (e.g. ' ' -> ':0020'). Alphanumerics pass through.

    :param str data: String to escape
    :rtype: str
    :return: Escaped string
    """
    escaped = []
    for ch in data:
        if ch.isalnum():
            escaped.append(ch)
        else:
            escaped.append(':%04X' % ord(ch))
    return ''.join(escaped)
# TODO Should this be placed in WAAgentUtil.py?
def get_deployment_id_from_hosting_env_cfg(waagent_dir, logger_log, logger_error):
    """
    Get deployment ID from waagent dir's HostingEnvironmentConfig.xml.

    :param waagent_dir: Waagent dir path (/var/lib/waagent)
    :param logger_log: Normal logging function (hutil.log)
    :param logger_error: Error logging function (hutil.error)
    :return: Obtained deployment ID string if the hosting env cfg xml exists & deployment ID is found.
             "unknown" if the xml exists, but deployment ID can't be found.
             None if the xml does not exist.
    """
    identity = "unknown"
    env_cfg_path = os.path.join(waagent_dir, "HostingEnvironmentConfig.xml")
    if not os.path.exists(env_cfg_path):
        # Not in a hosted environment: return None per the documented contract
        # (previously returned "unknown", contradicting the docstring).
        logger_log("No Deployment ID (not running in a hosted environment)")
        return None
    try:
        with open(env_cfg_path, 'r') as env_cfg_file:
            xml_text = env_cfg_file.read()
        dom = xml.dom.minidom.parseString(xml_text)
        deployment = dom.getElementsByTagName("Deployment")
        name = deployment[0].getAttribute("name")
        if name:
            identity = name
            logger_log("Deployment ID found: {0}.".format(identity))
    except Exception as e:
        # use fallback identity ("unknown"), but log the failure for diagnostics
        logger_error("Failed to retrieve deployment ID. Error:{0}\nStacktrace: {1}".format(e, traceback.format_exc()))
    return identity
def write_lad_pids_to_file(pid_file_path, py_pid, mdsd_pid=None):
    """
    Write LAD process IDs to file, one per line.

    :param str pid_file_path: Path of the file to be written
    :param int py_pid: PID of diagnostic.py
    :param int mdsd_pid: PID of mdsd or None (when called before mdsd is started)
    :return: None
    """
    pids = [str(py_pid)]
    if mdsd_pid is not None:
        pids.append(str(mdsd_pid))
    with open(pid_file_path, 'w') as pid_file:
        pid_file.write('\n'.join(pids) + '\n')
def append_string_to_file(string, filepath):
    """
    Append string content to the end of a file, creating it if needed.

    :param string: A str object that holds the content to be appended to the file
    :param filepath: Path to the file to be appended
    :return: None
    """
    out = open(filepath, 'a')
    try:
        out.write(string)
    finally:
        out.close()
def read_file_to_string(filepath):
    """
    Read entire file and return it as string. If file can't be read, return "Can't read <filepath>".

    :param str filepath: Path of the file to read
    :rtype: str
    :return: Content of the file in a single string, or "Can't read <filepath>..." if it can't be read.
    """
    try:
        with open(filepath) as infile:
            content = infile.read()
    except Exception as e:
        return "Can't read {0}. Exception thrown: {1}".format(filepath, e)
    return content
| |
from pyrep.backend import sim, utils
from pyrep.objects import Object
from pyrep.objects.dummy import Dummy
from pyrep.robots.configuration_paths.arm_configuration_path import (
ArmConfigurationPath)
from pyrep.robots.robot_component import RobotComponent
from pyrep.objects.cartesian_path import CartesianPath
from pyrep.errors import ConfigurationError, ConfigurationPathError, IKError
from pyrep.const import ConfigurationPathAlgorithms as Algos
from pyrep.const import PYREP_SCRIPT_TYPE
from typing import List, Union
import numpy as np
import warnings
class Arm(RobotComponent):
    """Base class representing a robot arm with path planning support.
    """

    def __init__(self, count: int, name: str, num_joints: int,
                 base_name: str = None,
                 max_velocity=1.0, max_acceleration=4.0, max_jerk=1000):
        """Count is used for when we have multiple copies of arms.

        :param count: 0 for the original scene object; N selects the Nth copy.
        :param name: Base name of the arm in the scene.
        :param num_joints: Number of joints (joint objects are '<name>_jointK').
        :param base_name: Name of the base object, if different from name.
        :param max_velocity: Used for motion planning.
        :param max_acceleration: Used for motion planning.
        :param max_jerk: Used for motion planning.
        """
        joint_names = ['%s_joint%d' % (name, i+1) for i in range(num_joints)]
        super().__init__(count, name, joint_names, base_name)

        # Used for motion planning
        self.max_velocity = max_velocity
        self.max_acceleration = max_acceleration
        self.max_jerk = max_jerk

        # Motion planning handles. Scene copies carry a '#N' suffix.
        suffix = '' if count == 0 else '#%d' % (count - 1)
        self._ik_target = Dummy('%s_target%s' % (name, suffix))
        self._ik_tip = Dummy('%s_tip%s' % (name, suffix))
        self._ik_group = sim.simGetIkGroupHandle('%s_ik%s' % (name, suffix))
        self._collision_collection = sim.simGetCollectionHandle(
            '%s_arm%s' % (name, suffix))

    def set_ik_element_properties(self, constraint_x=True, constraint_y=True,
                                  constraint_z=True,
                                  constraint_alpha_beta=True,
                                  constraint_gamma=True) -> None:
        """Enable/disable per-axis pose constraints on this arm's IK element.

        Each flag ORs the corresponding bit into the constraint bitmask passed
        to the simulator.
        """
        constraints = 0
        if constraint_x:
            constraints |= sim.sim_ik_x_constraint
        if constraint_y:
            constraints |= sim.sim_ik_y_constraint
        if constraint_z:
            constraints |= sim.sim_ik_z_constraint
        if constraint_alpha_beta:
            constraints |= sim.sim_ik_alpha_beta_constraint
        if constraint_gamma:
            constraints |= sim.sim_ik_gamma_constraint
        sim.simSetIkElementProperties(
            ikGroupHandle=self._ik_group,
            tipDummyHandle=self._ik_tip.get_handle(),
            constraints=constraints,
            precision=None,  # None keeps the simulator's current precision
            weight=None,     # None keeps the simulator's current weight
        )

    def set_ik_group_properties(self, resolution_method='pseudo_inverse', max_iterations=6, dls_damping=0.1) -> None:
        """Configure the IK group's resolution method, iteration cap and DLS damping.

        :raises Exception: If resolution_method is not one of the supported names.
        """
        try:
            res_method = {'pseudo_inverse': sim.sim_ik_pseudo_inverse_method,
                          'damped_least_squares': sim.sim_ik_damped_least_squares_method,
                          'jacobian_transpose': sim.sim_ik_jacobian_transpose_method}[resolution_method]
        except KeyError:
            raise Exception('Invalid resolution method,'
                            'Must be one of ["pseudo_inverse" | "damped_least_squares" | "jacobian_transpose"]')
        sim.simSetIkGroupProperties(
            ikGroupHandle=self._ik_group,
            resolutionMethod=res_method,
            maxIterations=max_iterations,
            damping=dls_damping
        )

    def solve_ik_via_sampling(self,
                              position: Union[List[float], np.ndarray],
                              euler: Union[List[float], np.ndarray] = None,
                              quaternion: Union[List[float], np.ndarray] = None,
                              ignore_collisions: bool = False,
                              trials: int = 300,
                              max_configs: int = 1,
                              distance_threshold: float = 0.65,
                              max_time_ms: int = 10,
                              relative_to: Object = None
                              ) -> np.ndarray:
        """Solves an IK group and returns the calculated joint values.

        This IK method performs random searches for manipulator configurations
        that match the given end-effector pose in space. When the tip pose
        is close enough then IK is computed in order to try to bring the
        tip onto the target. This is the method that should be used when
        the start pose is far from the end pose.

        We generate 'max_configs' number of samples within X number of 'trials',
        before ranking them according to angular distance.

        Must specify either rotation in euler or quaternions, but not both!

        :param position: The x, y, z position of the target.
        :param euler: The x, y, z orientation of the target (in radians).
        :param quaternion: A list containing the quaternion (x,y,z,w).
        :param ignore_collisions: If collision checking should be disabled.
        :param trials: The maximum number of attempts to reach max_configs.
        :param max_configs: The maximum number of configurations we want to
            generate before sorting them.
        :param distance_threshold: Distance indicating when IK should be
            computed in order to try to bring the tip onto the target.
        :param max_time_ms: Maximum time in ms spend searching for
            each configuration.
        :param relative_to: Indicates relative to which reference frame we want
            the target pose. Specify None to retrieve the absolute pose,
            or an Object relative to whose reference frame we want the pose.
        :raises: ConfigurationError if no joint configuration could be found.

        :return: 'max_configs' number of joint configurations, ranked according
            to angular distance.
        """
        if not ((euler is None) ^ (quaternion is None)):
            raise ConfigurationError(
                'Specify either euler or quaternion values, but not both.')

        # Temporarily move the IK target to the requested pose; restored below.
        prev_pose = self._ik_target.get_pose()
        self._ik_target.set_position(position, relative_to)
        if euler is not None:
            self._ik_target.set_orientation(euler, relative_to)
        elif quaternion is not None:
            self._ik_target.set_quaternion(quaternion, relative_to)

        handles = [j.get_handle() for j in self.joints]
        cyclics, intervals = self.get_joint_intervals()
        low_limits, max_limits = list(zip(*intervals))
        # If there are huge intervals, then limit them
        low_limits = np.maximum(low_limits, -np.pi*2).tolist()
        max_limits = np.minimum(max_limits, np.pi*2).tolist()

        collision_pairs = []
        if not ignore_collisions:
            collision_pairs = [self._collision_collection, sim.sim_handle_all]

        metric = joint_options = None
        valid_joint_positions = []
        for _ in range(trials):
            config = sim.simGetConfigForTipPose(
                self._ik_group, handles, distance_threshold, int(max_time_ms),
                metric, collision_pairs, joint_options, low_limits, max_limits)
            if len(config) > 0:
                valid_joint_positions.append(config)
            if len(valid_joint_positions) >= max_configs:
                break

        self._ik_target.set_pose(prev_pose)
        if len(valid_joint_positions) == 0:
            raise ConfigurationError(
                'Could not find a valid joint configuration for desired '
                'end effector pose.')

        if len(valid_joint_positions) > 1:
            current_config = np.array(self.get_joint_positions())
            # Sort based on angular distance
            valid_joint_positions.sort(
                key=lambda x: np.linalg.norm(current_config - x))

        return np.array(valid_joint_positions)

    def get_configs_for_tip_pose(self,
                                 position: Union[List[float], np.ndarray],
                                 euler: Union[List[float], np.ndarray] = None,
                                 quaternion: Union[List[float], np.ndarray] = None,
                                 ignore_collisions=False,
                                 trials=300, max_configs=60,
                                 relative_to: Object = None
                                 ) -> List[List[float]]:
        """Gets a valid joint configuration for a desired end effector pose.

        Deprecated: thin wrapper kept for backward compatibility.

        Must specify either rotation in euler or quaternions, but not both!

        :param position: The x, y, z position of the target.
        :param euler: The x, y, z orientation of the target (in radians).
        :param quaternion: A list containing the quaternion (x,y,z,w).
        :param ignore_collisions: If collision checking should be disabled.
        :param trials: The maximum number of attempts to reach max_configs
        :param max_configs: The maximum number of configurations we want to
            generate before ranking them.
        :param relative_to: Indicates relative to which reference frame we want
            the target pose. Specify None to retrieve the absolute pose,
            or an Object relative to whose reference frame we want the pose.
        :raises: ConfigurationError if no joint configuration could be found.
        :return: A list of valid joint configurations for the desired
            end effector pose.
        """
        warnings.warn("Please use 'solve_ik_via_sampling' instead.",
                      DeprecationWarning)
        return list(self.solve_ik_via_sampling(
            position, euler, quaternion, ignore_collisions, trials,
            max_configs, relative_to=relative_to))

    def solve_ik_via_jacobian(
            self, position: Union[List[float], np.ndarray],
            euler: Union[List[float], np.ndarray] = None,
            quaternion: Union[List[float], np.ndarray] = None,
            relative_to: Object = None) -> List[float]:
        """Solves an IK group and returns the calculated joint values.

        This IK method performs a linearisation around the current robot
        configuration via the Jacobian. The linearisation is valid when the
        start and goal pose are not too far away, but after a certain point,
        linearisation will no longer be valid. In that case, the user is better
        off using 'solve_ik_via_sampling'.

        Must specify either rotation in euler or quaternions, but not both!

        :param position: The x, y, z position of the target.
        :param euler: The x, y, z orientation of the target (in radians).
        :param quaternion: A list containing the quaternion (x,y,z,w).
        :param relative_to: Indicates relative to which reference frame we want
            the target pose. Specify None to retrieve the absolute pose,
            or an Object relative to whose reference frame we want the pose.
        :raises: IKError if IK failed or was not performed.
        :return: A list containing the calculated joint values.
        """
        self._ik_target.set_position(position, relative_to)
        if euler is not None:
            self._ik_target.set_orientation(euler, relative_to)
        elif quaternion is not None:
            self._ik_target.set_quaternion(quaternion, relative_to)

        ik_result, joint_values = sim.simCheckIkGroup(
            self._ik_group, [j.get_handle() for j in self.joints])
        if ik_result == sim.sim_ikresult_fail:
            # Message fixed: was the garbled "Perhaps the distance was between
            # the tip '  ' and target was too large."
            raise IKError('IK failed. Perhaps the distance between the tip '
                          'and target was too large.')
        elif ik_result == sim.sim_ikresult_not_performed:
            raise IKError('IK not performed.')
        return joint_values

    def solve_ik(self, position: Union[List[float], np.ndarray],
                 euler: Union[List[float], np.ndarray] = None,
                 quaternion: Union[List[float], np.ndarray] = None,
                 relative_to: Object = None) -> List[float]:
        """Solves an IK group and returns the calculated joint values.

        Deprecated: thin wrapper kept for backward compatibility.

        Must specify either rotation in euler or quaternions, but not both!

        :param position: The x, y, z position of the target.
        :param euler: The x, y, z orientation of the target (in radians).
        :param quaternion: A list containing the quaternion (x,y,z,w).
        :param relative_to: Indicates relative to which reference frame we want
            the target pose. Specify None to retrieve the absolute pose,
            or an Object relative to whose reference frame we want the pose.
        :return: A list containing the calculated joint values.
        """
        warnings.warn("Please use 'solve_ik_via_jacobian' instead.",
                      DeprecationWarning)
        return self.solve_ik_via_jacobian(
            position, euler, quaternion, relative_to)

    def get_path_from_cartesian_path(self, path: CartesianPath
                                     ) -> ArmConfigurationPath:
        """Translate a path from cartesian space, to arm configuration space.

        Note: It must be possible to reach the start of the path via a linear
        path, otherwise an error will be raised.

        :param path: A :py:class:`CartesianPath` instance to be translated to
            a configuration-space path.
        :raises: ConfigurationPathError if no path could be created.

        :return: A path in the arm configuration space.
        """
        handles = [j.get_handle() for j in self.joints]
        _, ret_floats, _, _ = utils.script_call(
            'getPathFromCartesianPath@PyRep', PYREP_SCRIPT_TYPE,
            ints=[path.get_handle(), self._ik_group,
                  self._ik_target.get_handle()] + handles)
        if len(ret_floats) == 0:
            raise ConfigurationPathError(
                'Could not create a path from cartesian path.')
        return ArmConfigurationPath(self, ret_floats)

    def get_linear_path(self, position: Union[List[float], np.ndarray],
                        euler: Union[List[float], np.ndarray] = None,
                        quaternion: Union[List[float], np.ndarray] = None,
                        steps=50, ignore_collisions=False,
                        relative_to: Object = None) -> ArmConfigurationPath:
        """Gets a linear configuration path given a target pose.

        Generates a path that drives a robot from its current configuration
        to its target dummy in a straight line (i.e. shortest path in Cartesian
        space).

        Must specify either rotation in euler or quaternions, but not both!

        :param position: The x, y, z position of the target.
        :param euler: The x, y, z orientation of the target (in radians).
        :param quaternion: A list containing the quaternion (x,y,z,w).
        :param steps: The desired number of path points. Each path point
            contains a robot configuration. A minimum of two path points is
            required. If the target pose distance is large, a larger number
            of steps leads to better results for this function.
        :param ignore_collisions: If collision checking should be disabled.
        :param relative_to: Indicates relative to which reference frame we want
            the target pose. Specify None to retrieve the absolute pose,
            or an Object relative to whose reference frame we want the pose.
        :raises: ConfigurationPathError if no path could be created.

        :return: A linear path in the arm configuration space.
        """
        if not ((euler is None) ^ (quaternion is None)):
            raise ConfigurationPathError(
                'Specify either euler or quaternion values, but not both.')

        # Temporarily move the IK target to the requested pose; restored below.
        prev_pose = self._ik_target.get_pose()
        self._ik_target.set_position(position, relative_to)
        if euler is not None:
            self._ik_target.set_orientation(euler, relative_to)
        elif quaternion is not None:
            self._ik_target.set_quaternion(quaternion, relative_to)
        handles = [j.get_handle() for j in self.joints]

        collision_pairs = []
        if not ignore_collisions:
            collision_pairs = [self._collision_collection, sim.sim_handle_all]
        joint_options = None
        ret_floats = sim.generateIkPath(
            self._ik_group, handles, steps, collision_pairs, joint_options)
        self._ik_target.set_pose(prev_pose)
        if len(ret_floats) == 0:
            raise ConfigurationPathError('Could not create path.')
        return ArmConfigurationPath(self, ret_floats)

    def get_nonlinear_path(self, position: Union[List[float], np.ndarray],
                           euler: Union[List[float], np.ndarray] = None,
                           quaternion: Union[List[float], np.ndarray] = None,
                           ignore_collisions=False,
                           trials=300,
                           max_configs=1,
                           distance_threshold: float = 0.65,
                           max_time_ms: int = 10,
                           trials_per_goal=1,
                           algorithm=Algos.SBL,
                           relative_to: Object = None
                           ) -> ArmConfigurationPath:
        """Gets a non-linear (planned) configuration path given a target pose.

        A path is generated by finding several configs for a pose, and ranking
        them according to the distance in configuration space (smaller is
        better).

        Must specify either rotation in euler or quaternions, but not both!

        :param position: The x, y, z position of the target.
        :param euler: The x, y, z orientation of the target (in radians).
        :param quaternion: A list containing the quaternion (x,y,z,w).
        :param ignore_collisions: If collision checking should be disabled.
        :param trials: The maximum number of attempts to reach max_configs.
            See 'solve_ik_via_sampling'.
        :param max_configs: The maximum number of configurations we want to
            generate before sorting them. See 'solve_ik_via_sampling'.
        :param distance_threshold: Distance indicating when IK should be
            computed in order to try to bring the tip onto the target.
            See 'solve_ik_via_sampling'.
        :param max_time_ms: Maximum time in ms spend searching for
            each configuration. See 'solve_ik_via_sampling'.
        :param trials_per_goal: The number of paths per config we want to trial.
        :param algorithm: The algorithm for path planning to use.
        :param relative_to: Indicates relative to which reference frame we want
            the target pose. Specify None to retrieve the absolute pose,
            or an Object relative to whose reference frame we want the pose.
        :raises: ConfigurationPathError if no path could be created.

        :return: A non-linear path in the arm configuration space.
        """
        handles = [j.get_handle() for j in self.joints]

        try:
            configs = self.solve_ik_via_sampling(
                position, euler, quaternion, ignore_collisions, trials,
                max_configs, distance_threshold, max_time_ms, relative_to)
        except ConfigurationError as e:
            raise ConfigurationPathError('Could not create path.') from e

        _, ret_floats, _, _ = utils.script_call(
            'getNonlinearPath@PyRep', PYREP_SCRIPT_TYPE,
            ints=[self._collision_collection, int(ignore_collisions),
                  trials_per_goal] + handles,
            floats=configs.flatten().tolist(),
            strings=[algorithm.value])

        if len(ret_floats) == 0:
            raise ConfigurationPathError('Could not create path.')
        return ArmConfigurationPath(self, ret_floats)

    def get_path(self, position: Union[List[float], np.ndarray],
                 euler: Union[List[float], np.ndarray] = None,
                 quaternion: Union[List[float], np.ndarray] = None,
                 ignore_collisions=False,
                 trials=300,
                 max_configs=1,
                 distance_threshold: float = 0.65,
                 max_time_ms: int = 10,
                 trials_per_goal=1,
                 algorithm=Algos.SBL,
                 relative_to: Object = None
                 ) -> ArmConfigurationPath:
        """Tries to get a linear path, failing that tries a non-linear path.

        Must specify either rotation in euler or quaternions, but not both!

        :param position: The x, y, z position of the target.
        :param euler: The x, y, z orientation of the target (in radians).
        :param quaternion: A list containing the quaternion (x,y,z,w).
        :param ignore_collisions: If collision checking should be disabled.
        :param trials: The maximum number of attempts to reach max_configs.
            See 'solve_ik_via_sampling'.
        :param max_configs: The maximum number of configurations we want to
            generate before sorting them. See 'solve_ik_via_sampling'.
        :param distance_threshold: Distance indicating when IK should be
            computed in order to try to bring the tip onto the target.
            See 'solve_ik_via_sampling'.
        :param max_time_ms: Maximum time in ms spend searching for
            each configuration. See 'solve_ik_via_sampling'.
        :param trials_per_goal: The number of paths per config we want to trial.
        :param algorithm: The algorithm for path planning to use.
        :param relative_to: Indicates relative to which reference frame we want
            the target pose. Specify None to retrieve the absolute pose,
            or an Object relative to whose reference frame we want the pose.
        :raises: ConfigurationPathError if neither a linear or non-linear path
            can be created.
        :return: A linear or non-linear path in the arm configuration space.
        """
        try:
            p = self.get_linear_path(position, euler, quaternion,
                                     ignore_collisions=ignore_collisions,
                                     relative_to=relative_to)
            return p
        except ConfigurationPathError:
            pass  # Allowed. Try again, but with non-linear.

        # This time if an exception is thrown, we don't want to catch it.
        p = self.get_nonlinear_path(
            position, euler, quaternion, ignore_collisions, trials, max_configs,
            distance_threshold, max_time_ms, trials_per_goal, algorithm,
            relative_to)
        return p

    def get_tip(self) -> Dummy:
        """Gets the tip of the arm.

        Each arm is required to have a tip for path planning.

        :return: The tip of the arm.
        """
        return self._ik_tip

    def get_jacobian(self):
        """Calculates the Jacobian.

        :return: the row-major Jacobian matrix.
        """
        # Snap the target onto the tip so the IK computation (and hence the
        # Jacobian) is evaluated at the current configuration.
        self._ik_target.set_matrix(self._ik_tip.get_matrix())
        sim.simCheckIkGroup(self._ik_group,
                            [j.get_handle() for j in self.joints])
        jacobian, (rows, cols) = sim.simGetIkGroupMatrix(self._ik_group, 0)
        jacobian = np.array(jacobian).reshape((rows, cols), order='F')
        return jacobian

    def check_arm_collision(self, obj: 'Object' = None) -> bool:
        """Checks whether two entities are colliding.

        :param obj: The other collidable object to check collision against,
            or None to check against all collidable objects. Note that objects
            must be marked as collidable!
        :return: If the object is colliding.
        """
        handle = sim.sim_handle_all if obj is None else obj.get_handle()
        return sim.simCheckCollision(self._collision_collection, handle) == 1
| |
# Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for nova.compute.rpcapi
"""
import mock
from oslo_serialization import jsonutils
from nova.compute import rpcapi as compute_rpcapi
import nova.conf
from nova import context
from nova import exception
from nova.objects import block_device as objects_block_dev
from nova.objects import migrate_data as migrate_data_obj
from nova.objects import migration as migration_obj
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
CONF = nova.conf.CONF
class ComputeRpcAPITestCase(test.NoDBTestCase):
    def setUp(self):
        """Build fake flavor/instance/BDM fixtures and stub out cell mappings."""
        super(ComputeRpcAPITestCase, self).setUp()
        self.context = context.get_admin_context()
        # Keep both the versioned object and its primitive (dict) form around.
        self.fake_flavor_obj = fake_flavor.fake_flavor_obj(self.context)
        self.fake_flavor = jsonutils.to_primitive(self.fake_flavor_obj)
        instance_attr = {'host': 'fake_host',
                         'instance_type_id': self.fake_flavor_obj['id'],
                         'instance_type': self.fake_flavor_obj}
        self.fake_instance_obj = fake_instance.fake_instance_obj(self.context,
                                                                 **instance_attr)
        self.fake_instance = jsonutils.to_primitive(self.fake_instance_obj)
        # A volume-backed block device mapping attached to the fake instance.
        self.fake_volume_bdm = objects_block_dev.BlockDeviceMapping(
                **fake_block_device.FakeDbBlockDeviceDict(
                    {'source_type': 'volume', 'destination_type': 'volume',
                     'instance_uuid': self.fake_instance_obj.uuid,
                     'volume_id': 'fake-volume-id'}))
        # FIXME(melwitt): Temporary while things have no mappings
        # Both lookups raise NotFound so the RPC API falls back to the
        # non-cells code path; patchers are stopped in tearDown().
        self.patcher1 = mock.patch('nova.objects.InstanceMapping.'
                                   'get_by_instance_uuid')
        self.patcher2 = mock.patch('nova.objects.HostMapping.get_by_host')
        mock_inst_mapping = self.patcher1.start()
        mock_host_mapping = self.patcher2.start()
        mock_inst_mapping.side_effect = exception.InstanceMappingNotFound(
            uuid=self.fake_instance_obj.uuid)
        mock_host_mapping.side_effect = exception.HostMappingNotFound(
            name=self.fake_instance_obj.host)
def tearDown(self):
super(ComputeRpcAPITestCase, self).tearDown()
self.patcher1.stop()
self.patcher2.stop()
    @mock.patch('nova.objects.Service.get_minimum_version')
    def test_auto_pin(self, mock_get_min):
        """'auto' upgrade level pins the version cap from the minimum
        nova-compute service version in the DB (version 1 maps to cap 4.4)."""
        mock_get_min.return_value = 1
        self.flags(compute='auto', group='upgrade_levels')
        compute_rpcapi.LAST_VERSION = None  # reset the module-level cache
        rpcapi = compute_rpcapi.ComputeAPI()
        self.assertEqual('4.4', rpcapi.router.version_cap)
        mock_get_min.assert_called_once_with(mock.ANY, 'nova-compute')
    @mock.patch('nova.objects.Service.get_minimum_version')
    def test_auto_pin_fails_if_too_old(self, mock_get_min):
        """A service version with no known RPC mapping raises ServiceTooOld."""
        mock_get_min.return_value = 1955
        self.flags(compute='auto', group='upgrade_levels')
        compute_rpcapi.LAST_VERSION = None  # reset the module-level cache
        self.assertRaises(exception.ServiceTooOld,
                          compute_rpcapi.ComputeAPI)
    @mock.patch('nova.objects.Service.get_minimum_version')
    def test_auto_pin_with_service_version_zero(self, mock_get_min):
        """Service version 0 (pre-versioning) falls back to the newest cap
        (4.11) and must not cache a pinned version."""
        mock_get_min.return_value = 0
        self.flags(compute='auto', group='upgrade_levels')
        compute_rpcapi.LAST_VERSION = None  # reset the module-level cache
        rpcapi = compute_rpcapi.ComputeAPI()
        self.assertEqual('4.11', rpcapi.router.version_cap)
        mock_get_min.assert_called_once_with(mock.ANY, 'nova-compute')
        self.assertIsNone(compute_rpcapi.LAST_VERSION)
    @mock.patch('nova.objects.Service.get_minimum_version')
    def test_auto_pin_caches(self, mock_get_min):
        """The auto-pinned version is cached in LAST_VERSION, so constructing
        a second ComputeAPI does not hit the DB again."""
        mock_get_min.return_value = 1
        self.flags(compute='auto', group='upgrade_levels')
        compute_rpcapi.LAST_VERSION = None  # reset the module-level cache
        compute_rpcapi.ComputeAPI()
        compute_rpcapi.ComputeAPI()
        mock_get_min.assert_called_once_with(mock.ANY, 'nova-compute')
        self.assertEqual('4.4', compute_rpcapi.LAST_VERSION)
    def _test_compute_api(self, method, rpc_method,
                          expected_args=None, **kwargs):
        """Drive one RPC API method and assert it prepares and casts/calls
        the expected message.

        :param method: Name of the ComputeAPI method to invoke.
        :param rpc_method: 'cast' or 'call' -- which client method is expected.
        :param expected_args: Overrides applied to the expected RPC kwargs.
        :param kwargs: Arguments passed to the API method; special keys:
            'rpcapi_class' (alternate API class), 'version' (expected pin),
            'host_param'/'host' (server routing), '_return_value'.
        """
        ctxt = context.RequestContext('fake_user', 'fake_project')
        rpcapi = kwargs.pop('rpcapi_class', compute_rpcapi.ComputeAPI)()
        self.assertIsNotNone(rpcapi.router)
        self.assertEqual(rpcapi.router.target.topic, CONF.compute_topic)

        # This test wants to run the real prepare function, so must use
        # a real client object
        default_client = rpcapi.router.default_client
        orig_prepare = default_client.prepare
        base_version = rpcapi.router.target.version
        expected_version = kwargs.pop('version', base_version)

        # Build the kwargs we expect the RPC layer to receive. 'host_param'
        # becomes 'host' in the message while plain 'host' is only used for
        # routing (server=) and is dropped from the message.
        expected_kwargs = kwargs.copy()
        if expected_args:
            expected_kwargs.update(expected_args)
        if 'host_param' in expected_kwargs:
            expected_kwargs['host'] = expected_kwargs.pop('host_param')
        else:
            expected_kwargs.pop('host', None)

        # These two methods take a flag selecting cast vs call at runtime.
        cast_and_call = ['confirm_resize', 'stop_instance']
        if rpc_method == 'call' and method in cast_and_call:
            if method == 'confirm_resize':
                kwargs['cast'] = False
            else:
                kwargs['do_cast'] = False
        # Determine the server the message should be routed to.
        if 'host' in kwargs:
            host = kwargs['host']
        elif 'instances' in kwargs:
            host = kwargs['instances'][0]['host']
        else:
            host = kwargs['instance']['host']

        # rebuild_instance renames 'node' to 'scheduled_node' on the wire.
        if method == 'rebuild_instance' and 'node' in expected_kwargs:
            expected_kwargs['scheduled_node'] = expected_kwargs.pop('node')

        with test.nested(
            mock.patch.object(default_client, rpc_method),
            mock.patch.object(default_client, 'prepare'),
            mock.patch.object(default_client, 'can_send_version'),
        ) as (
            rpc_mock, prepare_mock, csv_mock
        ):
            prepare_mock.return_value = default_client
            if '_return_value' in kwargs:
                rpc_mock.return_value = kwargs.pop('_return_value')
                del expected_kwargs['_return_value']
            elif rpc_method == 'call':
                rpc_mock.return_value = 'foo'
            else:
                rpc_mock.return_value = None
            # Delegate version-compat checks to a real (unmocked) client.
            csv_mock.side_effect = (
                lambda v: orig_prepare(version=v).can_send_version())

            retval = getattr(rpcapi, method)(ctxt, **kwargs)
            self.assertEqual(retval, rpc_mock.return_value)

            prepare_mock.assert_called_once_with(version=expected_version,
                                                 server=host)
            rpc_mock.assert_called_once_with(ctxt, method, **expected_kwargs)
    # Each test below delegates to _test_compute_api(), which mocks the RPC
    # client, invokes the named ComputeAPI method, and asserts that exactly
    # one cast/call was made with the given kwargs and RPC version (kwargs
    # double as the expected RPC arguments unless an expected_args dict is
    # passed positionally).

    def test_add_aggregate_host(self):
        self._test_compute_api('add_aggregate_host', 'cast',
                aggregate={'id': 'fake_id'}, host_param='host', host='host',
                slave_info={})

    def test_add_fixed_ip_to_instance(self):
        self._test_compute_api('add_fixed_ip_to_instance', 'cast',
                instance=self.fake_instance_obj, network_id='id',
                version='4.0')

    def test_attach_interface(self):
        self._test_compute_api('attach_interface', 'call',
                instance=self.fake_instance_obj, network_id='id',
                port_id='id2', version='4.0', requested_ip='192.168.1.50')

    def test_attach_volume(self):
        self._test_compute_api('attach_volume', 'cast',
                instance=self.fake_instance_obj, bdm=self.fake_volume_bdm,
                version='4.0')

    def test_change_instance_metadata(self):
        self._test_compute_api('change_instance_metadata', 'cast',
                instance=self.fake_instance_obj, diff={}, version='4.0')

    def test_check_instance_shared_storage(self):
        self._test_compute_api('check_instance_shared_storage', 'call',
                instance=self.fake_instance_obj, data='foo',
                version='4.0')

    def test_confirm_resize_cast(self):
        self._test_compute_api('confirm_resize', 'cast',
                instance=self.fake_instance_obj, migration={'id': 'foo'},
                host='host', reservations=list('fake_res'))

    def test_confirm_resize_call(self):
        self._test_compute_api('confirm_resize', 'call',
                instance=self.fake_instance_obj, migration={'id': 'foo'},
                host='host', reservations=list('fake_res'))

    def test_detach_interface(self):
        self._test_compute_api('detach_interface', 'cast',
                version='4.0', instance=self.fake_instance_obj,
                port_id='fake_id')

    def test_detach_volume(self):
        self._test_compute_api('detach_volume', 'cast',
                instance=self.fake_instance_obj, volume_id='id',
                attachment_id='fake_id', version='4.7')
def test_detach_volume_no_attachment_id(self):
ctxt = context.RequestContext('fake_user', 'fake_project')
instance = self.fake_instance_obj
rpcapi = compute_rpcapi.ComputeAPI()
cast_mock = mock.Mock()
cctxt_mock = mock.Mock(cast=cast_mock)
rpcapi.router.by_instance = mock.Mock()
mock_client = mock.Mock()
rpcapi.router.by_instance.return_value = mock_client
with test.nested(
mock.patch.object(mock_client, 'can_send_version',
return_value=False),
mock.patch.object(mock_client, 'prepare',
return_value=cctxt_mock)
) as (
can_send_mock, prepare_mock
):
rpcapi.detach_volume(ctxt, instance=instance,
volume_id='id', attachment_id='fake_id')
# assert our mocks were called as expected
can_send_mock.assert_called_once_with('4.7')
prepare_mock.assert_called_once_with(server=instance['host'],
version='4.0')
cast_mock.assert_called_once_with(ctxt, 'detach_volume',
instance=instance,
volume_id='id')
    def test_finish_resize(self):
        self._test_compute_api('finish_resize', 'cast',
                instance=self.fake_instance_obj, migration={'id': 'foo'},
                image='image', disk_info='disk_info', host='host',
                reservations=list('fake_res'))

    def test_finish_revert_resize(self):
        self._test_compute_api('finish_revert_resize', 'cast',
                instance=self.fake_instance_obj, migration={'id': 'fake_id'},
                host='host', reservations=list('fake_res'))

    def test_get_console_output(self):
        self._test_compute_api('get_console_output', 'call',
                instance=self.fake_instance_obj, tail_length='tl',
                version='4.0')

    def test_get_console_pool_info(self):
        self._test_compute_api('get_console_pool_info', 'call',
                console_type='type', host='host')

    def test_get_console_topic(self):
        self._test_compute_api('get_console_topic', 'call', host='host')

    def test_get_diagnostics(self):
        self._test_compute_api('get_diagnostics', 'call',
                instance=self.fake_instance_obj, version='4.0')

    def test_get_instance_diagnostics(self):
        # Positional expected_args: only the instance goes over the wire.
        expected_args = {'instance': self.fake_instance_obj}
        self._test_compute_api('get_instance_diagnostics', 'call',
                expected_args, instance=self.fake_instance_obj,
                version='4.13')

    def test_get_vnc_console(self):
        self._test_compute_api('get_vnc_console', 'call',
                instance=self.fake_instance_obj, console_type='type',
                version='4.0')

    def test_get_spice_console(self):
        self._test_compute_api('get_spice_console', 'call',
                instance=self.fake_instance_obj, console_type='type',
                version='4.0')

    def test_get_rdp_console(self):
        self._test_compute_api('get_rdp_console', 'call',
                instance=self.fake_instance_obj, console_type='type',
                version='4.0')

    def test_get_serial_console(self):
        self._test_compute_api('get_serial_console', 'call',
                instance=self.fake_instance_obj, console_type='serial',
                version='4.0')

    def test_get_mks_console(self):
        self._test_compute_api('get_mks_console', 'call',
                instance=self.fake_instance_obj, console_type='webmks',
                version='4.3')

    def test_validate_console_port(self):
        self._test_compute_api('validate_console_port', 'call',
                instance=self.fake_instance_obj, port="5900",
                console_type="novnc", version='4.0')

    def test_host_maintenance_mode(self):
        self._test_compute_api('host_maintenance_mode', 'call',
                host_param='param', mode='mode', host='host')

    def test_host_power_action(self):
        self._test_compute_api('host_power_action', 'call', action='action',
                host='host')

    def test_inject_network_info(self):
        self._test_compute_api('inject_network_info', 'cast',
                instance=self.fake_instance_obj)

    def test_live_migration(self):
        self._test_compute_api('live_migration', 'cast',
                instance=self.fake_instance_obj, dest='dest',
                block_migration='blockity_block', host='tsoh',
                migration='migration',
                migrate_data={}, version='4.8')
    def test_live_migration_force_complete(self):
        # New enough compute (can_send_version True): RPC 4.12 passes only
        # the instance; the migration id is implied by the contract.
        migration = migration_obj.Migration()
        migration.id = 1
        migration.source_compute = 'fake'
        ctxt = context.RequestContext('fake_user', 'fake_project')
        version = '4.12'
        rpcapi = compute_rpcapi.ComputeAPI()
        rpcapi.router.by_host = mock.Mock()
        mock_client = mock.MagicMock()
        rpcapi.router.by_host.return_value = mock_client
        mock_client.can_send_version.return_value = True
        mock_cctx = mock.MagicMock()
        mock_client.prepare.return_value = mock_cctx
        rpcapi.live_migration_force_complete(ctxt, self.fake_instance_obj,
                                             migration)
        # The cast must target the source compute of the migration.
        mock_client.prepare.assert_called_with(server=migration.source_compute,
                                               version=version)
        mock_cctx.cast.assert_called_with(ctxt,
                                          'live_migration_force_complete',
                                          instance=self.fake_instance_obj)

    def test_live_migration_force_complete_backward_compatibility(self):
        # Old compute (can_send_version False): fall back to RPC 4.9 and
        # pass migration_id explicitly.
        migration = migration_obj.Migration()
        migration.id = 1
        migration.source_compute = 'fake'
        version = '4.9'
        ctxt = context.RequestContext('fake_user', 'fake_project')
        rpcapi = compute_rpcapi.ComputeAPI()
        rpcapi.router.by_host = mock.Mock()
        mock_client = mock.MagicMock()
        rpcapi.router.by_host.return_value = mock_client
        mock_client.can_send_version.return_value = False
        mock_cctx = mock.MagicMock()
        mock_client.prepare.return_value = mock_cctx
        rpcapi.live_migration_force_complete(ctxt, self.fake_instance_obj,
                                             migration)
        mock_client.prepare.assert_called_with(server=migration.source_compute,
                                               version=version)
        mock_cctx.cast.assert_called_with(ctxt,
                                          'live_migration_force_complete',
                                          instance=self.fake_instance_obj,
                                          migration_id=migration.id)
    def test_live_migration_abort(self):
        self._test_compute_api('live_migration_abort', 'cast',
                instance=self.fake_instance_obj,
                migration_id='1', version='4.10')

    def test_post_live_migration_at_destination(self):
        self._test_compute_api('post_live_migration_at_destination', 'call',
                instance=self.fake_instance_obj,
                block_migration='block_migration', host='host', version='4.0')

    def test_pause_instance(self):
        self._test_compute_api('pause_instance', 'cast',
                instance=self.fake_instance_obj)

    def test_soft_delete_instance(self):
        self._test_compute_api('soft_delete_instance', 'cast',
                instance=self.fake_instance_obj,
                reservations=['uuid1', 'uuid2'])

    def test_swap_volume(self):
        self._test_compute_api('swap_volume', 'cast',
                instance=self.fake_instance_obj, old_volume_id='oldid',
                new_volume_id='newid')

    def test_restore_instance(self):
        self._test_compute_api('restore_instance', 'cast',
                instance=self.fake_instance_obj, version='4.0')

    def test_pre_live_migration(self):
        self._test_compute_api('pre_live_migration', 'call',
                instance=self.fake_instance_obj,
                block_migration='block_migration', disk='disk', host='host',
                migrate_data=None, version='4.8')

    def test_prep_resize(self):
        # Current pin (4.1) sends the flavor object...
        self._test_compute_api('prep_resize', 'cast',
                instance=self.fake_instance_obj,
                instance_type=self.fake_flavor_obj,
                image='fake_image', host='host',
                reservations=list('fake_res'),
                request_spec='fake_spec',
                filter_properties={'fakeprop': 'fakeval'},
                node='node', clean_shutdown=True, version='4.1')
        # ...but pinned to 4.0 the flavor is downgraded to a legacy dict.
        self.flags(compute='4.0', group='upgrade_levels')
        expected_args = {'instance_type': self.fake_flavor}
        self._test_compute_api('prep_resize', 'cast', expected_args,
                instance=self.fake_instance_obj,
                instance_type=self.fake_flavor_obj,
                image='fake_image', host='host',
                reservations=list('fake_res'),
                request_spec='fake_spec',
                filter_properties={'fakeprop': 'fakeval'},
                node='node', clean_shutdown=True, version='4.0')

    def test_reboot_instance(self):
        self.maxDiff = None
        self._test_compute_api('reboot_instance', 'cast',
                instance=self.fake_instance_obj,
                block_device_info={},
                reboot_type='type')

    def test_rebuild_instance(self):
        self._test_compute_api('rebuild_instance', 'cast', new_pass='None',
                injected_files='None', image_ref='None', orig_image_ref='None',
                bdms=[], instance=self.fake_instance_obj, host='new_host',
                orig_sys_metadata=None, recreate=True, on_shared_storage=True,
                preserve_ephemeral=True, migration=None, node=None,
                limits=None, version='4.5')

    def test_rebuild_instance_downgrade(self):
        # Pinned to 4.0: migration/node/limits kwargs are not sent.
        self.flags(group='upgrade_levels', compute='4.0')
        self._test_compute_api('rebuild_instance', 'cast', new_pass='None',
                injected_files='None', image_ref='None', orig_image_ref='None',
                bdms=[], instance=self.fake_instance_obj, host='new_host',
                orig_sys_metadata=None, recreate=True, on_shared_storage=True,
                preserve_ephemeral=True, version='4.0')

    def test_reserve_block_device_name(self):
        self._test_compute_api('reserve_block_device_name', 'call',
                instance=self.fake_instance_obj, device='device',
                volume_id='id', disk_bus='ide', device_type='cdrom',
                version='4.0',
                _return_value=objects_block_dev.BlockDeviceMapping())

    def test_refresh_instance_security_rules(self):
        # Only the instance goes over the wire; host selects the target.
        expected_args = {'instance': self.fake_instance_obj}
        self._test_compute_api('refresh_instance_security_rules', 'cast',
                expected_args, host='fake_host',
                instance=self.fake_instance_obj, version='4.4')

    def test_remove_aggregate_host(self):
        self._test_compute_api('remove_aggregate_host', 'cast',
                aggregate={'id': 'fake_id'}, host_param='host', host='host',
                slave_info={})

    def test_remove_fixed_ip_from_instance(self):
        self._test_compute_api('remove_fixed_ip_from_instance', 'cast',
                instance=self.fake_instance_obj, address='addr',
                version='4.0')

    def test_remove_volume_connection(self):
        self._test_compute_api('remove_volume_connection', 'call',
                instance=self.fake_instance_obj, volume_id='id', host='host',
                version='4.0')

    def test_rescue_instance(self):
        self._test_compute_api('rescue_instance', 'cast',
                instance=self.fake_instance_obj, rescue_password='pw',
                rescue_image_ref='fake_image_ref',
                clean_shutdown=True, version='4.0')

    def test_reset_network(self):
        self._test_compute_api('reset_network', 'cast',
                instance=self.fake_instance_obj)

    def test_resize_instance(self):
        # 4.1 passes the flavor object; pinned to 4.0 it degrades to a dict.
        self._test_compute_api('resize_instance', 'cast',
                instance=self.fake_instance_obj, migration={'id': 'fake_id'},
                image='image', instance_type=self.fake_flavor_obj,
                reservations=list('fake_res'),
                clean_shutdown=True, version='4.1')
        self.flags(compute='4.0', group='upgrade_levels')
        expected_args = {'instance_type': self.fake_flavor}
        self._test_compute_api('resize_instance', 'cast', expected_args,
                instance=self.fake_instance_obj, migration={'id': 'fake_id'},
                image='image', instance_type=self.fake_flavor_obj,
                reservations=list('fake_res'),
                clean_shutdown=True, version='4.0')
    def test_resume_instance(self):
        self._test_compute_api('resume_instance', 'cast',
                instance=self.fake_instance_obj)

    def test_revert_resize(self):
        self._test_compute_api('revert_resize', 'cast',
                instance=self.fake_instance_obj, migration={'id': 'fake_id'},
                host='host', reservations=list('fake_res'))

    def test_set_admin_password(self):
        self._test_compute_api('set_admin_password', 'call',
                instance=self.fake_instance_obj, new_pass='pw',
                version='4.0')

    def test_set_host_enabled(self):
        self._test_compute_api('set_host_enabled', 'call',
                enabled='enabled', host='host')

    def test_get_host_uptime(self):
        self._test_compute_api('get_host_uptime', 'call', host='host')

    def test_backup_instance(self):
        self._test_compute_api('backup_instance', 'cast',
                instance=self.fake_instance_obj, image_id='id',
                backup_type='type', rotation='rotation')

    def test_snapshot_instance(self):
        self._test_compute_api('snapshot_instance', 'cast',
                instance=self.fake_instance_obj, image_id='id')

    def test_start_instance(self):
        self._test_compute_api('start_instance', 'cast',
                instance=self.fake_instance_obj)

    def test_stop_instance_cast(self):
        self._test_compute_api('stop_instance', 'cast',
                instance=self.fake_instance_obj,
                clean_shutdown=True, version='4.0')

    def test_stop_instance_call(self):
        self._test_compute_api('stop_instance', 'call',
                instance=self.fake_instance_obj,
                clean_shutdown=True, version='4.0')

    def test_suspend_instance(self):
        self._test_compute_api('suspend_instance', 'cast',
                instance=self.fake_instance_obj)

    def test_terminate_instance(self):
        self._test_compute_api('terminate_instance', 'cast',
                instance=self.fake_instance_obj, bdms=[],
                reservations=['uuid1', 'uuid2'], version='4.0')

    def test_unpause_instance(self):
        self._test_compute_api('unpause_instance', 'cast',
                instance=self.fake_instance_obj)

    def test_unrescue_instance(self):
        self._test_compute_api('unrescue_instance', 'cast',
                instance=self.fake_instance_obj, version='4.0')

    def test_shelve_instance(self):
        self._test_compute_api('shelve_instance', 'cast',
                instance=self.fake_instance_obj, image_id='image_id',
                clean_shutdown=True, version='4.0')

    def test_shelve_offload_instance(self):
        self._test_compute_api('shelve_offload_instance', 'cast',
                instance=self.fake_instance_obj,
                clean_shutdown=True, version='4.0')

    def test_unshelve_instance(self):
        self._test_compute_api('unshelve_instance', 'cast',
                instance=self.fake_instance_obj, host='host', image='image',
                filter_properties={'fakeprop': 'fakeval'}, node='node',
                version='4.0')

    def test_volume_snapshot_create(self):
        self._test_compute_api('volume_snapshot_create', 'cast',
                instance=self.fake_instance_obj, volume_id='fake_id',
                create_info={}, version='4.0')

    def test_volume_snapshot_delete(self):
        self._test_compute_api('volume_snapshot_delete', 'cast',
                instance=self.fake_instance_obj, volume_id='fake_id',
                snapshot_id='fake_id2', delete_info={}, version='4.0')

    def test_external_instance_event(self):
        self._test_compute_api('external_instance_event', 'cast',
                instances=[self.fake_instance_obj],
                events=['event'],
                version='4.0')

    def test_build_and_run_instance(self):
        self._test_compute_api('build_and_run_instance', 'cast',
                instance=self.fake_instance_obj, host='host', image='image',
                request_spec={'request': 'spec'}, filter_properties=[],
                admin_password='passwd', injected_files=None,
                requested_networks=['network1'], security_groups=None,
                block_device_mapping=None, node='node', limits=[],
                version='4.0')

    def test_quiesce_instance(self):
        self._test_compute_api('quiesce_instance', 'call',
                instance=self.fake_instance_obj, version='4.0')

    def test_unquiesce_instance(self):
        self._test_compute_api('unquiesce_instance', 'cast',
                instance=self.fake_instance_obj, mapping=None, version='4.0')

    def test_trigger_crash_dump(self):
        self._test_compute_api('trigger_crash_dump', 'cast',
                instance=self.fake_instance_obj, version='4.6')

    def test_trigger_crash_dump_incompatible(self):
        # Pinned below 4.6: the API must refuse rather than silently no-op.
        self.flags(compute='4.0', group='upgrade_levels')
        self.assertRaises(exception.TriggerCrashDumpNotSupported,
                          self._test_compute_api,
                          'trigger_crash_dump', 'cast',
                          instance=self.fake_instance_obj, version='4.6')
    def _test_simple_call(self, method, inargs, callargs, callret,
                          calltype='call', can_send=False):
        """Drive a single RPC method against a fully mocked router/client.

        Calls rpcapi.<method>(ctxt, **inargs) and asserts the client
        performed <calltype>(ctxt, method, **callargs); the mocked call
        returns *callret*. ``can_send`` sets can_send_version()'s answer.
        Returns whatever the rpcapi method returned.
        """
        rpc = compute_rpcapi.ComputeAPI()
        mock_client = mock.Mock()
        # Both router lookups hand back the same mocked client.
        rpc.router.by_instance = mock.Mock()
        rpc.router.by_instance.return_value = mock_client
        rpc.router.by_host = mock.Mock()
        rpc.router.by_host.return_value = mock_client

        @mock.patch.object(compute_rpcapi, '_compute_host')
        def _test(mock_ch):
            mock_client.can_send_version.return_value = can_send
            call = getattr(mock_client.prepare.return_value, calltype)
            call.return_value = callret
            ctxt = context.RequestContext()
            result = getattr(rpc, method)(ctxt, **inargs)
            call.assert_called_once_with(ctxt, method, **callargs)
            # Get the target of the prepare call: prepare(server=<target>, ...)
            prepare_target = mock_client.prepare.call_args[1]['server']
            # If _compute_host(None, instance) was called, then by_instance
            # should have been called with the instance. Otherwise by_host
            # should have been called with the same host as the prepare target.
            if mock_ch.called and mock_ch.call_args[0][0] is None:
                instance = mock_ch.call_args[0][1]
                rpc.router.by_instance.assert_called_once_with(ctxt, instance)
                rpc.router.by_host.assert_not_called()
            else:
                rpc.router.by_host.assert_called_once_with(ctxt,
                                                           prepare_target)
                rpc.router.by_instance.assert_not_called()
            return result

        return _test()
    # The tests below verify that migrate-data objects are converted to
    # legacy dicts on the wire and back to objects on return.

    def test_check_can_live_migrate_source_converts_objects(self):
        obj = migrate_data_obj.LiveMigrateData()
        inst = self.fake_instance_obj
        result = self._test_simple_call('check_can_live_migrate_source',
                                        inargs={'instance': inst,
                                                'dest_check_data': obj},
                                        callargs={'instance': inst,
                                                  'dest_check_data': {}},
                                        callret=obj)
        self.assertEqual(obj, result)
        # A legacy dict return must be promoted back to an object.
        result = self._test_simple_call('check_can_live_migrate_source',
                                        inargs={'instance': inst,
                                                'dest_check_data': obj},
                                        callargs={'instance': inst,
                                                  'dest_check_data': {}},
                                        callret={'foo': 'bar'})
        self.assertIsInstance(result, migrate_data_obj.LiveMigrateData)

    @mock.patch('nova.objects.migrate_data.LiveMigrateData.'
                'detect_implementation')
    def test_check_can_live_migrate_destination_converts_dict(self,
                                                              mock_det):
        inst = self.fake_instance_obj
        result = self._test_simple_call('check_can_live_migrate_destination',
                                        inargs={'instance': inst,
                                                'destination': 'bar',
                                                'block_migration': False,
                                                'disk_over_commit': False},
                                        callargs={'instance': inst,
                                                  'block_migration': False,
                                                  'disk_over_commit': False},
                                        callret={'foo': 'bar'})
        self.assertEqual(mock_det.return_value, result)

    def test_live_migration_converts_objects(self):
        obj = migrate_data_obj.LiveMigrateData()
        inst = self.fake_instance_obj
        self._test_simple_call('live_migration',
                               inargs={'instance': inst,
                                       'dest': 'foo',
                                       'block_migration': False,
                                       'host': 'foo',
                                       'migration': None,
                                       'migrate_data': obj},
                               callargs={'instance': inst,
                                         'dest': 'foo',
                                         'block_migration': False,
                                         'migrate_data': {
                                             'pre_live_migration_result': {}}},
                               callret=None,
                               calltype='cast')

    @mock.patch('nova.objects.migrate_data.LiveMigrateData.from_legacy_dict')
    def test_pre_live_migration_converts_objects(self, mock_fld):
        obj = migrate_data_obj.LiveMigrateData()
        inst = self.fake_instance_obj
        # Object return: no legacy conversion should be attempted.
        result = self._test_simple_call('pre_live_migration',
                                        inargs={'instance': inst,
                                                'block_migration': False,
                                                'disk': None,
                                                'host': 'foo',
                                                'migrate_data': obj},
                                        callargs={'instance': inst,
                                                  'block_migration': False,
                                                  'disk': None,
                                                  'migrate_data': {}},
                                        callret=obj)
        self.assertFalse(mock_fld.called)
        self.assertEqual(obj, result)
        # Dict return: must be wrapped via from_legacy_dict().
        result = self._test_simple_call('pre_live_migration',
                                        inargs={'instance': inst,
                                                'block_migration': False,
                                                'disk': None,
                                                'host': 'foo',
                                                'migrate_data': obj},
                                        callargs={'instance': inst,
                                                  'block_migration': False,
                                                  'disk': None,
                                                  'migrate_data': {}},
                                        callret={'foo': 'bar'})
        mock_fld.assert_called_once_with(
            {'pre_live_migration_result': {'foo': 'bar'}})
        self.assertIsInstance(result, migrate_data_obj.LiveMigrateData)

    def test_rollback_live_migration_at_destination_converts_objects(self):
        obj = migrate_data_obj.LiveMigrateData()
        inst = self.fake_instance_obj
        method = 'rollback_live_migration_at_destination'
        self._test_simple_call(method,
                               inargs={'instance': inst,
                                       'host': 'foo',
                                       'destroy_disks': False,
                                       'migrate_data': obj},
                               callargs={'instance': inst,
                                         'destroy_disks': False,
                                         'migrate_data': {}},
                               callret=None,
                               calltype='cast')

    def test_check_can_live_migrate_destination_old_compute(self):
        # Remotes older than 4.11 cannot live-migrate at all.
        self.flags(compute='4.10', group='upgrade_levels')
        self.assertRaises(exception.LiveMigrationWithOldNovaNotSupported,
                          self._test_compute_api,
                          'check_can_live_migrate_destination', 'call',
                          instance=self.fake_instance_obj,
                          block_migration=None,
                          destination='dest',
                          disk_over_commit=None, version='4.11')
| |
# Copyright (c) 2005-2008, California Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Andrew D. Straw
from __future__ import division, print_function
import math
import warnings

import cgtypes # cgkit 1.x
import numpy
import numpy as np
import scipy
import scipy.io
import scipy.sparse  # get_code_for_var() calls scipy.sparse.issparse
# Canonical ordering of the six cube-map faces; flatten_cubemap() and
# unflatten_cubemap() both rely on this exact sequence.
cube_order = ['posx','negx','posy','negy','posz','negz']
def mag(vec):
    """Return the Euclidean length of a 1-D vector."""
    arr = numpy.asarray(vec)
    assert arr.ndim == 1
    return math.sqrt(numpy.sum(arr ** 2.0))
def normalize(vec):
    """Return *vec* scaled to unit length (1-D vectors only)."""
    # The length computation is inlined here (same math as mag()).
    arr = numpy.asarray(vec)
    assert arr.ndim == 1
    return arr / math.sqrt(numpy.sum(arr ** 2.0))
def get_mean_interommatidial_distance( receptor_dirs, triangles ):
    """Return the mean angular distance to neighbors, in radians, per receptor.

    Two receptors are neighbors when they share a triangle of the mesh.

    Parameters
    ----------
    receptor_dirs : sequence of unit 3-vectors
        Viewing direction of each receptor (assumed unit length).
    triangles : sequence of index triples
        Mesh connectivity over the receptor indices.

    Returns
    -------
    list of float
        Mean neighbor angle (radians) for each receptor; NaN for a
        receptor that appears in no triangle.
    """
    # Build the vertex adjacency in a single pass over the triangles.
    # (The previous implementation rescanned every triangle for every
    # vertex, which was O(V*T).)
    neighbors_of = {}
    for tri in triangles:
        for iv in tri:
            neighbors_of.setdefault(iv, set()).update(tri)

    mean_thetas = []
    for iv, v in enumerate(receptor_dirs):
        neighbors = neighbors_of.get(iv, set()) - {iv}
        # Clip guards against round-off pushing |cos| above 1, which would
        # make arccos return NaN for exactly parallel neighbor directions.
        theta_neighbors = [
            numpy.arccos(numpy.clip(numpy.dot(receptor_dirs[int(n)], v),
                                    -1.0, 1.0))
            for n in neighbors]
        mean_thetas.append(numpy.mean(theta_neighbors))
    return mean_thetas
def make_receptor_sensitivities(all_d_q,delta_rho_q=None,res=64):
    """Build per-receptor Gaussian weight maps over a cube map.

    all_d_q are visual element directions as a 3-vector
    delta_rho_q (angular sensitivity) is in radians; scalar or one value
    per receptor.
    res is the pixel resolution of each (square) cube face.

    Returns a list (one entry per receptor) of dicts mapping face name
    (see cube_order) to a normalized res x res weight array.
    """
    if delta_rho_q is None:
        raise ValueError('must specify delta_rho_q (in radians)')
    # Broadcast a scalar sensitivity to all receptors; otherwise validate
    # that one value per receptor was provided.
    if isinstance( delta_rho_q, float):
        all_delta_rho_qs = delta_rho_q*numpy.ones( (len(all_d_q),), dtype=numpy.float64)
    else:
        all_delta_rho_qs = numpy.asarray(delta_rho_q)
        if len(all_delta_rho_qs.shape) != 1:
            raise ValueError("delta_rho_q must be scalar or vector")
        if all_delta_rho_qs.shape[0] != len(all_d_q):
            raise ValueError("if delta_rho_q is a vector, "
                             "it must have the same number of "
                             "elements as receptors")

    def G_q(zeta,delta_rho_q):
        # gaussian
        # From Snyder (1979) as cited in Burton & Laughlin (2003)
        return numpy.exp( -4*math.log(2)*abs(zeta)**2 / delta_rho_q**2 )

    # Pixel-center coordinates in [-1, 1) along one face edge.
    half_res = res//2
    vals = (numpy.arange(res)-half_res)/half_res

    weight_maps = []

    # setup vectors for initial face (posx); each pixel direction is stored
    # as a pure quaternion so it can be rotated onto the other faces.
    face_vecs = {}
    face_vecs['posx'] = []
    x = 1
    for z in vals:
        this_row_vecs = []
        for y in vals:
            on_cube_3d = (x,y,z)
            #print('on_cube_3d %5.2f %5.2f %5.2f'%on_cube_3d)
            v3norm = normalize(on_cube_3d) # get direction of each pixel
            p_p = cgtypes.quat(0.0, v3norm[0], v3norm[1], v3norm[2])
            this_row_vecs.append(p_p)
        this_row_vecs.reverse()
        face_vecs['posx'].append( this_row_vecs )

    def rot_face( facedict, facename, rotq):
        # Derive a face by rotating every posx pixel direction by rotq
        # (quaternion conjugation: rotq * v * rotq^-1).
        facedict[facename] = []
        for row in facedict['posx']:
            this_row_vecs = []
            for col in row:
                this_row_vecs.append( rotq*col*rotq.inverse() )
            facedict[facename].append( this_row_vecs )

    # Build the remaining five faces from posx by rotation.
    rotq = cgtypes.quat()
    rotq = rotq.fromAngleAxis(math.pi/2.0,cgtypes.vec3(0,0,1))
    rot_face( face_vecs, 'posy', rotq)

    rotq = cgtypes.quat()
    rotq = rotq.fromAngleAxis(math.pi,cgtypes.vec3(0,0,1))
    rot_face( face_vecs, 'negx', rotq)

    rotq = cgtypes.quat()
    rotq = rotq.fromAngleAxis(-math.pi/2.0,cgtypes.vec3(0,0,1))
    rot_face( face_vecs, 'negy', rotq)

    rotq = cgtypes.quat()
    rotq = rotq.fromAngleAxis(math.pi/2.0,cgtypes.vec3(0,-1,0))
    rot_face( face_vecs, 'posz', rotq)

    rotq = cgtypes.quat()
    rotq = rotq.fromAngleAxis(math.pi/2.0,cgtypes.vec3(0,1,0))
    rot_face( face_vecs, 'negz', rotq)

    # convert from quat to vec3
    rfv = {}
    for key in face_vecs:
        rows = face_vecs[key]
        rfv[key] = []
        for row in rows:
            this_row = [ cgtypes.vec3(col.x, col.y, col.z) for col in row ] # convert to vec3
            rfv[key].append( this_row )

    def get_weight_map(fn, rfv, d_q, delta_rho_q):
        # Gaussian falloff with angle between pixel direction and receptor.
        angles = numpy.zeros( (vals.shape[0], vals.shape[0]), dtype=numpy.float64 )
        for i, row_vecs in enumerate(rfv[fn]):
            for j, ovec in enumerate(row_vecs):
                angles[i,j] = d_q.angle(ovec)
        wm = G_q(angles,delta_rho_q)
        return wm

    for dqi,(d_q,this_delta_rho_q) in enumerate(zip(all_d_q,all_delta_rho_qs)):
        weight_maps_d_q = {}
        ssf = 0.0
        for fn in cube_order:
            wm = get_weight_map(fn, rfv, d_q, this_delta_rho_q)
            weight_maps_d_q[fn] = wm
            ssf += numpy.sum( wm.flat )
        # normalize so each receptor's six faces sum to 1
        for mapname in weight_maps_d_q:
            wm = weight_maps_d_q[mapname]
            weight_maps_d_q[mapname] = wm/ssf
        # save maps by receptor direction
        weight_maps.append( weight_maps_d_q )
    return weight_maps
def flatten_cubemap( cubemap ):
    """Concatenate the six faces of *cubemap* into one rank-1 array,
    in canonical cube_order face order."""
    faces = [numpy.ravel(cubemap[face]) for face in cube_order]
    return numpy.concatenate(faces, axis=0)
def unflatten_cubemap( rank1 ):
    """Inverse of flatten_cubemap(): split a rank-1 vector into a dict of
    six square faces keyed by the names in cube_order."""
    flat = np.asarray(rank1)
    assert flat.ndim == 1
    total_n_pixels = flat.shape[0]
    n_pixels_per_face = total_n_pixels // 6
    # Faces are square, so the side length is the square root.
    n_pixels_per_side = int(np.sqrt(n_pixels_per_face))
    assert 6 * n_pixels_per_side ** 2 == total_n_pixels
    cubemap = {}
    for face_idx, face_name in enumerate(cube_order):
        begin = face_idx * n_pixels_per_face
        face = flat[begin:begin + n_pixels_per_face]
        cubemap[face_name] = np.reshape(
            face, (n_pixels_per_side, n_pixels_per_side))
    return cubemap
def make_repr_able(x):
    """Recursively wrap cgtypes values in repr-round-trippable subclasses."""
    if isinstance(x, cgtypes.vec3):
        return repr_vec3(x)
    if isinstance(x, cgtypes.quat):
        return repr_quat(x)
    if isinstance(x, list):
        # Recurse so nested lists are converted element by element.
        return [make_repr_able(item) for item in x]
    return x
class repr_vec3(cgtypes.vec3):
    """vec3 whose repr() is valid python that rebuilds an equal vec3."""
    def __repr__(self):
        return 'vec3({0!r}, {1!r}, {2!r})'.format(self.x, self.y, self.z)
class repr_quat(cgtypes.quat):
    """quat whose repr() is valid python that rebuilds an equal quat."""
    def __repr__(self):
        return 'quat({0!r}, {1!r}, {2!r}, {3!r})'.format(
            self.w, self.x, self.y, self.z)
def test_repr():
    """Round-trip repr_vec3/repr_quat values through repr() and eval().

    Fixes for Python 3: map() returns an iterator there, so its result
    cannot be indexed -- the original ``y2 = map(...)`` followed by
    ``y2[0]`` raised TypeError.  Results are now materialized with
    list().  eval() also needs ``vec3``/``quat`` bound locally, since the
    reprs reference those bare names.
    """
    # Bind the names the evaluated reprs refer to.
    vec3 = cgtypes.vec3
    quat = cgtypes.quat

    x = repr_vec3(1, 2, 3.0000001)
    ra = repr(x)
    x2 = eval(ra)
    assert x2.z == x.z

    y = [cgtypes.vec3(1, 2, 3.0000001)]
    y2 = list(map(make_repr_able, y))
    assert y[0].z == y2[0].z

    x = repr_quat(0.1, 1, 2, 3.0000001)
    ra = repr(x)
    x2 = eval(ra)
    assert x2.z == x.z

    y = [cgtypes.quat(0.1, 1, 2, 3.0000001)]
    y2 = list(map(make_repr_able, y))
    assert y[0].z == y2[0].z

    y3 = [y]
    y4 = list(map(make_repr_able, y3))
    assert y3[0][0].z == y4[0][0].z
def save_as_python( fd, var, varname, fname_extra=None ):
    """Write python source that reconstructs *var* as *varname* to *fd*.

    fname_extra, if given, is appended to varname to form the prefix of
    any side-car data file (see get_code_for_var).
    """
    extra = '' if fname_extra is None else fname_extra
    code = get_code_for_var(varname, varname + extra, var)
    fd.write(code)
def get_code_for_var( name, fname_prefix, var):
    """Return python source that rebuilds *var* under the name *name*.

    Arrays and sparse matrices are written to a side-car .mat file named
    after fname_prefix, and the returned code loads them back; everything
    else is serialized via repr() and verified by eval()ing it again.
    """
    if (isinstance(var,numpy.ndarray) or
        scipy.sparse.issparse(var)):
        if 0:
            # save as Matrix Market file
            fname = fname_prefix + '.mtx'
            scipy.io.mmwrite( fname, var )
            result = '%s = scipy.io.mmread(os.path.join(datadir,"%s"))\n'%(name,fname)
        else:
            # save as compressed MATLAB .mat file
            fname = fname_prefix + '.mat'
            fd = open( fname, mode='wb' )
            savedict = {name:var}
            #scipy.io.savemat(fname, savedict, format='5' )
            scipy.io.savemat(fd, savedict)
            result = '%s = scipy.io.loadmat(open(os.path.join(datadir,"%s"),mode="rb"),struct_as_record=False)["%s"]\n'%(name,fname,name)
        return result

    if 1:
        ra = repr(var)
        # now check that conversion worked
        # put these in the namespace so eval() of 'vec3(...)'/'quat(...)'
        # style reprs (see repr_vec3/repr_quat) can resolve the names
        vec3 = cgtypes.vec3
        quat = cgtypes.quat
        try:
            cmp = eval(ra)
        except Exception as err:
            import traceback
            print('the following exception will trigger a RuntimeError("eval failed") call:')
            traceback.print_exc()
            raise RuntimeError("eval failed, check other traceback printed above")
        else:
            if cmp==var:
                return '%s = '%(name,)+ra+'\n'
            else:
                if 1:
                    # This is a crazy test because equality testing in
                    # cgkit 1.x doesn't seem to work very well.
                    is_sequence = False
                    try:
                        len(var)
                        is_sequence = True
                    # NOTE(review): bare except silently treats any failure
                    # as "not a sequence" -- TypeError is what len() raises.
                    except:
                        pass
                    if is_sequence:
                        assert len(var) == len(cmp)
                        for idx in range(len(var)):
                            if var[idx] != cmp[idx]:
                                # Fall back to textual comparison because
                                # cgkit equality is unreliable (see above).
                                if repr(var[idx]) == repr(cmp[idx]):
                                    continue
                                warnings.warn('equality failure')
                                continue
                                ## raise RuntimeError("equality failure at idx %d. Original = %s, new = %s"%(
                                ##     idx,repr(var[idx]),repr(cmp[idx])))
                        # hmm, why weren't these equal? i guess there's more precision than repr() checks?
                        return '%s = '%(name,)+ra+'\n'
                    else:
                        raise RuntimeError("failed conversion for %s (type %s)"%(repr(var),str(type(var))))
def xyz2lonlat(x,y,z):
    """Convert a unit 3-vector to (longitude, latitude) in degrees.

    Values of z slightly outside [-1, 1] (|z| < 1.1), which arise from
    floating-point round-off on normalized vectors, are clamped to the
    nearest pole; anything further out re-raises math.asin's ValueError.
    The original only tolerated overshoot past +1; the -1 side is now
    handled symmetrically.
    """
    R2D = 180.0/math.pi
    try:
        lat = math.asin(z)*R2D
    except ValueError:
        if z > 1 and z < 1.1:
            lat = math.asin(1.0)*R2D
        elif z < -1 and z > -1.1:
            lat = math.asin(-1.0)*R2D
        else:
            raise
    lon1 = math.atan2(y,x)*R2D
    return lon1,lat
| |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""RealNVP generative model for molecule nets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import rev_GNN
import real_NVP
flags = tf.app.flags
# TF1-style command-line switches controlling the prior distributions and
# latent-space handling of the generator.
flags.DEFINE_bool('trainable_variance', False,
                  'Whether to use trainable variance for the prior or not.')
flags.DEFINE_bool('use_discrete_dist', False,
                  'Whether to use discrete distribution for the prior.')
flags.DEFINE_bool('dirichlet', False,
                  'Whether to use Dirichlet prior or not for the omegas.')
flags.DEFINE_bool('beta_adj', False,
                  'Whether to use beta prior for the adjacency matrix.')
flags.DEFINE_bool('perturb_latent', False,
                  'Whether to make perturbations in the latent space.')
flags.DEFINE_bool('use_node_embedding', False,
                  'Whether to first convert nodes into a small embedding.')
flags.DEFINE_bool('time_dependent_prior', True,
                  'Whether to use prior which is time dependent.')
FLAGS = flags.FLAGS
class GraphGenerator(object):
  def __init__(self,
               hparams,
               params,
               name='graph-gen'):
    """Configure the generator.

    Args:
      hparams: hyperparameter object providing node_dim, num_upper_nvp and
        num_lower_nvp.
      params: dict with 'n_node_types' and 'n_edge_types' entries.
      name: scope name; not used in this constructor.
    """
    self.hparams = hparams
    self.node_dim = hparams.node_dim
    self.num_upper_nvp = hparams.num_upper_nvp
    self.num_lower_nvp = hparams.num_lower_nvp
    self.n_node_types = params['n_node_types']
    self.n_edge_types = params['n_edge_types']
    # Fixed scalar prior parameters (alpha/beta pairs), used when the
    # prior is not time dependent.
    self.omega_alpha = 4.0
    self.omega_beta = 8.0
    self.z_alpha = 4.0
    self.z_beta = 8.0
    if FLAGS.use_edge_features:
      self.ef_beta = 4.0
      self.ef_alpha = 8.0
    # If we want to have a schedule over the prior sharpness, make it a new
    # variable.
    if not FLAGS.time_dependent_prior:
      return
    with tf.variable_scope('time_prior', reuse=tf.AUTO_REUSE):
      # Non-trainable so the schedule is driven externally through
      # assign_op() rather than by the optimizer.
      self.omega_alpha=tf.get_variable('omega_alpha', trainable=False,
                                       initializer=tf.constant(value=2.0))
      self.omega_beta = tf.get_variable('omega_beta', trainable=False,
                                        initializer=tf.constant(value=4.0))
      self.z_alpha = tf.get_variable('z_alpha', trainable=False,
                                     initializer=tf.constant(value=2.0))
      self.z_beta =tf.get_variable('z_beta', trainable=False,
                                   initializer=tf.constant(value=4.0))
      if FLAGS.use_edge_features:
        self.ef_alpha = tf.get_variable('ef_alpha', trainable=False,
                                        initializer=tf.constant(value=2.0))
        self.ef_beta = tf.get_variable('ef_beta', trainable=False,
                                       initializer=tf.constant(value=4.0))
def assign_op(self, assign_ph):
assgn_op1 = tf.assign(self.omega_alpha, assign_ph['omega_alpha'])
assgn_op2 = tf.assign(self.omega_beta, assign_ph['omega_beta'])
assgn_op3 = tf.assign(self.z_alpha, assign_ph['z_alpha'])
assgn_op4 = tf.assign(self.z_beta, assign_ph['z_beta'])
assgn_list = [assgn_op1, assgn_op2, assgn_op3, assgn_op4]
if FLAGS.use_edge_features:
assgn_op5 = tf.assign(self.ef_alpha, assign_ph['ef_alpha'])
assgn_op6 = tf.assign(self.ef_beta, assign_ph['ef_beta'])
assgn_list.append(assgn_op5)
assgn_list.append(assgn_op6)
return assgn_list
def set_assign_placeholders(self):
placeholders = dict()
placeholders['omega_alpha'] = tf.placeholder(tf.float32)
placeholders['omega_beta'] = tf.placeholder(tf.float32)
placeholders['z_alpha'] = tf.placeholder(tf.float32)
placeholders['z_beta'] = tf.placeholder(tf.float32)
assgn_var_list = [self.omega_alpha, self.omega_beta,
self.z_alpha, self.z_beta]
if FLAGS.use_edge_features:
placeholders['ef_alpha'] = tf.placeholder(tf.float32)
placeholders['ef_beta'] = tf.placeholder(tf.float32)
assgn_var_list.append(self.ef_alpha)
assgn_var_list.append(self.ef_beta)
return placeholders, assgn_var_list
def set_inputs(self):
"""Placeholders to be used while training the network."""
self.z_in = tf.placeholder(tf.float32, [None, None, None])
self.omega_in = tf.placeholder(tf.float32, [None, None, 2*self.node_dim])
# Assumed to be just a number; inflate the matrix to use in case needed
self.edge_features = tf.placeholder(tf.float32, [None, None,
None, self.n_edge_types+1])
self.dropout_rate = tf.placeholder(tf.float32)
self.mask = tf.placeholder(tf.float32, [None, None])
placeholders = dict()
placeholders['z_in'] = self.z_in
placeholders['omega_in'] = self.omega_in
placeholders['edge_in'] = self.edge_features
placeholders['mask_in'] = self.mask
return placeholders
  def setup(self, is_training=True, sample=False):
    """Build the RealNVP estimator graph and return its log-prob/sample ops.

    Args:
      is_training: forwarded to the estimator build and model_fn.
      sample: if True, build the sampling graph instead of the training one.

    Returns:
      (log_prob, out, edge_feat) when sample is True, else log_prob.
    """
    real_nvp_estimator = real_NVP.RealNVP(
        num_coupling_layers=self.num_upper_nvp,
        event_ndims=0,
        name='real-nvp-estimator')
    params = dict()
    params['is_training'] = is_training
    if FLAGS.use_node_embedding:
      # First convert the node vectors into an embedding representation in
      # continuous space and then use it for the generation process. Note
      # that we need an invertible matrix transformation.
      omega_in = self.omega_in
      with tf.variable_scope('node_embedding', reuse=tf.AUTO_REUSE):
        shape = (2*self.node_dim, 2*self.node_dim)
        emb1 = tf.get_variable('emb1', shape=shape)
        emb2 = tf.get_variable('emb2', shape=shape)
        # Mask emb1 to its strictly upper triangle (np.triu, k=1).
        emb1 = emb1 * tf.convert_to_tensor(np.triu(np.ones(shape), k=1),
                                           dtype=tf.float32)
        # NOTE(review): np.tril(..., k=1) keeps the diagonal AND the first
        # superdiagonal; a strictly lower-triangular mask would use k=-1 --
        # confirm which is intended.
        emb2 = emb2 * tf.convert_to_tensor(np.tril(np.ones(shape), k=1),
                                           dtype=tf.float32)
        # Elementwise exp of the masked matrices; guarded against NaN/Inf.
        lower = tf.check_numerics(tf.exp(emb2),
                                  "tf.exp(emb2) is not numerically stable.")
        upper = tf.check_numerics(tf.exp(emb1),
                                  "tf.exp(emb1) is not numerically stable.")
        batch_size = tf.shape(omega_in)[0]
        num_nodes = tf.shape(omega_in)[1]
        # Apply the (lower, upper) transform per node vector.
        temp_omega_in = tf.matmul(
            tf.reshape(self.omega_in, [-1, 2*self.node_dim]),
            lower)
        temp_omega_in = tf.matmul(temp_omega_in, upper)
        temp_omega_in = tf.reshape(temp_omega_in, [batch_size,
                                                   num_nodes,
                                                   2*self.node_dim])
        # Log-determinant contribution taken from the diagonals of the raw
        # emb matrices.
        # NOTE(review): those diagonals are zeroed by the masks above --
        # confirm the log-det is meant to come from the unmasked variables.
        mask = tf.diag(tf.ones([2*self.node_dim]))
        log_det = tf.expand_dims(tf.reduce_sum(emb1*mask), 0)
        log_det += tf.expand_dims(tf.reduce_sum(emb2*mask), 0)
        self.log_det_inverse = log_det
        self.lower = lower
        self.upper = upper
        self.omega_val = temp_omega_in
    real_nvp_estimator.build(params, self.hparams,
                             adj_fn=self._adj_fn,
                             translate_fn=self._translation_fn,
                             scale_fn=self._scale_fn,
                             is_training=is_training)
    if sample:
      log_prob, out, edge_feat = self.sample_fn(real_nvp_estimator)
      return log_prob, out, edge_feat
    log_prob = self.model_fn(real_nvp_estimator, is_training)
    return log_prob
def input_prior(self, x, is_training, **kwargs):
z, omega = x
if FLAGS.perturb_latent:
z += tf.random_normal(tf.shape(z),
mean=0.0, stddev=0.05, dtype=tf.float32)
omega += tf.random_normal(tf.shape(omega),
mean=0.0, stddev=0.05, dtype=tf.float32)
batch_size = tf.shape(z)[0]
num_nodes = tf.shape(omega)[1]
mask_col = tf.reshape(self.mask, [batch_size, num_nodes, 1])
mask_again = tf.multiply(mask_col,
tf.transpose(mask_col, (0, 2, 1)))
# beta prior over transformed z
# z -> log sigmoid(z)
# Assuming sigmoid(z) is a sample from the beta prior, we get that
# p(z) = p(sigmoid(z)) + log(sigmoid(z)) + log (1 - sigmoid(z))
# where p(sigmoid(z)) is drawn from beta distribution.
beta_dist = tf.distributions.Beta(concentration1=2.0,
concentration0=2.0)
log_sigmoid_z = -tf.nn.softplus(-z)
unnorm_prob = ((beta_dist.concentration1 - 1.) * log_sigmoid_z
+ (beta_dist.concentration0 - 1.) * (-z + log_sigmoid_z))
norm_const = (tf.lgamma(beta_dist.concentration1)
+ tf.lgamma(beta_dist.concentration0)
- tf.lgamma(beta_dist.total_concentration))
# beta prior value
log_prob = unnorm_prob - norm_const
# log (sigmoid(z)) term
log_prob += log_sigmoid_z
# log(1 - sigmoid(z)) term
log_prob += (-z + log_sigmoid_z)
log_prob = log_prob*mask_again
log_density_z = tf.reduce_sum(log_prob, axis=[1,2])
# We use the beta distribution prior on the input features z too,Log
# where we try to enforce the fact that the model is unable to predict any
# nodes initially, and only by virtue of the transformations it is able
# to generate nodes and labels.
beta_dist = tf.distributions.Beta(concentration1=2.0,
concentration0=4.0)
log_sigmoid_omega = -tf.nn.softplus(-omega)
unnorm_prob = ((beta_dist.concentration1 - 1.) * log_sigmoid_omega
+ (beta_dist.concentration0 - 1.) * (-omega + log_sigmoid_omega))
norm_const = (tf.lgamma(beta_dist.concentration1)
+ tf.lgamma(beta_dist.concentration0)
- tf.lgamma(beta_dist.total_concentration))
# beta prior value
log_prob = unnorm_prob - norm_const
# log (sigmoid(omega)) term
log_prob += log_sigmoid_omega
# log(1 - sigmoid(omega)) term
log_prob += (-omega + log_sigmoid_omega)
log_prob = log_prob*mask_col
log_density_omega = tf.reduce_sum(log_prob, axis=[1,2])
# We use the beta distribution for the edge features too, this means that
# we draw sigmoid probabilities for each of the labels for the
# edge features independently.
log_density_edge = 0.0
if FLAGS.use_edge_features:
edge = kwargs['edge_feat']
beta_dist = tf.distributions.Beta(concentration1=2.0,
concentration0=2.0*(self.n_edge_types+1))
log_sigmoid_edge = -tf.nn.softplus(-edge)
unnorm_prob = ((beta_dist.concentration1 - 1.) * log_sigmoid_edge
+ (beta_dist.concentration0 - 1.) * (-edge + log_sigmoid_edge))
norm_const = (tf.lgamma(beta_dist.concentration1)
+ tf.lgamma(beta_dist.concentration0)
- tf.lgamma(beta_dist.total_concentration))
# beta prior value
log_prob = unnorm_prob - norm_const
# log (sigmoid(omega)) term
log_prob += log_sigmoid_edge
# log(1 - sigmoid(omega)) term
log_prob += (-edge + log_sigmoid_edge)
log_prob = log_prob*tf.expand_dims(mask_again, 3)
# log_prob = log_prob*tf.stop_gradient(tf.expand_dims(tf.nn.sigmoid(z),
# axis=3))
log_density_edge = tf.reduce_sum(log_prob, axis=[1,2, 3])
total_log_prob = log_density_z + log_density_omega
if FLAGS.use_edge_features:
total_log_prob += log_density_edge
total_log_prob = tf.Print(total_log_prob,
[tf.reduce_mean(total_log_prob),
tf.reduce_mean(log_density_z),
tf.reduce_mean(log_density_omega),
tf.reduce_mean(log_density_edge)],
message='prior_density', summarize=30)
return total_log_prob
def model_fn(self, real_nvp_estimator, is_training):
omega_in = self.omega_in
if FLAGS.use_node_embedding:
omega_in = self.omega_val
log_prob = real_NVP.real_nvp_model_fn(real_nvp_estimator,
self.z_in,
omega_in,
self.input_prior,
is_training,
mask=self.mask,
edge_feat=self.edge_features)
if FLAGS.use_node_embedding:
log_prob += self.log_det_inverse
return log_prob
def sample_fn(self, real_nvp_estimator):
"""Sample fn but needs to handle mask here too."""
log_prob, out, edge_feat = real_NVP.real_nvp_sample_fn(real_nvp_estimator,
self.z_in,
self.omega_in,
self.input_prior,
is_training=False,
mask=self.mask,
edge_feat=self.edge_features)
return log_prob, out, edge_feat
def _translation_fn(self, omega, z_dims):
"""Now accounts for both an update in Z as well as an update in edge
features."""
if self.hparams.use_dot_product_distance:
omega_t = tf.transpose(omega, perm=[0, 2, 1])
similarity = tf.matmul(omega, omega_t)
return similarity
elif self.hparams.use_similarity_in_space:
with tf.variable_scope('translation_fn', reuse=tf.AUTO_REUSE):
h_omega = real_NVP.mlp(omega,
[self.hparams.omega_hidden1, self.hparams.omega_hidden2],
activation_fn=tf.nn.tanh,
output_nonlinearity=None,
regularizer=None)
if FLAGS.l2_normalize:
h_omega = tf.nn.l2_normalize(h_omega, dim=2)
h_omega_t = tf.transpose(h_omega, perm=[0, 2, 1])
similarity = tf.matmul(h_omega, h_omega_t)
return similarity
elif True:
# Two headed neural net which gives me a score as well as a softmax
# over edge features. We want to learn to generate Z using a combination
# of the current node states as well as a context vector for all the
# nodes.
omega = tf.nn.sigmoid(omega)
mask = tf.expand_dims(self.mask, 2)
# Interpret omega as a distribution over labels.
with tf.variable_scope('translation_fn', reuse=tf.AUTO_REUSE):
h_omega = real_NVP.mlp(omega,
[self.hparams.omega_hidden1, self.hparams.omega_hidden2],
activation_fn=tf.nn.relu,
output_nonlinearity=None,
regularizer=None)
with tf.variable_scope('context_scope', reuse=tf.AUTO_REUSE):
context_omega = real_NVP.mlp(
omega,
[self.hparams.omega_hidden1, self.hparams.omega_hidden2],
activation_fn=tf.nn.relu,
output_nonlinearity=None,
regularizer=None)
context_omega = tf.reduce_sum(context_omega*mask, axis=1)
h_omega_new = tf.expand_dims(h_omega*mask, axis=2)
h_omega_perm = h_omega_new + tf.transpose(h_omega_new, perm=[0,2,1,3])
batch_size = tf.shape(h_omega_perm)[0]
num_nodes = tf.shape(h_omega_perm)[1]
node_feat = tf.reshape(h_omega_perm, [batch_size,
num_nodes*num_nodes,
self.hparams.omega_hidden2])
node_feat = tf.concat([node_feat,
tf.tile(tf.expand_dims(context_omega, 1),
[1, num_nodes*num_nodes, 1])], axis=2)
node_feat = tf.reshape(node_feat, [batch_size, num_nodes*num_nodes,
2*self.hparams.omega_hidden2])
with tf.variable_scope('node_features', reuse=tf.AUTO_REUSE):
z_mat = real_NVP.mlp(node_feat,
[self.hparams.combiner_hidden1, 1],
activation_fn=tf.nn.tanh,
output_nonlinearity=tf.log_sigmoid,
regularizer=None)
z_mat = tf.reshape(z_mat, [batch_size, num_nodes, num_nodes, 1])
# Now the edge features, we know they come from n_node_dims
if FLAGS.use_edge_features:
with tf.variable_scope('edge_translation', reuse=tf.AUTO_REUSE):
edge_feat = real_NVP.mlp(
node_feat,
[self.hparams.combiner_hidden1, self.n_edge_types+1],
activation_fn=tf.nn.tanh,
output_nonlinearity=tf.nn.log_softmax,
regularizer=None)
edge_feat = tf.reshape(edge_feat,
[batch_size, num_nodes,
num_nodes, self.n_edge_types+1])
return tf.squeeze(z_mat, 3), edge_feat
return tf.squeeze(z_mat, 3)
  def _scale_fn(self, omega, z_dims):
    """Scale function for the coupling layers.

    Currently hard-disabled: always returns an all-zero
    (batch, nodes, nodes) scale, making each coupling layer
    translation-only.
    """
    return tf.zeros([tf.shape(omega)[0], tf.shape(omega)[1],
                     tf.shape(omega)[1]], dtype=tf.float32)
    # NOTE(review): everything below is unreachable because of the early
    # return above -- it looks like a kept-around learned-scale variant;
    # remove the return to re-enable it.
    with tf.variable_scope('scale_fn', reuse=tf.AUTO_REUSE):
      # omega = tf.reshape(omega, [tf.shape(omega)[0], -1])
      s_omega = real_NVP.mlp(omega,
          [self.hparams.omega_scale1, self.hparams.omega_scale2],
          activation_fn=tf.nn.tanh,
          output_nonlinearity=None,
          regularizer=None)
    s_omega_new = tf.expand_dims(s_omega, axis=2)
    s_omega_perm = s_omega_new + tf.transpose(s_omega_new, perm=[0, 2, 1, 3])
    batch_size = tf.shape(s_omega_perm)[0]
    num_nodes = tf.shape(s_omega_perm)[1]
    node_feat = tf.reshape(s_omega_perm, [batch_size, num_nodes*num_nodes,
                                          self.hparams.omega_scale2])
    with tf.variable_scope('final_scaling', reuse=tf.AUTO_REUSE):
      s_mat = real_NVP.mlp(
          node_feat,
          [self.hparams.combiner_hidden1, 1],
          activation_fn=tf.nn.tanh,
          output_nonlinearity=tf.log_sigmoid,
          regularizer=None)
    s_mat = tf.reshape(s_mat, [batch_size, num_nodes, num_nodes])
    return s_mat
  def _adj_fn(self, z_matrix):
    """Map adjacency logits to (0, 1) edge probabilities via sigmoid."""
    return tf.nn.sigmoid(z_matrix)
| |
# Copyright (c) 2011 Jeff Garzik
#
# Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
#
# Copyright (c) 2007 Jan-Klaas Kollhof
#
# This file is part of jsonrpc.
#
# jsonrpc is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""HTTP proxy for opening RPC connection to bitcoind.
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
"""
import base64
import decimal
from http import HTTPStatus
import http.client
import json
import logging
import os
import socket
import time
import urllib.parse
# Default per-request timeout (seconds) for RPC HTTP connections.
HTTP_TIMEOUT = 30
# Sent as the User-Agent header on every RPC request.
USER_AGENT = "AuthServiceProxy/0.1"
# Module-level logger used for RPC request/response tracing.
log = logging.getLogger("BitcoinRPC")
class JSONRPCException(Exception):
    """Error returned by the RPC server (or synthesized by the proxy).

    The exception message is formatted as "<message> (<code>)" when
    rpc_error is a dict carrying both keys; otherwise it is empty.
    """
    def __init__(self, rpc_error, http_status=None):
        try:
            message = '%(message)s (%(code)i)' % rpc_error
        except (KeyError, TypeError):
            # rpc_error lacks the expected keys or is not a mapping.
            message = ''
        super().__init__(message)
        # Keep the raw error object and HTTP status for callers to inspect.
        self.error = rpc_error
        self.http_status = http_status
def EncodeDecimal(o):
    """json.dumps default= hook: serialize Decimal values as their string form."""
    if not isinstance(o, decimal.Decimal):
        raise TypeError(repr(o) + " is not JSON serializable")
    return str(o)
class AuthServiceProxy():
    """Callable JSON-RPC proxy over a persistent HTTP(S) connection.

    Attribute access chains dotted service names (``proxy.a.b`` targets
    method "a.b"); calling the proxy serializes a JSON-RPC 1.1 request,
    POSTs it with Basic auth, and returns the decoded ``result`` (JSON
    floats parsed as Decimal).
    """
    # Class-wide counter so every request carries a unique, incrementing id.
    __id_count = 0

    # ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
    def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True):
        """Parse service_url, precompute the Basic auth header, open a connection.

        Args:
            service_url: full RPC URL; may embed user:password credentials.
            service_name: dotted RPC method name (normally set by __getattr__).
            timeout: socket timeout (seconds) for newly created connections.
            connection: an existing http.client connection to reuse, if any.
            ensure_ascii: forwarded to json.dumps when encoding request bodies.
        """
        self.__service_url = service_url
        self._service_name = service_name
        self.ensure_ascii = ensure_ascii  # can be toggled on the fly by tests
        self.__url = urllib.parse.urlparse(service_url)
        # Fix: fall back to empty credentials instead of raising TypeError
        # (None + b':') when the URL carries no username or password.
        user = b'' if self.__url.username is None else self.__url.username.encode('utf8')
        passwd = b'' if self.__url.password is None else self.__url.password.encode('utf8')
        authpair = user + b':' + passwd
        self.__auth_header = b'Basic ' + base64.b64encode(authpair)
        self.timeout = timeout
        self._set_conn(connection)

    def __getattr__(self, name):
        """Return a child proxy whose service name has *name* appended."""
        if name.startswith('__') and name.endswith('__'):
            # Python internal stuff
            raise AttributeError
        if self._service_name is not None:
            name = "%s.%s" % (self._service_name, name)
        return AuthServiceProxy(self.__service_url, name, connection=self.__conn)

    def _request(self, method, path, postdata):
        '''
        Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
        This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
        '''
        headers = {'Host': self.__url.hostname,
                   'User-Agent': USER_AGENT,
                   'Authorization': self.__auth_header,
                   'Content-type': 'application/json'}
        if os.name == 'nt':
            # Windows somehow does not like to re-use connections
            # TODO: Find out why the connection would disconnect occasionally and make it reusable on Windows
            self._set_conn()
        try:
            self.__conn.request(method, path, postdata, headers)
            return self._get_response()
        except http.client.BadStatusLine as e:
            if e.line == "''":  # if connection was closed, try again
                self.__conn.close()
                self.__conn.request(method, path, postdata, headers)
                return self._get_response()
            else:
                raise
        except (BrokenPipeError, ConnectionResetError):
            # Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset
            # ConnectionResetError happens on FreeBSD with Python 3.4
            self.__conn.close()
            self.__conn.request(method, path, postdata, headers)
            return self._get_response()

    def get_request(self, *args, **argsn):
        """Build the JSON-RPC 1.1 request dict (positional OR named params)."""
        AuthServiceProxy.__id_count += 1
        log.debug("-{}-> {} {}".format(
            AuthServiceProxy.__id_count,
            self._service_name,
            json.dumps(args or argsn, default=EncodeDecimal, ensure_ascii=self.ensure_ascii),
        ))
        if args and argsn:
            raise ValueError('Cannot handle both named and positional arguments')
        return {'version': '1.1',
                'method': self._service_name,
                'params': args or argsn,
                'id': AuthServiceProxy.__id_count}

    def __call__(self, *args, **argsn):
        """Issue the RPC; return its result or raise JSONRPCException."""
        postdata = json.dumps(self.get_request(*args, **argsn), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
        response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
        if response['error'] is not None:
            raise JSONRPCException(response['error'], status)
        elif 'result' not in response:
            raise JSONRPCException({
                'code': -343, 'message': 'missing JSON-RPC result'}, status)
        elif status != HTTPStatus.OK:
            raise JSONRPCException({
                'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
        else:
            return response['result']

    def batch(self, rpc_call_list):
        """POST a list of pre-built request dicts as one JSON-RPC batch."""
        postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
        log.debug("--> " + postdata)
        response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
        if status != HTTPStatus.OK:
            raise JSONRPCException({
                'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
        return response

    def _get_response(self):
        """Read and JSON-decode one HTTP response; return (body, status)."""
        req_start_time = time.time()
        try:
            http_response = self.__conn.getresponse()
        except socket.timeout:
            raise JSONRPCException({
                'code': -344,
                'message': '%r RPC took longer than %f seconds. Consider '
                           'using larger timeout for calls that take '
                           'longer to return.' % (self._service_name,
                                                  self.__conn.timeout)})
        if http_response is None:
            raise JSONRPCException({
                'code': -342, 'message': 'missing HTTP response from server'})
        content_type = http_response.getheader('Content-Type')
        if content_type != 'application/json':
            raise JSONRPCException(
                {'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)},
                http_response.status)
        responsedata = http_response.read().decode('utf8')
        # Parse floats as Decimal to avoid precision loss on amounts.
        response = json.loads(responsedata, parse_float=decimal.Decimal)
        elapsed = time.time() - req_start_time
        if "error" in response and response["error"] is None:
            log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed, json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
        else:
            log.debug("<-- [%.6f] %s" % (elapsed, responsedata))
        return response, http_response.status

    def __truediv__(self, relative_uri):
        """Return a proxy whose URL has '/<relative_uri>' appended."""
        return AuthServiceProxy("{}/{}".format(self.__service_url, relative_uri), self._service_name, connection=self.__conn)

    def _set_conn(self, connection=None):
        """Adopt an existing connection or (re)create one for the URL scheme."""
        port = 80 if self.__url.port is None else self.__url.port
        if connection:
            self.__conn = connection
            self.timeout = connection.timeout
        elif self.__url.scheme == 'https':
            self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=self.timeout)
        else:
            self.__conn = http.client.HTTPConnection(self.__url.hostname, port, timeout=self.timeout)
| |
#!/usr/bin/env python
"""This program is a five in row game, which is used for the coding
camp 2015 in WindRiver.com"""
import os
import sys, getopt
import pygame as pg
import threading
import json
from toolbox import button
from toolbox import tools
# Cloud API
from CloudAPI.node import Node
from CloudAPI.config import *
# Color palette used for drawing (RGB tuples).
#           R    G    B
GRAY = (100, 100, 100)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = ( 0, 255, 0)
BLUE = ( 0, 0, 255)
YELLOW = (255, 255, 0)
ORANGE = (255, 128, 0)
PURPLE = (255, 0, 255)
CYAN = ( 0, 255, 255)
BLACK = ( 0, 0, 0)
BRIGHT_GREEN = ( 0, 255, 0)
BRIGHT_RED = (255, 0, 0)
NAVYBLUE = ( 60, 60, 100)
# Game result codes -- presumably returned by the move/judging logic;
# confirm against the (off-chunk) code that consumes them.
DRAW = 0
CONTINUE = 1
WIN = 2
ERROR = 3
# Cloud backend key; init_for_cloud looks it up in cloud_configs.
cloud_service = "Mashery"
def usage():
    """Print command-line help, substituting the program name four times."""
    prog = os.path.basename(__file__)
    help_text = """\
Usage: %s <-i config_file> [options]
    -i, --ifile= Input the config file, which contains player user name,
                 screen width and hight, and input method, etc.
Options:
    -h, --help   Show this message
    -g, --gameid= Enter into watching mode and watch the game of [gameid]
Examples:
    %s -i config.json_pc
    %s -i config.json_touch
    %s -i config.json_watch -g 1
    """
    print (help_text % ((prog,) * 4))
if __name__ == "__main__":
inputfile = ''
watch_mode = 0
watch_game = -1
try:
opts, args = getopt.getopt(sys.argv[1:],"hi:g:",["help","ifile=", "gameid="])
except getopt.GetoptError as err:
print str(err)
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt in ("-i", "--ifile="):
inputfile = arg
elif opt in ("-g", "--gameid="):
watch_mode = 1
try:
watch_game = int(arg)
except:
print "Find latest game to watch", arg
if inputfile == '':
usage()
sys.exit(2)
else:
if not os.path.isfile(inputfile):
print "The file of input doesn't exit"
sys.exit(2)
#config = {'CHESS BOARD BLOCK COUNTS': 10, 'SCREEN WIDTH': 320, 'SCREEN HIGHT': 240,
# 'USER NAME': 'Charles', 'TOUCH SCREEN': True,
# 'CLOUD_SERVICE': 'Mashery',
# 'BOARD MARGIN LEFT': 15, 'BOARD MARGIN TOP': 15, 'CHESS RADIUS': 10,
# 'CLIENT ROLE': 1 # (BLACK) First Start
# }
#with open('config.json', 'w') as f:
# json.dump(config, f)
#exit()
#with open('config.json', 'r') as f:
with open(inputfile, 'r') as f:
config = json.load(f)
print "config:", config
CHESS_BOARD_BLOCK_COUNTS = config['CHESS BOARD BLOCK COUNTS']
SCREEN_WIDTH = config['SCREEN WIDTH']
SCREEN_HIGHT = config['SCREEN HIGHT']
TOUCH_SCREEN = config['TOUCH SCREEN']
BOARD_MARGIN_LEFT = config['BOARD MARGIN LEFT']
BOARD_MARGIN_TOP = config['BOARD MARGIN TOP']
CHESS_RADIUS = config['CHESS RADIUS']
USER_NAME = config['USER NAME']
SHOW_MOUSEMOTION = False
KEYBOARD_INPUT = config['KEYBOARD INPUT']
USER_NAME_TEXT_COLOR = config['USER NAME TEXT COLOR']
BOARD_GRID_LINE_COLOR = config['BOARD GRID LINE COLOR']
REFERENCE_POINTS_COLOR = config['REFERENCE POINTS COLOR']
REFERENCE_POINTS_RADIUS = config['REFERENCE POINTS RADIUS']
class Game(tools.States):
    def __init__(self):
        """Set up pygame, register with the cloud, and draw the initial board.

        Reads the module-level config globals (SCREEN_WIDTH, TOUCH_SCREEN,
        watch_mode, ...). Blocks while connecting to the cloud service and
        while fetching the competitor's name; finally starts a background
        thread that polls the cloud for moves.
        """
        #if TOUCH_SCREEN == True:
            #os.putenv('SDL_MOUSEDEV' , '/dev/input/event2')
        pg.init()
        if TOUCH_SCREEN == True:
            pg.mouse.set_visible(0)
        self.done = False
        self.scr = pg.display.set_mode((SCREEN_WIDTH,SCREEN_HIGHT))
        # Guest1
        if TOUCH_SCREEN == True:
            waiting_font_size = 12
        else:
            waiting_font_size = 20
        # Show a "connecting" banner while cloud registration happens below.
        text = "Connecting cloud server ..."
        self.waiting_text, self.waiting_rect = self.make_text(text, GREEN,
                                                (SCREEN_WIDTH // 2 ,
                                                 SCREEN_HIGHT // 2), waiting_font_size)
        self.scr.blit(self.waiting_text, self.waiting_rect)
        pg.display.update()
        # 1 Regist and start game get game ID, client role
        self.debug = True
        #self.role_id = '0' # Host as default
        self.seq_id = 0
        self.init_for_cloud()
        # Role "2" means spectator; otherwise register as a player and take
        # the role/game ids the cloud assigns.
        if watch_mode >0:
            self.role_id = "2"
            if watch_game >= 0:
                self.game_id = str(watch_game)
            else:
                #find lastest game
                self.game_id = str(self.findlatest_game())
        else:
            r = self.client_register()
            if not r:
                print("fails to first player register")
            else:
                r = json.loads(r)
                #print "### r", r
                #print("First player register: role id %s, game id %s" % (r["roleId"], r["gameId"]))
                self.game_id = r["gameId"]
                self.role_id = r["roleId"]
        self.clock = pg.time.Clock()
        # load background image
        self.board = pg.image.load('REC/resources/images/Board.png')
        self.black = pg.image.load('REC/resources/images/Black.png')
        self.white = pg.image.load('REC/resources/images/White.png')
        self.scr = pg.display.set_mode((SCREEN_WIDTH,SCREEN_HIGHT))
        self.board_margin_left = BOARD_MARGIN_LEFT
        self.board_margin_top = BOARD_MARGIN_TOP
        self.chess_radius = CHESS_RADIUS
        # The board occupies 83.3% of the screen width; the rest is the side
        # panel drawn by draw_user_info.
        self.block_width = ((SCREEN_WIDTH * 833 // 1000) - self.board_margin_left * 2) // ( CHESS_BOARD_BLOCK_COUNTS + 1 )
        self.block_hight = self.block_width
        self.shrinkx = SCREEN_WIDTH
        self.shrinky = SCREEN_HIGHT
        self.black_image = pg.transform.smoothscale(self.black, (self.chess_radius * 2 , self.chess_radius * 2))
        self.white_image = pg.transform.smoothscale(self.white, (self.chess_radius * 2 , self.chess_radius * 2))
        tools.States.__init__(self)
        self.won_game = False
        self.screen_rect = self.scr.get_rect()
        self.overlay = pg.Surface((self.screen_rect.width, self.screen_rect.height))
        self.overlay.fill(0)
        self.overlay.set_alpha(0)
        self.X = 0
        self.Y = 0
        if TOUCH_SCREEN == True:
            self.last_put_X = 8
            self.last_put_Y = 8
        else:
            self.last_put_X = 16
            self.last_put_Y = 16
        # 3 Show Board
        #self.board_width = 833
        #self.bg_width = 1000
        #self.block_width = (self.shrinkx * (self.board_width / self.bg_width) - self.board_margin_left * 2) / 15 - 1
        self.board_image = pg.transform.smoothscale(self.board, (self.shrinkx, self.shrinky))
        self.scr.blit(self.board_image, (0,0))
        if TOUCH_SCREEN == True:
            self.grid_width = 1
        else:
            self.grid_width = 2
        self.draw_grid(CHESS_BOARD_BLOCK_COUNTS)
        pg.display.flip()
        self.setup_btns()
        self.right_board_x = CHESS_BOARD_BLOCK_COUNTS*self.block_width+self.board_margin_left * 2
        # TODO (enabling quit in thread)
        # Get player 2 user name (blocking)
        self.competitor_name = self.get_competitor_name(self.game_id,self.role_id)
        self.draw_user_info()
        # Init chess focus
        self.cur_x = CHESS_BOARD_BLOCK_COUNTS // 2
        self.cur_y = self.cur_x
        if self.role_id == '0':
            self.set_last_chess_prompt(self.cur_x,self.cur_y)
        self.last_put_X = CHESS_BOARD_BLOCK_COUNTS + 10 #not exits
        self.last_put_Y = CHESS_BOARD_BLOCK_COUNTS + 10
        pg.display.update()
        # Board occupancy grid, (n+1) x (n+1) intersections, 0 = empty.
        self.grid = [[0 for x in range(CHESS_BOARD_BLOCK_COUNTS + 1)] for y in range(CHESS_BOARD_BLOCK_COUNTS + 1)]
        ### Your turn: Put down the first chess at the center of the board
        if self.role_id == '0':
            self.your_turn = True
        else:
            self.your_turn = False
        # WATCHING MODE
        self.fetch_data = True
        if self.role_id == "2":
            self.get_history_from_cloud()
        # Poll the cloud for the opponent's moves on a background thread.
        if self.fetch_data == True:
            self.T = threading.Thread(target=self.read_from_cloud)
            self.T.start()
    def draw_user_info(self):
        """Draw both players' chess icons and names in the right side panel.

        The competitor is shown near the top, the local player lower down;
        which color each gets depends on self.role_id.
        """
        # Guest1
        if TOUCH_SCREEN == True:
            name_font_size = 12
        else:
            name_font_size = 20
        # Competitor chess
        x1 = self.right_board_x + (SCREEN_WIDTH - self.right_board_x)/2 - self.chess_radius
        pg.display.update(self.scr.blit(self.black_image if self.role_id == '1' else self.white_image,
                        (x1,
                        1*self.block_hight + self.board_margin_top)))
        x1 = self.right_board_x + (SCREEN_WIDTH - self.right_board_x)/2
        text = self.competitor_name
        self.guest_text, self.guest_rect = self.make_text(text, USER_NAME_TEXT_COLOR,
                        (x1,
                        1*self.block_hight + self.board_margin_top - self.chess_radius), name_font_size)
        # Your chess
        x1 = self.right_board_x + (SCREEN_WIDTH - self.right_board_x)/2 - self.chess_radius
        pg.display.update(self.scr.blit(self.white_image if self.role_id == '1' else self.black_image,
                        (x1,
                        5*self.block_hight + self.board_margin_top)))
        text = self.user_name
        x1 = self.right_board_x + (SCREEN_WIDTH - self.right_board_x)/2
        self.host_text, self.host_rect = self.make_text(text, USER_NAME_TEXT_COLOR,
                        (x1,
                        5*self.block_hight + self.board_margin_top - self.chess_radius), name_font_size)
def set_dataitem(self,node, data_name, data_val):
data_id = node.dataId(data_name)
if self.debug:
print("setting data item %s = %s" % (data_id, str(data_val)))
if not node.setData(data_id, json.dumps(data_val)):
print("Fail to set data item %s = %s" % (data_id, data_val))
return False
return True
def get_dataitem(self, node, data_id):
val = node.getData(data_id)
if not val:
print("Fail to query data item %s" % data_id)
return None
if self.debug:
print("fetch data item %s = %s" % (data_id, str(val)))
return val
    def __update_role_id(self):
        """Return the current role id and toggle it between 0 and 1.

        NOTE(review): elsewhere in this class role_id is handled as a string
        ('0'/'1'/'2'), but the += / &= below require an int -- confirm the
        caller stores an int here, otherwise this raises TypeError.
        """
        r = self.role_id
        self.role_id += 1
        self.role_id &= 1
        if self.debug:
            print("assign new role id %d" % r)
        return r
    def init_for_cloud(self):
        """Create the cloud Node handle for the configured backend service."""
        self.node = Node(cloud_service, cloud_configs[cloud_service])
def client_register(self):
scripto = self.node.cloud.scripto()
registration = json.dumps({
"playerName": USER_NAME,
})
data = {
"registration": registration
}
r = scripto.execute('vlvRegistration', data)
return r
def draw_grid(self, n):
for i in range(0, n + 1):
# Rows
x1 = self.board_margin_left
y1 = self.board_margin_top + i * self.block_width
x2 = self.board_margin_left + n * self.block_width
y2 = self.board_margin_top + i * self.block_width
pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
# Columns
x1 = self.board_margin_left + i * self.block_width
y1 = self.board_margin_top
x2 = self.board_margin_left + i * self.block_width
y2 = self.board_margin_top + n * self.block_width
pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
# Reference points
## left top
x1 = self.board_margin_left + 2 * self.block_width
y1 = self.board_margin_top + 2 * self.block_width
pg.draw.circle(self.scr, REFERENCE_POINTS_COLOR, (x1, y1), REFERENCE_POINTS_RADIUS, 0)
## right top
x1 = self.board_margin_left + (n - 2) * self.block_width
y1 = self.board_margin_top + 2 * self.block_width
pg.draw.circle(self.scr, REFERENCE_POINTS_COLOR, (x1, y1), REFERENCE_POINTS_RADIUS, 0)
## left bottom
x1 = self.board_margin_left + 2 * self.block_width
y1 = self.board_margin_top + (n - 2) * self.block_width
pg.draw.circle(self.scr, REFERENCE_POINTS_COLOR, (x1, y1), REFERENCE_POINTS_RADIUS, 0)
## right bottom
x1 = self.board_margin_left + (n - 2) * self.block_width
y1 = self.board_margin_top + (n - 2) * self.block_width
pg.draw.circle(self.scr, REFERENCE_POINTS_COLOR, (x1, y1), REFERENCE_POINTS_RADIUS, 0)
    def patch_grid(self, n, x, y):
        """Redraw the short grid-line segments around intersection (x, y),
        covering board edges/corners and interior points in turn."""
        self.patch_grid_x0_xn(n, x, y)
        self.patch_grid_y0_yn(n, x, y)
        self.patch_grid_inner(n, x, y)
    def patch_grid_x0_xn(self, n, x, y):
        """Redraw grid segments at intersections on the left (x == 0) or
        right (x == n) board edge; no-op for other x values.

        Segments extend chess_radius pixels from the intersection, clipped
        at the board corners (y == 0 or y == n).
        """
        if x == 0:
            # Left edge.
            x1 = self.board_margin_left
            if y == 0:
                # Top-left corner: stubs to the right and downward only.
                y1 = self.board_margin_top
                # Rows
                x2 = self.board_margin_left + self.chess_radius
                y2 = self.board_margin_top
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
                # Columns
                x2 = self.board_margin_left
                y2 = self.board_margin_top + self.chess_radius
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
            elif y == n:
                # Bottom-left corner: stubs to the right and upward only.
                # Rows
                y1 = self.board_margin_top + (y * self.block_width)
                x2 = self.board_margin_left + self.chess_radius
                y2 = self.board_margin_top + (y * self.block_width)
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
                # Columns
                y1 = self.board_margin_top + (y * self.block_width - self.chess_radius)
                x2 = self.board_margin_left
                y2 = self.board_margin_top + (y * self.block_width)
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
            else:
                # Left edge, interior y: right stub plus full vertical segment.
                # Rows
                y1 = self.board_margin_top + (y * self.block_width)
                x2 = self.board_margin_left + self.chess_radius
                y2 = self.board_margin_top + (y * self.block_width)
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
                # Columns
                y1 = self.board_margin_top + (y * self.block_width - self.chess_radius)
                x2 = self.board_margin_left
                y2 = self.board_margin_top + (y * self.block_width + self.chess_radius)
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
        elif x == n:
            # Right edge.
            x1 = self.board_margin_left + (x * self.block_width)
            if y == 0:
                # Top-right corner: stubs to the left and downward only.
                # Rows
                x2 = self.board_margin_left + (x * self.block_width) - self.chess_radius
                y2 = self.board_margin_top
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
                # Columns
                x2 = self.board_margin_left + (x * self.block_width)
                y2 = self.board_margin_top + self.chess_radius
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
            elif y == n:
                # Bottom-right corner: stubs to the left and upward only.
                # Rows
                y1 = self.board_margin_top + (y * self.block_width)
                x2 = self.board_margin_left + (x * self.block_width) - self.chess_radius
                y2 = self.board_margin_top + (y * self.block_width)
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
                # Columns
                y1 = self.board_margin_top + (y * self.block_width - self.chess_radius)
                x2 = self.board_margin_left + (x * self.block_width)
                y2 = self.board_margin_top + (y * self.block_width)
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
            else:
                # Right edge, interior y: left stub plus full vertical segment.
                # Rows
                y1 = self.board_margin_top + (y * self.block_width)
                x2 = self.board_margin_left + (x * self.block_width) - self.chess_radius
                y2 = self.board_margin_top + (y * self.block_width)
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
                # Columns
                y1 = self.board_margin_top + (y * self.block_width - self.chess_radius)
                x2 = self.board_margin_left + (x * self.block_width)
                y2 = self.board_margin_top + (y * self.block_width + self.chess_radius)
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
    def patch_grid_y0_yn(self, n, x, y):
        """Redraw grid segments at intersections on the top (y == 0) or
        bottom (y == n) board edge, excluding the corners (those are
        handled by patch_grid_x0_xn); no-op otherwise.
        """
        if y == 0:
            if not x == 0 and not x == n:
                # Top edge, interior x: full horizontal segment + down stub.
                y1 = self.board_margin_top
                x1 = self.board_margin_left + (x * self.block_width) - self.chess_radius
                # Rows
                x2 = self.board_margin_left + (x * self.block_width) + self.chess_radius
                y2 = self.board_margin_top
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
                # Columns
                x1 = self.board_margin_left + (x * self.block_width)
                x2 = self.board_margin_left + (x * self.block_width)
                y2 = self.board_margin_top + self.chess_radius
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
        elif y == n:
            if not x == 0 and not x == n:
                # Bottom edge, interior x: full horizontal segment + up stub.
                y1 = self.board_margin_top + (y * self.block_width)
                x1 = self.board_margin_left + (x * self.block_width) - self.chess_radius
                # Rows
                x2 = self.board_margin_left + (x * self.block_width) + self.chess_radius
                y2 = y1
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
                # Columns
                x1 = self.board_margin_left + (x * self.block_width)
                x2 = x1
                y2 = y1 - self.chess_radius
                pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
def patch_grid_inner(self, n, x, y):
if x > 0 and x < n and y > 0 and y < n:
# Rows
x1 = self.board_margin_left + (x * self.block_width) - self.chess_radius
y1 = self.board_margin_top + (y * self.block_width)
x2 = self.board_margin_left + (x * self.block_width) + self.chess_radius
y2 = y1
pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
# Columns
x1 = self.board_margin_left + (x * self.block_width)
x2 = x1
y1 = self.board_margin_top + (y * self.block_width) - self.chess_radius
y2 = self.board_margin_top + (y * self.block_width) + self.chess_radius
pg.draw.line(self.scr, BOARD_GRID_LINE_COLOR, (x1,y1), (x2,y2), self.grid_width)
    def init_client_conn_socket(self):
        """Open a TCP connection to the game host at 127.0.0.1:50007.

        Retries once per second on connection refusal, pumping pygame events
        so the window stays responsive and can be closed during the wait.
        On success, stores the connected socket in both self.soc and
        self.conn and switches it to blocking mode.
        """
        self.soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.soc.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.soc.settimeout(5.0)
        foo = True # flag used to break out of the nested while loops
        while foo:
            # Host address was once asked interactively; now hard-coded to localhost.
            #ip = entry('host ip : <15,15>',width = 280)
            ip = ["127.0.0.1"]
            if not ip or not ip[0]: print('exit');exit()
            while True:
                try:
                    print('try to connect...')
                    self.soc.connect((ip[0],50007))
                    foo = False
                    print('connected')
                    break
                except socket.timeout:
                    # NOTE(review): timeout breaks only the inner loop, so the
                    # outer loop retries with the same address.
                    print('good ip ... ?')
                    break
                except socket.error:
                    # Connection refused: wait a second and retry, keeping the
                    # event queue drained so QUIT still works.
                    print('...refused')
                    pg.time.wait(1000)
                    for ev in pg.event.get():
                        if ev.type == pg.QUIT:
                            print('exit game')
                            exit()
        self.conn = self.soc
        self.soc.settimeout(None)
    def set_last_chess_prompt(self, x, y):
        """Draw red corner markers around the stone at board coordinate (x, y)
        to highlight the most recent move.

        NOTE(review): the bare ``return`` below disables this routine entirely;
        everything after it is dead code.  It appears to have been switched off
        deliberately (perhaps the marker looked wrong) -- confirm before
        re-enabling or deleting.
        """
        return
        print "set_last_chess_prompt (", "x:", x, "y:", y, ")"
        if x <= CHESS_BOARD_BLOCK_COUNTS and y <= CHESS_BOARD_BLOCK_COUNTS and x >= 0 and y >= 0:
            self.cur_x = x
            self.cur_y = y
            # Left-top corner: horizontal tick ...
            pg.display.update(self.scr.fill(pg.Color('red'),
                (x*self.block_width + self.board_margin_left - self.chess_radius - 1,
                y*self.block_hight + self.board_margin_top - self.chess_radius - 1,8,2)))
            # ... and vertical tick.
            pg.display.update(self.scr.fill(pg.Color('red'),
                (x*self.block_width + self.board_margin_left - self.chess_radius - 1,
                y*self.block_hight + self.board_margin_top - self.chess_radius - 1,2,8)))
            # right down
            pg.display.update(self.scr.fill(pg.Color('red'),
                (x*self.block_width + self.board_margin_left + self.chess_radius + 1 - 2,
                y*self.block_hight + self.board_margin_top + self.chess_radius + 1 - 8,2,8)))
            # -
            pg.display.update(self.scr.fill(pg.Color('red'),
                (x*self.block_width + self.board_margin_left + self.chess_radius + 1 - 8,
                y*self.block_hight + self.board_margin_top + self.chess_radius + 1 - 2,8,2)))
            # ----------------------------------------------------
            # left down
            # -
            pg.display.update(self.scr.fill(pg.Color('red'),
                (x*self.block_width + self.board_margin_left - self.chess_radius - 1,
                y*self.block_hight + self.board_margin_top + self.chess_radius + 1 - 2, 8,2)))
            # |
            pg.display.update(self.scr.fill(pg.Color('red'),
                (x*self.block_width + self.board_margin_left - self.chess_radius - 1,
                y*self.block_hight + self.board_margin_top + self.chess_radius + 1 - 8, 2,8)))
            # right top
            # -
            pg.display.update(self.scr.fill(pg.Color('red'),
                (x*self.block_width + self.board_margin_left + self.chess_radius + 1 - 8,
                y*self.block_hight + self.board_margin_top - self.chess_radius - 1,8,2)))
            # |
            pg.display.update(self.scr.fill(pg.Color('red'),
                (x*self.block_width + self.board_margin_left + self.chess_radius + 1 - 2,
                y*self.block_hight + self.board_margin_top - self.chess_radius - 1,2,8)))
            # Erase the markers of the previous move.
            self.clear_last_chess_prompt()
    def clear_last_chess_prompt(self):
        """Erase the red last-move corner markers drawn around
        (self.last_put_X, self.last_put_Y).

        Each of the eight small tick rectangles is restored by blitting the
        corresponding region of the clean board image back onto the screen
        and updating just that rect.
        """
        print "clear_last_chess_prompt (", "x:", self.last_put_X, "y:", self.last_put_Y, ")"
        # Clean chess focus
        if self.last_put_X <= CHESS_BOARD_BLOCK_COUNTS and self.last_put_Y <= CHESS_BOARD_BLOCK_COUNTS and self.last_put_X >= 0 and self.last_put_Y >= 0:
            # left top
            r1 = pg.Rect(self.last_put_X*self.block_width + self.board_margin_left - self.chess_radius - 1,
                self.last_put_Y*self.block_hight + self.board_margin_top - self.chess_radius - 1,8,2)
            self.scr.blit(self.board_image,r1,r1)
            pg.display.update(r1)
            r2 = pg.Rect(self.last_put_X*self.block_width + self.board_margin_left - self.chess_radius - 1,
                self.last_put_Y*self.block_hight + self.board_margin_top - self.chess_radius - 1,2,8)
            self.scr.blit(self.board_image,r2,r2)
            pg.display.update(r2)
            # right top
            r3 = pg.Rect(self.last_put_X*self.block_width + self.board_margin_left + self.chess_radius + 1 - 2,
                self.last_put_Y*self.block_hight + self.board_margin_top + self.chess_radius + 1 - 8,2,8)
            self.scr.blit(self.board_image,r3,r3)
            pg.display.update(r3)
            r4 = pg.Rect(self.last_put_X*self.block_width + self.board_margin_left + self.chess_radius + 1 - 8,
                self.last_put_Y*self.block_hight + self.board_margin_top + self.chess_radius + 1 - 2,8,2)
            self.scr.blit(self.board_image,r4,r4)
            pg.display.update(r4)
            # ----------------------------------------------
            # left down
            r5 = pg.Rect(self.last_put_X*self.block_width + self.board_margin_left - self.chess_radius - 1,
                self.last_put_Y*self.block_hight + self.board_margin_top + self.chess_radius + 1 - 2,8,2)
            self.scr.blit(self.board_image,r5,r5)
            pg.display.update(r5)
            r6 = pg.Rect(self.last_put_X*self.block_width + self.board_margin_left - self.chess_radius - 1,
                self.last_put_Y*self.block_hight + self.board_margin_top + self.chess_radius + 1 - 8 ,2,8)
            self.scr.blit(self.board_image,r6,r6)
            pg.display.update(r6)
            # right down
            r7 = pg.Rect(self.last_put_X*self.block_width + self.board_margin_left + self.chess_radius + 1 - 8,
                self.last_put_Y*self.block_hight + self.board_margin_top - self.chess_radius - 1 ,8,2)
            self.scr.blit(self.board_image,r7,r7)
            pg.display.update(r7)
            r8 = pg.Rect(self.last_put_X*self.block_width + self.board_margin_left + self.chess_radius + 1 - 2,
                self.last_put_Y*self.block_hight + self.board_margin_top - self.chess_radius - 1 ,2,8)
            self.scr.blit(self.board_image,r8,r8)
            pg.display.update(r8)
def put_pawn(self,x,y,color):
print ("### put chess (x: %s, y: %s)" % (x,y))
pg.display.update(self.scr.blit(color,
(x*self.block_width + self.board_margin_left - self.chess_radius,
y*self.block_hight + self.board_margin_top - self.chess_radius)))
self.set_last_chess_prompt(x,y)
self.last_put_X = x
self.last_put_Y = y
    def setup_btns(self):
        """Create the on-screen buttons (currently only QUIT).

        The button is placed in the sidebar to the right of the board, sized
        from the screen dimensions and board margins.  Font size is reduced
        on touch screens.
        """
        if TOUCH_SCREEN == True:
            button_font_size = 18
        else:
            button_font_size = 22
        # Shared visual configuration for all buttons.
        button_config = {
            "clicked_font_color" : (0,0,0),
            "hover_font_color" : (205,195, 0),
            'font_color' : (255,255,255),
            'font' : tools.Font.load('impact.ttf', button_font_size),
            'border_color' : (0,0,0),
            'border_hover_color' : (100,100,100),
        }
        # X coordinate where the sidebar (right of the board) begins.
        self.right_board_x = CHESS_BOARD_BLOCK_COUNTS*self.block_width+self.board_margin_left * 2
        button_hight = self.board_margin_top * 2
        button_width = SCREEN_WIDTH - self.right_board_x - self.board_margin_left * 2
        self.btn1 = button.Button((self.right_board_x + self.board_margin_left,
            SCREEN_HIGHT - self.board_margin_left - button_hight,
            button_width,button_hight), (0,0,100),
            self.quit_click, text='QUIT',
            clicked_color=(255,255,255), hover_color=(0,0,130), **button_config)
        # Earlier experiments with a TEST button and fixed positions, kept for reference:
        # self.btn2 = button.Button((self.board_margin_left * 2 + self.block_width * 14 + 10, 200, 50,35), (0,0,100),
        # self.test_click, text='TEST',
        # clicked_color=(255,255,255), hover_color=(0,0,130), **button_config)
        #self.btn1 = button.Button((self.board_margin_left * 2 + self.block_width * CHESS_BOARD_BLOCK_COUNTS + 10, 500, 100,35), (0,0,100),
        # self.quit_click, text='QUIT',
        # clicked_color=(255,255,255), hover_color=(0,0,130), **button_config)
        # self.btn2 = button.Button((self.board_margin_left * 2 + self.block_width * CHESS_BOARD_BLOCK_COUNTS + 10, 10, 100,35), (0,0,100),
        # self.test_click, text='TEST',
        # clicked_color=(255,255,255), hover_color=(0,0,130), **button_config)
        self.buttons = [self.btn1]
        #self.buttons = [self.btn1, self.btn2]
    def get_competitor_name(self, game_id, role_id):
        """Poll the cloud for the game metadata and return the opponent's name.

        game_id -- identifier used to build the "vlv_GMETA_<id>" data item.
        role_id -- '0' and '1' are the two players; '2' appears to be an
            observer that takes both names from the metadata (TODO confirm).

        Busy-polls until the metadata contains a non-empty competitor name or
        self.done becomes true; also sets self.user_name as a side effect.
        """
        data_id = self.node.dataId("vlv_GMETA_" + game_id)
        while not self.done:
            gmeta = self.get_dataitem(self.node, data_id)
            competitor_name = ''
            if gmeta:
                data = json.loads(gmeta)
                print "### data : ", data
                if role_id == '0':
                    competitor_name = data['player2']
                    self.user_name = USER_NAME
                elif role_id == '1':
                    competitor_name = data['player1']
                    self.user_name = USER_NAME
                elif role_id == '2':
                    self.user_name = data['player1']
                    competitor_name = data['player2']
            if not competitor_name == '':
                print "### competitor_name", competitor_name
                return competitor_name
    def read_from_cloud(self):
        """Background thread body: poll the cloud for new moves of this game.

        Reads "vlv_GMOVE_<game_id>" in a loop until self.done; whenever a new
        move record with a truthy, non-zero Status appears, it is posted to
        the pygame event queue as USEREVENT+1 so the main loop applies it.
        """
        data_name = "vlv_GMOVE_" + str(self.game_id)
        data_id = self.node.dataId(data_name)
        old_data = ''
        while not self.done:
            try:
                data = json.loads(self.node.getData(data_id))
                # Only forward records that are new and carry a meaningful status.
                if not data['Status'] == 0 and data['Status'] and data != old_data:
                    old_data = data
                    try: pg.event.post(pg.event.Event(pg.USEREVENT+1,{'data':data}))
                    except:
                        # NOTE(review): bare except -- presumably guards against
                        # posting after pygame shut down; confirm and narrow.
                        print("Fail to post event ")
                        break
            except:
                # NOTE(review): bare except swallows JSON and network errors alike.
                print("Fail to get data %s" % data_name)
        print "## read_from_cloud thread exit"
def findlatest_game(self):
data_name = "vlv_game_id"
data_id = self.node.dataId(data_name)
vlv_GAME_S_ID = self.node.getData(data_id)
vlv_GAME_ID = int(vlv_GAME_S_ID)
return vlv_GAME_ID - 1
    def get_history_from_cloud(self):
        """Fetch all past moves of this game and replay them.

        Loads the historical records of "vlv_GMOVE_<game_id>", orders them by
        SeqID into self.his_data, and decides between two modes:
        - live game (self.fetch_data True): replay every move immediately;
        - finished game (last Status == 2): replay move-by-move on user input
          via history_next_move().
        Sets self.done when there is no game data at all.
        """
        self.his_data = []
        data_name = "vlv_GMOVE_" + str(self.game_id)
        data_id = self.node.dataId(data_name)
        datas = self.node.getHistoricalData(data_id, pageSize=1000)
        if len(datas) == 0:
            print "No game data"
            self.fetch_data = False
            self.done = True
            return
        j = 0
        # Skip the first two records (header/metadata entries, presumably -- confirm).
        for i in range(2, len(datas)):
            print("Raw move ", datas[i]);
            try:data = json.loads(datas[i])
            except:
                # Unparseable record: ignore and continue.
                continue
            if data['Status'] > 0 and data['Status']:
                print("Got " ,data['SeqID'],"move", datas[i]);
                j = data['SeqID']
                # Keep moves ordered by their sequence number (1-based).
                self.his_data.insert(j - 1,data)
        #Only last entry to judge if game is over
        if j == 0:
            # print "No game data"
            self.fetch_data = True
            # self.done = True
            return
        if self.his_data[j-1]['Status'] == 2:
            self.fetch_data = False
        #print("Got End @ %s", str(data['SeqID']))
        self.his_data_len=j;
        #debug
        #self.fetch_data = False
        #Draw current status
        print("his data total %d,move"%self.his_data_len)
        if self.fetch_data == True:
            # Live game: apply the whole backlog at once.
            for i in range(0, self.his_data_len):
                print('his data %s'%str(self.his_data[i]))
                pg.event.post(pg.event.Event(pg.USEREVENT+1,{'data':self.his_data[i]}))
                self.events()
        else:
            # Finished game: show the first move, wait for the user to step.
            pg.event.post(pg.event.Event(pg.USEREVENT+1,{'data':self.his_data[0]}))
            self.his_data_move = 1
            self.events()
def history_next_move(self):
print("history_next_move %d" %self.his_data_move)
if self.his_data_move < self.his_data_len:
pg.event.post(pg.event.Event(pg.USEREVENT+1,{'data':self.his_data[self.his_data_move]}))
self.events()
self.his_data_move = self.his_data_move + 1
else:
print("Max move")
def quit_click(self):
self.done = True
def show_how_won(self, (x1, y1), (x2, y2)):
x1_pos = x1*self.block_width + self.board_margin_left
y1_pos = y1*self.block_hight + self.board_margin_top
x2_pos = x2*self.block_width + self.board_margin_left
y2_pos = y2*self.block_hight + self.board_margin_top
r = pg.draw.line(self.scr, RED, (x1_pos,y1_pos), (x2_pos,y2_pos), 2)
pg.display.update(r)
def test_click(self):
self.won_game = True
start_pos = (2,2)
end_pos = (6,6)
self.show_how_won(start_pos, end_pos)
print('TEST button pressed')
def easefocus(self,x,y):
r = pg.Rect(x*self.block_width + self.board_margin_left - self.chess_radius, y*self.block_hight + self.board_margin_top - self.chess_radius,self.chess_radius * 2,self.chess_radius * 2)
self.scr.blit(self.board_image,r,r)
self.patch_grid(CHESS_BOARD_BLOCK_COUNTS, x, y)
# Rows
return r
    def events(self):
        """Drain the pygame event queue and dispatch each event.

        Handles, in order: window/ESC quit, button widgets, cloud move
        events (USEREVENT+1), mouse placement or history stepping, keyboard
        cursor movement/placement (when KEYBOARD_INPUT), and mouse-motion
        stone preview.
        """
        for ev in pg.event.get():
            # 'and' binds tighter than 'or': quits on ESC key-down or window close.
            if ev.type == pg.KEYDOWN and ev.key == pg.K_ESCAPE or ev.type == pg.QUIT:
                self.done = True
                #break
            for button in self.buttons:
                button.check_event(ev)
            # A move arriving from the cloud reader thread (or history replay).
            if ev.type == pg.USEREVENT+1:
                #print "# new user event!"
                #print "---------------ev.data[seqid]=" + str(ev.data['SeqID'])
                print "---------------ev.data" + str(ev.data)
                self.turn = ev.data['SeqID'] % 2
                self.pawn = self.turn^1
                result = ev.data['Status']
                if result == WIN:
                    start_pos, end_pos = ev.data['WinSpawns']
                    #print "## start_pos:", start_pos
                    #print "## end_pos:", end_pos
                    self.show_how_won(start_pos, end_pos)
                    self.won_game = True
                if int(self.role_id) == self.pawn:
                    # Echo of our own move: nothing to draw.
                    if result == CONTINUE:
                        pass
                    else:
                        # TODO
                        # generated error
                        # To be done
                        pass
                else: #peer draw
                    X = ev.data['PosX']
                    Y = ev.data['PosY']
                    self.seq_id = ev.data['SeqID']
                    # Odd sequence numbers are black stones, even are white.
                    self.put_pawn(X, Y, self.black_image if self.seq_id % 2 == 1 else self.white_image)
                    if not self.role_id == "2":
                        self.your_turn = True
                    self.grid[X][Y] = 2 if self.role_id == "1" else 1
                    #print "### 1 ### grid[X][Y]", str(self.grid[X][Y])
            # else:
            # print ('Unhandled other USER event %s' % str(ev.data))
            elif self.fetch_data == False and ev.type == pg.MOUSEBUTTONUP and ev.button == 1:
                # Replaying a finished game: any left click steps one move.
                self.history_next_move()
            elif self.fetch_data == True and self.your_turn == True and ev.type == pg.MOUSEBUTTONUP and ev.button == 1 and not self.won_game == True:
                # Live game: left click places our stone at the clicked cell.
                x,y = ev.pos[0]//self.block_width,ev.pos[1]//self.block_hight
                self.put_my_chess(x, y)
            #elif ev.type == pg.KEYDOWN:
            elif self.fetch_data == False and ev.type == pg.KEYDOWN and KEYBOARD_INPUT == True:
                self.history_next_move()
            elif self.fetch_data == True and ev.type == pg.KEYDOWN and KEYBOARD_INPUT == True:
                # Keyboard play: arrows move the cursor, space places a stone.
                print "### print key press"
                if ev.key == pg.K_SPACE:
                    print "### print space"
                    if self.your_turn == True and not self.won_game == True:
                        print "### Pressed space key ###", self.cur_x, self.cur_y
                        self.put_my_chess(self.cur_x, self.cur_y)
                elif ev.key == pg.K_DOWN:
                    print "### print down"
                    if self.your_turn == True and not self.won_game == True:
                        if self.cur_x <= CHESS_BOARD_BLOCK_COUNTS and self.cur_y + 1 <= CHESS_BOARD_BLOCK_COUNTS and self.cur_x >= 0 and self.cur_y >= 0:
                            self.last_put_X = self.cur_x
                            self.last_put_Y = self.cur_y
                            self.cur_y += 1
                            self.set_last_chess_prompt(self.cur_x,self.cur_y)
                elif ev.key == pg.K_UP:
                    print "### print up"
                    if self.your_turn == True and not self.won_game == True:
                        if self.cur_x <= CHESS_BOARD_BLOCK_COUNTS and self.cur_y <= CHESS_BOARD_BLOCK_COUNTS and self.cur_x >= 0 and self.cur_y - 1 >= 0:
                            self.last_put_X = self.cur_x
                            self.last_put_Y = self.cur_y
                            self.cur_y -= 1
                            self.set_last_chess_prompt(self.cur_x,self.cur_y)
                elif ev.key == pg.K_RIGHT:
                    print "### print right"
                    if self.your_turn == True and not self.won_game == True:
                        if self.cur_x + 1 <= CHESS_BOARD_BLOCK_COUNTS and self.cur_y <= CHESS_BOARD_BLOCK_COUNTS and self.cur_x >= 0 and self.cur_y >= 0:
                            self.last_put_X = self.cur_x
                            self.last_put_Y = self.cur_y
                            self.cur_x += 1
                            self.set_last_chess_prompt(self.cur_x,self.cur_y)
                elif ev.key == pg.K_LEFT:
                    print "### print left"
                    if self.your_turn == True and not self.won_game == True:
                        if self.cur_x <= CHESS_BOARD_BLOCK_COUNTS and self.cur_y <= CHESS_BOARD_BLOCK_COUNTS and self.cur_x - 1 >= 0 and self.cur_y >= 0:
                            self.last_put_X = self.cur_x
                            self.last_put_Y = self.cur_y
                            self.cur_x -= 1
                            self.set_last_chess_prompt(self.cur_x,self.cur_y)
            elif self.your_turn == True and ev.type == pg.MOUSEMOTION:
                # TODO
                #if TOUCH_SCREEN == False and self.your_turn == True:
                if SHOW_MOUSEMOTION == True:
                    # Hover preview: erase the previous ghost stone, draw a new one.
                    x,y = ev.pos[0]//self.block_width,ev.pos[1]//self.block_hight
                    if x < CHESS_BOARD_BLOCK_COUNTS + 1 and y < CHESS_BOARD_BLOCK_COUNTS + 1 and not self.won_game:
                        if self.grid[self.X][self.Y] == 0:
                            r = self.easefocus(self.X,self.Y)
                        if self.grid[x][y] == 0:
                            pg.display.update(self.scr.blit(self.white_image if self.role_id == "1" else self.black_image,
                                (x*self.block_width+self.board_margin_left - self.chess_radius,
                                y*self.block_hight + self.board_margin_top - self.chess_radius)))
                        self.X = x
                        self.Y = y
            #else:
            # print "#### ev.type:", str(ev.type)
def put_my_chess(self, x, y):
if x < CHESS_BOARD_BLOCK_COUNTS + 1 and y < CHESS_BOARD_BLOCK_COUNTS + 1:
if self.grid[x][y] == 0:
self.put_pawn(x,y, self.white_image if self.role_id == "1" else self.black_image)
self.put_chess_to_cloud((x,y))
self.your_turn = False
self.grid[x][y] = 1 if self.role_id == "1" else 2
def put_chess_to_cloud(self, (x,y)):
data_name="vlv_GMOVE_" + str(self.game_id)
data_id = self.node.dataId(data_name)
self.seq_id += 1
data_val = {'SeqID': self.seq_id, 'PosX': x, 'PosY': y, 'Status': DRAW}
if not self.node.setData(data_id, json.dumps(data_val)):
print("Fail to set data %s = %s" % (data_name, data_val))
else:
print("Data set chess pos (x:%s, y%s) to cloud" % (str(x), str(y))),
def update(self):
msg = 'Game Over'
if self.won_game:
x = self.right_board_x // 2
y = SCREEN_HIGHT // 2
if TOUCH_SCREEN == True:
msg_font_size = 50
else:
msg_font_size = 120
if self.role_id == "2":
msg = 'Got Winner!'
self.game_over, self.game_over_rect = self.make_text(msg, RED, (x,y), msg_font_size)
elif int(self.role_id) == self.pawn:
msg = 'You Win!'
self.game_over, self.game_over_rect = self.make_text(msg, RED, (x,y), msg_font_size)
else:
msg = 'You Lose!'
self.game_over, self.game_over_rect = self.make_text(msg, BLUE, (x,y), msg_font_size)
def render(self):
#self.screen.fill((255,255,255))
for button in self.buttons:
button.render(self.scr)
self.scr.blit(self.host_text, self.host_rect)
self.scr.blit(self.guest_text, self.guest_rect)
#self.scr.blit(self.games_won_text, self.games_won_rect)
#self.scr.blit(self.games_lost_text, self.games_lost_rect)
#self.scr.blit(self.sec_timelapse, self.sec_timelapse_rect)
#if self.lost_game or self.won_game():
if self.won_game:
self.scr.blit(self.overlay, (0,0))
self.scr.blit(self.game_over, self.game_over_rect)
#self.scr.blit(self.chess_cursor, self.chess_cursor_rect)
#pg.draw.rect(self.scr, (255, 255, 255, 127), pg.Rect(0, 0, 100, 75))
#self.scr.blit(self.sec_timelapse, self.sec_timelapse_rect)
#pg.draw.rect(self.scr, (255, 255, 255, 127), pg.Rect(0, 0, 100, 75))
#pg.draw.rect(self.scr, (255, 255, 255, 127), pg.Rect(0, 0, 100, 75))
def run(self):
while not self.done:
self.events()
self.update()
self.render()
pg.display.update()
#self.clock.tick(60)
    def clean(self):
        """Shut down: reap the cloud-reader thread (if running), quit pygame
        and terminate the process."""
        if self.fetch_data == True:
            # Give the reader thread up to one second to exit.
            self.T.join(1)
        pg.quit()
        exit()
# Script entry point: construct the game, run the main loop, then clean up.
# NOTE(review): not guarded by ``if __name__ == "__main__":`` -- importing this
# module would launch the game immediately.
app = Game()
app.run()
app.clean()
| |
"""
Django settings for cognitive atlas project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import sys
import os
from distutils.util import strtobool
from os.path import join, abspath, dirname
from py2neo import Graph
# Just for local development - will read this from secrets
graph = Graph("http://graphdb:7474", auth=("neo4j", "test"))
DOMAIN = "http://www.cognitiveatlas.org"
# PATH vars
PROJECT_ROOT = join(abspath(dirname(__file__)), ".")
sys.path.insert(0, join(abspath(PROJECT_ROOT), 'apps'))
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
ALLOWED_HOSTS = ["*"]
SITE_ID = 1
# Application definition
# Project apps first, then Django contrib apps.
INSTALLED_APPS = [
    'cognitive.apps.main',
    'cognitive.apps.atlas',
    'cognitive.apps.users',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites',
    'django.contrib.admin',
]
THIRD_PARTY_APPS = [
    'rest_framework',
    'rest_framework.authtoken',
    'crispy_forms',
]
# allauth and its providers were removed/disabled; kept for reference:
# 'allauth',
# 'allauth.account',
# 'allauth.socialaccount',
# 'allauth.socialaccount.providers.dropbox',
# 'allauth.socialaccount.providers.dropbox_oauth2',
# 'allauth.socialaccount.providers.facebook',
# 'allauth.socialaccount.providers.github',
# 'allauth.socialaccount.providers.gitlab',
# 'allauth.socialaccount.providers.google',
# 'allauth.socialaccount.providers.linkedin',
# 'allauth.socialaccount.providers.linkedin_oauth2',
# 'allauth.socialaccount.providers.openid',
# 'allauth.socialaccount.providers.orcid',
# 'allauth.socialaccount.providers.stackexchange',
# 'allauth.socialaccount.providers.twitter',
INSTALLED_APPS += THIRD_PARTY_APPS
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'cognitive.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.request',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
# CUSTOM CONTEXT PROCESSORS
# Injects site-wide counts into every template context.
TEMPLATES[0]['OPTIONS']['context_processors'].append(
    "main.context_processors.counts_processor")
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    # 'allauth.account.auth_backends.AuthenticationBackend',
)
WSGI_APPLICATION = 'cognitive.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# All connection parameters come from the environment (docker-compose/secrets).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': os.environ.get('POSTGRES_NAME', ''),
        'USER': os.environ.get('POSTGRES_USER', ''),
        'PASSWORD': os.environ.get('POSTGRES_PASSWORD', ''),
        'HOST': os.environ.get('POSTGRES_HOST', ''),
        'PORT': os.environ.get('POSTGRES_PORT', ''),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# First entry is used for new passwords; the rest allow verifying old hashes.
# NOTE(review): UnsaltedSHA1PasswordHasher is weak -- presumably kept only to
# read legacy hashes; confirm and plan a migration off it.
PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
    'django.contrib.auth.hashers.Argon2PasswordHasher',
    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
    'django.contrib.auth.hashers.BCryptPasswordHasher',
    'django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher',
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
MEDIA_ROOT = '/var/www/assets'
MEDIA_URL = '/assets/'
STATIC_ROOT = '/var/www/static'
STATIC_URL = '/static/'
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# Per-process in-memory cache (no cross-process sharing).
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    }
}
AUTH_USER_MODEL = 'users.User'
# NOTE(review): the fallback value is intentionally insecure -- it must only
# ever be used in local development; ensure DJANGO_SECRET_KEY is set in prod.
SECRET_KEY = os.environ.get(
    'DJANGO_SECRET_KEY', 'verybadnotgoodsecretkeythatisntsecret')
# DEBUG defaults to off; enable via DJANGO_DEBUG=1/true in development only.
DEBUG = strtobool(os.environ.get('DJANGO_DEBUG', 'False'))
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/logged_out/'
REST_FRAMEWORK = {
    'DEFAULT_PERMISSION_CLASSES': [
        'rest_framework.permissions.IsAuthenticatedOrReadOnly',
    ],
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.BasicAuthentication',
        'rest_framework.authentication.SessionAuthentication',
        'rest_framework.authentication.TokenAuthentication',
    ),
}
# reCAPTCHA is opt-in via environment.
USE_RECAPTCHA = strtobool(os.environ.get('USE_RECAPTCHA', 'False'))
GOOGLE_RECAPTCHA_SECRET_KEY = os.environ.get('GOOGLE_RECAPTCHA_SECRET_KEY', '')
# Space-separated list of notification addresses.  Empty entries are dropped,
# so an unset variable yields [] rather than the bogus [''] the old code built.
NOTIFY_EMAILS = [i for i in os.environ.get("NOTIFY_EMAILS", "").split(" ") if i]
EMAIL_HOST = os.environ.get("EMAIL_HOST", '')
EMAIL_PORT = os.environ.get("EMAIL_PORT", '')
EMAIL_HOST_USER = os.environ.get("EMAIL_HOST_USER", '')
EMAIL_HOST_PASSWORD = os.environ.get("EMAIL_HOST_PASSWORD", '')
# Fixed typo: Django reads EMAIL_USE_TLS; the misspelled EMAIL_USE_TSL was
# silently ignored, so SMTP connections ran without STARTTLS.
EMAIL_USE_TLS = True
# Kept for backward compatibility with any code that read the misspelled name.
EMAIL_USE_TSL = True
| |
import numpy
import sys
import matplotlib.pylab as pt
import matplotlib.cm
import numpy.random
import matplotlib.ticker as ticker
from matplotlib.lines import Line2D
import scipy.stats
from matplotlib.transforms import Affine2D
import mpl_toolkits.axisartist.floating_axes as floating_axes
import matplotlib
# Global figure styling for all plots produced by this script.
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['font.size'] = 10.0
matplotlib.rcParams['lines.markeredgewidth'] = 0
matplotlib.rcParams['lines.markersize'] = 3.5
matplotlib.rcParams['lines.linewidth'] = .5
matplotlib.rcParams['legend.fontsize'] = 8.0
matplotlib.rcParams['axes.linewidth']=.5
matplotlib.rcParams['patch.linewidth']=.5
def permute_within_categories(categories, cat_inds):
    """Return a permutation of 0..n-1 that only shuffles items inside each category.

    categories : 1d array assigning each item a category label (labels need
        not be consecutive integers).
    cat_inds : the category labels to process.

    Positions belonging to one category are randomly rearranged among
    themselves; items never move between categories.
    """
    n = len(categories)
    positions = numpy.arange(n)
    shuffled = numpy.zeros((n,), dtype='int')
    for cat in cat_inds:
        members = positions[categories == cat]
        shuffled[members] = numpy.random.permutation(members)
    return shuffled
def permute_within_categories_preserve_num_muts(mutation_table, categories, cat_inds):
#categories: 1d array where each item has an index indicating which category it belongs to. The category indices need not be consecutive.
#cat_inds: list of category indices.
n = len(categories)
inds = numpy.arange(n) #Original order
n_muts = mutation_table.shape[1]
mut_inds = numpy.arange(n_muts)
permuted_mutation_table = numpy.zeros_like(mutation_table)
for i in range(len(cat_inds)):
category_indices = inds[categories == cat_inds[i]]
#Construct ordered pair list of which mutations occurred in which clones in this category
pop_mut_list = []
for index in category_indices:
muts = mut_inds[mutation_table[index,:] > .5]
for mut in muts:
pop_mut_list.append([index, mut])
#Permute mutations
n_muts_category = len(pop_mut_list)
perm = numpy.random.permutation(numpy.arange(n_muts_category))
pop_mut_list = numpy.array(pop_mut_list)
pop_mut_list_permuted = pop_mut_list
pop_mut_list_permuted[:,1] = pop_mut_list[perm,1]
#Construct the section of the permuted mutation table for this category
for j in range(len(pop_mut_list_permuted[:,0])):
mut_loc = pop_mut_list_permuted[j,:]
permuted_mutation_table[mut_loc[0],mut_loc[1]] = 1
return permuted_mutation_table
def calculate_cat_inds(categories):
    """Return the sorted unique category labels present in *categories*."""
    return numpy.unique(numpy.array(categories))
def calculate_helper_matrix(categories, cat_inds):
    """Build an (n_cats, n_pops) 0/1 matrix whose row i marks members of cat_inds[i].

    Matrix-multiplying it by the mutation table sums the table's rows over
    each category; it is therefore n_cats x n_pops.
    """
    n_rows = len(cat_inds)
    n_cols = len(categories)
    helper = numpy.zeros((n_rows, n_cols))
    for row in range(n_rows):
        members = numpy.where(categories == cat_inds[row])
        helper[row, members] = 1
    return helper
def calculate_entropy_statistic(mutation_table, helper_matrix):
    """Summed per-gene binomial entropy of hit probabilities across categories.

    mutation_table : (n_pops, n_genes) 0/1 array.
    helper_matrix : (n_cats, n_pops) indicator matrix from
        calculate_helper_matrix.

    For each gene, each category contributes the entropy of its hit fraction,
    weighted by the category's share of all populations; categories whose
    fraction is exactly 0 or 1 contribute nothing.  Returns the total.

    Fix: ``numpy.float`` was removed in NumPy 1.24 -- replaced with the
    builtin ``float``.  The unused ``muts_per_gene`` local was dropped.
    """
    collapsed_table = numpy.dot(helper_matrix, mutation_table)
    pops_per_category = numpy.dot(helper_matrix, helper_matrix.T)
    # Hit probability per (category, gene): counts divided by category sizes.
    probs = numpy.dot(numpy.linalg.inv(pops_per_category), collapsed_table)
    num_genes = mutation_table.shape[1]
    entropies = numpy.zeros((num_genes,))
    total_pops = float(numpy.sum(pops_per_category))
    for i in range(num_genes):
        informative = numpy.all([probs[:, i] > 0, probs[:, i] < 1], axis=0)
        p_hit = probs[:, i][informative]
        p_miss = 1. - p_hit
        cat_sizes = numpy.diag(pops_per_category)[informative]
        entropies[i] = numpy.sum(
            -1 * cat_sizes / total_pops
            * (p_hit * numpy.log2(p_hit) + p_miss * numpy.log2(p_miss)))
    return numpy.sum(entropies)
def calculate_entropy_statistic2(mutation_table, helper_matrix):
    """Entropy statistic with an optional down-weighting of 2-hit mutations.

    This variant can weight double-hit mutations less than others, since they
    carry less information.  For this dataset an equal weight (the current
    ``weight = 1.``) was equivalently sensitive, so both branches compute the
    same quantity; the branch structure is kept so the weight can be tuned.

    Fix: ``numpy.float`` was removed in NumPy 1.24 -- replaced with the
    builtin ``float``.
    """
    muts_per_gene = numpy.sum(mutation_table, axis=0)
    collapsed_table = numpy.dot(helper_matrix, mutation_table)
    pops_per_category = numpy.dot(helper_matrix, helper_matrix.T)
    # Probability that a population in each category got each mutation.
    probs = numpy.dot(numpy.linalg.inv(pops_per_category), collapsed_table)
    num_genes = mutation_table.shape[1]
    entropies = numpy.zeros((num_genes,))
    weight = 1.
    total_pops = float(numpy.sum(pops_per_category))
    for i in range(num_genes):
        informative = numpy.all([probs[:, i] > 0, probs[:, i] < 1], axis=0)
        p_hit = probs[:, i][informative]
        p_miss = 1. - p_hit
        cat_sizes = numpy.diag(pops_per_category)[informative]
        term = numpy.sum(
            -1 * cat_sizes / total_pops
            * (p_hit * numpy.log2(p_hit) + p_miss * numpy.log2(p_miss)))
        if muts_per_gene[i] > 2.1:
            entropies[i] = term
        else:
            # Mutations hit at most twice get the (currently neutral) weight.
            entropies[i] = weight * term
    return numpy.sum(entropies)
def calculate_presence_absence_statistic(mutation_table, helper_matrix):
    """Count (category, gene) cells with zero hits after collapsing by category.

    A simple alternative test statistic based on whether or not a particular
    mutation was ever hit in some category.
    """
    per_category_counts = numpy.dot(helper_matrix, mutation_table)
    return numpy.sum(per_category_counts < .5)
#Read in the list of mutations that fixed in each population. Filter out snps that occur in multiple descendants of the same founder--these were SGV from the passaging of this segregant well.
input_file = 'data/mutation_lists_with_aa_positions_reannotated.txt'
#First loop to find any mutations that are shared among descendants of the same segregant
# File format: a one-column line names a clone; the following multi-column
# lines describe its mutations (tab-separated; col 6 'Non' = nonsynonymous).
# NOTE(review): ``file`` shadows the Python 2 builtin of the same name.
file = open(input_file,'r')
file_lines = file.readlines()
file.close()
# segregant -> every nonsynonymous mutation seen in any of its descendants
segregant_mut_dict = {}
# segregant -> mutations seen in more than one descendant (ancestral SGV)
common_mut_dict = {}
for line in file_lines:
    linelist = line.strip().split('\t')
    if len(linelist) < 1.5:
        #Go to the next clone
        clone_name = linelist[0]
        segregant = clone_name.split('_')[0]
        if segregant not in segregant_mut_dict:
            segregant_mut_dict[segregant] = []
    else:
        # Serialize the whole record as a single underscore-joined key.
        mutation = ('_').join(str(i) for i in linelist)
        if len(linelist) > 5.5:
            if linelist[6] == 'Non':
                # Seen before in a sibling clone: record it as shared/ancestral.
                if mutation in segregant_mut_dict[segregant]:
                    print segregant, mutation
                    if segregant in common_mut_dict:
                        common_mut_dict[segregant].append(mutation)
                    else:
                        common_mut_dict[segregant] = [mutation]
                if mutation not in segregant_mut_dict[segregant]:
                    segregant_mut_dict[segregant].append(mutation)
##Second loop to identify all de novo nonsynonymous mutations (and indels)
# clone -> list of mutated gene names / full mutation records, excluding
# ancestral (shared) mutations and dubious ORFs (gene names >= 7 chars).
gene_dict_by_sample = {}
mutation_dict_by_sample = {}
for line in file_lines:
    linelist = line.strip().split('\t')
    if len(linelist) < 1.5:
        #Go to the next clone
        clone_name = linelist[0]
        gene_dict_by_sample[clone_name] = []
        mutation_dict_by_sample[clone_name] = []
        # Genes already counted for this clone (count each gene once per clone).
        local_gene_names = []
        segregant = clone_name.split('_')[0]
    else:
        gene_name = linelist[4]
        mutation = ('_').join(str(i) for i in linelist)
        if len(linelist) > 5.5:
            if linelist[6] == 'Non':
                if segregant in common_mut_dict: #There might be shared ancestral snps
                    if ((gene_name not in local_gene_names) and (len(gene_name) < 6.5) and (mutation not in common_mut_dict[segregant])): #We have not already counted this mutation, it is not an ancestral mutation, and it is not in a dubious ORF
                        local_gene_names.append(gene_name)
                        gene_dict_by_sample[clone_name].append(gene_name)
                        mutation_dict_by_sample[clone_name].append(mutation)
                elif ((gene_name not in local_gene_names) and (len(gene_name) < 6.5)): #We have not already counted this mutation, it is not an ancestral mutation, and it is not in a dubious ORF
                    local_gene_names.append(gene_name)
                    gene_dict_by_sample[clone_name].append(gene_name)
                    mutation_dict_by_sample[clone_name].append(mutation)
#Determine how many independent times each gene was mutated, and make list of genes for each segregant.
gene_name_counts = []
gene_names = []
samples = sorted(gene_dict_by_sample.keys())
for sample in samples:
for gene in gene_dict_by_sample[sample]:
if gene in gene_names:
index = numpy.where(numpy.array(gene_names) == gene)[0]
gene_name_counts[index] += 1
else:
gene_names.append(gene)
gene_name_counts.append(1)
gene_name_counts_sc = numpy.zeros_like( numpy.array(gene_name_counts) )
gene_name_counts_ypd = numpy.zeros_like( numpy.array(gene_name_counts) )
for sample in samples:
env = sample.split('_')[2]
if env == 'sc':
for gene in gene_dict_by_sample[sample]:
index = numpy.where(numpy.array(gene_names) == gene)[0]
gene_name_counts_sc[index] += 1
elif env == 'ypd':
for gene in gene_dict_by_sample[sample]:
index = numpy.where(numpy.array(gene_names) == gene)[0]
gene_name_counts_ypd[index] += 1
print gene_name_counts_sc
print gene_name_counts_ypd
##Import fitness and founder genotype data
#Import fitness and genotype data
#
# All three inputs are semicolon-delimited with a single header row:
#   filename1: segregant; init fit YPD; std err; init fit SC; std err;
#              YPD-evolved pops; SC-evolved pops.  Each "pops" field is a
#              comma-separated list of "name fit_in_ypd fit_in_sc" triples
#              (space-separated within a triple).
#   filename2: segregant; YPD control replicate fits; SC control replicate
#              fits (comma-separated floats).
#   filename3: segregant; comma-separated 0/1 genotype calls per locus.
filename1 = 'data/fitness_measurements_with_population_names_12_29_2016.csv'
filename2 = 'data/control_replicate_measurements.csv'
filename3 = 'data/segregant_genotypes_deduplicated_with_header.csv'
segregant_vector = []
init_fits_ypd = []
init_std_errs_ypd = []
init_fits_sc = []
init_std_errs_sc = []
final_fits_ypd_pops_in_ypd = []
segregant_vector_ypd_pops = []
final_fits_sc_pops_in_sc = []
segregant_vector_sc_pops = []
final_fits_sc_pops_in_ypd = []
final_fits_ypd_pops_in_sc = []
with open(filename1, 'r') as file1:
    firstline = 0
    for line in file1:
        if firstline < .5:
            # Skip the single header row.
            firstline += 1
            continue
        linestrs = line.strip().split(';')
        segregant_vector.append(linestrs[0])
        init_fits_ypd.append(float(linestrs[1]))
        init_std_errs_ypd.append(float(linestrs[2]))
        init_fits_sc.append(float(linestrs[3]))
        init_std_errs_sc.append(float(linestrs[4]))
        # One entry per evolved population; the segregant name is repeated so
        # the *_pops vectors align with the fitness vectors element-wise.
        ypd_evolved_pops = linestrs[5].split(',')
        for entry in ypd_evolved_pops:
            segregant_vector_ypd_pops.append(linestrs[0])
            final_fits_ypd_pops_in_ypd.append(float(entry.split()[1]))
            final_fits_ypd_pops_in_sc.append(float(entry.split()[2]))
        sc_evolved_pops = linestrs[6].split(',')
        for entry in sc_evolved_pops:
            segregant_vector_sc_pops.append(linestrs[0])
            final_fits_sc_pops_in_ypd.append(float(entry.split()[1]))
            final_fits_sc_pops_in_sc.append(float(entry.split()[2]))
init_fits_ypd = numpy.array(init_fits_ypd)
init_std_errs_ypd = numpy.array(init_std_errs_ypd)
init_fits_sc = numpy.array(init_fits_sc)
init_std_errs_sc = numpy.array(init_std_errs_sc)
final_fits_ypd_pops_in_ypd = numpy.array(final_fits_ypd_pops_in_ypd)
final_fits_ypd_pops_in_sc = numpy.array(final_fits_ypd_pops_in_sc)
segregant_vector_ypd_pops = numpy.array(segregant_vector_ypd_pops)
segregant_vector_sc_pops = numpy.array(segregant_vector_sc_pops)
final_fits_sc_pops_in_ypd = numpy.array(final_fits_sc_pops_in_ypd)
# BUG FIX: this line previously re-converted final_fits_ypd_pops_in_sc a
# second time, leaving final_fits_sc_pops_in_sc as a plain Python list.
final_fits_sc_pops_in_sc = numpy.array(final_fits_sc_pops_in_sc)
ypd_controls = {}
sc_controls = {}
with open(filename2, 'r') as file2:
    firstline = 0
    for line in file2:
        if firstline < .5:
            firstline += 1
            continue
        linestrs = line.strip().split(';')
        ypd_controls[linestrs[0]] = [float(i) for i in linestrs[1].split(',')]
        sc_controls[linestrs[0]] = [float(i) for i in linestrs[2].split(',')]
genotype_mat = []
with open(filename3, 'r') as file3:
    firstline = 0
    for line in file3:
        if firstline < .5:
            firstline += 1
            continue
        linelist = line.strip().split(';')
        genotype = [int(i) for i in linelist[1].split(',')]
        genotype_mat.append(genotype)
genotype_mat = numpy.array(genotype_mat)
# Boolean masks for the two alleles at locus 3777.
# NOTE(review): presumably this locus tags the RM/BY parental background --
# confirm against the genotype file.  dtype=bool replaces the legacy 'Bool'
# alias, which modern numpy no longer accepts; behaviour is identical.
rm_allele = numpy.array(genotype_mat[:,3777], dtype=bool)
by_allele = numpy.array(1 - genotype_mat[:,3777], dtype=bool)
####Set up mutation table
gene_names = numpy.array(gene_names)
# Genes hit in at least two independent populations ("multi-hit" genes).
double_hit_genes = gene_names[numpy.array(gene_name_counts) > 1.5]
#print seg_samples
#print samples
num_double_hit_genes = len(double_hit_genes)
print double_hit_genes
# Display order for the figure: 90S-preribosome genes first (10), then cAMP
# pathway (5), SIR complex (3), then the remaining multi-hit genes.
gene_names_reordered = ['KRE33', 'ENP2', 'BFR2', 'BMS1', 'UTP20', 'RPS8A', 'RPS6A','CRM1', 'ECM16', 'BUD23', 'IRA1', 'IRA2', 'GPB1', 'GPB2', 'PDE2','SIR2', 'SIR3', 'SIR4', 'RXT3', 'NNK1', 'YPK9', 'LTE1', 'SRS2','PAR32','STE11','RRP3','RQC2']
# Sanity check: the hand-ordered list must contain exactly the same genes.
print set(double_hit_genes) == set(gene_names_reordered)
new_gene_order = []
for i in range(len(gene_names_reordered)):
    index = numpy.where(numpy.array(double_hit_genes) == gene_names_reordered[i])[0][0]
    new_gene_order.append(index)
# 254 sequenced populations x multi-hit genes.  mutation_table marks missense
# hits; indel_table marks indels / premature stops (the two are exclusive:
# a hit reclassified as an indel is subtracted from mutation_table).
mutation_table = numpy.zeros((254,num_double_hit_genes))
indel_table = numpy.zeros((254,num_double_hit_genes))
for i in range(len(samples)):
    for j in range(num_double_hit_genes):
        if double_hit_genes[new_gene_order[j]] in gene_dict_by_sample[samples[i]]:
            mutation_table[i,j] = 1
            gene_ind = gene_dict_by_sample[samples[i]].index(double_hit_genes[new_gene_order[j]])
            mutation = mutation_dict_by_sample[samples[i]][gene_ind]
            mutation_list = mutation.split('_')
            #print mutation_list
            # Field 3 containing ':' marks a multi-base (indel) call.
            if (len(mutation_list[3].split(':')) > 1.5 or 'Stop' in mutation_list[-1]): #indels and premature stops
                indel_table[i,j] = 1
                mutation_table[i,j] -= 1
###Determine genotype of sequenced populations
# Each sequenced clone inherits the founder genotype row of its segregant.
genotype_mat_sequenced_populations = numpy.zeros((len(samples),len(genotype_mat[0,:])))
i = 0
seg_samples = []
for clone in samples:
    name_strs = clone.split('_')
    seg = name_strs[0]
    seg_samples.append(seg)
    genotype_index = segregant_vector.index(seg)
    genotype_mat_sequenced_populations[i,:] = genotype_mat[genotype_index,:]
    i += 1
###Set up an indicator variable for the environment
env_list = []
for sample in samples:
    env = sample.split('_')[2]
    if env=='sc':
        env_list.append(1)
    elif env=='ypd':
        env_list.append(0)
env_list = numpy.array(env_list, dtype='Bool')
##Set up an indicator variable for the Kre33 allele
kre33_allele = genotype_mat_sequenced_populations[:, 9596]
kre33_allele = numpy.array(kre33_allele, dtype='Bool')
###Determine mutations per gene for 4 categories: Kre33-RM/30C; Kre33-BY/30C; Kre33-RM/37C; Kre33-BY/37C
group4 = numpy.array(env_list*kre33_allele, dtype='Bool')
group3 = numpy.array(env_list*(1 - kre33_allele), dtype='Bool')
group2 = numpy.array((1 - env_list)*kre33_allele, dtype='Bool')
group1 = numpy.array((1 - env_list)*(1 - kre33_allele), dtype='Bool')
counts_grp1_mutations = numpy.sum(mutation_table[group1, :], axis=0)
counts_grp1_indels = numpy.sum(indel_table[group1,:], axis=0)
counts_grp2_mutations = numpy.sum(mutation_table[group2, :], axis=0)
counts_grp2_indels = numpy.sum(indel_table[group2,:], axis=0)
counts_grp3_mutations = numpy.sum(mutation_table[group3, :], axis=0)
counts_grp3_indels = numpy.sum(indel_table[group3,:], axis=0)
counts_grp4_mutations = numpy.sum(mutation_table[group4, :], axis=0)
counts_grp4_indels = numpy.sum(indel_table[group4,:], axis=0)
print counts_grp1_mutations + counts_grp1_indels
print counts_grp2_mutations + counts_grp2_indels
print counts_grp3_mutations + counts_grp3_indels
print counts_grp4_mutations + counts_grp4_indels
print numpy.sum(group1)
print numpy.sum(group2)
print numpy.sum(group3)
print numpy.sum(group4)
###Basic counting
num_nonsyn_muts_sc = numpy.sum(gene_name_counts_sc)
num_nonsyn_muts_ypd = numpy.sum(gene_name_counts_ypd)
# First 10 genes of the reordered list are the Kre33-associated
# (90S-preribosome) genes.
# NOTE(review): grp1/grp2 are the (1 - env_list) groups, i.e. env != 'sc',
# yet they feed the *_sc totals here -- verify the sc/ypd labels are not
# swapped relative to the group definitions above.
num_kre33_ass_muts_sc = numpy.sum( counts_grp1_mutations[0:10] ) + numpy.sum( counts_grp2_mutations[0:10] )
num_kre33_ass_muts_ypd = numpy.sum( counts_grp3_mutations[0:10] ) + numpy.sum( counts_grp4_mutations[0:10] )
frac_kre33_ass_muts_sc = num_kre33_ass_muts_sc/float(num_nonsyn_muts_sc)
frac_kre33_ass_muts_ypd = num_kre33_ass_muts_ypd/float(num_nonsyn_muts_ypd)
print 'kre33_muts_sc', num_kre33_ass_muts_sc
print 'kre33_muts_ypd', num_kre33_ass_muts_ypd
print 'kre33 frac muts sc', frac_kre33_ass_muts_sc
print 'kre33 frac muts ypd', frac_kre33_ass_muts_ypd
###Basic counting per population
# Column 0 of mutation_table is KRE33 itself (first gene in display order).
num_pops_kre33_mut = numpy.sum( mutation_table[:, 0] > .5 )
frac_pops_kre33_mut = num_pops_kre33_mut/float(mutation_table.shape[0])
print num_pops_kre33_mut, frac_pops_kre33_mut
print numpy.sum( mutation_table )
print numpy.sum( mutation_table[:,0:10] )
####
# Four horizontal-bar panels in one row; panel widths are proportional to the
# number of populations in each env x KRE33-allele group (254 populations).
# Panel order, left to right: grp2 (OT, RM), grp4 (HT, RM), grp1 (OT, BY),
# grp3 (HT, BY).  NOTE(review): this figure uses the pre-2.0 matplotlib API
# (axis_bgcolor, barh(left=..., bottom=...)) -- it will not run on modern
# matplotlib without porting.
space = .01
middle_space = .07
buffer = 4*space + middle_space  # NOTE: shadows the Python 2 builtin `buffer`
grp1_width = (1 - buffer)*numpy.sum(group1)/254.
grp2_width = (1 - buffer)*numpy.sum(group2)/254.
grp3_width = (1 - buffer)*numpy.sum(group3)/254.
grp4_width = (1 - buffer)*numpy.sum(group4)/254.
####
bg_alpha = .6
# Horizontal rules separating the functional gene groups, in gene-row units
# counted from the top: 10 preribosome genes, 5 cAMP genes, 3 SIR genes.
kre_border = num_double_hit_genes + .4 - 10
camp_border = num_double_hit_genes + .4 - 10 - 5
sir_border = num_double_hit_genes + .4 - 10 - 5 - 3
# Common x-axis limit: the largest per-gene hit count over all four groups.
x_max = numpy.max([numpy.max(counts_grp1_mutations + counts_grp1_indels), numpy.max(counts_grp2_mutations + counts_grp2_indels), numpy.max(counts_grp3_mutations + counts_grp3_indels), numpy.max(counts_grp4_mutations + counts_grp4_indels)])
#color1 = [0,158/255.,115/255.]
#color2 = [230./255.,159/255.,0./255.]
color1 = 'k'      # missense mutations
color2 = 'grey'   # nonsense mutations / indels
my_cmap = matplotlib.colors.ListedColormap([[1,1,1],color1,color2],name='my_colormap')
bg_color = 'MediumSlateBlue'
fig = pt.figure(figsize=(8,4))
# Panel 1: grp2 (evolved at OT, RM KRE33 allele); bars grow right-to-left.
#bounding_rect = [.02, .02, .2, .98]
bounding_rect = [space, .02, grp2_width, .98]
ax = fig.add_axes(bounding_rect,axis_bgcolor= bg_color, alpha= bg_alpha)#, frame_on=False)
#ax.axvline(x_max,color='k',linewidth=2)
for i in range(len(gene_names_reordered)):
    # Gene list is reversed so the first gene (KRE33) plots at the top.
    nmut = int(counts_grp2_mutations[::-1][i])
    nindel = int(counts_grp2_indels[::-1][i])
    for j in range(nmut):
        bar1 = ax.barh(left = x_max - j - 1, bottom = i + .5, height=.8, width = .8, color = color1, linewidth=0)
    for j in range(nindel):
        bar2 = ax.barh(left = x_max - nmut - j - 1, bottom = i + .5, height=.8, width = .8, color = color2, linewidth=.5)
ax.set_ylim(.5,num_double_hit_genes + .5)
pt.tick_params(
    axis='x',          # changes apply to the x-axis
    which='both',      # both major and minor ticks are affected
    bottom='off',      # ticks along the bottom edge are off
    top='off',         # ticks along the top edge are on
    labeltop='off',
    direction='in',
    labelbottom='off',
    length=2)
pt.tick_params(
    axis='y',          # changes apply to the x-axis
    which='both',      # both major and minor ticks are affected
    left='off',        # ticks along the bottom edge are off
    right='off',       # ticks along the top edge are on
    labelleft='off',
    labelright='off')
ax.set_xlim(0,x_max-.2)
ax.axhline(kre_border, color='k')
#for i in numpy.arange(x_max):
#    ax.axvline(i, color=bg_color,alpha=bg_alpha,linewidth=.5)
ax.patch.set_alpha(bg_alpha)
ax.text(5.5, num_double_hit_genes+1, 'Evolved at OT')
ax.axhline(kre_border, color='k',linewidth=.8)
ax.axhline(camp_border, color='k',linewidth=.8)
ax.axhline(sir_border, color='k',linewidth=.8)
# NOTE(review): bar1/bar2 are whatever bars were drawn last; if a group has
# no missense or no indel hits at all, this legend call raises NameError.
ax.legend([bar1, bar2],['missense','nonsense or indel'],loc='lower left')
ax.text(.8, kre_border + 8, '90s preribosomal', rotation=90,fontsize=9)
ax.text(.8, camp_border + 3, 'cAMP', rotation=90,fontsize=9)
ax.text(.8, sir_border + 1.5, 'SIR', rotation=90,fontsize=9)
#ax.text(1, sir_border, 'SIR pathway', rotation=90)
###########
# Panel 3 (third from left): grp1 (evolved at OT, BY allele); bars grow
# left-to-right.
bounding_rect = [grp2_width + grp4_width + 2*space + middle_space, .02, grp1_width, .98]
ax = fig.add_axes(bounding_rect,axis_bgcolor='DarkSlateBlue',alpha=bg_alpha)#,frame_on=False)
#ax.axvline(0,color='k',linewidth=2)
for i in range(len(gene_names_reordered)):
    nmut = int(counts_grp1_mutations[::-1][i])
    nindel = int(counts_grp1_indels[::-1][i])
    for j in range(nmut):
        bar1 = ax.barh(left = j, bottom = i + .5, height=.8, width = .8, color = color1, linewidth=0)
    for j in range(nindel):
        bar2 = ax.barh(left = nmut + j, bottom = i + .5, height=.8, width = .8, color = color2, linewidth=.5)
ax.set_ylim(.5,num_double_hit_genes + .5)
pt.tick_params(
    axis='x',          # changes apply to the x-axis
    which='both',      # both major and minor ticks are affected
    bottom='off',      # ticks along the bottom edge are off
    top='off',         # ticks along the top edge are on
    labeltop='off',
    direction='in',
    labelbottom='off',
    length=2)
pt.tick_params(
    axis='y',          # changes apply to the x-axis
    which='both',      # both major and minor ticks are affected
    left='off',        # ticks along the bottom edge are off
    right='off',       # ticks along the top edge are on
    labelleft='off',
    labelright='off')
ax.set_xlim(0,x_max)
ax.patch.set_alpha(bg_alpha)
ax.text(5.5, num_double_hit_genes+1, 'Evolved at OT')
ax.axhline(kre_border, color='k',linewidth=.8)
ax.axhline(camp_border, color='k',linewidth=.8)
ax.axhline(sir_border, color='k',linewidth=.8)
########
# Panel 2 (second from left): grp4 (evolved at HT, RM allele); bars grow
# right-to-left.  Gene-name labels are drawn in the gap to its right.
bounding_rect = [grp2_width + 2*space, .02, grp4_width, .98]
ax = fig.add_axes(bounding_rect,axis_bgcolor='Tomato',alpha=bg_alpha)#,frame_on=False)
#ax.axvline(x_max,color='k',linewidth=2)
for i in range(len(gene_names_reordered)):
    nmut = int(counts_grp4_mutations[::-1][i])
    nindel = int(counts_grp4_indels[::-1][i])
    for j in range(nmut):
        bar1 = ax.barh(left = x_max - j - 1, bottom = i + .5, height=.8, width = .8, color = color1, linewidth=0)
    for j in range(nindel):
        bar2 = ax.barh(left = x_max - nmut - j - 1, bottom = i + .5, height=.8, width = .8, color = color2, linewidth=.5)
ax.set_ylim(.5,num_double_hit_genes + .5)
pt.tick_params(
    axis='x',          # changes apply to the x-axis
    which='both',      # both major and minor ticks are affected
    bottom='off',      # ticks along the bottom edge are off
    top='off',         # ticks along the top edge are on
    labeltop='off',
    direction='in',
    labelbottom='off',
    length=2)
pt.tick_params(
    axis='y',          # changes apply to the x-axis
    which='both',      # both major and minor ticks are affected
    left='off',        # ticks along the bottom edge are off
    right='off',       # ticks along the top edge are on
    labelleft='off',
    labelright='off')
ax.set_xlim(0,x_max-.2)
ax.patch.set_alpha(bg_alpha)
ax.text(5.5, num_double_hit_genes + 1, 'Evolved at HT')
ax.text(-6, num_double_hit_genes + 2, 'RM KRE33 allele')
ax.axhline(kre_border, color='k')
# Gene labels between the RM and BY panel pairs, top (KRE33) to bottom.
for i in numpy.arange(num_double_hit_genes):
    gene = gene_names_reordered[num_double_hit_genes-(i+1)]
    ax.text(x_max + 3.25, i+.6, gene, ha='center', fontsize=9)
ax.axhline(kre_border, color='k',linewidth=.8)
ax.axhline(camp_border, color='k',linewidth=.8)
ax.axhline(sir_border, color='k',linewidth=.8)
#######
# Panel 4 (rightmost): grp3 (evolved at HT, BY allele); bars grow
# left-to-right.
bounding_rect = [grp2_width+grp4_width+grp1_width+3*space+middle_space, .02, grp1_width, .98]
ax = fig.add_axes(bounding_rect,axis_bgcolor='Brown',alpha=bg_alpha)#,frame_on=False)
#ax.axvline(0,color='k',linewidth=2)
for i in range(len(gene_names_reordered)):
    nmut = int(counts_grp3_mutations[::-1][i])
    nindel = int(counts_grp3_indels[::-1][i])
    for j in range(nmut):
        bar1 = ax.barh(left = j, bottom = i + .5, height=.8, width = .8, color = color1, linewidth=0)
    for j in range(nindel):
        bar2 = ax.barh(left = nmut + j, bottom = i + .5, height=.8, width = .8, color = color2, linewidth=.5)
ax.set_ylim(.5,num_double_hit_genes + .5)
pt.tick_params(
    axis='x',          # changes apply to the x-axis
    which='both',      # both major and minor ticks are affected
    bottom='off',      # ticks along the bottom edge are off
    top='off',         # ticks along the top edge are on
    labeltop='off',
    direction='in',
    labelbottom='off',
    length=2)
pt.tick_params(
    axis='y',          # changes apply to the x-axis
    which='both',      # both major and minor ticks are affected
    left='off',        # ticks along the bottom edge are off
    right='off',       # ticks along the top edge are on
    labelleft='off',
    labelright='off')
ax.set_xlim(0,x_max)
ax.set_yticks(numpy.arange(1,num_double_hit_genes + 1,1))
ax.patch.set_alpha(bg_alpha)
ax.text(5.5, num_double_hit_genes+1, 'Evolved at HT')
ax.text(-6, num_double_hit_genes+2, 'BY KRE33 allele')
ax.axhline(kre_border, color='k',linewidth=.8)
ax.axhline(camp_border, color='k',linewidth=.8)
ax.axhline(sir_border, color='k',linewidth=.8)
pt.savefig('mutation_histograms_1_9_2017.pdf',bbox_inches='tight')
| |
"""This module defines some handy :py:class:`Importable` elements.
An ``Importable`` is usually composed of two different parts:
* A *natural key* used to identify *the same* element across different systems.
This is the only required component for an ``Importable``.
* An optional set of properties that form *the contents*. The data in this
properties is carried across systems in the process of syncing the elements.
Two elements that are *the same* and have *equal contents* are said to be *in
sync*.
For example an element representing an online video can use the value of the
streaming URL to be its natural key. The contents of the element can be formed
from a view counter and the video title. In this scenario changes on the video
title and view counter can be detected and carried across systems thus keeping
elements which are the same in sync. Changes to the video URL will make the
video element lose any correspondence with elements belonging to other systems.
"""
__all__ = ['Importable', 'RecordingImportable']
class _AutoContent(type):
    """Metaclass implementing the ``__content_attrs__`` shortcut.

    When a class declares ``__content_attrs__``, this metaclass generates an
    ``__init__`` that accepts those attributes as optional keyword arguments,
    a default ``__repr__``, extends ``__slots__`` with the attributes, and
    stores them in ``_content_attrs``.

    >>> class MockImportable(Importable):
    ...     __content_attrs__ = 'attr' # doctest:+IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ValueError:
    >>> class MockImportable(Importable):
    ...     __content_attrs__ = 123 # doctest:+IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ValueError:
    """
    def __new__(cls, name, bases, d):
        # Classes without the magic attribute are created untouched.
        _magic_name = '__content_attrs__'
        if _magic_name not in d:
            return type.__new__(cls, name, bases, d)
        ca = d[_magic_name]
        # Reject strings explicitly: they are iterable, but almost certainly
        # a mistake (each character would become a content attribute).
        # XXX: py3 -- `basestring` does not exist on Python 3.
        if isinstance(ca, basestring):
            raise ValueError(
                '%s must be an iterable not a string.' % _magic_name
            )
        try:
            ca = frozenset(ca)
        except TypeError:
            raise ValueError('%s must be iterable.' % _magic_name)
        def __init__(self, *args, **kwargs):
            # Pop declared content attributes out of kwargs; everything else
            # is forwarded to the base constructor.
            update_kwargs = {}
            for content_attr in self._content_attrs:
                try:
                    update_kwargs[content_attr] = kwargs.pop(content_attr)
                except KeyError:
                    pass # All arguments are optional
            self._update(update_kwargs)
            # `klass` is bound below, after type.__new__ -- a deliberate
            # closure over the class being created.
            super(klass, self).__init__(*args, **kwargs)
        def __repr__(self):
            # Render only the content attributes that are currently set.
            attrs = []
            for attr_name in self._content_attrs:
                try:
                    attr_value = getattr(self, attr_name)
                except AttributeError:
                    continue
                attrs.append('%s=%r' % (attr_name, attr_value))
            if attrs:
                cls_name = self.__class__.__name__
                return '%s(%r, %s)' % (
                    cls_name, self._natural_key, ', '.join(attrs)
                )
            return super(klass, self).__repr__()
        d['__init__'] = __init__
        d.setdefault('__repr__', __repr__)
        # Content attributes must live in __slots__ because Importable
        # declares __slots__ and instances have no __dict__.
        d['__slots__'] = frozenset(d.get('__slots__', [])) | ca
        d['_content_attrs'] = ca
        klass = type.__new__(cls, name, bases, d)
        return klass
class Importable(object):
    """A default implementation representing an importable element.

    This class is intended to be specialized in order to provide the element
    content and to override its behaviour if needed.

    The :py:meth:`sync` implementation in this class doesn't keep track of
    changed values. For such an implementation see
    :py:class:`RecordingImportable`.

    ``Importable`` instances are hashable and comparable based on the
    *natural_key* value. Because of this the *natural_key* must also be
    hashable and should implement equality and less than operators:

    >>> i1 = Importable(0)
    >>> i2 = Importable(0)
    >>> hash(i1) == hash(i2)
    True
    >>> i1 == i2
    True
    >>> not i1 < i2
    True

    ``Importable`` elements can access the *natural_key* value used on
    instantiation through the ``natural_key`` property:

    >>> i = Importable((123, 'abc'))
    >>> i.natural_key
    (123, 'abc')

    Listeners can register to observe an ``Importable`` element for changes.
    Every time the content attributes change with a value that is not equal to
    the previous one all registered listeners will be notified:

    >>> class MockImportable(Importable):
    ...     _content_attrs = ['a', 'b']
    >>> i = MockImportable(0)
    >>> notifications = []
    >>> i.register(lambda x: notifications.append(x))
    >>> i.a = []
    >>> i.b = 'b'
    >>> i.b = 'bb'
    >>> len(notifications)
    3
    >>> notifications[0] is notifications[1] is notifications[2] is i
    True
    >>> notifications = []
    >>> l = []
    >>> i.a = l
    >>> len(notifications)
    0
    >>> i.a is l
    True

    There is also a shortcut for defining new ``Importable`` classes other than
    using inheritance by setting ``__content_attrs__`` to an iterable of
    attribute names. This will automatically create a constructor for your
    class that accepts all values in the list as keyword arguments. It also
    sets ``_content_attrs`` and ``__slots__`` to include this values and
    generates a ``__repr__`` for you. This method however may not fit all your
    needs, in that case subclassing ``Importable`` is still your best option.
    One thing to keep in mind is that it's not possible to dynamically change
    ``_content_attrs`` for instances created from this class because of the
    ``__slots__`` usage.

    >>> class MockImportable(Importable):
    ...     __content_attrs__ = ['a', 'b']
    >>> MockImportable(0)
    MockImportable(0)
    >>> MockImportable(0, a=1, b=('a', 'b'))
    MockImportable(0, a=1, b=('a', 'b'))
    >>> i = MockImportable(0, a=1)
    >>> i.b = 2
    >>> i.a, i.b
    (1, 2)
    >>> i.update(a=100, b=200)
    True
    """
    __metaclass__ = _AutoContent
    __slots__ = ('_listeners', '_natural_key')
    # Names of the attributes that form the element content; empty by default.
    _content_attrs = frozenset([])
    # Unique marker distinguishing "attribute missing" from any real value.
    _sentinel = object()
    def __init__(self, natural_key, *args, **kwargs):
        self._listeners = []
        self._natural_key = natural_key
        super(Importable, self).__init__(*args, **kwargs)
    @property
    def natural_key(self):
        # Read-only access to the identity value given at instantiation.
        return self._natural_key
    def __setattr__(self, attr, value):
        # Notify listeners when a *content* attribute changes to a value that
        # compares unequal to the previous one (missing counts as different).
        is_different = False
        if attr in self._content_attrs:
            is_different = getattr(self, attr, object()) != value
        super(Importable, self).__setattr__(attr, value)
        if is_different:
            self._notify()
    def update(self, **kwargs):
        """Update multiple content attributes and fire a single notification.

        Multiple changes to the element content can be grouped in a single call
        to :py:meth:`update`. This method should return ``True`` if at least
        one element differed from the original values or else ``False``.

        >>> class MockImportable(Importable):
        ...     _content_attrs = ['a', 'b']
        >>> i = MockImportable(0)
        >>> i.register(lambda x: notifications.append(x))
        >>> notifications = []
        >>> i.update(a=100, b=200)
        True
        >>> len(notifications)
        1
        >>> notifications[0] is i
        True
        >>> notifications = []
        >>> i.update(a=100, b=200)
        False
        >>> len(notifications)
        0

        Trying to call update using keywords that are not present in
        ``_content_attrs`` should raise ``ValueError``:

        >>> i.update(c=1) # doctest:+IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ValueError:
        """
        content_attrs = self._content_attrs
        for attr_name, value in kwargs.items():
            if attr_name not in content_attrs:
                raise ValueError(
                    'Attribute %s is not part of the element content.'
                    % attr_name
                )
        has_changed = self._update(kwargs)
        if has_changed:
            self._notify()
        return has_changed
    def _update(self, attrs):
        # Set attributes via the base __setattr__ (bypassing the notifying
        # override) and report whether any value actually changed.
        has_changed = False
        super_ = super(Importable, self)
        for attr_name, value in attrs.items():
            if not has_changed:
                current_value = getattr(self, attr_name, self._sentinel)
                # object() sentinel will also be different
                if current_value != value:
                    has_changed = True
            super_.__setattr__(attr_name, value)
        return has_changed
    def sync(self, other):
        """Puts this element in sync with the *other*.

        The default implementation uses ``_content_attrs`` to search for
        the attributes that need to be synced between the elements and it
        copies the values of each attribute it finds from the *other* element
        in this one.

        By default the ``self._content_attrs`` is an empty list so no
        synchronization will take place:

        >>> class MockImportable(Importable):
        ...     pass
        >>> i1 = MockImportable(0)
        >>> i2 = MockImportable(0)
        >>> i1.a, i1.b = 'a1', 'b1'
        >>> i2.a, i2.b = 'a2', 'b2'
        >>> has_changed = i1.sync(i2)
        >>> i1.a
        'a1'

        >>> class MockImportable(Importable):
        ...     _content_attrs = ['a', 'b', 'x']
        >>> i1 = MockImportable(0)
        >>> i2 = MockImportable(0)
        >>> i1.a, i1.b = 'a1', 'b1'
        >>> i2.a, i2.b = 'a2', 'b2'
        >>> has_changed = i1.sync(i2)
        >>> i1.a, i1.b
        ('a2', 'b2')

        If no synchronization was needed (i.e. the content of the elements were
        equal) this method should return ``False``, otherwise it should return
        ``True``:

        >>> i1.sync(i2)
        False
        >>> i1.a = 'a1'
        >>> i1.sync(i2)
        True

        If the sync mutated this element all listeners should be notified. See
        :py:meth:`register`:

        >>> i1.a = 'a1'
        >>> notifications = []
        >>> i1.register(lambda x: notifications.append(x))
        >>> has_changed = i1.sync(i2)
        >>> len(notifications)
        1
        >>> notifications[0] is i1
        True

        All attributes that can't be found in the *other* element are skipped:

        >>> i1._content_attrs = ['a', 'b', 'c']
        >>> has_changed = i1.sync(i2)
        >>> hasattr(i1, 'c')
        False
        """
        has_changed = self._sync(self._content_attrs, other)
        if has_changed:
            self._notify()
        return has_changed
    def _sync(self, content_attrs, other):
        # Collect the named attributes that exist on `other`, then apply them
        # through _update (no notification here; callers decide).
        attrs = {}
        for attr in content_attrs:
            try:
                that = getattr(other, attr)
            except AttributeError:
                continue
            else:
                attrs[attr] = that
        return self._update(attrs)
    def register(self, listener):
        """Register a callable to be notified when ``sync`` changes data.

        This method should raise an ``ValueError`` if *listener* is not a
        callable:

        >>> i = Importable(0)
        >>> i.register(1) # doctest:+IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ValueError:

        Same listener can register multiple times:

        >>> notifications = []
        >>> listener = lambda x: notifications.append(x)
        >>> i.register(listener)
        >>> i.register(listener)
        >>> i._notify()
        >>> notifications[0] is notifications[1] is i
        True
        """
        if not callable(listener):
            raise ValueError('Listener is not callable: %s' % listener)
        self._listeners.append(listener)
    def is_registered(self, listener):
        """Check if the listener is already registered.

        >>> i = Importable(0)
        >>> a = lambda x: None
        >>> i.is_registered(a)
        False
        >>> i.register(a)
        >>> i.is_registered(a)
        True
        """
        return listener in self._listeners
    def _notify(self):
        """Sends a notification to all listeners passing this element."""
        for listener in self._listeners:
            listener(self)
    def __hash__(self):
        # Identity is fully determined by the natural key.
        return hash(self._natural_key)
    def __eq__(self, other):
        """
        >>> Importable(0) == None
        False
        """
        try:
            return self._natural_key == other.natural_key
        except AttributeError:
            return NotImplemented
    def __lt__(self, other):
        """
        >>> Importable(0) < None
        False
        """
        try:
            return self._natural_key < other.natural_key
        except AttributeError:
            return NotImplemented
    def __repr__(self):
        """
        >>> Importable((1, 'a'))
        Importable((1, 'a'))
        >>> class MockImportable(Importable): pass
        >>> MockImportable('xyz')
        MockImportable('xyz')
        """
        cls_name = self.__class__.__name__
        return '%s(%r)' % (cls_name, self._natural_key)
class _Original(Importable):
    # Internal snapshot holder backing RecordingImportable.orig.  Unlike
    # Importable it declares no __slots__, so instances have a __dict__ and
    # the copied content attributes land there.
    def copy(self, content_attrs, other):
        """Replace the snapshot with `other`'s current content attributes."""
        # Clearing __dict__ drops attributes deleted on `other` since the
        # last snapshot; _listeners/_natural_key live in Importable's
        # __slots__ and are unaffected.
        self.__dict__.clear()
        self._sync(content_attrs, other)
class RecordingImportable(Importable):
    """Very similar to :py:class:`Importable` but tracks changes.

    This class records the original values that the attributes had before
    any change introduced by attribute assignment or call to ``update`` and
    ``sync``.

    Just as in :py:class:`Importable` case you can define new classes using
    ``__content_attrs__`` as a shortcut.

    >>> class MockImportable(RecordingImportable):
    ...     __content_attrs__ = ['a', 'b']
    >>> MockImportable(0)
    MockImportable(0)
    >>> MockImportable(0, a=1, b=('a', 'b'))
    MockImportable(0, a=1, b=('a', 'b'))
    >>> i = MockImportable(0, a=1)
    >>> i.b = 2
    >>> i.a, i.b
    (1, 2)
    >>> i.update(a=100, b=200)
    True
    >>> i.orig.a
    1
    """
    __slots__ = ('_original', )
    def __init__(self, *args, **kwargs):
        super(RecordingImportable, self).__init__(*args, **kwargs)
        # The snapshot shares this element's natural key; reset() seeds it
        # with the post-construction content values.
        self._original = _Original(self.natural_key)
        self.reset()
    @property
    def orig(self):
        """An object that can be used to access the elements original values.

        The object has all the attributes that this element had when it was
        instantiated or last time when :py:meth:`reset` was called.

        >>> class MockImportable(RecordingImportable):
        ...     _content_attrs = ['a']
        >>> i = MockImportable(0)
        >>> hasattr(i.orig, 'a')
        False
        >>> i.a = 'a'
        >>> i.reset()
        >>> i.a
        'a'
        >>> i.orig.a
        'a'
        >>> i.a = 'aa'
        >>> i.a
        'aa'
        >>> i.orig.a
        'a'
        >>> del i.a
        >>> i.reset()
        >>> hasattr(i.orig, 'a')
        False
        """
        return self._original
    def reset(self):
        """Create a snapshot of the current values.

        >>> class MockImportable(RecordingImportable):
        ...     _content_attrs = ['a']
        >>> i = MockImportable(0)
        >>> hasattr(i.orig, 'a')
        False
        >>> i.a = 'a'
        >>> i.reset()
        >>> i.a = 'aa'
        >>> i.orig.a
        'a'
        >>> i.reset()
        >>> i.orig.a
        'aa'
        """
        self._original.copy(self._content_attrs, self)
| |
# Copyright (c) 2015, Activision Publishing, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
from assertpy import assert_that,fail
class TestDate(object):
def setup(self):
self.d1 = datetime.datetime.today()
def test_is_before(self):
d2 = datetime.datetime.today()
assert_that(self.d1).is_before(d2)
def test_is_before_failure(self):
try:
d2 = datetime.datetime.today()
assert_that(d2).is_before(self.d1)
fail('should have raised error')
except AssertionError as ex:
assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> to be before <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}>, but was not.')
def test_is_before_bad_val_type_failure(self):
try:
assert_that(123).is_before(123)
fail('should have raised error')
except TypeError as ex:
assert_that(str(ex)).is_equal_to('val must be datetime, but was type <int>')
def test_is_before_bad_arg_type_failure(self):
try:
assert_that(self.d1).is_before(123)
fail('should have raised error')
except TypeError as ex:
assert_that(str(ex)).is_equal_to('given arg must be datetime, but was type <int>')
def test_is_after(self):
d2 = datetime.datetime.today()
assert_that(d2).is_after(self.d1)
def test_is_after_failure(self):
try:
d2 = datetime.datetime.today()
assert_that(self.d1).is_after(d2)
fail('should have raised error')
except AssertionError as ex:
assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> to be after <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}>, but was not.')
def test_is_after_bad_val_type_failure(self):
try:
assert_that(123).is_after(123)
fail('should have raised error')
except TypeError as ex:
assert_that(str(ex)).is_equal_to('val must be datetime, but was type <int>')
def test_is_after_bad_arg_type_failure(self):
try:
assert_that(self.d1).is_after(123)
fail('should have raised error')
except TypeError as ex:
assert_that(str(ex)).is_equal_to('given arg must be datetime, but was type <int>')
def test_is_equal_to_ignoring_milliseconds(self):
assert_that(self.d1).is_equal_to_ignoring_milliseconds(self.d1)
def test_is_equal_to_ignoring_milliseconds_failure(self):
try:
d2 = datetime.datetime.today() + datetime.timedelta(days=1)
assert_that(self.d1).is_equal_to_ignoring_milliseconds(d2)
fail('should have raised error')
except AssertionError as ex:
assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> to be equal to <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}>, but was not.')
def test_is_equal_to_ignoring_milliseconds_bad_val_type_failure(self):
try:
assert_that(123).is_equal_to_ignoring_milliseconds(123)
fail('should have raised error')
except TypeError as ex:
assert_that(str(ex)).is_equal_to('val must be datetime, but was type <int>')
def test_is_equal_to_ignoring_milliseconds_bad_arg_type_failure(self):
try:
assert_that(self.d1).is_equal_to_ignoring_milliseconds(123)
fail('should have raised error')
except TypeError as ex:
assert_that(str(ex)).is_equal_to('given arg must be datetime, but was type <int>')
def test_is_equal_to_ignoring_seconds(self):
    """A datetime equals itself when seconds are ignored."""
    same_moment = self.d1
    assert_that(same_moment).is_equal_to_ignoring_seconds(self.d1)
def test_is_equal_to_ignoring_seconds_failure(self):
    """Dates a day apart are not equal even ignoring seconds."""
    try:
        d2 = datetime.datetime.today() + datetime.timedelta(days=1)
        assert_that(self.d1).is_equal_to_ignoring_seconds(d2)
        fail('should have raised error')
    except AssertionError as ex:
        # Message format truncates at whole minutes.
        assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}> to be equal to <\d{4}-\d{2}-\d{2} \d{2}:\d{2}>, but was not.')

def test_is_equal_to_ignoring_seconds_bad_val_type_failure(self):
    """TypeError expected when the asserted value is not a datetime."""
    try:
        assert_that(123).is_equal_to_ignoring_seconds(123)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('val must be datetime, but was type <int>')

def test_is_equal_to_ignoring_seconds_bad_arg_type_failure(self):
    """TypeError expected when the comparison arg is not a datetime."""
    try:
        assert_that(self.d1).is_equal_to_ignoring_seconds(123)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('given arg must be datetime, but was type <int>')
def test_is_equal_to_ignoring_time(self):
    """A datetime equals itself when the time-of-day part is ignored."""
    same_moment = self.d1
    assert_that(same_moment).is_equal_to_ignoring_time(self.d1)
def test_is_equal_to_ignoring_time_failure(self):
    """Dates a day apart differ even when time-of-day is ignored."""
    try:
        d2 = datetime.datetime.today() + datetime.timedelta(days=1)
        assert_that(self.d1).is_equal_to_ignoring_time(d2)
        fail('should have raised error')
    except AssertionError as ex:
        # Message format shows the date only.
        assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2}> to be equal to <\d{4}-\d{2}-\d{2}>, but was not.')

def test_is_equal_to_ignoring_time_bad_val_type_failure(self):
    """TypeError expected when the asserted value is not a datetime."""
    try:
        assert_that(123).is_equal_to_ignoring_time(123)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('val must be datetime, but was type <int>')

def test_is_equal_to_ignoring_time_bad_arg_type_failure(self):
    """TypeError expected when the comparison arg is not a datetime."""
    try:
        assert_that(self.d1).is_equal_to_ignoring_time(123)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('given arg must be datetime, but was type <int>')
def test_is_greater_than(self):
    """A later datetime compares greater than the earlier fixture."""
    later = datetime.datetime.today()
    assert_that(later).is_greater_than(self.d1)
def test_is_greater_than_failure(self):
    """is_greater_than() raises AssertionError when val is the earlier one."""
    try:
        d2 = datetime.datetime.today()
        assert_that(self.d1).is_greater_than(d2)
        fail('should have raised error')
    except AssertionError as ex:
        assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> to be greater than <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}>, but was not.')

def test_is_greater_than_bad_arg_type_failure(self):
    """TypeError expected when the comparison arg is not a datetime.

    Note the '<datetime>' bracket style here differs from the is_after
    wording ('must be datetime') -- this mirrors the library's messages.
    """
    try:
        assert_that(self.d1).is_greater_than(123)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('given arg must be <datetime>, but was <int>')
def test_is_greater_than_or_equal_to(self):
    """A datetime is greater-than-or-equal to itself (equality case)."""
    same_moment = self.d1
    assert_that(same_moment).is_greater_than_or_equal_to(self.d1)
def test_is_greater_than_or_equal_to_failure(self):
    """gte must raise when val is strictly earlier than the arg."""
    try:
        d2 = datetime.datetime.today()
        assert_that(self.d1).is_greater_than_or_equal_to(d2)
        fail('should have raised error')
    except AssertionError as ex:
        assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> to be greater than or equal to <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}>, but was not.')

def test_is_greater_than_or_equal_to_bad_arg_type_failure(self):
    """TypeError expected when the comparison arg is not a datetime."""
    try:
        assert_that(self.d1).is_greater_than_or_equal_to(123)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('given arg must be <datetime>, but was <int>')
def test_is_less_than(self):
    """The earlier fixture compares less than a fresh datetime."""
    later = datetime.datetime.today()
    assert_that(self.d1).is_less_than(later)
def test_is_less_than_failure(self):
    """is_less_than() raises AssertionError when val is the later one."""
    try:
        d2 = datetime.datetime.today()
        assert_that(d2).is_less_than(self.d1)
        fail('should have raised error')
    except AssertionError as ex:
        assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> to be less than <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}>, but was not.')

def test_is_less_than_bad_arg_type_failure(self):
    """TypeError expected when the comparison arg is not a datetime."""
    try:
        assert_that(self.d1).is_less_than(123)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('given arg must be <datetime>, but was <int>')
def test_is_less_than_or_equal_to(self):
    """A datetime is less-than-or-equal to itself (equality case)."""
    same_moment = self.d1
    assert_that(same_moment).is_less_than_or_equal_to(self.d1)
def test_is_less_than_or_equal_to_failure(self):
    """lte must raise when val is strictly later than the arg."""
    try:
        d2 = datetime.datetime.today()
        assert_that(d2).is_less_than_or_equal_to(self.d1)
        fail('should have raised error')
    except AssertionError as ex:
        assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> to be less than or equal to <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}>, but was not.')

def test_is_less_than_or_equal_to_bad_arg_type_failure(self):
    """TypeError expected when the comparison arg is not a datetime."""
    try:
        assert_that(self.d1).is_less_than_or_equal_to(123)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('given arg must be <datetime>, but was <int>')
def test_is_between(self):
    """A datetime taken now lies between the fixture and another now()."""
    middle = datetime.datetime.today()
    upper_bound = datetime.datetime.today()
    assert_that(middle).is_between(self.d1, upper_bound)
def test_is_between_failure(self):
    """is_between() raises when val precedes both bounds."""
    try:
        d2 = datetime.datetime.today()
        d3 = datetime.datetime.today()
        assert_that(self.d1).is_between(d2, d3)
        fail('should have raised error')
    except AssertionError as ex:
        assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> to be between <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> and <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}>, but was not.')

def test_is_between_bad_arg1_type_failure(self):
    """TypeError expected when the low bound is not a datetime."""
    try:
        assert_that(self.d1).is_between(123, 456)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('given low arg must be <datetime>, but was <int>')

def test_is_between_bad_arg2_type_failure(self):
    """TypeError expected when the high bound is not a datetime."""
    try:
        d2 = datetime.datetime.today()
        assert_that(self.d1).is_between(d2, 123)
        fail('should have raised error')
    except TypeError as ex:
        # NOTE(review): the expected message reports '<datetime>' as the
        # offending type even though the bad high arg is an int. This pins the
        # library's current wording (it appears to echo the low arg's type);
        # confirm upstream whether that message is intentional.
        assert_that(str(ex)).is_equal_to('given high arg must be <datetime>, but was <datetime>')
def test_is_close_to(self):
    """The fixture datetime is within five minutes of now."""
    now = datetime.datetime.today()
    tolerance = datetime.timedelta(minutes=5)
    assert_that(self.d1).is_close_to(now, tolerance)
def test_is_close_to_failure(self):
    """is_close_to() raises when the gap (5 min) exceeds tolerance (1 min)."""
    try:
        d2 = self.d1 + datetime.timedelta(minutes=5)
        assert_that(self.d1).is_close_to(d2, datetime.timedelta(minutes=1))
        fail('should have raised error')
    except AssertionError as ex:
        # Tolerance renders as a timedelta, e.g. 0:01:00.
        assert_that(str(ex)).matches('Expected <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> to be close to <\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}> within tolerance <\d+:\d{2}:\d{2}>, but was not.')

def test_is_close_to_bad_arg_type_failure(self):
    """TypeError expected when the comparison arg is not a datetime."""
    try:
        assert_that(self.d1).is_close_to(123, 456)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('given arg must be datetime, but was <int>')

def test_is_close_to_bad_tolerance_arg_type_failure(self):
    """TypeError expected when the tolerance is not a timedelta."""
    try:
        d2 = datetime.datetime.today()
        assert_that(self.d1).is_close_to(d2, 123)
        fail('should have raised error')
    except TypeError as ex:
        assert_that(str(ex)).is_equal_to('given tolerance arg must be timedelta, but was <int>')
class TestTimedelta(object):
    """Tests for assertpy comparison assertions on datetime.timedelta values."""

    def setup(self):
        # Fixture: one minute. The tests build larger/smaller deltas around it.
        self.t1 = datetime.timedelta(seconds=60)

    def test_is_greater_than(self):
        """A larger delta compares greater than the fixture."""
        d2 = datetime.timedelta(seconds=120)
        assert_that(d2).is_greater_than(self.t1)

    def test_is_greater_than_failure(self):
        """is_greater_than() raises AssertionError when val is the smaller delta."""
        try:
            t2 = datetime.timedelta(seconds=90)
            assert_that(self.t1).is_greater_than(t2)
            fail('should have raised error')
        except AssertionError as ex:
            # Timedeltas render as H:MM:SS in the failure message.
            assert_that(str(ex)).matches('Expected <\d{1,2}:\d{2}:\d{2}> to be greater than <\d{1,2}:\d{2}:\d{2}>, but was not.')

    def test_is_greater_than_bad_arg_type_failure(self):
        """TypeError expected when the comparison arg is not a timedelta."""
        try:
            assert_that(self.t1).is_greater_than(123)
            fail('should have raised error')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('given arg must be <timedelta>, but was <int>')

    def test_is_greater_than_or_equal_to(self):
        """A delta is greater-than-or-equal to itself (equality case)."""
        assert_that(self.t1).is_greater_than_or_equal_to(self.t1)

    def test_is_greater_than_or_equal_to_failure(self):
        """gte must raise when val is strictly smaller than the arg."""
        try:
            t2 = datetime.timedelta(seconds=90)
            assert_that(self.t1).is_greater_than_or_equal_to(t2)
            fail('should have raised error')
        except AssertionError as ex:
            assert_that(str(ex)).matches('Expected <\d{1,2}:\d{2}:\d{2}> to be greater than or equal to <\d{1,2}:\d{2}:\d{2}>, but was not.')

    def test_is_greater_than_or_equal_to_bad_arg_type_failure(self):
        """TypeError expected when the comparison arg is not a timedelta."""
        try:
            assert_that(self.t1).is_greater_than_or_equal_to(123)
            fail('should have raised error')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('given arg must be <timedelta>, but was <int>')

    def test_is_less_than(self):
        """The fixture compares less than a larger delta."""
        t2 = datetime.timedelta(seconds=90)
        assert_that(self.t1).is_less_than(t2)

    def test_is_less_than_failure(self):
        """is_less_than() raises AssertionError when val is the larger delta."""
        try:
            t2 = datetime.timedelta(seconds=90)
            assert_that(t2).is_less_than(self.t1)
            fail('should have raised error')
        except AssertionError as ex:
            assert_that(str(ex)).matches('Expected <\d{1,2}:\d{2}:\d{2}> to be less than <\d{1,2}:\d{2}:\d{2}>, but was not.')

    def test_is_less_than_bad_arg_type_failure(self):
        """TypeError expected when the comparison arg is not a timedelta."""
        try:
            assert_that(self.t1).is_less_than(123)
            fail('should have raised error')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('given arg must be <timedelta>, but was <int>')

    def test_is_less_than_or_equal_to(self):
        """A delta is less-than-or-equal to itself (equality case)."""
        assert_that(self.t1).is_less_than_or_equal_to(self.t1)

    def test_is_less_than_or_equal_to_failure(self):
        """lte must raise when val is strictly larger than the arg."""
        try:
            t2 = datetime.timedelta(seconds=90)
            assert_that(t2).is_less_than_or_equal_to(self.t1)
            fail('should have raised error')
        except AssertionError as ex:
            assert_that(str(ex)).matches('Expected <\d{1,2}:\d{2}:\d{2}> to be less than or equal to <\d{1,2}:\d{2}:\d{2}>, but was not.')

    def test_is_less_than_or_equal_to_bad_arg_type_failure(self):
        """TypeError expected when the comparison arg is not a timedelta."""
        try:
            assert_that(self.t1).is_less_than_or_equal_to(123)
            fail('should have raised error')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('given arg must be <timedelta>, but was <int>')

    def test_is_between(self):
        """A 90s delta lies between the 60s fixture and a 120s bound."""
        d2 = datetime.timedelta(seconds=90)
        d3 = datetime.timedelta(seconds=120)
        assert_that(d2).is_between(self.t1, d3)

    def test_is_between_failure(self):
        """is_between() raises when val exceeds both bounds."""
        try:
            d2 = datetime.timedelta(seconds=30)
            d3 = datetime.timedelta(seconds=40)
            assert_that(self.t1).is_between(d2, d3)
            fail('should have raised error')
        except AssertionError as ex:
            assert_that(str(ex)).matches('Expected <\d{1,2}:\d{2}:\d{2}> to be between <\d{1,2}:\d{2}:\d{2}> and <\d{1,2}:\d{2}:\d{2}>, but was not.')
| |
# Copyright 2012-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib, os.path, re, tempfile
from ..linkers import StaticLinker
from .. import coredata
from .. import mlog
from .. import mesonlib
from ..mesonlib import EnvironmentException, MesonException, version_compare, Popen_safe
"""This file contains the data files of all compilers Meson knows
about. To support a new compiler, add its information below.
Also add corresponding autodetection code in environment.py."""
# Suffix tables used by the is_*() classifiers below.
header_suffixes = ('h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di')
obj_suffixes = ('o', 'obj', 'res')
lib_suffixes = ('a', 'lib', 'dll', 'dylib', 'so')
# Per-language suffixes for files that are unambiguously in that language.
# Plain '.h' is deliberately absent: it could be C, C++, ObjC, etc.
lang_suffixes = {
    'c': ('c',),
    'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx'),
    # f90, f95, f03, f08 are for free-form fortran ('f90' recommended)
    # f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended)
    'fortran': ('f90', 'f95', 'f03', 'f08', 'f', 'for', 'ftn', 'fpp'),
    'd': ('d', 'di'),
    'objc': ('m',),
    'objcpp': ('mm',),
    'rust': ('rs',),
    'vala': ('vala', 'vapi', 'gs'),
    'cs': ('cs',),
    'swift': ('swift',),
    'java': ('java',),
}
cpp_suffixes = lang_suffixes['cpp'] + ('h',)
c_suffixes = lang_suffixes['c'] + ('h',)
# Languages that the linker can combine with C objects directly.
# Used in build.py:process_compilers() and build.py:get_dynamic_linker()
clike_langs = ('objcpp', 'objc', 'd', 'cpp', 'c', 'fortran',)
# Flatten the per-language suffixes, then add headers, LLVM IR and assembly.
clike_suffixes = tuple(suffix
                       for lang in clike_langs
                       for suffix in lang_suffixes[lang]) + ('h', 'll', 's')
# All these are only for C-like languages; see `clike_langs` above.
def sort_clike(lang):
    '''
    Sorting function to sort the list of languages according to
    reversed(compilers.clike_langs) and append the unknown langs in the end.
    The purpose is to prefer C over C++ for files that can be compiled by
    both such as assembly, C, etc. Also applies to ObjC, ObjC++, etc.
    '''
    try:
        # Negating the index reverses the clike_langs priority order.
        return -clike_langs.index(lang)
    except ValueError:
        # Unknown languages sort after every known one.
        return 1
def is_header(fname):
    """Return True if fname (a str or File-like object with a .fname
    attribute) has a header suffix. The suffix is matched case-sensitively:
    'H' is itself a valid header suffix."""
    name = getattr(fname, 'fname', fname)
    return name.split('.')[-1] in header_suffixes
def is_source(fname):
    """Return True if fname (str or File-like) has a C-like source suffix
    (case-insensitive)."""
    name = getattr(fname, 'fname', fname)
    return name.split('.')[-1].lower() in clike_suffixes
def is_assembly(fname):
    """Return True if fname (str or File-like) is an assembly file
    ('.s' or '.S')."""
    name = getattr(fname, 'fname', fname)
    return name.split('.')[-1].lower() == 's'
def is_llvm_ir(fname):
    """Return True if fname (str or File-like) is an LLVM IR file ('.ll',
    matched case-sensitively)."""
    name = getattr(fname, 'fname', fname)
    return name.split('.')[-1] == 'll'
def is_object(fname):
    """Return True if fname (str or File-like) has an object-file suffix."""
    name = getattr(fname, 'fname', fname)
    return name.split('.')[-1] in obj_suffixes
def is_library(fname):
    """Return True if fname (str or File-like) has a library suffix."""
    name = getattr(fname, 'fname', fname)
    return name.split('.')[-1] in lib_suffixes
# Per-buildtype compiler flags, keyed by Meson buildtype name.
gnulike_buildtype_args = {'plain': [],
                          # -O0 is passed for improved debugging information with gcc
                          # See https://github.com/mesonbuild/meson/pull/509
                          'debug': ['-O0', '-g'],
                          'debugoptimized': ['-O2', '-g'],
                          'release': ['-O3'],
                          'minsize': ['-Os', '-g']}
msvc_buildtype_args = {'plain': [],
                       'debug': ["/MDd", "/ZI", "/Ob0", "/Od", "/RTC1"],
                       'debugoptimized': ["/MD", "/Zi", "/O2", "/Ob1"],
                       'release': ["/MD", "/O2", "/Ob2"],
                       'minsize': ["/MD", "/Zi", "/Os", "/Ob1"],
                       }
# Per-buildtype linker flags.
apple_buildtype_linker_args = {'plain': [],
                               'debug': [],
                               'debugoptimized': [],
                               'release': [],
                               'minsize': [],
                               }
gnulike_buildtype_linker_args = {'plain': [],
                                 'debug': [],
                                 'debugoptimized': [],
                                 'release': ['-Wl,-O1'],
                                 'minsize': [],
                                 }
msvc_buildtype_linker_args = {'plain': [],
                              'debug': [],
                              'debugoptimized': [],
                              'release': [],
                              'minsize': ['/INCREMENTAL:NO'],
                              }
java_buildtype_args = {'plain': [],
                       'debug': ['-g'],
                       'debugoptimized': ['-g'],
                       'release': [],
                       'minsize': [],
                       }
rust_buildtype_args = {'plain': [],
                       'debug': ['-C', 'debuginfo=2'],
                       'debugoptimized': ['-C', 'debuginfo=2', '-C', 'opt-level=2'],
                       'release': ['-C', 'opt-level=3'],
                       'minsize': [], # In a future release: ['-C', 'opt-level=s'],
                       }
# D has three frontends (gdc, ldc, dmd) with differing flag spellings.
d_gdc_buildtype_args = {'plain': [],
                        'debug': ['-g', '-O0'],
                        'debugoptimized': ['-g', '-O'],
                        'release': ['-O3', '-frelease'],
                        'minsize': [],
                        }
d_ldc_buildtype_args = {'plain': [],
                        'debug': ['-g', '-O0'],
                        'debugoptimized': ['-g', '-O'],
                        'release': ['-O3', '-release'],
                        'minsize': [],
                        }
d_dmd_buildtype_args = {'plain': [],
                        'debug': ['-g'],
                        'debugoptimized': ['-g', '-O'],
                        'release': ['-O', '-release'],
                        'minsize': [],
                        }
mono_buildtype_args = {'plain': [],
                       'debug': ['-debug'],
                       'debugoptimized': ['-debug', '-optimize+'],
                       'release': ['-optimize+'],
                       'minsize': [],
                       }
swift_buildtype_args = {'plain': [],
                        'debug': ['-g'],
                        'debugoptimized': ['-g', '-O'],
                        'release': ['-O'],
                        'minsize': [],
                        }
# Default Windows system libraries, in the two toolchains' naming styles.
gnu_winlibs = ['-lkernel32', '-luser32', '-lgdi32', '-lwinspool', '-lshell32',
               '-lole32', '-loleaut32', '-luuid', '-lcomdlg32', '-ladvapi32']
msvc_winlibs = ['kernel32.lib', 'user32.lib', 'gdi32.lib',
                'winspool.lib', 'shell32.lib', 'ole32.lib', 'oleaut32.lib',
                'uuid.lib', 'comdlg32.lib', 'advapi32.lib']
# Flags implementing the b_colorout base option for each compiler family.
gnu_color_args = {'auto': ['-fdiagnostics-color=auto'],
                  'always': ['-fdiagnostics-color=always'],
                  'never': ['-fdiagnostics-color=never'],
                  }
clang_color_args = {'auto': ['-Xclang', '-fcolor-diagnostics'],
                    'always': ['-Xclang', '-fcolor-diagnostics'],
                    'never': ['-Xclang', '-fno-color-diagnostics'],
                    }
# The 'b_*' base options every compiler may opt into (see each compiler's
# base_options attribute). Consumed by get_base_compile_args()/get_base_link_args().
base_options = {'b_pch': coredata.UserBooleanOption('b_pch', 'Use precompiled headers', True),
                'b_lto': coredata.UserBooleanOption('b_lto', 'Use link time optimization', False),
                'b_sanitize': coredata.UserComboOption('b_sanitize',
                                                       'Code sanitizer to use',
                                                       ['none', 'address', 'thread', 'undefined', 'memory'],
                                                       'none'),
                'b_lundef': coredata.UserBooleanOption('b_lundef', 'Use -Wl,--no-undefined when linking', True),
                'b_asneeded': coredata.UserBooleanOption('b_asneeded', 'Use -Wl,--as-needed when linking', True),
                'b_pgo': coredata.UserComboOption('b_pgo', 'Use profile guide optimization',
                                                  ['off', 'generate', 'use'],
                                                  'off'),
                'b_coverage': coredata.UserBooleanOption('b_coverage',
                                                         'Enable coverage tracking.',
                                                         False),
                'b_colorout': coredata.UserComboOption('b_colorout', 'Use colored output',
                                                       ['auto', 'always', 'never'],
                                                       'always'),
                'b_ndebug': coredata.UserBooleanOption('b_ndebug',
                                                       'Disable asserts',
                                                       False),
                'b_staticpic': coredata.UserBooleanOption('b_staticpic',
                                                          'Build static libraries as position independent',
                                                          True),
                }
def sanitizer_compile_args(value):
    """Map a b_sanitize option value to compile flags; 'none' yields none."""
    if value == 'none':
        return []
    # ASan additionally needs frame pointers for usable stack traces.
    extra = ['-fno-omit-frame-pointer'] if value == 'address' else []
    return ['-fsanitize=' + value] + extra
def sanitizer_link_args(value):
    """Map a b_sanitize option value to link flags; 'none' yields none."""
    return [] if value == 'none' else ['-fsanitize=' + value]
def get_base_compile_args(options, compiler):
    """Compute compile args implied by the 'b_*' base options.

    Each option is read EAFP-style: a KeyError (option not registered for
    this compiler) silently skips that feature.
    """
    args = []
    # FIXME, gcc/clang specific.
    try:
        if options['b_lto'].value:
            args.append('-flto')
    except KeyError:
        pass
    try:
        # NOTE(review): the try wraps the compiler call too, so a KeyError
        # raised inside get_colorout_args would also be swallowed here.
        args += compiler.get_colorout_args(options['b_colorout'].value)
    except KeyError:
        pass
    try:
        args += sanitizer_compile_args(options['b_sanitize'].value)
    except KeyError:
        pass
    try:
        pgo_val = options['b_pgo'].value
        if pgo_val == 'generate':
            args.append('-fprofile-generate')
        elif pgo_val == 'use':
            args.append('-fprofile-use')
    except KeyError:
        pass
    try:
        if options['b_coverage'].value:
            args += compiler.get_coverage_args()
    except KeyError:
        pass
    try:
        if options['b_ndebug'].value:
            args += ['-DNDEBUG']
    except KeyError:
        pass
    return args
def get_base_link_args(options, linker, is_shared_module):
    """Compute link args implied by the 'b_*' base options.

    Mirrors get_base_compile_args(): each option is read EAFP-style and a
    missing option key silently skips that feature. b_lundef is suppressed
    for shared modules, which are allowed to have undefined symbols.
    """
    args = []
    # FIXME, gcc/clang specific.
    try:
        if options['b_lto'].value:
            args.append('-flto')
    except KeyError:
        pass
    try:
        args += sanitizer_link_args(options['b_sanitize'].value)
    except KeyError:
        pass
    try:
        pgo_val = options['b_pgo'].value
        if pgo_val == 'generate':
            args.append('-fprofile-generate')
        elif pgo_val == 'use':
            args.append('-fprofile-use')
    except KeyError:
        pass
    try:
        # Only add -Wl,--no-undefined when the linker actually opted into
        # b_lundef via its base_options.
        if not is_shared_module and 'b_lundef' in linker.base_options and options['b_lundef'].value:
            args.append('-Wl,--no-undefined')
    except KeyError:
        pass
    try:
        if 'b_asneeded' in linker.base_options and options['b_asneeded'].value:
            args.append('-Wl,--as-needed')
    except KeyError:
        pass
    try:
        if options['b_coverage'].value:
            args += linker.get_coverage_link_args()
    except KeyError:
        pass
    return args
class CrossNoRunException(MesonException):
    """MesonException subclass signalling that a compiled check could not be
    run (raised by cross-compilation code paths elsewhere in this module's
    consumers)."""
class RunResult:
    """Outcome of a compile (and optional run) check.

    returncode/stdout/stderr keep sentinel values (999 / 'UNDEFINED') when
    the program was never actually executed.
    """

    def __init__(self, compiled, returncode=999, stdout='UNDEFINED', stderr='UNDEFINED'):
        self.stdout = stdout
        self.stderr = stderr
        self.returncode = returncode
        self.compiled = compiled
class CompilerArgs(list):
    '''
    Class derived from list() that manages a list of compiler arguments. Should
    be used while constructing compiler arguments from various sources. Can be
    operated with ordinary lists, so this does not need to be used everywhere.
    All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
    and can converted to the native type of each compiler by using the
    .to_native() method to which you must pass an instance of the compiler or
    the compiler class.
    New arguments added to this class (either with .append(), .extend(), or +=)
    are added in a way that ensures that they override previous arguments.
    For example:
    >>> a = ['-Lfoo', '-lbar']
    >>> a += ['-Lpho', '-lbaz']
    >>> print(a)
    ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
    Arguments will also be de-duped if they can be de-duped safely.
    Note that because of all this, this class is not commutative and does not
    preserve the order of arguments if it is safe to not. For example:
    >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
    ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
    >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
    ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
    '''
    # NOTE: currently this class is only for C-like compilers, but it can be
    # extended to other languages easily. Just move the following to the
    # compiler class and initialize when self.compiler is set.
    # Arg prefixes that override by prepending instead of appending
    prepend_prefixes = ('-I', '-L')
    # Arg prefixes and args that must be de-duped by returning 2
    dedup2_prefixes = ('-I', '-L', '-D', '-U')
    dedup2_suffixes = ()
    dedup2_args = ()
    # Arg prefixes and args that must be de-duped by returning 1
    dedup1_prefixes = ('-l',)
    dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
    # Match a .so of the form path/to/libfoo.so.0.1.0
    # Only UNIX shared libraries require this. Others have a fixed extension.
    dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
    dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
    # Set by _check_args(); a Compiler or StaticLinker instance.
    compiler = None

    def _check_args(self, args):
        # Validate the constructor arguments: accepted shapes are (),
        # (compiler,) or (compiler, initial_list) in either order.
        # Returns the initial list contents (possibly empty).
        cargs = []
        if len(args) > 2:
            raise TypeError("CompilerArgs() only accepts at most 2 arguments: "
                            "The compiler, and optionally an initial list")
        elif not args:
            return cargs
        elif len(args) == 1:
            if isinstance(args[0], (Compiler, StaticLinker)):
                self.compiler = args[0]
            else:
                raise TypeError("you must pass a Compiler instance as one of "
                                "the arguments")
        elif len(args) == 2:
            if isinstance(args[0], (Compiler, StaticLinker)):
                self.compiler = args[0]
                cargs = args[1]
            elif isinstance(args[1], (Compiler, StaticLinker)):
                cargs = args[0]
                self.compiler = args[1]
            else:
                raise TypeError("you must pass a Compiler instance as one of "
                                "the two arguments")
        else:
            raise AssertionError('Not reached')
        return cargs

    def __init__(self, *args):
        super().__init__(self._check_args(args))

    @classmethod
    def _can_dedup(cls, arg):
        '''
        Returns whether the argument can be safely de-duped. This is dependent
        on three things:
        a) Whether an argument can be 'overriden' by a later argument. For
        example, -DFOO defines FOO and -UFOO undefines FOO. In this case, we
        can safely remove the previous occurance and add a new one. The same
        is true for include paths and library paths with -I and -L. For
        these we return `2`. See `dedup2_prefixes` and `dedup2_args`.
        b) Arguments that once specified cannot be undone, such as `-c` or
        `-pipe`. New instances of these can be completely skipped. For these
        we return `1`. See `dedup1_prefixes` and `dedup1_args`.
        c) Whether it matters where or how many times on the command-line
        a particular argument is present. This can matter for symbol
        resolution in static or shared libraries, so we cannot de-dup or
        reorder them. For these we return `0`. This is the default.
        In addition to these, we handle library arguments specially.
        With GNU ld, we surround library arguments with -Wl,--start/end-group
        to recursively search for symbols in the libraries. This is not needed
        with other linkers.
        '''
        # A standalone argument must never be deduplicated because it is
        # defined by what comes _after_ it. Thus dedupping this:
        # -D FOO -D BAR
        # would yield either
        # -D FOO BAR
        # or
        # FOO -D BAR
        # both of which are invalid.
        if arg in cls.dedup2_prefixes:
            return 0
        if arg in cls.dedup2_args or \
           arg.startswith(cls.dedup2_prefixes) or \
           arg.endswith(cls.dedup2_suffixes):
            return 2
        if arg in cls.dedup1_args or \
           arg.startswith(cls.dedup1_prefixes) or \
           arg.endswith(cls.dedup1_suffixes) or \
           re.search(cls.dedup1_regex, arg):
            return 1
        return 0

    @classmethod
    def _should_prepend(cls, arg):
        # -I/-L entries override earlier ones by being placed FIRST.
        if arg.startswith(cls.prepend_prefixes):
            return True
        return False

    def to_native(self):
        # Check if we need to add --start/end-group for circular dependencies
        # between static libraries.
        if get_compiler_uses_gnuld(self.compiler):
            group_started = False
            for each in self:
                if not each.startswith('-l') and not each.endswith('.a'):
                    continue
                # NOTE(review): index() finds the FIRST occurrence of the
                # value; duplicate library entries could misplace the group
                # markers. Confirm duplicates cannot reach this point.
                i = self.index(each)
                if not group_started:
                    # First occurance of a library
                    self.insert(i, '-Wl,--start-group')
                    group_started = True
            # Last occurance of a library
            if group_started:
                self.insert(i + 1, '-Wl,--end-group')
        return self.compiler.unix_args_to_native(self)

    def append_direct(self, arg):
        '''
        Append the specified argument without any reordering or de-dup
        '''
        super().append(arg)

    def extend_direct(self, iterable):
        '''
        Extend using the elements in the specified iterable without any
        reordering or de-dup
        '''
        super().extend(iterable)

    def __add__(self, args):
        new = CompilerArgs(self, self.compiler)
        new += args
        return new

    def __iadd__(self, args):
        '''
        Add two CompilerArgs while taking into account overriding of arguments
        and while preserving the order of arguments as much as possible
        '''
        pre = []
        post = []
        if not isinstance(args, list):
            raise TypeError('can only concatenate list (not "{}") to list'.format(args))
        for arg in args:
            # If the argument can be de-duped, do it either by removing the
            # previous occurance of it and adding a new one, or not adding the
            # new occurance.
            dedup = self._can_dedup(arg)
            if dedup == 1:
                # Argument already exists and adding a new instance is useless
                if arg in self or arg in pre or arg in post:
                    continue
            if dedup == 2:
                # Remove all previous occurances of the arg and add it anew
                if arg in self:
                    self.remove(arg)
                if arg in pre:
                    pre.remove(arg)
                if arg in post:
                    post.remove(arg)
            if self._should_prepend(arg):
                pre.append(arg)
            else:
                post.append(arg)
        # Insert at the beginning
        self[:0] = pre
        # Append to the end
        super().__iadd__(post)
        return self

    def __radd__(self, args):
        new = CompilerArgs(args, self.compiler)
        new += self
        return new

    def __mul__(self, args):
        raise TypeError("can't multiply compiler arguments")

    def __imul__(self, args):
        raise TypeError("can't multiply compiler arguments")

    def __rmul__(self, args):
        raise TypeError("can't multiply compiler arguments")

    def append(self, arg):
        # Route through __iadd__ so dedup/prepend rules apply.
        self.__iadd__([arg])

    def extend(self, args):
        # Route through __iadd__ so dedup/prepend rules apply.
        self.__iadd__(args)
class Compiler:
def __init__(self, exelist, version):
if isinstance(exelist, str):
self.exelist = [exelist]
elif isinstance(exelist, list):
self.exelist = exelist
else:
raise TypeError('Unknown argument to Compiler')
# In case it's been overriden by a child class already
if not hasattr(self, 'file_suffixes'):
self.file_suffixes = lang_suffixes[self.language]
if not hasattr(self, 'can_compile_suffixes'):
self.can_compile_suffixes = set(self.file_suffixes)
self.default_suffix = self.file_suffixes[0]
self.version = version
self.base_options = []
def __repr__(self):
repr_str = "<{0}: v{1} `{2}`>"
return repr_str.format(self.__class__.__name__, self.version,
' '.join(self.exelist))
def can_compile(self, src):
if hasattr(src, 'fname'):
src = src.fname
suffix = os.path.splitext(src)[1].lower()
if suffix and suffix[1:] in self.can_compile_suffixes:
return True
return False
def get_id(self):
return self.id
def get_language(self):
return self.language
def get_display_language(self):
return self.language.capitalize()
def get_default_suffix(self):
return self.default_suffix
def get_exelist(self):
return self.exelist[:]
def get_builtin_define(self, *args, **kwargs):
raise EnvironmentException('%s does not support get_builtin_define.' % self.id)
def has_builtin_define(self, *args, **kwargs):
raise EnvironmentException('%s does not support has_builtin_define.' % self.id)
def get_always_args(self):
return []
def get_linker_always_args(self):
return []
def gen_import_library_args(self, implibname):
"""
Used only on Windows for libraries that need an import library.
This currently means C, C++, Fortran.
"""
return []
def get_options(self):
return {} # build afresh every time
def get_option_compile_args(self, options):
return []
def get_option_link_args(self, options):
return []
def has_header(self, *args, **kwargs):
raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language())
def has_header_symbol(self, *args, **kwargs):
raise EnvironmentException('Language %s does not support header symbol checks.' % self.get_display_language())
def compiles(self, *args, **kwargs):
raise EnvironmentException('Language %s does not support compile checks.' % self.get_display_language())
def links(self, *args, **kwargs):
raise EnvironmentException('Language %s does not support link checks.' % self.get_display_language())
def run(self, *args, **kwargs):
raise EnvironmentException('Language %s does not support run checks.' % self.get_display_language())
def sizeof(self, *args, **kwargs):
raise EnvironmentException('Language %s does not support sizeof checks.' % self.get_display_language())
def alignment(self, *args, **kwargs):
raise EnvironmentException('Language %s does not support alignment checks.' % self.get_display_language())
def has_function(self, *args, **kwargs):
raise EnvironmentException('Language %s does not support function checks.' % self.get_display_language())
@classmethod
def unix_args_to_native(cls, args):
"Always returns a copy that can be independently mutated"
return args[:]
def find_library(self, *args, **kwargs):
raise EnvironmentException('Language {} does not support library finding.'.format(self.get_display_language()))
def get_library_dirs(self):
return []
def has_argument(self, arg, env):
return self.has_multi_arguments([arg], env)
def has_multi_arguments(self, args, env):
raise EnvironmentException(
'Language {} does not support has_multi_arguments.'.format(
self.get_display_language()))
def get_cross_extra_flags(self, environment, link):
extra_flags = []
if self.is_cross and environment:
if 'properties' in environment.cross_info.config:
props = environment.cross_info.config['properties']
lang_args_key = self.language + '_args'
extra_flags += props.get(lang_args_key, [])
lang_link_args_key = self.language + '_link_args'
if link:
extra_flags += props.get(lang_link_args_key, [])
return extra_flags
def _get_compile_output(self, dirname, mode):
# In pre-processor mode, the output is sent to stdout and discarded
if mode == 'preprocess':
return None
# Extension only matters if running results; '.exe' is
# guaranteed to be executable on every platform.
if mode == 'link':
suffix = 'exe'
else:
suffix = 'obj'
return os.path.join(dirname, 'output.' + suffix)
@contextlib.contextmanager
def compile(self, code, extra_args=None, mode='link'):
if extra_args is None:
extra_args = []
try:
with tempfile.TemporaryDirectory() as tmpdirname:
if isinstance(code, str):
srcname = os.path.join(tmpdirname,
'testfile.' + self.default_suffix)
with open(srcname, 'w') as ofile:
ofile.write(code)
elif isinstance(code, mesonlib.File):
srcname = code.fname
output = self._get_compile_output(tmpdirname, mode)
# Construct the compiler command-line
commands = CompilerArgs(self)
commands.append(srcname)
commands += extra_args
commands += self.get_always_args()
if mode == 'compile':
commands += self.get_compile_only_args()
# Preprocess mode outputs to stdout, so no output args
if mode == 'preprocess':
commands += self.get_preprocess_only_args()
else:
commands += self.get_output_args(output)
# Generate full command-line with the exelist
commands = self.get_exelist() + commands.to_native()
mlog.debug('Running compile:')
mlog.debug('Working directory: ', tmpdirname)
mlog.debug('Command line: ', ' '.join(commands), '\n')
mlog.debug('Code:\n', code)
p, p.stdo, p.stde = Popen_safe(commands, cwd=tmpdirname)
mlog.debug('Compiler stdout:\n', p.stdo)
mlog.debug('Compiler stderr:\n', p.stde)
p.input_name = srcname
p.output_name = output
yield p
except (PermissionError, OSError):
# On Windows antivirus programs and the like hold on to files so
# they can't be deleted. There's not much to do in this case. Also,
# catch OSError because the directory is then no longer empty.
pass
def get_colorout_args(self, colortype):
return []
# Some compilers (msvc) write debug info to a separate file.
# These args specify where it should be written.
def get_compile_debugfile_args(self, rel_obj, **kwargs):
return []
def get_link_debugfile_args(self, rel_obj):
return []
def get_std_shared_lib_link_args(self):
return []
def get_std_shared_module_link_args(self):
return self.get_std_shared_lib_link_args()
def get_link_whole_for(self, args):
if isinstance(args, list) and not args:
return []
raise EnvironmentException('Language %s does not support linking whole archives.' % self.get_display_language())
    def build_unix_rpath_args(self, build_dir, from_dir, rpath_paths, install_rpath):
        """Build ``-Wl,-rpath`` (and ``-rpath-link``) arguments for Unix linkers.

        ``rpath_paths`` are build-tree paths to encode relative to
        ``from_dir`` via ``$ORIGIN``; ``install_rpath`` is the rpath wanted
        at install time.
        """
        if not rpath_paths and not install_rpath:
            return []
        # The rpaths we write must be relative, because otherwise
        # they have different length depending on the build
        # directory. This breaks reproducible builds.
        rel_rpaths = []
        for p in rpath_paths:
            if p == from_dir:
                relative = '' # relpath errors out in this case
            else:
                relative = os.path.relpath(p, from_dir)
            rel_rpaths.append(relative)
        paths = ':'.join([os.path.join('$ORIGIN', p) for p in rel_rpaths])
        # Pad with 'X' so the written rpath string is at least as long as
        # install_rpath — presumably so it can later be overwritten in place
        # in the binary at install time; TODO(review): confirm against the
        # install-time rpath fixer.
        if len(paths) < len(install_rpath):
            padding = 'X' * (len(install_rpath) - len(paths))
            if not paths:
                paths = padding
            else:
                paths = paths + ':' + padding
        args = ['-Wl,-rpath,' + paths]
        if get_compiler_is_linuxlike(self):
            # Rpaths to use while linking must be absolute. These are not
            # written to the binary. Needed only with GNU ld:
            # https://sourceware.org/bugzilla/show_bug.cgi?id=16936
            # Not needed on Windows or other platforms that don't use RPATH
            # https://github.com/mesonbuild/meson/issues/1897
            lpaths = ':'.join([os.path.join(build_dir, p) for p in rpath_paths])
            args += ['-Wl,-rpath-link,' + lpaths]
        return args
# Integer tags describing the platform flavour of each compiler family.
# The *_STANDARD value (0) is treated as the Linux-like case by
# get_compiler_is_linuxlike() below.
GCC_STANDARD = 0
GCC_OSX = 1
GCC_MINGW = 2
GCC_CYGWIN = 3
CLANG_STANDARD = 0
CLANG_OSX = 1
CLANG_WIN = 2
# Possibly clang-cl?
ICC_STANDARD = 0
ICC_OSX = 1
ICC_WIN = 2
def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module):
    """Return linker args that embed the library's soname/install_name."""
    sostr = '' if soversion is None else '.' + soversion
    if gcc_type in (GCC_STANDARD, GCC_MINGW, GCC_CYGWIN):
        # Might not be correct for mingw but seems to work.
        soname = '%s%s.%s%s' % (prefix, shlib_name, suffix, sostr)
        return ['-Wl,-soname,' + soname]
    if gcc_type == GCC_OSX:
        # Shared modules on macOS get no install_name at all.
        if is_shared_module:
            return []
        dylib = os.path.join(path, 'lib' + shlib_name + '.dylib')
        return ['-install_name', dylib]
    raise RuntimeError('Not implemented yet.')
def get_compiler_is_linuxlike(compiler):
    """True when the compiler targets a Linux-like (ELF/GNU) platform."""
    if getattr(compiler, 'gcc_type', None) == GCC_STANDARD:
        return True
    if getattr(compiler, 'clang_type', None) == CLANG_STANDARD:
        return True
    return getattr(compiler, 'icc_type', None) == ICC_STANDARD
def get_compiler_uses_gnuld(c):
    """True when the compiler is expected to drive the GNU linker."""
    # FIXME: Perhaps we should detect the linker in the environment?
    # FIXME: Assumes that *BSD use GNU ld, but they might start using lld soon
    gnuld_flavours = (
        ('gcc_type', (GCC_STANDARD, GCC_MINGW, GCC_CYGWIN)),
        ('clang_type', (CLANG_STANDARD, CLANG_WIN)),
        ('icc_type', (ICC_STANDARD, ICC_WIN)),
    )
    return any(getattr(c, attr, None) in flavours
               for attr, flavours in gnuld_flavours)
def get_largefile_args(compiler):
    '''
    Enable transparent large-file-support for 32-bit UNIX systems
    '''
    if not get_compiler_is_linuxlike(compiler):
        # We don't enable -D_LARGEFILE64_SOURCE since that enables
        # transitionary features and must be enabled by programs that use
        # those features explicitly.
        return []
    # Enable large-file support unconditionally on all platforms other
    # than macOS and Windows. macOS is now 64-bit-only so it doesn't
    # need anything special, and Windows doesn't have automatic LFS.
    # You must use the 64-bit counterparts explicitly.
    # glibc, musl, and uclibc, and all BSD libcs support this. On Android,
    # support for transparent LFS is available depending on the version of
    # Bionic: https://github.com/android/platform_bionic#32-bit-abi-bugs
    # https://code.google.com/p/android/issues/detail?id=64613
    #
    # If this breaks your code, fix it! It's been 20+ years!
    return ['-D_FILE_OFFSET_BITS=64']
class GnuCompiler:
    """Mixin with functionality common to all GNU-family compilers.

    ``gcc_type`` is one of the GCC_* platform tags; ``defines`` maps
    builtin preprocessor define names to their values.
    """
    def __init__(self, gcc_type, defines):
        self.id = 'gcc'
        self.gcc_type = gcc_type
        self.defines = defines or {}
        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage',
                             'b_colorout', 'b_ndebug', 'b_staticpic']
        if self.gcc_type != GCC_OSX:
            self.base_options.append('b_lundef')
        self.base_options.append('b_asneeded')
        # All GCC backends can do assembly
        self.can_compile_suffixes.add('s')
    def get_colorout_args(self, colortype):
        # -fdiagnostics-color arrived in GCC 4.9.
        if mesonlib.version_compare(self.version, '>=4.9.0'):
            return gnu_color_args[colortype][:]
        return []
    def get_warn_args(self, level):
        args = super().get_warn_args(level)
        if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args:
            # -Wpedantic was added in 4.8.0
            # https://gcc.gnu.org/gcc-4.8/changes.html
            args[args.index('-Wpedantic')] = '-pedantic'
        return args
    def has_builtin_define(self, define):
        return define in self.defines
    def get_builtin_define(self, define):
        # dict.get preserves the previous implicit-None result for unknown
        # defines while avoiding the membership-test-plus-lookup pattern.
        return self.defines.get(define)
    def get_pic_args(self):
        if self.gcc_type in (GCC_CYGWIN, GCC_MINGW, GCC_OSX):
            return [] # On Windows and OS X, pic is always on.
        return ['-fPIC']
    def get_buildtype_args(self, buildtype):
        return gnulike_buildtype_args[buildtype]
    def get_buildtype_linker_args(self, buildtype):
        if self.gcc_type == GCC_OSX:
            return apple_buildtype_linker_args[buildtype]
        return gnulike_buildtype_linker_args[buildtype]
    def get_pch_suffix(self):
        return 'gch'
    def split_shlib_to_parts(self, fname):
        return os.path.split(fname)[0], fname
    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        return get_gcc_soname_args(self.gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module)
    def get_std_shared_lib_link_args(self):
        if self.gcc_type == GCC_OSX:
            return ['-bundle']
        return ['-shared']
    def get_link_whole_for(self, args):
        return ['-Wl,--whole-archive'] + args + ['-Wl,--no-whole-archive']
    def gen_vs_module_defs_args(self, defsfile):
        if not isinstance(defsfile, str):
            raise RuntimeError('Module definitions file should be str')
        # On Windows targets, .def files may be specified on the linker command
        # line like an object file.
        if self.gcc_type in (GCC_CYGWIN, GCC_MINGW):
            return [defsfile]
        # For other targets, discard the .def file.
        return []
    def get_gui_app_args(self):
        if self.gcc_type in (GCC_CYGWIN, GCC_MINGW):
            return ['-mwindows']
        return []
class ClangCompiler:
    """Mixin with functionality common to all Clang-family compilers.

    ``clang_type`` is one of the CLANG_* platform tags.
    """
    def __init__(self, clang_type):
        self.id = 'clang'
        self.clang_type = clang_type
        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage',
                             'b_ndebug', 'b_staticpic', 'b_colorout']
        if self.clang_type != CLANG_OSX:
            self.base_options.append('b_lundef')
        self.base_options.append('b_asneeded')
        # All Clang backends can do assembly and LLVM IR
        self.can_compile_suffixes.update(['ll', 's'])
    def get_pic_args(self):
        if self.clang_type in (CLANG_WIN, CLANG_OSX):
            return [] # On Windows and OS X, pic is always on.
        return ['-fPIC']
    def get_colorout_args(self, colortype):
        return clang_color_args[colortype][:]
    def get_buildtype_args(self, buildtype):
        return gnulike_buildtype_args[buildtype]
    def get_buildtype_linker_args(self, buildtype):
        if self.clang_type == CLANG_OSX:
            return apple_buildtype_linker_args[buildtype]
        return gnulike_buildtype_linker_args[buildtype]
    def get_pch_suffix(self):
        return 'pch'
    def get_pch_use_args(self, pch_dir, header):
        # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136
        # This flag is internal to Clang (or at least not documented on the man page)
        # so it might change semantics at any time.
        return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))]
    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        if self.clang_type == CLANG_STANDARD:
            gcc_type = GCC_STANDARD
        elif self.clang_type == CLANG_OSX:
            gcc_type = GCC_OSX
        elif self.clang_type == CLANG_WIN:
            gcc_type = GCC_MINGW
        else:
            raise MesonException('Unreachable code when converting clang type to gcc type.')
        return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module)
    def has_multi_arguments(self, args, env):
        # Clang only warns about unknown -W flags by default; force an error
        # so unsupported flags are properly rejected.
        return super().has_multi_arguments(
            ['-Werror=unknown-warning-option'] + args,
            env)
    def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None):
        if extra_args is None:
            extra_args = []
        else:
            # Copy so the append below can never leak into the caller's
            # list (the previous code mutated the argument in place).
            extra_args = extra_args[:]
        # Starting with XCode 8, we need to pass this to force linker
        # visibility to obey OS X and iOS minimum version targets with
        # -mmacosx-version-min, -miphoneos-version-min, etc.
        # https://github.com/Homebrew/homebrew-core/issues/3727
        if self.clang_type == CLANG_OSX and version_compare(self.version, '>=8.0'):
            extra_args.append('-Wl,-no_weak_imports')
        return super().has_function(funcname, prefix, env, extra_args, dependencies)
    def get_std_shared_module_link_args(self):
        if self.clang_type == CLANG_OSX:
            return ['-bundle', '-Wl,-undefined,dynamic_lookup']
        return ['-shared']
    def get_link_whole_for(self, args):
        if self.clang_type == CLANG_OSX:
            result = []
            for a in args:
                result += ['-Wl,-force_load', a]
            return result
        return ['-Wl,--whole-archive'] + args + ['-Wl,--no-whole-archive']
# Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1
class IntelCompiler:
    """Mixin with behaviour shared by all Intel (ICC) compilers."""
    def __init__(self, icc_type):
        self.id = 'intel'
        self.icc_type = icc_type
        self.lang_header = 'none'
        self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage',
                             'b_colorout', 'b_ndebug', 'b_staticpic', 'b_lundef', 'b_asneeded']
        # ICC can also compile assembly.
        self.can_compile_suffixes.add('s')
    def get_pic_args(self):
        return ['-fPIC']
    def get_buildtype_args(self, buildtype):
        return gnulike_buildtype_args[buildtype]
    def get_buildtype_linker_args(self, buildtype):
        return gnulike_buildtype_linker_args[buildtype]
    def get_pch_suffix(self):
        return 'pchi'
    def get_pch_use_args(self, pch_dir, header):
        return ['-pch', '-pch_dir', os.path.join(pch_dir), '-x',
                self.lang_header, '-include', header, '-x', 'none']
    def get_pch_name(self, header_name):
        basename = os.path.split(header_name)[-1]
        return basename + '.' + self.get_pch_suffix()
    def split_shlib_to_parts(self, fname):
        dirname = os.path.split(fname)[0]
        return dirname, fname
    def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module):
        # ICC reuses the GCC soname machinery; translate the platform tag.
        icc_to_gcc = {ICC_STANDARD: GCC_STANDARD,
                      ICC_OSX: GCC_OSX,
                      ICC_WIN: GCC_MINGW}
        if self.icc_type not in icc_to_gcc:
            raise MesonException('Unreachable code when converting icc type to gcc type.')
        return get_gcc_soname_args(icc_to_gcc[self.icc_type], prefix, shlib_name,
                                   suffix, path, soversion, is_shared_module)
    def get_std_shared_lib_link_args(self):
        # FIXME: Don't know how icc works on OSX
        # if self.icc_type == ICC_OSX:
        #     return ['-bundle']
        return ['-shared']
| |
# firebird/base.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: firebird
:name: Firebird
Firebird Dialects
-----------------
Firebird offers two distinct dialects_ (not to be confused with a
SQLAlchemy ``Dialect``):
dialect 1
This is the old syntax and behaviour, inherited from Interbase pre-6.0.
dialect 3
This is the newer and supported syntax, introduced in Interbase 6.0.
The SQLAlchemy Firebird dialect detects these versions and
adjusts its representation of SQL accordingly. However,
support for dialect 1 is not well tested and probably has
incompatibilities.
Locking Behavior
----------------
Firebird locks tables aggressively. For this reason, a DROP TABLE may
hang until other transactions are released. SQLAlchemy does its best
to release transactions as quickly as possible. The most common cause
of hanging transactions is a non-fully consumed result set, i.e.::
result = engine.execute("select * from table")
row = result.fetchone()
return
Where above, the ``ResultProxy`` has not been fully consumed. The
connection will be returned to the pool and the transactional state
rolled back once the Python garbage collector reclaims the objects
which hold onto the connection, which often occurs asynchronously.
The above use case can be alleviated by calling ``first()`` on the
``ResultProxy`` which will fetch the first row and immediately close
all remaining cursor/connection resources.
RETURNING support
-----------------
Firebird 2.0 supports returning a result set from inserts, and 2.1
extends that to deletes and updates. This is generically exposed by
the SQLAlchemy ``returning()`` method, such as::
# INSERT..RETURNING
result = table.insert().returning(table.c.col1, table.c.col2).\\
values(name='foo')
print result.fetchall()
# UPDATE..RETURNING
raises = empl.update().returning(empl.c.id, empl.c.salary).\\
where(empl.c.sales>100).\\
values(dict(salary=empl.c.salary * 1.1))
print raises.fetchall()
.. _dialects: http://mc-computing.com/Databases/Firebird/SQL_Dialect.html
"""
import datetime
from sqlalchemy import schema as sa_schema
from sqlalchemy import exc, types as sqltypes, sql, util
from sqlalchemy.sql import expression
from sqlalchemy.engine import base, default, reflection
from sqlalchemy.sql import compiler
from sqlalchemy.types import (BIGINT, BLOB, DATE, FLOAT, INTEGER, NUMERIC,
SMALLINT, TEXT, TIME, TIMESTAMP, Integer)
# Firebird reserved words; identifiers matching these must be quoted
# (installed on the dialect via FBIdentifierPreparer below).
RESERVED_WORDS = set([
    "active", "add", "admin", "after", "all", "alter", "and", "any", "as",
    "asc", "ascending", "at", "auto", "avg", "before", "begin", "between",
    "bigint", "bit_length", "blob", "both", "by", "case", "cast", "char",
    "character", "character_length", "char_length", "check", "close",
    "collate", "column", "commit", "committed", "computed", "conditional",
    "connect", "constraint", "containing", "count", "create", "cross",
    "cstring", "current", "current_connection", "current_date",
    "current_role", "current_time", "current_timestamp",
    "current_transaction", "current_user", "cursor", "database", "date",
    "day", "dec", "decimal", "declare", "default", "delete", "desc",
    "descending", "disconnect", "distinct", "do", "domain", "double",
    "drop", "else", "end", "entry_point", "escape", "exception",
    "execute", "exists", "exit", "external", "extract", "fetch", "file",
    "filter", "float", "for", "foreign", "from", "full", "function",
    "gdscode", "generator", "gen_id", "global", "grant", "group",
    "having", "hour", "if", "in", "inactive", "index", "inner",
    "input_type", "insensitive", "insert", "int", "integer", "into", "is",
    "isolation", "join", "key", "leading", "left", "length", "level",
    "like", "long", "lower", "manual", "max", "maximum_segment", "merge",
    "min", "minute", "module_name", "month", "names", "national",
    "natural", "nchar", "no", "not", "null", "numeric", "octet_length",
    "of", "on", "only", "open", "option", "or", "order", "outer",
    "output_type", "overflow", "page", "pages", "page_size", "parameter",
    "password", "plan", "position", "post_event", "precision", "primary",
    "privileges", "procedure", "protected", "rdb$db_key", "read", "real",
    "record_version", "recreate", "recursive", "references", "release",
    "reserv", "reserving", "retain", "returning_values", "returns",
    "revoke", "right", "rollback", "rows", "row_count", "savepoint",
    "schema", "second", "segment", "select", "sensitive", "set", "shadow",
    "shared", "singular", "size", "smallint", "snapshot", "some", "sort",
    "sqlcode", "stability", "start", "starting", "starts", "statistics",
    "sub_type", "sum", "suspend", "table", "then", "time", "timestamp",
    "to", "trailing", "transaction", "trigger", "trim", "uncommitted",
    "union", "unique", "update", "upper", "user", "using", "value",
    "values", "varchar", "variable", "varying", "view", "wait", "when",
    "where", "while", "with", "work", "write", "year",
])
class _StringType(sqltypes.String):
    """Base for Firebird string types; adds an optional character set."""
    def __init__(self, charset=None, **kw):
        # The base String __init__ does not consult ``charset``; store it
        # afterwards for the type compiler to render.
        super(_StringType, self).__init__(**kw)
        self.charset = charset
class VARCHAR(_StringType, sqltypes.VARCHAR):
    """Firebird VARCHAR type"""
    __visit_name__ = 'VARCHAR'
    def __init__(self, length=None, **kwargs):
        kwargs['length'] = length
        super(VARCHAR, self).__init__(**kwargs)
class CHAR(_StringType, sqltypes.CHAR):
    """Firebird CHAR type"""
    __visit_name__ = 'CHAR'
    def __init__(self, length=None, **kwargs):
        kwargs['length'] = length
        super(CHAR, self).__init__(**kwargs)
class _FBDateTime(sqltypes.DateTime):
    """DateTime that promotes plain ``date`` bind values to ``datetime``."""
    def bind_processor(self, dialect):
        def process(value):
            # Exact type check on purpose: datetime.datetime is a subclass
            # of date and must pass through unchanged.
            if type(value) is not datetime.date:
                return value
            return datetime.datetime(value.year, value.month, value.day)
        return process
# Map generic SQLAlchemy types to Firebird-specific implementations.
colspecs = {
    sqltypes.DateTime: _FBDateTime
}
# Map Firebird system-catalog type names (RDB$TYPES) to SQLAlchemy types;
# used when reflecting columns in FBDialect.get_columns().
ischema_names = {
    'SHORT': SMALLINT,
    'LONG': INTEGER,
    'QUAD': FLOAT,
    'FLOAT': FLOAT,
    'DATE': DATE,
    'TIME': TIME,
    'TEXT': TEXT,
    'INT64': BIGINT,
    'DOUBLE': FLOAT,
    'TIMESTAMP': TIMESTAMP,
    'VARYING': VARCHAR,
    'CSTRING': CHAR,
    'BLOB': BLOB,
}
# TODO: date conversion types (should be implemented as _FBDateTime,
# _FBDate, etc. as bind/result functionality is required)
class FBTypeCompiler(compiler.GenericTypeCompiler):
    """Renders SQLAlchemy types as Firebird DDL type strings."""
    def visit_boolean(self, type_, **kw):
        # No native BOOLEAN; emulate with SMALLINT.
        return self.visit_SMALLINT(type_, **kw)
    def visit_datetime(self, type_, **kw):
        return self.visit_TIMESTAMP(type_, **kw)
    def visit_TEXT(self, type_, **kw):
        return "BLOB SUB_TYPE 1"
    def visit_BLOB(self, type_, **kw):
        return "BLOB SUB_TYPE 0"
    def _extend_string(self, type_, basic):
        # Append the CHARACTER SET clause when the type carries one.
        charset = getattr(type_, 'charset', None)
        if charset is None:
            return basic
        return '%s CHARACTER SET %s' % (basic, charset)
    def visit_CHAR(self, type_, **kw):
        rendered = super(FBTypeCompiler, self).visit_CHAR(type_, **kw)
        return self._extend_string(type_, rendered)
    def visit_VARCHAR(self, type_, **kw):
        if not type_.length:
            raise exc.CompileError(
                "VARCHAR requires a length on dialect %s" %
                self.dialect.name)
        rendered = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw)
        return self._extend_string(type_, rendered)
class FBCompiler(sql.compiler.SQLCompiler):
    """Firebird specific idiosyncrasies"""
    # NOTE(review): ansi_bind_rules is a SQLCompiler flag governing where
    # bind parameters may appear; see SQLAlchemy's compiler docs — confirm.
    ansi_bind_rules = True
    # def visit_contains_op_binary(self, binary, operator, **kw):
    # can't use CONTAINING because it's case insensitive.
    # def visit_notcontains_op_binary(self, binary, operator, **kw):
    # can't use NOT CONTAINING because it's case insensitive.
    def visit_now_func(self, fn, **kw):
        # Firebird spells now() as CURRENT_TIMESTAMP.
        return "CURRENT_TIMESTAMP"
    def visit_startswith_op_binary(self, binary, operator, **kw):
        # Use Firebird's STARTING WITH operator rather than LIKE 'x%'.
        return '%s STARTING WITH %s' % (
            binary.left._compiler_dispatch(self, **kw),
            binary.right._compiler_dispatch(self, **kw))
    def visit_notstartswith_op_binary(self, binary, operator, **kw):
        return '%s NOT STARTING WITH %s' % (
            binary.left._compiler_dispatch(self, **kw),
            binary.right._compiler_dispatch(self, **kw))
    def visit_mod_binary(self, binary, operator, **kw):
        # Modulo is the mod() function, not the % operator.
        return "mod(%s, %s)" % (
            self.process(binary.left, **kw),
            self.process(binary.right, **kw))
    def visit_alias(self, alias, asfrom=False, **kwargs):
        if self.dialect._version_two:
            return super(FBCompiler, self).\
                visit_alias(alias, asfrom=asfrom, **kwargs)
        else:
            # Override to not use the AS keyword which FB 1.5 does not like
            if asfrom:
                alias_name = isinstance(alias.name,
                                        expression._truncated_label) and \
                    self._truncated_identifier("alias",
                                               alias.name) or alias.name
                return self.process(
                    alias.original, asfrom=asfrom, **kwargs) + \
                    " " + \
                    self.preparer.format_alias(alias, alias_name)
            else:
                return self.process(alias.original, **kwargs)
    def visit_substring_func(self, func, **kw):
        # SUBSTRING(x FROM start [FOR length]) syntax.
        s = self.process(func.clauses.clauses[0])
        start = self.process(func.clauses.clauses[1])
        if len(func.clauses.clauses) > 2:
            length = self.process(func.clauses.clauses[2])
            return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length)
        else:
            return "SUBSTRING(%s FROM %s)" % (s, start)
    def visit_length_func(self, function, **kw):
        # char_length exists from Firebird 2.0; older versions use strlen.
        if self.dialect._version_two:
            return "char_length" + self.function_argspec(function)
        else:
            return "strlen" + self.function_argspec(function)
    visit_char_length_func = visit_length_func
    def function_argspec(self, func, **kw):
        # TODO: this probably will need to be
        # narrowed to a fixed list, some no-arg functions
        # may require parens - see similar example in the oracle
        # dialect
        if func.clauses is not None and len(func.clauses):
            return self.process(func.clause_expr, **kw)
        else:
            return ""
    def default_from(self):
        # Firebird requires a FROM clause even for constant selects.
        return " FROM rdb$database"
    def visit_sequence(self, seq):
        # Sequences are read via the gen_id() function.
        return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)
    def get_select_precolumns(self, select, **kw):
        """Called when building a ``SELECT`` statement, position is just
        before column list Firebird puts the limit and offset right
        after the ``SELECT``...
        """
        result = ""
        if select._limit_clause is not None:
            result += "FIRST %s " % self.process(select._limit_clause, **kw)
        if select._offset_clause is not None:
            result += "SKIP %s " % self.process(select._offset_clause, **kw)
        if select._distinct:
            result += "DISTINCT "
        return result
    def limit_clause(self, select, **kw):
        """Already taken care of in the `get_select_precolumns` method."""
        return ""
    def returning_clause(self, stmt, returning_cols):
        # Render RETURNING col1, col2, ... for INSERT/UPDATE/DELETE.
        columns = [
            self._label_select_column(None, c, True, False, {})
            for c in expression._select_iterables(returning_cols)
        ]
        return 'RETURNING ' + ', '.join(columns)
class FBDDLCompiler(sql.compiler.DDLCompiler):
    """Firebird syntactic idiosyncrasies"""
    def visit_create_sequence(self, create):
        """Generate a ``CREATE GENERATOR`` statement for the sequence."""
        # no syntax for these
        # http://www.firebirdsql.org/manual/generatorguide-sqlsyntax.html
        # Bug fix: previously raised the ``NotImplemented`` singleton, which
        # is not an exception and not callable — the raise itself produced a
        # TypeError.  NotImplementedError is the correct exception type.
        if create.element.start is not None:
            raise NotImplementedError(
                "Firebird SEQUENCE doesn't support START WITH")
        if create.element.increment is not None:
            raise NotImplementedError(
                "Firebird SEQUENCE doesn't support INCREMENT BY")
        if self.dialect._version_two:
            return "CREATE SEQUENCE %s" % \
                self.preparer.format_sequence(create.element)
        else:
            return "CREATE GENERATOR %s" % \
                self.preparer.format_sequence(create.element)
    def visit_drop_sequence(self, drop):
        """Generate a ``DROP GENERATOR`` statement for the sequence."""
        if self.dialect._version_two:
            return "DROP SEQUENCE %s" % \
                self.preparer.format_sequence(drop.element)
        else:
            return "DROP GENERATOR %s" % \
                self.preparer.format_sequence(drop.element)
class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
    """Install Firebird specific reserved words."""
    reserved_words = RESERVED_WORDS
    # Identifiers may not begin with an underscore either.
    illegal_initial_characters = \
        compiler.ILLEGAL_INITIAL_CHARACTERS.union({'_'})
    def __init__(self, dialect):
        # Firebird has no schema concept; always omit it when quoting.
        super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)
class FBExecutionContext(default.DefaultExecutionContext):
    def fire_sequence(self, seq, type_):
        """Get the next value from the sequence using ``gen_id()``."""
        seq_name = self.dialect.identifier_preparer.format_sequence(seq)
        stmt = "SELECT gen_id(%s, 1) FROM rdb$database" % seq_name
        return self._execute_scalar(stmt, type_)
class FBDialect(default.DefaultDialect):
    """Firebird dialect"""
    name = 'firebird'
    # Firebird identifiers are limited to 31 characters.
    max_identifier_length = 31
    supports_sequences = True
    sequences_optional = False
    supports_default_values = True
    postfetch_lastrowid = False
    supports_native_boolean = False
    # Case-insensitive names come back upper-cased; normalize to lower.
    requires_name_normalize = True
    supports_empty_insert = False
    statement_compiler = FBCompiler
    ddl_compiler = FBDDLCompiler
    preparer = FBIdentifierPreparer
    type_compiler = FBTypeCompiler
    execution_ctx_cls = FBExecutionContext
    colspecs = colspecs
    ischema_names = ischema_names
    construct_arguments = []
    # defaults to dialect ver. 3,
    # will be autodetected off upon
    # first connect
    _version_two = True
    def initialize(self, connection):
        """Detect server version and downgrade features for pre-2.0 servers."""
        super(FBDialect, self).initialize(connection)
        self._version_two = ('firebird' in self.server_version_info and
                             self.server_version_info >= (2, )
                             ) or \
            ('interbase' in self.server_version_info and
             self.server_version_info >= (6, )
             )
        if not self._version_two:
            # TODO: whatever other pre < 2.0 stuff goes here
            self.ischema_names = ischema_names.copy()
            self.ischema_names['TIMESTAMP'] = sqltypes.DATE
            self.colspecs = {
                sqltypes.DateTime: sqltypes.DATE
            }
        # RETURNING is only available from version 2 onwards.
        self.implicit_returning = self._version_two and \
            self.__dict__.get('implicit_returning', True)
    def normalize_name(self, name):
        """Lower-case a case-insensitive name coming back from the server."""
        # Remove trailing spaces: FB uses a CHAR() type,
        # that is padded with spaces
        name = name and name.rstrip()
        if name is None:
            return None
        elif name.upper() == name and \
                not self.identifier_preparer._requires_quotes(name.lower()):
            return name.lower()
        else:
            return name
    def denormalize_name(self, name):
        """Upper-case a case-insensitive name before sending it to the server."""
        if name is None:
            return None
        elif name.lower() == name and \
                not self.identifier_preparer._requires_quotes(name.lower()):
            return name.upper()
        else:
            return name
    def has_table(self, connection, table_name, schema=None):
        """Return ``True`` if the given table exists, ignoring
        the `schema`."""
        tblqry = """
        SELECT 1 AS has_table FROM rdb$database
        WHERE EXISTS (SELECT rdb$relation_name
                      FROM rdb$relations
                      WHERE rdb$relation_name=?)
        """
        c = connection.execute(tblqry, [self.denormalize_name(table_name)])
        return c.first() is not None
    def has_sequence(self, connection, sequence_name, schema=None):
        """Return ``True`` if the given sequence (generator) exists."""
        genqry = """
        SELECT 1 AS has_sequence FROM rdb$database
        WHERE EXISTS (SELECT rdb$generator_name
                      FROM rdb$generators
                      WHERE rdb$generator_name=?)
        """
        c = connection.execute(genqry, [self.denormalize_name(sequence_name)])
        return c.first() is not None
    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        """Return the (normalized) names of all non-system tables."""
        # there are two queries commonly mentioned for this.
        # this one, using view_blr, is at the Firebird FAQ among other places:
        # http://www.firebirdfaq.org/faq174/
        s = """
        select rdb$relation_name
        from rdb$relations
        where rdb$view_blr is null
        and (rdb$system_flag is null or rdb$system_flag = 0);
        """
        # the other query is this one. It's not clear if there's really
        # any difference between these two. This link:
        # http://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8
        # states them as interchangeable. Some discussion at [ticket:2898]
        # SELECT DISTINCT rdb$relation_name
        # FROM rdb$relation_fields
        # WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
        return [self.normalize_name(row[0]) for row in connection.execute(s)]
    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        """Return the (normalized) names of all non-system views."""
        # see http://www.firebirdfaq.org/faq174/
        s = """
        select rdb$relation_name
        from rdb$relations
        where rdb$view_blr is not null
        and (rdb$system_flag is null or rdb$system_flag = 0);
        """
        return [self.normalize_name(row[0]) for row in connection.execute(s)]
    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None, **kw):
        """Return the SQL source of the named view, or None if absent."""
        qry = """
        SELECT rdb$view_source AS view_source
        FROM rdb$relations
        WHERE rdb$relation_name=?
        """
        rp = connection.execute(qry, [self.denormalize_name(view_name)])
        row = rp.first()
        if row:
            return row['view_source']
        else:
            return None
    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        """Return the primary-key columns of the given table."""
        # Query to extract the PK/FK constrained fields of the given table
        keyqry = """
        SELECT se.rdb$field_name AS fname
        FROM rdb$relation_constraints rc
             JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        """
        tablename = self.denormalize_name(table_name)
        # get primary key fields
        c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
        pkfields = [self.normalize_name(r['fname']) for r in c.fetchall()]
        return {'constrained_columns': pkfields, 'name': None}
    @reflection.cache
    def get_column_sequence(self, connection,
                            table_name, column_name,
                            schema=None, **kw):
        """Guess the generator feeding ``column_name`` via a BEFORE INSERT trigger."""
        tablename = self.denormalize_name(table_name)
        colname = self.denormalize_name(column_name)
        # Heuristic-query to determine the generator associated to a PK field
        genqry = """
        SELECT trigdep.rdb$depended_on_name AS fgenerator
        FROM rdb$dependencies tabdep
             JOIN rdb$dependencies trigdep
                  ON tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
                     AND trigdep.rdb$depended_on_type=14
                     AND trigdep.rdb$dependent_type=2
             JOIN rdb$triggers trig ON
                    trig.rdb$trigger_name=tabdep.rdb$dependent_name
        WHERE tabdep.rdb$depended_on_name=?
          AND tabdep.rdb$depended_on_type=0
          AND trig.rdb$trigger_type=1
          AND tabdep.rdb$field_name=?
          AND (SELECT count(*)
               FROM rdb$dependencies trigdep2
               WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2
        """
        genr = connection.execute(genqry, [tablename, colname]).first()
        if genr is not None:
            return dict(name=self.normalize_name(genr['fgenerator']))
    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        """Reflect the columns of the given table into SQLAlchemy column dicts."""
        # Query to extract the details of all the fields of the given table
        tblqry = """
        SELECT r.rdb$field_name AS fname,
                        r.rdb$null_flag AS null_flag,
                        t.rdb$type_name AS ftype,
                        f.rdb$field_sub_type AS stype,
                        f.rdb$field_length/
                            COALESCE(cs.rdb$bytes_per_character,1) AS flen,
                        f.rdb$field_precision AS fprec,
                        f.rdb$field_scale AS fscale,
                        COALESCE(r.rdb$default_source,
                                f.rdb$default_source) AS fdefault
        FROM rdb$relation_fields r
             JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
             JOIN rdb$types t
              ON t.rdb$type=f.rdb$field_type AND
                    t.rdb$field_name='RDB$FIELD_TYPE'
             LEFT JOIN rdb$character_sets cs ON
                    f.rdb$character_set_id=cs.rdb$character_set_id
        WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
        ORDER BY r.rdb$field_position
        """
        # get the PK, used to determine the eventual associated sequence
        pk_constraint = self.get_pk_constraint(connection, table_name)
        pkey_cols = pk_constraint['constrained_columns']
        tablename = self.denormalize_name(table_name)
        # get all of the fields for this table
        c = connection.execute(tblqry, [tablename])
        cols = []
        while True:
            row = c.fetchone()
            if row is None:
                break
            name = self.normalize_name(row['fname'])
            orig_colname = row['fname']
            # get the data type
            colspec = row['ftype'].rstrip()
            coltype = self.ischema_names.get(colspec)
            if coltype is None:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (colspec, name))
                coltype = sqltypes.NULLTYPE
            elif issubclass(coltype, Integer) and row['fprec'] != 0:
                # Integer catalog type with a precision is really a NUMERIC;
                # Firebird stores the scale negated.
                coltype = NUMERIC(
                    precision=row['fprec'],
                    scale=row['fscale'] * -1)
            elif colspec in ('VARYING', 'CSTRING'):
                coltype = coltype(row['flen'])
            elif colspec == 'TEXT':
                coltype = TEXT(row['flen'])
            elif colspec == 'BLOB':
                # sub_type 1 is a text blob, everything else is binary.
                if row['stype'] == 1:
                    coltype = TEXT()
                else:
                    coltype = BLOB()
            else:
                coltype = coltype()
            # does it have a default value?
            defvalue = None
            if row['fdefault'] is not None:
                # the value comes down as "DEFAULT 'value'": there may be
                # more than one whitespace around the "DEFAULT" keyword
                # and it may also be lower case
                # (see also http://tracker.firebirdsql.org/browse/CORE-356)
                defexpr = row['fdefault'].lstrip()
                assert defexpr[:8].rstrip().upper() == \
                    'DEFAULT', "Unrecognized default value: %s" % \
                    defexpr
                defvalue = defexpr[8:].strip()
                if defvalue == 'NULL':
                    # Redundant
                    defvalue = None
            col_d = {
                'name': name,
                'type': coltype,
                'nullable': not bool(row['null_flag']),
                'default': defvalue,
                'autoincrement': defvalue is None
            }
            # A lower-case original name means the column was quoted at
            # creation time; preserve that.
            if orig_colname.lower() == orig_colname:
                col_d['quote'] = True
            # if the PK is a single field, try to see if its linked to
            # a sequence thru a trigger
            if len(pkey_cols) == 1 and name == pkey_cols[0]:
                seq_d = self.get_column_sequence(connection, tablename, name)
                if seq_d is not None:
                    col_d['sequence'] = seq_d
            cols.append(col_d)
        return cols
    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        """Reflect the foreign keys of the given table."""
        # Query to extract the details of each UK/FK of the given table
        fkqry = """
        SELECT rc.rdb$constraint_name AS cname,
                        cse.rdb$field_name AS fname,
                        ix2.rdb$relation_name AS targetrname,
                        se.rdb$field_name AS targetfname
        FROM rdb$relation_constraints rc
             JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
             JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
             JOIN rdb$index_segments cse ON
                        cse.rdb$index_name=ix1.rdb$index_name
             JOIN rdb$index_segments se
                  ON se.rdb$index_name=ix2.rdb$index_name
                     AND se.rdb$field_position=cse.rdb$field_position
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        ORDER BY se.rdb$index_name, se.rdb$field_position
        """
        tablename = self.denormalize_name(table_name)
        c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
        # Accumulate one dict per constraint name; rows arrive one column
        # pair at a time.
        fks = util.defaultdict(lambda: {
            'name': None,
            'constrained_columns': [],
            'referred_schema': None,
            'referred_table': None,
            'referred_columns': []
        })
        for row in c:
            cname = self.normalize_name(row['cname'])
            fk = fks[cname]
            if not fk['name']:
                fk['name'] = cname
                fk['referred_table'] = self.normalize_name(row['targetrname'])
            fk['constrained_columns'].append(
                self.normalize_name(row['fname']))
            fk['referred_columns'].append(
                self.normalize_name(row['targetfname']))
        return list(fks.values())
    @reflection.cache
    def get_indexes(self, connection, table_name, schema=None, **kw):
        """Reflect the non-constraint, non-FK indexes of the given table."""
        qry = """
        SELECT ix.rdb$index_name AS index_name,
               ix.rdb$unique_flag AS unique_flag,
               ic.rdb$field_name AS field_name
        FROM rdb$indices ix
             JOIN rdb$index_segments ic
                  ON ix.rdb$index_name=ic.rdb$index_name
             LEFT OUTER JOIN rdb$relation_constraints
                  ON rdb$relation_constraints.rdb$index_name =
                        ic.rdb$index_name
        WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
          AND rdb$relation_constraints.rdb$constraint_type IS NULL
        ORDER BY index_name, ic.rdb$field_position
        """
        c = connection.execute(qry, [self.denormalize_name(table_name)])
        indexes = util.defaultdict(dict)
        for row in c:
            indexrec = indexes[row['index_name']]
            if 'name' not in indexrec:
                indexrec['name'] = self.normalize_name(row['index_name'])
                indexrec['column_names'] = []
                indexrec['unique'] = bool(row['unique_flag'])
            indexrec['column_names'].append(
                self.normalize_name(row['field_name']))
        return list(indexes.values())
| |
import re
import inspect
from AXUI.logger import LOGGER
from AXUI.exceptions import DriverException
try:
import appium.webdriver as webdriver
except ImportError as e:
LOGGER.error("To use AXUI appium driver, you must install selenium and appium python client first, check https://pypi.python.org/pypi/selenium, https://pypi.python.org/pypi/Appium-Python-Client")
raise e
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.keys import Keys
from . import Translater
class Keyboard(object):
    """Keyboard-input helper bound to one selenium/appium element."""

    def __init__(self, selenium_element):
        self.selenium_element = selenium_element

    def input(self, *values):
        '''Send keyboard input to the wrapped UI element.

        Keyboard input string:
        (1) Normal characters like [0~9][a~z][A~Z] are sent directly.
        (2) Special characters like "space", "tab", "newline", "F1~F12"
            are written as {key_name}; every key name defined in
            "selenium/webdriver/common/keys" is supported.
        '''
        outgoing = []
        for item in values:
            # Selenium Keys objects are still accepted and pass through.
            if isinstance(item, Keys):
                outgoing.append(item)
                continue
            if isinstance(item, int):
                outgoing.append(str(item))
                continue
            if isinstance(item, str):
                # "{NAME}" placeholders resolve against the Keys catalogue.
                if re.match("^{.*}$", item) is not None:
                    key_name = item.lstrip("{").rstrip("}")
                    try:
                        outgoing.append(getattr(Keys, key_name))
                    except AttributeError:
                        # Unknown key name: skip it, same as the original.
                        LOGGER.warning("Input special key not support: %s, skip this input" , key_name)
                else:
                    outgoing.append(item)
            # Anything else (floats, lists, ...) is silently dropped.
        self.selenium_element.send_keys(*outgoing)
class Mouse(object):
    """Mouse-action helper delegating to a selenium/appium element."""

    def __init__(self, selenium_element):
        self.selenium_element = selenium_element

    def left_click(self):
        """Perform a left click on the wrapped element."""
        self.selenium_element.click()
class Touch(object):
    """Touch-gesture helper porting appium touch methods onto a wrapped
    selenium/appium element.

    All methods delegate 1:1 to the underlying appium webdriver/element;
    ``origin_el``/``destination_el``/``element`` arguments are AXUI
    wrappers and are unwrapped to their ``selenium_element`` first.
    """

    def __init__(self, selenium_element):
        self.selenium_element = selenium_element

    ############################
    # porting appium touch methods
    ############################
    def scroll(self, origin_el, destination_el):
        """Scroll from origin_el to destination_el."""
        self.selenium_element.scroll(origin_el.selenium_element,
                                     destination_el.selenium_element)

    def drag_and_drop(self, origin_el, destination_el):
        """Drag origin_el and drop it onto destination_el."""
        self.selenium_element.drag_and_drop(origin_el.selenium_element,
                                            destination_el.selenium_element)

    def tap(self, positions, duration=None):
        """Tap the given [(x, y), ...] positions, optionally holding for
        *duration* (milliseconds, per the appium client API)."""
        self.selenium_element.tap(positions, duration)

    def swipe(self, start_x, start_y, end_x, end_y, duration=None):
        """Swipe from (start_x, start_y) to (end_x, end_y)."""
        self.selenium_element.swipe(start_x, start_y, end_x, end_y, duration)

    def flick(self, start_x, start_y, end_x, end_y):
        """Flick from (start_x, start_y) to (end_x, end_y)."""
        self.selenium_element.flick(start_x, start_y, end_x, end_y)

    def pinch(self, element=None, percent=200, steps=50):
        """Pinch on *element*, or on the whole screen when element is None.

        Bug fix: the original dereferenced ``element.selenium_element``
        unconditionally, so calling pinch() with the documented default
        (element=None) raised AttributeError. None is now forwarded, which
        the appium client accepts as "whole screen".
        """
        target = element.selenium_element if element is not None else None
        self.selenium_element.pinch(target, percent, steps)

    def zoom(self, element=None, percent=200, steps=50):
        """Zoom on *element*, or on the whole screen when element is None.

        Same None-handling fix as ``pinch``.
        """
        target = element.selenium_element if element is not None else None
        self.selenium_element.zoom(target, percent, steps)
class NormalPattern(object):
    """Pattern interface exposing common element-level selenium calls.

    Only names in ``interfaces`` are forwarded to the wrapped selenium
    element; anything else is logged and yields None.
    """

    # Whitelist of selenium element methods reachable via this pattern.
    interfaces = [
        "submit",
        "clear",
        "is_selected",
        "is_enabled",
        "is_displayed",
        "value_of_css_property",
    ]

    def __init__(self, selenium_element):
        self.selenium_element = selenium_element

    def __getattr__(self, name):
        # Guard clause: unknown names log and fall through to None.
        if name not in self.interfaces:
            LOGGER.debug("This method not exist in NormalPattern: %s", name)
            return None
        return getattr(self.selenium_element, name)
class BrowserPattern(object):
    """Pattern interface for a web browser root.

    Forwards whitelisted webdriver methods (navigation, cookies, window
    geometry, timeouts, logs) to the wrapped driver; unknown names are
    logged and yield None.
    """

    # Whitelist of webdriver methods reachable via this pattern.
    interfaces = [
        "get",
        "close",
        "maximize_window",
        "execute_script",
        "execute_async_script",
        "set_script_timeout",
        "back",
        "forward",
        "refresh",
        "get_cookies",
        "get_cookie",
        "delete_cookie",
        "delete_all_cookies",
        "add_cookie",
        "implicitly_wait",
        "set_page_load_timeout",
        "set_window_size",
        "get_window_size",
        "set_window_position",
        "get_window_position",
        "get_log",
    ]

    def __init__(self, selenium_element):
        self.selenium_element = selenium_element

    def __getattr__(self, name):
        # Guard clause: unknown names log and fall through to None.
        if name not in self.interfaces:
            LOGGER.debug("This method not exist in BrowserPattern: %s", name)
            return None
        return getattr(self.selenium_element, name)
class MobilePattern(object):
    """Pattern interface for a mobile (appium) root element.

    Forwards whitelisted appium driver methods to the wrapped driver;
    unknown names are logged and yield None.
    """

    # Whitelist of appium driver methods reachable via this pattern.
    # NOTE: may need updating as the appium client API evolves.
    interfaces = [
        "scroll",
        "drag_and_drop",
        "tap",
        "swipe",
        "flick",
        "pinch",
        "zoom",
        "reset",
        "pull_file",
        "pull_folder",
        "push_file",
        "background_app",
        "is_app_installed",
        "install_app",
        "remove_app",
        "launch_app",
        "close_app",
        "start_activity",
        "lock",
        "shake",
        "open_notifications",
        "set_network_connection",
        "is_ime_active",
        "activate_ime_engine",
        "deactivate_ime_engine",
        "get_settings",
        "update_settings",
        "toggle_location_services",
    ]

    def __init__(self, selenium_element):
        self.selenium_element = selenium_element

    def __getattr__(self, name):
        # Guard clause: unknown names log and fall through to None.
        if name not in self.interfaces:
            LOGGER.debug("This method not exist in MobileRootPattern: %s", name)
            return None
        return getattr(self.selenium_element, name)
class UIElement(object):
    '''This class defines interfaces for common UI element
    Every driver (Windows, Appium, Selenium) should implement this interfaces,
    provides independent interfaces for uplevel modules, so we transplant AXUI cross different platform
    Attributes:
        find_element: find the first descendant element which matches parsed_identifier
        find_elements: find all elements which match parsed_identifier
        verify: verify current element is valid
        get_keyboard: class for keyboard related methods
        get_mouse: class for mouse related methods
        get_touch: class for touch related methods
        get_property: get property value for current element
        get_pattern: get pattern interface for current element
    '''
    def __init__(self, selenium_element):
        # The wrapped selenium/appium WebElement all methods delegate to.
        self.selenium_element = selenium_element

    def find_element(self, parsed_identifier):
        '''
        find the first child UI element via identifier, return one UIAElement if success, return None if not find
        '''
        # Translate AXUI's parsed identifier into a (by, value) pair that
        # selenium's find_element understands.
        translated_identifier = Translater.ID_Translater(parsed_identifier).get_translated()
        try:
            selenium_element = self.selenium_element.find_element(by=translated_identifier[0], value=translated_identifier[1])
        except NoSuchElementException:
            # Absence is an expected outcome here, signalled by None
            # rather than an exception.
            LOGGER.debug("Cannot find target element")
            return None
        else:
            return UIElement(selenium_element)

    def find_elements(self, parsed_identifier):
        '''
        find the child UI elements via identifier, return a list containing target UI elements
        '''
        translated_identifier = Translater.ID_Translater(parsed_identifier).get_translated()
        elements = self.selenium_element.find_elements(by=translated_identifier[0], value=translated_identifier[1])
        # Wrap every raw selenium element in a UIElement; an empty list
        # means nothing matched.
        UIElements = []
        for element in elements:
            UIElements.append(UIElement(element))
        return UIElements

    def get_property(self, name):
        '''
        get property value

        Resolution order: a direct attribute on the selenium element,
        then (if the attribute is missing) the element's get_attribute()
        lookup. Returns None when the name resolves to a method, or when
        nothing matches and get_attribute is unavailable.
        '''
        try:
            obj = getattr(self.selenium_element, name)
        except AttributeError:
            LOGGER.debug("Cannot find this attribute: %s" , name)
            if hasattr(self.selenium_element, "get_attribute"):
                # Fall back to the DOM-attribute lookup selenium provides.
                LOGGER.debug("Try get_attribute method")
                return self.selenium_element.get_attribute(name)
        else:
            if inspect.ismethod(obj):
                # Callables are not properties; hide them from this API.
                LOGGER.debug("This is a method, not a property: %s" , name)
                return None
            else:
                return obj

    def get_pattern(self, name):
        '''
        pattern is a class support one kind of UI actions
        '''
        if name == "WebUIElementPattern":
            return NormalPattern(self.selenium_element)
        else:
            return None

    def get_keyboard(self):
        '''
        get keyboard class to use keyboard related methods
        '''
        return Keyboard(self.selenium_element)

    def get_mouse(self):
        '''
        get mouse class to use mouse related methods
        '''
        return Mouse(self.selenium_element)

    def get_touch(self):
        '''
        get touch class to use touch related methods
        '''
        return Touch(self.selenium_element)

    def __getattr__(self, name):
        # Attribute fallback chain: the reserved names Keyboard/Mouse/Touch
        # return helper objects; everything else is tried first as a
        # property, then as a pattern.
        if name == "Keyboard":
            return self.get_keyboard()
        elif name == "Mouse":
            return self.get_mouse()
        elif name == "Touch":
            return self.get_touch()
        else:
            attr = self.get_property(name)
            if attr is not None:
                return attr
            attr = self.get_pattern(name)
            if attr is not None:
                return attr
            # NOTE(review): a property whose real value is None (or any
            # case where get_property returns None) ends up here and
            # raises — callers cannot distinguish "missing" from "None".
            raise AttributeError("Attribute not exist: %s" % name)
class Root(UIElement):
    '''
    root is the entry point to interact with UI
    like desktop of windows UIA, web browser of web driver API
    This class defines interfaces for root element
    Every driver (Windows, Appium, Selenium) should implement this interfaces,
    provides independent interfaces for uplevel modules, so we transplant AXUI cross different platform
    Attributes:
        start: start root element
        stop: stop root element
        screenshot: take a screen shot for root element
        find_element: find the first descendant element which matches parsed_identifier
        find_elements: find all elements which match parsed_identifier
        verify: verify current element is valid
        get_keyboard: class for keyboard related methods
        get_mouse: class for mouse related methods
        get_touch: class for touch related methods
        get_property: get property value for current element
        get_pattern: get pattern interface for current element
    '''
    def __init__(self):
        # The remote session is created lazily by start().
        self.webdriver = None

    @property
    def selenium_element(self):
        # The remote webdriver itself plays the role of the root element.
        return self.webdriver

    def start(self, **kwargs):
        '''
        get root ready
        like get root element in windows UIA, get browser to target website
        must have a "browser_name" argument in kwargs to indicate which browser to use
        other kwargs are same as normal selenium webdrivers
        '''
        # Default to the standard local webdriver hub when the caller
        # does not name a command executor.
        kwargs.setdefault("command_executor", 'http://127.0.0.1:4444/wd/hub')
        self.webdriver = webdriver.Remote(**kwargs)

    def stop(self, **kwargs):
        '''
        stop root
        like close browser for web driver API
        '''
        self.webdriver.quit()

    def screenshot(self, absfile_path):
        '''
        take a screen shot for root
        '''
        self.webdriver.get_screenshot_as_file(absfile_path)

    def verify(self):
        '''
        verify if session exist, not check for appium
        '''
        return self.webdriver

    def get_pattern(self, name):
        '''
        pattern is a class support one kind of UI actions
        '''
        # Dispatch table instead of an if/elif chain; unknown names
        # yield None, as before.
        known = {
            "BrowserPattern": BrowserPattern,
            "MobilePattern": MobilePattern,
        }
        pattern_cls = known.get(name)
        if pattern_cls is None:
            return None
        return pattern_cls(self.selenium_element)

    def get_keyboard(self):
        '''
        get keyboard class to use keyboard related methods
        '''
        LOGGER.debug("Browser not support keyboard action")
        return None

    def get_mouse(self):
        '''
        get mouse class to use mouse related methods
        '''
        LOGGER.debug("Browser not support mouse action")
        return None

    def get_touch(self):
        '''
        get touch class to use touch related methods
        '''
        LOGGER.debug("Browser not support touch action")
        return None
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.