gt stringclasses 1 value | context stringlengths 2.49k 119k |
|---|---|
from __future__ import absolute_import
# Interface to the Salesforce BULK API
import os
from collections import namedtuple
from httplib2 import Http
import requests
import urllib2
import urlparse
import requests
import xml.etree.ElementTree as ET
from tempfile import TemporaryFile, NamedTemporaryFile
import StringIO
import re
import time
import csv
from . import bulk_states
# Result row for a bulk upload: mirrors the columns of Salesforce's
# batch result CSV (Id, Success, Created, Error).
UploadResult = namedtuple('UploadResult', 'id success created error')
class BulkApiError(Exception):
    """Base error raised for failed Bulk API interactions."""

    def __init__(self, message, status_code=None):
        super(BulkApiError, self).__init__(message)
        self.status_code = status_code


class BulkJobAborted(BulkApiError):
    """Raised when a job transitions to the Aborted state."""

    def __init__(self, job_id):
        self.job_id = job_id
        super(BulkJobAborted, self).__init__('Job {0} aborted'.format(job_id))


class BulkBatchFailed(BulkApiError):
    """Raised when a batch within a job reports a failure state."""

    def __init__(self, job_id, batch_id, state_message):
        self.job_id = job_id
        self.batch_id = batch_id
        self.state_message = state_message
        super(BulkBatchFailed, self).__init__(
            'Batch {0} of job {1} failed: {2}'.format(batch_id, job_id,
                                                      state_message))
class SalesforceBulk(object):
    """Client for the Salesforce Bulk API (asynchronous jobs and batches)."""

    def __init__(self, sessionId=None, host=None, username=None, password=None,
                 exception_class=BulkApiError, API_version="29.0"):
        if not sessionId and not username:
            raise RuntimeError(
                "Must supply either sessionId/instance_url or username/password")
        if not sessionId:
            # Fall back to an OAuth username/password login.
            sessionId, endpoint = SalesforceBulk.login_to_salesforce(
                username, password)
            host = urlparse.urlparse(endpoint)
            host = host.hostname.replace("-api", "")
        # Accept either a bare hostname or a full URL for the endpoint.
        self.endpoint = host if host[0:4] == 'http' else "https://" + host
        self.endpoint += "/services/async/%s" % API_version
        self.sessionId = sessionId
        self.jobNS = 'http://www.force.com/2009/06/asyncapi/dataload'
        self.jobs = {}            # job_id => job_id
        self.batches = {}         # batch_id => job_id
        self.batch_statuses = {}  # batch_id => last fetched status dict
        self.exception_class = exception_class
@staticmethod
def login_to_salesforce(username, password):
env_vars = (
'SALESFORCE_CLIENT_ID',
'SALESFORCE_CLIENT_SECRET',
'SALESFORCE_REDIRECT_URI',
)
missing_env_vars = [e for e in env_vars if e not in os.environ]
if missing_env_vars:
raise RuntimeError(
"You must set {0} to use username/pass login".format(
', '.join(missing_env_vars)))
try:
import salesforce_oauth_request
except ImportError:
raise ImportError(
"You must install salesforce-oauth-request to use username/password")
packet = salesforce_oauth_request.login(
username=username, password=password)
return packet['access_token'], packet['instance_url']
def headers(self, values={}):
default = {"X-SFDC-Session": self.sessionId,
"Content-Type": "application/xml; charset=UTF-8"}
for k, val in values.iteritems():
default[k] = val
return default
# Register a new Bulk API job - returns the job id
def create_query_job(self, object_name, **kwargs):
return self.create_job(object_name, "query", **kwargs)
def create_insert_job(self, object_name, **kwargs):
return self.create_job(object_name, "insert", **kwargs)
def create_upsert_job(self, object_name, external_id_name, **kwargs):
return self.create_job(object_name, "upsert", external_id_name=external_id_name, **kwargs)
def create_update_job(self, object_name, **kwargs):
return self.create_job(object_name, "update", **kwargs)
def create_delete_job(self, object_name, **kwargs):
return self.create_job(object_name, "delete", **kwargs)
def create_job(self, object_name=None, operation=None, contentType='CSV',
concurrency=None, external_id_name=None):
assert(object_name is not None)
assert(operation is not None)
doc = self.create_job_doc(object_name=object_name,
operation=operation,
contentType=contentType,
concurrency=concurrency,
external_id_name=external_id_name)
http = Http()
resp, content = http.request(self.endpoint + "/job",
"POST",
headers=self.headers(),
body=doc)
self.check_status(resp, content)
tree = ET.fromstring(content)
job_id = tree.findtext("{%s}id" % self.jobNS)
self.jobs[job_id] = job_id
return job_id
def check_status(self, resp, content):
if resp.status >= 400:
msg = "Bulk API HTTP Error result: {0}".format(content)
self.raise_error(msg, resp.status)
def close_job(self, job_id):
doc = self.create_close_job_doc()
http = Http()
url = self.endpoint + "/job/%s" % job_id
resp, content = http.request(url, "POST", headers=self.headers(),
body=doc)
self.check_status(resp, content)
def abort_job(self, job_id):
"""Abort a given bulk job"""
doc = self.create_abort_job_doc()
http = Http()
url = self.endpoint + "/job/%s" % job_id
resp, content = http.request(
url,
"POST",
headers=self.headers(),
body=doc
)
self.check_status(resp, content)
def create_job_doc(self, object_name=None, operation=None,
contentType='CSV', concurrency=None, external_id_name=None):
root = ET.Element("jobInfo")
root.set("xmlns", self.jobNS)
op = ET.SubElement(root, "operation")
op.text = operation
obj = ET.SubElement(root, "object")
obj.text = object_name
if external_id_name:
ext = ET.SubElement(root, 'externalIdFieldName')
ext.text = external_id_name
if concurrency:
con = ET.SubElement(root, "concurrencyMode")
con.text = concurrency
ct = ET.SubElement(root, "contentType")
ct.text = contentType
buf = StringIO.StringIO()
tree = ET.ElementTree(root)
tree.write(buf, encoding="UTF-8")
return buf.getvalue()
def create_close_job_doc(self):
root = ET.Element("jobInfo")
root.set("xmlns", self.jobNS)
state = ET.SubElement(root, "state")
state.text = "Closed"
buf = StringIO.StringIO()
tree = ET.ElementTree(root)
tree.write(buf, encoding="UTF-8")
return buf.getvalue()
def create_abort_job_doc(self):
"""Create XML doc for aborting a job"""
root = ET.Element("jobInfo")
root.set("xmlns", self.jobNS)
state = ET.SubElement(root, "state")
state.text = "Aborted"
buf = StringIO.StringIO()
tree = ET.ElementTree(root)
tree.write(buf, encoding="UTF-8")
return buf.getvalue()
# Add a BulkQuery to the job - returns the batch id
def query(self, job_id, soql):
if job_id is None:
job_id = self.create_job(
re.search(re.compile("from (\w+)", re.I), soql).group(1),
"query")
http = Http()
uri = self.endpoint + "/job/%s/batch" % job_id
headers = self.headers({"Content-Type": "text/csv"})
resp, content = http.request(uri, method="POST", body=soql,
headers=headers)
self.check_status(resp, content)
tree = ET.fromstring(content)
batch_id = tree.findtext("{%s}id" % self.jobNS)
self.batches[batch_id] = job_id
return batch_id
def split_csv(self, csv, batch_size):
csv_io = StringIO.StringIO(csv)
batches = []
for i, line in enumerate(csv_io):
if not i:
headers = line
batch = headers
continue
if not i % batch_size:
batches.append(batch)
batch = headers
batch += line
batches.append(batch)
return batches
# Add a BulkUpload to the job - returns the batch id
def bulk_csv_upload(self, job_id, csv, batch_size=2500):
# Split a large CSV into manageable batches
batches = self.split_csv(csv, batch_size)
batch_ids = []
uri = self.endpoint + "/job/%s/batch" % job_id
headers = self.headers({"Content-Type": "text/csv"})
for batch in batches:
resp = requests.post(uri, data=batch, headers=headers)
content = resp.content
if resp.status_code >= 400:
self.raise_error(content, resp.status)
tree = ET.fromstring(content)
batch_id = tree.findtext("{%s}id" % self.jobNS)
self.batches[batch_id] = job_id
batch_ids.append(batch_id)
return batch_ids
def raise_error(self, message, status_code=None):
if status_code:
message = "[{0}] {1}".format(status_code, message)
if self.exception_class == BulkApiError:
raise self.exception_class(message, status_code=status_code)
else:
raise self.exception_class(message)
def post_bulk_batch(self, job_id, csv_generator):
uri = self.endpoint + "/job/%s/batch" % job_id
headers = self.headers({"Content-Type": "text/csv"})
resp = requests.post(uri, data=csv_generator, headers=headers)
content = resp.content
if resp.status_code >= 400:
self.raise_error(content, resp.status_code)
tree = ET.fromstring(content)
batch_id = tree.findtext("{%s}id" % self.jobNS)
return batch_id
# Add a BulkDelete to the job - returns the batch id
def bulk_delete(self, job_id, object_type, where, batch_size=2500):
query_job_id = self.create_query_job(object_type)
soql = "Select Id from %s where %s Limit 10000" % (object_type, where)
query_batch_id = self.query(query_job_id, soql)
self.wait_for_batch(query_job_id, query_batch_id, timeout=120)
results = []
def save_results(tf, **kwargs):
results.append(tf.read())
flag = self.get_batch_results(
query_job_id, query_batch_id, callback=save_results)
if job_id is None:
job_id = self.create_job(object_type, "delete")
http = Http()
# Split a large CSV into manageable batches
batches = self.split_csv(csv, batch_size)
batch_ids = []
uri = self.endpoint + "/job/%s/batch" % job_id
headers = self.headers({"Content-Type": "text/csv"})
for batch in results:
resp = requests.post(uri, data=batch, headers=headers)
content = resp.content
if resp.status_code >= 400:
self.raise_error(content, resp.status)
tree = ET.fromstring(content)
batch_id = tree.findtext("{%s}id" % self.jobNS)
self.batches[batch_id] = job_id
batch_ids.append(batch_id)
return batch_ids
def lookup_job_id(self, batch_id):
try:
return self.batches[batch_id]
except KeyError:
raise Exception(
"Batch id '%s' is uknown, can't retrieve job_id" % batch_id)
def job_status(self, job_id=None):
job_id = job_id or self.lookup_job_id(batch_id)
uri = urlparse.urljoin(self.endpoint +"/",
'job/{0}'.format(job_id))
response = requests.get(uri, headers=self.headers())
if response.status_code != 200:
self.raise_error(response.content, response.status_code)
tree = ET.fromstring(response.content)
result = {}
for child in tree:
result[re.sub("{.*?}", "", child.tag)] = child.text
return result
def job_state(self, job_id):
status = self.job_status(job_id)
if 'state' in status:
return status['state']
else:
return None
def batch_status(self, job_id=None, batch_id=None, reload=False):
if not reload and batch_id in self.batch_statuses:
return self.batch_statuses[batch_id]
job_id = job_id or self.lookup_job_id(batch_id)
http = Http()
uri = self.endpoint + \
"/job/%s/batch/%s" % (job_id, batch_id)
resp, content = http.request(uri, headers=self.headers())
self.check_status(resp, content)
tree = ET.fromstring(content)
result = {}
for child in tree:
result[re.sub("{.*?}", "", child.tag)] = child.text
self.batch_statuses[batch_id] = result
return result
def batch_state(self, job_id, batch_id, reload=False):
status = self.batch_status(job_id, batch_id, reload=reload)
if 'state' in status:
return status['state']
else:
return None
def is_batch_done(self, job_id, batch_id):
batch_state = self.batch_state(job_id, batch_id, reload=True)
if batch_state in bulk_states.ERROR_STATES:
status = self.batch_status(job_id, batch_id)
raise BulkBatchFailed(job_id, batch_id, status['stateMessage'])
return batch_state == bulk_states.COMPLETED
# Wait for the given batch to complete, waiting at most timeout seconds
# (defaults to 10 minutes).
def wait_for_batch(self, job_id, batch_id, timeout=60 * 10,
sleep_interval=10):
waited = 0
while not self.is_batch_done(job_id, batch_id) and waited < timeout:
time.sleep(sleep_interval)
waited += sleep_interval
def get_batch_result_ids(self, batch_id, job_id=None):
job_id = job_id or self.lookup_job_id(batch_id)
if not self.is_batch_done(job_id, batch_id):
return False
uri = urlparse.urljoin(
self.endpoint + "/",
"job/{0}/batch/{1}/result".format(
job_id, batch_id),
)
resp = requests.get(uri, headers=self.headers())
if resp.status_code != 200:
return False
tree = ET.fromstring(resp.content)
find_func = getattr(tree, 'iterfind', tree.findall)
return [str(r.text) for r in
find_func("{{{0}}}result".format(self.jobNS))]
def get_all_results_for_batch(self, batch_id, job_id=None, parse_csv=False, logger=None):
"""
Gets result ids and generates each result set from the batch and returns it
as an generator fetching the next result set when needed
Args:
batch_id: id of batch
job_id: id of job, if not provided, it will be looked up
parse_csv: if true, results will be dictionaries instead of lines
"""
result_ids = self.get_batch_result_ids(batch_id, job_id=job_id)
if not result_ids:
if logger:
logger.error('Batch is not complete, may have timed out. '
'batch_id: %s, job_id: %s', batch_id, job_id)
raise RuntimeError('Batch is not complete')
for result_id in result_ids:
yield self.get_batch_results(
batch_id,
result_id,
job_id=job_id,
parse_csv=parse_csv)
def get_batch_results(self, batch_id, result_id, job_id=None,
parse_csv=False, logger=None):
job_id = job_id or self.lookup_job_id(batch_id)
logger = logger or (lambda message: None)
uri = urlparse.urljoin(
self.endpoint + "/",
"job/{0}/batch/{1}/result/{2}".format(
job_id, batch_id, result_id),
)
logger('Downloading bulk result file id=#{0}'.format(result_id))
resp = requests.get(uri, headers=self.headers(), stream=True)
if not parse_csv:
iterator = resp.iter_lines()
else:
iterator = csv.reader(resp.iter_lines(), delimiter=',',
quotechar='"')
BATCH_SIZE = 5000
for i, line in enumerate(iterator):
if i % BATCH_SIZE == 0:
logger('Loading bulk result #{0}'.format(i))
yield line
def get_batch_result_iter(self, job_id, batch_id, parse_csv=False,
logger=None):
"""
Return a line interator over the contents of a batch result document. If
csv=True then parses the first line as the csv header and the iterator
returns dicts.
"""
status = self.batch_status(job_id, batch_id)
if status['state'] != 'Completed':
return None
elif logger:
if 'numberRecordsProcessed' in status:
logger("Bulk batch %d processed %s records" %
(batch_id, status['numberRecordsProcessed']))
if 'numberRecordsFailed' in status:
failed = int(status['numberRecordsFailed'])
if failed > 0:
logger("Bulk batch %d had %d failed records" %
(batch_id, failed))
uri = self.endpoint + \
"/job/%s/batch/%s/result" % (job_id, batch_id)
r = requests.get(uri, headers=self.headers(), stream=True)
result_id = r.text.split("<result>")[1].split("</result>")[0]
uri = self.endpoint + \
"/job/%s/batch/%s/result/%s" % (job_id, batch_id, result_id)
r = requests.get(uri, headers=self.headers(), stream=True)
if parse_csv:
return csv.DictReader(r.iter_lines(chunk_size=2048), delimiter=",",
quotechar='"')
else:
return r.iter_lines(chunk_size=2048)
def get_upload_results(self, job_id, batch_id,
callback=(lambda *args, **kwargs: None),
batch_size=0, logger=None):
job_id = job_id or self.lookup_job_id(batch_id)
if not self.is_batch_done(job_id, batch_id):
return False
http = Http()
uri = self.endpoint + \
"/job/%s/batch/%s/result" % (job_id, batch_id)
resp, content = http.request(uri, method="GET", headers=self.headers())
tf = TemporaryFile()
tf.write(content)
total_remaining = self.count_file_lines(tf)
if logger:
logger("Total records: %d" % total_remaining)
tf.seek(0)
records = []
line_number = 0
col_names = []
reader = csv.reader(tf, delimiter=",", quotechar='"')
for row in reader:
line_number += 1
records.append(UploadResult(*row))
if len(records) == 1:
col_names = records[0]
if batch_size > 0 and len(records) >= (batch_size + 1):
callback(records, total_remaining, line_number)
total_remaining -= (len(records) - 1)
records = [col_names]
callback(records, total_remaining, line_number)
tf.close()
return True
def parse_csv(self, tf, callback, batch_size, total_remaining):
records = []
line_number = 0
col_names = []
reader = csv.reader(tf, delimiter=",", quotechar='"')
for row in reader:
line_number += 1
records.append(row)
if len(records) == 1:
col_names = records[0]
if batch_size > 0 and len(records) >= (batch_size + 1):
callback(records, total_remaining, line_number)
total_remaining -= (len(records) - 1)
records = [col_names]
return records, total_remaining
def count_file_lines(self, tf):
tf.seek(0)
buffer = bytearray(2048)
lines = 0
quotes = 0
while tf.readinto(buffer) > 0:
quoteChar = ord('"')
newline = ord('\n')
for c in buffer:
if c == quoteChar:
quotes += 1
elif c == newline:
if (quotes % 2) == 0:
lines += 1
quotes = 0
return lines
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/client/graphs.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import collections
import datetime
import ipaddress
import string
from king_phisher import its
from king_phisher import ua_parser
from king_phisher.client import gui_utilities
from king_phisher.constants import OSFamily
from gi.repository import Gtk
from smoke_zephyr.requirements import check_requirements
from smoke_zephyr.utilities import unique
# Optional dependency detection: matplotlib (with GTK3 backends) and basemap.
try:
    import matplotlib
    matplotlib.rcParams['backend'] = 'GTK3Cairo'
    from matplotlib import dates
    from matplotlib import patches
    from matplotlib import pyplot
    from matplotlib.backends.backend_gtk3cairo import FigureCanvasGTK3Cairo as FigureCanvas
    from matplotlib.backends.backend_gtk3cairo import FigureManagerGTK3Cairo as FigureManager
    from matplotlib.backends.backend_gtk3 import NavigationToolbar2GTK3 as NavigationToolbar
except ImportError:
    has_matplotlib = False
    """Whether the :py:mod:`matplotlib` module is available."""
else:
    # NOTE(review): check_requirements presumably returns the unsatisfied
    # requirements — confirm against smoke_zephyr.
    if not its.frozen and check_requirements(['matplotlib>=1.4.1']):
        has_matplotlib = False
    else:
        has_matplotlib = True

try:
    import mpl_toolkits.basemap
except ImportError:
    has_matplotlib_basemap = False
    """Whether the :py:mod:`mpl_toolkits.basemap` module is available."""
else:
    if not its.frozen and check_requirements(['basemap>=1.0.7']):
        has_matplotlib_basemap = False
    else:
        has_matplotlib_basemap = True
# Registry of graph provider classes, populated by export_graph_provider().
EXPORTED_GRAPHS = {}

# Map / chart color constants.
MPL_COLOR_LAND = 'gray'
MPL_COLOR_NULL = 'darkcyan'
MPL_COLOR_WATER = 'paleturquoise'

MPL_OS_COLORS = collections.defaultdict(lambda: MPL_COLOR_NULL)
"""Matplotlib colors for the different operating systems defined in the :py:class:`~king_phisher.constants.OSFamily` class."""
MPL_OS_COLORS.update({
    OSFamily.ANDROID: 'olive',
    OSFamily.BLACKBERRY: 'gray',
    OSFamily.IOS: 'violet',
    OSFamily.LINUX: 'palegreen',
    OSFamily.OSX: 'darkviolet',
    OSFamily.WINDOWS: 'gold',
    OSFamily.WINDOWS_PHONE: 'darkgoldenrod'
})

__all__ = ['export_graph_provider', 'get_graph', 'get_graphs', 'CampaignGraph']
def export_graph_provider(cls):
    """
    Decorator to mark classes as valid graph providers. This decorator also sets
    the :py:attr:`~.CampaignGraph.name` attribute.
    :param class cls: The class to mark as a graph provider.
    :return: The *cls* parameter is returned.
    """
    if not issubclass(cls, CampaignGraph):
        raise RuntimeError("{0} is not a subclass of CampaignGraph".format(cls.__name__))
    if not cls.is_available:
        return None
    # Strip the 13-character 'CampaignGraph' prefix for the registry name.
    short_name = cls.__name__[13:]
    cls.name = short_name
    EXPORTED_GRAPHS[short_name] = cls
    return cls
def get_graph(graph_name):
    """
    Return the graph providing class for *graph_name*. The class providing the
    specified graph must have been previously exported using
    :py:func:`.export_graph_provider`.
    :param str graph_name: The name of the graph provider.
    :return: The graph provider class, or None when it is not registered.
    :rtype: :py:class:`.CampaignGraph`
    """
    return EXPORTED_GRAPHS.get(graph_name)
def get_graphs():
    """
    Get a list of all registered graph providers.
    :return: All registered graph provider names, sorted.
    :rtype: list
    """
    return sorted(EXPORTED_GRAPHS)
class CampaignGraph(object):
    """
    A basic graph provider for using :py:mod:`matplotlib` to create graph
    representations of campaign data. This class is meant to be subclassed
    by real providers.
    """
    name = 'Unknown'
    """The name of the graph provider."""
    name_human = 'Unknown'
    """The human readable name of the graph provider used for UI identification."""
    graph_title = 'Unknown'
    """The title that will be given to the graph."""
    table_subscriptions = []
    """A list of tables from which information is needed to produce the graph."""
    is_available = True
def __init__(self, application, size_request=None):
"""
:param tuple size_request: The size to set for the canvas.
"""
self.application = application
self.config = application.config
"""A reference to the King Phisher client configuration."""
self.figure, _ = pyplot.subplots()
self.axes = self.figure.get_axes()
self.canvas = FigureCanvas(self.figure)
self.manager = None
if size_request:
self.canvas.set_size_request(*size_request)
self.canvas.mpl_connect('button_press_event', self.mpl_signal_canvas_button_pressed)
self.canvas.show()
self.navigation_toolbar = NavigationToolbar(self.canvas, self.application.get_active_window())
self.popup_menu = Gtk.Menu.new()
menu_item = Gtk.MenuItem.new_with_label('Export')
menu_item.connect('activate', self.signal_activate_popup_menu_export)
self.popup_menu.append(menu_item)
menu_item = Gtk.MenuItem.new_with_label('Refresh')
menu_item.connect('activate', lambda action: self.refresh())
self.popup_menu.append(menu_item)
menu_item = Gtk.CheckMenuItem.new_with_label('Show Toolbar')
menu_item.connect('toggled', self.signal_toggled_popup_menu_show_toolbar)
self._menu_item_show_toolbar = menu_item
self.popup_menu.append(menu_item)
self.popup_menu.show_all()
self.navigation_toolbar.hide()
@property
def rpc(self):
return self.application.rpc
def _load_graph(self, info_cache):
raise NotImplementedError()
def _graph_bar_set_yparams(self, top_lim):
min_value = top_lim + (top_lim * 0.075)
if min_value <= 25:
scale = 5
else:
scale = scale = 10 ** (len(str(int(min_value))) - 1)
inc_scale = scale
while scale <= min_value:
scale += inc_scale
top_lim = scale
ax = self.axes[0]
yticks = set((round(top_lim * 0.5), top_lim))
ax.set_yticks(tuple(yticks))
ax.set_ylim(top=top_lim)
return
def _graph_null_pie(self, title):
ax = self.axes[0]
ax.pie((100,), labels=(title,), colors=(MPL_COLOR_NULL,), autopct='%1.0f%%', shadow=True, startangle=90)
ax.axis('equal')
return
def add_legend_patch(self, legend_rows, fontsize=None):
handles = []
if not fontsize:
scale = self.markersize_scale
if scale < 5:
fontsize = 'xx-small'
elif scale < 7:
fontsize = 'x-small'
elif scale < 9:
fontsize = 'small'
else:
fontsize = 'medium'
for row in legend_rows:
handles.append(patches.Patch(color=row[0], label=row[1]))
self.axes[0].legend(handles=handles, fontsize=fontsize, loc='lower right')
def graph_bar(self, bars, color=None, xticklabels=None, ylabel=None):
"""
Create a standard bar graph with better defaults for the standard use
cases.
:param list bars: The values of the bars to graph.
:param color: The color of the bars on the graph.
:type color: list, str
:param list xticklabels: The labels to use on the x-axis.
:param str ylabel: The label to give to the y-axis.
:return: The bars created using :py:mod:`matplotlib`
:rtype: `matplotlib.container.BarContainer`
"""
color = color or MPL_COLOR_NULL
width = 0.25
ax = self.axes[0]
self._graph_bar_set_yparams(max(bars) if bars else 0)
bars = ax.bar(range(len(bars)), bars, width, color=color)
ax.set_xticks([float(x) + (width / 2) for x in range(len(bars))])
if xticklabels:
ax.set_xticklabels(xticklabels, rotation=30)
for col in bars:
height = col.get_height()
ax.text(col.get_x() + col.get_width() / 2.0, height, "{0:,}".format(height), ha='center', va='bottom')
if ylabel:
ax.set_ylabel(ylabel)
self.figure.subplots_adjust(bottom=0.25)
return bars
def make_window(self):
"""
Create a window from the figure manager.
:return: The graph in a new, dedicated window.
:rtype: :py:class:`Gtk.Window`
"""
if self.manager == None:
self.manager = FigureManager(self.canvas, 0)
self.navigation_toolbar.destroy()
self.navigation_toolbar = self.manager.toolbar
self._menu_item_show_toolbar.set_active(True)
window = self.manager.window
window.set_transient_for(self.application.get_active_window())
window.set_title(self.graph_title)
return window
@property
def markersize_scale(self):
bbox = self.axes[0].get_window_extent().transformed(self.figure.dpi_scale_trans.inverted())
return max(bbox.width, bbox.width) * self.figure.dpi * 0.01
def mpl_signal_canvas_button_pressed(self, event):
if event.button != 3:
return
self.popup_menu.popup(None, None, None, None, event.button, Gtk.get_current_event_time())
return True
def signal_activate_popup_menu_export(self, action):
dialog = gui_utilities.FileChooser('Export Graph', self.application.get_active_window())
file_name = self.config['campaign_name'] + '.png'
response = dialog.run_quick_save(file_name)
dialog.destroy()
if not response:
return
destination_file = response['target_path']
self.figure.savefig(destination_file, format='png')
def signal_toggled_popup_menu_show_toolbar(self, widget):
if widget.get_property('active'):
self.navigation_toolbar.show()
else:
self.navigation_toolbar.hide()
def load_graph(self):
"""Load the graph information via :py:meth:`.refresh`."""
self.refresh()
def refresh(self, info_cache=None, stop_event=None):
"""
Refresh the graph data by retrieving the information from the
remote server.
:param dict info_cache: An optional cache of data tables.
:param stop_event: An optional object indicating that the operation should stop.
:type stop_event: :py:class:`threading.Event`
:return: A dictionary of cached tables from the server.
:rtype: dict
"""
info_cache = (info_cache or {})
if not self.rpc:
return info_cache
for table in self.table_subscriptions:
if stop_event and stop_event.is_set():
return info_cache
if not table in info_cache:
info_cache[table] = tuple(self.rpc.remote_table('campaign/' + table, self.config['campaign_id']))
for ax in self.axes:
ax.clear()
self._load_graph(info_cache)
self.axes[0].set_title(self.graph_title, y=1.03)
self.canvas.draw()
return info_cache
@export_graph_provider
class CampaignGraphOverview(CampaignGraph):
    """Display a graph which represents an overview of the campaign."""
    graph_title = 'Campaign Overview'
    name_human = 'Bar - Campaign Overview'
    table_subscriptions = ('credentials', 'visits')

    def _load_graph(self, info_cache):
        visits = info_cache['visits']
        creds = info_cache['credentials']
        bars = [
            self.rpc('campaign/messages/count', self.config['campaign_id']),
            len(visits),
            len(unique(visits, key=lambda visit: visit.message_id))
        ]
        if len(creds):
            bars.append(len(creds))
            bars.append(len(unique(creds, key=lambda cred: cred.message_id)))
        xticklabels = ('Messages', 'Visits', 'Unique\nVisits', 'Credentials', 'Unique\nCredentials')[:len(bars)]
        self.graph_bar(bars, xticklabels=xticklabels, ylabel='Grand Total')
        return
@export_graph_provider
class CampaignGraphVisitorInfo(CampaignGraph):
    """Display a graph which shows the different operating systems seen from visitors."""
    graph_title = 'Campaign Visitor OS Information'
    name_human = 'Bar - Visitor OS Information'
    table_subscriptions = ('visits',)

    def _load_graph(self, info_cache):
        os_counter = collections.Counter()
        for visit in info_cache['visits']:
            user_agent = ua_parser.parse_user_agent(visit.visitor_details)
            os_counter.update([user_agent.os_name or 'Unknown OS' if user_agent else 'Unknown OS'])
        # Order OS names by descending visit count.
        os_names = list(os_counter.keys())
        os_names.sort(key=lambda name: os_counter[name])
        os_names.reverse()
        bars = [os_counter[name] for name in os_names]
        self.graph_bar(bars, color=[MPL_OS_COLORS[name] for name in os_names],
                       xticklabels=os_names, ylabel='Total Visits')
        return
@export_graph_provider
class CampaignGraphVisitorInfoPie(CampaignGraph):
    """Display a graph which compares the different operating systems seen from visitors."""
    graph_title = 'Campaign Visitor OS Information'
    name_human = 'Pie - Visitor OS Information'
    table_subscriptions = ('visits',)

    def _load_graph(self, info_cache):
        visits = info_cache['visits']
        if not len(visits):
            self._graph_null_pie('No Visitor Information')
            return
        os_counter = collections.Counter()
        for visit in visits:
            user_agent = ua_parser.parse_user_agent(visit.visitor_details)
            os_counter.update([user_agent.os_name or 'Unknown OS' if user_agent else 'Unknown OS'])
        (os_names, counts) = zip(*os_counter.items())
        ax = self.axes[0]
        ax.pie(counts, labels=os_names, labeldistance=1.05,
               colors=[MPL_OS_COLORS[name] for name in os_names],
               autopct='%1.1f%%', shadow=True, startangle=45)
        ax.axis('equal')
        return
@export_graph_provider
class CampaignGraphVisitsTimeline(CampaignGraph):
    """Display a graph which represents the visits of a campaign over time."""
    graph_title = 'Campaign Visits Timeline'
    name_human = 'Line - Visits Timeline'
    table_subscriptions = ('visits',)

    def _load_graph(self, info_cache):
        first_visits = [visit.first_visit for visit in info_cache['visits']]
        ax = self.axes[0]
        ax.set_ylabel('Number of Visits')
        if not len(first_visits):
            # Nothing to plot; pin both axes at zero.
            ax.set_yticks((0,))
            ax.set_xticks((0,))
            return
        ax.xaxis.set_major_locator(dates.AutoDateLocator())
        ax.xaxis.set_major_formatter(dates.DateFormatter('%Y-%m-%d'))
        first_visits.sort()
        first_visit_span = first_visits[-1] - first_visits[0]
        ax.plot_date(first_visits, range(1, len(first_visits) + 1), '-')
        self.figure.autofmt_xdate()
        # Add finer-grained minor ticks for short campaigns.
        if first_visit_span < datetime.timedelta(7):
            ax.xaxis.set_minor_locator(dates.DayLocator())
            if first_visit_span < datetime.timedelta(3) and len(first_visits) > 1:
                ax.xaxis.set_minor_locator(dates.HourLocator())
        ax.grid(True)
        return
@export_graph_provider
class CampaignGraphMessageResults(CampaignGraph):
    """Display the percentage of messages which resulted in a visit."""
    graph_title = 'Campaign Message Results'
    name_human = 'Pie - Message Results'
    table_subscriptions = ('credentials', 'visits')

    def _load_graph(self, info_cache):
        messages_count = self.rpc('campaign/messages/count', self.config['campaign_id'])
        if not messages_count:
            self._graph_null_pie('No Messages Sent')
            return
        visits_count = len(unique(info_cache['visits'], key=lambda visit: visit.message_id))
        credentials_count = len(unique(info_cache['credentials'], key=lambda cred: cred.message_id))
        assert credentials_count <= visits_count <= messages_count
        labels = ['Without Visit', 'With Visit', 'With Credentials']
        sizes = [
            (float(messages_count - visits_count) / float(messages_count)) * 100,
            (float(visits_count - credentials_count) / float(messages_count)) * 100,
            (float(credentials_count) / float(messages_count)) * 100
        ]
        colors = ['yellowgreen', 'gold', 'indianred']
        explode = [0.1, 0, 0]
        # Drop the empty trailing slices so matplotlib does not draw them.
        if not credentials_count:
            for seq in (labels, sizes, colors, explode):
                seq.pop()
        if not visits_count:
            for seq in (labels, sizes, colors, explode):
                seq.pop()
        ax = self.axes[0]
        ax.pie(sizes, explode=explode, labels=labels, labeldistance=1.05,
               colors=colors, autopct='%1.1f%%', shadow=True, startangle=45)
        ax.axis('equal')
        return
class CampaignGraphVisitsMap(CampaignGraph):
    """A base class to display a map which shows the locations of visit origins."""
    graph_title = 'Campaign Visit Locations'
    table_subscriptions = ('credentials', 'visits')
    is_available = has_matplotlib_basemap
    mpl_color_with_creds = 'indianred'
    mpl_color_without_creds = 'gold'
    draw_states = False

    def _load_graph(self, info_cache):
        visits = unique(info_cache['visits'], key=lambda visit: visit.message_id)
        # Resolve visitor IPs whose messages also yielded credentials.
        cred_message_ids = set(cred.message_id for cred in info_cache['credentials'])
        cred_ips = set([visit.visitor_ip for visit in visits if visit.message_id in cred_message_ids])
        ax = self.axes[0]
        bm = mpl_toolkits.basemap.Basemap(resolution='c', ax=ax, **self.basemap_args)
        if self.draw_states:
            bm.drawstates()
        bm.drawcoastlines()
        bm.drawcountries()
        bm.fillcontinents(color=MPL_COLOR_LAND, lake_color=MPL_COLOR_WATER)
        bm.drawparallels((-60, -30, 0, 30, 60), labels=(1, 1, 0, 0))
        bm.drawmeridians((0, 90, 180, 270), labels=(0, 0, 0, 1))
        bm.drawmapboundary(fill_color=MPL_COLOR_WATER)
        if not visits:
            return
        ctr = collections.Counter()
        ctr.update([visit.visitor_ip for visit in visits])
        # Clamp the marker size to a readable range.
        base_markersize = min(max(self.markersize_scale, 3.05), 9)
        self._plot_visitor_map_points(bm, ctr, base_markersize, cred_ips)
        self.add_legend_patch(((self.mpl_color_with_creds, 'With Credentials'), (self.mpl_color_without_creds, 'Without Credentials')))
        return

    def _plot_visitor_map_points(self, bm, ctr, base_markersize, cred_ips):
        """Plot one marker per public visitor IP, sized by relative visit count."""
        o_high = float(max(ctr.values()))
        o_low = float(min(ctr.values()))
        for visitor_ip, occurrences in ctr.items():
            visitor_ip = ipaddress.ip_address(visitor_ip)
            if visitor_ip.is_loopback or visitor_ip.is_private:
                continue
            geo_location = self.rpc.geoip_lookup(visitor_ip)
            if not geo_location:
                continue
            if not (geo_location.coordinates.longitude and geo_location.coordinates.latitude):
                continue
            pts = bm(geo_location.coordinates.longitude, geo_location.coordinates.latitude)
            if o_high == o_low:
                markersize = 2.0
            else:
                markersize = 1.0 + (float(occurrences) - o_low) / (o_high - o_low)
            markersize = markersize * base_markersize
            bm.plot(pts[0], pts[1], 'o', markerfacecolor=(self.mpl_color_with_creds if visitor_ip in cred_ips else self.mpl_color_without_creds), markersize=markersize)
        return
@export_graph_provider
class CampaignGraphVisitsMapUSA(CampaignGraphVisitsMap):
    """Display a map of the USA which shows the locations of visit origins."""
    name_human = 'Map - Visit Locations (USA)'
    draw_states = True
    # Lambert conformal projection with corner coordinates cropping the view
    # to the continental United States
    basemap_args = dict(projection='lcc', lat_1=30, lon_0=-90, llcrnrlon=-122.5, llcrnrlat=12.5, urcrnrlon=-45, urcrnrlat=50)
@export_graph_provider
class CampaignGraphVisitsMapWorld(CampaignGraphVisitsMap):
    """Display a map of the world which shows the locations of visit origins."""
    name_human = 'Map - Visit Locations (World)'
    # Kavrayskiy VII whole-world projection centered on the prime meridian
    basemap_args = dict(projection='kav7', lon_0=0)
@export_graph_provider
class CampaignGraphPasswordComplexityPie(CampaignGraph):
    """Display a graph which displays the number of passwords which meet standard complexity requirements."""
    graph_title = 'Campaign Password Complexity'
    name_human = 'Pie - Password Complexity'
    table_subscriptions = ('credentials',)
    def _load_graph(self, info_cache):
        """Render a pie chart of complex versus non-complex passwords.

        :param dict info_cache: Cached table rows keyed by table name.
        """
        passwords = set(cred.password for cred in info_cache['credentials'])
        if not passwords:
            # no credentials were harvested, draw the placeholder pie
            self._graph_null_pie('No Credential Information')
            return
        tally = collections.Counter()
        tally.update(self._check_complexity(password) for password in passwords)
        axis = self.axes[0]
        axis.pie(
            (tally[True], tally[False]),
            explode=(0.1, 0),
            labels=('Complex', 'Not Complex'),
            labeldistance=1.05,
            colors=('yellowgreen', 'indianred'),
            autopct='%1.1f%%',
            shadow=True,
            startangle=45
        )
        # equal aspect ratio keeps the pie circular
        axis.axis('equal')
        return
    def _check_complexity(self, password):
        """Return whether *password* is at least 8 characters long and draws
        from at least three of the four standard character classes
        (uppercase, lowercase, digits, punctuation).
        """
        if len(password) < 8:
            return False
        char_classes = (string.ascii_uppercase, string.ascii_lowercase, string.digits, string.punctuation)
        matched = sum(1 for char_set in char_classes if any(char in char_set for char in password))
        return matched >= 3
# ---- end of campaign-graphs section; the following is a SQLAlchemy Core query test module ----
from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, \
is_, in_, not_in_
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, engines
from sqlalchemy import (
exc, sql, func, select, String, Integer, MetaData, and_, ForeignKey,
union, intersect, except_, union_all, VARCHAR, INT, text,
bindparam, literal, not_, literal_column, desc, asc,
TypeDecorator, or_, cast)
from sqlalchemy.engine import default
from sqlalchemy.testing.schema import Table, Column
# ongoing - these are old tests. those which are of general use
# to test a dialect are being slowly migrated to
# sqlalchemy.testing.suite
users = users2 = addresses = metadata = None
class QueryTest(fixtures.TestBase):
    """Round-trip query execution tests run against a live database backend."""
    __backend__ = True
    @classmethod
    def setup_class(cls):
        # create the shared tables once per class; rows are cleaned per-test
        global users, users2, addresses, metadata
        metadata = MetaData(testing.db)
        users = Table(
            'query_users', metadata,
            Column(
                'user_id', INT, primary_key=True,
                test_needs_autoincrement=True),
            Column('user_name', VARCHAR(20)),
            test_needs_acid=True
        )
        addresses = Table(
            'query_addresses', metadata,
            Column(
                'address_id', Integer, primary_key=True,
                test_needs_autoincrement=True),
            Column('user_id', Integer, ForeignKey('query_users.user_id')),
            Column('address', String(30)),
            test_needs_acid=True
        )
        users2 = Table(
            'u2', metadata,
            Column('user_id', INT, primary_key=True),
            Column('user_name', VARCHAR(20)),
            test_needs_acid=True
        )
        metadata.create_all()
    @engines.close_first
    def teardown(self):
        # delete children (addresses) before parents (users) to satisfy the FK
        addresses.delete().execute()
        users.delete().execute()
        users2.delete().execute()
    @classmethod
    def teardown_class(cls):
        metadata.drop_all()
    @testing.fails_on(
        'firebird', "kinterbasdb doesn't send full type information")
    def test_order_by_label(self):
        """test that a label within an ORDER BY works on each backend.
        This test should be modified to support [ticket:1068] when that ticket
        is implemented. For now, you need to put the actual string in the
        ORDER BY.
        """
        users.insert().execute(
            {'user_id': 7, 'user_name': 'jack'},
            {'user_id': 8, 'user_name': 'ed'},
            {'user_id': 9, 'user_name': 'fred'},
        )
        concat = ("test: " + users.c.user_name).label('thedata')
        eq_(
            select([concat]).order_by("thedata").execute().fetchall(),
            [("test: ed",), ("test: fred",), ("test: jack",)]
        )
        # NOTE(review): the identical statement is executed a second time,
        # presumably to exercise re-execution of the same construct -- confirm.
        eq_(
            select([concat]).order_by("thedata").execute().fetchall(),
            [("test: ed",), ("test: fred",), ("test: jack",)]
        )
        concat = ("test: " + users.c.user_name).label('thedata')
        eq_(
            select([concat]).order_by(desc('thedata')).execute().fetchall(),
            [("test: jack",), ("test: fred",), ("test: ed",)]
        )
    @testing.requires.order_by_label_with_expression
    def test_order_by_label_compound(self):
        users.insert().execute(
            {'user_id': 7, 'user_name': 'jack'},
            {'user_id': 8, 'user_name': 'ed'},
            {'user_id': 9, 'user_name': 'fred'},
        )
        concat = ("test: " + users.c.user_name).label('thedata')
        eq_(
            select([concat]).order_by(literal_column('thedata') + "x").
            execute().fetchall(),
            [("test: ed",), ("test: fred",), ("test: jack",)]
        )
    @testing.requires.boolean_col_expressions
    def test_or_and_as_columns(self):
        # boolean expressions used directly in the column clause
        true, false = literal(True), literal(False)
        eq_(testing.db.execute(select([and_(true, false)])).scalar(), False)
        eq_(testing.db.execute(select([and_(true, true)])).scalar(), True)
        eq_(testing.db.execute(select([or_(true, false)])).scalar(), True)
        eq_(testing.db.execute(select([or_(false, false)])).scalar(), False)
        eq_(
            testing.db.execute(select([not_(or_(false, false))])).scalar(),
            True)
        row = testing.db.execute(
            select(
                [or_(false, false).label("x"),
                 and_(true, false).label("y")])).first()
        assert row.x == False  # noqa
        assert row.y == False  # noqa
        row = testing.db.execute(
            select(
                [or_(true, false).label("x"),
                 and_(true, false).label("y")])).first()
        assert row.x == True  # noqa
        assert row.y == False  # noqa
    def test_like_ops(self):
        users.insert().execute(
            {'user_id': 1, 'user_name': 'apples'},
            {'user_id': 2, 'user_name': 'oranges'},
            {'user_id': 3, 'user_name': 'bananas'},
            {'user_id': 4, 'user_name': 'legumes'},
            {'user_id': 5, 'user_name': 'hi % there'},
        )
        # startswith/contains/endswith must escape wildcard characters
        for expr, result in (
            (select([users.c.user_id]).
                where(users.c.user_name.startswith('apple')), [(1,)]),
            (select([users.c.user_id]).
                where(users.c.user_name.contains('i % t')), [(5,)]),
            (select([users.c.user_id]).
                where(users.c.user_name.endswith('anas')), [(3,)]),
            (select([users.c.user_id]).
                where(users.c.user_name.contains('i % t', escape='&')),
                [(5,)]),
        ):
            eq_(expr.execute().fetchall(), result)
    @testing.requires.mod_operator_as_percent_sign
    @testing.emits_warning('.*now automatically escapes.*')
    def test_percents_in_text(self):
        # literal percent signs in text() constructs must pass through
        for expr, result in (
            (text("select 6 % 10"), 6),
            (text("select 17 % 10"), 7),
            (text("select '%'"), '%'),
            (text("select '%%'"), '%%'),
            (text("select '%%%'"), '%%%'),
            (text("select 'hello % world'"), "hello % world")
        ):
            eq_(testing.db.scalar(expr), result)
    def test_ilike(self):
        users.insert().execute(
            {'user_id': 1, 'user_name': 'one'},
            {'user_id': 2, 'user_name': 'TwO'},
            {'user_id': 3, 'user_name': 'ONE'},
            {'user_id': 4, 'user_name': 'OnE'},
        )
        eq_(
            select([users.c.user_id]).where(users.c.user_name.ilike('one')).
            execute().fetchall(), [(1, ), (3, ), (4, )])
        eq_(
            select([users.c.user_id]).where(users.c.user_name.ilike('TWO')).
            execute().fetchall(), [(2, )])
        # plain LIKE stays case sensitive on PostgreSQL
        if testing.against('postgresql'):
            eq_(
                select([users.c.user_id]).
                where(users.c.user_name.like('one')).execute().fetchall(),
                [(1, )])
            eq_(
                select([users.c.user_id]).
                where(users.c.user_name.like('TWO')).execute().fetchall(), [])
    def test_compiled_execute(self):
        users.insert().execute(user_id=7, user_name='jack')
        s = select([users], users.c.user_id == bindparam('id')).compile()
        c = testing.db.connect()
        assert c.execute(s, id=7).fetchall()[0]['user_id'] == 7
    def test_compiled_insert_execute(self):
        users.insert().compile().execute(user_id=7, user_name='jack')
        s = select([users], users.c.user_id == bindparam('id')).compile()
        c = testing.db.connect()
        assert c.execute(s, id=7).fetchall()[0]['user_id'] == 7
    def test_repeated_bindparams(self):
        """Tests that a BindParam can be used more than once.
        This should be run for DB-APIs with both positional and named
        paramstyles.
        """
        users.insert().execute(user_id=7, user_name='jack')
        users.insert().execute(user_id=8, user_name='fred')
        u = bindparam('userid')
        s = users.select(and_(users.c.user_name == u, users.c.user_name == u))
        r = s.execute(userid='fred').fetchall()
        assert len(r) == 1
    def test_bindparam_detection(self):
        # verify where ':name' tokens are (and are not) treated as binds
        dialect = default.DefaultDialect(paramstyle='qmark')
        prep = lambda q: str(sql.text(q).compile(dialect=dialect))
        def a_eq(got, wanted):
            if got != wanted:
                print("Wanted %s" % wanted)
                print("Received %s" % got)
            self.assert_(got == wanted, got)
        a_eq(prep('select foo'), 'select foo')
        a_eq(prep("time='12:30:00'"), "time='12:30:00'")
        a_eq(prep("time='12:30:00'"), "time='12:30:00'")
        a_eq(prep(":this:that"), ":this:that")
        a_eq(prep(":this :that"), "? ?")
        a_eq(prep("(:this),(:that :other)"), "(?),(? ?)")
        a_eq(prep("(:this),(:that:other)"), "(?),(:that:other)")
        a_eq(prep("(:this),(:that,:other)"), "(?),(?,?)")
        a_eq(prep("(:that_:other)"), "(:that_:other)")
        a_eq(prep("(:that_ :other)"), "(? ?)")
        a_eq(prep("(:that_other)"), "(?)")
        a_eq(prep("(:that$other)"), "(?)")
        a_eq(prep("(:that$:other)"), "(:that$:other)")
        a_eq(prep(".:that$ :other."), ".? ?.")
        # backslash escapes a would-be bind parameter
        a_eq(prep(r'select \foo'), r'select \foo')
        a_eq(prep(r"time='12\:30:00'"), r"time='12\:30:00'")
        a_eq(prep(":this \:that"), "? :that")
        a_eq(prep(r"(\:that$other)"), "(:that$other)")
        a_eq(prep(r".\:that$ :other."), ".:that$ ?.")
    @testing.requires.standalone_binds
    def test_select_from_bindparam(self):
        """Test result row processing when selecting from a plain bind
        param."""
        class MyInteger(TypeDecorator):
            impl = Integer
            def process_bind_param(self, value, dialect):
                # strip the 'INT_' prefix before sending to the DB
                return int(value[4:])
            def process_result_value(self, value, dialect):
                # restore the prefix on the way back out
                return "INT_%d" % value
        eq_(
            testing.db.scalar(select([cast("INT_5", type_=MyInteger)])),
            "INT_5"
        )
        eq_(
            testing.db.scalar(
                select([cast("INT_5", type_=MyInteger).label('foo')])),
            "INT_5"
        )
    def test_order_by(self):
        """Exercises ORDER BY clause generation.
        Tests simple, compound, aliased and DESC clauses.
        """
        users.insert().execute(user_id=1, user_name='c')
        users.insert().execute(user_id=2, user_name='b')
        users.insert().execute(user_id=3, user_name='a')
        def a_eq(executable, wanted):
            got = list(executable.execute())
            eq_(got, wanted)
        # every case is run with and without column labels
        for labels in False, True:
            a_eq(users.select(order_by=[users.c.user_id],
                              use_labels=labels),
                 [(1, 'c'), (2, 'b'), (3, 'a')])
            a_eq(users.select(order_by=[users.c.user_name, users.c.user_id],
                              use_labels=labels),
                 [(3, 'a'), (2, 'b'), (1, 'c')])
            a_eq(select([users.c.user_id.label('foo')],
                        use_labels=labels,
                        order_by=[users.c.user_id]),
                 [(1,), (2,), (3,)])
            a_eq(select([users.c.user_id.label('foo'), users.c.user_name],
                        use_labels=labels,
                        order_by=[users.c.user_name, users.c.user_id]),
                 [(3, 'a'), (2, 'b'), (1, 'c')])
            a_eq(users.select(distinct=True,
                              use_labels=labels,
                              order_by=[users.c.user_id]),
                 [(1, 'c'), (2, 'b'), (3, 'a')])
            a_eq(select([users.c.user_id.label('foo')],
                        distinct=True,
                        use_labels=labels,
                        order_by=[users.c.user_id]),
                 [(1,), (2,), (3,)])
            a_eq(select([users.c.user_id.label('a'),
                         users.c.user_id.label('b'),
                         users.c.user_name],
                        use_labels=labels,
                        order_by=[users.c.user_id]),
                 [(1, 1, 'c'), (2, 2, 'b'), (3, 3, 'a')])
            a_eq(users.select(distinct=True,
                              use_labels=labels,
                              order_by=[desc(users.c.user_id)]),
                 [(3, 'a'), (2, 'b'), (1, 'c')])
            a_eq(select([users.c.user_id.label('foo')],
                        distinct=True,
                        use_labels=labels,
                        order_by=[users.c.user_id.desc()]),
                 [(3,), (2,), (1,)])
    @testing.requires.nullsordering
    def test_order_by_nulls(self):
        """Exercises ORDER BY clause generation.
        Tests simple, compound, aliased and DESC clauses.
        """
        # user 1 has a NULL user_name on purpose
        users.insert().execute(user_id=1)
        users.insert().execute(user_id=2, user_name='b')
        users.insert().execute(user_id=3, user_name='a')
        def a_eq(executable, wanted):
            got = list(executable.execute())
            eq_(got, wanted)
        for labels in False, True:
            a_eq(users.select(order_by=[users.c.user_name.nullsfirst()],
                              use_labels=labels),
                 [(1, None), (3, 'a'), (2, 'b')])
            a_eq(users.select(order_by=[users.c.user_name.nullslast()],
                              use_labels=labels),
                 [(3, 'a'), (2, 'b'), (1, None)])
            a_eq(users.select(order_by=[asc(users.c.user_name).nullsfirst()],
                              use_labels=labels),
                 [(1, None), (3, 'a'), (2, 'b')])
            a_eq(users.select(order_by=[asc(users.c.user_name).nullslast()],
                              use_labels=labels),
                 [(3, 'a'), (2, 'b'), (1, None)])
            a_eq(users.select(order_by=[users.c.user_name.desc().nullsfirst()],
                              use_labels=labels),
                 [(1, None), (2, 'b'), (3, 'a')])
            a_eq(
                users.select(
                    order_by=[users.c.user_name.desc().nullslast()],
                    use_labels=labels),
                [(2, 'b'), (3, 'a'), (1, None)])
            a_eq(
                users.select(
                    order_by=[desc(users.c.user_name).nullsfirst()],
                    use_labels=labels),
                [(1, None), (2, 'b'), (3, 'a')])
            a_eq(users.select(order_by=[desc(users.c.user_name).nullslast()],
                              use_labels=labels),
                 [(2, 'b'), (3, 'a'), (1, None)])
            a_eq(
                users.select(
                    order_by=[users.c.user_name.nullsfirst(), users.c.user_id],
                    use_labels=labels),
                [(1, None), (3, 'a'), (2, 'b')])
            a_eq(
                users.select(
                    order_by=[users.c.user_name.nullslast(), users.c.user_id],
                    use_labels=labels),
                [(3, 'a'), (2, 'b'), (1, None)])
    @testing.emits_warning('.*empty sequence.*')
    def test_in_filtering(self):
        """test the behavior of the in_() function."""
        users.insert().execute(user_id=7, user_name='jack')
        users.insert().execute(user_id=8, user_name='fred')
        users.insert().execute(user_id=9, user_name=None)
        s = users.select(users.c.user_name.in_([]))
        r = s.execute().fetchall()
        # No username is in empty set
        assert len(r) == 0
        s = users.select(not_(users.c.user_name.in_([])))
        r = s.execute().fetchall()
        # All usernames with a value are outside an empty set
        assert len(r) == 2
        s = users.select(users.c.user_name.in_(['jack', 'fred']))
        r = s.execute().fetchall()
        assert len(r) == 2
        s = users.select(not_(users.c.user_name.in_(['jack', 'fred'])))
        r = s.execute().fetchall()
        # Null values are not outside any set
        assert len(r) == 0
    @testing.emits_warning('.*empty sequence.*')
    @testing.fails_on('firebird', "uses sql-92 rules")
    @testing.fails_on('sybase', "uses sql-92 rules")
    @testing.fails_if(
        lambda: testing.against('mssql+pyodbc') and not
        testing.db.dialect.freetds, "uses sql-92 rules")
    def test_bind_in(self):
        """test calling IN against a bind parameter.
        this isn't allowed on several platforms since we
        generate ? = ?.
        """
        users.insert().execute(user_id=7, user_name='jack')
        users.insert().execute(user_id=8, user_name='fred')
        users.insert().execute(user_id=9, user_name=None)
        u = bindparam('search_key')
        s = users.select(not_(u.in_([])))
        r = s.execute(search_key='john').fetchall()
        assert len(r) == 3
        r = s.execute(search_key=None).fetchall()
        assert len(r) == 0
    @testing.emits_warning('.*empty sequence.*')
    def test_literal_in(self):
        """similar to test_bind_in but use a bind with a value."""
        users.insert().execute(user_id=7, user_name='jack')
        users.insert().execute(user_id=8, user_name='fred')
        users.insert().execute(user_id=9, user_name=None)
        s = users.select(not_(literal("john").in_([])))
        r = s.execute().fetchall()
        assert len(r) == 3
    @testing.emits_warning('.*empty sequence.*')
    @testing.requires.boolean_col_expressions
    def test_in_filtering_advanced(self):
        """test the behavior of the in_() function when
        comparing against an empty collection, specifically
        that a proper boolean value is generated.
        """
        users.insert().execute(user_id=7, user_name='jack')
        users.insert().execute(user_id=8, user_name='fred')
        users.insert().execute(user_id=9, user_name=None)
        s = users.select(users.c.user_name.in_([]) == True)  # noqa
        r = s.execute().fetchall()
        assert len(r) == 0
        s = users.select(users.c.user_name.in_([]) == False)  # noqa
        r = s.execute().fetchall()
        assert len(r) == 2
        s = users.select(users.c.user_name.in_([]) == None)  # noqa
        r = s.execute().fetchall()
        assert len(r) == 1
class RequiredBindTest(fixtures.TablesTest):
    """Test that 'required' bind parameters raise when no value is supplied."""
    # no table creation or row cleanup is needed; statements are never executed
    # successfully against the table
    run_create_tables = None
    run_deletes = None
    @classmethod
    def define_tables(cls, metadata):
        Table(
            'foo', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', String(50)),
            Column('x', Integer)
        )
    def _assert_raises(self, stmt, params):
        # both keyword-style and dict-style parameter passing must raise
        assert_raises_message(
            exc.StatementError,
            "A value is required for bind parameter 'x'",
            testing.db.execute, stmt, **params)
        assert_raises_message(
            exc.StatementError,
            "A value is required for bind parameter 'x'",
            testing.db.execute, stmt, params)
    def test_insert(self):
        stmt = self.tables.foo.insert().values(
            x=bindparam('x'), data=bindparam('data'))
        self._assert_raises(stmt, {'data': 'data'})
    def test_select_where(self):
        stmt = select([self.tables.foo]). \
            where(self.tables.foo.c.data == bindparam('data')). \
            where(self.tables.foo.c.x == bindparam('x'))
        self._assert_raises(stmt, {'data': 'data'})
    @testing.requires.standalone_binds
    def test_select_columns(self):
        stmt = select([bindparam('data'), bindparam('x')])
        self._assert_raises(
            stmt, {'data': 'data'}
        )
    def test_text(self):
        stmt = text("select * from foo where x=:x and data=:data1")
        self._assert_raises(
            stmt, {'data1': 'data'}
        )
    def test_required_flag(self):
        # bindparam is required when it has no value and no callable
        is_(bindparam('foo').required, True)
        is_(bindparam('foo', required=False).required, False)
        is_(bindparam('foo', 'bar').required, False)
        is_(bindparam('foo', 'bar', required=True).required, True)
        c = lambda: None
        is_(bindparam('foo', callable_=c, required=True).required, True)
        is_(bindparam('foo', callable_=c).required, False)
        is_(bindparam('foo', callable_=c, required=False).required, False)
class LimitTest(fixtures.TestBase):
    """Test LIMIT / OFFSET behavior, alone and combined with DISTINCT."""
    __backend__ = True
    @classmethod
    def setup_class(cls):
        global users, addresses, metadata
        metadata = MetaData(testing.db)
        users = Table(
            'query_users', metadata,
            Column('user_id', INT, primary_key=True),
            Column('user_name', VARCHAR(20)),
        )
        addresses = Table(
            'query_addresses', metadata,
            Column('address_id', Integer, primary_key=True),
            Column('user_id', Integer, ForeignKey('query_users.user_id')),
            Column('address', String(30)))
        metadata.create_all()
        # 'addr1' and 'addr5' are intentionally duplicated, so seven rows
        # yield five DISTINCT addresses
        users.insert().execute(user_id=1, user_name='john')
        addresses.insert().execute(address_id=1, user_id=1, address='addr1')
        users.insert().execute(user_id=2, user_name='jack')
        addresses.insert().execute(address_id=2, user_id=2, address='addr1')
        users.insert().execute(user_id=3, user_name='ed')
        addresses.insert().execute(address_id=3, user_id=3, address='addr2')
        users.insert().execute(user_id=4, user_name='wendy')
        addresses.insert().execute(address_id=4, user_id=4, address='addr3')
        users.insert().execute(user_id=5, user_name='laura')
        addresses.insert().execute(address_id=5, user_id=5, address='addr4')
        users.insert().execute(user_id=6, user_name='ralph')
        addresses.insert().execute(address_id=6, user_id=6, address='addr5')
        users.insert().execute(user_id=7, user_name='fido')
        addresses.insert().execute(address_id=7, user_id=7, address='addr5')
    @classmethod
    def teardown_class(cls):
        metadata.drop_all()
    def test_select_limit(self):
        r = users.select(limit=3, order_by=[users.c.user_id]).execute(). \
            fetchall()
        self.assert_(r == [(1, 'john'), (2, 'jack'), (3, 'ed')], repr(r))
    @testing.requires.offset
    def test_select_limit_offset(self):
        """Test the interaction between limit and offset"""
        r = users.select(limit=3, offset=2, order_by=[users.c.user_id]). \
            execute().fetchall()
        self.assert_(r == [(3, 'ed'), (4, 'wendy'), (5, 'laura')])
        r = users.select(offset=5, order_by=[users.c.user_id]).execute(). \
            fetchall()
        self.assert_(r == [(6, 'ralph'), (7, 'fido')])
    def test_select_distinct_limit(self):
        """Test the interaction between limit and distinct"""
        r = sorted(
            [x[0] for x in select([addresses.c.address]).distinct().
                limit(3).order_by(addresses.c.address).execute().fetchall()])
        self.assert_(len(r) == 3, repr(r))
        self.assert_(r[0] != r[1] and r[1] != r[2], repr(r))
    @testing.requires.offset
    @testing.fails_on('mssql', 'FIXME: unknown')
    def test_select_distinct_offset(self):
        """Test the interaction between distinct and offset"""
        r = sorted(
            [x[0] for x in select([addresses.c.address]).distinct().
                offset(1).order_by(addresses.c.address).
                execute().fetchall()])
        eq_(len(r), 4)
        # bug fix: this previously read ``r[2] != [3]``, comparing a string
        # against the literal list [3] (always true); it now checks that all
        # four adjacent result values are distinct.
        self.assert_(
            r[0] != r[1] and r[1] != r[2] and r[2] != r[3], repr(r))
    @testing.requires.offset
    def test_select_distinct_limit_offset(self):
        """Test the interaction between limit and limit/offset"""
        r = select([addresses.c.address]).order_by(addresses.c.address). \
            distinct().offset(2).limit(3).execute().fetchall()
        self.assert_(len(r) == 3, repr(r))
        self.assert_(r[0] != r[1] and r[1] != r[2], repr(r))
class CompoundTest(fixtures.TestBase):
    """test compound statements like UNION, INTERSECT, particularly their
    ability to nest on different databases."""
    __backend__ = True
    @classmethod
    def setup_class(cls):
        # three structurally identical tables with overlapping col3/col4 data
        global metadata, t1, t2, t3
        metadata = MetaData(testing.db)
        t1 = Table(
            't1', metadata,
            Column(
                'col1', Integer, test_needs_autoincrement=True,
                primary_key=True),
            Column('col2', String(30)),
            Column('col3', String(40)),
            Column('col4', String(30)))
        t2 = Table(
            't2', metadata,
            Column(
                'col1', Integer, test_needs_autoincrement=True,
                primary_key=True),
            Column('col2', String(30)),
            Column('col3', String(40)),
            Column('col4', String(30)))
        t3 = Table(
            't3', metadata,
            Column(
                'col1', Integer, test_needs_autoincrement=True,
                primary_key=True),
            Column('col2', String(30)),
            Column('col3', String(40)),
            Column('col4', String(30)))
        metadata.create_all()
        t1.insert().execute([
            dict(col2="t1col2r1", col3="aaa", col4="aaa"),
            dict(col2="t1col2r2", col3="bbb", col4="bbb"),
            dict(col2="t1col2r3", col3="ccc", col4="ccc"),
        ])
        t2.insert().execute([
            dict(col2="t2col2r1", col3="aaa", col4="bbb"),
            dict(col2="t2col2r2", col3="bbb", col4="ccc"),
            dict(col2="t2col2r3", col3="ccc", col4="aaa"),
        ])
        t3.insert().execute([
            dict(col2="t3col2r1", col3="aaa", col4="ccc"),
            dict(col2="t3col2r2", col3="bbb", col4="aaa"),
            dict(col2="t3col2r3", col3="ccc", col4="bbb"),
        ])
    @engines.close_first
    def teardown(self):
        pass
    @classmethod
    def teardown_class(cls):
        metadata.drop_all()
    def _fetchall_sorted(self, executed):
        # compound select results are unordered; sort them for comparison
        return sorted([tuple(row) for row in executed.fetchall()])
    @testing.requires.subqueries
    def test_union(self):
        (s1, s2) = (
            select([t1.c.col3.label('col3'), t1.c.col4.label('col4')],
                   t1.c.col2.in_(["t1col2r1", "t1col2r2"])),
            select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
                   t2.c.col2.in_(["t2col2r2", "t2col2r3"]))
        )
        u = union(s1, s2)
        wanted = [('aaa', 'aaa'), ('bbb', 'bbb'), ('bbb', 'ccc'),
                  ('ccc', 'aaa')]
        found1 = self._fetchall_sorted(u.execute())
        eq_(found1, wanted)
        # the same union must also work when selected from as an alias
        found2 = self._fetchall_sorted(u.alias('bar').select().execute())
        eq_(found2, wanted)
    @testing.fails_on('firebird', "doesn't like ORDER BY with UNIONs")
    def test_union_ordered(self):
        (s1, s2) = (
            select([t1.c.col3.label('col3'), t1.c.col4.label('col4')],
                   t1.c.col2.in_(["t1col2r1", "t1col2r2"])),
            select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
                   t2.c.col2.in_(["t2col2r2", "t2col2r3"]))
        )
        u = union(s1, s2, order_by=['col3', 'col4'])
        wanted = [('aaa', 'aaa'), ('bbb', 'bbb'), ('bbb', 'ccc'),
                  ('ccc', 'aaa')]
        eq_(u.execute().fetchall(), wanted)
    @testing.fails_on('firebird', "doesn't like ORDER BY with UNIONs")
    @testing.requires.subqueries
    def test_union_ordered_alias(self):
        (s1, s2) = (
            select([t1.c.col3.label('col3'), t1.c.col4.label('col4')],
                   t1.c.col2.in_(["t1col2r1", "t1col2r2"])),
            select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
                   t2.c.col2.in_(["t2col2r2", "t2col2r3"]))
        )
        u = union(s1, s2, order_by=['col3', 'col4'])
        wanted = [('aaa', 'aaa'), ('bbb', 'bbb'), ('bbb', 'ccc'),
                  ('ccc', 'aaa')]
        eq_(u.alias('bar').select().execute().fetchall(), wanted)
    @testing.crashes('oracle', 'FIXME: unknown, verify not fails_on')
    @testing.fails_on(
        'firebird',
        "has trouble extracting anonymous column from union subquery")
    @testing.fails_on('mysql', 'FIXME: unknown')
    @testing.fails_on('sqlite', 'FIXME: unknown')
    def test_union_all(self):
        # UNION ALL of a plain select with a nested UNION
        e = union_all(
            select([t1.c.col3]),
            union(
                select([t1.c.col3]),
                select([t1.c.col3]),
            )
        )
        wanted = [('aaa',), ('aaa',), ('bbb',), ('bbb',), ('ccc',), ('ccc',)]
        found1 = self._fetchall_sorted(e.execute())
        eq_(found1, wanted)
        found2 = self._fetchall_sorted(e.alias('foo').select().execute())
        eq_(found2, wanted)
    def test_union_all_lightweight(self):
        """like test_union_all, but breaks the sub-union into
        a subquery with an explicit column reference on the outside,
        more palatable to a wider variety of engines.
        """
        u = union(
            select([t1.c.col3]),
            select([t1.c.col3]),
        ).alias()
        e = union_all(
            select([t1.c.col3]),
            select([u.c.col3])
        )
        wanted = [('aaa',), ('aaa',), ('bbb',), ('bbb',), ('ccc',), ('ccc',)]
        found1 = self._fetchall_sorted(e.execute())
        eq_(found1, wanted)
        found2 = self._fetchall_sorted(e.alias('foo').select().execute())
        eq_(found2, wanted)
    @testing.requires.intersect
    def test_intersect(self):
        i = intersect(
            select([t2.c.col3, t2.c.col4]),
            select([t2.c.col3, t2.c.col4], t2.c.col4 == t3.c.col3)
        )
        wanted = [('aaa', 'bbb'), ('bbb', 'ccc'), ('ccc', 'aaa')]
        found1 = self._fetchall_sorted(i.execute())
        eq_(found1, wanted)
        found2 = self._fetchall_sorted(i.alias('bar').select().execute())
        eq_(found2, wanted)
    @testing.requires.except_
    @testing.fails_on('sqlite', "Can't handle this style of nesting")
    def test_except_style1(self):
        e = except_(union(
            select([t1.c.col3, t1.c.col4]),
            select([t2.c.col3, t2.c.col4]),
            select([t3.c.col3, t3.c.col4]),
        ), select([t2.c.col3, t2.c.col4]))
        wanted = [('aaa', 'aaa'), ('aaa', 'ccc'), ('bbb', 'aaa'),
                  ('bbb', 'bbb'), ('ccc', 'bbb'), ('ccc', 'ccc')]
        found = self._fetchall_sorted(e.alias().select().execute())
        eq_(found, wanted)
    @testing.requires.except_
    def test_except_style2(self):
        # same as style1, but add alias().select() to the except_().
        # sqlite can handle it now.
        e = except_(union(
            select([t1.c.col3, t1.c.col4]),
            select([t2.c.col3, t2.c.col4]),
            select([t3.c.col3, t3.c.col4]),
        ).alias().select(), select([t2.c.col3, t2.c.col4]))
        wanted = [('aaa', 'aaa'), ('aaa', 'ccc'), ('bbb', 'aaa'),
                  ('bbb', 'bbb'), ('ccc', 'bbb'), ('ccc', 'ccc')]
        found1 = self._fetchall_sorted(e.execute())
        eq_(found1, wanted)
        found2 = self._fetchall_sorted(e.alias().select().execute())
        eq_(found2, wanted)
    @testing.fails_on('sqlite', "Can't handle this style of nesting")
    @testing.requires.except_
    def test_except_style3(self):
        # aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc
        e = except_(
            select([t1.c.col3]),  # aaa, bbb, ccc
            except_(
                select([t2.c.col3]),  # aaa, bbb, ccc
                select([t3.c.col3], t3.c.col3 == 'ccc'),  # ccc
            )
        )
        eq_(e.execute().fetchall(), [('ccc',)])
        eq_(e.alias('foo').select().execute().fetchall(), [('ccc',)])
    @testing.requires.except_
    def test_except_style4(self):
        # aaa, bbb, ccc - (aaa, bbb, ccc - (ccc)) = ccc
        e = except_(
            select([t1.c.col3]),  # aaa, bbb, ccc
            except_(
                select([t2.c.col3]),  # aaa, bbb, ccc
                select([t3.c.col3], t3.c.col3 == 'ccc'),  # ccc
            ).alias().select()
        )
        eq_(e.execute().fetchall(), [('ccc',)])
        eq_(
            e.alias().select().execute().fetchall(),
            [('ccc',)]
        )
    @testing.requires.intersect
    @testing.fails_on('sqlite', "sqlite can't handle leading parenthesis")
    def test_intersect_unions(self):
        u = intersect(
            union(
                select([t1.c.col3, t1.c.col4]),
                select([t3.c.col3, t3.c.col4]),
            ),
            union(
                select([t2.c.col3, t2.c.col4]),
                select([t3.c.col3, t3.c.col4]),
            ).alias().select()
        )
        wanted = [('aaa', 'ccc'), ('bbb', 'aaa'), ('ccc', 'bbb')]
        found = self._fetchall_sorted(u.execute())
        eq_(found, wanted)
    @testing.requires.intersect
    def test_intersect_unions_2(self):
        # same as test_intersect_unions with both sides wrapped in subqueries
        u = intersect(
            union(
                select([t1.c.col3, t1.c.col4]),
                select([t3.c.col3, t3.c.col4]),
            ).alias().select(),
            union(
                select([t2.c.col3, t2.c.col4]),
                select([t3.c.col3, t3.c.col4]),
            ).alias().select()
        )
        wanted = [('aaa', 'ccc'), ('bbb', 'aaa'), ('ccc', 'bbb')]
        found = self._fetchall_sorted(u.execute())
        eq_(found, wanted)
    @testing.requires.intersect
    def test_intersect_unions_3(self):
        u = intersect(
            select([t2.c.col3, t2.c.col4]),
            union(
                select([t1.c.col3, t1.c.col4]),
                select([t2.c.col3, t2.c.col4]),
                select([t3.c.col3, t3.c.col4]),
            ).alias().select()
        )
        wanted = [('aaa', 'bbb'), ('bbb', 'ccc'), ('ccc', 'aaa')]
        found = self._fetchall_sorted(u.execute())
        eq_(found, wanted)
    @testing.requires.intersect
    def test_composite_alias(self):
        # the entire compound statement is itself aliased and selected from
        ua = intersect(
            select([t2.c.col3, t2.c.col4]),
            union(
                select([t1.c.col3, t1.c.col4]),
                select([t2.c.col3, t2.c.col4]),
                select([t3.c.col3, t3.c.col4]),
            ).alias().select()
        ).alias()
        wanted = [('aaa', 'bbb'), ('bbb', 'ccc'), ('ccc', 'aaa')]
        found = self._fetchall_sorted(ua.select().execute())
        eq_(found, wanted)
t1 = t2 = t3 = None
class JoinTest(fixtures.TestBase):
"""Tests join execution.
The compiled SQL emitted by the dialect might be ANSI joins or
theta joins ('old oracle style', with (+) for OUTER). This test
tries to exercise join syntax and uncover any inconsistencies in
`JOIN rhs ON lhs.col=rhs.col` vs `rhs.col=lhs.col`. At least one
database seems to be sensitive to this.
"""
__backend__ = True
    @classmethod
    def setup_class(cls):
        global metadata
        global t1, t2, t3
        metadata = MetaData(testing.db)
        t1 = Table('t1', metadata,
                   Column('t1_id', Integer, primary_key=True),
                   Column('name', String(32)))
        t2 = Table('t2', metadata,
                   Column('t2_id', Integer, primary_key=True),
                   Column('t1_id', Integer, ForeignKey('t1.t1_id')),
                   Column('name', String(32)))
        t3 = Table('t3', metadata,
                   Column('t3_id', Integer, primary_key=True),
                   Column('t2_id', Integer, ForeignKey('t2.t2_id')),
                   Column('name', String(32)))
        metadata.drop_all()
        metadata.create_all()
        # fixture rows form a chain with deliberate gaps for outer joins:
        # t1.10 -> t2.20 -> t3.30
        # t1.11 -> t2.21
        # t1.12
        t1.insert().execute({'t1_id': 10, 'name': 't1 #10'},
                            {'t1_id': 11, 'name': 't1 #11'},
                            {'t1_id': 12, 'name': 't1 #12'})
        t2.insert().execute({'t2_id': 20, 't1_id': 10, 'name': 't2 #20'},
                            {'t2_id': 21, 't1_id': 11, 'name': 't2 #21'})
        t3.insert().execute({'t3_id': 30, 't2_id': 20, 'name': 't3 #30'})
    @classmethod
    def teardown_class(cls):
        # drop the fixture tables created in setup_class
        metadata.drop_all()
    def assertRows(self, statement, expected):
        """Execute a statement and assert that rows returned equal expected."""
        # both sides are sorted, so row ordering is not part of the assertion
        found = sorted([tuple(row)
                        for row in statement.execute().fetchall()])
        eq_(found, sorted(expected))
    def test_join_x1(self):
        """Joins t1->t2."""
        # each join is tried with the ON criterion written in both directions
        for criteria in (t1.c.t1_id == t2.c.t1_id, t2.c.t1_id == t1.c.t1_id):
            expr = select(
                [t1.c.t1_id, t2.c.t2_id],
                from_obj=[t1.join(t2, criteria)])
            self.assertRows(expr, [(10, 20), (11, 21)])
    def test_join_x2(self):
        """Joins t1->t2->t3."""
        # NOTE(review): despite the docstring, this body is identical to
        # test_join_x1 and only joins t1->t2; t3 is never joined -- confirm
        # whether the three-table join was intended here.
        for criteria in (t1.c.t1_id == t2.c.t1_id, t2.c.t1_id == t1.c.t1_id):
            expr = select(
                [t1.c.t1_id, t2.c.t2_id],
                from_obj=[t1.join(t2, criteria)])
            self.assertRows(expr, [(10, 20), (11, 21)])
def test_outerjoin_x1(self):
"""Outer joins t1->t2."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id],
from_obj=[t1.join(t2).join(t3, criteria)])
self.assertRows(expr, [(10, 20)])
def test_outerjoin_x2(self):
"""Outer joins t1->t2,t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
from_obj=[t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria)])
self.assertRows(
expr, [(10, 20, 30), (11, 21, None), (12, None, None)])
def test_outerjoin_where_x2_t1(self):
"""Outer joins t1->t2,t3, where on t1."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
t1.c.name == 't1 #10',
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
t1.c.t1_id < 12,
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
def test_outerjoin_where_x2_t2(self):
"""Outer joins t1->t2,t3, where on t2."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
t2.c.name == 't2 #20',
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
t2.c.t2_id < 29,
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
def test_outerjoin_where_x2_t3(self):
"""Outer joins t1->t2,t3, where on t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
t3.c.name == 't3 #30',
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
t3.c.t3_id < 39,
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
def test_outerjoin_where_x2_t1t3(self):
"""Outer joins t1->t2,t3, where on t1 and t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
and_(t1.c.name == 't1 #10', t3.c.name == 't3 #30'),
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
and_(t1.c.t1_id < 19, t3.c.t3_id < 39),
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
def test_outerjoin_where_x2_t1t2(self):
"""Outer joins t1->t2,t3, where on t1 and t2."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
and_(t1.c.name == 't1 #10', t2.c.name == 't2 #20'),
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
and_(t1.c.t1_id < 12, t2.c.t2_id < 39),
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
def test_outerjoin_where_x2_t1t2t3(self):
"""Outer joins t1->t2,t3, where on t1, t2 and t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
and_(t1.c.name == 't1 #10',
t2.c.name == 't2 #20',
t3.c.name == 't3 #30'),
from_obj=[(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
and_(t1.c.t1_id < 19, t2.c.t2_id < 29, t3.c.t3_id < 39),
from_obj=[
(t1.outerjoin(t2, t1.c.t1_id == t2.c.t1_id).
outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
def test_mixed(self):
"""Joins t1->t2, outer t2->t3."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
print(expr)
self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
def test_mixed_where(self):
"""Joins t1->t2, outer t2->t3, plus a where on each table in turn."""
for criteria in (t2.c.t2_id == t3.c.t2_id, t3.c.t2_id == t2.c.t2_id):
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
t1.c.name == 't1 #10',
from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
t2.c.name == 't2 #20',
from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
t3.c.name == 't3 #30',
from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
and_(t1.c.name == 't1 #10', t2.c.name == 't2 #20'),
from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
and_(t2.c.name == 't2 #20', t3.c.name == 't3 #30'),
from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
expr = select(
[t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
and_(t1.c.name == 't1 #10',
t2.c.name == 't2 #20',
t3.c.name == 't3 #30'),
from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
self.assertRows(expr, [(10, 20, 30)])
# Module-level handles populated by OperatorTest.setup_class; reset here so
# the fixtures below start from a clean slate.
metadata = None
flds = None
class OperatorTest(fixtures.TestBase):
    """Exercises scalar SQL operators (modulo, window functions) end-to-end."""
    __backend__ = True
    @classmethod
    def setup_class(cls):
        # Two-row fixture: (5, 'foo') and (13, 'bar'), with an autoincrement
        # primary key so insertion order is recoverable via idcol.
        global metadata, flds
        metadata = MetaData(testing.db)
        flds = Table(
            'flds', metadata,
            Column(
                'idcol', Integer, primary_key=True,
                test_needs_autoincrement=True),
            Column('intcol', Integer),
            Column('strcol', String(50)),
        )
        metadata.create_all()
        flds.insert().execute([
            dict(intcol=5, strcol='foo'),
            dict(intcol=13, strcol='bar')
        ])
    @classmethod
    def teardown_class(cls):
        metadata.drop_all()
    # TODO: seems like more tests warranted for this setup.
    def test_modulo(self):
        # 5 % 3 == 2 and 13 % 3 == 1, ordered by insertion (idcol).
        eq_(
            select([flds.c.intcol % 3],
                   order_by=flds.c.idcol).execute().fetchall(),
            [(2,), (1,)]
        )
    @testing.requires.window_functions
    def test_over(self):
        # row_number() over strcol ordering: 'bar' (13) sorts first, 'foo'
        # (5) second.
        eq_(
            select([
                flds.c.intcol, func.row_number().over(order_by=flds.c.strcol)
            ]).execute().fetchall(),
            [(13, 1), (5, 2)]
        )
| |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script Language Operators. See the @{$python/script_ops} guide.
@@py_func
"""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
import numpy as np
import six
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_script_ops
class FuncRegistry(object):
  """A helper class to keep track of registered py functions.

  FuncRegistry keeps a map from unique tokens (string) to python
  functions, which take numpy arrays and output numpy arrays.
  """

  def __init__(self):
    self._lock = threading.Lock()
    self._unique_id = 0  # GUARDED_BY(self._lock)
    # NOTE(review): _funcs itself is mutated without holding _lock; presumably
    # relying on the GIL for atomic dict ops -- confirm before adding callers
    # that mutate concurrently.
    self._funcs = {}

  def insert(self, func):
    """Registers `func` and returns a unique token for this entry."""
    token = self._next_unique_token()
    self._funcs[token] = func
    return token

  def remove(self, token):
    """Removes the registered function corresponding to `token`."""
    self._funcs.pop(token, None)

  @staticmethod
  def _convert(value):
    """Converts an arg to numpy, avoiding dangerous string and unicode dtypes.

    Numpy pads with zeros when using string and unicode dtypes if different
    components of a tensor have different lengths.  This is bad: ignoring the
    padding is wrong for text data, and removing the padding is wrong for
    binary data.  To avoid this bug, we redo the conversion using an object
    dtype.

    Args:
      value: Value to convert to a numpy array.

    Returns:
      A numpy array.
    """
    result = np.asarray(value, order="C")
    if result.dtype.char in "SU" and result is not value:
      return np.asarray(value, order="C", dtype=object)
    return result

  def __call__(self, token, args):
    """Calls the registered function for `token` with args.

    Raises:
      ValueError: if `token` is not registered.
    """
    # Fix: the original did `self._funcs[token]`, which raises KeyError for an
    # unknown token, so the intended ValueError below was unreachable. Use
    # .get() so a missing/removed token reports the documented error.
    func = self._funcs.get(token, None)
    if func is None:
      raise ValueError("callback %s is not found" % token)
    ret = func(*args)
    # Strings seem to lead to a memory leak here if they're not wrapped in a
    # list. `bytes` is `str` on Python 2, i.e. exactly six.binary_type.
    if isinstance(ret, bytes):
      ret = [ret]
    # Ensures that we return either a single numpy array or a list of numpy
    # arrays.
    if isinstance(ret, (tuple, list)):
      return [self._convert(x) for x in ret]
    else:
      return self._convert(ret)

  def size(self):
    """Returns how many functions are currently registered."""
    return len(self._funcs)

  def _next_unique_token(self):
    """Returns a unique token."""
    with self._lock:
      uid = self._unique_id
      self._unique_id += 1
    return "pyfunc_%d" % uid
# Global registry for py functions.
_py_funcs = FuncRegistry()
# Hand the registry to the C++ trampoline so PyFunc kernels can invoke
# registered Python callables by token at graph-execution time.
pywrap_tensorflow.InitializePyTrampoline(_py_funcs)
class CleanupFunc(object):
  """Unregisters a py_func token from _py_funcs when garbage-collected.

  Instances are attached to a Graph so that the registered function's
  lifetime tracks the graph's lifetime.
  """

  def __init__(self, token):
    """Remember which registry token to clean up later."""
    self._token = token

  def __del__(self):
    """Drop the remembered token from the global function registry."""
    _py_funcs.remove(self._token)
def py_func(func, inp, Tout, stateful=True, name=None):
  """Wraps a python function and uses it as a TensorFlow op.
  Given a python function `func`, which takes numpy arrays as its
  inputs and returns numpy arrays as its outputs, wrap this function as an
  operation in a TensorFlow graph. The following snippet constructs a simple
  TensorFlow graph that invokes the `np.sinh()` NumPy function as a operation
  in the graph:
  ```python
  def my_func(x):
    # x will be a numpy array with the contents of the placeholder below
    return np.sinh(x)
  inp = tf.placeholder(tf.float32)
  y = tf.py_func(my_func, [inp], tf.float32)
  ```
  **N.B.** The `tf.py_func()` operation has the following known limitations:
  * The body of the function (i.e. `func`) will not be serialized in a
    `GraphDef`. Therefore, you should not use this function if you need to
    serialize your model and restore it in a different environment.
  * The operation must run in the same address space as the Python program
    that calls `tf.py_func()`. If you are using distributed TensorFlow, you
    must run a `tf.train.Server` in the same process as the program that calls
    `tf.py_func()` and you must pin the created operation to a device in that
    server (e.g. using `with tf.device():`).
  Args:
    func: A Python function, which accepts a list of NumPy `ndarray` objects
      having element types that match the corresponding `tf.Tensor` objects
      in `inp`, and returns a list of `ndarray` objects (or a single `ndarray`)
      having element types that match the corresponding values in `Tout`.
    inp: A list of `Tensor` objects.
    Tout: A list or tuple of tensorflow data types or a single tensorflow data
      type if there is only one, indicating what `func` returns.
    stateful: (Boolean.) If True, the function should be considered stateful.
      If a function is stateless, when given the same input it will return the
      same output and have no observable side effects. Optimizations such as
      common subexpression elimination are only performed on stateless
      operations.
    name: A name for the operation (optional).
  Returns:
    A list of `Tensor` or a single `Tensor` which `func` computes.
  """
  token = _py_funcs.insert(func)
  # We tie the registered function's life-time with the current
  # default graph. I.e., when the current graph is destroyed, we
  # should remove its py funcs.
  g = ops.get_default_graph()
  # pylint: disable=protected-access
  while isinstance(g, function._FuncGraph):
    # If the py_func was declared inside a _FuncGraph, its lifetime should be
    # bound to that of the outer graph instead.
    g = g._outer_graph
  # CleanupFunc's __del__ unregisters `token`; its lifetime is tied to `g`
  # via the attribute list below.
  cleanup = CleanupFunc(token)
  # TODO(zhifengc): Consider adding a Graph method to collect
  # `cleanup` objects in one of its member.
  if not hasattr(g, "_cleanup_py_funcs_used_in_graph"):
    g._cleanup_py_funcs_used_in_graph = []
  # When g is destroyed, elements in _cleanup_py_funcs_used_in_graph
  # will be destroyed and their __del__ will remove the 'token' from
  # the funcs registry.
  g._cleanup_py_funcs_used_in_graph.append(cleanup)
  # pylint: enable=protected-access
  # Normalize Tout to a list, remembering whether the caller passed a
  # sequence so the return shape can mirror the input convention.
  if isinstance(Tout, (list, tuple)):
    is_list_or_tuple = True
  else:
    Tout = [Tout]
    is_list_or_tuple = False
  # pylint: disable=protected-access
  if stateful:
    result = gen_script_ops._py_func(
        input=inp, token=token, Tout=Tout, name=name)
  else:
    result = gen_script_ops._py_func_stateless(
        input=inp, token=token, Tout=Tout, name=name)
  # pylint: enable=protected-access
  return result if is_list_or_tuple else result[0]
# PyFunc ops execute arbitrary Python code; no gradient is registered for
# either variant.
ops.NotDifferentiable("PyFunc")
ops.NotDifferentiable("PyFuncStateless")
| |
#!/usr/bin/env python2
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import copy
import os
import subprocess
import sys
from subunit import run as subunit_run
from testtools import run as testtools_run
def get_parser(args):
    """Build the CLI argument parser and parse `args`.

    Args:
        args: list of argument strings (typically sys.argv[1:]).

    Returns:
        The populated argparse.Namespace (pretty/slowest/parallel default
        to True via set_defaults).
    """
    parser = argparse.ArgumentParser(
        description='Tool to run openstack tests')
    # blacklist and whitelist are mutually exclusive selection files.
    list_files = parser.add_mutually_exclusive_group()
    list_files.add_argument('--blacklist_file', '-b',
                            help='Path to a blacklist file, this file '
                                 'contains a separate regex exclude on each '
                                 'newline')
    list_files.add_argument('--whitelist_file', '-w',
                            help='Path to a whitelist file, this file '
                                 'contains a separate regex on each newline.')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--regex', '-r',
                       help='A normal testr selection regex. If a blacklist '
                            'file is specified, the regex will be appended '
                            'to the end of the generated regex from that '
                            'file.')
    group.add_argument('--path', metavar='FILE_OR_DIRECTORY',
                       help='A file name or directory of tests to run.')
    # Fix: help text previously read "bypasses test discover and just excute".
    group.add_argument('--no-discover', '-n', metavar='TEST_ID',
                       help="Takes in a single test to bypass test "
                            "discovery and just execute the test specified. "
                            "A file name may be used in place of a test "
                            "name.")
    pretty = parser.add_mutually_exclusive_group()
    pretty.add_argument('--pretty', '-p', dest='pretty', action='store_true',
                        help='Print pretty output from subunit-trace. This is '
                             'mutually exclusive with --subunit')
    pretty.add_argument('--no-pretty', dest='pretty', action='store_false',
                        help='Disable the pretty output with subunit-trace')
    parser.add_argument('--subunit', '-s', action='store_true',
                        help='output the raw subunit v2 from the test run '
                             'this is mutually exclusive with --pretty')
    parser.add_argument('--list', '-l', action='store_true',
                        help='List all the tests which will be run.')
    slowest = parser.add_mutually_exclusive_group()
    slowest.add_argument('--slowest', dest='slowest', action='store_true',
                         help="after the test run print the slowest tests")
    slowest.add_argument('--no-slowest', dest='slowest', action='store_false',
                         help="after the test run don't print the slowest "
                              "tests")
    parser.add_argument('--pdb', metavar='TEST_ID',
                        help='Run a single test that has pdb traces added')
    parallel = parser.add_mutually_exclusive_group()
    parallel.add_argument('--parallel', dest='parallel', action='store_true',
                          help='Run tests in parallel (this is the default)')
    parallel.add_argument('--serial', dest='parallel', action='store_false',
                          help='Run tests serially')
    parser.add_argument('--concurrency', '-c', type=int, metavar='WORKERS',
                        help='The number of workers to use when running in '
                             'parallel. By default this is the number of cpus')
    # Fix: the original help string was missing separator spaces, rendering
    # as "prettyoutput enable will force the loop to run testsserially".
    parser.add_argument('--until-failure', action='store_true',
                        help='Run the tests in a loop until a failure is '
                             'encountered. Running with subunit or pretty '
                             'output enabled will force the loop to run tests '
                             'serially')
    parser.add_argument('--print-exclude', action='store_true',
                        help='If an exclude file is used this option will '
                             'print the comment from the same line and all '
                             'skipped tests before the test run')
    parser.set_defaults(pretty=True, slowest=True, parallel=True)
    return parser.parse_args(args)
def _get_test_list(regex, env=None):
    """Return test ids matching `regex` by shelling out to `testr list-tests`.

    Lines that are empty or contain any of the known environment/noise
    markers are filtered out of the result.
    """
    env = env or copy.deepcopy(os.environ)
    lister = subprocess.Popen(['testr', 'list-tests', regex], env=env,
                              stdout=subprocess.PIPE)
    output = lister.communicate()[0]
    noise_markers = ['OS_', 'CAPTURE', 'TEST_TIMEOUT', 'PYTHON',
                     'subunit.run discover']
    selected = []
    for candidate in output.split('\n'):
        if not candidate:
            continue
        if any(marker in candidate for marker in noise_markers):
            continue
        selected.append(candidate)
    return selected
def print_skips(regex, message):
    """Print the tests excluded by `regex`, headed by `message` if given."""
    skipped = _get_test_list(regex)
    if not skipped:
        return
    if message:
        print(message)
    else:
        print('Skipped because of regex %s:' % regex)
    for test_id in skipped:
        print(test_id)
    # Extra whitespace to separate from subsequent output.
    print('\n')
def path_to_regex(path):
    """Turn a file path into a dotted testr selection regex."""
    stem = os.path.splitext(path)[0]
    return stem.replace('/', '.')
def get_regex_from_whitelist_file(file_path):
    """Build an OR-regex joining every line of a whitelist file.

    Fix: the original left the file handle open; a context manager closes it
    deterministically.
    """
    with open(file_path) as white_file:
        return '|'.join(white_file.read().splitlines())
def construct_regex(blacklist_file, whitelist_file, regex, print_exclude):
    """Combine blacklist/whitelist files and a raw regex into one selector.

    Blacklist lines are of the form ``<regex> # <comment>``; they are folded
    into a negative-lookahead expression. `regex` and the whitelist regex
    (if any) are appended afterwards. When `print_exclude` is set, each
    excluded pattern's matching tests are printed via print_skips().

    Fix: the original never closed the blacklist file handle; use a context
    manager.
    """
    exclude_regex = ''
    if blacklist_file:
        with open(blacklist_file, 'r') as black_file:
            for line in black_file:
                raw_line = line.strip()
                split_line = raw_line.split('#')
                # Before the # is the regex
                line_regex = split_line[0].strip()
                if len(split_line) > 1:
                    # After the # is a comment
                    comment = split_line[1].strip()
                else:
                    comment = ''
                if line_regex:
                    if print_exclude:
                        print_skips(line_regex, comment)
                    if exclude_regex:
                        exclude_regex = '|'.join([line_regex, exclude_regex])
                    else:
                        exclude_regex = line_regex
    if exclude_regex:
        # Negative lookahead: select everything NOT matching the blacklist.
        exclude_regex = "^((?!" + exclude_regex + ").)*$"
    if regex:
        exclude_regex += regex
    if whitelist_file:
        exclude_regex += '%s' % get_regex_from_whitelist_file(whitelist_file)
    return exclude_regex
def call_testr(regex, subunit, pretty, list_tests, slowest, parallel, concur,
               until_failure):
    """Run the selected tests through testr (or per-test subunit runs).

    Returns the runner's exit code; may call exit() directly in the
    until-failure loop-mode branches.
    """
    # Build the base testr command; list_tests replaces it wholesale.
    if parallel:
        cmd = ['testr', 'run', '--parallel']
        if concur:
            cmd.append('--concurrency=%s' % concur)
    else:
        cmd = ['testr', 'run']
    if list_tests:
        cmd = ['testr', 'list-tests']
    elif (subunit or pretty) and not until_failure:
        cmd.append('--subunit')
    elif not (subunit or pretty) and until_failure:
        cmd.append('--until-failure')
    cmd.append(regex)
    env = copy.deepcopy(os.environ)
    # This workaround is necessary because of lp bug 1411804 it's super hacky
    # and makes tons of unfounded assumptions, but it works for the most part
    if (subunit or pretty) and until_failure:
        test_list = _get_test_list(regex, env)
        count = 0
        failed = False
        if not test_list:
            print("No tests to run")
            exit(1)
        # If pretty or subunit output is desired manually loop forever over
        # test individually and generate the desired output in a linear series
        # this avoids 1411804 while retaining most of the desired behavior
        while True:
            for test in test_list:
                if pretty:
                    # Pipe one subunit.run per test through subunit-trace.
                    cmd = ['python', '-m', 'subunit.run', test]
                    ps = subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE)
                    proc = subprocess.Popen(['subunit-trace',
                                             '--no-failure-debug', '-f',
                                             '--no-summary'], env=env,
                                            stdin=ps.stdout)
                    ps.stdout.close()
                    proc.communicate()
                    if proc.returncode > 0:
                        failed = True
                        break
                else:
                    try:
                        subunit_run.main([sys.argv[0], test], sys.stdout)
                    except SystemExit as e:
                        # NOTE(review): comparing a SystemExit instance to an
                        # int works on Python 2 only (file is #!python2);
                        # `e.code` is the portable spelling -- confirm before
                        # porting.
                        if e > 0:
                            print("Ran %s tests without failure" % count)
                            exit(1)
                        else:
                            raise
                count = count + 1
            if failed:
                # NOTE(review): this branch is only reached when `failed` is
                # set, yet it prints "without failure" and exits 0 -- looks
                # inverted; confirm intent against upstream os-testr.
                print("Ran %s tests without failure" % count)
                exit(0)
    # If not until-failure special case call testr like normal
    elif pretty and not list_tests:
        ps = subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE)
        proc = subprocess.Popen(['subunit-trace', '--no-failure-debug', '-f'],
                                env=env, stdin=ps.stdout)
        ps.stdout.close()
    else:
        proc = subprocess.Popen(cmd, env=env)
    proc.communicate()
    return_code = proc.returncode
    if slowest and not list_tests:
        print("\nSlowest Tests:\n")
        slow_proc = subprocess.Popen(['testr', 'slowest'], env=env)
        slow_proc.communicate()
    return return_code
def call_subunit_run(test_id, pretty, subunit):
    """Run a single test id directly, bypassing testr discovery.

    pretty: pipe subunit.run output through subunit-trace and return its
    exit code. subunit: emit raw subunit v2. Otherwise use the plain
    testtools runner. NOTE(review): only the pretty branch returns a value;
    the other two implicitly return None.
    """
    if pretty:
        env = copy.deepcopy(os.environ)
        cmd = ['python', '-m', 'subunit.run', test_id]
        ps = subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE)
        proc = subprocess.Popen(['subunit-trace', '--no-failure-debug', '-f'],
                                env=env, stdin=ps.stdout)
        ps.stdout.close()
        proc.communicate()
        return proc.returncode
    elif subunit:
        subunit_run.main([sys.argv[0], test_id], sys.stdout)
    else:
        testtools_run.main([sys.argv[0], test_id], sys.stdout)
def _select_and_call_runner(opts, exclude_regex):
    """Dispatch to testr or to a direct single-test run and return its code."""
    if not os.path.isdir('.testrepository'):
        # First run in this tree: initialize the testr repository.
        subprocess.call(['testr', 'init'])
    single_test = opts.no_discover or opts.pdb
    if single_test:
        # A file path may be given instead of a test id; translate it.
        if '/' in single_test:
            single_test = path_to_regex(single_test)
        return call_subunit_run(single_test, opts.pretty, opts.subunit)
    return call_testr(exclude_regex, opts.subunit, opts.pretty, opts.list,
                      opts.slowest, opts.parallel, opts.concurrency,
                      opts.until_failure)
def main():
    """Parse CLI options, validate mutually-incompatible flags, run tests.

    Exits with: 2 (subunit+pretty), 3 (list+no-discover), 4 (serial with
    concurrency), 5 (until-failure with pdb/no-discover), else the runner's
    exit code.
    """
    opts = get_parser(sys.argv[1:])
    if opts.pretty and opts.subunit:
        msg = ('Subunit output and pretty output cannot be specified at the '
               'same time')
        print(msg)
        exit(2)
    if opts.list and opts.no_discover:
        msg = ('you can not list tests when you are bypassing discovery to '
               'run a single test')
        print(msg)
        exit(3)
    if not opts.parallel and opts.concurrency:
        msg = "You can't specify a concurrency to use when running serially"
        print(msg)
        exit(4)
    if (opts.pdb or opts.no_discover) and opts.until_failure:
        msg = "You can not use until_failure mode with pdb or no-discover"
        print(msg)
        exit(5)
    # --path is sugar for a regex derived from the file/directory name.
    if opts.path:
        regex = path_to_regex(opts.path)
    else:
        regex = opts.regex
    exclude_regex = construct_regex(opts.blacklist_file,
                                    opts.whitelist_file,
                                    regex,
                                    opts.print_exclude)
    exit(_select_and_call_runner(opts, exclude_regex))
# Standard script entry-point guard.
if __name__ == '__main__':
    main()
| |
#!/usr/bin/env python
# coding=utf-8
# Created by: Li Yao
# Created on: 5/25/20
from django.contrib.auth.decorators import login_required, permission_required
from django.core.paginator import Paginator
from django.core.cache import cache
from django.shortcuts import render
from django.template import loader
from django.http import HttpResponseRedirect, HttpResponse
from django.db.models import Q
from django.utils.translation import gettext as _
from ..tools import error, success, delete_file, handle_uploaded_file, check_disk_quota_lock, get_disk_quota_info, \
page_info
from worker.bases import get_config
from ..forms import *
from QueueDB.models import Job, ProtocolList, Step, Reference, FileArchive, Workspace, Audition
import os
import base64
@login_required
@permission_required("QueueDB.add_job", raise_exception=True)
def add_job(request):
    # POST: validate SingleJobForm and enqueue a single Job (plus an audit
    # record). GET: render the submission page with the protocols this user
    # may run and their disk-quota summary.
    if request.method == 'POST':
        job_form = SingleJobForm(request.POST)
        if job_form.is_valid():
            cd = job_form.cleaned_data
            try:
                # Ensure the parameter string carries at least one ';'
                # terminator, which downstream parsing appears to expect.
                if cd['parameter'].find(';') == -1:
                    cd['parameter'] += ';'
                protocol = ProtocolList.objects.get(id=cd['protocol'])
                if protocol.check_owner(request.user.queuedb_profile_related.delegate):
                    # Attach the session's active workspace, if any.
                    try:
                        if 'workspace' in request.session:
                            ws = Workspace.objects.get(id=request.session['workspace'])
                        else:
                            ws = None
                    except Workspace.DoesNotExist:
                        ws = None
                    job = Job(
                        protocol_id=cd['protocol'],
                        protocol_ver=protocol.ver,
                        job_name=cd['job_name'],
                        parameter=cd['parameter'],
                        run_dir=get_config('env', 'workspace'),
                        user=request.user.queuedb_profile_related.delegate,
                        input_file=cd['input_files'],
                        workspace=ws,
                    )
                    # Refuse new jobs when the user is over quota.
                    if check_disk_quota_lock(request.user.queuedb_profile_related.delegate.id):
                        job.save()
                        # Audit entry mirrors the job's initial parameters
                        # (prev == new at creation time).
                        Audition(operation="Created a new job",
                                 related_job=job,
                                 job_name=job.job_name,
                                 prev_par=job.parameter,
                                 new_par=job.parameter,
                                 prev_input=job.input_file,
                                 current_input=job.input_file,
                                 protocol=job.protocol.name,
                                 protocol_ver=job.protocol_ver,
                                 resume_point=job.resume,
                                 user=job.user
                                 ).save()
                        return success('Successfully added job into queue.')
                    else:
                        return error('You have exceed the disk quota limit! Please delete some files!')
                else:
                    return error('You are not owner of the protocol.')
            except Exception as e:
                return error(e)
        return error(str(job_form.errors))
    else:
        # Staff see every protocol; others see their own plus shared ones.
        if request.user.is_staff:
            available_protocol = ProtocolList.objects.all()
        else:
            available_protocol = ProtocolList.objects.filter(
                Q(user=request.user.queuedb_profile_related.delegate) | Q(user=None)).all()
        dt, du, dp = get_disk_quota_info_with_cache(request.user.queuedb_profile_related.delegate.id)
        return render(request, 'ui/add_job.html', {'form': SingleJobForm,
                                                   'user_protocols': available_protocol,
                                                   't_disk': dt,
                                                   'u_disk': du,
                                                   'disk_per': dp})
@login_required
@permission_required("QueueDB.add_job", raise_exception=True)
def batch_job(request):
    # Accept an uploaded tab-separated job list (protocol_id, job_name,
    # input_file, parameter per line) and bulk-create the jobs.
    if request.method == 'POST':
        form = BatchJobForm(request.POST, request.FILES)
        if form.is_valid():
            file_name = handle_uploaded_file(request.FILES['job_list'])
            try:
                # Cache protocol owner/version lookups per protocol id so the
                # DB is hit only once per distinct protocol in the file.
                protocol_cache = dict()
                try:
                    ws = Workspace.objects.get(id=request.session['workspace'])
                except Workspace.DoesNotExist:
                    ws = None
                with open(file_name) as f:
                    jobs = f.readlines()
                    job_list = []
                    for job in jobs:
                        configurations = job.split('\n')[0].split('\t')
                        # NOTE(review): the error message below says "three
                        # columns" but the check requires exactly four fields
                        # -- confirm which is intended.
                        if len(configurations) == 4:
                            if check_disk_quota_lock(request.user.queuedb_profile_related.delegate.id):
                                protocol_id = int(configurations[0])
                                if protocol_id not in protocol_cache:
                                    try:
                                        protocol = ProtocolList.objects.get(id=protocol_id)
                                        protocol_cache[protocol_id] = (int(protocol.user_id), protocol.ver)
                                    except Exception as e:
                                        return render(request, 'ui/error.html', {'error_msg': e})
                                # Allow the protocol's owner, staff, or
                                # shared (owner id 0) protocols.
                                if protocol_cache[
                                    protocol_id][0] == request.user.queuedb_profile_related.delegate.id or request.user.is_staff or \
                                        protocol_cache[protocol_id][0] == 0:
                                    job_list.append(
                                        Job(
                                            protocol_id=protocol_id,
                                            protocol_ver=protocol_cache[protocol_id][1],
                                            job_name=configurations[1],
                                            input_file=configurations[2],
                                            parameter=configurations[3],
                                            run_dir=get_config('env', 'workspace'),
                                            user=request.user.queuedb_profile_related.delegate,
                                            workspace=ws))
                                else:
                                    return render(request,
                                                  'ui/error.html',
                                                  {'error_msg': 'You are not the owner of the protocol(%s)' %
                                                                protocol_id})
                            else:
                                return render(request,
                                              'ui/error.html',
                                              {
                                                  'error_msg': 'You have exceed the disk quota limit! Please delete some files!'})
                        else:
                            return render(request,
                                          'ui/error.html',
                                          {'error_msg': 'Your job list file must contain three columns.'})
                    Job.objects.bulk_create(job_list)
                return HttpResponseRedirect('/ui/query-job')
            except Exception as e:
                return render(request, 'ui/error.html', {'error_msg': e})
        else:
            return render(request,
                          'ui/error.html',
                          {'error_msg': str(form.errors)})
@login_required
@permission_required("QueueDB.delete_job", raise_exception=True)
@permission_required("QueueDB.change_job", raise_exception=True)
def batch_operation(request):
    # Apply one operation to a comma-separated list of job ids:
    #   'd' = delete (and remove result tree), 't' = terminate, 'r' = rerun.
    if request.method == 'POST':
        job_list = request.POST['jobs'].split(',')
        # Drop empty ids produced by trailing/duplicate commas.
        while '' in job_list:
            job_list.remove('')
        if request.POST['operation'] == 'd':
            for job_id in job_list:
                job = Job.objects.get(id=job_id)
                if job.locked:
                    return error(_("This job is locked, please unlock first"))
                if job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
                    job.delete()
                    # The in-memory instance still carries `result`, so the
                    # file tree can be cleaned after the row is deleted.
                    if job.result is not None:
                        delete_job_file_tree(request, job.result)
                else:
                    return error('Your are not the owner of the job.')
            return success('Ok')
        elif request.POST['operation'] == 't':
            for job_id in job_list:
                job = Job.objects.get(id=job_id)
                if job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
                    job.terminate_job()
                else:
                    return error('Your are not the owner of the job.')
            return success('Ok')
        elif request.POST['operation'] == 'r':
            for job_id in job_list:
                job = Job.objects.get(id=job_id)
                if job.locked:
                    return error(_("This job is locked, please unlock first"))
                if job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
                    job.rerun_job()
                    # Rerun invalidates prior output; remove the old results.
                    if job.result is not None:
                        delete_job_file_tree(request, job.result)
                else:
                    return error('Your are not the owner of the job.')
            return success('Ok')
        else:
            return error('Please choose an operation.')
    else:
        return error('Unsupported operation')
def get_disk_quota_info_with_cache(uid, cache_lifetime=1800):
    """Fetch a user's disk-quota numbers, memoized in the Django cache.

    Parameters
    ----------
    uid : str or int
        User ID
    cache_lifetime : int
        How long (seconds) a computed result stays cached; default 1800.

    Returns
    -------
    dt : int
        All available disk quota
    du : int
        Used disk quota
    dp : int
        Percent of disk quota used
    """
    cache_key = "{0}.disk_quota_cache".format(str(uid))
    cached = cache.get(cache_key)
    if cached is None:
        # Miss: compute fresh values and remember them for next time.
        cached = get_disk_quota_info(uid)
        cache.set(cache_key, cached, cache_lifetime)
    dt, du, dp = cached
    return dt, du, dp
@login_required
@permission_required("QueueDB.delete_job", raise_exception=True)
def delete_job(request):
    # POST-only: delete a job and its result tree, unless the job is locked
    # or has dependent file archives.
    if request.method == 'POST':
        terminate_form = JobManipulateForm(request.POST)
        if terminate_form.is_valid():
            cd = terminate_form.cleaned_data
            # try:
            # NOTE(review): `if True:` is a leftover from the disabled
            # try/except scaffolding above/below -- harmless but confusing.
            if True:
                job = Job.objects.get(id=cd['job'])
                if job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
                    if job.locked:
                        return error(_("This job is locked, please unlock first"))
                    n_archive = 0
                    try:
                        # A job referenced by archives must not be deleted.
                        archives = FileArchive.objects.filter(job=job)
                        n_archive = len(archives)
                        if n_archive == 0:
                            job.delete()
                            # In-memory instance still has `result` after the
                            # row is gone, so cleanup can use it.
                            delete_job_file_tree(request, job.result)
                            return success("Your job has been deleted.")
                        else:
                            return error("Job is under protection.(%d dependent archives)" % n_archive)
                    except Exception as e:
                        return error(e)
                else:
                    return error('Your are not the owner of the job.')
            # except Exception as e:
            #     return error(e)
        else:
            return error(str(terminate_form.errors))
    else:
        return error('Method error')
@login_required
@permission_required("QueueDB.delete_job", raise_exception=True)
def delete_job_file(request, f):
    """Delete one file inside the requesting user's workspace.

    `f` is a base64-encoded path relative to the user's workspace folder.
    """
    workspace_root = get_config('env', 'workspace')
    owner_folder = str(request.user.queuedb_profile_related.delegate.id)
    relative_path = base64.b64decode(f).decode()
    delete_file(os.path.join(workspace_root, owner_folder, relative_path))
    return success('Deleted')
def delete_job_file_tree(request, f):
    """Best-effort removal of a job's result directory tree.

    `f` is a path relative to the requesting user's workspace directory.
    Errors are printed and swallowed so cleanup never breaks the caller.
    """
    try:
        if f is None or f == "":
            return
        user_dir = os.path.join(get_config('env', 'workspace'),
                                str(request.user.queuedb_profile_related.delegate.id))
        target = os.path.join(user_dir, f)
        import shutil
        # Never remove the user's workspace root itself.
        if os.path.exists(target) and not os.path.samefile(user_dir, target):
            shutil.rmtree(target, ignore_errors=True)
    except Exception as e:
        print(e)
@login_required
@permission_required("QueueDB.view_job", raise_exception=True)
def get_job_list(request):
    """Return jobs matching the `q` name filter as select2-style JSON."""
    import json
    name_filter = request.GET["q"]
    if request.user.is_staff:
        # Staff may search across every user's jobs.
        matched = Job.objects.filter(job_name__icontains=name_filter)
    else:
        matched = Job.objects.filter(
            user=request.user.queuedb_profile_related.delegate,
            job_name__icontains=name_filter)
    payload = {"results": [{"text": job.job_name, "id": job.id}
                           for job in matched]}
    return HttpResponse(json.dumps(payload), content_type='application/json')
def get_job_files(job_id, user_id, super_user):
    """Collect metadata for every file under a job's result folder.

    Args:
        job_id: primary key of the Job to inspect.
        user_id: the requester, forwarded to Job.check_owner.
        super_user: currently unused; kept for interface compatibility.
            NOTE(review): presumably meant to bypass the owner check --
            confirm intended semantics before wiring it in.

    Returns:
        A list of dicts (name, file_size, file_create, trace) on success,
        an error-message string if the caller is not the owner, or the
        stringified exception on failure.
    """
    import time
    try:
        user_files = []
        job = Job.objects.get(id=job_id)
        if not job.check_owner(user_id):
            return "Your are not the owner of the job."
        result_folder = job.get_result()
        if result_folder is None:  # job has produced no output yet
            return user_files
        user_path = os.path.join(job.run_dir, str(job.user_id), result_folder)
        # Fix: removed leftover debug `print(user_path)`.
        for root, dirs, files in os.walk(user_path):
            for file_name in files:
                if file_name == ".snapshot.ini":
                    continue
                file_full_path = os.path.join(root, file_name)
                # Strip the result-root prefix regardless of path separator.
                file_path = file_full_path.replace(user_path + '\\', '') \
                    .replace(user_path + '/', '').replace(user_path, '')
                entry = dict()
                entry['name'] = file_path
                entry['file_size'] = os.path.getsize(file_full_path)
                entry['file_create'] = time.ctime(os.path.getctime(file_full_path))
                entry['trace'] = base64.b64encode(
                    os.path.join(result_folder, file_path).encode()).decode()
                user_files.append(entry)
        return user_files
    except Exception as e:
        # Fix: return the message instead of the Exception object so callers
        # that json.dumps() the result (get_job_file_list) do not crash.
        return str(e)
@login_required
@permission_required("QueueDB.view_job", raise_exception=True)
def get_job_file_list(request):
    """Return a job's result-file listing as JSON."""
    import json
    listing = get_job_files(request.GET["id"],
                            request.user.queuedb_profile_related.delegate.id,
                            request.user.is_superuser)
    return HttpResponse(json.dumps(listing), content_type='application/json')
@login_required
@permission_required("QueueDB.change_job", raise_exception=True)
def lock_job(request):
    """Toggle the lock flag of a job owned by the requesting user."""
    if request.method != 'POST':
        return error('Method error')
    manipulate_form = JobManipulateForm(request.POST)
    if not manipulate_form.is_valid():
        return error(str(manipulate_form.errors))
    cd = manipulate_form.cleaned_data
    try:
        job = Job.objects.get(id=cd['job'])
        if not job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
            return error('Your are not the owner of the job.')
        # Flip the flag; the message prefix yields "Locked." / "Unlocked."
        if job.locked:
            job.locked = 0
            prefix = "Unl"
        else:
            job.locked = 1
            prefix = "L"
        job.save()
        return success(f"Your job is {prefix}ocked.")
    except Exception as e:
        return error(e)
@login_required
@permission_required("QueueDB.view_job", raise_exception=True)
def query_job(request):
    """Render the paginated job-list page.

    Jobs are filtered by the session workspace, an optional name filter and
    the 'only_running' / 'only_wrong' cookies; superusers see all jobs.
    """
    try:
        ws = Workspace.objects.get(id=request.session['workspace'])
    except (Workspace.DoesNotExist, KeyError):
        # A missing session key and a stale workspace id were previously two
        # identical except branches; they are handled the same way.
        ws = None
        request.session['workspace'] = -1
    job_name_filter = ''
    if request.GET.get("job_name"):
        name_form = FilterJobNameForm(request.GET)
        if name_form.is_valid():
            job_name_filter = name_form.cleaned_data["job_name"]
    if request.user.is_superuser:
        job_list = Job.objects.order_by('-create_time').filter(job_name__icontains=job_name_filter)
    elif ws is not None:
        job_list = Job.objects.filter(user=request.user.queuedb_profile_related.delegate,
                                      workspace=ws,
                                      job_name__icontains=job_name_filter).order_by('-create_time').all()
    else:
        job_list = Job.objects.filter(user=request.user.queuedb_profile_related.delegate,
                                      job_name__icontains=job_name_filter).order_by('-create_time').all()
    only_running_flag = 0
    only_wrong_flag = 0
    if request.COOKIES.get("only_running") == '1':
        job_list = job_list.filter(status__gt=0)  # queued/running jobs
        only_running_flag = 1
    elif request.COOKIES.get("only_wrong") == '1':
        job_list = job_list.filter(status=-3)  # jobs marked as wrong
        only_wrong_flag = 1
    paginator = Paginator(job_list, 12)
    page = request.GET.get('page')
    jobs = page_info(paginator, page)
    dt, du, dp = get_disk_quota_info_with_cache(request.user.queuedb_profile_related.delegate.id, cache_lifetime=1800)
    return render(request, 'ui/query_job.html', {'job_list': jobs, 't_disk': dt, 'u_disk': du, 'disk_per': dp,
                                                 'workspaces': Workspace.objects.filter(
                                                     user=request.user.queuedb_profile_related.delegate),
                                                 'only_running': only_running_flag,
                                                 'only_wrong': only_wrong_flag,
                                                 'paginator': paginator,
                                                 'current_page': page})
@login_required
@permission_required("QueueDB.view_step", raise_exception=True)
def query_job_parameter(request):
    """List the user-defined {{wildcards}} of a protocol.

    Scans every step of the protocol for {{name}} placeholders and returns
    those not resolved by the engine itself, formatted as "name=;name=;..."
    for the job-creation form, together with the protocol description.
    """
    import re
    parent = request.GET.get('parent')
    user_defined_wildcards = []
    protocol_description = ''
    try:
        protocol = ProtocolList.objects.get(id=parent)
        if protocol.check_owner(request.user.queuedb_profile_related.delegate):
            protocol_description = protocol.description
            # Wildcards the engine resolves automatically; the user never
            # fills these in. BUG FIX: 'LastOutput' was listed twice.
            pre_defined_keys = ['InputFile', 'LastOutput',
                                'Job', 'ThreadN',
                                'Output', 'Uploaded',
                                'Suffix', 'Workspace',
                                'UserBin', 'JobName']
            # Shared and personal references are also auto-resolved.
            reference_list = Reference.objects.filter(Q(user=request.user.queuedb_profile_related.delegate) | Q(user=None)).all()
            pre_defined_keys.extend([reference.name for reference in reference_list])
            steps = Step.objects.filter(parent=protocol.id)
            wildcard_pattern = re.compile(r"\{\{(.*?)\}\}", re.IGNORECASE | re.DOTALL)
            for step in steps:
                for wildcard in re.findall(wildcard_pattern, step.parameter):
                    # A "{{name:default}}" placeholder contributes only "name".
                    wildcard = wildcard.split(':')[0]
                    # Skip malformed names containing list separators.
                    if wildcard.find(";") != -1 or wildcard.find(",") != -1:
                        continue
                    if wildcard not in pre_defined_keys:
                        user_defined_wildcards.append(wildcard)
    except Exception as e:
        return error(e)
    result = dict(par='', desc=protocol_description)
    user_defined_wildcards = list(set(user_defined_wildcards))
    if user_defined_wildcards:
        result['par'] = '=;'.join(user_defined_wildcards) + '=;'
    return success(result)
@login_required
@permission_required("QueueDB.view_job", raise_exception=True)
def query_running_jobs(request):
    """Return the running-job count plus any pending notification texts."""
    msgs = []
    if request.user.is_superuser:
        running_job = Job.objects.filter(status__gt=0).count()
    else:
        running_job = Job.objects.filter(user=request.user.queuedb_profile_related.delegate).filter(status__gt=0).count()
    # NOTE(review): this chain reads queuedb_profile_related twice
    # (…delegate.queuedb_profile_related…) — confirm it isn't meant to be
    # request.user.queuedb_profile_related.notification_enabled.
    if request.user.queuedb_profile_related.delegate.queuedb_profile_related.notification_enabled:
        # qs = Notification.objects.filter(user=request.user.queuedb_profile_related.delegate)
        qs = []  # notification delivery currently disabled
        for note in qs:
            msgs.append(note.msg)
        # qs.delete()
    return success({"n": running_job, "m": msgs})
@login_required
@permission_required("QueueDB.change_job", raise_exception=True)
def rerun_job(request):
    """Re-run a job from scratch, deleting its previous results first."""
    if request.method == 'POST':
        rerun_form = JobManipulateForm(request.POST)
        if rerun_form.is_valid():
            cd = rerun_form.cleaned_data
            try:
                job = Job.objects.get(id=cd['job'])
                # Snapshot of the protocol version this job last ran with.
                prev_protocol_ver = job.protocol_ver
                if job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
                    if job.locked:
                        return error(_("This job is locked, please unlock first"))
                    if job.result is not None:
                        delete_job_file_tree(request, job.result)
                    job.rerun_job()
                    # Compare against the protocol's *current* version so the
                    # user learns if the protocol changed since the last run.
                    if prev_protocol_ver == job.protocol.ver:
                        return success("Your job will be rerunning soon.")
                    else:
                        return success("Your job will be rerunning soon (protocol modified).")
                else:
                    return error('Your are not the owner of the job.')
            except Exception as e:
                return error(e)
        else:
            return error(str(rerun_form.errors))
    else:
        return error('Method error')
@login_required
@permission_required("QueueDB.change_job", raise_exception=True)
def mark_wrong_job(request):
    """Manually flag a job as wrong (status -3)."""
    if request.method != 'POST':
        return error('Method error')
    mw_form = JobManipulateForm(request.POST)
    if not mw_form.is_valid():
        return error(str(mw_form.errors))
    cd = mw_form.cleaned_data
    try:
        job = Job.objects.get(id=cd['job'])
        # delete_job_file_tree(request, job.result)
        if not job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
            return error('Your are not the owner of the job.')
        if job.locked:
            return error(_("This job is locked, please unlock first"))
        job.status = -3
        job.save()
        return success('Job status changed')
    except Exception as e:
        return error(e)
@login_required
@permission_required("QueueDB.change_job", raise_exception=True)
def resume_job(request):
    """Resume a job from a requested step, capped at the job's checkpoint."""
    if request.method != 'POST':
        return error('Method error')
    resume_form = JobManipulateForm(request.POST)
    if not resume_form.is_valid():
        return error(str(resume_form.errors))
    cd = resume_form.cleaned_data
    try:
        job = Job.objects.get(id=cd['job'])
        if not job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
            return error('Your are not the owner of the job.')
        if job.locked:
            return error(_("This job is locked, please unlock first"))
        # Roll back no earlier than step 0 and no later than the job's
        # recorded resume point.
        job.resume_job(min(max(int(cd['step']), 0), job.resume))
        return success('Your job will be resumed soon.')
    except Exception as e:
        return error(e)
@login_required
@permission_required("QueueDB.view_job", raise_exception=True)
def show_job_log(request):
    """Return the stdout (.log) or stderr (.err) log tail of a job."""
    if request.method != 'POST':
        return error('Method error')
    log_form = QueryJobLogForm(request.POST)
    if not log_form.is_valid():
        return error(str(log_form.errors))
    cd = log_form.cleaned_data
    suffix = ".log" if cd["std_out"] else ".err"
    log_path = os.path.join(get_config('env', 'log'), str(cd['job']) + suffix)
    try:
        from worker.bases import get_job_log
        return success(get_job_log(log_path))
    except Exception as e:
        return error(e)
@login_required
@permission_required("QueueDB.view_job", raise_exception=True)
def show_job_folder(request):
    """Render the result-file listing of a job as an HTML fragment.

    Fixes: removed the unused `import operator`, and renamed the sort-key
    lambda parameter which shadowed the `user_files` variable.
    """
    if request.method != 'POST':
        return error('Method error')
    query_job_form = JobManipulateForm(request.POST)
    if not query_job_form.is_valid():
        return error(str(query_job_form.errors))
    cd = query_job_form.cleaned_data
    user_files = get_job_files(cd["job"], request.user.queuedb_profile_related.delegate.id,
                               request.user.is_superuser)
    # get_job_files returns a list on success, a str/Exception on failure.
    if type(user_files) is list:
        template = loader.get_template('ui/show_job_folder.html')
        context = {
            'user_files': sorted(user_files, key=lambda item: item['name']),
            'jid': cd["job"]
        }
        return success(template.render(context))
    return error(user_files)
@login_required
@permission_required("QueueDB.change_job", raise_exception=True)
def terminate_job(request):
    """Terminate a running job owned by the requesting user."""
    if request.method == 'POST':
        terminate_form = JobManipulateForm(request.POST)
        # BUG FIX: the condition used to be
        # `terminate_form.is_valid() or request.user.is_superuser`, which let
        # an invalid form through for superusers and then failed on
        # cd['job'] with a misleading error. A valid form is always required.
        if terminate_form.is_valid():
            cd = terminate_form.cleaned_data
            try:
                job = Job.objects.get(id=cd['job'])
                if job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
                    if job.locked:
                        return error(_("This job is locked, please unlock first"))
                    job.terminate_job()
                    return success('Your job will be terminated soon.')
                else:
                    return error('Your are not the owner of the job.')
            except Exception as e:
                return error(e)
        else:
            return error(str(terminate_form.errors))
    else:
        return error('Method error')
@login_required
@permission_required("QueueDB.change_job", raise_exception=True)
def update_job_inputs(request):
    """Update the input-file bindings of a job (URL-encoded 'parameter')."""
    if request.method != 'GET':
        return error('Method error')
    update_job_form = JobUpdateForm(request.GET)
    if not update_job_form.is_valid():
        return error(str(update_job_form.errors))
    cd = update_job_form.cleaned_data
    try:
        import urllib.parse
        job = Job.objects.get(id=cd['id'])
        if job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
            if job.locked:
                return error(_("This job is locked, please unlock first"))
            job.update_inputs(urllib.parse.unquote(cd['parameter']))
        else:
            return error('Your are not owner of the Job.')
    except Job.DoesNotExist:
        # BUG FIX: this previously reported "Workspace doesn't exist."
        return error('Job doesn\'t exist.')
    return success('Job\'s inputs have been updated.')
@login_required
@permission_required("QueueDB.change_job", raise_exception=True)
def update_job_parameter(request):
    """Update the wildcard parameters of a job (URL-encoded 'parameter')."""
    if request.method != 'GET':
        return error('Method error')
    update_job_form = JobUpdateForm(request.GET)
    if not update_job_form.is_valid():
        return error(str(update_job_form.errors))
    cd = update_job_form.cleaned_data
    try:
        import urllib.parse
        job = Job.objects.get(id=cd['id'])
        if job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
            if job.locked:
                return error(_("This job is locked, please unlock first"))
            job.update_parameter(urllib.parse.unquote(cd['parameter']))
        else:
            return error('Your are not owner of the Job.')
    except Job.DoesNotExist:
        # BUG FIX: this previously reported "Workspace doesn't exist."
        return error('Job doesn\'t exist.')
    return success('Job\'s parameters have been updated.')
@login_required
@permission_required("QueueDB.change_workspace", raise_exception=True)
def update_workspace(request):
    """Move a job into another of the user's workspaces (ws == -1 clears it)."""
    if request.method != 'POST':
        return error('Method error')
    update_ws_form = UpdateWorkspaceForm(request.POST)
    if not update_ws_form.is_valid():
        return error(str(update_ws_form.errors))
    cd = update_ws_form.cleaned_data
    job = Job.objects.get(id=cd['id'])
    if not job.check_owner(request.user.queuedb_profile_related.delegate, read_only=False):
        return error('Your are not owner of the Job.')
    try:
        if cd['ws'] != -1:
            ws = Workspace.objects.get(id=cd['ws'], user=request.user.queuedb_profile_related.delegate)
        else:
            ws = None
        job.workspace = ws
        job.save()
    except Workspace.DoesNotExist:
        # BUG FIX: previously caught Job.DoesNotExist, which the guarded code
        # can never raise; a missing workspace escaped as an unhandled error.
        return error('Workspace doesn\'t exist.')
    return success('Job\'s workspace has been updated.')
| |
#!/usr/bin/env python
# Copyright 2012 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Impala's shell
import cmd
import errno
import getpass
import os
import prettytable
import re
import shlex
import signal
import socket
import sqlparse
import sys
import time
from impala_client import (ImpalaClient, DisconnectedException, QueryStateException,
RPCException, TApplicationException)
from impala_shell_config_defaults import impala_shell_defaults
from option_parser import get_option_parser, get_config_from_file
from shell_output import DelimitedOutputFormatter, OutputStream, PrettyOutputFormatter
from subprocess import call
# Template used to render the full version banner.
VERSION_FORMAT = "Impala Shell v%(version)s (%(git_hash)s) built on %(build_date)s"
# Fallback shown when no packaged build info is available.
VERSION_STRING = "build version not available"
# Number of entries readline keeps in the command history.
HISTORY_LENGTH = 100
# Tarball / packaging build makes impala_build_version available
try:
  from impala_build_version import get_git_hash, get_build_date, get_version
  VERSION_STRING = VERSION_FORMAT % {'version': get_version(),
                                     'git_hash': get_git_hash()[:7],
                                     'build_date': get_build_date()}
except Exception:
  # Development builds have no impala_build_version module; keep the fallback.
  pass
class CmdStatus:
  """Values indicate the execution status of a command to the cmd shell driver module
  SUCCESS and ERROR continue running the shell and ABORT exits the shell
  Since SUCCESS == None, successful commands do not need to explicitly return
  anything on completion
  """
  SUCCESS = None   # keep running; also the implicit return of a handler
  ABORT = True     # stop the command loop and exit the shell
  ERROR = False    # keep running, but the command failed
class ImpalaPrettyTable(prettytable.PrettyTable):
  """Patched version of PrettyTable that handles utf-8 characters by replacing them with a
  placeholder, rather than ignoring them entirely"""
  def _unicode(self, value):
    # Python 2 text handling: coerce any non-string value to str first.
    if not isinstance(value, basestring):
      value = str(value)
    if not isinstance(value, unicode):
      # If a value cannot be encoded, replace it with a placeholder.
      value = unicode(value, self.encoding, "replace")
    return value
class ImpalaShell(cmd.Cmd):
  """ Simple Impala Shell.
  Basic usage: type connect <host:port> to connect to an impalad
  Then issue queries or other commands. Tab-completion should show the set of
  available commands.
  Methods that implement shell commands return a boolean tuple (stop, status)
  stop is a flag the command loop uses to continue/discontinue the prompt.
  Status tells the caller that the command completed successfully.
  """
  # If not connected to an impalad, the server version is unknown.
  UNKNOWN_SERVER_VERSION = "Not Connected"
  # Prompt shown while no impalad connection is established.
  DISCONNECTED_PROMPT = "[Not connected] > "
  # Error and warning that is printed by cancel_query
  CANCELLATION_ERROR = 'Cancelled'
  # Message to display in shell when cancelling a query
  CANCELLATION_MESSAGE = ' Cancelling Query'
  # Commands are terminated with the following delimiter.
  CMD_DELIM = ';'
  # Database used when a table name is not fully qualified.
  DEFAULT_DB = 'default'
  # Regex applied to all tokens of a query to detect the query type.
  INSERT_REGEX = re.compile("^insert$", re.I)
  def __init__(self, options):
    """Initialize shell state from parsed command-line `options`.

    Prompts for an LDAP password if needed, connects immediately when an
    impalad address was given, and installs a SIGINT handler for query
    cancellation.
    """
    cmd.Cmd.__init__(self)
    self.is_alive = True
    self.impalad = None
    # Authentication / transport settings.
    self.use_kerberos = options.use_kerberos
    self.kerberos_service_name = options.kerberos_service_name
    self.use_ssl = options.ssl
    self.ca_cert = options.ca_cert
    self.user = options.user
    self.ldap_password = None;
    self.use_ldap = options.use_ldap
    self.verbose = options.verbose
    self.prompt = ImpalaShell.DISCONNECTED_PROMPT
    self.server_version = ImpalaShell.UNKNOWN_SERVER_VERSION
    self.refresh_after_connect = options.refresh_after_connect
    self.current_db = options.default_db
    self.history_file = os.path.expanduser("~/.impalahistory")
    # Stores the state of user input until a delimiter is seen.
    self.partial_cmd = str()
    # Stores the old prompt while the user input is incomplete.
    self.cached_prompt = str()
    self.show_profiles = options.show_profiles
    # Output formatting flags/options
    self.output_file = options.output_file
    self.output_delimiter = options.output_delimiter
    self.write_delimited = options.write_delimited
    self.print_header = options.print_header
    # Query options explicitly SET by the user in this session.
    self.set_query_options = {}
    self._populate_command_list()
    self.imp_client = None;
    # Tracks query handle of the last query executed. Used by the 'profile' command.
    self.last_query_handle = None;
    self.query_handle_closed = None
    # readline is optional; history support is disabled when it is missing.
    try:
      self.readline = __import__('readline')
      self.readline.set_history_length(HISTORY_LENGTH)
    except ImportError:
      self._disable_readline()
    if options.use_ldap:
      self.ldap_password = getpass.getpass("LDAP password for %s:" % self.user)
    if options.impalad != None:
      self.do_connect(options.impalad)
    # We handle Ctrl-C ourselves, using an Event object to signal cancellation
    # requests between the handler and the main shell thread.
    signal.signal(signal.SIGINT, self._signal_handler)
def _populate_command_list(self):
"""Populate a list of commands in the shell.
Each command has its own method of the form do_<command>, and can be extracted by
introspecting the class directory.
"""
# Slice the command method name to get the name of the command.
self.commands = [cmd[3:] for cmd in dir(self.__class__) if cmd.startswith('do_')]
  def _disable_readline(self):
    """Disables the readline module.
    The readline module is responsible for keeping track of command history.
    """
    # All history-related code checks `self.readline` for None before use.
    self.readline = None
  def _print_options(self, default_options, set_options):
    """Print all query options; explicitly-set values shown bare, defaults in []."""
    # Prints the current query options
    # with default values distinguished from set values by brackets []
    if not default_options and not set_options:
      print '\tNo options available.'
    else:
      for k in sorted(default_options.keys()):
        # An option counts as "set" only if it differs from its default.
        if k in set_options.keys() and set_options[k] != default_options[k]:
          print '\n'.join(["\t%s: %s" % (k, set_options[k])])
        else:
          print '\n'.join(["\t%s: [%s]" % (k, default_options[k])])
  def do_shell(self, args):
    """Run a command on the shell
    Usage: shell <cmd>
           ! <cmd>
    """
    try:
      start_time = time.time()
      # Delegates the whole argument string to the system shell.
      os.system(args)
      self._print_if_verbose("--------\nExecuted in %2.2fs" % (time.time() - start_time))
    except Exception, e:
      print_to_stderr('Error running command : %s' % e)
      return CmdStatus.ERROR
  def sanitise_input(self, args, interactive=True):
    """Convert the command to lower case, so it's recognized"""
    # A command terminated by a semi-colon is legal. Check for the trailing
    # semi-colons and strip them from the end of the command.
    args = args.strip()
    tokens = args.split(' ')
    if not interactive:
      tokens[0] = tokens[0].lower()
      # Strip all the non-interactive commands of the delimiter.
      return ' '.join(tokens).rstrip(ImpalaShell.CMD_DELIM)
    # The first token is converted into lower case to route it to the
    # appropriate command handler. This only applies to the first line of user input.
    # Modifying tokens in subsequent lines may change the semantics of the command,
    # so do not modify the text.
    if not self.partial_cmd:
      # The first token is the command.
      # If it's EOF, call do_quit()
      if tokens[0] == 'EOF':
        return 'quit'
      else:
        tokens[0] = tokens[0].lower()
    elif tokens[0] == "EOF":
      # If a command is in progress and the user hits a Ctrl-D, clear its state
      # and reset the prompt.
      self.prompt = self.cached_prompt
      self.partial_cmd = str()
      # The print statement makes the new prompt appear in a new line.
      # Also print an extra newline to indicate that the current command has
      # been cancelled.
      print '\n'
      return str()
    # Multi-line handling: buffer input until the trailing delimiter arrives.
    args = self._check_for_command_completion(' '.join(tokens).strip())
    return args.rstrip(ImpalaShell.CMD_DELIM)
def _shlex_split(self, line):
"""Reimplement shlex.split() so that escaped single quotes
are actually escaped. shlex.split() only escapes double quotes
by default. This method will throw a ValueError if an open
quotation (either single or double) is found.
"""
my_split = shlex.shlex(line, posix=True)
my_split.escapedquotes = '"\''
my_split.whitespace_split = True
my_split.commenters = ''
return list(my_split)
  def _cmd_ends_with_delim(self, line):
    """Check if the input command ends with a command delimiter.
    A command ending with the delimiter and containing an open quotation character is
    not considered terminated. If no open quotation is found, it's considered
    terminated.

    Returns True when the (possibly multi-line) command is complete.
    """
    if line.endswith(ImpalaShell.CMD_DELIM):
      try:
        # Look for an open quotation in the entire command, and not just the
        # current line.
        if self.partial_cmd: line = '%s %s' % (self.partial_cmd, line)
        self._shlex_split(line)
        return True
      # If the command ends with a delimiter, check if it has an open quotation.
      # shlex in self._split() throws a ValueError iff an open quotation is found.
      # A quotation can either be a single quote or a double quote.
      except ValueError:
        pass
      # This checks to see if there are any backslashed quotes
      # outside of quotes, since backslashed quotes
      # outside of single or double quotes should not be escaped.
      # Ex. 'abc\'xyz' -> closed because \' is escaped
      #     \'abcxyz -> open because \' is not escaped
      #     \'abcxyz' -> closed
      # Iterate through the line and switch the state if a single or double quote is found
      # and ignore escaped single and double quotes if the line is considered open (meaning
      # a previous single or double quote has not been closed yet)
      state_closed = True;
      opener = None;
      for i, char in enumerate(line):
        if state_closed and (char in ['\'', '\"']):
          # Entering a quoted region; remember which quote opened it.
          state_closed = False
          opener = char
        elif not state_closed and opener == char:
          # Only an unescaped matching quote closes the region.
          if line[i - 1] != '\\':
            state_closed = True
            opener = None;
      return state_closed
    return False
  def _check_for_command_completion(self, cmd):
    """Check for a delimiter at the end of user input.
    The end of the user input is scanned for a legal delimiter.
    If a delimiter is not found:
      - Input is not send to onecmd()
        - onecmd() is a method in Cmd which routes the user input to the
          appropriate method. An empty string results in a no-op.
      - Input is removed from history.
      - Input is appended to partial_cmd
    If a delimiter is found:
      - The contents of partial_cmd are put in history, as they represent
        a completed command.
      - The contents are passed to the appropriate method for execution.
      - partial_cmd is reset to an empty string.

    Returns the completed command (comments stripped, newlines flattened),
    or an empty string while input is still incomplete.
    """
    if self.readline:
      current_history_len = self.readline.get_current_history_length()
    # Input is incomplete, store the contents and do nothing.
    if not self._cmd_ends_with_delim(cmd):
      # The user input is incomplete, change the prompt to reflect this.
      if not self.partial_cmd and cmd:
        self.cached_prompt = self.prompt
        self.prompt = '> '.rjust(len(self.cached_prompt))
      # partial_cmd is already populated, add the current input after a newline.
      if self.partial_cmd and cmd:
        self.partial_cmd = "%s\n%s" % (self.partial_cmd, cmd)
      else:
        # If the input string is empty or partial_cmd is empty.
        self.partial_cmd = "%s%s" % (self.partial_cmd, cmd)
      # Remove the most recent item from history if:
      #   -- The current state of user input in incomplete.
      #   -- The most recent user input is not an empty string
      if self.readline and current_history_len > 0 and cmd:
        self.readline.remove_history_item(current_history_len - 1)
      # An empty string results in a no-op. Look at emptyline()
      return str()
    elif self.partial_cmd:  # input ends with a delimiter and partial_cmd is not empty
      if cmd != ImpalaShell.CMD_DELIM:
        completed_cmd = "%s\n%s" % (self.partial_cmd, cmd)
      else:
        completed_cmd = "%s%s" % (self.partial_cmd, cmd)
      # Reset partial_cmd to an empty string
      self.partial_cmd = str()
      # Replace the most recent history item with the completed command.
      completed_cmd = sqlparse.format(completed_cmd, strip_comments=True)
      if self.readline and current_history_len > 0:
        # Update the history item to replace newlines with spaces. This is needed so
        # readline can properly restore the history (otherwise it interprets each newline
        # as a separate history item).
        self.readline.replace_history_item(current_history_len - 1,
          completed_cmd.encode('utf-8').replace('\n', ' '))
      # Revert the prompt to its earlier state
      self.prompt = self.cached_prompt
    else:  # Input has a delimiter and partial_cmd is empty
      completed_cmd = sqlparse.format(cmd, strip_comments=True)
    # The comments have been parsed out, there is no need to retain the newlines.
    # They can cause parse errors in sqlparse when unescaped quotes and delimiters
    # come into play.
    return completed_cmd.replace('\n', ' ')
  def _signal_handler(self, signal, frame):
    """Handles query cancellation on a Ctrl+C event"""
    # Nothing to cancel if no query ran yet or its handle is already closed.
    if self.last_query_handle is None or self.query_handle_closed:
      return
    # Create a new connection to the impalad and cancel the query.
    # A fresh connection is needed because the main connection may be busy
    # blocked on the running query.
    try:
      self.query_handle_closed = True
      print_to_stderr(ImpalaShell.CANCELLATION_MESSAGE)
      new_imp_client = ImpalaClient(self.impalad)
      new_imp_client.connect()
      new_imp_client.cancel_query(self.last_query_handle, False)
      self._validate_database()
    except Exception, e:
      print_to_stderr("Failed to reconnect and close: %s" % str(e))
      # TODO: Add a retry here
def precmd(self, args):
args = self.sanitise_input(args)
if not args: return args
# Split args using sqlparse. If there are multiple queries present in user input,
# the length of the returned query list will be greater than one.
parsed_cmds = sqlparse.split(args)
if len(parsed_cmds) > 1:
# The last command needs a delimiter to be successfully executed.
parsed_cmds[-1] += ImpalaShell.CMD_DELIM
self.cmdqueue.extend(parsed_cmds)
# If cmdqueue is populated, then commands are executed from the cmdqueue, and user
# input is ignored. Send an empty string as the user input just to be safe.
return str()
return args.encode('utf-8')
  def postcmd(self, status, args):
    """Pass the handler's CmdStatus through to the cmd driver unchanged."""
    # status conveys to shell how the shell should continue execution
    # should always be a CmdStatus
    return status
def do_summary(self, args):
summary = None
try:
summary = self.imp_client.get_summary(self.last_query_handle)
except RPCException:
pass
if summary is None:
print_to_stderr("Could not retrieve summary for query.")
return CmdStatus.ERROR
if summary.nodes is None:
print_to_stderr("Summary not available")
return CmdStatus.SUCCESS
output = []
table = self.construct_table_header(["Operator", "#Hosts", "Avg Time", "Max Time",
"#Rows", "Est. #Rows", "Peak Mem",
"Est. Peak Mem", "Detail"])
self.imp_client.build_summary_table(summary, 0, False, 0, False, output)
formatter = PrettyOutputFormatter(table)
self.output_stream = OutputStream(formatter, filename=self.output_file)
self.output_stream.write(output)
  def do_set(self, args):
    """Set or display query options.
    Display query options:
    Usage: SET
    Set query options:
    Usage: SET <option>=<value>
    """
    # TODO: Expand set to allow for setting more than just query options.
    if len(args) == 0:
      # Bare SET: show all options and return.
      print "Query options (defaults shown in []):"
      self._print_options(self.imp_client.default_query_options, self.set_query_options);
      return CmdStatus.SUCCESS
    # Remove any extra spaces surrounding the tokens.
    # Allows queries that have spaces around the = sign.
    tokens = [arg.strip() for arg in args.split("=")]
    if len(tokens) != 2:
      print_to_stderr("Error: SET <option>=<value>")
      return CmdStatus.ERROR
    # Option names are case-insensitive; stored upper-cased.
    option_upper = tokens[0].upper()
    if option_upper not in self.imp_client.default_query_options.keys():
      print "Unknown query option: %s" % (tokens[0])
      print "Available query options, with their values (defaults shown in []):"
      self._print_options(self.imp_client.default_query_options, self.set_query_options)
      return CmdStatus.ERROR
    self.set_query_options[option_upper] = tokens[1]
    self._print_if_verbose('%s set to %s' % (option_upper, tokens[1]))
  def do_unset(self, args):
    """Unset a query option"""
    # Exactly one option name is expected.
    if len(args.split()) != 1:
      print 'Usage: unset <option>'
      return CmdStatus.ERROR
    option = args.upper()
    if self.set_query_options.get(option):
      print 'Unsetting %s' % option
      del self.set_query_options[option]
    else:
      print "No option called %s is set" % args
def do_quit(self, args):
"""Quit the Impala shell"""
self._print_if_verbose("Goodbye " + self.user)
self.is_alive = False
return CmdStatus.ABORT
  def do_exit(self, args):
    """Exit the impala shell"""
    # Alias: delegates to do_quit so both commands share shutdown behavior.
    return self.do_quit(args)
  def do_connect(self, args):
    """Connect to an Impalad instance:
    Usage: connect, defaults to the fqdn of the localhost and port 21000
           connect <hostname:port>
           connect <hostname>, defaults to port 21000
    """
    # Assume the user wants to connect to the local impalad if no connection string is
    # specified. Conneting to a kerberized impalad requires an fqdn as the host name.
    if not args: args = socket.getfqdn()
    tokens = args.split(" ")
    # validate the connection string.
    host_port = [val for val in tokens[0].split(':') if val.strip()]
    if (':' in tokens[0] and len(host_port) != 2):
      print_to_stderr("Connection string must either be empty, or of the form "
                      "<hostname[:port]>")
      return CmdStatus.ERROR
    elif len(host_port) == 1:
      # No port given: use the default impalad beeswax port.
      host_port.append('21000')
    self.impalad = tuple(host_port)
    # Drop any previous connection before creating a new client.
    if self.imp_client: self.imp_client.close_connection()
    self.imp_client = ImpalaClient(self.impalad, self.use_kerberos,
                                   self.kerberos_service_name, self.use_ssl,
                                   self.ca_cert, self.user, self.ldap_password,
                                   self.use_ldap)
    self._connect()
    # If the connection fails and the Kerberos has not been enabled,
    # check for a valid kerberos ticket and retry the connection
    # with kerberos enabled.
    if not self.imp_client.connected and not self.use_kerberos:
      try:
        if call(["klist", "-s"]) == 0:
          print_to_stderr(("Kerberos ticket found in the credentials cache, retrying "
                           "the connection with a secure transport."))
          self.imp_client.use_kerberos = True
          self._connect()
      except OSError, e:
        # klist binary not available; silently skip the kerberos retry.
        pass
    if self.imp_client.connected:
      self._print_if_verbose('Connected to %s:%s' % self.impalad)
      self._print_if_verbose('Server version: %s' % self.server_version)
      self.prompt = "[%s:%s] > " % self.impalad
      if self.refresh_after_connect:
        self.cmdqueue.append('invalidate metadata' + ImpalaShell.CMD_DELIM)
        print_to_stderr("Invalidating Metadata")
      self._validate_database()
      try:
        self.imp_client.build_default_query_options_dict()
      except RPCException, e:
        print_to_stderr(e)
      # In the case that we lost connection while a command was being entered,
      # we may have a dangling command, clear partial_cmd
      self.partial_cmd = str()
      # Check if any of query options set by the user are inconsistent
      # with the impalad being connected to
      for set_option in self.set_query_options.keys():
        if set_option not in set(self.imp_client.default_query_options.keys()):
          print ('%s is not supported for the impalad being '
                 'connected to, ignoring.' % set_option)
          del self.set_query_options[set_option]
def _connect(self):
try:
server_version = self.imp_client.connect()
if server_version:
self.server_version = server_version
except TApplicationException:
# We get a TApplicationException if the transport is valid,
# but the RPC does not exist.
print_to_stderr("Error: Unable to communicate with impalad service. This "
"service may not be an impalad instance. Check host:port and try again.")
self.imp_client.close_connection()
raise
except ImportError:
print_to_stderr(("Unable to import the python 'ssl' module. It is"
" required for an SSL-secured connection."))
sys.exit(1)
except socket.error as (code, e):
# if the socket was interrupted, reconnect the connection with the client
if code == errno.EINTR:
self._reconnect_cancellation
else:
print_to_stderr("Socket error %s: %s" % (code, e))
self.prompt = self.DISCONNECTED_PROMPT
except Exception, e:
print_to_stderr("Error connecting: %s, %s" % (type(e).__name__, e))
# If a connection to another impalad failed while already connected
# reset the prompt to disconnected.
self.server_version = self.UNKNOWN_SERVER_VERSION
self.prompt = self.DISCONNECTED_PROMPT
  def _reconnect_cancellation(self):
    """Re-establish the connection (after an interrupted socket call) and
    re-select the current database."""
    self._connect()
    self._validate_database()
  def _validate_database(self):
    """Queue a USE for the current database (if any) so a fresh connection
    lands in the same database; backticks are stripped then re-added."""
    if self.current_db:
      self.current_db = self.current_db.strip('`')
      self.cmdqueue.append(('use `%s`' % self.current_db) + ImpalaShell.CMD_DELIM)
  def _print_if_verbose(self, message):
    """Print `message` to stderr, but only when the shell is in verbose mode."""
    if self.verbose:
      print_to_stderr(message)
  def print_runtime_profile(self, profile, status=False):
    """Print a query's runtime profile to stdout.

    Printed when profiles are enabled (show_profiles) or when `status` is
    True (explicit request, e.g. the 'profile' command).
    """
    if self.show_profiles or status:
      if profile is not None:
        print "Query Runtime Profile:\n" + profile
def _parse_table_name_arg(self, arg):
""" Parses an argument string and returns the result as a db name, table name combo.
If the table name was not fully qualified, the current database is returned as the db.
Otherwise, the table is split into db/table name parts and returned.
If an invalid format is provided, None is returned.
"""
if not arg: return
# If a multi-line argument, the name might be split across lines
arg = arg.replace('\n', '')
# Get the database and table name, using the current database if the table name
# wasn't fully qualified.
db_name, tbl_name = self.current_db, arg
if db_name is None:
db_name = ImpalaShell.DEFAULT_DB
db_table_name = arg.split('.')
if len(db_table_name) == 1:
return db_name, db_table_name[0]
if len(db_table_name) == 2:
return db_table_name
def do_alter(self, args):
query = self.imp_client.create_beeswax_query("alter %s" % args,
self.set_query_options)
return self._execute_stmt(query)
def do_create(self, args):
query = self.imp_client.create_beeswax_query("create %s" % args,
self.set_query_options)
return self._execute_stmt(query)
def do_drop(self, args):
query = self.imp_client.create_beeswax_query("drop %s" % args,
self.set_query_options)
return self._execute_stmt(query)
def do_load(self, args):
query = self.imp_client.create_beeswax_query("load %s" % args,
self.set_query_options)
return self._execute_stmt(query)
def do_profile(self, args):
"""Prints the runtime profile of the last INSERT or SELECT query executed."""
if len(args) > 0:
print_to_stderr("'profile' does not accept any arguments")
return CmdStatus.ERROR
elif self.last_query_handle is None:
print_to_stderr('No previous query available to profile')
return CmdStatus.ERROR
profile = self.imp_client.get_runtime_profile(self.last_query_handle)
return self.print_runtime_profile(profile, True)
def do_select(self, args):
"""Executes a SELECT... query, fetching all rows"""
query = self.imp_client.create_beeswax_query("select %s" % args,
self.set_query_options)
return self._execute_stmt(query)
def _format_outputstream(self):
column_names = self.imp_client.get_column_names(self.last_query_handle)
if self.write_delimited:
formatter = DelimitedOutputFormatter(field_delim=self.output_delimiter)
self.output_stream = OutputStream(formatter, filename=self.output_file)
# print the column names
if self.print_header:
self.output_stream.write([column_names])
else:
prettytable = self.construct_table_header(column_names)
formatter = PrettyOutputFormatter(prettytable)
self.output_stream = OutputStream(formatter, filename=self.output_file)
  def _execute_stmt(self, query, is_insert=False):
    """ The logic of executing any query statement

    The client executes the query and the query_handle is returned immediately,
    even as the client waits for the query to finish executing.
    If the query was not an insert, the results are fetched from the client
    as they are streamed in, through the use of a generator.
    The execution time is printed and the query is closed if it hasn't been already.

    Returns CmdStatus.SUCCESS on success and CmdStatus.ERROR on any caught
    exception; connection-level failures also flip the prompt/connected flag.
    """
    try:
      self._print_if_verbose("Query: %s" % (query.query,))
      start_time = time.time()
      self.last_query_handle = self.imp_client.execute_query(query)
      self.query_handle_closed = False
      # NOTE(review): the return value of wait_to_finish is unused — the call
      # appears to only block until the query completes; confirm.
      wait_to_finish = self.imp_client.wait_to_finish(self.last_query_handle)
      # retrieve the error log
      warning_log = self.imp_client.get_warning_log(self.last_query_handle)
      if is_insert:
        num_rows = self.imp_client.close_insert(self.last_query_handle)
      else:
        # impalad does not support the fetching of metadata for certain types of queries.
        if not self.imp_client.expect_result_metadata(query.query):
          self.query_handle_closed = True
          return CmdStatus.SUCCESS
        self._format_outputstream()
        # fetch returns a generator
        rows_fetched = self.imp_client.fetch(self.last_query_handle)
        num_rows = 0
        for rows in rows_fetched:
          self.output_stream.write(rows)
          num_rows += len(rows)
      end_time = time.time()
      if warning_log:
        self._print_if_verbose(warning_log)
      # print insert when is_insert is true (which is 1)
      # print fetch when is_insert is false (which is 0)
      verb = ["Fetch", "Insert"][is_insert]
      self._print_if_verbose("%sed %d row(s) in %2.2fs" % (verb, num_rows,
                                                               end_time - start_time))
      # Inserts were already closed via close_insert above.
      if not is_insert:
        self.imp_client.close_query(self.last_query_handle, self.query_handle_closed)
      self.query_handle_closed = True
      profile = self.imp_client.get_runtime_profile(self.last_query_handle)
      self.print_runtime_profile(profile)
      return CmdStatus.SUCCESS
    except RPCException, e:
      # could not complete the rpc successfully
      # suppress error if reason is cancellation
      if self._no_cancellation_error(e):
        print_to_stderr(e)
    except QueryStateException, e:
      # an exception occurred while executing the query
      if self._no_cancellation_error(e):
        self.imp_client.close_query(self.last_query_handle, self.query_handle_closed)
        print_to_stderr(e)
    except DisconnectedException, e:
      # the client has lost the connection
      print_to_stderr(e)
      self.imp_client.connected = False
      self.prompt = ImpalaShell.DISCONNECTED_PROMPT
    except socket.error as (code, e):
      # if the socket was interrupted, reconnect the connection with the client
      if code == errno.EINTR:
        print ImpalaShell.CANCELLATION_MESSAGE
        self._reconnect_cancellation()
      else:
        print_to_stderr("Socket error %s: %s" % (code, e))
        self.prompt = self.DISCONNECTED_PROMPT
        self.imp_client.connected = False
    except Exception, u:
      # if the exception is unknown, there was possibly an issue with the connection
      # set the shell as disconnected
      print_to_stderr('Unknown Exception : %s' % (u,))
      self.imp_client.connected = False
      self.prompt = ImpalaShell.DISCONNECTED_PROMPT
    return CmdStatus.ERROR
def _no_cancellation_error(self, error):
if ImpalaShell.CANCELLATION_ERROR not in str(error):
return True
def construct_table_header(self, column_names):
""" Constructs the table header for a given query handle.
Should be called after the query has finished and before data is fetched.
All data is left aligned.
"""
table = ImpalaPrettyTable()
for column in column_names:
# Column names may be encoded as utf-8
table.add_column(column.decode('utf-8', 'ignore'), [])
table.align = "l"
return table
def do_values(self, args):
"""Executes a VALUES(...) query, fetching all rows"""
query = self.imp_client.create_beeswax_query("values %s" % args,
self.set_query_options)
return self._execute_stmt(query)
def do_with(self, args):
"""Executes a query with a WITH clause, fetching all rows"""
query = self.imp_client.create_beeswax_query("with %s" % args,
self.set_query_options)
# Set posix=True and add "'" to escaped quotes
# to deal with escaped quotes in string literals
lexer = shlex.shlex(query.query.lstrip(), posix=True)
lexer.escapedquotes += "'"
# Because the WITH clause may precede INSERT or SELECT queries,
# just checking the first token is insufficient.
is_insert = False
tokens = list(lexer)
if filter(self.INSERT_REGEX.match, tokens): is_insert = True
return self._execute_stmt(query, is_insert=is_insert)
def do_use(self, args):
"""Executes a USE... query"""
query = self.imp_client.create_beeswax_query("use %s" % args,
self.set_query_options)
if self._execute_stmt(query) is CmdStatus.SUCCESS:
self.current_db = args
else:
return CmdStatus.ERROR
def do_show(self, args):
"""Executes a SHOW... query, fetching all rows"""
query = self.imp_client.create_beeswax_query("show %s" % args,
self.set_query_options)
return self._execute_stmt(query)
def do_describe(self, args):
"""Executes a DESCRIBE... query, fetching all rows"""
query = self.imp_client.create_beeswax_query("describe %s" % args,
self.set_query_options)
return self._execute_stmt(query)
  def do_desc(self, args):
    """Alias for the DESCRIBE command."""
    return self.do_describe(args)
def do_insert(self, args):
"""Executes an INSERT query"""
query = self.imp_client.create_beeswax_query("insert %s" % args,
self.set_query_options)
return self._execute_stmt(query, is_insert=True)
def do_explain(self, args):
"""Explain the query execution plan"""
query = self.imp_client.create_beeswax_query("explain %s" % args,
self.set_query_options)
return self._execute_stmt(query)
  def do_history(self, args):
    """Display command history"""
    # Deal with readline peculiarity. When history does not exists,
    # readline returns 1 as the history length and stores 'None' at index 0.
    if self.readline and self.readline.get_current_history_length() > 0:
      # readline history indices are 1-based.
      for index in xrange(1, self.readline.get_current_history_length() + 1):
        cmd = self.readline.get_history_item(index)
        print_to_stderr('[%d]: %s' % (index, cmd))
    else:
      print_to_stderr("The readline module was either not found or disabled. Command "
                      "history will not be collected.")
  def preloop(self):
    """Load the history file if it exists (cmd.Cmd hook run before the loop)."""
    if self.readline:
      # The history file is created when the Impala shell is invoked and commands are
      # issued. In the first invocation of the shell, the history file will not exist.
      # Clearly, this is not an error, return.
      if not os.path.exists(self.history_file): return
      try:
        self.readline.read_history_file(self.history_file)
      except IOError, i:
        msg = "Unable to load command history (disabling history collection): %s" % i
        print_to_stderr(msg)
        # This history file exists but is not readable, disable readline.
        self._disable_readline()
  def postloop(self):
    """Save session commands in history (cmd.Cmd hook run after the loop)."""
    if self.readline:
      try:
        self.readline.write_history_file(self.history_file)
      except IOError, i:
        msg = "Unable to save command history (disabling history collection): %s" % i
        print_to_stderr(msg)
        # The history file is not writable, disable readline.
        self._disable_readline()
  def default(self, args):
    """Treat any input without a dedicated do_* handler as a query statement
    (cmd.Cmd calls default() for unrecognized commands)."""
    query = self.imp_client.create_beeswax_query(args, self.set_query_options)
    return self._execute_stmt(query)
  def emptyline(self):
    """If an empty line is entered, do nothing"""
    # Overrides cmd.Cmd.emptyline, which would otherwise repeat the last command.
  def do_version(self, args):
    """Prints the Impala build version"""
    print_to_stderr("Shell version: %s" % VERSION_STRING)
    # server_version stays at UNKNOWN_SERVER_VERSION until a connection succeeds.
    print_to_stderr("Server version: %s" % self.server_version)
def completenames(self, text, *ignored):
"""Make tab completion of commands case agnostic
Override the superclass's completenames() method to support tab completion for
upper case and mixed case commands.
"""
cmd_names = [cmd for cmd in self.commands if cmd.startswith(text.lower())]
# If the user input is upper case, return commands in upper case.
if text.isupper(): return [cmd_names.upper() for cmd_names in cmd_names]
# If the user input is lower case or mixed case, return lower case commands.
return cmd_names
WELCOME_STRING = """Welcome to the Impala shell. Press TAB twice to see a list of \
available commands.
Copyright (c) 2012 Cloudera, Inc. All rights reserved.
(Shell build version: %s)""" % VERSION_STRING
def print_to_stderr(message):
  """Write `message` to stderr (Python 2 print-chevron syntax)."""
  print >> sys.stderr, message
def parse_query_text(query_text, utf8_encode_policy='strict'):
  """Parse query file text, by stripping comments and encoding into utf-8"""
  statements = sqlparse.split(query_text)
  return [strip_comments_from_query(stmt).encode('utf-8', utf8_encode_policy)
          for stmt in statements]
def strip_comments_from_query(query):
  """Strip comments from an individual query using sqlparse."""
  # We only use the strip_comments filter, using other filters can lead to a significant
  # performance hit if the query is very large.
  return sqlparse.format(query, strip_comments=True)
def execute_queries_non_interactive_mode(options):
"""Run queries in non-interactive mode."""
queries = []
if options.query_file:
try:
query_file_handle = open(options.query_file, 'r')
queries = parse_query_text(query_file_handle.read())
query_file_handle.close()
except Exception, e:
print_to_stderr('Error: %s' % e)
sys.exit(1)
elif options.query:
queries = parse_query_text(options.query)
shell = ImpalaShell(options)
# The impalad was specified on the command line and the connection failed.
# Return with an error, no need to process the query.
if options.impalad and shell.imp_client.connected == False:
sys.exit(1)
queries = shell.cmdqueue + queries
# Deal with case.
sanitized_queries = []
for query in queries:
sanitized_queries.append(shell.sanitise_input(query, interactive=False))
for query in sanitized_queries:
# check if an error was encountered
if shell.onecmd(query) is CmdStatus.ERROR:
print_to_stderr('Could not execute command: %s' % query)
if not options.ignore_query_failure:
sys.exit(1)
if __name__ == "__main__":
  # pass defaults into option parser
  parser = get_option_parser(impala_shell_defaults)
  options, args = parser.parse_args()
  # use path to file specified by user in config_file option
  user_config = os.path.expanduser(options.config_file);
  # by default, use the .impalarc in the home directory
  config_to_load = impala_shell_defaults.get("config_file")
  # verify user_config, if found
  if os.path.isfile(user_config) and user_config != config_to_load:
    if options.verbose:
      print_to_stderr("Loading in options from config file: %s \n" % user_config)
    # Command line overrides loading ~/.impalarc
    config_to_load = user_config
  elif user_config != config_to_load:
    print_to_stderr('%s not found.\n' % user_config)
    sys.exit(1)
  # default options loaded in from impala_shell_config_defaults.py
  # options defaults overwritten by those in config file
  try:
    impala_shell_defaults.update(get_config_from_file(config_to_load))
  except Exception, e:
    msg = "Unable to read configuration file correctly. Check formatting: %s\n" % e
    print_to_stderr(msg)
    sys.exit(1)
  # Re-parse so config-file values become defaults while command-line flags
  # still take precedence.
  parser = get_option_parser(impala_shell_defaults)
  options, args = parser.parse_args()
  # Arguments that could not be parsed are stored in args. Print an error and exit.
  if len(args) > 0:
    print_to_stderr('Error, could not parse arguments "%s"' % (' ').join(args))
    parser.print_help()
    sys.exit(1)
  if options.version:
    print VERSION_STRING
    sys.exit(0)
  if options.use_kerberos and options.use_ldap:
    print_to_stderr("Please specify at most one authentication mechanism (-k or -l)")
    sys.exit(1)
  if options.use_kerberos:
    print_to_stderr("Starting Impala Shell using Kerberos authentication")
    print_to_stderr("Using service name '%s'" % options.kerberos_service_name)
    # Check if the user has a ticket in the credentials cache
    try:
      if call(['klist', '-s']) != 0:
        print_to_stderr(("-k requires a valid kerberos ticket but no valid kerberos "
                         "ticket found."))
        sys.exit(1)
    except OSError, e:
      print_to_stderr('klist not found on the system, install kerberos clients')
      sys.exit(1)
  elif options.use_ldap:
    print_to_stderr("Starting Impala Shell using LDAP-based authentication")
  else:
    print_to_stderr("Starting Impala Shell without Kerberos authentication")
  if options.ssl:
    if options.ca_cert is None:
      print_to_stderr("SSL is enabled. Impala server certificates will NOT be verified"\
                      " (set --ca_cert to change)")
    else:
      print_to_stderr("SSL is enabled")
  if options.output_file:
    try:
      # Make sure the given file can be opened for writing. This will also clear the file
      # if successful.
      open(options.output_file, 'wb')
    except IOError, e:
      print_to_stderr('Error opening output file for writing: %s' % e)
      sys.exit(1)
  if options.query or options.query_file:
    execute_queries_non_interactive_mode(options)
    sys.exit(0)
  intro = WELCOME_STRING
  shell = ImpalaShell(options)
  while shell.is_alive:
    try:
      shell.cmdloop(intro)
    except KeyboardInterrupt:
      intro = '\n'
    # a last measure against any exceptions thrown by an rpc
    # not caught in the shell
    except socket.error as (code, e):
      # if the socket was interrupted, reconnect the connection with the client
      if code == errno.EINTR:
        print shell.CANCELLATION_MESSAGE
        shell._reconnect_cancellation()
      else:
        print_to_stderr("Socket error %s: %s" % (code, e))
        shell.imp_client.connected = False
        shell.prompt = shell.DISCONNECTED_PROMPT
    except DisconnectedException, e:
      # the client has lost the connection
      print_to_stderr(e)
      shell.imp_client.connected = False
      shell.prompt = shell.DISCONNECTED_PROMPT
    except QueryStateException, e:
      # an exception occurred while executing the query
      if shell._no_cancellation_error(e):
        shell.imp_client.close_query(shell.last_query_handle,
                                     shell.query_handle_closed)
        print_to_stderr(e)
    except RPCException, e:
      # could not complete the rpc successfully
      # suppress error if reason is cancellation
      if shell._no_cancellation_error(e):
        print_to_stderr(e)
    finally:
      # Only show the welcome banner on the first loop iteration.
      intro = ''
| |
'''
Created on 16/11/2010
@author: faga
'''
from django.dispatch import dispatcher
from django.db import models
from django.core.exceptions import ImproperlyConfigured
from django.contrib import admin
import copy
import re
import types
try:
import settings_audit
except ImportError:
settings_audit = None
# Captures the single-quoted token at the end of a message (e.g. the field
# name in a ValueError's text).
# NOTE(review): no references in this module's visible code — presumably used
# by importers; verify before removing.
value_error_re = re.compile("^.+'(.+)'$")
class AuditTrail(object):
    """Attach an audit trail to a Django model.

    Declaring e.g. ``history = AuditTrail()`` on a model arranges, via the
    class_prepared signal, for a parallel audit model to be created (see
    create_audit_model) and for every save — and optionally every delete —
    of the host model to be copied into it.
    """
    def __init__(self, show_in_admin=False, save_change_type=True, audit_deletes=True,
                 track_fields=None):
        # Options are stashed here and only consumed later, when the
        # class_prepared signal fires (see contribute_to_class).
        self.opts = {}
        self.opts['show_in_admin'] = show_in_admin
        self.opts['save_change_type'] = save_change_type
        self.opts['audit_deletes'] = audit_deletes
        if track_fields:
            self.opts['track_fields'] = track_fields
        else:
            self.opts['track_fields'] = []
    def contribute_to_class(self, cls, name):
        # This should only get added once the class is otherwise complete
        def _contribute(sender, **kwargs):
            # Build the audit model and expose it on the host model as `name`.
            model = create_audit_model(sender, **self.opts)
            if self.opts['show_in_admin']:
                # Enable admin integration
                # If ModelAdmin needs options or different base class, find
                # some way to make the commented code work
                # cls_admin_name = cls.__name__ + 'Admin'
                # clsAdmin = type(cls_admin_name, (admin.ModelAdmin,),{})
                # admin.site.register(cls, clsAdmin)
                # Otherwise, register class with default ModelAdmin
                admin.site.register(model)
            descriptor = AuditTrailDescriptor(model._default_manager, sender._meta.pk.attname)
            setattr(sender, name, descriptor)
            def _audit_track(instance, field_arr, **kwargs):
                # Resolve one extra tracked field; field_arr is
                # (name, field[, default-or-callable]).
                field_name = field_arr[0]
                try:
                    return getattr(instance, field_name)
                except:
                    # NOTE(review): bare except silently swallows all errors
                    # and falls back to the optional default (or None when no
                    # default is given); consider narrowing.
                    if len(field_arr) > 2:
                        if callable(field_arr[2]):
                            fn = field_arr[2]
                            return fn(instance)
                        else:
                            return field_arr[2]
            def _audit(sender, instance, created, **kwargs):
                # Write model changes to the audit model.
                # instance is the current (non-audit) model.
                kwargs = {}
                for field in sender._meta.fields:
                    #kwargs[field.attname] = getattr(instance, field.attname)
                    kwargs[field.name] = getattr(instance, field.name)
                if self.opts['save_change_type']:
                    # 'I'nsert on creation, 'U'pdate otherwise.
                    if created:
                        kwargs['_audit_change_type'] = 'I'
                    else:
                        kwargs['_audit_change_type'] = 'U'
                for field_arr in model._audit_track:
                    kwargs[field_arr[0]] = _audit_track(instance, field_arr)
                model._default_manager.create(**kwargs)
            ## Uncomment this line for pre r8223 Django builds
            #dispatcher.connect(_audit, signal=models.signals.post_save, sender=cls, weak=False)
            ## Comment this line for pre r8223 Django builds
            models.signals.post_save.connect(_audit, sender=cls, weak=False)
            if self.opts['audit_deletes']:
                def _audit_delete(sender, instance, **kwargs):
                    # Write model changes to the audit model
                    kwargs = {}
                    for field in sender._meta.fields:
                        kwargs[field.name] = getattr(instance, field.name)
                    if self.opts['save_change_type']:
                        # 'D'elete.
                        kwargs['_audit_change_type'] = 'D'
                    for field_arr in model._audit_track:
                        kwargs[field_arr[0]] = _audit_track(instance, field_arr)
                    model._default_manager.create(**kwargs)
                ## Uncomment this line for pre r8223 Django builds
                #dispatcher.connect(_audit_delete, signal=models.signals.pre_delete, sender=cls, weak=False)
                ## Comment this line for pre r8223 Django builds
                models.signals.pre_delete.connect(_audit_delete, sender=cls, weak=False)
        ## Uncomment this line for pre r8223 Django builds
        #dispatcher.connect(_contribute, signal=models.signals.class_prepared, sender=cls, weak=False)
        ## Comment this line for pre r8223 Django builds
        models.signals.class_prepared.connect(_contribute, sender=cls, weak=False)
class AuditTrailDescriptor(object):
def __init__(self, manager, pk_attribute):
self.manager = manager
self.pk_attribute = pk_attribute
def __get__(self, instance=None, owner=None):
if instance == None:
#raise AttributeError, "Audit trail is only accessible via %s instances." % type.__name__
return create_audit_manager_class(self.manager)
else:
return create_audit_manager_with_pk(self.manager, self.pk_attribute, instance._get_pk_val())
def __set__(self, instance, value):
raise AttributeError, "Audit trail may not be edited in this manner."
def create_audit_manager_with_pk(manager, pk_attribute, pk):
    """Create an audit trail manager based on the current object

    Returns a manager instance whose queryset is restricted to audit rows
    whose copied-over pk column (`pk_attribute`) equals `pk`.
    """
    class AuditTrailWithPkManager(manager.__class__):
        def __init__(self, *arg, **kw):
            super(AuditTrailWithPkManager, self).__init__(*arg, **kw)
            # Point the subclass at the audit model of the wrapped manager.
            self.model = manager.model
        def get_query_set(self):
            qs = super(AuditTrailWithPkManager, self).get_query_set().filter(**{pk_attribute: pk})
            # Preserve any explicit database routing on the manager.
            if self._db is not None:
                qs = qs.using(self._db)
            return qs
    return AuditTrailWithPkManager()
def create_audit_manager_class(manager):
    """Create an audit trail manager based on the current object

    Returns an unfiltered manager instance over the audit model, for
    class-level access to the trail.
    """
    class AuditTrailManager(manager.__class__):
        def __init__(self, *arg, **kw):
            super(AuditTrailManager, self).__init__(*arg, **kw)
            # Point the subclass at the audit model of the wrapped manager.
            self.model = manager.model
    return AuditTrailManager()
def create_audit_model(cls, **kwargs):
    """Create an audit model for the specific class

    Builds a new Model subclass named `<cls>Audit` containing a copy of every
    field of `cls` plus audit bookkeeping fields, stored in `<table>_audit`.
    """
    name = cls.__name__ + 'Audit'
    class Meta:
        db_table = '%s_audit' % cls._meta.db_table
        app_label = cls._meta.app_label
        verbose_name_plural = '%s audit trail' % cls._meta.verbose_name
        # Newest audit entries first.
        ordering = ['-_audit_timestamp']
    # Set up a dictionary to simulate declarations within a class
    attrs = {
        '__module__': cls.__module__,
        'Meta': Meta,
        '_audit_id': models.AutoField(primary_key=True),
        '_audit_timestamp': models.DateTimeField(auto_now_add=True, db_index=True),
        '_audit__str__': cls.__str__.im_func,
        '__str__': lambda self: '%s as of %s' % (self._audit__str__(), self._audit_timestamp),
        '_audit_track': _track_fields(track_fields=kwargs['track_fields'], unprocessed=True)
    }
    if 'save_change_type' in kwargs and kwargs['save_change_type']:
        # Single char: 'I'nsert / 'U'pdate / 'D'elete (set by the signal handlers).
        attrs['_audit_change_type'] = models.CharField(max_length=1)
    # Copy the fields from the existing model to the audit model
    for field in cls._meta.fields:
        #if field.attname in attrs:
        if field.name in attrs:
            raise ImproperlyConfigured, "%s cannot use %s as it is needed by AuditTrail." % (cls.__name__, field.attname)
        if isinstance(field, models.AutoField):
            # Audit models have a separate AutoField
            attrs[field.name] = models.IntegerField(db_index=True, editable=False)
        else:
            attrs[field.name] = copy.copy(field)
            # If 'unique' is in there, we need to remove it, otherwise the index
            # is created and multiple audit entries for one item fail.
            attrs[field.name]._unique = False
            # If a model has primary_key = True, a second primary key would be
            # created in the audit model. Set primary_key to false.
            attrs[field.name].primary_key = False
            # Rebuild and replace the 'rel' object to avoid foreign key clashes.
            # Borrowed from the Basie project - please check if adding this is allowed by the license.
            if isinstance(field, models.ForeignKey):
                rel = copy.copy(field.rel)
                rel.related_name = '_audit_' + field.related_query_name()
                attrs[field.name].rel = rel
    for track_field in _track_fields(kwargs['track_fields']):
        if track_field['name'] in attrs:
            raise NameError('Field named "%s" already exists in audit version of %s' % (track_field['name'], cls.__name__))
        attrs[track_field['name']] = copy.copy(track_field['field'])
    return type(name, (models.Model,), attrs)
def _build_track_field(track_item):
    """Normalize a (name, Field-or-Model) pair into a track dict.

    A Model target is wrapped in a ForeignKey; anything that is neither a
    Field instance nor a Model subclass raises TypeError.
    """
    field_name, target = track_item[0], track_item[1]
    track = {'name': field_name}
    if isinstance(target, models.Field):
        track['field'] = target
    elif issubclass(target, models.Model):
        track['field'] = models.ForeignKey(target)
    else:
        raise TypeError('Track fields only support items that are Fields or Models.')
    return track
def _track_fields(track_fields=None, unprocessed=False):
    """Collect tracked-field definitions, global settings first.

    Combines GLOBAL_TRACK_FIELDS from settings_audit (when importable) with
    the caller's `track_fields`. When `unprocessed` is True the raw tuples
    are returned; otherwise each is normalized via _build_track_field.
    (Refactor: the two identical loops were merged into one.)
    """
    tracks_found = []
    global_track_fields = []
    if settings_audit:
        global_track_fields = getattr(settings_audit, 'GLOBAL_TRACK_FIELDS', [])
    for track_item in list(global_track_fields) + list(track_fields or []):
        if unprocessed:
            tracks_found.append(track_item)
        else:
            tracks_found.append(_build_track_field(track_item))
    return tracks_found
| |
import re
from copy import copy
from datetime import datetime
from django.conf import settings
from django.contrib.sites.models import Site
from django.core import mail
from django.core.cache import cache
import bleach
import mock
import waffle
from nose.tools import eq_
from test_utils import RequestFactory
from kitsune.sumo.tests import TestCase
from kitsune.users.tests import add_permission, user
from kitsune.wiki.models import Revision, Document
from kitsune.wiki.tasks import (
send_reviewed_notification, rebuild_kb, schedule_rebuild_kb,
_rebuild_kb_chunk, render_document_cascade)
from kitsune.wiki.tests import TestCaseBase, revision
from kitsune.wiki.tests.test_parser import doc_rev_parser
# Expected plain-text body of the review notification; interpolated with
# (reviewer username, document title, reviewer message, document slug).
REVIEWED_EMAIL_CONTENT = """Your revision has been reviewed.
%s has approved your revision to the document %s.
Message from the reviewer:
%s
To view the history of this document, click the following link, or paste \
it into your browser's location bar:
https://testserver/en-US/kb/%s/history
"""
class RebuildTestCase(TestCase):
    """Tests for scheduling and running the KB rebuild celery task."""
    rf = RequestFactory()
    # Remember the original setting so tearDown can restore it.
    ALWAYS_EAGER = settings.CELERY_ALWAYS_EAGER
    def setUp(self):
        # create some random revisions.
        revision(save=True)
        revision(is_approved=True, save=True)
        revision(is_approved=True, save=True)
        revision(is_approved=True, save=True)
        revision(is_approved=True, save=True)
        # TODO: fix this crap
        self.old_settings = copy(settings._wrapped.__dict__)
        settings.CELERY_ALWAYS_EAGER = True
    def tearDown(self):
        cache.delete(settings.WIKI_REBUILD_TOKEN)
        settings._wrapped.__dict__ = self.old_settings
        settings.CELERY_ALWAYS_EAGER = self.ALWAYS_EAGER
    # NOTE: mock.patch decorators apply bottom-up, so the mock for the
    # decorator closest to the function arrives as the first argument.
    @mock.patch.object(rebuild_kb, 'delay')
    @mock.patch.object(waffle, 'switch_is_active')
    def test_eager_queue(self, switch_is_active, delay):
        # In eager mode no token is set and the task is not queued.
        switch_is_active.return_value = True
        schedule_rebuild_kb()
        assert not cache.get(settings.WIKI_REBUILD_TOKEN)
        assert not delay.called
    @mock.patch.object(rebuild_kb, 'delay')
    @mock.patch.object(waffle, 'switch_is_active')
    def test_task_queue(self, switch_is_active, delay):
        switch_is_active.return_value = True
        # With eager mode off, scheduling sets the token and queues the task.
        settings.CELERY_ALWAYS_EAGER = False
        schedule_rebuild_kb()
        assert cache.get(settings.WIKI_REBUILD_TOKEN)
        assert delay.called
    @mock.patch.object(rebuild_kb, 'delay')
    @mock.patch.object(waffle, 'switch_is_active')
    def test_already_queued(self, switch_is_active, delay):
        switch_is_active.return_value = True
        # An existing token means a rebuild is already pending: don't queue another.
        cache.set(settings.WIKI_REBUILD_TOKEN, True)
        schedule_rebuild_kb()
        assert cache.get(settings.WIKI_REBUILD_TOKEN)
        assert not delay.called
    @mock.patch.object(rebuild_kb, 'delay')
    @mock.patch.object(cache, 'get')
    @mock.patch.object(waffle, 'switch_is_active')
    def test_dont_queue(self, switch_is_active, get, delay):
        # Waffle switch off: scheduling is a complete no-op.
        switch_is_active.return_value = False
        schedule_rebuild_kb()
        assert not get.called
        assert not delay.called
    @mock.patch.object(_rebuild_kb_chunk, 'apply_async')
    def test_rebuild_chunk(self, apply_async):
        cache.set(settings.WIKI_REBUILD_TOKEN, True)
        rebuild_kb()
        # The rebuild clears the token and fans out chunk tasks.
        assert not cache.get(settings.WIKI_REBUILD_TOKEN)
        assert 'args' in apply_async.call_args[1]
        # There should be 4 documents with an approved revision
        eq_(4, len(apply_async.call_args[1]['args'][0]))
class ReviewMailTestCase(TestCaseBase):
    """Test that the review mail gets sent."""
    def setUp(self):
        self.user = user(save=True)
        add_permission(self.user, Revision, 'review_revision')
    def _approve_and_send(self, revision, reviewer, message):
        # Mark the revision approved by `reviewer` and fire the notification.
        revision.reviewer = reviewer
        revision.reviewed = datetime.now()
        revision.is_approved = True
        revision.save()
        send_reviewed_notification(revision, revision.document, message)
    @mock.patch.object(Site.objects, 'get_current')
    def test_reviewed_notification(self, get_current):
        get_current.return_value.domain = 'testserver'
        rev = revision()
        doc = rev.document
        msg = 'great work!'
        self._approve_and_send(rev, self.user, msg)
        # Two emails will be sent, one each for the reviewer and the reviewed.
        eq_(2, len(mail.outbox))
        eq_('Your revision has been approved: %s' % doc.title,
            mail.outbox[0].subject)
        eq_([rev.creator.email], mail.outbox[0].to)
        eq_(REVIEWED_EMAIL_CONTENT % (
            self.user.username, doc.title, msg, doc.slug), mail.outbox[0].body)
    @mock.patch.object(Site.objects, 'get_current')
    def test_reviewed_by_creator_no_notification(self, get_current):
        get_current.return_value.domain = 'testserver'
        rev = revision()
        msg = "great work!"
        # Self-review: the creator is not notified about their own approval.
        self._approve_and_send(rev, rev.creator, msg)
        # Verify no email was sent
        eq_(0, len(mail.outbox))
    @mock.patch.object(Site.objects, 'get_current')
    def test_unicode_notifications(self, get_current):
        get_current.return_value.domain = 'testserver'
        rev = revision()
        doc = rev.document
        doc.title = u'Foo \xe8 incode'
        msg = 'foo'
        self._approve_and_send(rev, self.user, msg)
        # Two emails will be sent, one each for the reviewer and the reviewed.
        eq_(2, len(mail.outbox))
        eq_('Your revision has been approved: %s' % doc.title,
            mail.outbox[0].subject)
    @mock.patch.object(Site.objects, 'get_current')
    def test_escaping(self, get_current):
        get_current.return_value.domain = 'testserver'
        rev = revision()
        doc = rev.document
        doc.title = '"All about quotes"'
        msg = 'foo & "bar"'
        self._approve_and_send(rev, self.user, msg)
        # Two emails will be sent, one each for the reviewer and the reviewed.
        eq_(2, len(mail.outbox))
        eq_('Your revision has been approved: %s' % doc.title,
            mail.outbox[0].subject)
        # BUG FIX: the original asserted `'"' not in body` while the next line
        # asserted a quoted string IS in the body — mutually exclusive. The
        # intent of this escaping test is that the plain-text body contains
        # literal quotes/ampersands, not HTML entities.
        assert '&quot;' not in mail.outbox[0].body
        assert '"All about quotes"' in mail.outbox[0].body
        assert 'foo & "bar"' in mail.outbox[0].body
class TestDocumentRenderCascades(TestCaseBase):
    """Editing a template must re-render every document that includes it."""
    def _clean(self, d):
        """
        Get a clean and normalized version of a documents html.
        This grabs uncached copies from the DB, because the in memory
        objects used in the test don't get updated during the cascade.
        """
        html = Document.uncached.get(slug=d.slug).html
        return re.sub(r'\s+', ' ', bleach.clean(html, strip=True)).strip()
    def test_cascade(self):
        # D3 includes D1 and D2; D2 itself includes D1.
        d1, _, _ = doc_rev_parser('one ', title='Template:D1')
        d2, _, _ = doc_rev_parser('[[T:D1]] two', title='Template:D2')
        d3, _, _ = doc_rev_parser('[[T:D1]] [[T:D2]] three', title='D3')
        eq_(self._clean(d3), u'one one two three')
        # Updating the innermost template must cascade to both dependents.
        revision(document=d1, content='ONE', is_approved=True, save=True)
        render_document_cascade(d1)
        eq_(self._clean(d1), u'ONE')
        eq_(self._clean(d2), u'ONE two')
        eq_(self._clean(d3), u'ONE ONE two three')
| |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
import numpy as np
from common import with_seed, assertRaises
from mxnet.contrib.svrg_optimization.svrg_module import SVRGModule
from mxnet.test_utils import *
import pytest
def setup():
    """Build a toy linear-regression data iterator and a bound, initialized
    SVRGModule; returns (iterator, module)."""
    features = np.random.randint(1, 5, [1000, 2])
    true_weights = np.array([1.0, 2.0])
    labels = features.dot(true_weights)
    data_iter = mx.io.NDArrayIter(features, labels, batch_size=32, shuffle=True,
                                  label_name='lin_reg_label')
    data_sym = mx.sym.Variable('data')
    label_sym = mx.symbol.Variable('lin_reg_label')
    hidden = mx.sym.FullyConnected(data=data_sym, name='fc1', num_hidden=1)
    output = mx.sym.LinearRegressionOutput(data=hidden, label=label_sym, name="lro")
    module = SVRGModule(
        symbol=output,
        data_names=['data'],
        label_names=['lin_reg_label'], update_freq=2)
    module.bind(data_shapes=data_iter.provide_data, label_shapes=data_iter.provide_label)
    module.init_params(initializer=mx.init.Uniform(0.01), allow_missing=False,
                       force_init=False, allow_extra=False)
    return data_iter, module
def test_bind_module():
    """bind() must mark both the SVRG module and its internal aux module as bound."""
    _, mod = setup()
    # PEP 8 (E712): test truthiness directly instead of ``== True``.
    assert mod.binded
    assert mod._mod_aux.binded
def test_module_init():
    """Construction must create the auxiliary module used for full-gradient passes."""
    _, module = setup()
    assert module._mod_aux is not None
def test_module_initializer():
    """init_params() must honour sparse storage types declared on variables."""
    def build_sparse_regression(num_features):
        # CSR input dotted with a row-sparse weight vector.
        data_sym = mx.symbol.var("data", stype='csr')
        weight_sym = mx.symbol.var("v", shape=(num_features, 1),
                                   init=mx.init.Uniform(scale=.1),
                                   stype='row_sparse')
        pred = mx.symbol.dot(lhs=data_sym, rhs=weight_sym)
        label_sym = mx.symbol.Variable("label")
        return mx.symbol.LinearRegressionOutput(data=pred, label=label_sym,
                                                name="out")

    # Shape of the data: batch of 128 rows, 100 features.
    batch, num_features = 128, 100
    sym = build_sparse_regression(num_features)
    data = mx.nd.zeros(shape=(batch, num_features), stype='csr')
    label = mx.nd.zeros((batch, 1))
    it = mx.io.NDArrayIter(data=data, label={'label': label},
                           batch_size=batch, last_batch_handle='discard')

    # Create and bind the module, then initialise parameters.
    mod = SVRGModule(symbol=sym, data_names=['data'], label_names=['label'],
                     update_freq=2)
    mod.bind(data_shapes=it.provide_data, label_shapes=it.provide_label)
    mod.init_params()

    # The weight must keep its declared sparse stype and be non-trivially set.
    weight = mod._arg_params['v']
    assert weight.stype == 'row_sparse'
    assert np.sum(weight.asnumpy()) != 0
def test_module_bind():
    """bind() rejects malformed data_shapes and also binds the aux module."""
    x = mx.sym.Variable("data")
    net = mx.sym.FullyConnected(x, num_hidden=1)
    mod = SVRGModule(symbol=net, data_names=['data'], label_names=None, update_freq=2)
    # A flat [name, ndarray] list is not a valid data_shapes argument.
    assertRaises(TypeError, mod.bind, data_shapes=['data', mx.nd.zeros(shape=(2, 1))])
    mod.bind(data_shapes=[('data', (2, 1))])
    # PEP 8 (E712): test truthiness directly instead of ``== True``.
    assert mod.binded
    assert mod._mod_aux.binded
@pytest.mark.skip(reason="Flaky test https://github.com/apache/incubator-mxnet/issues/12510")
@with_seed()
def test_module_save_load(tmpdir):
    """save_checkpoint()/load() must round-trip the symbol, including the
    multi-device case."""
    # Fix: the skip-reason URL pointed at the typo domain "gitsvrhub.com".
    import os
    x = mx.sym.Variable("data")
    y = mx.sym.Variable("softmax_label")
    # NOTE(review): ``y`` is passed positionally (the weight slot of
    # FullyConnected) — confirm this is intended.
    net = mx.sym.FullyConnected(x, y, num_hidden=1)
    mod = SVRGModule(symbol=net, data_names=['data'], label_names=['softmax_label'], update_freq=2)
    mod.bind(data_shapes=[('data', (1, 1))])
    mod.init_params()
    mod.init_optimizer(optimizer='sgd', optimizer_params={'learning_rate': 0.1})
    mod.update()

    # Save and reload, then compare the serialized symbols.
    tmp = str(tmpdir)
    tmp_file = os.path.join(tmp, 'svrg_test_output')
    mod.save_checkpoint(tmp_file, 0, save_optimizer_states=True)
    mod2 = SVRGModule.load(tmp_file, 0, load_optimizer_states=True, data_names=('data', ))
    mod2.bind(data_shapes=[('data', (1, 1))])
    mod2.init_optimizer(optimizer_params={'learning_rate': 0.1})
    assert mod._symbol.tojson() == mod2._symbol.tojson()

    # Multi-device round trip over two CPU contexts.
    mod3 = SVRGModule(symbol=net, data_names=['data'], label_names=['softmax_label'], update_freq=3,
                      context=[mx.cpu(0), mx.cpu(1)])
    mod3.bind(data_shapes=[('data', (10, 10))])
    mod3.init_params()
    mod3.init_optimizer(optimizer_params={'learning_rate': 1.0})
    mod3.update()
    mod3.save_checkpoint(tmp_file, 0, save_optimizer_states=True)
    mod4 = SVRGModule.load(tmp_file, 0, load_optimizer_states=True, data_names=('data', ))
    mod4.bind(data_shapes=[('data', (10, 10))])
    mod4.init_optimizer(optimizer_params={'learning_rate': 1.0})
    assert mod3._symbol.tojson() == mod4._symbol.tojson()
@pytest.mark.skip(reason="Flaky test https://github.com/apache/incubator-mxnet/issues/12510")
@with_seed()
def test_svrgmodule_reshape():
    """forward/backward/update must keep working after reshape() to a new
    batch size, and output shapes must follow the data shape."""
    data = mx.sym.Variable("data")
    sym = mx.sym.FullyConnected(data=data, num_hidden=4, name='fc')
    dshape = (3, 4)
    # Two CPU contexts exercise the multi-device code path.
    mod = SVRGModule(sym, data_names=["data"], label_names=None, context=[mx.cpu(0), mx.cpu(1)], update_freq=2)
    mod.bind(data_shapes=[('data', dshape)])
    mod.init_params()
    # The auxiliary module is initialised separately here.
    mod._mod_aux.init_params()
    mod.init_optimizer(optimizer_params={"learning_rate": 1.0})
    data_batch = mx.io.DataBatch(data=[mx.nd.ones(dshape)], label=None)
    mod.forward(data_batch)
    mod.backward([mx.nd.ones(dshape)])
    mod.update()
    assert mod.get_outputs()[0].shape == dshape

    # Shrink the batch dimension and repeat the same train cycle.
    dshape = (2, 4)
    mod.reshape(data_shapes=[('data', dshape)])
    mod.forward(mx.io.DataBatch(data=[mx.nd.ones(dshape)],
                                label=None))
    mod.backward([mx.nd.ones(dshape)])
    mod.update()
    assert mod.get_outputs()[0].shape == dshape
@pytest.mark.skip(reason="Flaky test https://github.com/apache/incubator-mxnet/issues/12510")
@with_seed()
def test_update_full_grad():
    """update_full_grads() must match a manually averaged full-batch gradient."""
    def create_network():
        # Small linear-regression problem with deterministic One() init so the
        # manual gradient pass below sees the exact same parameters.
        train_data = np.random.randint(1, 5, [10, 2])
        weights = np.array([1.0, 2.0])
        train_label = train_data.dot(weights)
        di = mx.io.NDArrayIter(train_data, train_label, batch_size=5, shuffle=True, label_name='lin_reg_label')
        X = mx.sym.Variable('data')
        Y = mx.symbol.Variable('lin_reg_label')
        fully_connected_layer = mx.sym.FullyConnected(data=X, name='fc1', num_hidden=1)
        lro = mx.sym.LinearRegressionOutput(data=fully_connected_layer, label=Y, name="lro")
        mod = SVRGModule(
            symbol=lro,
            data_names=['data'],
            label_names=['lin_reg_label'], update_freq=2)
        mod.bind(data_shapes=di.provide_data, label_shapes=di.provide_label)
        mod.init_params(initializer=mx.init.One(), allow_missing=False, force_init=False, allow_extra=False)
        mod.init_optimizer(kvstore='local', optimizer='sgd', optimizer_params=(('learning_rate', 0.01),),
                           force_init=False)
        return di, mod

    di, svrg_mod = create_network()

    # Calculates the average of full gradients over number batches
    full_grads_weights = mx.nd.zeros(shape=svrg_mod.get_params()[0]['fc1_weight'].shape)
    arg, aux = svrg_mod.get_params()
    # Mirror the parameters into the aux module, as update_full_grads() does.
    svrg_mod._mod_aux.set_params(arg_params=arg, aux_params=aux)
    num_batch = 2
    for batch in di:
        svrg_mod.forward(batch)
        svrg_mod.backward()
        # Accumulate the raw fc1-weight gradient of every batch.
        full_grads_weights = mx.nd.broadcast_add(svrg_mod._exec_group.grad_arrays[0][0], full_grads_weights, axis=0)
    full_grads_weights /= num_batch
    di.reset()
    svrg_mod.update_full_grads(di)
    # The module's stored full gradient must equal the manual average.
    assert same(full_grads_weights, svrg_mod._param_dict[0]['fc1_weight'])
@pytest.mark.skip(reason="Flaky test https://github.com/apache/incubator-mxnet/issues/12510")
@with_seed()
def test_svrg_with_sgd():
    """After the same number of epochs, SVRG should reach a lower MSE than
    plain SGD on an identically initialised linear model."""
    def create_module_with_sgd():
        # Same data, symbol, One() init and learning rate for both modules,
        # so the only difference is the optimisation scheme.
        train_data = np.random.randint(1, 5, [100, 2])
        weights = np.array([1.0, 2.0])
        train_label = train_data.dot(weights)
        di = mx.io.NDArrayIter(train_data, train_label, batch_size=10, shuffle=True, label_name='lin_reg_label')
        X = mx.sym.Variable('data')
        Y = mx.symbol.Variable('lin_reg_label')
        fully_connected_layer = mx.sym.FullyConnected(data=X, name='fc1', num_hidden=1)
        lro = mx.sym.LinearRegressionOutput(data=fully_connected_layer, label=Y, name="lro")

        reg_mod = mx.mod.Module(
            symbol=lro,
            data_names=['data'],
            label_names=['lin_reg_label'])
        reg_mod.bind(data_shapes=di.provide_data, label_shapes=di.provide_label)
        reg_mod.init_params(initializer=mx.init.One(), allow_missing=False, force_init=False, allow_extra=False)
        reg_mod.init_optimizer(kvstore='local', optimizer='sgd', optimizer_params=(('learning_rate', 0.01),))

        svrg_mod = SVRGModule(symbol=lro,
                              data_names=['data'],
                              label_names=['lin_reg_label'],
                              update_freq=2)
        svrg_mod.bind(data_shapes=di.provide_data, label_shapes=di.provide_label)
        svrg_mod.init_params(initializer=mx.init.One(), allow_missing=False, force_init=False, allow_extra=False)
        svrg_mod.init_optimizer(kvstore='local', optimizer='sgd', optimizer_params=(('learning_rate', 0.01),))

        return di, reg_mod, svrg_mod

    di, reg_mod, svrg_mod = create_module_with_sgd()
    num_epoch = 10

    # Use metric MSE
    metrics = mx.gluon.metric.create("mse")

    # Train with SVRGModule
    for e in range(num_epoch):
        metrics.reset()
        # Refresh the stored full gradient every update_freq epochs.
        if e % svrg_mod.update_freq == 0:
            svrg_mod.update_full_grads(di)
        di.reset()
        for batch in di:
            svrg_mod.forward_backward(data_batch=batch)
            svrg_mod.update()
            svrg_mod.update_metric(metrics, batch.label)
    svrg_mse = metrics.get()[1]

    # Train with SGD standard Module
    di.reset()
    for e in range(num_epoch):
        metrics.reset()
        di.reset()
        for batch in di:
            reg_mod.forward_backward(data_batch=batch)
            reg_mod.update()
            reg_mod.update_metric(metrics, batch.label)
    sgd_mse = metrics.get()[1]

    assert svrg_mse < sgd_mse
@pytest.mark.skip(reason="Flaky test https://github.com/apache/incubator-mxnet/issues/12510")
@with_seed()
def test_accumulate_kvstore():
    """_accumulate_kvstore() must aggregate a pushed list of gradients."""
    # Test KVStore behavior when push a list of values
    kv = mx.kv.create('local')
    kv.init("fc1_weight", mx.nd.zeros(shape=(1, 2)))
    kv.init("fc1_weight_full", mx.nd.zeros(shape=(1, 2)))
    b = [mx.nd.ones(shape=(1, 2)) for i in range(4)]
    a = mx.nd.zeros(shape=(1, 2))
    # Pushing a list of four ones-arrays sums them into [4, 4] on the key.
    kv.push("fc1_weight_full", b)
    kv.pull("fc1_weight_full", out=a)
    assert same(a, [mx.nd.array([4, 4])])
    assert kv.num_workers == 1

    # Test accumulate in KVStore and allocate gradients
    kv_test = mx.kv.create('local')
    _, svrg_mod = setup()
    svrg_mod.init_optimizer(kvstore=kv_test, optimizer='sgd', optimizer_params=(('learning_rate', 0.01),),
                            force_init=False)
    svrg_mod._accumulate_kvstore("fc1_weight", b)
    # One accumulated gradient dict per context.
    assert len(svrg_mod._param_dict) == svrg_mod._ctx_len
    assert same(svrg_mod._param_dict[0]["fc1_weight"], b[0])
@pytest.mark.skip(reason="Flaky test https://github.com/apache/incubator-mxnet/issues/12510")
@with_seed()
def test_fit():
    """End-to-end fit() should converge the linear model below an MSE bound."""
    di, mod = setup()
    num_epoch = 100
    metric = mx.gluon.metric.create("mse")
    mod.fit(di, eval_metric=metric, optimizer='sgd', optimizer_params=(('learning_rate', 0.025),), num_epoch=num_epoch,
            kvstore='local')

    # Estimated MSE for using SGD optimizer of lr = 0.025, SVRG MSE should be smaller
    estimated_mse = 1e-5
    assert metric.get()[1] < estimated_mse
| |
import numpy
import pytest
import chainerx
import chainerx.testing
def _make_onehot_arrays(shape, dtype, value1, value2):
a = numpy.zeros(shape, dtype)
b = numpy.zeros(shape, dtype)
indices = list(numpy.ndindex(*shape))
a[indices[len(indices) // 2]] = value1
b[indices[len(indices) // 2]] = value2
return a, b
@pytest.mark.parametrize(
    'dtype1,dtype2',
    list(zip(chainerx.testing.all_dtypes, chainerx.testing.all_dtypes)) + [
        (numpy.float32, numpy.int64),  # arrays with different dtypes
    ])
@pytest.mark.parametrize('shape,transpose', [
    ((), False),
    ((0,), False),
    ((1,), False),
    ((2, 3), False),
    ((2, 3), True),  # arrays with different strides
])
def test_assert_array_equal(shape, transpose, dtype1, dtype2):
    """assert_array_equal must accept equal arrays across every combination of
    numpy/chainerx arguments, differing dtypes and differing strides."""
    np_a = numpy.arange(2, 2 + numpy.prod(shape)).astype(dtype1).reshape(shape)
    if transpose:
        # Same values as np_a but with transposed (non-default) strides.
        np_b = numpy.empty(np_a.T.shape, dtype=dtype2).T
        np_b[:] = np_a
    else:
        np_b = numpy.arange(2, 2 + numpy.prod(shape)
                            ).astype(dtype2).reshape(shape)
    chx_a = chainerx.array(np_a)
    chx_b = chainerx.array(np_b)

    # Test precondition checks
    assert np_a.shape == np_b.shape
    if transpose:
        assert np_a.strides != np_b.strides, 'transpose=True is meaningless'

    # Test checks
    chainerx.testing.assert_array_equal(np_a, np_a)  # np-np (same obj)
    chainerx.testing.assert_array_equal(chx_a, chx_a)  # chx-chx (same obj)
    chainerx.testing.assert_array_equal(np_a, np_b)  # np-np (diff. obj)
    chainerx.testing.assert_array_equal(chx_a, chx_b)  # chx-chx (diff. obj)
    chainerx.testing.assert_array_equal(np_a, chx_b)  # np-chx
    chainerx.testing.assert_array_equal(chx_a, np_b)  # chx-np
@pytest.mark.parametrize('shape', [(), (1,), (2, 3)])
def test_assert_array_equal_fail(shape, dtype):
    """Arrays that differ in a single element must not compare equal."""
    lhs, rhs = _make_onehot_arrays(shape, dtype, 0, 2)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_array_equal(lhs, rhs)
@pytest.mark.parametrize('value1,value2', [
    (True, 1),
    (True, 1.0),
    (False, 0),
    (False, 0.0),
    (2.0, 2),
    (numpy.int32(2), 2.0),
    (float('nan'), numpy.float32('nan')),
])
def test_assert_array_equal_scalar(value1, value2):
    """Numerically equal scalars compare equal regardless of Python/numpy
    type; the scalar path also treats NaN as equal to NaN."""
    # The check must be symmetric in its arguments.
    chainerx.testing.assert_array_equal(value1, value2)
    chainerx.testing.assert_array_equal(value2, value1)
@pytest.mark.parametrize('value1,value2', [
    (2, 3),
    (2.0, 3),
    (True, 0),
    (True, -1),
    (False, 1),
    (float('nan'), float('inf')),
])
def test_assert_array_equal_fail_scalar(value1, value2):
    """Numerically different scalars must fail, in either argument order."""
    with pytest.raises(AssertionError):
        chainerx.testing.assert_array_equal(value1, value2)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_array_equal(value2, value1)
@pytest.mark.parametrize(
    'dtype1,dtype2',
    list(zip(chainerx.testing.all_dtypes, chainerx.testing.all_dtypes)) + [
        (numpy.float32, numpy.int64),  # arrays with different dtypes
    ])
@pytest.mark.parametrize('shape,transpose', [
    ((), False),
    ((0,), False),
    ((1,), False),
    ((2, 3), False),
    ((2, 3), True),  # arrays with different strides
])
def test_assert_allclose(shape, transpose, dtype1, dtype2):
    """assert_allclose must accept arrays within tolerance across every
    combination of numpy/chainerx arguments, dtypes and strides."""
    # float16 carries too little precision for the tighter tolerance.
    atol = 1e-3 if numpy.dtype('float16') in [dtype1, dtype2] else 1e-5
    np_a = numpy.arange(2, 2 + numpy.prod(shape)).astype(dtype1).reshape(shape)
    if transpose:
        # Same values as np_a but with transposed (non-default) strides.
        np_b = numpy.empty(np_a.T.shape, dtype=dtype2).T
        np_b[:] = np_a
    else:
        np_b = numpy.arange(2, 2 + numpy.prod(shape)
                            ).astype(dtype2).reshape(shape)

    # Give some perturbation only if dtype is float
    if np_a.dtype.kind in ('f', 'c'):
        np_a += atol * 1e-1
    if np_b.dtype.kind in ('f', 'c'):
        np_b -= atol * 1e-1

    chx_a = chainerx.array(np_a)
    chx_b = chainerx.array(np_b)

    # Test precondition checks
    assert np_a.shape == np_b.shape
    if transpose:
        assert np_a.strides != np_b.strides, 'transpose=True is meaningless'

    # Test checks
    chainerx.testing.assert_allclose(np_a, np_a, atol=atol)  # np-np (same obj)
    chainerx.testing.assert_allclose(
        chx_a, chx_a, atol=atol)  # chx-chx (same obj)
    chainerx.testing.assert_allclose(
        np_a, np_b, atol=atol)  # np-np (diff. obj)
    chainerx.testing.assert_allclose(
        chx_a, chx_b, atol=atol)  # chx-chx (diff. obj)
    chainerx.testing.assert_allclose(np_a, chx_b, atol=atol)  # np-chx
    chainerx.testing.assert_allclose(chx_a, np_b, atol=atol)  # chx-np
@pytest.mark.parametrize('shape', [(), (1,), (2, 3)])
def test_assert_allclose_fail(shape, dtype):
    """A difference far beyond the default tolerances must fail."""
    lhs, rhs = _make_onehot_arrays(shape, dtype, 0, 2)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_allclose(lhs, rhs)
@pytest.mark.parametrize('value1,value2', [
    (True, 1),
    (True, 1.0),
    (False, 0),
    (False, 0.0),
    (2.0, 2),
    (numpy.int32(2), 2.0),
    (float('nan'), numpy.float32('nan')),
])
def test_assert_allclose_scalar(value1, value2):
    """Numerically equal scalars are allclose regardless of type; NaN==NaN
    passes with the default equal_nan behaviour."""
    # The check must be symmetric in its arguments.
    chainerx.testing.assert_allclose(value1, value2)
    chainerx.testing.assert_allclose(value2, value1)
@pytest.mark.parametrize('value1,value2', [
    (2, 3),
    (2.0, 3),
    (True, 0),
    (True, -1),
    (False, 1),
    (float('nan'), float('inf')),
])
def test_assert_allclose_fail_scalar(value1, value2):
    """Scalars outside tolerance must fail, in either argument order."""
    with pytest.raises(AssertionError):
        chainerx.testing.assert_allclose(value1, value2)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_allclose(value2, value1)
def test_assert_allclose_fail_equal_nan():
    """NaN compares equal to NaN by default; equal_nan=False must reject it."""
    # Scalar path: the default accepts NaN == NaN.
    chainerx.testing.assert_allclose(float('nan'), float('nan'))
    with pytest.raises(AssertionError):
        chainerx.testing.assert_allclose(
            float('nan'), float('nan'), equal_nan=False)

    # Array path behaves the same way.
    shape = (2, 3)
    dtype = numpy.float32
    a, b = _make_onehot_arrays(shape, dtype, float('nan'), float('nan'))
    chainerx.testing.assert_allclose(a, b)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_allclose(a, b, equal_nan=False)
@pytest.mark.parametrize('shape', [(), (1,), (2, 3)])
def test_assert_allclose_exact(shape, dtype):
    """Identical arrays are always allclose."""
    lhs, rhs = _make_onehot_arrays(shape, dtype, 1.0, 1.0)
    chainerx.testing.assert_allclose(lhs, rhs)
def test_assert_allclose_close_default_tol():
    """With default tolerances, relative errors around 5e-8 pass and around
    2e-7 fail, for both small and large magnitudes."""
    dtype = numpy.float64
    shape = (2, 3)
    # small absolute error
    a, b = _make_onehot_arrays(shape, dtype, 1.0, 1.0 + 5e-8)
    chainerx.testing.assert_allclose(a, b)
    # large absolute error (but small relative to 1e8)
    a, b = _make_onehot_arrays(shape, dtype, 1e8, 1e8 + 5)
    chainerx.testing.assert_allclose(a, b)
    # expected failure: small absolute error
    a, b = _make_onehot_arrays(shape, dtype, 1.0, 1.0 + 2e-7)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_allclose(a, b)
    # expected failure: large absolute error
    a, b = _make_onehot_arrays(shape, dtype, 1e8, 1e8 + 20)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_allclose(a, b)
def test_assert_allclose_rtol(float_dtype):
    """rtol alone (atol=0) must bound the acceptable relative error."""
    shape = (2, 3)
    # Relative error 50/1e4 = 0.5% is within rtol=1%.
    ok_a, ok_b = _make_onehot_arrays(shape, float_dtype, 1e4, 1e4 + 50)
    chainerx.testing.assert_allclose(ok_a, ok_b, rtol=1e-2, atol=0)
    # Relative error 200/1e4 = 2% exceeds rtol=1%.
    bad_a, bad_b = _make_onehot_arrays(shape, float_dtype, 1e4, 1e4 + 200)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_allclose(bad_a, bad_b, rtol=1e-2, atol=0)
def test_assert_allclose_atol(float_dtype):
    """atol alone (rtol=0) must bound the acceptable absolute error."""
    shape = (2, 3)
    # An absolute error of 1e-2 is below atol=2e-2.
    ok_a, ok_b = _make_onehot_arrays(shape, float_dtype, 1e-3, 1e-3 + 1e-2)
    chainerx.testing.assert_allclose(ok_a, ok_b, rtol=0, atol=2e-2)
    # The same 1e-2 error exceeds atol=5e-3.
    bad_a, bad_b = _make_onehot_arrays(shape, float_dtype, 1e-3, 1e-3 + 1e-2)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_allclose(bad_a, bad_b, rtol=0, atol=5e-3)
def test_assert_array_equal_ex_fail_dtype():
    """assert_array_equal_ex compares dtypes unless dtype_check=False."""
    shape = (3, 2)
    dtype1 = numpy.float32
    dtype2 = numpy.int64
    # Same values, different dtypes.
    a = numpy.arange(2, 2 + numpy.prod(shape)).astype(dtype1).reshape(shape)
    b = a.astype(dtype2)
    with pytest.raises(AssertionError):
        chainerx.testing.assert_array_equal_ex(a, b)
    with pytest.raises(AssertionError):
        # strides_check does not affect dtype_check
        chainerx.testing.assert_array_equal_ex(a, b, strides_check=False)
    # Disabling the dtype comparison makes equal values pass.
    chainerx.testing.assert_array_equal_ex(a, b, dtype_check=False)
def test_assert_array_equal_ex_fail_strides():
    """assert_array_equal_ex compares strides unless the check is disabled."""
    shape = (3, 2)
    dtype = numpy.float32
    a = numpy.arange(2, 2 + numpy.prod(shape)).astype(dtype).reshape(shape)
    # Same values as ``a`` but with transposed (different) strides.
    b = numpy.empty(a.T.shape, dtype).T
    b[:] = a
    with pytest.raises(AssertionError):
        chainerx.testing.assert_array_equal_ex(a, b)
    chainerx.testing.assert_array_equal_ex(a, b, strides_check=False)
    # dtype_check=False implies strides_check=False
    chainerx.testing.assert_array_equal_ex(a, b, dtype_check=False)
| |
"""Standalone Authenticator."""
import argparse
import collections
import logging
import socket
import threading

import OpenSSL
import six
import zope.component
import zope.interface

from acme import challenges
from acme import standalone as acme_standalone

from letsencrypt import errors
from letsencrypt import interfaces
from letsencrypt.plugins import common
from letsencrypt.plugins import util
logger = logging.getLogger(__name__)
class ServerManager(object):
    """Standalone servers manager.

    Manager for `ACMEServer` and `ACMETLSServer` instances.

    `certs` and `http_01_resources` correspond to
    `acme.crypto_util.SSLSocket.certs` and
    `acme.crypto_util.SSLSocket.http_01_resources` respectively. All
    created servers share the same certificates and resources, so if
    you're running both TLS and non-TLS instances, HTTP01 handlers
    will serve the same URLs!

    """
    # Pairs a server object with the thread running its serve_forever loop.
    _Instance = collections.namedtuple("_Instance", "server thread")

    def __init__(self, certs, http_01_resources):
        # port -> _Instance mapping of currently running servers
        self._instances = {}
        self.certs = certs
        self.http_01_resources = http_01_resources

    def run(self, port, challenge_type):
        """Run ACME server on specified ``port``.

        This method is idempotent, i.e. all calls with the same pair of
        ``(port, challenge_type)`` will reuse the same server.

        :param int port: Port to run the server on.
        :param challenge_type: Subclass of `acme.challenges.Challenge`,
            either `acme.challenge.HTTP01` or `acme.challenges.TLSSNI01`.

        :returns: Server instance.
        :rtype: ACMEServerMixin

        """
        assert challenge_type in (challenges.TLSSNI01, challenges.HTTP01)
        if port in self._instances:
            return self._instances[port].server

        address = ("", port)
        try:
            if challenge_type is challenges.TLSSNI01:
                server = acme_standalone.TLSSNI01Server(address, self.certs)
            else:  # challenges.HTTP01
                server = acme_standalone.HTTP01Server(
                    address, self.http_01_resources)
        except socket.error as error:
            raise errors.StandaloneBindError(error, port)

        # Serve in the background; the main thread keeps configuring.
        thread = threading.Thread(
            # pylint: disable=no-member
            target=server.serve_forever)
        thread.start()

        # if port == 0, then random free port on OS is taken
        # pylint: disable=no-member
        real_port = server.socket.getsockname()[1]
        # NOTE(review): entries are keyed by the OS-assigned real_port, so a
        # request for port=0 never hits the reuse check above and always
        # starts a fresh server -- confirm this is intended.
        self._instances[real_port] = self._Instance(server, thread)
        return server

    def stop(self, port):
        """Stop ACME server running on the specified ``port``.

        Shuts the server down, joins its serving thread and forgets it.

        :param int port:

        """
        instance = self._instances[port]
        logger.debug("Stopping server at %s:%d...",
                     *instance.server.socket.getsockname()[:2])
        instance.server.shutdown()
        instance.thread.join()
        del self._instances[port]

    def running(self):
        """Return all running instances.

        Once the server is stopped using `stop`, it will not be
        returned.

        :returns: Mapping from ``port`` to ``server``.
        :rtype: dict

        """
        return dict((port, instance.server) for port, instance
                    in six.iteritems(self._instances))
# Challenge types this plugin can answer, in order of preference.
SUPPORTED_CHALLENGES = [challenges.TLSSNI01, challenges.HTTP01]


def supported_challenges_validator(data):
    """Supported challenges validator for the `argparse`.

    It should be passed as `type` argument to `add_argument`.

    :param str data: Comma-separated challenge type names.
    :returns: The validated ``data`` string, unchanged.
    :raises argparse.ArgumentTypeError: if a name is not a known ACME
        challenge type or is not supported by this plugin.

    """
    challs = data.split(",")
    # Names that are not ACME challenge types at all.
    unrecognized = [name for name in challs
                    if name not in challenges.Challenge.TYPES]
    if unrecognized:
        raise argparse.ArgumentTypeError(
            "Unrecognized challenges: {0}".format(", ".join(unrecognized)))
    # Valid ACME challenge names that this particular plugin cannot serve.
    choices = set(chall.typ for chall in SUPPORTED_CHALLENGES)
    if not set(challs).issubset(choices):
        raise argparse.ArgumentTypeError(
            "Plugin does not support the following (valid) "
            "challenges: {0}".format(", ".join(set(challs) - choices)))
    return data
class Authenticator(common.Plugin):
    """Standalone Authenticator.

    This authenticator creates its own ephemeral TCP listener on the
    necessary port in order to respond to incoming tls-sni-01 and http-01
    challenges from the certificate authority. Therefore, it does not
    rely on any existing server program.

    """
    zope.interface.implements(interfaces.IAuthenticator)
    zope.interface.classProvides(interfaces.IPluginFactory)

    description = "Automatically use a temporary webserver"

    def __init__(self, *args, **kwargs):
        super(Authenticator, self).__init__(*args, **kwargs)

        # one self-signed key for all tls-sni-01 certificates
        self.key = OpenSSL.crypto.PKey()
        self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)

        # server instance -> set of achalls it is currently answering
        self.served = collections.defaultdict(set)

        # Stuff below is shared across threads (i.e. servers read
        # values, main thread writes). Due to the nature of CPython's
        # GIL, the operations are safe, c.f.
        # https://docs.python.org/2/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe
        self.certs = {}
        self.http_01_resources = set()

        self.servers = ServerManager(self.certs, self.http_01_resources)

    @classmethod
    def add_parser_arguments(cls, add):
        # CLI flag: comma-separated challenge names, validated up front.
        add("supported-challenges",
            help="Supported challenges. Preferred in the order they are listed.",
            type=supported_challenges_validator,
            default=",".join(chall.typ for chall in SUPPORTED_CHALLENGES))

    @property
    def supported_challenges(self):
        """Challenges supported by this plugin."""
        return [challenges.Challenge.TYPES[name] for name in
                self.conf("supported-challenges").split(",")]

    @property
    def _necessary_ports(self):
        # Ports we will need to bind, derived from the configured challenges.
        necessary_ports = set()
        if challenges.HTTP01 in self.supported_challenges:
            necessary_ports.add(self.config.http01_port)
        if challenges.TLSSNI01 in self.supported_challenges:
            necessary_ports.add(self.config.tls_sni_01_port)
        return necessary_ports

    def more_info(self):  # pylint: disable=missing-docstring
        return("This authenticator creates its own ephemeral TCP listener "
               "on the necessary port in order to respond to incoming "
               "tls-sni-01 and http-01 challenges from the certificate "
               "authority. Therefore, it does not rely on any existing "
               "server program.")

    def prepare(self):  # pylint: disable=missing-docstring
        pass

    def get_chall_pref(self, domain):
        # pylint: disable=unused-argument,missing-docstring
        return self.supported_challenges

    def perform(self, achalls):  # pylint: disable=missing-docstring
        # Fail fast if any port we would need is already bound by another
        # process, before attempting to start servers.
        if any(util.already_listening(port) for port in self._necessary_ports):
            raise errors.MisconfigurationError(
                "At least one of the (possibly) required ports is "
                "already taken.")

        try:
            return self.perform2(achalls)
        except errors.StandaloneBindError as error:
            # NOTE(review): zope.component is used here but only
            # zope.interface is imported at module level -- confirm that
            # ``import zope.component`` is present.
            display = zope.component.getUtility(interfaces.IDisplay)

            if error.socket_error.errno == socket.errno.EACCES:
                display.notification(
                    "Could not bind TCP port {0} because you don't have "
                    "the appropriate permissions (for example, you "
                    "aren't running this program as "
                    "root).".format(error.port))
            elif error.socket_error.errno == socket.errno.EADDRINUSE:
                display.notification(
                    "Could not bind TCP port {0} because it is already in "
                    "use by another process on this system (such as a web "
                    "server). Please stop the program in question and then "
                    "try again.".format(error.port))
            else:
                raise  # XXX: How to handle unknown errors in binding?

    def perform2(self, achalls):
        """Perform achallenges without IDisplay interaction.

        Starts (or reuses) the appropriate server per challenge type,
        registers the challenge resource/certificate with it, and records
        which server serves which achall for later cleanup.

        :returns: list of challenge responses, one per achall.

        """
        responses = []

        for achall in achalls:
            if isinstance(achall.chall, challenges.HTTP01):
                server = self.servers.run(
                    self.config.http01_port, challenges.HTTP01)
                response, validation = achall.response_and_validation()
                self.http_01_resources.add(
                    acme_standalone.HTTP01RequestHandler.HTTP01Resource(
                        chall=achall.chall, response=response,
                        validation=validation))
            else:  # tls-sni-01
                server = self.servers.run(
                    self.config.tls_sni_01_port, challenges.TLSSNI01)
                response, (cert, _) = achall.response_and_validation(
                    cert_key=self.key)
                self.certs[response.z_domain] = (self.key, cert)
            self.served[server].add(achall)
            responses.append(response)

        return responses

    def cleanup(self, achalls):  # pylint: disable=missing-docstring
        # reduce self.served and close servers if none challenges are served
        for server, server_achalls in self.served.items():
            for achall in achalls:
                if achall in server_achalls:
                    server_achalls.remove(achall)
        # Stop every server that no longer serves any challenge.
        for port, server in six.iteritems(self.servers.running()):
            if not self.served[server]:
                self.servers.stop(port)
| |
import pytest
from .context import aorun
import numpy as np
import torch
from aorun.models import Model
from aorun.layers import Dense
from aorun.layers import Conv2D
from aorun.layers import Dropout
from aorun.layers import Recurrent
from aorun.layers import Activation
from aorun.layers import Flatten
from aorun.layers import TimeDistributed
from aorun.optimizers import SGD
from aorun.losses import mean_squared_error
def test_model_constructor_empty():
    """A Model built with no arguments starts with an empty layer list."""
    assert len(Model().layers) == 0
def test_model_constructor_layers():
    """Layers passed to the constructor are registered in order."""
    net = Model(Dense(10), Activation('relu'), Dense(1))
    assert len(net.layers) == 3
    assert type(net.layers[0]) == Dense
    assert type(net.layers[1]) == Activation
def test_model_add_layers():
    """add() appends layers one at a time, preserving order."""
    net = Model()
    for layer in (Dense(10), Activation('relu'), Dense(1)):
        net.add(layer)
    assert len(net.layers) == 3
    assert type(net.layers[0]) == Dense
    assert type(net.layers[1]) == Activation
def test_model_forward():
    """forward() and predict() both map a (2, 4) input to a (2, 20) output."""
    def fresh_model():
        # Three stacked dense layers; the last one fixes the output width.
        return Model(Dense(10, input_dim=4), Dense(1), Dense(20))

    out = fresh_model().forward(torch.randn(2, 4))
    assert out.size() == (2, 20)

    out = fresh_model().predict(torch.randn(2, 4))
    assert out.size() == (2, 20)
def test_model_simple_fit():
    """fit() with the 'sgd' optimizer string returns a per-epoch loss history
    of python floats that decreases monotonically."""
    x = torch.rand(20, 4)
    y = torch.rand(20, 10)
    model = Model(
        Dense(10, input_dim=x.size()[-1]),
        Activation('relu'),
        Dense(5),
        Activation('relu'),
        Dense(y.size()[-1])
    )
    # Fix: an SGD instance was previously constructed here but never used —
    # the 'sgd' string below is what actually selects the optimizer.
    history = model.fit(x, y, loss=mean_squared_error, optimizer='sgd',
                        epochs=10, verbose=1)
    assert len(history['loss']) == 10
    assert all(type(v) is float for v in history['loss'])
    assert history['loss'] == sorted(history['loss'], reverse=True)
def test_model_fit_unknown_loss():
    """fit() must raise when given an unknown loss identifier."""
    x = torch.rand(20, 4)
    y = torch.rand(20, 10)
    model = Model(
        Dense(10, input_dim=x.size()[-1]),
        Activation('relu'),
        Dense(5),
        Activation('relu'),
        Dense(y.size()[-1])
    )
    assert len(model.params) > 0
    # Fix: the exception info was bound ``as e`` but never inspected.
    # NOTE(review): sibling tests pass ``epochs=``; ``n_epoch=`` here looks
    # like a typo, but the call raises on the unknown loss regardless.
    with pytest.raises(Exception):
        model.fit(x, y, loss='UNKNOWN_TEST', batch_size=10, n_epoch=5)
def test_model_loss_str_param():
    """Both the short ('mse') and long ('mean_squared_error') loss names
    select a working loss function with identical training behaviour."""
    x = torch.rand(20, 4)
    y = torch.rand(20, 10)
    model = Model(
        Dense(10, input_dim=x.size()[-1]),
        Activation('relu'),
        Dense(5),
        Activation('relu'),
        Dense(y.size()[-1])
    )
    opt = SGD(lr=0.01, momentum=0.9)

    # Short alias.
    history = model.fit(x, y, loss='mse', optimizer=opt, epochs=10)
    assert len(history['loss']) == 10
    assert all(type(v) is float for v in history['loss'])
    assert history['loss'] == sorted(history['loss'], reverse=True)

    # Full name.
    loss = 'mean_squared_error'
    history = model.fit(x, y, loss=loss, optimizer=opt, epochs=10)
    assert len(history['loss']) == 10
    assert all(type(v) is float for v in history['loss'])
    assert history['loss'] == sorted(history['loss'], reverse=True)
def test_model_custom_loss():
    """fit() accepts a user-supplied callable as the loss function."""
    inputs = torch.rand(20, 4)
    targets = torch.rand(20, 10)
    model = Model(
        Dense(10, input_dim=inputs.size()[-1]),
        Activation('relu'),
        Dense(5),
        Activation('relu'),
        Dense(targets.size()[-1])
    )
    optimizer = SGD(lr=0.01, momentum=0.9)

    def mean_abs_error(y_true, y_pred):
        # Plain mean absolute error, matching the aorun loss signature.
        return torch.mean(torch.abs(y_true - y_pred))

    history = model.fit(inputs, targets, loss=mean_abs_error,
                        optimizer=optimizer, epochs=10)
    assert len(history['loss']) == 10
    assert all(type(v) is float for v in history['loss'])
    assert history['loss'] == sorted(history['loss'], reverse=True)
def test_model_numpy_friendly():
    """fit()/predict() accept numpy inputs, and predict() returns an ndarray."""
    X = np.random.normal(size=[10, 10]).astype('float32')
    y = np.random.normal(size=[10, 1]).astype('float32')
    model = Model(
        Dense(10, input_dim=X.shape[-1]),
        Activation('relu'),
        Dense(5),
        Activation('relu'),
        Dense(y.shape[-1])
    )
    history = model.fit(X, y=y, loss='mse', optimizer='sgd', epochs=10)
    y_pred = model.predict(X)
    assert type(y_pred) is np.ndarray
    assert len(history['loss']) == 10
    assert all(type(v) is float for v in history['loss'])
    assert history['loss'] == sorted(history['loss'], reverse=True)
# NOTE(review): function name has a typo ("optmizer"); kept as-is because
# renaming would change the collected test id.
def test_model_adam_optmizer():
    """fit() with optimizer='adam' trains and predict() returns an ndarray."""
    X = np.random.normal(size=[10, 10]).astype('float32')
    y = np.random.normal(size=[10, 1]).astype('float32')
    model = Model(
        Dense(10, input_dim=X.shape[-1]),
        Activation('relu'),
        Dense(5),
        Activation('relu'),
        Dense(y.shape[-1])
    )
    history = model.fit(X, y=y, loss='mse', optimizer='adam', epochs=10)
    y_pred = model.predict(X)
    assert type(y_pred) is np.ndarray
    assert len(history['loss']) == 10
    assert all(type(v) is float for v in history['loss'])
    assert history['loss'] == sorted(history['loss'], reverse=True)
def test_model_validation_split():
    """val_split reserves part of the data and adds a 'val_loss' history."""
    X = np.random.normal(size=[10, 10]).astype('float32')
    y = np.random.normal(size=[10, 1]).astype('float32')
    model = Model(
        Dense(10, input_dim=X.shape[-1]),
        Activation('relu'),
        Dense(5),
        Activation('relu'),
        Dense(y.shape[-1])
    )
    # 10% of the rows are held out for validation.
    history = model.fit(X, y=y, loss='mse', val_split=0.1)
    y_pred = model.predict(X)
    assert type(y_pred) is np.ndarray
    assert 'loss' in history
    assert 'val_loss' in history
    assert all(type(v) is float for v in history['loss'])
    assert all(type(v) is float for v in history['val_loss'])
    assert history['loss'] == sorted(history['loss'], reverse=True)
def test_model_validation_data():
    """val_data supplies an explicit validation set and adds 'val_loss'."""
    X = np.random.normal(size=[10, 10]).astype('float32')
    y = np.random.normal(size=[10, 1]).astype('float32')
    model = Model(
        Dense(10, input_dim=X.shape[-1]),
        Activation('relu'),
        Dense(5),
        Activation('relu'),
        Dense(y.shape[-1])
    )
    # Reusing the training pair as the validation set keeps the test simple.
    history = model.fit(X, y=y, loss='mse', val_data=(X, y))
    y_pred = model.predict(X)
    assert type(y_pred) is np.ndarray
    assert 'loss' in history
    assert 'val_loss' in history
    assert all(type(v) is float for v in history['loss'])
    assert all(type(v) is float for v in history['val_loss'])
    assert history['loss'] == sorted(history['loss'], reverse=True)
def test_model_conv2d():
    """A Conv2D front-end followed by Flatten/Dense trains on NCHW input."""
    X = np.random.normal(size=[10, 3, 10, 10]).astype('float32')
    y = np.random.normal(size=[10, 1]).astype('float32')
    model = Model(
        # input_dim is the per-sample (C, H, W) shape.
        Conv2D(4, kernel_size=(3, 3), input_dim=X.shape[1:]),
        Flatten(),
        Dense(5),
        Activation('relu'),
        Dense(y.shape[-1])
    )
    history = model.fit(X, y=y, loss='mse', val_data=(X, y))
    y_pred = model.predict(X)
    assert type(y_pred) is np.ndarray
    assert 'loss' in history
    assert 'val_loss' in history
    assert all(type(v) is float for v in history['loss'])
    assert all(type(v) is float for v in history['val_loss'])
    assert history['loss'] == sorted(history['loss'], reverse=True)
def test_model_conv2d_dropout():
    """A conv model with Dropout trains; validation loss must be decreasing."""
    X = np.random.normal(size=[10, 3, 10, 10]).astype('float32')
    y = np.random.normal(size=[10, 1]).astype('float32')
    model = Model(
        Conv2D(4, kernel_size=(3, 3), input_dim=X.shape[1:]),
        Flatten(),
        Dense(5),
        Activation('relu'),
        Dense(5),
        Dropout(0.5),
        Activation('relu'),
        Dense(y.shape[-1])
    )
    history = model.fit(X, y=y, loss='mse', epochs=10, val_data=(X, y))
    y_pred = model.predict(X)
    assert type(y_pred) is np.ndarray
    assert 'loss' in history
    assert 'val_loss' in history
    assert all(type(v) is float for v in history['loss'])
    assert all(type(v) is float for v in history['val_loss'])
    # With dropout active, only the validation loss is checked for
    # monotonic decrease here (not the noisy training loss).
    assert history['val_loss'] == sorted(history['val_loss'], reverse=True)
def test_model_recurrent():
    """A recurrent model fits sequence data and predicts numpy arrays."""
    seq_in = np.random.normal(size=[2, 3, 4]).astype('float32')
    seq_out = np.random.normal(size=[2, 3, 2]).astype('float32')
    net = Model(Recurrent(units=2, length=3, input_dim=4), Activation('relu'))
    history = net.fit(seq_in, seq_out, loss='mse')
    preds = net.predict(seq_in)
    assert type(preds) is np.ndarray
    assert 'loss' in history
    assert history['loss'] == sorted(history['loss'], reverse=True)
def test_model_recurrent_time_distributed():
    """TimeDistributed(Dense) stacked on a Recurrent layer should train."""
    inputs = np.random.normal(size=[2, 3, 4]).astype('float32')
    targets = np.random.normal(size=[2, 3, 10]).astype('float32')
    net = Model(
        Recurrent(units=2, length=3, input_dim=4),
        Activation('relu'),
        TimeDistributed(Dense(units=10)),
    )
    history = net.fit(inputs, targets, loss='mse')
    # Prediction is exercised for coverage; only the loss trend is asserted.
    net.predict(inputs)
    assert history['loss'] == sorted(history['loss'], reverse=True)
| |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""`LinearOperator` acting like a lower triangular matrix."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg_impl as linalg
from tensorflow.python.ops.linalg import linear_operator
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.util.tf_export import tf_export
__all__ = [
"LinearOperatorLowerTriangular",
]
@tf_export("linalg.LinearOperatorLowerTriangular")
class LinearOperatorLowerTriangular(linear_operator.LinearOperator):
  """`LinearOperator` acting like a [batch] square lower triangular matrix.
  This operator acts like a [batch] lower triangular matrix `A` with shape
  `[B1,...,Bb, N, N]` for some `b >= 0`. The first `b` indices index a
  batch member. For every batch index `(i1,...,ib)`, `A[i1,...,ib, : :]` is
  an `N x N` matrix.
  `LinearOperatorLowerTriangular` is initialized with a `Tensor` having
  dimensions `[B1,...,Bb, N, N]`. The upper triangle of the last two
  dimensions is ignored.
  ```python
  # Create a 2 x 2 lower-triangular linear operator.
  tril = [[1., 2.], [3., 4.]]
  operator = LinearOperatorLowerTriangular(tril)
  # The upper triangle is ignored.
  operator.to_dense()
  ==> [[1., 0.]
       [3., 4.]]
  operator.shape
  ==> [2, 2]
  operator.log_abs_determinant()
  ==> scalar Tensor
  x = ... Shape [2, 4] Tensor
  operator.matmul(x)
  ==> Shape [2, 4] Tensor
  # Create a [2, 3] batch of 4 x 4 linear operators.
  tril = tf.random_normal(shape=[2, 3, 4, 4])
  operator = LinearOperatorLowerTriangular(tril)
  ```
  #### Shape compatibility
  This operator acts on [batch] matrix with compatible shape.
  `x` is a batch matrix with compatible shape for `matmul` and `solve` if
  ```
  operator.shape = [B1,...,Bb] + [N, N], with b >= 0
  x.shape = [B1,...,Bb] + [N, R], with R >= 0.
  ```
  #### Performance
  Suppose `operator` is a `LinearOperatorLowerTriangular` of shape `[N, N]`,
  and `x.shape = [N, R]`. Then
  * `operator.matmul(x)` involves `N^2 * R` multiplications.
  * `operator.solve(x)` involves `N * R` size `N` back-substitutions.
  * `operator.determinant()` involves a size `N` `reduce_prod`.
  If instead `operator` and `x` have shape `[B1,...,Bb, N, N]` and
  `[B1,...,Bb, N, R]`, every operation increases in complexity by `B1*...*Bb`.
  #### Matrix property hints
  This `LinearOperator` is initialized with boolean flags of the form `is_X`,
  for `X = non_singular, self_adjoint, positive_definite, square`.
  These have the following meaning:
  * If `is_X == True`, callers should expect the operator to have the
    property `X`. This is a promise that should be fulfilled, but is *not* a
    runtime assert. For example, finite floating point precision may result
    in these promises being violated.
  * If `is_X == False`, callers should expect the operator to not have `X`.
  * If `is_X == None` (the default), callers should have no expectation either
    way.
  """
  def __init__(self,
               tril,
               is_non_singular=None,
               is_self_adjoint=None,
               is_positive_definite=None,
               is_square=None,
               name="LinearOperatorLowerTriangular"):
    r"""Initialize a `LinearOperatorLowerTriangular`.
    Args:
      tril: Shape `[B1,...,Bb, N, N]` with `b >= 0`, `N >= 0`.
        The lower triangular part of `tril` defines this operator. The strictly
        upper triangle is ignored. Allowed dtypes: `float16`, `float32`,
        `float64`.
      is_non_singular: Expect that this operator is non-singular.
        This operator is non-singular if and only if its diagonal elements are
        all non-zero.
      is_self_adjoint: Expect that this operator is equal to its hermitian
        transpose. This operator is self-adjoint only if it is diagonal with
        real-valued diagonal entries. In this case it is advised to use
        `LinearOperatorDiag`.
      is_positive_definite: Expect that this operator is positive definite,
        meaning the quadratic form `x^H A x` has positive real part for all
        nonzero `x`. Note that we do not require the operator to be
        self-adjoint to be positive-definite. See:
        https://en.wikipedia.org/wiki/Positive-definite_matrix#Extension_for_non-symmetric_matrices
      is_square: Expect that this operator acts like square [batch] matrices.
      name: A name for this `LinearOperator`.
    Raises:
      TypeError: If `diag.dtype` is not an allowed type.
      ValueError: If `is_square` is `False`.
    """
    if is_square is False:
      raise ValueError(
          "Only square lower triangular operators supported at this time.")
    # A [batch of] lower triangular matrix is necessarily square.
    is_square = True
    with ops.name_scope(name, values=[tril]):
      self._tril = ops.convert_to_tensor(tril, name="tril")
      self._check_tril(self._tril)
      # Zero out the strictly upper triangle so e.g. `to_dense()` is exact.
      self._tril = array_ops.matrix_band_part(tril, -1, 0)
      # Cache the diagonal; it alone determines det, log|det| and singularity.
      self._diag = array_ops.matrix_diag_part(self._tril)
      super(LinearOperatorLowerTriangular, self).__init__(
          dtype=self._tril.dtype,
          graph_parents=[self._tril],
          is_non_singular=is_non_singular,
          is_self_adjoint=is_self_adjoint,
          is_positive_definite=is_positive_definite,
          is_square=is_square,
          name=name)
  def _check_tril(self, tril):
    """Static check of the `tril` argument."""
    # TODO(langmore) Add complex types once matrix_triangular_solve works for
    # them.
    allowed_dtypes = [
        dtypes.float16,
        dtypes.float32,
        dtypes.float64,
    ]
    dtype = tril.dtype
    if dtype not in allowed_dtypes:
      raise TypeError(
          "Argument tril must have dtype in %s. Found: %s"
          % (allowed_dtypes, dtype))
    if tril.get_shape().ndims is not None and tril.get_shape().ndims < 2:
      raise ValueError(
          "Argument tril must have at least 2 dimensions. Found: %s"
          % tril)
  def _shape(self):
    # Static shape: identical to that of the defining `tril` Tensor.
    return self._tril.get_shape()
  def _shape_tensor(self):
    # Dynamic shape: identical to that of the defining `tril` Tensor.
    return array_ops.shape(self._tril)
  def _assert_non_singular(self):
    # A triangular matrix is singular iff some diagonal entry is zero.
    return linear_operator_util.assert_no_entries_with_modulus_zero(
        self._diag,
        message="Singular operator: Diagonal contained zero values.")
  def _matmul(self, x, adjoint=False, adjoint_arg=False):
    # Dense [batch] matmul against the (already masked) triangular matrix.
    return linear_operator_util.matmul_with_broadcast(
        self._tril, x, adjoint_a=adjoint, adjoint_b=adjoint_arg)
  def _determinant(self):
    # det of a triangular matrix is the product of its diagonal entries.
    return math_ops.reduce_prod(self._diag, reduction_indices=[-1])
  def _log_abs_determinant(self):
    # log|det| = sum of log|diagonal entries|; avoids over/underflow of det.
    return math_ops.reduce_sum(
        math_ops.log(math_ops.abs(self._diag)), reduction_indices=[-1])
  def _solve(self, rhs, adjoint=False, adjoint_arg=False):
    # Back-substitution; adjoint of rhs applied up front because
    # matrix_triangular_solve has no adjoint_b-style argument.
    rhs = linalg.adjoint(rhs) if adjoint_arg else rhs
    return linear_operator_util.matrix_triangular_solve_with_broadcast(
        self._tril, rhs, lower=True, adjoint=adjoint)
  def _to_dense(self):
    # Already dense; the strictly upper triangle was zeroed in __init__.
    return self._tril
  def _add_to_tensor(self, x):
    return self._tril + x
| |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Windowing concepts.
A WindowInto transform logically divides up or groups the elements of a
PCollection into finite windows according to a windowing function (derived from
WindowFn).
The output of WindowInto contains the same elements as input, but they have been
logically assigned to windows. The next GroupByKey(s) transforms, including one
within a composite transform, will group by the combination of keys and windows.
Windowing a PCollection allows chunks of it to be processed individually, before
the entire PCollection is available. This is especially important for
PCollection(s) with unbounded size, since the full PCollection is never
available at once, since more data is continually arriving. For PCollection(s)
with a bounded size (aka. conventional batch mode), by default, all data is
implicitly in a single window (see GlobalWindows), unless WindowInto is
applied.
For example, a simple form of windowing divides up the data into fixed-width
time intervals, using FixedWindows.
Seconds are used as the time unit for the built-in windowing primitives here.
Integer or floating point seconds can be passed to these primitives.
Internally, seconds, with microsecond granularity, are stored as
timeutil.Timestamp and timeutil.Duration objects. This is done to avoid
precision errors that would occur with floating point representations.
Custom windowing function classes can be created, by subclassing from
WindowFn.
"""
from __future__ import absolute_import
import abc
from google.protobuf import struct_pb2
from apache_beam.coders import coders
from apache_beam.runners.api import beam_runner_api_pb2
from apache_beam.transforms import timeutil
from apache_beam.utils import proto_utils
from apache_beam.utils import urns
from apache_beam.utils.timestamp import Duration
from apache_beam.utils.timestamp import MAX_TIMESTAMP
from apache_beam.utils.timestamp import MIN_TIMESTAMP
from apache_beam.utils.timestamp import Timestamp
from apache_beam.utils.windowed_value import WindowedValue
__all__ = [
'TimestampCombiner',
'WindowFn',
'BoundedWindow',
'IntervalWindow',
'TimestampedValue',
'GlobalWindow',
'NonMergingWindowFn',
'GlobalWindows',
'FixedWindows',
'SlidingWindows',
'Sessions',
]
# TODO(ccy): revisit naming and semantics once Java Apache Beam finalizes their
# behavior.
class TimestampCombiner(object):
  """Determines how output timestamps of grouping operations are assigned."""

  OUTPUT_AT_EOW = beam_runner_api_pb2.END_OF_WINDOW
  OUTPUT_AT_EARLIEST = beam_runner_api_pb2.EARLIEST_IN_PANE
  OUTPUT_AT_LATEST = beam_runner_api_pb2.LATEST_IN_PANE
  # TODO(robertwb): Add this to the runner API or remove it.
  OUTPUT_AT_EARLIEST_TRANSFORMED = 'OUTPUT_AT_EARLIEST_TRANSFORMED'

  @staticmethod
  def get_impl(timestamp_combiner, window_fn):
    """Return the timeutil implementation for the given combiner constant.

    Raises:
      ValueError: if timestamp_combiner is not a known constant.
    """
    # Simple combiners need no extra context; checked in the same order as
    # the constants are declared above.
    dispatch = (
        (TimestampCombiner.OUTPUT_AT_EOW,
         timeutil.OutputAtEndOfWindowImpl),
        (TimestampCombiner.OUTPUT_AT_EARLIEST,
         timeutil.OutputAtEarliestInputTimestampImpl),
        (TimestampCombiner.OUTPUT_AT_LATEST,
         timeutil.OutputAtLatestInputTimestampImpl),
    )
    for constant, make_impl in dispatch:
      if timestamp_combiner == constant:
        return make_impl()
    # The transformed variant is the only one that needs the window_fn.
    if timestamp_combiner == TimestampCombiner.OUTPUT_AT_EARLIEST_TRANSFORMED:
      return timeutil.OutputAtEarliestTransformedInputTimestampImpl(window_fn)
    raise ValueError('Invalid TimestampCombiner: %s.' % timestamp_combiner)
class WindowFn(urns.RunnerApiFn):
  """An abstract windowing function defining a basic assign and merge."""
  # Python 2 style abstract-base-class declaration.
  __metaclass__ = abc.ABCMeta
  class AssignContext(object):
    """Context passed to WindowFn.assign()."""
    def __init__(self, timestamp, element=None):
      # timestamp is normalized to a timeutil.Timestamp; element is the raw
      # value being windowed (may be None when the WindowFn does not need it).
      self.timestamp = Timestamp.of(timestamp)
      self.element = element
  @abc.abstractmethod
  def assign(self, assign_context):
    """Associates a timestamp to an element."""
    raise NotImplementedError
  class MergeContext(object):
    """Context passed to WindowFn.merge() to perform merging, if any."""
    def __init__(self, windows):
      self.windows = list(windows)
    def merge(self, to_be_merged, merge_result):
      # Callback invoked by WindowFn.merge() implementations to record that
      # the windows in to_be_merged collapse into merge_result; concrete
      # contexts override this.
      raise NotImplementedError
  @abc.abstractmethod
  def merge(self, merge_context):
    """Returns a window that is the result of merging a set of windows."""
    raise NotImplementedError
  def is_merging(self):
    """Returns whether this WindowFn merges windows."""
    return True
  @abc.abstractmethod
  def get_window_coder(self):
    # Coder used to (de)serialize the windows this WindowFn produces.
    raise NotImplementedError
  def get_transformed_output_time(self, window, input_timestamp): # pylint: disable=unused-argument
    """Given input time and output window, returns output time for window.
    If TimestampCombiner.OUTPUT_AT_EARLIEST_TRANSFORMED is used in the
    Windowing, the output timestamp for the given window will be the earliest
    of the timestamps returned by get_transformed_output_time() for elements
    of the window.
    Arguments:
      window: Output window of element.
      input_timestamp: Input timestamp of element as a timeutil.Timestamp
        object.
    Returns:
      Transformed timestamp.
    """
    # By default, just return the input timestamp.
    return input_timestamp
  # Default serialization for WindowFns with no dedicated URN: pickling.
  urns.RunnerApiFn.register_pickle_urn(urns.PICKLED_WINDOW_FN)
class BoundedWindow(object):
  """A window for timestamps in range (-infinity, end).

  Attributes:
    end: End of window.
  """

  def __init__(self, end):
    self.end = Timestamp.of(end)

  def max_timestamp(self):
    """Latest timestamp that still falls inside this window."""
    return self.end.predecessor()

  def __cmp__(self, other):
    # Order primarily by window end; break ties arbitrarily but
    # deterministically via the hashes.
    return cmp(self.end, other.end) or cmp(hash(self), hash(other))

  def __eq__(self, other):
    # Subclasses must define real equality; fail loudly if they forget.
    raise NotImplementedError

  def __hash__(self):
    return hash(self.end)

  def __repr__(self):
    return '[?, %s)' % float(self.end)
class IntervalWindow(BoundedWindow):
  """A window for timestamps in range [start, end).

  Attributes:
    start: Start of window as seconds since Unix epoch.
    end: End of window as seconds since Unix epoch.
  """

  def __init__(self, start, end):
    super(IntervalWindow, self).__init__(end)
    self.start = Timestamp.of(start)

  def __hash__(self):
    return hash((self.start, self.end))

  def __eq__(self, other):
    return self.start == other.start and self.end == other.end

  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__; mirrors FixedWindows.
    return not self == other

  def __repr__(self):
    return '[%s, %s)' % (float(self.start), float(self.end))

  def intersects(self, other):
    """True if the half-open intervals [start, end) overlap.

    BUG FIX: this previously used `or`, which is always true for any pair of
    non-empty windows (the two disjuncts cannot both be false when
    start < end on both sides), so disjoint windows were reported as
    intersecting.  Two half-open intervals overlap iff each starts strictly
    before the other ends.
    """
    return other.start < self.end and self.start < other.end

  def union(self, other):
    """Smallest interval containing both windows."""
    return IntervalWindow(
        min(self.start, other.start), max(self.end, other.end))
class TimestampedValue(object):
  """A timestamped value having a value and a timestamp.

  Attributes:
    value: The underlying value.
    timestamp: Timestamp associated with the value as seconds since Unix epoch.
  """

  def __init__(self, value, timestamp):
    self.value = value
    self.timestamp = Timestamp.of(timestamp)

  def __cmp__(self, other):
    # Values of different types order by their types; values of the same
    # type order lexicographically by (value, timestamp).
    if type(self) is not type(other):
      return cmp(type(self), type(other))
    return cmp((self.value, self.timestamp), (other.value, other.timestamp))
class GlobalWindow(BoundedWindow):
  """The default window into which all data is placed (via GlobalWindows)."""
  _instance = None  # lazily created shared singleton

  def __new__(cls):
    # There is only ever one GlobalWindow: create it on first use and hand
    # out the same object from then on.
    if cls._instance is None:
      cls._instance = super(GlobalWindow, cls).__new__(cls)
    return cls._instance

  def __init__(self):
    super(GlobalWindow, self).__init__(MAX_TIMESTAMP)
    self.start = MIN_TIMESTAMP

  def __repr__(self):
    return 'GlobalWindow'

  def __hash__(self):
    return hash(type(self))

  def __eq__(self, other):
    # Global windows are always and only equal to each other.
    return self is other or type(self) is type(other)
class NonMergingWindowFn(WindowFn):
  """Base class for windowing functions that never merge windows."""

  def is_merging(self):
    return False

  def merge(self, merge_context):
    # Nothing to do: windows produced by this WindowFn are never merged.
    pass
class GlobalWindows(NonMergingWindowFn):
  """A windowing function that assigns everything to one global window."""

  @classmethod
  def windowed_value(cls, value, timestamp=MIN_TIMESTAMP):
    """Convenience wrapper producing a WindowedValue in the global window."""
    return WindowedValue(value, timestamp, (GlobalWindow(),))

  def assign(self, assign_context):
    return [GlobalWindow()]

  def get_window_coder(self):
    return coders.GlobalWindowCoder()

  def __hash__(self):
    return hash(type(self))

  def __eq__(self, other):
    # Any two GlobalWindows instances are interchangeable.
    return self is other or type(self) is type(other)

  def __ne__(self, other):
    return not self == other

  def to_runner_api_parameter(self, context):
    return urns.GLOBAL_WINDOWS_FN, None

  @urns.RunnerApiFn.register_urn(urns.GLOBAL_WINDOWS_FN, None)
  def from_runner_api_parameter(unused_fn_parameter, unused_context):
    return GlobalWindows()
class FixedWindows(NonMergingWindowFn):
  """A windowing function that assigns each element to one time interval.

  The attributes size and offset determine in what time interval a timestamp
  will be slotted. The time intervals have the following formula:
  [N * size + offset, (N + 1) * size + offset)

  Attributes:
    size: Size of the window as seconds.
    offset: Offset of this window as seconds since Unix epoch. Windows start at
      t=N * size + offset where t=0 is the epoch. The offset must be a value
      in range [0, size). If it is not it will be normalized to this range.

  Raises:
    ValueError: if size is not strictly positive.
  """

  def __init__(self, size, offset=0):
    if size <= 0:
      raise ValueError('The size parameter must be strictly positive.')
    self.size = Duration.of(size)
    # Normalize the offset into [0, size).
    self.offset = Timestamp.of(offset) % self.size

  def assign(self, context):
    timestamp = context.timestamp
    # Snap to the start of the fixed window containing the timestamp.
    start = timestamp - (timestamp - self.offset) % self.size
    return [IntervalWindow(start, start + self.size)]

  def get_window_coder(self):
    return coders.IntervalWindowCoder()

  def __eq__(self, other):
    if type(self) == type(other) == FixedWindows:
      return self.size == other.size and self.offset == other.offset

  def __ne__(self, other):
    return not self == other

  def __hash__(self):
    # Equal instances must hash equal; without this, equal FixedWindows
    # fall back to identity hashing, which breaks use as dict/set keys.
    # Consistent with GlobalWindows, which defines both __eq__ and __hash__.
    return hash((self.size, self.offset))

  def to_runner_api_parameter(self, context):
    return (urns.FIXED_WINDOWS_FN,
            proto_utils.pack_Struct(size=self.size.micros,
                                    offset=self.offset.micros))

  @urns.RunnerApiFn.register_urn(urns.FIXED_WINDOWS_FN, struct_pb2.Struct)
  def from_runner_api_parameter(fn_parameter, unused_context):
    return FixedWindows(
        size=Duration(micros=fn_parameter['size']),
        offset=Timestamp(micros=fn_parameter['offset']))
class SlidingWindows(NonMergingWindowFn):
  """A windowing function that assigns each element to a set of sliding windows.

  The attributes size and offset determine in what time interval a timestamp
  will be slotted. The time intervals have the following formula:
  [N * period + offset, N * period + offset + size)

  Attributes:
    size: Size of the window as seconds.
    period: Period of the windows as seconds.
    offset: Offset of this window as seconds since Unix epoch. Windows start at
      t=N * period + offset where t=0 is the epoch. The offset must be a value
      in range [0, period). If it is not it will be normalized to this range.

  Raises:
    ValueError: if size is not strictly positive.
  """

  def __init__(self, size, period, offset=0):
    if size <= 0:
      raise ValueError('The size parameter must be strictly positive.')
    self.size = Duration.of(size)
    self.period = Duration.of(period)
    # Normalize the offset into [0, period).
    self.offset = Timestamp.of(offset) % period

  def assign(self, context):
    timestamp = context.timestamp
    # Latest window start at or before the timestamp; then walk backwards by
    # one period while the half-open window still contains the timestamp.
    start = timestamp - ((timestamp - self.offset) % self.period)
    return [
        IntervalWindow(Timestamp(micros=s), Timestamp(micros=s) + self.size)
        for s in range(start.micros, timestamp.micros - self.size.micros,
                       -self.period.micros)]

  def get_window_coder(self):
    return coders.IntervalWindowCoder()

  def __eq__(self, other):
    if type(self) == type(other) == SlidingWindows:
      return (self.size == other.size
              and self.offset == other.offset
              and self.period == other.period)

  def __ne__(self, other):
    # Added for consistency with FixedWindows/GlobalWindows: Python 2 does
    # not derive __ne__ from __eq__, so `a != b` would otherwise compare by
    # identity and give wrong answers for equal instances.
    return not self == other

  def __hash__(self):
    # Equal instances must hash equal.
    return hash((self.size, self.offset, self.period))

  def to_runner_api_parameter(self, context):
    return (urns.SLIDING_WINDOWS_FN,
            proto_utils.pack_Struct(
                size=self.size.micros,
                offset=self.offset.micros,
                period=self.period.micros))

  @urns.RunnerApiFn.register_urn(urns.SLIDING_WINDOWS_FN, struct_pb2.Struct)
  def from_runner_api_parameter(fn_parameter, unused_context):
    return SlidingWindows(
        size=Duration(micros=fn_parameter['size']),
        offset=Timestamp(micros=fn_parameter['offset']),
        period=Duration(micros=fn_parameter['period']))
class Sessions(WindowFn):
  """A windowing function that groups elements into sessions.

  A session is defined as a series of consecutive events
  separated by a specified gap size.

  Attributes:
    gap_size: Size of the gap between windows as floating-point seconds.

  Raises:
    ValueError: if gap_size is not strictly positive.
  """

  def __init__(self, gap_size):
    if gap_size <= 0:
      raise ValueError('The size parameter must be strictly positive.')
    self.gap_size = Duration.of(gap_size)

  def assign(self, context):
    # Each element initially gets its own proto-session reaching gap_size
    # past its timestamp; overlapping windows are collapsed by merge().
    timestamp = context.timestamp
    return [IntervalWindow(timestamp, timestamp + self.gap_size)]

  def get_window_coder(self):
    return coders.IntervalWindowCoder()

  def merge(self, merge_context):
    """Collapse every run of overlapping windows into a single session."""
    to_merge = []
    end = MIN_TIMESTAMP
    # Sweep windows in order of start time, accumulating the current run.
    for w in sorted(merge_context.windows, key=lambda w: w.start):
      if to_merge:
        if end > w.start:
          # w overlaps the current run; extend it.
          to_merge.append(w)
          if w.end > end:
            end = w.end
        else:
          # Gap found: flush the run (only if it actually merged more than
          # one window) and start a new run at w.
          if len(to_merge) > 1:
            merge_context.merge(to_merge,
                                IntervalWindow(to_merge[0].start, end))
          to_merge = [w]
          end = w.end
      else:
        to_merge = [w]
        end = w.end
    if len(to_merge) > 1:
      merge_context.merge(to_merge, IntervalWindow(to_merge[0].start, end))

  def __eq__(self, other):
    if type(self) == type(other) == Sessions:
      return self.gap_size == other.gap_size

  def __ne__(self, other):
    # Added for consistency with FixedWindows/GlobalWindows: Python 2 does
    # not derive __ne__ from __eq__.
    return not self == other

  def __hash__(self):
    # Equal instances must hash equal.
    return hash(self.gap_size)

  @urns.RunnerApiFn.register_urn(urns.SESSION_WINDOWS_FN, struct_pb2.Struct)
  def from_runner_api_parameter(fn_parameter, unused_context):
    return Sessions(gap_size=Duration(micros=fn_parameter['gap_size']))

  def to_runner_api_parameter(self, context):
    return (urns.SESSION_WINDOWS_FN,
            proto_utils.pack_Struct(gap_size=self.gap_size.micros))
| |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import functools
import pytest
import numpy as np
from astropy import units as u
from astropy.coordinates import (PhysicsSphericalRepresentation, CartesianRepresentation,
CylindricalRepresentation, SphericalRepresentation,
UnitSphericalRepresentation, SphericalDifferential,
CartesianDifferential, UnitSphericalDifferential,
SphericalCosLatDifferential, UnitSphericalCosLatDifferential,
PhysicsSphericalDifferential, CylindricalDifferential,
RadialRepresentation, RadialDifferential, Longitude, Latitude)
from astropy.coordinates.representation import DIFFERENTIAL_CLASSES
from astropy.coordinates.angle_utilities import angular_separation
from astropy.tests.helper import assert_quantity_allclose, quantity_allclose
def assert_representation_allclose(actual, desired, rtol=1.e-7, atol=None,
                                   **kwargs):
    """Check two representations agree by comparing their cartesian xyz."""
    xyz_actual = actual.to_cartesian().get_xyz(xyz_axis=-1)
    xyz_desired = desired.to_cartesian().get_xyz(xyz_axis=-1)
    # Broadcast (preserving Quantity subclass) so shapes can differ.
    xyz_actual, xyz_desired = np.broadcast_arrays(xyz_actual, xyz_desired,
                                                  subok=True)
    assert_quantity_allclose(xyz_actual, xyz_desired, rtol, atol, **kwargs)
def assert_differential_allclose(actual, desired, rtol=1.e-7, **kwargs):
    """Check two differentials agree component by component."""
    assert actual.components == desired.components
    for name in actual.components:
        actual_component = getattr(actual, name)
        # Absolute tolerance scaled to the component's own unit.
        tolerance = 1.e-10 * actual_component.unit
        assert_quantity_allclose(actual_component, getattr(desired, name),
                                 rtol, tolerance, **kwargs)
def representation_equal(first, second):
    """Element-wise equality of two representations over all components."""
    comparisons = (getattr(first, name) == getattr(second, name)
                   for name in first.components)
    # AND the per-component boolean arrays together.
    return functools.reduce(np.logical_and, comparisons)
class TestArithmetic():
    def setup(self):
        """Create spherical/unit-spherical/cartesian fixtures on self."""
        # Choose some specific coordinates, for which ``sum`` and ``dot``
        # works out nicely.
        self.lon = Longitude(np.arange(0, 12.1, 2), u.hourangle)
        self.lat = Latitude(np.arange(-90, 91, 30), u.deg)
        self.distance = [5., 12., 4., 2., 4., 12., 5.] * u.kpc
        self.spherical = SphericalRepresentation(self.lon, self.lat,
                                                 self.distance)
        self.unit_spherical = self.spherical.represent_as(
            UnitSphericalRepresentation)
        self.cartesian = self.spherical.to_cartesian()
def test_norm_spherical(self):
norm_s = self.spherical.norm()
assert isinstance(norm_s, u.Quantity)
# Just to be sure, test against getting object arrays.
assert norm_s.dtype.kind == 'f'
assert np.all(norm_s == self.distance)
    @pytest.mark.parametrize('representation',
                             (PhysicsSphericalRepresentation,
                              CartesianRepresentation,
                              CylindricalRepresentation))
    def test_norm(self, representation):
        """norm() is representation-independent and equals the distance."""
        in_rep = self.spherical.represent_as(representation)
        norm_rep = in_rep.norm()
        assert isinstance(norm_rep, u.Quantity)
        assert_quantity_allclose(norm_rep, self.distance)
def test_norm_unitspherical(self):
norm_rep = self.unit_spherical.norm()
assert norm_rep.unit == u.dimensionless_unscaled
assert np.all(norm_rep == 1. * u.dimensionless_unscaled)
    @pytest.mark.parametrize('representation',
                             (SphericalRepresentation,
                              PhysicsSphericalRepresentation,
                              CartesianRepresentation,
                              CylindricalRepresentation,
                              UnitSphericalRepresentation))
    def test_neg_pos(self, representation):
        """Unary +/- return new instances; minus flips the cartesian xyz."""
        in_rep = self.cartesian.represent_as(representation)
        pos_rep = +in_rep
        assert type(pos_rep) is type(in_rep)
        # Unary plus must copy, not return the same object.
        assert pos_rep is not in_rep
        assert np.all(representation_equal(pos_rep, in_rep))
        neg_rep = -in_rep
        assert type(neg_rep) is type(in_rep)
        # Negation preserves the norm but reverses the direction.
        assert np.all(neg_rep.norm() == in_rep.norm())
        in_rep_xyz = in_rep.to_cartesian().xyz
        assert_quantity_allclose(neg_rep.to_cartesian().xyz,
                                 -in_rep_xyz, atol=1.e-10*in_rep_xyz.unit)
    def test_mul_div_spherical(self):
        """Scaling a SphericalRepresentation only affects the distance."""
        s0 = self.spherical / (1. * u.Myr)
        assert isinstance(s0, SphericalRepresentation)
        assert s0.distance.dtype.kind == 'f'
        assert np.all(s0.lon == self.spherical.lon)
        assert np.all(s0.lat == self.spherical.lat)
        assert np.all(s0.distance == self.distance / (1. * u.Myr))
        # Multiplication from the left gives the same result.
        s1 = (1./u.Myr) * self.spherical
        assert isinstance(s1, SphericalRepresentation)
        assert np.all(representation_equal(s1, s0))
        # Multiplying by an array broadcasts into a new leading axis.
        s2 = self.spherical * np.array([[1.], [2.]])
        assert isinstance(s2, SphericalRepresentation)
        assert s2.shape == (2, self.spherical.shape[0])
        assert np.all(s2.lon == self.spherical.lon)
        assert np.all(s2.lat == self.spherical.lat)
        assert np.all(s2.distance ==
                      self.spherical.distance * np.array([[1.], [2.]]))
        s3 = np.array([[1.], [2.]]) * self.spherical
        assert isinstance(s3, SphericalRepresentation)
        assert np.all(representation_equal(s3, s2))
        # Negation flips the direction but keeps the (positive) distance.
        s4 = -self.spherical
        assert isinstance(s4, SphericalRepresentation)
        assert quantity_allclose(s4.to_cartesian().xyz,
                                 -self.spherical.to_cartesian().xyz,
                                 atol=1e-15*self.spherical.distance.unit)
        assert np.all(s4.distance == self.spherical.distance)
        s5 = +self.spherical
        assert s5 is not self.spherical
        assert np.all(representation_equal(s5, self.spherical))
    @pytest.mark.parametrize('representation',
                             (PhysicsSphericalRepresentation,
                              CartesianRepresentation,
                              CylindricalRepresentation))
    def test_mul_div(self, representation):
        """Scalar/array multiplication and division work in any representation."""
        in_rep = self.spherical.represent_as(representation)
        r1 = in_rep / (1. * u.Myr)
        assert isinstance(r1, representation)
        for component in in_rep.components:
            in_rep_comp = getattr(in_rep, component)
            r1_comp = getattr(r1, component)
            # Only distance-like components pick up the 1/Myr scaling;
            # angular components are untouched.
            if in_rep_comp.unit == self.distance.unit:
                assert np.all(r1_comp == in_rep_comp / (1.*u.Myr))
            else:
                assert np.all(r1_comp == in_rep_comp)
        r2 = np.array([[1.], [2.]]) * in_rep
        assert isinstance(r2, representation)
        assert r2.shape == (2, in_rep.shape[0])
        assert_quantity_allclose(r2.norm(),
                                 self.distance * np.array([[1.], [2.]]))
        r3 = -in_rep
        assert np.all(representation_equal(r3, in_rep * -1.))
        # Representation-by-representation products are not defined.
        with pytest.raises(TypeError):
            in_rep * in_rep
        with pytest.raises(TypeError):
            dict() * in_rep
    def test_mul_div_unit_spherical(self):
        """Scaling a UnitSphericalRepresentation yields a full spherical one."""
        s1 = self.unit_spherical * self.distance
        assert isinstance(s1, SphericalRepresentation)
        assert np.all(s1.lon == self.unit_spherical.lon)
        assert np.all(s1.lat == self.unit_spherical.lat)
        assert np.all(s1.distance == self.spherical.distance)
        s2 = self.unit_spherical / u.s
        assert isinstance(s2, SphericalRepresentation)
        assert np.all(s2.lon == self.unit_spherical.lon)
        assert np.all(s2.lat == self.unit_spherical.lat)
        assert np.all(s2.distance == 1./u.s)
        # Negation stays on the unit sphere: it points to the antipode.
        u3 = -self.unit_spherical
        assert isinstance(u3, UnitSphericalRepresentation)
        assert_quantity_allclose(u3.lon, self.unit_spherical.lon + 180.*u.deg)
        assert np.all(u3.lat == -self.unit_spherical.lat)
        assert_quantity_allclose(u3.to_cartesian().xyz,
                                 -self.unit_spherical.to_cartesian().xyz,
                                 atol=1.e-10*u.dimensionless_unscaled)
        u4 = +self.unit_spherical
        assert isinstance(u4, UnitSphericalRepresentation)
        assert u4 is not self.unit_spherical
        assert np.all(representation_equal(u4, self.unit_spherical))
    def test_add_sub_cartesian(self):
        """Addition/subtraction of cartesian representations is element-wise."""
        c1 = self.cartesian + self.cartesian
        assert isinstance(c1, CartesianRepresentation)
        assert c1.x.dtype.kind == 'f'
        assert np.all(representation_equal(c1, 2. * self.cartesian))
        # Only representations with compatible units can be added.
        with pytest.raises(TypeError):
            self.cartesian + 10.*u.m
        with pytest.raises(u.UnitsError):
            self.cartesian + (self.cartesian / u.s)
        c2 = self.cartesian - self.cartesian
        assert isinstance(c2, CartesianRepresentation)
        assert np.all(representation_equal(
            c2, CartesianRepresentation(0.*u.m, 0.*u.m, 0.*u.m)))
        c3 = self.cartesian - self.cartesian / 2.
        assert isinstance(c3, CartesianRepresentation)
        assert np.all(representation_equal(c3, self.cartesian / 2.))
    @pytest.mark.parametrize('representation',
                             (PhysicsSphericalRepresentation,
                              SphericalRepresentation,
                              CylindricalRepresentation))
    def test_add_sub(self, representation):
        """Addition/subtraction round-trip through any representation."""
        in_rep = self.cartesian.represent_as(representation)
        r1 = in_rep + in_rep
        assert isinstance(r1, representation)
        expected = 2. * in_rep
        for component in in_rep.components:
            assert_quantity_allclose(getattr(r1, component),
                                     getattr(expected, component))
        # Only representations with compatible units can be added.
        with pytest.raises(TypeError):
            10.*u.m + in_rep
        with pytest.raises(u.UnitsError):
            in_rep + (in_rep / u.s)
        r2 = in_rep - in_rep
        assert isinstance(r2, representation)
        assert np.all(representation_equal(
            r2.to_cartesian(), CartesianRepresentation(0.*u.m, 0.*u.m, 0.*u.m)))
        r3 = in_rep - in_rep / 2.
        assert isinstance(r3, representation)
        expected = in_rep / 2.
        assert_representation_allclose(r3, expected)
    def test_add_sub_unit_spherical(self):
        """Adding unit-spherical representations produces full spherical ones."""
        s1 = self.unit_spherical + self.unit_spherical
        assert isinstance(s1, SphericalRepresentation)
        expected = 2. * self.unit_spherical
        for component in s1.components:
            assert_quantity_allclose(getattr(s1, component),
                                     getattr(expected, component))
        with pytest.raises(TypeError):
            10.*u.m - self.unit_spherical
        with pytest.raises(u.UnitsError):
            self.unit_spherical + (self.unit_spherical / u.s)
        s2 = self.unit_spherical - self.unit_spherical / 2.
        assert isinstance(s2, SphericalRepresentation)
        expected = self.unit_spherical / 2.
        for component in s2.components:
            assert_quantity_allclose(getattr(s2, component),
                                     getattr(expected, component))
    @pytest.mark.parametrize('representation',
                             (CartesianRepresentation,
                              PhysicsSphericalRepresentation,
                              SphericalRepresentation,
                              CylindricalRepresentation))
    def test_sum_mean(self, representation):
        """sum()/mean() match the analytically known totals from setup()."""
        in_rep = self.spherical.represent_as(representation)
        r_sum = in_rep.sum()
        assert isinstance(r_sum, representation)
        # The setup() coordinates were chosen so the vector sum is exactly
        # this; see the comment in setup().
        expected = SphericalRepresentation(
            90. * u.deg, 0. * u.deg, 14. * u.kpc).represent_as(representation)
        for component in expected.components:
            exp_component = getattr(expected, component)
            assert_quantity_allclose(getattr(r_sum, component),
                                     exp_component,
                                     atol=1e-10*exp_component.unit)
        r_mean = in_rep.mean()
        assert isinstance(r_mean, representation)
        expected = expected / len(in_rep)
        for component in expected.components:
            exp_component = getattr(expected, component)
            assert_quantity_allclose(getattr(r_mean, component),
                                     exp_component,
                                     atol=1e-10*exp_component.unit)
    def test_sum_mean_unit_spherical(self):
        """sum()/mean() of unit vectors give a dimensionless spherical result."""
        s_sum = self.unit_spherical.sum()
        assert isinstance(s_sum, SphericalRepresentation)
        expected = SphericalRepresentation(
            90. * u.deg, 0. * u.deg, 3. * u.dimensionless_unscaled)
        for component in expected.components:
            exp_component = getattr(expected, component)
            assert_quantity_allclose(getattr(s_sum, component),
                                     exp_component,
                                     atol=1e-10*exp_component.unit)
        s_mean = self.unit_spherical.mean()
        assert isinstance(s_mean, SphericalRepresentation)
        expected = expected / len(self.unit_spherical)
        for component in expected.components:
            exp_component = getattr(expected, component)
            assert_quantity_allclose(getattr(s_mean, component),
                                     exp_component,
                                     atol=1e-10*exp_component.unit)
    @pytest.mark.parametrize('representation',
                             (CartesianRepresentation,
                              PhysicsSphericalRepresentation,
                              SphericalRepresentation,
                              CylindricalRepresentation))
    def test_dot(self, representation):
        """dot() returns a Quantity; dotting with unit axes projects components."""
        in_rep = self.cartesian.represent_as(representation)
        r_dot_r = in_rep.dot(in_rep)
        assert isinstance(r_dot_r, u.Quantity)
        assert r_dot_r.shape == in_rep.shape
        # r . r == |r|^2
        assert_quantity_allclose(np.sqrt(r_dot_r), self.distance)
        r_dot_r_rev = in_rep.dot(in_rep[::-1])
        assert isinstance(r_dot_r_rev, u.Quantity)
        assert r_dot_r_rev.shape == in_rep.shape
        # Values follow from the setup() coordinates (chosen to work out
        # nicely; see the comment in setup()).
        expected = [-25., -126., 2., 4., 2., -126., -25.] * u.kpc**2
        assert_quantity_allclose(r_dot_r_rev, expected)
        # Dotting with a cartesian unit vector extracts that component.
        for axis in 'xyz':
            project = CartesianRepresentation(*(
                (1. if axis == _axis else 0.) * u.dimensionless_unscaled
                for _axis in 'xyz'))
            assert_quantity_allclose(in_rep.dot(project),
                                     getattr(self.cartesian, axis),
                                     atol=1.*u.upc)
        # dot() requires a representation, not a bare Quantity array.
        with pytest.raises(TypeError):
            in_rep.dot(self.cartesian.xyz)
def test_dot_unit_spherical(self):
u_dot_u = self.unit_spherical.dot(self.unit_spherical)
assert isinstance(u_dot_u, u.Quantity)
assert u_dot_u.shape == self.unit_spherical.shape
assert_quantity_allclose(u_dot_u, 1.*u.dimensionless_unscaled)
cartesian = self.unit_spherical.to_cartesian()
for axis in 'xyz':
project = CartesianRepresentation(*(
(1. if axis == _axis else 0.) * u.dimensionless_unscaled
for _axis in 'xyz'))
assert_quantity_allclose(self.unit_spherical.dot(project),
getattr(cartesian, axis), atol=1.e-10)
    @pytest.mark.parametrize('representation',
                             (CartesianRepresentation,
                              PhysicsSphericalRepresentation,
                              SphericalRepresentation,
                              CylindricalRepresentation))
    def test_cross(self, representation):
        """cross() stays in the input representation class and satisfies the
        geometric identities of the cartesian cross product."""
        in_rep = self.cartesian.represent_as(representation)
        r_cross_r = in_rep.cross(in_rep)
        assert isinstance(r_cross_r, representation)
        # v x v vanishes identically.
        assert_quantity_allclose(r_cross_r.norm(), 0.*u.kpc**2,
                                 atol=1.*u.mpc**2)
        r_cross_r_rev = in_rep.cross(in_rep[::-1])
        # |a x b| = |a| |b| sin(separation).
        sep = angular_separation(self.lon, self.lat,
                                 self.lon[::-1], self.lat[::-1])
        expected = self.distance * self.distance[::-1] * np.sin(sep)
        assert_quantity_allclose(r_cross_r_rev.norm(), expected,
                                 atol=1.*u.mpc**2)
        # Broadcasting: (3, 1) basis vectors against the (7,) fixture.
        unit_vectors = CartesianRepresentation(
            [1., 0., 0.]*u.one,
            [0., 1., 0.]*u.one,
            [0., 0., 1.]*u.one)[:, np.newaxis]
        r_cross_uv = in_rep.cross(unit_vectors)
        assert r_cross_uv.shape == (3, 7)
        # The cross product is perpendicular to both operands.
        assert_quantity_allclose(r_cross_uv.dot(unit_vectors), 0.*u.kpc,
                                 atol=1.*u.upc)
        assert_quantity_allclose(r_cross_uv.dot(in_rep), 0.*u.kpc**2,
                                 atol=1.*u.mpc**2)
        zeros = np.zeros(len(in_rep)) * u.kpc
        expected = CartesianRepresentation(
            u.Quantity((zeros, -self.cartesian.z, self.cartesian.y)),
            u.Quantity((self.cartesian.z, zeros, -self.cartesian.x)),
            u.Quantity((-self.cartesian.y, self.cartesian.x, zeros)))
        # Comparison with spherical is hard since some distances are zero,
        # implying the angles are undefined.
        r_cross_uv_cartesian = r_cross_uv.to_cartesian()
        assert_representation_allclose(r_cross_uv_cartesian,
                                       expected, atol=1.*u.upc)
        # A final check, with the side benefit of ensuring __div__ and norm
        # work on multi-D representations.
        r_cross_uv_by_distance = r_cross_uv / self.distance
        uv_sph = unit_vectors.represent_as(UnitSphericalRepresentation)
        sep = angular_separation(self.lon, self.lat, uv_sph.lon, uv_sph.lat)
        assert_quantity_allclose(r_cross_uv_by_distance.norm(), np.sin(sep),
                                 atol=1e-9)
        # Only representations (not bare Quantity arrays) may be crossed.
        with pytest.raises(TypeError):
            in_rep.cross(self.cartesian.xyz)
def test_cross_unit_spherical(self):
u_cross_u = self.unit_spherical.cross(self.unit_spherical)
assert isinstance(u_cross_u, SphericalRepresentation)
assert_quantity_allclose(u_cross_u.norm(), 0.*u.one, atol=1.e-10*u.one)
u_cross_u_rev = self.unit_spherical.cross(self.unit_spherical[::-1])
assert isinstance(u_cross_u_rev, SphericalRepresentation)
sep = angular_separation(self.lon, self.lat,
self.lon[::-1], self.lat[::-1])
expected = np.sin(sep)
assert_quantity_allclose(u_cross_u_rev.norm(), expected,
atol=1.e-10*u.one)
class TestUnitVectorsAndScales():
    """Check unit_vectors() and scale_factors() for every representation:
    stepping along a unit vector (scaled appropriately) changes only the
    corresponding coordinate component.

    Fix relative to the original: the ``s_lat`` section of test_spherical
    re-asserted ``s_lon.distance`` (copy-paste slip); it now checks
    ``s_lat.distance`` as intended.
    """
    @staticmethod
    def check_unit_vectors(e):
        """All unit vectors must be cartesian and of unit norm."""
        for v in e.values():
            assert type(v) is CartesianRepresentation
            assert_quantity_allclose(v.norm(), 1. * u.one)
        return e
    @staticmethod
    def check_scale_factors(sf, rep):
        """Scale factor units must convert component units to the norm unit."""
        unit = rep.norm().unit
        for c, f in sf.items():
            assert type(f) is u.Quantity
            assert (f.unit * getattr(rep, c).unit).is_equivalent(unit)
    def test_spherical(self):
        s = SphericalRepresentation(lon=[0., 6., 21.] * u.hourangle,
                                    lat=[0., -30., 85.] * u.deg,
                                    distance=[1, 2, 3] * u.kpc)
        e = s.unit_vectors()
        self.check_unit_vectors(e)
        sf = s.scale_factors()
        self.check_scale_factors(sf, s)
        # A small step along e['lon'] changes lon only.
        s_lon = s + s.distance * 1e-5 * np.cos(s.lat) * e['lon']
        assert_quantity_allclose(s_lon.lon, s.lon + 1e-5*u.rad,
                                 atol=1e-10*u.rad)
        assert_quantity_allclose(s_lon.lat, s.lat, atol=1e-10*u.rad)
        assert_quantity_allclose(s_lon.distance, s.distance)
        # The same step expressed via the scale factor.
        s_lon2 = s + 1e-5 * u.radian * sf['lon'] * e['lon']
        assert_representation_allclose(s_lon2, s_lon)
        # A small step along e['lat'] changes lat only.
        s_lat = s + s.distance * 1e-5 * e['lat']
        assert_quantity_allclose(s_lat.lon, s.lon)
        assert_quantity_allclose(s_lat.lat, s.lat + 1e-5*u.rad,
                                 atol=1e-10*u.rad)
        # Was s_lon.distance in the original -- clearly meant s_lat here.
        assert_quantity_allclose(s_lat.distance, s.distance)
        s_lat2 = s + 1.e-5 * u.radian * sf['lat'] * e['lat']
        assert_representation_allclose(s_lat2, s_lat)
        # A step along e['distance'] changes distance only.
        s_distance = s + 1. * u.pc * e['distance']
        assert_quantity_allclose(s_distance.lon, s.lon, atol=1e-10*u.rad)
        assert_quantity_allclose(s_distance.lat, s.lat, atol=1e-10*u.rad)
        assert_quantity_allclose(s_distance.distance, s.distance + 1.*u.pc)
        s_distance2 = s + 1. * u.pc * sf['distance'] * e['distance']
        assert_representation_allclose(s_distance2, s_distance)
    def test_unit_spherical(self):
        s = UnitSphericalRepresentation(lon=[0., 6., 21.] * u.hourangle,
                                        lat=[0., -30., 85.] * u.deg)
        e = s.unit_vectors()
        self.check_unit_vectors(e)
        sf = s.scale_factors()
        self.check_scale_factors(sf, s)
        # Steps along lon/lat unit vectors change only that angle.
        s_lon = s + 1e-5 * np.cos(s.lat) * e['lon']
        assert_quantity_allclose(s_lon.lon, s.lon + 1e-5*u.rad,
                                 atol=1e-10*u.rad)
        assert_quantity_allclose(s_lon.lat, s.lat, atol=1e-10*u.rad)
        s_lon2 = s + 1e-5 * u.radian * sf['lon'] * e['lon']
        assert_representation_allclose(s_lon2, s_lon)
        s_lat = s + 1e-5 * e['lat']
        assert_quantity_allclose(s_lat.lon, s.lon)
        assert_quantity_allclose(s_lat.lat, s.lat + 1e-5*u.rad,
                                 atol=1e-10*u.rad)
        s_lat2 = s + 1.e-5 * u.radian * sf['lat'] * e['lat']
        assert_representation_allclose(s_lat2, s_lat)
    def test_radial(self):
        r = RadialRepresentation(10.*u.kpc)
        # Radial representations have no well-defined direction.
        with pytest.raises(NotImplementedError):
            r.unit_vectors()
        sf = r.scale_factors()
        assert np.all(sf['distance'] == 1.*u.one)
        assert np.all(r.norm() == r.distance)
        # Addition is not defined for radial representations.
        with pytest.raises(TypeError):
            r + r
    def test_physical_spherical(self):
        s = PhysicsSphericalRepresentation(phi=[0., 6., 21.] * u.hourangle,
                                           theta=[90., 120., 5.] * u.deg,
                                           r=[1, 2, 3] * u.kpc)
        e = s.unit_vectors()
        self.check_unit_vectors(e)
        sf = s.scale_factors()
        self.check_scale_factors(sf, s)
        # Steps along each unit vector change only that component.
        s_phi = s + s.r * 1e-5 * np.sin(s.theta) * e['phi']
        assert_quantity_allclose(s_phi.phi, s.phi + 1e-5*u.rad,
                                 atol=1e-10*u.rad)
        assert_quantity_allclose(s_phi.theta, s.theta, atol=1e-10*u.rad)
        assert_quantity_allclose(s_phi.r, s.r)
        s_phi2 = s + 1e-5 * u.radian * sf['phi'] * e['phi']
        assert_representation_allclose(s_phi2, s_phi)
        s_theta = s + s.r * 1e-5 * e['theta']
        assert_quantity_allclose(s_theta.phi, s.phi)
        assert_quantity_allclose(s_theta.theta, s.theta + 1e-5*u.rad,
                                 atol=1e-10*u.rad)
        assert_quantity_allclose(s_theta.r, s.r)
        s_theta2 = s + 1.e-5 * u.radian * sf['theta'] * e['theta']
        assert_representation_allclose(s_theta2, s_theta)
        s_r = s + 1. * u.pc * e['r']
        assert_quantity_allclose(s_r.phi, s.phi, atol=1e-10*u.rad)
        assert_quantity_allclose(s_r.theta, s.theta, atol=1e-10*u.rad)
        assert_quantity_allclose(s_r.r, s.r + 1.*u.pc)
        s_r2 = s + 1. * u.pc * sf['r'] * e['r']
        assert_representation_allclose(s_r2, s_r)
    def test_cartesian(self):
        s = CartesianRepresentation(x=[1, 2, 3] * u.pc,
                                    y=[2, 3, 4] * u.Mpc,
                                    z=[3, 4, 5] * u.kpc)
        e = s.unit_vectors()
        sf = s.scale_factors()
        # Cartesian unit vectors are the canonical basis; factors are unity.
        for v, expected in zip(e.values(), ([1., 0., 0.] * u.one,
                                            [0., 1., 0.] * u.one,
                                            [0., 0., 1.] * u.one)):
            assert np.all(v.get_xyz(xyz_axis=-1) == expected)
        for f in sf.values():
            assert np.all(f == 1.*u.one)
    def test_cylindrical(self):
        s = CylindricalRepresentation(rho=[1, 2, 3] * u.pc,
                                      phi=[0., 90., -45.] * u.deg,
                                      z=[3, 4, 5] * u.kpc)
        e = s.unit_vectors()
        self.check_unit_vectors(e)
        sf = s.scale_factors()
        self.check_scale_factors(sf, s)
        # Steps along each unit vector change only that component.
        s_rho = s + 1. * u.pc * e['rho']
        assert_quantity_allclose(s_rho.rho, s.rho + 1.*u.pc)
        assert_quantity_allclose(s_rho.phi, s.phi)
        assert_quantity_allclose(s_rho.z, s.z)
        s_rho2 = s + 1. * u.pc * sf['rho'] * e['rho']
        assert_representation_allclose(s_rho2, s_rho)
        s_phi = s + s.rho * 1e-5 * e['phi']
        assert_quantity_allclose(s_phi.rho, s.rho)
        assert_quantity_allclose(s_phi.phi, s.phi + 1e-5*u.rad)
        assert_quantity_allclose(s_phi.z, s.z)
        s_phi2 = s + 1e-5 * u.radian * sf['phi'] * e['phi']
        assert_representation_allclose(s_phi2, s_phi)
        s_z = s + 1. * u.pc * e['z']
        assert_quantity_allclose(s_z.rho, s.rho)
        assert_quantity_allclose(s_z.phi, s.phi, atol=1e-10*u.rad)
        assert_quantity_allclose(s_z.z, s.z + 1.*u.pc)
        s_z2 = s + 1. * u.pc * sf['z'] * e['z']
        assert_representation_allclose(s_z2, s_z)
@pytest.mark.parametrize('omit_coslat', [False, True], scope='class')
class TestSphericalDifferential():
    # These tests are parametrized over omit_coslat, covering both
    # SphericalDifferential and SphericalCosLatDifferential.
    def _setup(self, omit_coslat):
        # Select the differential class under test and precompute the base
        # representation plus its unit vectors and scale factors.
        if omit_coslat:
            self.SD_cls = SphericalCosLatDifferential
        else:
            self.SD_cls = SphericalDifferential
        s = SphericalRepresentation(lon=[0., 6., 21.] * u.hourangle,
                                    lat=[0., -30., 85.] * u.deg,
                                    distance=[1, 2, 3] * u.kpc)
        self.s = s
        self.e = s.unit_vectors()
        self.sf = s.scale_factors(omit_coslat=omit_coslat)
    def test_name_coslat(self, omit_coslat):
        """The selected class is registered in DIFFERENTIAL_CLASSES."""
        self._setup(omit_coslat)
        if omit_coslat:
            assert self.SD_cls is SphericalCosLatDifferential
            assert self.SD_cls.get_name() == 'sphericalcoslat'
        else:
            assert self.SD_cls is SphericalDifferential
            assert self.SD_cls.get_name() == 'spherical'
        assert self.SD_cls.get_name() in DIFFERENTIAL_CLASSES
    def test_simple_differentials(self, omit_coslat):
        """Cartesian round-trips and agreement with unit-vector steps."""
        self._setup(omit_coslat)
        s, e, sf = self.s, self.e, self.sf
        o_lon = self.SD_cls(1.*u.arcsec, 0.*u.arcsec, 0.*u.kpc)
        o_lonc = o_lon.to_cartesian(base=s)
        o_lon2 = self.SD_cls.from_cartesian(o_lonc, base=s)
        assert_differential_allclose(o_lon, o_lon2)
        # simple check by hand for first element.
        # lat[0] is 0, so cos(lat) term doesn't matter.
        assert_quantity_allclose(o_lonc[0].xyz,
                                 [0., np.pi/180./3600., 0.]*u.kpc)
        # check all using unit vectors and scale factors.
        s_lon = s + 1.*u.arcsec * sf['lon'] * e['lon']
        assert_representation_allclose(o_lonc, s_lon - s, atol=1*u.npc)
        s_lon2 = s + o_lon
        assert_representation_allclose(s_lon2, s_lon, atol=1*u.npc)
        o_lat = self.SD_cls(0.*u.arcsec, 1.*u.arcsec, 0.*u.kpc)
        o_latc = o_lat.to_cartesian(base=s)
        assert_quantity_allclose(o_latc[0].xyz,
                                 [0., 0., np.pi/180./3600.]*u.kpc,
                                 atol=1.*u.npc)
        s_lat = s + 1.*u.arcsec * sf['lat'] * e['lat']
        assert_representation_allclose(o_latc, s_lat - s, atol=1*u.npc)
        s_lat2 = s + o_lat
        assert_representation_allclose(s_lat2, s_lat, atol=1*u.npc)
        o_distance = self.SD_cls(0.*u.arcsec, 0.*u.arcsec, 1.*u.mpc)
        o_distancec = o_distance.to_cartesian(base=s)
        assert_quantity_allclose(o_distancec[0].xyz,
                                 [1e-6, 0., 0.]*u.kpc, atol=1.*u.npc)
        s_distance = s + 1.*u.mpc * sf['distance'] * e['distance']
        assert_representation_allclose(o_distancec, s_distance - s,
                                       atol=1*u.npc)
        s_distance2 = s + o_distance
        assert_representation_allclose(s_distance2, s_distance)
    def test_differential_arithmetic(self, omit_coslat):
        """Scaling, addition and subtraction of differentials."""
        self._setup(omit_coslat)
        s = self.s
        o_lon = self.SD_cls(1.*u.arcsec, 0.*u.arcsec, 0.*u.kpc)
        o_lon_by_2 = o_lon / 2.
        assert_representation_allclose(o_lon_by_2.to_cartesian(s) * 2.,
                                       o_lon.to_cartesian(s), atol=1e-10*u.kpc)
        assert_representation_allclose(s + o_lon, s + 2 * o_lon_by_2,
                                       atol=1e-10*u.kpc)
        o_lon_rec = o_lon_by_2 + o_lon_by_2
        assert_representation_allclose(s + o_lon, s + o_lon_rec,
                                       atol=1e-10*u.kpc)
        # Subtracting a differential from itself zeros every component.
        o_lon_0 = o_lon - o_lon
        for c in o_lon_0.components:
            assert np.all(getattr(o_lon_0, c) == 0.)
        # Proper-motion-style differential: the 4.74 km/s value matches the
        # assertion on o_lon2.norm(s)[0] below (1 mas/yr at 1 kpc).
        o_lon2 = self.SD_cls(1*u.mas/u.yr, 0*u.mas/u.yr, 0*u.km/u.s)
        assert_quantity_allclose(o_lon2.norm(s)[0], 4.74*u.km/u.s,
                                 atol=0.01*u.km/u.s)
        assert_representation_allclose(o_lon2.to_cartesian(s) * 1000.*u.yr,
                                       o_lon.to_cartesian(s), atol=1e-10*u.kpc)
        s_off = s + o_lon
        s_off2 = s + o_lon2 * 1000.*u.yr
        assert_representation_allclose(s_off, s_off2, atol=1e-10*u.kpc)
        # A large step wraps around the sphere; the CosLat variant already
        # includes the cos(lat) factor, hence the conditional below.
        factor = 1e5 * u.radian/u.arcsec
        if not omit_coslat:
            factor = factor / np.cos(s.lat)
        s_off_big = s + o_lon * factor
        assert_representation_allclose(
            s_off_big, SphericalRepresentation(s.lon + 90.*u.deg, 0.*u.deg,
                                               1e5*s.distance),
            atol=5.*u.kpc)
        o_lon3c = CartesianRepresentation(0., 4.74047, 0., unit=u.km/u.s)
        o_lon3 = self.SD_cls.from_cartesian(o_lon3c, base=s)
        expected0 = self.SD_cls(1.*u.mas/u.yr, 0.*u.mas/u.yr, 0.*u.km/u.s)
        assert_differential_allclose(o_lon3[0], expected0)
        s_off_big2 = s + o_lon3 * 1e5 * u.yr * u.radian/u.mas
        assert_representation_allclose(
            s_off_big2, SphericalRepresentation(90.*u.deg, 0.*u.deg,
                                                1e5*u.kpc), atol=5.*u.kpc)
        # Mixing differentials and representations the wrong way fails.
        with pytest.raises(TypeError):
            o_lon - s
        with pytest.raises(TypeError):
            s.to_cartesian() + o_lon
    def test_differential_init_errors(self, omit_coslat):
        """Invalid units, argument counts, shapes, and mutation all raise."""
        self._setup(omit_coslat)
        s = self.s
        with pytest.raises(u.UnitsError):
            self.SD_cls(1.*u.arcsec, 0., 0.)
        with pytest.raises(TypeError):
            self.SD_cls(1.*u.arcsec, 0.*u.arcsec, 0.*u.kpc,
                        False, False)
        with pytest.raises(TypeError):
            self.SD_cls(1.*u.arcsec, 0.*u.arcsec, 0.*u.kpc,
                        copy=False, d_lat=0.*u.arcsec)
        with pytest.raises(TypeError):
            self.SD_cls(1.*u.arcsec, 0.*u.arcsec, 0.*u.kpc,
                        copy=False, flying='circus')
        with pytest.raises(ValueError):
            self.SD_cls(np.ones(2)*u.arcsec,
                        np.zeros(3)*u.arcsec, np.zeros(2)*u.kpc)
        with pytest.raises(u.UnitsError):
            self.SD_cls(1.*u.arcsec, 1.*u.s, 0.*u.kpc)
        with pytest.raises(u.UnitsError):
            self.SD_cls(1.*u.kpc, 1.*u.arcsec, 0.*u.kpc)
        o = self.SD_cls(1.*u.arcsec, 1.*u.arcsec, 0.*u.km/u.s)
        with pytest.raises(u.UnitsError):
            o.to_cartesian(s)
        # Components are read-only.
        with pytest.raises(AttributeError):
            o.d_lat = 0.*u.arcsec
        with pytest.raises(AttributeError):
            del o.d_lat
        o = self.SD_cls(1.*u.arcsec, 1.*u.arcsec, 0.*u.km)
        with pytest.raises(TypeError):
            o.to_cartesian()
        c = CartesianRepresentation(10., 0., 0., unit=u.km)
        with pytest.raises(TypeError):
            self.SD_cls.to_cartesian(c)
        with pytest.raises(TypeError):
            self.SD_cls.from_cartesian(c)
        with pytest.raises(TypeError):
            self.SD_cls.from_cartesian(c, SphericalRepresentation)
        with pytest.raises(TypeError):
            self.SD_cls.from_cartesian(c, c)
@pytest.mark.parametrize('omit_coslat', [False, True], scope='class')
class TestUnitSphericalDifferential():
    # Parametrized over omit_coslat: covers both UnitSphericalDifferential
    # and UnitSphericalCosLatDifferential (no distance component).
    def _setup(self, omit_coslat):
        # Select the differential class under test and precompute the base
        # unit-spherical representation with its unit vectors/scale factors.
        if omit_coslat:
            self.USD_cls = UnitSphericalCosLatDifferential
        else:
            self.USD_cls = UnitSphericalDifferential
        s = UnitSphericalRepresentation(lon=[0., 6., 21.] * u.hourangle,
                                        lat=[0., -30., 85.] * u.deg)
        self.s = s
        self.e = s.unit_vectors()
        self.sf = s.scale_factors(omit_coslat=omit_coslat)
    def test_name_coslat(self, omit_coslat):
        """The selected class is registered in DIFFERENTIAL_CLASSES."""
        self._setup(omit_coslat)
        if omit_coslat:
            assert self.USD_cls is UnitSphericalCosLatDifferential
            assert self.USD_cls.get_name() == 'unitsphericalcoslat'
        else:
            assert self.USD_cls is UnitSphericalDifferential
            assert self.USD_cls.get_name() == 'unitspherical'
        assert self.USD_cls.get_name() in DIFFERENTIAL_CLASSES
    def test_simple_differentials(self, omit_coslat):
        """Cartesian round-trips; adding a differential to a unit-spherical
        base yields a full SphericalRepresentation."""
        self._setup(omit_coslat)
        s, e, sf = self.s, self.e, self.sf
        o_lon = self.USD_cls(1.*u.arcsec, 0.*u.arcsec)
        o_lonc = o_lon.to_cartesian(base=s)
        o_lon2 = self.USD_cls.from_cartesian(o_lonc, base=s)
        assert_differential_allclose(o_lon, o_lon2)
        # simple check by hand for first element
        # (lat[0]=0, so works for both normal and CosLat differential)
        assert_quantity_allclose(o_lonc[0].xyz,
                                 [0., np.pi/180./3600., 0.]*u.one)
        # check all using unit vectors and scale factors.
        s_lon = s + 1.*u.arcsec * sf['lon'] * e['lon']
        assert type(s_lon) is SphericalRepresentation
        assert_representation_allclose(o_lonc, s_lon - s, atol=1e-10*u.one)
        s_lon2 = s + o_lon
        assert_representation_allclose(s_lon2, s_lon, atol=1e-10*u.one)
        o_lat = self.USD_cls(0.*u.arcsec, 1.*u.arcsec)
        o_latc = o_lat.to_cartesian(base=s)
        assert_quantity_allclose(o_latc[0].xyz,
                                 [0., 0., np.pi/180./3600.]*u.one,
                                 atol=1e-10*u.one)
        s_lat = s + 1.*u.arcsec * sf['lat'] * e['lat']
        assert type(s_lat) is SphericalRepresentation
        assert_representation_allclose(o_latc, s_lat - s, atol=1e-10*u.one)
        s_lat2 = s + o_lat
        assert_representation_allclose(s_lat2, s_lat, atol=1e-10*u.one)
    def test_differential_arithmetic(self, omit_coslat):
        """Scaling, addition, subtraction, and conversions that drop the
        radial part of the motion."""
        self._setup(omit_coslat)
        s = self.s
        o_lon = self.USD_cls(1.*u.arcsec, 0.*u.arcsec)
        o_lon_by_2 = o_lon / 2.
        assert type(o_lon_by_2) is self.USD_cls
        assert_representation_allclose(o_lon_by_2.to_cartesian(s) * 2.,
                                       o_lon.to_cartesian(s), atol=1e-10*u.one)
        s_lon = s + o_lon
        s_lon2 = s + 2 * o_lon_by_2
        assert type(s_lon) is SphericalRepresentation
        assert_representation_allclose(s_lon, s_lon2, atol=1e-10*u.one)
        o_lon_rec = o_lon_by_2 + o_lon_by_2
        assert type(o_lon_rec) is self.USD_cls
        assert representation_equal(o_lon, o_lon_rec)
        assert_representation_allclose(s + o_lon, s + o_lon_rec,
                                       atol=1e-10*u.one)
        o_lon_0 = o_lon - o_lon
        assert type(o_lon_0) is self.USD_cls
        for c in o_lon_0.components:
            assert np.all(getattr(o_lon_0, c) == 0.)
        o_lon2 = self.USD_cls(1.*u.mas/u.yr, 0.*u.mas/u.yr)
        kks = u.km/u.kpc/u.s
        assert_quantity_allclose(o_lon2.norm(s)[0], 4.74047*kks, atol=1e-4*kks)
        assert_representation_allclose(o_lon2.to_cartesian(s) * 1000.*u.yr,
                                       o_lon.to_cartesian(s), atol=1e-10*u.one)
        s_off = s + o_lon
        s_off2 = s + o_lon2 * 1000.*u.yr
        assert_representation_allclose(s_off, s_off2, atol=1e-10*u.one)
        # Large step: the CosLat variant already carries cos(lat), hence
        # the conditional correction below.
        factor = 1e5 * u.radian/u.arcsec
        if not omit_coslat:
            factor = factor / np.cos(s.lat)
        s_off_big = s + o_lon * factor
        assert_representation_allclose(
            s_off_big, SphericalRepresentation(s.lon + 90.*u.deg,
                                               0.*u.deg, 1e5),
            atol=5.*u.one)
        o_lon3c = CartesianRepresentation(0., 4.74047, 0., unit=kks)
        # This looses information!!
        o_lon3 = self.USD_cls.from_cartesian(o_lon3c, base=s)
        expected0 = self.USD_cls(1.*u.mas/u.yr, 0.*u.mas/u.yr)
        assert_differential_allclose(o_lon3[0], expected0)
        # Part of motion kept.
        part_kept = s.cross(CartesianRepresentation(0, 1, 0, unit=u.one)).norm()
        assert_quantity_allclose(o_lon3.norm(s), 4.74047*part_kept*kks,
                                 atol=1e-10*kks)
        # (lat[0]=0, so works for both normal and CosLat differential)
        s_off_big2 = s + o_lon3 * 1e5 * u.yr * u.radian/u.mas
        expected0 = SphericalRepresentation(90.*u.deg, 0.*u.deg,
                                            1e5*u.one)
        assert_representation_allclose(s_off_big2[0], expected0, atol=5.*u.one)
    def test_differential_init_errors(self, omit_coslat):
        """Mismatched units between the two angular rates raise."""
        self._setup(omit_coslat)
        with pytest.raises(u.UnitsError):
            self.USD_cls(0.*u.deg, 10.*u.deg/u.yr)
class TestRadialDifferential():
    """Tests for RadialDifferential: a differential with only a distance
    component, applicable to both radial and spherical bases."""
    def setup(self):
        # Base spherical representation plus its radial projection,
        # unit vectors, and scale factors.
        s = SphericalRepresentation(lon=[0., 6., 21.] * u.hourangle,
                                    lat=[0., -30., 85.] * u.deg,
                                    distance=[1, 2, 3] * u.kpc)
        self.s = s
        self.r = s.represent_as(RadialRepresentation)
        self.e = s.unit_vectors()
        self.sf = s.scale_factors()
    def test_name(self):
        """The class is registered in DIFFERENTIAL_CLASSES as 'radial'."""
        assert RadialDifferential.get_name() == 'radial'
        assert RadialDifferential.get_name() in DIFFERENTIAL_CLASSES
    def test_simple_differentials(self):
        """Application to radial and spherical bases, plus cartesian
        round-trips."""
        r, s, e, sf = self.r, self.s, self.e, self.sf
        o_distance = RadialDifferential(1.*u.mpc)
        # Can be applied to RadialRepresentation, though not most useful.
        r_distance = r + o_distance
        assert_quantity_allclose(r_distance.distance,
                                 r.distance + o_distance.d_distance)
        # Addition is commutative here.
        r_distance2 = o_distance + r
        assert_quantity_allclose(r_distance2.distance,
                                 r.distance + o_distance.d_distance)
        # More sense to apply it relative to spherical representation.
        o_distancec = o_distance.to_cartesian(base=s)
        assert_quantity_allclose(o_distancec[0].xyz,
                                 [1e-6, 0., 0.]*u.kpc, atol=1.*u.npc)
        o_recover = RadialDifferential.from_cartesian(o_distancec, base=s)
        assert_quantity_allclose(o_recover.d_distance, o_distance.d_distance)
        s_distance = s + 1.*u.mpc * sf['distance'] * e['distance']
        assert_representation_allclose(o_distancec, s_distance - s,
                                       atol=1*u.npc)
        s_distance2 = s + o_distance
        assert_representation_allclose(s_distance2, s_distance)
class TestPhysicsSphericalDifferential():
    """Test copied from SphericalDifferential, so less extensive."""
    def setup(self):
        # Base physics-spherical representation plus its unit vectors
        # and scale factors.
        s = PhysicsSphericalRepresentation(phi=[0., 90., 315.] * u.deg,
                                           theta=[90., 120., 5.] * u.deg,
                                           r=[1, 2, 3] * u.kpc)
        self.s = s
        self.e = s.unit_vectors()
        self.sf = s.scale_factors()
    def test_name(self):
        """The class is registered as 'physicsspherical'."""
        assert PhysicsSphericalDifferential.get_name() == 'physicsspherical'
        assert PhysicsSphericalDifferential.get_name() in DIFFERENTIAL_CLASSES
    def test_simple_differentials(self):
        """Cartesian round-trips and agreement with unit-vector steps."""
        s, e, sf = self.s, self.e, self.sf
        o_phi = PhysicsSphericalDifferential(1*u.arcsec, 0*u.arcsec, 0*u.kpc)
        o_phic = o_phi.to_cartesian(base=s)
        o_phi2 = PhysicsSphericalDifferential.from_cartesian(o_phic, base=s)
        assert_quantity_allclose(o_phi.d_phi, o_phi2.d_phi, atol=1.*u.narcsec)
        assert_quantity_allclose(o_phi.d_theta, o_phi2.d_theta,
                                 atol=1.*u.narcsec)
        assert_quantity_allclose(o_phi.d_r, o_phi2.d_r, atol=1.*u.npc)
        # simple check by hand for first element.
        assert_quantity_allclose(o_phic[0].xyz,
                                 [0., np.pi/180./3600., 0.]*u.kpc,
                                 atol=1.*u.npc)
        # check all using unit vectors and scale factors.
        s_phi = s + 1.*u.arcsec * sf['phi'] * e['phi']
        assert_representation_allclose(o_phic, s_phi - s, atol=1e-10*u.kpc)
        o_theta = PhysicsSphericalDifferential(0*u.arcsec, 1*u.arcsec, 0*u.kpc)
        o_thetac = o_theta.to_cartesian(base=s)
        # theta increases towards -z at theta=90 deg, hence the sign.
        assert_quantity_allclose(o_thetac[0].xyz,
                                 [0., 0., -np.pi/180./3600.]*u.kpc,
                                 atol=1.*u.npc)
        s_theta = s + 1.*u.arcsec * sf['theta'] * e['theta']
        assert_representation_allclose(o_thetac, s_theta - s, atol=1e-10*u.kpc)
        s_theta2 = s + o_theta
        assert_representation_allclose(s_theta2, s_theta, atol=1e-10*u.kpc)
        o_r = PhysicsSphericalDifferential(0*u.arcsec, 0*u.arcsec, 1*u.mpc)
        o_rc = o_r.to_cartesian(base=s)
        assert_quantity_allclose(o_rc[0].xyz, [1e-6, 0., 0.]*u.kpc,
                                 atol=1.*u.npc)
        s_r = s + 1.*u.mpc * sf['r'] * e['r']
        assert_representation_allclose(o_rc, s_r - s, atol=1e-10*u.kpc)
        s_r2 = s + o_r
        assert_representation_allclose(s_r2, s_r)
    def test_differential_init_errors(self):
        """Unitless angular rates raise UnitsError."""
        with pytest.raises(u.UnitsError):
            PhysicsSphericalDifferential(1.*u.arcsec, 0., 0.)
class TestCylindricalDifferential():
    """Test copied from SphericalDifferential, so less extensive."""
    def setup(self):
        # Base cylindrical representation plus its unit vectors
        # and scale factors.
        s = CylindricalRepresentation(rho=[1, 2, 3] * u.kpc,
                                      phi=[0., 90., 315.] * u.deg,
                                      z=[3, 2, 1] * u.kpc)
        self.s = s
        self.e = s.unit_vectors()
        self.sf = s.scale_factors()
    def test_name(self):
        """The class is registered as 'cylindrical'."""
        assert CylindricalDifferential.get_name() == 'cylindrical'
        assert CylindricalDifferential.get_name() in DIFFERENTIAL_CLASSES
    def test_simple_differentials(self):
        """Cartesian round-trips and agreement with unit-vector steps."""
        s, e, sf = self.s, self.e, self.sf
        o_rho = CylindricalDifferential(1.*u.mpc, 0.*u.arcsec, 0.*u.kpc)
        o_rhoc = o_rho.to_cartesian(base=s)
        assert_quantity_allclose(o_rhoc[0].xyz, [1.e-6, 0., 0.]*u.kpc)
        s_rho = s + 1.*u.mpc * sf['rho'] * e['rho']
        assert_representation_allclose(o_rhoc, s_rho - s, atol=1e-10*u.kpc)
        s_rho2 = s + o_rho
        assert_representation_allclose(s_rho2, s_rho)
        o_phi = CylindricalDifferential(0.*u.kpc, 1.*u.arcsec, 0.*u.kpc)
        o_phic = o_phi.to_cartesian(base=s)
        o_phi2 = CylindricalDifferential.from_cartesian(o_phic, base=s)
        assert_quantity_allclose(o_phi.d_rho, o_phi2.d_rho, atol=1.*u.npc)
        assert_quantity_allclose(o_phi.d_phi, o_phi2.d_phi, atol=1.*u.narcsec)
        assert_quantity_allclose(o_phi.d_z, o_phi2.d_z, atol=1.*u.npc)
        # simple check by hand for first element.
        assert_quantity_allclose(o_phic[0].xyz,
                                 [0., np.pi/180./3600., 0.]*u.kpc)
        # check all using unit vectors and scale factors.
        s_phi = s + 1.*u.arcsec * sf['phi'] * e['phi']
        assert_representation_allclose(o_phic, s_phi - s, atol=1e-10*u.kpc)
        o_z = CylindricalDifferential(0.*u.kpc, 0.*u.arcsec, 1.*u.mpc)
        o_zc = o_z.to_cartesian(base=s)
        assert_quantity_allclose(o_zc[0].xyz, [0., 0., 1.e-6]*u.kpc)
        s_z = s + 1.*u.mpc * sf['z'] * e['z']
        assert_representation_allclose(o_zc, s_z - s, atol=1e-10*u.kpc)
        s_z2 = s + o_z
        assert_representation_allclose(s_z2, s_z)
    def test_differential_init_errors(self):
        """Incompatible d_z units raise UnitsError."""
        with pytest.raises(u.UnitsError):
            CylindricalDifferential(1.*u.pc, 1.*u.arcsec, 3.*u.km/u.s)
class TestCartesianDifferential():
    """Test copied from SphericalDifferential, so less extensive."""
    def setup(self):
        # Base cartesian representation plus its unit vectors
        # and scale factors.
        s = CartesianRepresentation(x=[1, 2, 3] * u.kpc,
                                    y=[2, 3, 1] * u.kpc,
                                    z=[3, 1, 2] * u.kpc)
        self.s = s
        self.e = s.unit_vectors()
        self.sf = s.scale_factors()
    def test_name(self):
        """The class is registered as 'cartesian'."""
        assert CartesianDifferential.get_name() == 'cartesian'
        assert CartesianDifferential.get_name() in DIFFERENTIAL_CLASSES
    def test_simple_differentials(self):
        """Cartesian round-trips for each axis; also exercises the three
        different constructor signatures."""
        s, e, sf = self.s, self.e, self.sf
        for d, differential in (  # test different inits while we're at it.
                ('x', CartesianDifferential(1.*u.pc, 0.*u.pc, 0.*u.pc)),
                ('y', CartesianDifferential([0., 1., 0.], unit=u.pc)),
                ('z', CartesianDifferential(np.array([[0., 0., 1.]]) * u.pc,
                                            xyz_axis=1))):
            # to_cartesian is base-independent for cartesian differentials.
            o_c = differential.to_cartesian(base=s)
            o_c2 = differential.to_cartesian()
            assert np.all(representation_equal(o_c, o_c2))
            assert all(np.all(getattr(differential, 'd_'+c) == getattr(o_c, c))
                       for c in ('x', 'y', 'z'))
            differential2 = CartesianDifferential.from_cartesian(o_c)
            assert np.all(representation_equal(differential2, differential))
            differential3 = CartesianDifferential.from_cartesian(o_c, base=o_c)
            assert np.all(representation_equal(differential3, differential))
            s_off = s + 1.*u.pc * sf[d] * e[d]
            assert_representation_allclose(o_c, s_off - s, atol=1e-10*u.kpc)
            s_off2 = s + differential
            assert_representation_allclose(s_off2, s_off)
    def test_init_failures(self):
        """Wrong argument counts, units, or xyz_axis usage all raise."""
        with pytest.raises(ValueError):
            CartesianDifferential(1.*u.kpc/u.s, 2.*u.kpc)
        with pytest.raises(u.UnitsError):
            CartesianDifferential(1.*u.kpc/u.s, 2.*u.kpc, 3.*u.kpc)
        with pytest.raises(ValueError):
            CartesianDifferential(1.*u.kpc, 2.*u.kpc, 3.*u.kpc, xyz_axis=1)
class TestDifferentialConversion():
    """Conversions between differential classes (plain vs CosLat, spherical
    vs physics-spherical vs cylindrical, and decomposition into
    unit-spherical plus radial parts)."""
    def setup(self):
        # Base spherical representation shared by all tests.
        self.s = SphericalRepresentation(lon=[0., 6., 21.] * u.hourangle,
                                         lat=[0., -30., 85.] * u.deg,
                                         distance=[1, 2, 3] * u.kpc)
    @pytest.mark.parametrize('sd_cls', [SphericalDifferential,
                                        SphericalCosLatDifferential])
    def test_represent_as_own_class(self, sd_cls):
        """represent_as to the same class returns the identical object."""
        so = sd_cls(1.*u.deg, 2.*u.deg, 0.1*u.kpc)
        so2 = so.represent_as(sd_cls)
        assert so2 is so
    def test_represent_other_coslat(self):
        """Plain <-> CosLat conversions differ by a cos(lat) factor and
        round-trip exactly; also checked for the UnitSpherical variants."""
        s = self.s
        coslat = np.cos(s.lat)
        so = SphericalDifferential(1.*u.deg, 2.*u.deg, 0.1*u.kpc)
        so_coslat = so.represent_as(SphericalCosLatDifferential, base=s)
        assert_quantity_allclose(so.d_lon * coslat,
                                 so_coslat.d_lon_coslat)
        so2 = so_coslat.represent_as(SphericalDifferential, base=s)
        assert np.all(representation_equal(so2, so))
        so3 = SphericalDifferential.from_representation(so_coslat, base=s)
        assert np.all(representation_equal(so3, so))
        so_coslat2 = SphericalCosLatDifferential.from_representation(so, base=s)
        assert np.all(representation_equal(so_coslat2, so_coslat))
        # Also test UnitSpherical
        us = s.represent_as(UnitSphericalRepresentation)
        uo = so.represent_as(UnitSphericalDifferential)
        uo_coslat = so.represent_as(UnitSphericalCosLatDifferential, base=s)
        assert_quantity_allclose(uo.d_lon * coslat,
                                 uo_coslat.d_lon_coslat)
        uo2 = uo_coslat.represent_as(UnitSphericalDifferential, base=us)
        assert np.all(representation_equal(uo2, uo))
        uo3 = UnitSphericalDifferential.from_representation(uo_coslat, base=us)
        assert np.all(representation_equal(uo3, uo))
        uo_coslat2 = UnitSphericalCosLatDifferential.from_representation(
            uo, base=us)
        assert np.all(representation_equal(uo_coslat2, uo_coslat))
        uo_coslat3 = uo.represent_as(UnitSphericalCosLatDifferential, base=us)
        assert np.all(representation_equal(uo_coslat3, uo_coslat))
    @pytest.mark.parametrize('sd_cls', [SphericalDifferential,
                                        SphericalCosLatDifferential])
    @pytest.mark.parametrize('r_cls', (SphericalRepresentation,
                                       UnitSphericalRepresentation,
                                       PhysicsSphericalRepresentation,
                                       CylindricalRepresentation))
    def test_represent_regular_class(self, sd_cls, r_cls):
        """Representing a differential as a regular representation class
        must go through the cartesian offset."""
        so = sd_cls(1.*u.deg, 2.*u.deg, 0.1*u.kpc)
        r = so.represent_as(r_cls, base=self.s)
        c = so.to_cartesian(self.s)
        r_check = c.represent_as(r_cls)
        assert np.all(representation_equal(r, r_check))
        so2 = sd_cls.from_representation(r, base=self.s)
        so3 = sd_cls.from_cartesian(r.to_cartesian(), self.s)
        assert np.all(representation_equal(so2, so3))
    @pytest.mark.parametrize('sd_cls', [SphericalDifferential,
                                        SphericalCosLatDifferential])
    def test_convert_physics(self, sd_cls):
        # Conversion needs no base for SphericalDifferential, but does
        # need one (to get the latitude) for SphericalCosLatDifferential.
        if sd_cls is SphericalDifferential:
            usd_cls = UnitSphericalDifferential
            base_s = base_u = base_p = None
        else:
            usd_cls = UnitSphericalCosLatDifferential
            base_s = self.s[1]
            base_u = base_s.represent_as(UnitSphericalRepresentation)
            base_p = base_s.represent_as(PhysicsSphericalRepresentation)
        so = sd_cls(1.*u.deg, 2.*u.deg, 0.1*u.kpc)
        po = so.represent_as(PhysicsSphericalDifferential, base=base_s)
        so2 = sd_cls.from_representation(po, base=base_s)
        assert_differential_allclose(so, so2)
        po2 = PhysicsSphericalDifferential.from_representation(so, base=base_p)
        assert_differential_allclose(po, po2)
        so3 = po.represent_as(sd_cls, base=base_p)
        assert_differential_allclose(so, so3)
        s = self.s
        p = s.represent_as(PhysicsSphericalRepresentation)
        cso = so.to_cartesian(s[1])
        cpo = po.to_cartesian(p[1])
        assert_representation_allclose(cso, cpo)
        assert_representation_allclose(s[1] + so, p[1] + po)
        po2 = so.represent_as(PhysicsSphericalDifferential,
                              base=None if base_s is None else s)
        assert_representation_allclose(s + so, p + po2)
        # Dropping the distance part: convert to unit-spherical.
        suo = usd_cls.from_representation(so)
        puo = usd_cls.from_representation(po, base=base_u)
        assert_differential_allclose(suo, puo)
        suo2 = so.represent_as(usd_cls)
        puo2 = po.represent_as(usd_cls, base=base_p)
        assert_differential_allclose(suo2, puo2)
        assert_differential_allclose(puo, puo2)
        # Dropping the angular part: convert to radial.
        sro = RadialDifferential.from_representation(so)
        pro = RadialDifferential.from_representation(po)
        assert representation_equal(sro, pro)
        sro2 = so.represent_as(RadialDifferential)
        pro2 = po.represent_as(RadialDifferential)
        assert representation_equal(sro2, pro2)
        assert representation_equal(pro, pro2)
    @pytest.mark.parametrize(
        ('sd_cls', 'usd_cls'),
        [(SphericalDifferential, UnitSphericalDifferential),
         (SphericalCosLatDifferential, UnitSphericalCosLatDifferential)])
    def test_convert_unit_spherical_radial(self, sd_cls, usd_cls):
        """Splitting a spherical differential into unit-spherical and radial
        parts and promoting a unit-spherical one back to spherical."""
        s = self.s
        us = s.represent_as(UnitSphericalRepresentation)
        rs = s.represent_as(RadialRepresentation)
        assert_representation_allclose(rs * us, s)
        uo = usd_cls(2.*u.deg, 1.*u.deg)
        # Promoting to spherical introduces a zero distance component.
        so = uo.represent_as(sd_cls, base=s)
        assert_quantity_allclose(so.d_distance, 0.*u.kpc, atol=1.*u.npc)
        uo2 = so.represent_as(usd_cls)
        assert_representation_allclose(uo.to_cartesian(us),
                                       uo2.to_cartesian(us))
        so1 = sd_cls(2.*u.deg, 1.*u.deg, 5.*u.pc)
        uo_r = so1.represent_as(usd_cls)
        ro_r = so1.represent_as(RadialDifferential)
        assert np.all(representation_equal(uo_r, uo))
        assert np.all(representation_equal(ro_r, RadialDifferential(5.*u.pc)))
    @pytest.mark.parametrize('sd_cls', [SphericalDifferential,
                                        SphericalCosLatDifferential])
    def test_convert_cylindrial(self, sd_cls):
        """Spherical <-> cylindrical differential conversions round-trip
        through the same cartesian offset."""
        s = self.s
        so = sd_cls(1.*u.deg, 2.*u.deg, 0.1*u.kpc)
        cyo = so.represent_as(CylindricalDifferential, base=s)
        cy = s.represent_as(CylindricalRepresentation)
        so1 = cyo.represent_as(sd_cls, base=cy)
        assert_representation_allclose(so.to_cartesian(s),
                                       so1.to_cartesian(s))
        cyo2 = CylindricalDifferential.from_representation(so, base=cy)
        assert_representation_allclose(cyo2.to_cartesian(base=cy),
                                       cyo.to_cartesian(base=cy))
        so2 = sd_cls.from_representation(cyo2, base=s)
        assert_representation_allclose(so.to_cartesian(s),
                                       so2.to_cartesian(s))
    @pytest.mark.parametrize('sd_cls', [SphericalDifferential,
                                        SphericalCosLatDifferential])
    def test_combinations(self, sd_cls):
        """Adding/subtracting unit-spherical and radial differentials
        yields full spherical differentials with the combined components."""
        if sd_cls is SphericalDifferential:
            uo = UnitSphericalDifferential(2.*u.deg, 1.*u.deg)
            uo_d_lon = uo.d_lon
        else:
            uo = UnitSphericalCosLatDifferential(2.*u.deg, 1.*u.deg)
            uo_d_lon = uo.d_lon_coslat
        ro = RadialDifferential(1.*u.mpc)
        so1 = uo + ro
        so1c = sd_cls(uo_d_lon, uo.d_lat, ro.d_distance)
        assert np.all(representation_equal(so1, so1c))
        so2 = uo - ro
        so2c = sd_cls(uo_d_lon, uo.d_lat, -ro.d_distance)
        assert np.all(representation_equal(so2, so2c))
        so3 = so2 + ro
        so3c = sd_cls(uo_d_lon, uo.d_lat, 0.*u.kpc)
        assert np.all(representation_equal(so3, so3c))
        so4 = so1 + ro
        so4c = sd_cls(uo_d_lon, uo.d_lat, 2*ro.d_distance)
        assert np.all(representation_equal(so4, so4c))
        so5 = so1 - uo
        so5c = sd_cls(0*u.deg, 0.*u.deg, ro.d_distance)
        assert np.all(representation_equal(so5, so5c))
        assert_representation_allclose(self.s + (uo+ro), self.s+so1)
@pytest.mark.parametrize('rep,dif', [
    [CartesianRepresentation([1, 2, 3]*u.kpc),
     CartesianDifferential([.1, .2, .3]*u.km/u.s)],
    [SphericalRepresentation(90*u.deg, 0.*u.deg, 14.*u.kpc),
     SphericalDifferential(1.*u.deg, 2.*u.deg, 0.1*u.kpc)]
])
def test_arithmetic_with_differentials_fail(rep, dif):
    """Any arithmetic on a representation carrying differentials must raise."""
    rep = rep.with_differentials(dif)
    # Every arithmetic operation is forbidden once differentials are attached;
    # exercise each one and require a TypeError.
    for operation in (lambda: rep + rep,
                      lambda: rep - rep,
                      lambda: rep * rep,
                      lambda: rep / rep,
                      lambda: 10. * rep,
                      lambda: rep / 10.,
                      lambda: -rep):
        with pytest.raises(TypeError):
            operation()
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import re
import yaml
import datetime
import os.path
import exceptions
from functools import wraps
from docopt import docopt
# Package version string (reported by `datasets --version`).
__version__ = '0.1.0'

# Candidate configuration files, searched in order: next to the script,
# system-wide, per-user, then the current working directory.
defaultconfigs = [ os.path.join(sys.path[0],'datasets.yml'),
                   '/etc/datasets.yml',
                   os.path.expanduser('~/.datasets.yml'),
                   './datasets.yml' ]
class Dataset:
    """A dataset: a directory containing a README with YAML frontmatter
    declaring ``dataset: true``."""

    def __init__(self, path, basedir = None):
        # The basedir is used to compute the dataset's display name; it
        # defaults to the parent directory of the dataset itself.
        self.basedir = basedir or os.path.dirname(path)
        self.path = path
        self._info()

    def _info(self):
        """Validate the README and populate ``name`` and ``description``."""
        readme = os.path.join(self.path, 'README')
        if not os.path.isfile(readme):
            raise InvalidDatasetException(
                'Invalid dataset %s. Does not contain a README file.'%self.path)
        self.name = os.path.relpath(self.path, self.basedir)
        frontmatter = yaml_safe_load_first(open(readme))
        if not frontmatter.get('dataset'):
            raise InvalidDatasetException(
                "Invalid dataset %s. Expected README YAML frontmatter to have 'dataset: true'." % self.path)
        self.description = frontmatter.get('description', "")

    def get_subdatasets(self):
        """Return Dataset objects for every valid child dataset directory."""
        children = []
        entries = [os.path.join(self.path, name) for name in os.listdir(self.path)]
        for entry in entries:
            if not os.path.isdir(entry):
                continue
            try:
                children.append(Dataset(entry, basedir=self.basedir))
            except (InvalidDatasetException):
                # Plain sub-folders are simply not datasets; skip them.
                pass
        return children
class InvalidDatasetException(Exception):
    """Raised when a path does not point to a valid dataset.

    Subclasses the builtin ``Exception`` directly instead of the
    Python-2-only ``exceptions.Exception`` (they are the same object on
    Python 2, and the builtin is portable).
    """
    pass
def argparsed(func):
    """Decorator: parse ``argv`` with docopt -- using *func*'s docstring as
    the usage text -- and pass the parsed argument dict on to *func*."""
    @wraps(func)
    def inner(argv, config):
        parsed = docopt(func.__doc__, argv=argv)
        return func(parsed, config)
    return inner
def load_configs(configs):
    """Collect dataset paths from every existing config file in *configs*.

    Only the first YAML document of each file is consulted; its 'datasets'
    key lists dataset directories.  Invalid entries are silently dropped.
    """
    paths = set()
    for filename in configs:
        if not os.path.isfile(filename):
            continue
        for document in yaml.safe_load_all(open(filename)):
            paths.update(set(document.get('datasets', [])))
            break  # only read the first YAML document per file
    datasets = []
    for path in paths:
        try:
            datasets.append(Dataset(path))
        except (InvalidDatasetException):
            pass  # TODO: verbose warning
    return {"datasets": datasets}
def get_dataset(path, roots):
    """Resolve *path* to a Dataset.

    Absolute paths are used directly; otherwise the first path component
    must name one of the registered root datasets in *roots*.
    """
    if path.startswith('/'):
        return Dataset(path)
    first_component = path.split('/')[0]
    for root in roots:
        if first_component == os.path.basename(root.path):
            return Dataset(os.path.join(os.path.dirname(root.path), path))
    raise InvalidDatasetException("%s is not a dataset" % path)
@argparsed
def list(args, config):
    """
    Usage: datasets list [options] [<dataset>...]

    Show a short description for each of the available <dataset>s.

    Options:
       <dataset>        Dataset name.
       -r --recursive   List all subdatasets.
       --verbose        Include more detailed descriptions if available.

    Notes:
      A dataset is simply a folder that has a README file that begins with:
         ---
         dataset: true
         description: optionally, a description here
         ---
      You can register datasets by creating a file in ~/.datasets.yml, or
      ./datasets.yml that starts with the following:
        ---
        datasets:
          # just include the path to the dataset folder
          # NOTE: folder must include a README
          - /data/all_nsa_data/
          - /data/mitt_romney_taxes/
    """
    # NOTE: intentionally shadows the builtin 'list' -- main() dispatches to
    # module-level functions by sub-command name via globals().
    def _print_dataset(ds, args):
        # One line per dataset; optionally its location and its children.
        print " - {:<30} {:<30}".format(ds.name, ds.description)
        if args["--verbose"]:
            print " {:<30} Location: {:<30}".format("", ds.path)
        if args["--recursive"]:
            for i in ds.get_subdatasets():
                _print_dataset(i, args)
    if not config["datasets"]:
        print "No datasets found."
        return
    sets = config['datasets']
    if args['<dataset>']:
        # Explicitly named datasets are always listed recursively.
        args['--recursive'] = True
        try:
            sets = map(lambda x: get_dataset(x, sets), args['<dataset>'])
        except InvalidDatasetException, e:
            print >> sys.stderr, "ERROR: %s" % e
            sys.exit(-1)
    print "Datasets:"
    for ds in sets:
        _print_dataset(ds, args)
    print
@argparsed
def copy(args, config):
"""
Usage: datasets copy[options] <dataset>...
Make a lightweight copy of a <dataset>.
Options:
<dataset> Dataset Name
-c, --clobber Clobber existing files [Default: False]
-n, --dry-run Show what would happen.
"""
if not config["datasets"]:
print "No datasets found."
return
try:
sets = map(lambda x: get_dataset(x, config['datasets']), args['<dataset>'])
except InvalidDatasetException, e:
print >> sys.stderr, "ERROR: %s" % e
sys.exit(-1)
for ds in sets:
rootdir = ds.name
os.mkdir(rootdir)
for (path, dirs, files) in os.walk(ds.path):
relpath = os.path.relpath(path,ds.path)
for f in files:
source = os.path.realpath(os.path.join(ds.path,path,f))
target = os.path.join(rootdir,relpath,f)
if relpath == "." and f == 'README':
frontmatter, rest = get_readme(source)
frontmatter['source'] = ds.path
frontmatter['datecopied'] = datetime.datetime.now()
t = open(target, "w")
t.write(yaml.dump(frontmatter,
explicit_start = True, default_flow_style = False))
t.write('---\n')
t.write(rest)
t.close()
else:
os.symlink(source, target)
for d in dirs:
os.mkdir(os.path.join(rootdir,relpath,d))
@argparsed
def create(args, config):
    """
    A dataset itself is simply any folder with specially formatted README file in
    it. The README file must start with the following:

    ---
    dataset: true
    description: A short one-liner description of the dataset
    includes:
        - folder1
        - folder2
    ---

    And may be followed by anything else.
    """
    # NOTE(review): not implemented yet.  Also, @argparsed runs docopt on this
    # docstring, which has no "Usage:" section -- docopt is expected to raise
    # on it; confirm how 'datasets create' is meant to behave.
    pass
def get_readme(path):
    """returns (yamldoc, rest)

    Split the README at *path* into its parsed YAML frontmatter (the first
    '---' ... '---' section) and the remaining free-form text.

    Raises InvalidDatasetException if the file does not start with a YAML
    frontmatter section (previously this crashed with an AttributeError on
    the unchecked ``re.match`` result).
    """
    content = open(path).read()
    match = re.match( r'^(---\s*$.*?^---\s*$)(.*)', content, re.MULTILINE | re.DOTALL )
    if match is None:
        raise InvalidDatasetException(
            "%s does not start with YAML frontmatter" % path)
    return (yaml_safe_load_first(match.group(1)), match.group(2))
def yaml_safe_load_first(content):
    """Return the first YAML document in *content* (None if there is none)."""
    return next(iter(yaml.safe_load_all(content)), None)
def main(argv = None):
    """datasets is a simple utility for discovering datasets and making lightweight
copies to use in analyses.

Usage:
    datasets <command> [<options>...]

General Options:
    -h, --help       Show help.
    --version        Show version and exit.

Commands:
   list              List available datasets.
   copy              Get a lightweight copy of a dataset.
   create            Create an empty dataset.
   register          Register a dataset in ~/.datasets.yml.
   bash_completion   Add bash autocomplete code to your ~/.bashrc

See 'datasets help <command>' for more information on a specific command."""
    # options_first=True stops docopt at the first positional argument so
    # that sub-command options are passed through untouched.
    args = docopt(main.__doc__,
                  version='datasets version %s' % __version__,
                  options_first=True,
                  argv=argv or sys.argv[1:])
    cmd = args['<command>']
    # Dispatch to the module-level function with the same name as <command>.
    try:
        method = globals()[cmd]
        assert callable(method)
    except (KeyError, AssertionError):
        exit("%r is not a datasets command. See 'datasets help'." % cmd)
    config = load_configs(defaultconfigs)
    # Re-assemble the sub-command argv for the handler's own docopt parse.
    argv = [args['<command>']] + args['<options>']
    return method(argv, config)
# Script entry point.
if __name__ == '__main__':
    main()
| |
"""
LrCollection.py
Heikki.Huttunen@tut.fi, Jul 29th, 2014
Defines the class LrCollection: A hierarchical two-layer
structure for MEG decoding. The input consists of time slices
and sensor slices. Each 1st layer classifier will see one
slice of the data either in time or sensor dimension.
The 1st layer predictions are combined together using
a second layer classifier.
Classifiers can be any classifier with sklearn interface.
Alternatively, there can be a list of classifiers for
both layers, in which case all will be used.
===
Copyright (c) 2014, Heikki Huttunen
Department of Signal Processing
Tampere University of Technology
Heikki.Huttunen@tut.fi
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Tampere University of Technology nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import numpy as np
import copy
class LrCollection():
    """Hierarchical two-layer classifier over (trials, sensors, time) data.

    First-layer classifiers each see one slice of the data (a single sensor
    over all time points, and/or a single time point over all sensors); a
    second layer combines their predicted probabilities.
    """

    def __init__(self,
                 clf1,
                 clf2,
                 useCols = True,
                 useRows = True):
        """
        Initialize the LR collection classifier.

        Args:
            clf1: First layer classifier. Can also be a list of
                  classifiers.
            clf2: Second layer classifier. Can also be a list of
                  classifiers. In this case, the output will be
                  averaged over the predictions of the list.
            useCols: If true, train a predictor for each sensor
            useRows: If true, train a predictor for each timepoint
        Returns:
            self
        """
        # If the first layer classifier is not inside a list, store it
        # in a 1-element list.
        if not isinstance(clf1, list):
            self.clf1 = [clf1]
        else:
            self.clf1 = clf1
        # If the second layer classifier is not inside a list, store it
        # in a 1-element list.
        if not isinstance(clf2, list):
            self.clf2 = [clf2]
        else:
            self.clf2 = clf2
        self.useCols = useCols
        self.useRows = useRows

    def getView(self, X, idx):
        """
        Extract data from a single row or column in the data matrix.
        Can also span multiple rows/columns.

        Args:
            X: Input data array of shape (n, p, t)
            idx: A three element list of requested slice coordinates.
                 The element idx[0] is the list of trials to extract.
                 Second element idx[1] is a two element vector with
                 start and end indices in the sensor space (e.g.,
                 idx[1] = [0, 306]). The third element idx[2] is a
                 two element vector with start and end indices in the time
                 dimension (e.g., idx[2] = [0, 31]).
        """
        cols = idx[1]
        rows = idx[2]
        # Extract the requested slice and reshape to a design matrix
        # with one flattened feature vector per trial.
        result = X[idx[0], cols[0]:cols[1], rows[0]:rows[1]]
        result = np.reshape(result, (result.shape[0], -1))
        return result

    def fit(self, X, y):
        """
        Train the hierarchical classification model.

        Args:
            X: Input training data array of shape (n, p, t)
            y: Training class labels (shape: (n,))
        """
        # All 1st layer classifiers will be stored here:
        self.classifiers = []
        # We will be using all data for training.
        trials = range(X.shape[0])
        # Generate column and row views for the selected trials.
        if self.useCols:
            for col in range(X.shape[1]):
                # Train all classifiers in the list of 1st layer
                for c in self.clf1:
                    clf = copy.deepcopy(c)
                    # Define the slice with one sensor and all time indices
                    rows = [0, X.shape[2]]
                    cols = [col, col+1]
                    # Get the data for this view
                    view = self.getView(X, [trials, cols, rows])
                    # Train a classifier with this view.
                    clf.fit(view, y)
                    self.classifiers.append((clf, (cols, rows)))
        if self.useRows:
            for row in range(X.shape[2]):
                # Train all classifiers in the list of 1st layer
                for c in self.clf1:
                    clf = copy.deepcopy(c)
                    # Define the slice with one time index and all sensors
                    rows = [row, row+1]
                    cols = [0, X.shape[1]]
                    # Get the data for this view
                    view = self.getView(X, [trials, cols, rows])
                    # Train a classifier with this view.
                    clf.fit(view, y)
                    self.classifiers.append((clf, (cols, rows)))
        # The input to the second layer are the predicted probabilities
        # from the first layer predictors.
        yHat = self.predict_proba_l1(X)
        # Train second layer classifiers to merge the inputs.
        for c in self.clf2:
            c.fit(yHat, y)

    def predict_proba_l1(self, X):
        """
        Predict class probabilities for the test data using all 1st layer
        classifiers.

        Args:
            X: Input test data array of shape (n_t, p, t)
        Returns:
            Numpy array of probabilities (shape: (n_t, num_classifiers))
        """
        yHat = []
        trials = range(X.shape[0])
        # Predict probability with every classifier in our list
        for classifier in self.classifiers:
            # classifier is a tuple with the trained classifier
            # and the related slice indices.
            c = classifier[0]
            idx = classifier[1]
            # Get the same view as in training stage
            view = self.getView(X, [trials, idx[0], idx[1]])
            # Predict; column 1 is the positive-class probability (binary).
            p = c.predict_proba(view)[:, 1]
            yHat.append(p)
        return np.array(yHat).T

    def predict_proba(self, X):
        """
        Predict class probabilities for the test data.

        Args:
            X: Input test data array of shape (n_t, p, t)
        Returns:
            Numpy array of probabilities (shape: (n_t, num_classes))
        """
        # Predict first using all 1st layer classifiers:
        yHat = self.predict_proba_l1(X)
        # Average the 2nd layer predictions.  Fix: the previous code used
        # ``if y == []`` as the first-iteration test, which after the first
        # pass compared a numpy array to a list elementwise and relied on
        # deprecated empty-array truthiness; use a None sentinel instead.
        y = None
        for c in self.clf2:
            p = c.predict_proba(yHat)
            y = p if y is None else y + p
        y = y / len(self.clf2)
        return y

    def predict(self, X):
        """
        Predict class labels for the test data.

        Args:
            X: Input test data array of shape (n_t, p, t)
        Returns:
            Numpy boolean array thresholding the probabilities at 0.5
            (shape: (n_t, num_classes))
        """
        yHat = (self.predict_proba(X) > 0.5)
        return yHat
| |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import paste.urlmap
import re
import urllib2
from nova.api.openstack import wsgi
from nova.openstack.common import log as logging
# Matches an HTTP quoted-string, including backslash-escaped characters.
_quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"'

# Matches one ';'-separated option piece: a token or quoted-string key,
# optionally followed by '=' and a token or quoted-string value.
_option_header_piece_re = re.compile(r';\s*([^\s;=]+|%s)\s*'
                                     r'(?:=\s*([^;]+|%s))?\s*' %
                                     (_quoted_string_re, _quoted_string_re))

LOG = logging.getLogger(__name__)
def unquote_header_value(value):
    """Unquotes a header value.

    This does not use the real unquoting but what browsers are actually
    using for quoting.

    :param value: the header value to unquote.
    """
    # Browser-style unquoting: strip a single pair of surrounding quotes.
    is_quoted = bool(value) and value.startswith('"') and value.endswith('"')
    if is_quoted:
        # Deliberately not RFC-strict: strict unquoting would mangle the
        # 'C:\foo\bar.txt' style filenames that IE (and others) upload.
        return value[1:-1]
    return value
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings. A quoted-string could
    contain a comma. A non-quoted string could have quotes in the
    middle. Quotes are removed automatically after parsing.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    :param value: a string with a list header.
    :return: :class:`list`
    """
    items = []
    for element in urllib2.parse_http_list(value):
        quoted = element[:1] == element[-1:] == '"'
        items.append(unquote_header_value(element[1:-1]) if quoted else element)
    return items
def parse_options_header(value):
    """Parse a ``Content-Type`` like header into a tuple with the content
    type and the options:

    >>> parse_options_header('Content-Type: text/html; mimetype=text/html')
    ('Content-Type:', {'mimetype': 'text/html'})

    :param value: the header to parse.
    :return: (str, options)
    """
    def _tokenize(string):
        # Yield an unquoted (key, value) pair for each ';'-separated piece.
        for match in _option_header_piece_re.finditer(string):
            key, value = match.groups()
            key = unquote_header_value(key)
            if value is not None:
                value = unquote_header_value(value)
            yield key, value

    if not value:
        return '', {}

    # The leading ';' makes the header name itself the first matched piece.
    parts = _tokenize(';' + value)
    # Builtin next() instead of the Python-2-only generator .next() method.
    name = next(parts)[0]
    extra = dict(parts)
    return name, extra
class Accept(object):
    """Parsed HTTP Accept header: an ordered list of (mask, params) pairs."""

    def __init__(self, value):
        self._content_types = [parse_options_header(v) for v in
                               parse_list_header(value)]

    def best_match(self, supported_content_types):
        # FIXME: Should we have a more sophisticated matching algorithm that
        # takes into account the version as well?
        best_quality = -1
        best_content_type = None
        best_params = {}
        best_match = '*/*'
        for content_type in supported_content_types:
            for content_mask, params in self._content_types:
                try:
                    # 'q' is the quality factor; absent means 1.
                    quality = float(params.get('q', 1))
                except ValueError:
                    # Malformed quality value: skip this mask entirely.
                    continue
                if quality < best_quality:
                    continue
                elif best_quality == quality:
                    # On a quality tie, keep the more specific mask
                    # (the one with fewer '*' wildcards).
                    if best_match.count('*') <= content_mask.count('*'):
                        continue
                if self._match_mask(content_mask, content_type):
                    best_quality = quality
                    best_content_type = content_type
                    best_params = params
                    best_match = content_mask
        return best_content_type, best_params

    def content_type_params(self, best_content_type):
        """Find parameters in Accept header for given content type."""
        for content_type, params in self._content_types:
            if best_content_type == content_type:
                return params
        return {}

    def _match_mask(self, mask, content_type):
        # Exact comparison unless the mask contains a wildcard.
        if '*' not in mask:
            return content_type == mask
        if mask == '*/*':
            return True
        # 'type/*' masks match on the major type only.
        mask_major = mask[:-2]
        content_type_major = content_type.split('/', 1)[0]
        return content_type_major == mask_major
def urlmap_factory(loader, global_conf, **local_conf):
    """Paste app factory: build a URLMap from path -> app-name config entries.

    A 'not_found_app' entry (local config wins over global) is loaded and
    used as the fallback application.
    """
    # pop() with a default collapses the original in/pop/get dance.
    not_found_app = local_conf.pop('not_found_app',
                                   global_conf.get('not_found_app'))
    if not_found_app:
        not_found_app = loader.get_app(not_found_app, global_conf=global_conf)
    urlmap = URLMap(not_found_app=not_found_app)
    for path_spec, app_name in local_conf.items():
        route = paste.urlmap.parse_path_expression(path_spec)
        urlmap[route] = loader.get_app(app_name, global_conf=global_conf)
    return urlmap
class URLMap(paste.urlmap.URLMap):
    """URLMap that also resolves the API version and response MIME type from
    the request (URL path suffix, Content-Type and Accept headers)."""

    def _match(self, host, port, path_info):
        """Find longest match for a given URL path."""
        for (domain, app_url), app in self.applications:
            # Domain-restricted mounts must match the request host
            # (with or without an explicit port).
            if domain and domain != host and domain != host + ':' + port:
                continue
            if (path_info == app_url
                    or path_info.startswith(app_url + '/')):
                return app, app_url
        return None, None

    def _set_script_name(self, app, app_url):
        # Wrap app so that SCRIPT_NAME includes the mount point.
        def wrap(environ, start_response):
            environ['SCRIPT_NAME'] += app_url
            return app(environ, start_response)
        return wrap

    def _munge_path(self, app, path_info, app_url):
        # Wrap app, moving the mount prefix from PATH_INFO to SCRIPT_NAME.
        def wrap(environ, start_response):
            environ['SCRIPT_NAME'] += app_url
            environ['PATH_INFO'] = path_info[len(app_url):]
            return app(environ, start_response)
        return wrap

    def _path_strategy(self, host, port, path_info):
        """Check path suffix for MIME type and path prefix for API version."""
        mime_type = app = app_url = None
        # e.g. /servers/detail.json -> application/json
        parts = path_info.rsplit('.', 1)
        if len(parts) > 1:
            possible_type = 'application/' + parts[1]
            if possible_type in wsgi.SUPPORTED_CONTENT_TYPES:
                mime_type = possible_type
        # e.g. /v1.1/tenant/servers -> app mounted at /v1.1
        parts = path_info.split('/')
        if len(parts) > 1:
            possible_app, possible_app_url = self._match(host, port, path_info)
            # Don't use prefix if it ends up matching default
            if possible_app and possible_app_url:
                app_url = possible_app_url
                app = self._munge_path(possible_app, path_info, app_url)
        return mime_type, app, app_url

    def _content_type_strategy(self, host, port, environ):
        """Check Content-Type header for API version."""
        app = None
        # e.g. Content-Type: application/json;version=1.1
        params = parse_options_header(environ.get('CONTENT_TYPE', ''))[1]
        if 'version' in params:
            app, app_url = self._match(host, port, '/v' + params['version'])
            if app:
                app = self._set_script_name(app, app_url)
        return app

    def _accept_strategy(self, host, port, environ, supported_content_types):
        """Check Accept header for best matching MIME type and API version."""
        accept = Accept(environ.get('HTTP_ACCEPT', ''))
        app = None
        # Find the best match in the Accept header
        mime_type, params = accept.best_match(supported_content_types)
        if 'version' in params:
            app, app_url = self._match(host, port, '/v' + params['version'])
            if app:
                app = self._set_script_name(app, app_url)
        return mime_type, app

    def __call__(self, environ, start_response):
        host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
        if ':' in host:
            host, port = host.split(':', 1)
        else:
            # No explicit port: infer it from the URL scheme.
            if environ['wsgi.url_scheme'] == 'http':
                port = '80'
            else:
                port = '443'
        path_info = environ['PATH_INFO']
        path_info = self.normalize_url(path_info, False)[1]
        # The MIME type for the response is determined in one of two ways:
        # 1) URL path suffix (eg /servers/detail.json)
        # 2) Accept header (eg application/json;q=0.8, application/xml;q=0.2)
        # The API version is determined in one of three ways:
        # 1) URL path prefix (eg /v1.1/tenant/servers/detail)
        # 2) Content-Type header (eg application/json;version=1.1)
        # 3) Accept header (eg application/json;q=0.8;version=1.1)
        supported_content_types = list(wsgi.SUPPORTED_CONTENT_TYPES)
        mime_type, app, app_url = self._path_strategy(host, port, path_info)
        # Accept application/atom+xml for the index query of each API
        # version mount point as well as the root index
        if (app_url and app_url + '/' == path_info) or path_info == '/':
            supported_content_types.append('application/atom+xml')
        if not app:
            app = self._content_type_strategy(host, port, environ)
        if not mime_type or not app:
            possible_mime_type, possible_app = self._accept_strategy(
                host, port, environ, supported_content_types)
            if possible_mime_type and not mime_type:
                mime_type = possible_mime_type
            if possible_app and not app:
                app = possible_app
        if not mime_type:
            mime_type = 'application/json'
        if not app:
            # Didn't match a particular version, probably matches default
            app, app_url = self._match(host, port, path_info)
            if app:
                app = self._munge_path(app, path_info, app_url)
        if app:
            environ['nova.best_content_type'] = mime_type
            return app(environ, start_response)
        environ['paste.urlmap_object'] = self
        return self.not_found_application(environ, start_response)
| |
#!/usr/bin/env python2
#
# This file is part of the dune-hdd project:
# https://github.com/pymor/dune-hdd
# Copyright Holders: Felix Schindler
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
from __future__ import division, print_function
import numpy as np
from pymor.core.logger import getLogger
from pymor.operators.constructions import induced_norm
from pymor.reductors.basic import reduce_generic_rb
from pymor.playground.algorithms import gram_schmidt_block_basis_extension
from pymor.playground.reductors import GenericBlockRBReconstructor
from simdb.run import add_values
from dune.pymor.la.container import make_listvectorarray
from OS2015_SISC__6_2__estimators import DetailedEstimator, ReducedEstimator
class ConfigurationError(Exception):
    """Signals that the supplied configuration is invalid."""
class EnrichmentError(Exception):
    """Signals that the local enrichment procedure failed."""
def online_phase(cfg, detailed_data, offline_data):
    """Adaptive online phase: solve the reduced problem for random test
    parameters and, where the estimated error exceeds the target, enrich the
    local reduced bases until the target is met or the extension budget is
    exhausted.

    Fixes applied:
      * ``marked_subdomains`` is a ``set`` in the 'neighbours' marking branch;
        the old code called ``.append()`` on it, which raises AttributeError
        whenever that strategy is enabled -- now uses ``.add()``.
      * The "Unknown marking_strategy" error message reported
        ``cfg['local_indicators']`` instead of ``cfg['marking_strategy']``.
      * ``except EnrichmentError, ee`` modernized to the ``as`` form.

    Args:
        cfg:           configuration dict (marking strategy, tolerances, ...).
        detailed_data: dict with the detailed discretization, estimator
                       ingredients and local products.
        offline_data:  dict with the offline reduced basis, reduced
                       discretization and reconstructor.
    """
    logger = getLogger('.OS2015_SISC__6_2.online_phase')
    logger.setLevel('INFO')

    def doerfler_marking(indicators, theta):
        # Mark the smallest set of subdomains whose (squared) indicators
        # account for at least the fraction theta of the total.
        assert 0.0 < theta <= 1.0
        indices = list(range(len(indicators)))
        indicators = [ii**2 for ii in indicators]
        indicators, indices = [list(x) for x in zip(*sorted(zip(indicators, indices),
                                                            key=lambda pair: pair[0],
                                                            reverse=True))]
        total = np.sum(indicators)
        sums = np.array([np.sum(indicators[:ii+1]) for ii in np.arange(len(indicators))])
        where = sums > theta*total
        if np.any(where):
            return indices[:np.argmax(where)+1]
        else:
            return indices

    discretization = detailed_data['discretization']
    example = detailed_data['example']
    local_products = detailed_data['local_products']
    mu_bar_dune = detailed_data['mu_bar_dune']
    mu_hat_dune = detailed_data['mu_hat_dune']
    norm = detailed_data['norm']
    wrapper = detailed_data['wrapper']
    basis = offline_data['basis']
    basis_mus = offline_data['basis_mus']
    rd = offline_data['rd']
    rc = offline_data['rc']
    reduced_estimator = ReducedEstimator(discretization, example, wrapper, mu_hat_dune, mu_bar_dune,
                                         norm, cfg['estimator_compute'], cfg['estimator_return'])
    reduced_estimator.extension_step += 1
    reduced_estimator.rc = rc
    num_test_samples = cfg['num_test_samples']
    target_error = cfg['online_target_error']
    logger.info('Started online phase for {} samples'.format(num_test_samples))
    test_samples = list(discretization.parameter_space.sample_randomly(num_test_samples))
    if cfg['estimate_some_errors'] and len(test_samples) > 0:
        # Sanity check: the target error cannot be better than the
        # discretization error itself.
        logger.info('Estimating discretization errors:')
        detailed_estimator = DetailedEstimator(example, wrapper, mu_hat_dune, mu_bar_dune)
        estimates = [detailed_estimator.estimate(
            discretization.globalize_vectors(discretization.solve(mu))._list[0]._impl,
            wrapper.dune_parameter(mu)) for mu in test_samples]
        max_error = np.amax(estimates)
        logger.info(' range: [{}, {}]'.format(np.amin(estimates), max_error))
        logger.info(' mean: {}'.format(np.mean(estimates)))
        add_values(estimates=estimates)
        if max_error > cfg['online_target_error']:
            logger.warn('Given target error of {} is below the worst discretization error {}!'.format(
                cfg['online_target_error'], max_error))
        print('')
    failures = 0
    successes = 0
    for mu in test_samples:
        mu_dune = wrapper.dune_parameter(mu)
        mu_in_basis = mu in basis_mus
        # Per-subdomain age counters for the 'age' marking strategy.
        age = np.ones(discretization.num_subdomains)
        logger.info('Solving for {} ...'.format(mu))
        U_red = rd.solve(mu)
        logger.info('Estimating (mu is {}in the basis) ...'.format('already ' if mu_in_basis else 'not '))
        error = reduced_estimator.estimate(U_red, mu, discretization)
        if error > target_error:
            if mu_in_basis:
                # Enrichment cannot help for a parameter already in the basis.
                logger.error(('The error ({}) is larger than the target_error ({}), '
                              + 'but {} is already in the basis: aborting!').format(
                    error, target_error, mu))
                logger.error('This usually means that the tolerances are poorly chosen!')
                failures += 1
                print('')
            else:
                try:
                    logger.info('The error ({}) is too large, starting local enrichment phase:'.format(error))
                    num_extensions = 0
                    intermediate_basis = [bb.copy() for bb in basis]
                    if cfg['local_indicators'] == 'model_reduction_error':
                        # The true model reduction error needs the detailed solution.
                        U_h = discretization.solve(mu)
                        assert len(U_h) == 1
                    while error > target_error and num_extensions < cfg['online_max_extensions']:
                        U_red_h = rc.reconstruct(U_red)
                        assert len(U_red_h) == 1
                        U_red_global = discretization.globalize_vectors(U_red_h)
                        U_red_dune = U_red_global._list[0]._impl
                        if (cfg['uniform_enrichment_factor'] > 0
                                and error/target_error > cfg['uniform_enrichment_factor']):
                            # Error far above target: enrich everywhere at once.
                            logger.info('- Enriching on all subdomains, since error/target_error = {}'.format(
                                error/target_error))
                            marked_subdomains = range(discretization.num_subdomains)
                            if 'age' in cfg['marking_strategy']:
                                age = np.ones(discretization.num_subdomains)
                        else:
                            logger.info('- Estimating local error contributions ...')
                            # compute local error indicators
                            if cfg['local_indicators'] == 'model_reduction_error':
                                difference = U_h - U_red_h
                                local_indicators = [induced_norm(local_products[ss])(difference._blocks[ss])
                                                    for ss in np.arange(discretization.num_subdomains)]
                            elif cfg['local_indicators'] == 'eta_red':
                                local_indicators = list(example.estimate_local(U_red_dune,
                                                                               'eta_OS2014_*',
                                                                               mu_hat_dune,
                                                                               mu_bar_dune,
                                                                               mu_dune))
                            else:
                                raise ConfigurationError('Unknown local_indicators given: {}'.format(
                                    cfg['local_indicators']))
                            # mark subdomains
                            if 'doerfler' in cfg['marking_strategy']:
                                marked_subdomains = set(doerfler_marking(local_indicators,
                                                                         cfg['doerfler_marking_theta']))
                            else:
                                # Fix: report the offending marking_strategy,
                                # not the (valid) local_indicators value.
                                raise ConfigurationError('Unknown marking_strategy given: {}'.format(
                                    cfg['marking_strategy']))
                            if 'neighbours' in cfg['marking_strategy']:
                                for ss in list(marked_subdomains):
                                    neighbours = (list(discretization._impl.neighbouring_subdomains(ss)))
                                    for nn in neighbours:
                                        # Fix: marked_subdomains is a set, which
                                        # has no append(); use add().
                                        marked_subdomains.add(nn)
                                marked_subdomains = set(marked_subdomains)
                            if 'age' in cfg['marking_strategy']:
                                # Additionally mark subdomains untouched for too long.
                                only_marked = len(marked_subdomains)
                                too_old = np.where(age > cfg['marking_max_age'])[0]
                                for ss in too_old:
                                    marked_subdomains.add(ss)
                                logger.info((' {} subdomains marked ({} bc. of age), '
                                             + 'computing local solutions ...').format(
                                    len(marked_subdomains), len(marked_subdomains) - only_marked))
                            else:
                                logger.info(' {} subdomains marked, computing local solutions ...'.format(
                                    len(marked_subdomains)))
                            for ss in np.arange(discretization.num_subdomains):
                                if ss in marked_subdomains:
                                    age[ss] = 1
                                else:
                                    age[ss] += 1
                        # compute updated local solution
                        local_solutions = [None for ss in np.arange(discretization.num_subdomains)]
                        for subdomain in marked_subdomains:
                            local_boundary_values = cfg['local_boundary_values']
                            if not (local_boundary_values == 'dirichlet' or local_boundary_values == 'neumann'):
                                raise ConfigurationError('Unknown local_boundary_values given: {}'.format(
                                    local_boundary_values))
                            oversampled_discretization = discretization.get_oversampled_discretization(
                                subdomain, local_boundary_values)
                            local_discretization = discretization.get_local_discretization(subdomain)
                            U_red_oversampled_dune = example.project_global_to_oversampled(U_red_dune, subdomain)
                            U_h_improved_oversampled_dune = example.solve_oversampled(
                                subdomain, local_boundary_values, U_red_oversampled_dune, mu_dune)
                            U_h_improved_local_dune = example.project_oversampled_to_local(
                                U_h_improved_oversampled_dune, subdomain)
                            U_h_improved_local = make_listvectorarray(wrapper[U_h_improved_local_dune])
                            local_solutions[subdomain] = U_h_improved_local
                        # extend local bases
                        logger.info(' Extending bases on {} subdomain{}...'.format(
                            len(marked_subdomains), '' if len(marked_subdomains) == 1 else 's'))
                        old_basis_size = sum([len(bb) for bb in intermediate_basis])
                        extended_bases, _ = gram_schmidt_block_basis_extension(
                            [intermediate_basis[ss] for ss in marked_subdomains],
                            [local_solutions[ss] for ss in marked_subdomains],
                            product=[local_products[ss] for ss in marked_subdomains])
                        assert len(extended_bases) == len(marked_subdomains)
                        for ii, subdomain in enumerate(marked_subdomains):
                            intermediate_basis[subdomain] = extended_bases[ii]
                        new_basis_size = sum([len(bb) for bb in intermediate_basis])
                        num_extensions += 1
                        logger.info(' Reducing ...')
                        rd, _, _ = reduce_generic_rb(discretization, intermediate_basis)
                        rc = GenericBlockRBReconstructor(intermediate_basis)
                        reduced_estimator.rc = rc
                        reduced_estimator.extension_step += 1
                        U_red = rd.solve(mu)
                        logger.info(' Estimating (total basis size: {})'.format(
                            sum(len(bb) for bb in intermediate_basis)))
                        new_error = reduced_estimator.estimate(U_red, mu, discretization)
                        # Convergence order of the error w.r.t. basis growth.
                        order = np.log(new_error/error)/np.log(old_basis_size/new_basis_size)
                        logger.info(' {} (relative improvement: {})'.format(new_error, order))
                        if new_error > error:
                            logger.warn('The error has increased (from {} to {}) after enrichment!'.format(error,
                                                                                                           new_error))
                        elif order < 1:
                            logger.warn(('The error has decreased only slightly '
                                         + '(from {} to {}) after enrichment!').format(error, new_error))
                        if num_extensions >= cfg['online_max_extensions'] and new_error > cfg['online_target_error']:
                            basis = intermediate_basis
                            raise EnrichmentError('Reached maximum number of {} extensions!'.format(
                                cfg['online_max_extensions']))
                        error = new_error
                    logger.info(' The error ({}) is below the target error, continuing ...'.format(error))
                    successes += 1
                    basis = intermediate_basis
                    logger.info('Basis sizes range from {} to {}.'.format(np.min([len(bb) for bb in basis]),
                                                                          np.max([len(bb) for bb in basis])))
                except EnrichmentError as ee:
                    logger.critical('Enrichment stopped because: {}'.format(ee))
                    logger.info('Basis sizes range from {} to {}.'.format(np.min([len(bb) for bb in basis]),
                                                                          np.max([len(bb) for bb in basis])))
                    logger.info('Continuing with the next parameter ...')
                    failures += 1
                print('')
        else:
            logger.info('The error ({}) is below the target error, continuing ...'.format(error))
            successes += 1
            print('')
    logger.info('Adaptive online phase finished.')
    if failures == 0 and len(test_samples) > 0:
        logger.info(' Target error could be reached for all {} parameters.'.format(len(test_samples)))
    elif successes == 0 and len(test_samples) > 0:
        logger.warn(' Target error could not be reached for any of the {} parameters!'.format(len(test_samples)))
    else:
        if successes > 0:
            logger.info(' Target error could be reached for {} out of {} parameters.'.format(successes,
                                                                                             len(test_samples)))
        if failures > 0:
            logger.info(' Target error could not be reached for {} out of {} parameters.'.format(failures,
                                                                                                 len(test_samples)))
    final_basis_sizes = [len(bb) for bb in basis]
    logger.info('Final global basis size is {}'.format(sum(final_basis_sizes)))
    logger.info('Final local basis sizes range from {} to {}.'.format(np.min([final_basis_sizes]),
                                                                      np.max([final_basis_sizes])))
    add_values(final_basis_sizes=final_basis_sizes)
    example.visualize_on_coarse_grid(final_basis_sizes, cfg['dune_example'] + '.final_basis_sizes', 'local_basis_size')
| |
'''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
Created on Sep 27, 2011
@author: John Cornwell
@contact: JohnWCornwellV@gmail.com
@summary: Various simple trading strategies to generate allocations.
'''
''' Python imports '''
import datetime as dt
from math import sqrt
''' 3rd party imports '''
import numpy as np
import pandas as pand
''' QSTK imports '''
import QSTK.qstkutil.tsutil as tsu
def stratGiven( dtStart, dtEnd, dFuncArgs ):
    """
    @summary Simplest strategy, weights are provided through args.
    @param dtStart: Start date for portfolio
    @param dtEnd: End date for portfolio
    @param dFuncArgs: Dict of function args passed to the function
    @return DataFrame corresponding to the portfolio allocations, or None
            when a required argument is missing
    """
    # 'key in dict' replaces dict.has_key(), which was removed in Python 3;
    # print() with a single string renders identically on Python 2 and 3.
    if 'dmPrice' not in dFuncArgs:
        print('Error: Strategy requires dmPrice information')
        return
    if 'lfWeights' not in dFuncArgs:
        print('Error: Strategy requires weight information')
        return

    dmPrice = dFuncArgs['dmPrice']
    lfWeights = dFuncArgs['lfWeights']

    ''' Generate two allocations, one for the start day, one for the end '''
    naAlloc = np.array( lfWeights ).reshape(1,-1)
    dfAlloc = pand.DataFrame( index=[dtStart], data=naAlloc, columns=(dmPrice.columns) )
    dfAlloc = dfAlloc.append( pand.DataMatrix(index=[dtEnd], data=naAlloc, columns=dmPrice.columns))
    dfAlloc['_CASH'] = 0.0
    return dfAlloc
def strat1OverN( dtStart, dtEnd, dFuncArgs ):
    """
    @summary Evenly distributed strategy (1/N weight per symbol).
    @param dtStart: Start date for portfolio
    @param dtEnd: End date for portfolio
    @param dFuncArgs: Dict of function args passed to the function
    @return DataFrame corresponding to the portfolio allocations, or None
            when dmPrice is missing
    """
    # 'key in dict' replaces dict.has_key(), which was removed in Python 3.
    if 'dmPrice' not in dFuncArgs:
        print('Error: Strategy requires dmPrice information')
        return

    dmPrice = dFuncArgs['dmPrice']
    lNumSym = len(dmPrice.columns)

    ''' Generate two allocations, one for the start day, one for the end '''
    naAlloc = (np.array( np.ones(lNumSym) ) * (1.0 / lNumSym)).reshape(1,-1)
    dfAlloc = pand.DataMatrix( index=[dtStart], data=naAlloc, columns=(dmPrice.columns) )
    dfAlloc = dfAlloc.append( pand.DataMatrix(index=[dtEnd], data=naAlloc, columns=dmPrice.columns))
    dfAlloc['_CASH'] = 0.0
    return dfAlloc
def stratMark( dtStart, dtEnd, dFuncArgs ):
    """
    @summary Markovitz strategy, generates a curve and then chooses a point on it.
    @param dtStart: Start date for portfolio
    @param dtEnd: End date for portfolio
    @param dFuncArgs: Dict of function args passed to the function; requires
                      dmPrice, sPeriod, lLookback, sMarkPoint; optional bAddAlpha
    @return DataFrame corresponding to the portfolio allocations, or None when
            a required argument is missing or sMarkPoint is invalid
    """
    ''' Required arguments; 'in' replaces dict.has_key (removed in Python 3).
        %-formatting keeps the printed text identical on Python 2 and 3. '''
    if 'dmPrice' not in dFuncArgs:
        print('Error: %s requires dmPrice information' % stratMark.__name__)
        return
    if 'sPeriod' not in dFuncArgs:
        print('Error: %s requires rebalancing period' % stratMark.__name__)
        return
    if 'lLookback' not in dFuncArgs:
        print('Error: %s requires lookback' % stratMark.__name__)
        return
    if 'sMarkPoint' not in dFuncArgs:
        print('Error: %s requires markowitz point to choose' % stratMark.__name__)
        return

    ''' Optional variables '''
    bAddAlpha = dFuncArgs.get('bAddAlpha', False)

    dmPrice = dFuncArgs['dmPrice']
    sPeriod = dFuncArgs['sPeriod']
    lLookback = dFuncArgs['lLookback']
    sMarkPoint = dFuncArgs['sMarkPoint']

    ''' Select rebalancing dates '''
    drNewRange = pand.DateRange(dtStart, dtEnd, timeRule=sPeriod) + pand.DateOffset(hours=16)
    dfAlloc = pand.DataMatrix()

    ''' Go through each rebalance date and calculate an efficient frontier for each '''
    for i, dtDate in enumerate(drNewRange):
        dtStart = dtDate - pand.DateOffset(days=lLookback)
        if( dtStart < dmPrice.index[0] ):
            print('Error, not enough data to rebalance')
            continue

        naRets = dmPrice.ix[ dtStart:dtDate ].values.copy()
        tsu.returnize1(naRets)
        tsu.fillforward(naRets)
        tsu.fillbackward(naRets)

        ''' Add alpha to returns '''
        if bAddAlpha:
            if i < len(drNewRange) - 1:
                naFutureRets = dmPrice.ix[ dtDate:drNewRange[i+1] ].values.copy()
                tsu.returnize1(naFutureRets)
                tsu.fillforward(naFutureRets)
                tsu.fillbackward(naFutureRets)
                naAvg = np.mean( naFutureRets, axis=0 )

                ''' make a mix of past/future rets; use j so the enumerate
                    index i is not shadowed (the original reused i here) '''
                for j in range( naRets.shape[0] ):
                    naRets[j,:] = (naRets[j,:] + (naAvg*0.05)) / 1.05

        ''' Generate the efficient frontier '''
        (lfReturn, lfStd, lnaPortfolios) = getFrontier( naRets, fUpper=0.2, fLower=0.01 )

        lInd = 0
        if( sMarkPoint == 'Sharpe'):
            ''' Find portfolio with max sharpe '''
            fMax = -1E300
            for j in range( len(lfReturn) ):
                fShrp = (lfReturn[j]-1) / (lfStd[j])
                if fShrp > fMax:
                    fMax = fShrp
                    lInd = j
        elif( sMarkPoint == 'MinVar'):
            ''' use portfolio with minimum variance '''
            fMin = 1E300
            for j in range( len(lfReturn) ):
                if lfStd[j] < fMin:
                    fMin = lfStd[j]
                    lInd = j
        elif( sMarkPoint == 'MaxRet'):
            ''' use Portfolio with max returns (not really markovitz) '''
            lInd = len(lfReturn)-1
        elif( sMarkPoint == 'MinRet'):
            ''' use Portfolio with min returns (not really markovitz) '''
            lInd = 0
        else:
            print('Warning: invalid sMarkPoint')
            return

        ''' Generate allocation based on selected portfolio '''
        naAlloc = (np.array( lnaPortfolios[lInd] ).reshape(1,-1) )
        dmNew = pand.DataMatrix( index=[dtDate], data=naAlloc, columns=(dmPrice.columns) )
        dfAlloc = dfAlloc.append( dmNew )

    dfAlloc['_CASH'] = 0.0
    return dfAlloc
def stratMarkSharpe( dtStart, dtEnd, dFuncArgs ):
    """
    @summary Convenience wrapper: runs stratMark selecting the max-Sharpe point.
    @param dtStart: Start date for portfolio
    @param dtEnd: End date for portfolio
    @param dFuncArgs: Dict of function args passed to the function
    @return DataFrame corresponding to the portfolio allocations
    """
    # Pin the point-selection mode, then delegate everything to stratMark.
    dFuncArgs['sMarkPoint'] = 'Sharpe'
    return stratMark( dtStart, dtEnd, dFuncArgs )
def stratMarkLowVar( dtStart, dtEnd, dFuncArgs ):
    """
    @summary Convenience wrapper: runs stratMark at the minimum-variance point.
    @param dtStart: Start date for portfolio
    @param dtEnd: End date for portfolio
    @param dFuncArgs: Dict of function args passed to the function
    @return DataFrame corresponding to the portfolio allocations
    """
    # Pin the point-selection mode, then delegate everything to stratMark.
    dFuncArgs['sMarkPoint'] = 'MinVar'
    return stratMark( dtStart, dtEnd, dFuncArgs )
def stratMarkMaxRet( dtStart, dtEnd, dFuncArgs ):
    """
    @summary Convenience wrapper: runs stratMark at the maximum-returns point.
    @param dtStart: Start date for portfolio
    @param dtEnd: End date for portfolio
    @param dFuncArgs: Dict of function args passed to the function
    @return DataFrame corresponding to the portfolio allocations
    """
    # Pin the point-selection mode, then delegate everything to stratMark.
    dFuncArgs['sMarkPoint'] = 'MaxRet'
    return stratMark( dtStart, dtEnd, dFuncArgs )
def stratMarkMinRet( dtStart, dtEnd, dFuncArgs ):
    """
    @summary Convenience wrapper: runs stratMark at the minimum-returns point.
    @param dtStart: Start date for portfolio
    @param dtEnd: End date for portfolio
    @param dFuncArgs: Dict of function args passed to the function
    @return DataFrame corresponding to the portfolio allocations
    """
    # Pin the point-selection mode, then delegate everything to stratMark.
    dFuncArgs['sMarkPoint'] = 'MinRet'
    return stratMark( dtStart, dtEnd, dFuncArgs )
def stratMarkSharpeAlpha( dtStart, dtEnd, dFuncArgs ):
    """
    @summary Runs stratMark at the max-Sharpe point with future knowledge
             (alpha) mixed into the return series.
    @param dtStart: Start date for portfolio
    @param dtEnd: End date for portfolio
    @param dFuncArgs: Dict of function args passed to the function
    @return DataFrame corresponding to the portfolio allocations
    """
    # Enable the look-ahead alpha blend, pin the mode, then delegate.
    dFuncArgs['bAddAlpha'] = True
    dFuncArgs['sMarkPoint'] = 'Sharpe'
    return stratMark( dtStart, dtEnd, dFuncArgs )
def stratMarkMaxRetAlpha( dtStart, dtEnd, dFuncArgs ):
    """
    @summary Runs stratMark at the maximum-returns point with future knowledge
             (alpha) mixed into the return series.
    @param dtStart: Start date for portfolio
    @param dtEnd: End date for portfolio
    @param dFuncArgs: Dict of function args passed to the function
    @return DataFrame corresponding to the portfolio allocations
    """
    # Enable the look-ahead alpha blend, pin the mode, then delegate.
    dFuncArgs['bAddAlpha'] = True
    dFuncArgs['sMarkPoint'] = 'MaxRet'
    return stratMark( dtStart, dtEnd, dFuncArgs )
| |
from numpy.testing import *
from numpy.lib import *
from numpy.core import *
def assert_all(x):
    """Assert that every element of *x* is truthy, reporting *x* on failure."""
    assert all(x), x
class TestCommonType(TestCase):
    """Characterization tests for numpy's common_type() promotion rules."""
    def test_basic(self):
        # One sample array per scalar kind.
        # NOTE(review): ai32 is built but never asserted on below —
        # presumably an omission in the original test; verify.
        ai32 = array([[1,2],[3,4]], dtype=int32)
        af32 = array([[1,2],[3,4]], dtype=float32)
        af64 = array([[1,2],[3,4]], dtype=float64)
        acs = array([[1+5j,2+6j],[3+7j,4+8j]], dtype=csingle)
        acd = array([[1+5j,2+6j],[3+7j,4+8j]], dtype=cdouble)
        # common_type() maps each array to its natural computation type.
        assert common_type(af32) == float32
        assert common_type(af64) == float64
        assert common_type(acs) == csingle
        assert common_type(acd) == cdouble
class TestMintypecode(TestCase):
    """Characterization tests for mintypecode()'s typecode promotion."""
    def test_default_1(self):
        # Any single bool/int code promotes to double precision 'd'.
        for code in '1bcsuwil':
            assert_equal(mintypecode(code), 'd')
        # Floating and complex codes map to themselves.
        for code in ('f', 'd', 'F', 'D'):
            assert_equal(mintypecode(code), code)
    def test_default_2(self):
        # An integer code combined with a float/complex one defers to
        # the float/complex code.
        for prefix in '1bcsuwil':
            for suffix in ('f', 'd', 'F', 'D'):
                assert_equal(mintypecode(prefix + suffix), suffix)
        # Pairwise float/complex combinations: the wider kind wins, and
        # mixing real 'd' with complex single promotes to 'D'.
        pairs = [
            ('ff', 'f'), ('fd', 'd'), ('fF', 'F'), ('fD', 'D'),
            ('df', 'd'), ('dd', 'd'),
            #('dF' with savespace=1 used to give 'F')
            ('dF', 'D'), ('dD', 'D'),
            ('Ff', 'F'),
            #('Fd' with savespace=1 used to give 'F')
            ('Fd', 'D'), ('FF', 'F'), ('FD', 'D'),
            ('Df', 'D'), ('Dd', 'D'), ('DF', 'D'), ('DD', 'D'),
        ]
        for codes, expected in pairs:
            assert_equal(mintypecode(codes), expected)
    def test_default_3(self):
        # Three-way combinations, including leading integer codes.
        triples = [
            ('fdF', 'D'),
            #('fdF' with savespace=1 used to give 'F')
            ('fdD', 'D'), ('fFD', 'D'), ('dFD', 'D'),
            ('ifd', 'd'), ('ifF', 'F'), ('ifD', 'D'),
            ('idF', 'D'),
            #('idF' with savespace=1 used to give 'F')
            ('idD', 'D'),
        ]
        for codes, expected in triples:
            assert_equal(mintypecode(codes), expected)
class TestIsscalar(TestCase):
    """Tests isscalar(): scalars are True, containers are False."""
    def test_basic(self):
        assert(isscalar(3))
        assert(not isscalar([3]))
        assert(not isscalar((3,)))
        assert(isscalar(3j))
        # 10L is a Python 2 long literal; this file predates Python 3.
        assert(isscalar(10L))
        assert(isscalar(4.0))
class TestReal(TestCase):
    """real() passes real arrays through and extracts .real from complex."""
    def test_real(self):
        data = rand(10,)
        # For a purely real array, real() is the identity.
        assert_array_equal(data, real(data))
    def test_cmplx(self):
        data = rand(10,) + 1j*rand(10,)
        # For a complex array, real() matches the .real attribute.
        assert_array_equal(data.real, real(data))
class TestImag(TestCase):
    """imag() is zero for real arrays and matches .imag for complex ones."""
    def test_real(self):
        data = rand(10,)
        # A purely real array has an all-zero imaginary part.
        assert_array_equal(0, imag(data))
    def test_cmplx(self):
        data = rand(10,) + 1j*rand(10,)
        assert_array_equal(data.imag, imag(data))
class TestIscomplex(TestCase):
    """Tests iscomplex(), which flags elements with a nonzero imaginary part."""
    def test_fail(self):
        # Purely real input: no element should be reported as complex.
        z = array([-1,0,1])
        res = iscomplex(z)
        # sometrue is the legacy alias of any() (removed in NumPy 2.0).
        assert(not sometrue(res,axis=0))
    def test_pass(self):
        # -1j has a nonzero imaginary part; 1 and 0 do not.
        z = array([-1j,1,0])
        res = iscomplex(z)
        assert_array_equal(res,[1,0,0])
class TestIsreal(TestCase):
    """isreal() marks elements whose imaginary part is exactly zero."""
    def test_pass(self):
        values = array([-1, 0, 1j])
        assert_array_equal(isreal(values), [1, 1, 0])
    def test_fail(self):
        values = array([-1j, 1, 0])
        assert_array_equal(isreal(values), [0, 1, 1])
class TestIscomplexobj(TestCase):
    """iscomplexobj() keys on the array's dtype, not on element values."""
    def test_basic(self):
        # Integer dtype: not a complex object.
        assert(not iscomplexobj(array([-1, 0, 1])))
        # Complex dtype: a complex object even if values could be real.
        assert(iscomplexobj(array([-1j, 0, -1])))
class TestIsrealobj(TestCase):
    """isrealobj() is the complement of iscomplexobj() on the dtype."""
    def test_basic(self):
        assert(isrealobj(array([-1, 0, 1])))
        assert(not isrealobj(array([-1j, 0, -1])))
class TestIsnan(TestCase):
    """Characterization tests for isnan() across floats, ints and complex."""
    def test_goodvalues(self):
        # Ordinary finite floats: isnan must be False everywhere.
        z = array((-1.,0.,1.))
        res = isnan(z) == 0
        # alltrue is the legacy alias of all() (removed in NumPy 2.0).
        assert_all(alltrue(res,axis=0))
    def test_posinf(self):
        # 1/0 -> +inf, which is not NaN; seterr silences the divide warning.
        olderr = seterr(divide='ignore')
        assert_all(isnan(array((1.,))/0.) == 0)
        seterr(**olderr)
    def test_neginf(self):
        olderr = seterr(divide='ignore')
        assert_all(isnan(array((-1.,))/0.) == 0)
        seterr(**olderr)
    def test_ind(self):
        # 0/0 produces NaN ("indeterminate").
        olderr = seterr(divide='ignore', invalid='ignore')
        assert_all(isnan(array((0.,))/0.) == 1)
        seterr(**olderr)
    #def test_qnan(self): log(-1) return pi*j now
    #    assert_all(isnan(log(-1.)) == 1)
    def test_integer(self):
        assert_all(isnan(1) == 0)
    def test_complex(self):
        assert_all(isnan(1+1j) == 0)
    def test_complex1(self):
        # complex 0/0 -> (nan+nanj), which is NaN.
        olderr = seterr(divide='ignore', invalid='ignore')
        assert_all(isnan(array(0+0j)/0.) == 1)
        seterr(**olderr)
class TestIsfinite(TestCase):
    """Characterization tests for isfinite(): False for inf and NaN."""
    def test_goodvalues(self):
        z = array((-1.,0.,1.))
        res = isfinite(z) == 1
        # alltrue is the legacy alias of all() (removed in NumPy 2.0).
        assert_all(alltrue(res,axis=0))
    def test_posinf(self):
        olderr = seterr(divide='ignore')
        assert_all(isfinite(array((1.,))/0.) == 0)
        seterr(**olderr)
    def test_neginf(self):
        olderr = seterr(divide='ignore')
        assert_all(isfinite(array((-1.,))/0.) == 0)
        seterr(**olderr)
    def test_ind(self):
        # 0/0 -> NaN, which is not finite either.
        olderr = seterr(divide='ignore', invalid='ignore')
        assert_all(isfinite(array((0.,))/0.) == 0)
        seterr(**olderr)
    #def test_qnan(self):
    #    assert_all(isfinite(log(-1.)) == 0)
    def test_integer(self):
        assert_all(isfinite(1) == 1)
    def test_complex(self):
        assert_all(isfinite(1+1j) == 1)
    def test_complex1(self):
        # A complex value with a non-finite component is not finite.
        olderr = seterr(divide='ignore', invalid='ignore')
        assert_all(isfinite(array(1+1j)/0.) == 0)
        seterr(**olderr)
class TestIsinf(TestCase):
    """Characterization tests for isinf(): True only for +/-inf, not NaN."""
    def test_goodvalues(self):
        z = array((-1.,0.,1.))
        res = isinf(z) == 0
        # alltrue is the legacy alias of all() (removed in NumPy 2.0).
        assert_all(alltrue(res,axis=0))
    def test_posinf(self):
        olderr = seterr(divide='ignore')
        assert_all(isinf(array((1.,))/0.) == 1)
        seterr(**olderr)
    def test_posinf_scalar(self):
        # Same check on a 0-d array (scalar result).
        olderr = seterr(divide='ignore')
        assert_all(isinf(array(1.,)/0.) == 1)
        seterr(**olderr)
    def test_neginf(self):
        olderr = seterr(divide='ignore')
        assert_all(isinf(array((-1.,))/0.) == 1)
        seterr(**olderr)
    def test_neginf_scalar(self):
        olderr = seterr(divide='ignore')
        assert_all(isinf(array(-1.)/0.) == 1)
        seterr(**olderr)
    def test_ind(self):
        # 0/0 -> NaN, which is *not* reported as infinite.
        olderr = seterr(divide='ignore', invalid='ignore')
        assert_all(isinf(array((0.,))/0.) == 0)
        seterr(**olderr)
    #def test_qnan(self):
    #    assert_all(isinf(log(-1.)) == 0)
    #    assert_all(isnan(log(-1.)) == 1)
class TestIsposinf(TestCase):
    """isposinf() flags only +inf entries."""
    def test_generic(self):
        # -1/0 -> -inf, 0/0 -> nan, 1/0 -> +inf; only the last is flagged.
        olderr = seterr(divide='ignore', invalid='ignore')
        flags = isposinf(array((-1.,0,1))/0.)
        seterr(**olderr)
        for idx, expected in enumerate((0, 0, 1)):
            assert(flags[idx] == expected)
class TestIsneginf(TestCase):
    """isneginf() flags only -inf entries."""
    def test_generic(self):
        # -1/0 -> -inf, 0/0 -> nan, 1/0 -> +inf; only the first is flagged.
        olderr = seterr(divide='ignore', invalid='ignore')
        flags = isneginf(array((-1.,0,1))/0.)
        seterr(**olderr)
        for idx, expected in enumerate((1, 0, 0)):
            assert(flags[idx] == expected)
class TestNanToNum(TestCase):
    """Tests nan_to_num(): NaN -> 0 and +/-inf -> huge finite numbers."""
    def test_generic(self):
        olderr = seterr(divide='ignore', invalid='ignore')
        vals = nan_to_num(array((-1.,0,1))/0.)
        seterr(**olderr)
        # BUG FIX: the original chained pairs of checks as
        #   assert_all(...) and assert_all(...)
        # assert_all() returns None (falsy), so the right-hand assertion
        # was never executed. Run each assertion as its own statement.
        assert_all(vals[0] < -1e10)
        assert_all(isfinite(vals[0]))
        assert(vals[1] == 0)
        assert_all(vals[2] > 1e10)
        assert_all(isfinite(vals[2]))
    def test_integer(self):
        vals = nan_to_num(1)
        assert_all(vals == 1)
    def test_complex_good(self):
        vals = nan_to_num(1+1j)
        assert_all(vals == 1+1j)
    def test_complex_bad(self):
        v = 1+1j
        olderr = seterr(divide='ignore', invalid='ignore')
        v += array(0+1.j)/0.
        seterr(**olderr)
        vals = nan_to_num(v)
        # !! This is actually (unexpectedly) zero
        assert_all(isfinite(vals))
    def test_complex_bad2(self):
        v = 1+1j
        olderr = seterr(divide='ignore', invalid='ignore')
        v += array(-1+1.j)/0.
        seterr(**olderr)
        vals = nan_to_num(v)
        assert_all(isfinite(vals))
        #assert_all(vals.imag > 1e10)
        # !! This is actually (unexpectedly) positive
        # !! inf. Comment out for now, and see if it
        # !! changes
        #assert_all(vals.real < -1e10)
class TestRealIfClose(TestCase):
    """real_if_close() drops a tiny imaginary part, within tolerance."""
    def test_basic(self):
        base = rand(10)
        # ~1e-15j is within the default tolerance: result becomes real.
        result = real_if_close(base + 1e-15j)
        assert_all(isrealobj(result))
        assert_array_equal(base, result)
        # 1e-7j exceeds the default tolerance: result stays complex...
        result = real_if_close(base + 1e-7j)
        assert_all(iscomplexobj(result))
        # ...but an explicit looser tolerance accepts it.
        result = real_if_close(base + 1e-7j, tol=1e-6)
        assert_all(isrealobj(result))
class TestArrayConversion(TestCase):
    """Tests asfarray(), which coerces input to a floating-point ndarray."""
    def test_asfarray(self):
        # NOTE(review): asfarray was removed in NumPy 2.0; this is legacy API.
        a = asfarray(array([1,2,3]))
        assert_equal(a.__class__,ndarray)
        assert issubdtype(a.dtype,float)
if __name__ == "__main__":
    # run_module_suite is the legacy numpy.testing entry point (removed in
    # modern NumPy); it discovers and runs the tests in this module.
    run_module_suite()
| |
from functools import partial
import glob
import json
import logging
import os
import hashlib
import mimetypes
import pprint
import requests
from daf_fruit_dist.checksums import Checksums
from daf_fruit_dist.file_management import get_file_digests
# Standard HTTP header names used on every request to Artifactory.
_HEADER_USER_AGENT = 'User-Agent'
# Artifactory validates uploaded content against these checksum headers.
_HEADER_MD5_CHECKSUM = 'X-Checksum-Md5'
_HEADER_SHA1_CHECKSUM = 'X-Checksum-Sha1'
_HEADER_CONTENT_TYPE = 'Content-Type'
_HEADER_CONTENT_ENCODING = 'Content-Encoding'
# Vendor media types required by Artifactory's build-promotion and
# build-info REST endpoints.
_CONTENT_TYPE_PROMOTION_REQUEST = (
    'application/vnd.org.jfrog.artifactory.build.PromotionRequest+json')
_CONTENT_TYPE_PUBLISH_BUILD_INFO = (
    'application/vnd.org.jfrog.artifactory+json')
def deploy_file(
        repo_base_url,
        repo_push_id,
        path,
        filename,
        attributes=None,
        username=None,
        password=None,
        verify_cert=True):
    """
    Deploy the file to the /path/ directory at the given URL. A
    dictionary (or pre-formatted string) of attributes may also be
    supplied.

    :param repo_base_url: base URL of the Artifactory server
    :param repo_push_id: repository key to deploy into
    :param path: directory path within the repository
    :param filename: local file to upload
    :param attributes: optional matrix parameters, either a dict or a
        pre-formatted "key1=value1;key2=value2" string
    :param username: optional basic-auth user
    :param password: optional basic-auth password
    :param verify_cert: whether to verify the server's TLS certificate
    :raises TypeError: if attributes is neither a dict nor a string
    """
    def store_hashes_in_headers(headers):
        # Artifactory verifies the upload against these checksum headers.
        md5, sha1 = get_file_digests(
            filename,
            digests=(hashlib.md5(), hashlib.sha1()))
        headers[_HEADER_MD5_CHECKSUM] = md5.hexdigest()
        headers[_HEADER_SHA1_CHECKSUM] = sha1.hexdigest()

    def store_mimetypes_in_headers(headers):
        # Content-Type/Content-Encoding are guessed from the filename only.
        content_type, content_enc = mimetypes.guess_type(filename)
        if content_type:
            headers[_HEADER_CONTENT_TYPE] = content_type
        if content_enc:
            headers[_HEADER_CONTENT_ENCODING] = content_enc

    def generate_uri():
        basename = os.path.basename(filename)
        norm_path = _normalize_path(path)
        uri = '{url}/{repo_push_id}/{path}/{basename}'.format(
            url=repo_base_url,
            repo_push_id=repo_push_id,
            path=norm_path,
            basename=basename)
        if attributes:
            if isinstance(attributes, dict):
                # items() instead of Python-2-only iteritems(); the eager
                # copy is negligible for attribute-sized dicts and keeps
                # the code forward-compatible.
                uri += ';' + ';'.join(
                    '{}={}'.format(k, v) for k, v in attributes.items())
            elif isinstance(attributes, basestring):
                uri += ';' + attributes
            else:
                raise TypeError(
                    '"attributes" must be either a dictionary or a pre-'
                    'formatted string of "key1=value1;key2=value2" pairs')
        return uri

    def upload_file(deploy_uri):
        logging.info('Deploying: ' + deploy_uri)
        # Use the shared helper for consistency with the other functions
        # in this module (identical behavior).
        auth = _make_auth(username, password)
        # The file handle is streamed so large files are not read fully
        # into memory.
        with open(filename, 'rb') as f:
            response = requests.put(
                deploy_uri,
                data=f,
                auth=auth,
                headers=headers,
                verify=verify_cert)
            _log_response(response)

    headers = _make_headers()
    store_hashes_in_headers(headers)
    store_mimetypes_in_headers(headers)
    upload_file(generate_uri())
def deploy_globbed_files(
        repo_base_url,
        repo_push_id,
        path,
        glob_patterns,
        attributes=None,
        username=None,
        password=None,
        verify_cert=True):
    """
    Like deploy_file, except this function takes a list of globbing
    patterns. All files (NOT directories) matched by these patterns are
    deployed to the server.
    """
    logging.debug("Entering deploy_globbed_files with:")
    logging.debug("   repo_base_url: {}".format(repo_base_url))
    logging.debug("   repo_push_id: {}".format(repo_push_id))
    logging.debug("   path: {}".format(path))
    logging.debug("   glob_patterns: {}".format(glob_patterns))

    # Pre-bind everything except the filename so each match needs only
    # a single-argument call.
    deploy = partial(
        deploy_file,
        repo_base_url=repo_base_url,
        repo_push_id=repo_push_id,
        path=path,
        attributes=attributes,
        username=username,
        password=password,
        verify_cert=verify_cert)

    # A set (not a list) so a file matched by several patterns is only
    # uploaded once. Directories are filtered out.
    filenames = set()
    for pattern in glob_patterns:
        filenames.update(
            match for match in glob.glob(pattern) if os.path.isfile(match))

    logging.debug("Found filenames: {}".format(", ".join(filenames)))
    for name in filenames:
        deploy(filename=name)
    return filenames
def build_promote(
        username,
        password,
        repo_base_url,
        build_name,
        build_number,
        promotion_request,
        verify_cert=True):
    """Promote a build in Artifactory and return the parsed JSON response.

    POSTs the JSON payload produced by *promotion_request* to the
    build-promotion REST endpoint; raises via raise_for_status() on
    HTTP errors.
    """
    promote_uri = '{url}/api/build/promote/{build_name}/{build_number}'.format(
        url=repo_base_url,
        build_name=build_name,
        build_number=build_number)

    payload = json.dumps(promotion_request.as_json_data, sort_keys=True)

    headers = _make_headers()
    headers[_HEADER_CONTENT_TYPE] = _CONTENT_TYPE_PROMOTION_REQUEST

    response = requests.post(
        promote_uri,
        data=payload,
        headers=headers,
        auth=_make_auth(username, password),
        verify=verify_cert)

    _log_response(response)
    response.raise_for_status()
    return response.json()
def publish_build_info(
        username,
        password,
        repo_base_url,
        build_info,
        verify_cert=True):
    """Upload build-info JSON to Artifactory's /api/build endpoint.

    Raises via raise_for_status() when the server rejects the request.
    """
    publish_uri = '{url}/api/build'.format(url=repo_base_url)

    payload = json.dumps(build_info.as_json_data, sort_keys=True)

    headers = _make_headers()
    headers[_HEADER_CONTENT_TYPE] = _CONTENT_TYPE_PUBLISH_BUILD_INFO

    response = requests.put(
        publish_uri,
        data=payload,
        headers=headers,
        auth=_make_auth(username, password),
        verify=verify_cert)

    _log_response(response=response)
    response.raise_for_status()
def determine_checksums(
        username,
        password,
        repo_base_url,
        repo_pull_id,
        file_path,
        verify_cert=True):
    """Fetch the md5/sha1 checksums Artifactory stores for an artifact.

    Raises RuntimeError when the artifact exists but exposes no
    checksums, and propagates HTTP errors via raise_for_status().
    """
    storage_uri = '{url}/api/storage/{repo_pull_id}/{file_path}'.format(
        url=repo_base_url,
        repo_pull_id=repo_pull_id,
        file_path=file_path)

    response = requests.get(
        storage_uri,
        headers=_make_headers(),
        auth=_make_auth(username, password),
        verify=verify_cert)
    response.raise_for_status()

    body = response.json()
    # Guard clause instead of if/else: bail out early when the server
    # returned no checksum section.
    if 'checksums' not in body:
        raise RuntimeError(
            "Artifact found in Artifactory but no checksums were available.")

    checksum_data = body['checksums']
    return Checksums(
        sha1=checksum_data.get('sha1', None),
        md5=checksum_data.get('md5', None))
def _normalize_path(path):
return path.strip('/')
def _make_auth(username=None, password=None):
return (username, password) if (username or password) else None
def _make_headers():
    """Return the default headers sent with every Artifactory request."""
    headers = {_HEADER_USER_AGENT: 'FruitDist/1.0'}
    return headers
def _log_response(response):
    """Debug-log an HTTP response: headers always, body as JSON when possible.

    Falls back to logging the raw text body when the response is not JSON.
    """
    _log_data_structure('response_headers', response.headers)
    try:
        _log_data_structure('response_json', response.json())
    except StandardError:
        # Body was not parseable as JSON. NOTE(review): StandardError is
        # Python 2 only; under Python 3 this would need Exception/ValueError.
        response_text = getattr(response, 'text', None)
        if response_text:
            logging.debug('response_text: {}'.format(response_text))
def _log_data_structure(title, data_structure):
unindented = pprint.pformat(data_structure)
shifted = '\n '.join(unindented.splitlines())
log_msg = '{}:\n {}'.format(title, shifted)
logging.debug(log_msg)
| |
#!/usr/bin/env python
# encoding: utf-8
"""
open.py
Created by Thomas Morin, Orange on 2015-07-10.
Copyright (c) 2009-2015 Orange. All rights reserved.
"""
import unittest
from exabgp.reactor.protocol import AFI, SAFI
from exabgp.bgp.message.update import Attributes
from exabgp.bgp.message.update.attribute.localpref import LocalPreference
from exabgp.bgp.message.update.attribute.community.extended.communities \
import ExtendedCommunities
from exabgp.bgp.message.update.attribute.community.extended \
import RouteTargetASN2Number as RouteTarget
from exabgp.bgp.message.update.attribute.community.extended.encapsulation \
import Encapsulation
from exabgp.bgp.message.update.nlri.ipvpn import IPVPN
from exabgp.bgp.message.update.nlri.evpn.mac import MAC as EVPNMAC
from exabgp.bgp.message.update.nlri.qualifier.rd import RouteDistinguisher
from exabgp.bgp.message.update.nlri.qualifier.labels import Labels
from exabgp.bgp.message.update.nlri.qualifier.esi import ESI
from exabgp.bgp.message.update.nlri.qualifier.etag import EthernetTag
from exabgp.bgp.message.update.nlri.qualifier.mac import MAC
from exabgp.protocol.ip import IP
from exabgp.bgp.message import OUT
from exabgp.configuration.setup import environment
environment.setup('')
def prefixToPackedIPMask(prefix):
    """Split an 'address/len' CIDR string into (packed address, int mask)."""
    address, mask_length = prefix.split("/")
    return (IP.pton(address), int(mask_length))
class TestNLRIs(unittest.TestCase):
    """Hash/equality semantics of exabgp NLRI and attribute objects.

    The recurring pattern: fields that are *not* part of a route's identity
    (label, nexthop, action, ESI, RD on EVPN MAC) must not affect hash or
    equality, while identity fields (prefix, IP on EVPN MAC) must.
    """
    # tests on MPLS VPN NLRIs
    def test0_IPVPNHashEqual(self):
        '''
        Two indistinct VPN NLRIs should
        hash to the same value, and be equal
        '''
        rd = RouteDistinguisher.fromElements("42.42.42.42", 5)
        packedPrefix, mask = prefixToPackedIPMask("1.1.1.1/32")
        nlri1 = IPVPN.new(AFI(AFI.ipv4), SAFI(SAFI.mpls_vpn),
                          packedPrefix, mask,
                          Labels([42], True), rd,
                          IP.pton("45.45.45.45"),
                          OUT.ANNOUNCE)
        nlri2 = IPVPN.new(AFI(AFI.ipv4), SAFI(SAFI.mpls_vpn),
                          packedPrefix, mask,
                          Labels([42], True), rd,
                          IP.pton("45.45.45.45"),
                          OUT.ANNOUNCE)
        self.assertEqual(hash(nlri1), hash(nlri2))
        self.assertEqual(nlri1, nlri2)
    def test1_IPVPNHashEqual(self):
        '''
        Two VPN NLRI distinct only by their *label* should
        hash to the same value, and be equal
        '''
        packedPrefix, mask = prefixToPackedIPMask("1.1.1.1/32")
        nlri1 = IPVPN.new(AFI(AFI.ipv4), SAFI(SAFI.mpls_vpn),
                          packedPrefix, mask,
                          Labels([42], True),
                          RouteDistinguisher.fromElements("42.42.42.42", 5),
                          IP.pton("45.45.45.45"),
                          OUT.ANNOUNCE)
        nlri2 = IPVPN.new(AFI(AFI.ipv4), SAFI(SAFI.mpls_vpn),
                          packedPrefix, mask,
                          Labels([0], True),
                          RouteDistinguisher.fromElements("42.42.42.42", 5),
                          IP.pton("45.45.45.45"),
                          OUT.ANNOUNCE)
        self.assertEqual(hash(nlri1), hash(nlri2))
        # NOTE(review): despite the docstring, the label *does* make the
        # NLRIs unequal here — only the hash collides.
        self.assertNotEqual(nlri1, nlri2)
    def test2_IPVPNHashEqual(self):
        '''
        Two VPN NLRI distinct only by their *nexthop* should
        hash to the same value, and be equal
        '''
        packedPrefix, mask = prefixToPackedIPMask("1.1.1.1/32")
        nlri1 = IPVPN.new(AFI(AFI.ipv4), SAFI(SAFI.mpls_vpn),
                          packedPrefix, mask,
                          Labels([42], True),
                          RouteDistinguisher.fromElements("42.42.42.42", 5),
                          IP.pton("45.45.45.45"),
                          OUT.ANNOUNCE)
        nlri2 = IPVPN.new(AFI(AFI.ipv4), SAFI(SAFI.mpls_vpn),
                          packedPrefix, mask,
                          Labels([42], True),
                          RouteDistinguisher.fromElements("42.42.42.42", 5),
                          IP.pton("77.77.77.77"),
                          OUT.ANNOUNCE)
        self.assertEqual(hash(nlri1), hash(nlri2))
        # eq() ignores the nexthop, while == does not.
        self.assertTrue(nlri1.eq(nlri2))
        self.assertNotEqual(nlri1, nlri2)
    def test3_IPVPNHashEqual(self):
        '''
        Two VPN NLRI distinct only by their *action* should
        hash to the same value, and be equal
        '''
        packedPrefix, mask = prefixToPackedIPMask("1.1.1.1/32")
        nlri1 = IPVPN.new(AFI(AFI.ipv4), SAFI(SAFI.mpls_vpn),
                          packedPrefix, mask,
                          Labels([42], True),
                          RouteDistinguisher.fromElements("42.42.42.42", 5),
                          IP.pton("45.45.45.45"),
                          OUT.ANNOUNCE)
        nlri2 = IPVPN.new(AFI(AFI.ipv4), SAFI(SAFI.mpls_vpn),
                          packedPrefix, mask,
                          Labels([42], True),
                          RouteDistinguisher.fromElements("42.42.42.42", 5),
                          IP.pton("45.45.45.45"),
                          OUT.WITHDRAW)
        self.assertEqual(hash(nlri1), hash(nlri2))
        self.assertTrue(nlri1.eq(nlri2))
        self.assertEqual(nlri1, nlri2)
    # Tests on EVPN NLRIs
    def test100_EVPNMACHashEqual(self):
        '''
        Two indistinct EVPN NLRI should
        hash to the same value, and be equal
        '''
        nlri1 = EVPNMAC(RouteDistinguisher.fromElements("42.42.42.42", 5),
                        ESI(0),
                        EthernetTag(111),
                        MAC("01:02:03:04:05:06"), 6*8,
                        Labels([42], True),
                        IP.create("1.1.1.1"))
        nlri2 = EVPNMAC(RouteDistinguisher.fromElements("42.42.42.42", 5),
                        ESI(0),
                        EthernetTag(111),
                        MAC("01:02:03:04:05:06"), 6*8,
                        Labels([42], True),
                        IP.create("1.1.1.1"))
        self.assertEqual(hash(nlri1), hash(nlri2))
        self.assertEqual(nlri1, nlri2)
    def test101_EVPNHashEqual_somefieldsvary(self):
        '''
        Two EVPN MAC NLRIs differing by their ESI or label or RD,
        or nexthop, but otherwise identical should hash to the same value,
        and be equal
        '''
        nlri0 = EVPNMAC(RouteDistinguisher.fromElements("42.42.42.42", 5),
                        ESI(0),
                        EthernetTag(111),
                        MAC("01:02:03:04:05:06"), 6*8,
                        Labels([42], True),
                        IP.create("1.1.1.1"))
        # Esi
        nlri1 = EVPNMAC(RouteDistinguisher.fromElements("42.42.42.42", 5),
                        ESI(1),
                        EthernetTag(111),
                        MAC("01:02:03:04:05:06"), 6*8,
                        Labels([42], True),
                        IP.create("1.1.1.1"))
        # label
        nlri2 = EVPNMAC(RouteDistinguisher.fromElements("42.42.42.42", 5),
                        ESI(0),
                        EthernetTag(111),
                        MAC("01:02:03:04:05:06"), 6*8,
                        Labels([4444], True),
                        IP.create("1.1.1.1"))
        # IP: different IPs, but same MACs: different route
        nlri3 = EVPNMAC(RouteDistinguisher.fromElements("42.42.42.42", 5),
                        ESI(0),
                        EthernetTag(111),
                        MAC("01:02:03:04:05:06"), 6*8,
                        Labels([42], True),
                        IP.create("2.2.2.2"))
        # with a next hop...
        nlri4 = EVPNMAC(RouteDistinguisher.fromElements("42.42.42.42", 5),
                        ESI(0),
                        EthernetTag(111),
                        MAC("01:02:03:04:05:06"), 6*8,
                        Labels([42], True),
                        IP.create("1.1.1.1"),
                        IP.pton("10.10.10.10"))
        nlri5 = EVPNMAC(RouteDistinguisher.fromElements("42.42.42.42", 5),
                        ESI(0),
                        EthernetTag(111),
                        MAC("01:02:03:04:05:06"), 6*8,
                        Labels([42], True),
                        IP.create("1.1.1.1"),
                        IP.pton("11.11.11.11"))
        # ESI, label and nexthop are not part of the route identity.
        self.assertEqual(hash(nlri0), hash(nlri1))
        self.assertEqual(hash(nlri0), hash(nlri2))
        self.assertEqual(hash(nlri0), hash(nlri4))
        self.assertEqual(nlri0, nlri1)
        self.assertEqual(nlri0, nlri2)
        self.assertEqual(nlri0, nlri4)
        self.assertEqual(nlri1, nlri2)
        self.assertEqual(nlri1, nlri4)
        self.assertEqual(nlri2, nlri4)
        self.assertEqual(nlri4, nlri5)
        # ...but the IP is: nlri3 must differ from all the others.
        self.assertNotEqual(hash(nlri0), hash(nlri3))
        self.assertNotEqual(nlri0, nlri3)
        self.assertNotEqual(nlri1, nlri3)
        self.assertNotEqual(nlri2, nlri3)
        self.assertNotEqual(nlri3, nlri4)
    # tests on attributes
    def test4_DistinctAttributes(self):
        atts1 = Attributes()
        atts1.add(LocalPreference(10))
        atts2 = Attributes()
        atts2.add(LocalPreference(20))
        self.assertNotEqual(atts1, atts2)
    def test5_SameAttributes(self):
        atts1 = Attributes()
        atts1.add(LocalPreference(10))
        atts2 = Attributes()
        atts2.add(LocalPreference(10))
        self.assertEqual(hash(atts1), hash(atts2))
        self.assertEqual(atts1, atts2)
    def test6_SameAttributesOrderMultivalued(self):
        # Extended-community ordering must not affect hash or equality.
        atts1 = Attributes()
        eComs1 = ExtendedCommunities()
        eComs1.communities.append(RouteTarget(64512, 1))
        eComs1.communities.append(Encapsulation(Encapsulation.Type.VXLAN))
        eComs1.communities.append(RouteTarget(64512, 2))
        atts1.add(eComs1)
        atts2 = Attributes()
        eComs2 = ExtendedCommunities()
        eComs2.communities.append(RouteTarget(64512, 2))
        eComs2.communities.append(RouteTarget(64512, 1))
        eComs2.communities.append(Encapsulation(Encapsulation.Type.VXLAN))
        atts2.add(eComs2)
        self.assertEqual(hash(atts1), hash(atts2))
        self.assertEqual(atts1, atts2)
    def test10_Ecoms(self):
        # A strict subset of communities is not "same values as" a superset.
        eComs1 = ExtendedCommunities()
        eComs1.communities.append(Encapsulation(Encapsulation.Type.VXLAN))
        atts1 = Attributes()
        atts1.add(eComs1)
        eComs2 = ExtendedCommunities()
        eComs2.communities.append(Encapsulation(Encapsulation.Type.VXLAN))
        eComs2.communities.append(RouteTarget(64512, 1))
        atts2 = Attributes()
        atts2.add(eComs2)
        self.assertFalse(atts1.sameValuesAs(atts2))
        self.assertFalse(atts2.sameValuesAs(atts1))
    def test11_RTs(self):
        # Route targets differing in either ASN or number are distinct.
        rt1a = RouteTarget(64512, 1)
        rt1b = RouteTarget(64512, 1)
        rt3 = RouteTarget(64512, 2)
        rt4 = RouteTarget(64513, 1)
        self.assertEqual(hash(rt1a), hash(rt1b))
        self.assertNotEqual(hash(rt1a), hash(rt3))
        self.assertNotEqual(hash(rt1a), hash(rt4))
        self.assertEqual(rt1a, rt1b)
        self.assertNotEqual(rt1a, rt3)
        self.assertNotEqual(rt1a, rt4)
        self.assertEqual(set([rt1a]), set([rt1b]))
        self.assertEqual(1, len(set([rt1a]).intersection(set([rt1b]))))
if __name__ == '__main__':
    # Run the NLRI/attribute test suite when executed directly.
    unittest.main()
| |
# -*- coding: utf-8 -*-
"""
requests.auth
~~~~~~~~~~~~~
This module contains the authentication handlers for Requests.
"""
import os
import re
import time
import hashlib
import threading
from base64 import b64encode
from .compat import urlparse, str
from .cookies import extract_cookies_to_jar
from .utils import parse_dict_header, to_native_string
from .status_codes import codes
# Common Content-Type values referenced by the auth handlers when a
# request body must be re-sent (e.g. on a digest-auth retry).
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
def _basic_auth_str(username, password):
    """Returns a Basic Auth string."""
    # RFC 7617: base64("user:pass"), latin1-encoded, prefixed with 'Basic '.
    credentials = ('%s:%s' % (username, password)).encode('latin1')
    return 'Basic ' + to_native_string(b64encode(credentials).strip())
class AuthBase(object):
    """Base class that all auth implementations derive from.

    Subclasses implement __call__ to mutate and return the outgoing request.
    """
    def __call__(self, r):
        raise NotImplementedError('Auth hooks must be callable.')
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""
    def __init__(self, username, password):
        self.username = username
        self.password = password
    def __call__(self, r):
        # Set the standard Authorization header and hand the request back.
        auth_header = _basic_auth_str(self.username, self.password)
        r.headers['Authorization'] = auth_header
        return r
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""
    def __call__(self, r):
        # Same credentials scheme as basic auth, but on the proxy header.
        auth_header = _basic_auth_str(self.username, self.password)
        r.headers['Proxy-Authorization'] = auth_header
        return r
class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object.

    Digest state (nonce counts, parsed challenge, body position) is kept in
    thread-local storage so one instance can be shared across threads.
    """
    def __init__(self, username, password):
        self.username = username
        self.password = password
        # Keep state in per-thread local storage
        self._thread_local = threading.local()
    def init_per_thread_state(self):
        """Lazily initialize the digest state for the calling thread."""
        # Ensure state is initialized just once per-thread
        if not hasattr(self._thread_local, 'init'):
            self._thread_local.init = True
            self._thread_local.last_nonce = ''
            self._thread_local.nonce_count = 0
            self._thread_local.chal = {}
            self._thread_local.pos = None
            self._thread_local.num_401_calls = None
    def build_digest_header(self, method, url):
        """Build the ``Digest ...`` Authorization header value.

        Uses the challenge previously stored in ``self._thread_local.chal``.
        Returns None when the challenge requests an unsupported algorithm
        or qop, so the request is sent without a Digest header.
        """
        realm = self._thread_local.chal['realm']
        nonce = self._thread_local.chal['nonce']
        qop = self._thread_local.chal.get('qop')
        algorithm = self._thread_local.chal.get('algorithm')
        opaque = self._thread_local.chal.get('opaque')
        if algorithm is None:
            _algorithm = 'MD5'
        else:
            _algorithm = algorithm.upper()
        # BUG FIX: hash_utf8 must start as None. Previously it was only
        # assigned inside the MD5/SHA branches, so an unsupported algorithm
        # raised UnboundLocalError at the `if hash_utf8 is None` check below
        # instead of returning None as intended.
        hash_utf8 = None
        # lambdas assume digest modules are imported at the top level
        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.md5(x).hexdigest()
            hash_utf8 = md5_utf8
        elif _algorithm == 'SHA':
            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha1(x).hexdigest()
            hash_utf8 = sha_utf8
        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
        if hash_utf8 is None:
            return None
        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        #: path is request-uri defined in RFC 2616 which should not be empty
        path = p_parsed.path or "/"
        if p_parsed.query:
            path += '?' + p_parsed.query
        A1 = '%s:%s:%s' % (self.username, realm, self.password)
        A2 = '%s:%s' % (method, path)
        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)
        if nonce == self._thread_local.last_nonce:
            self._thread_local.nonce_count += 1
        else:
            self._thread_local.nonce_count = 1
        ncvalue = '%08x' % self._thread_local.nonce_count
        # Build a client nonce from the count, server nonce, current time
        # and random bytes.
        s = str(self._thread_local.nonce_count).encode('utf-8')
        s += nonce.encode('utf-8')
        s += time.ctime().encode('utf-8')
        s += os.urandom(8)
        cnonce = (hashlib.sha1(s).hexdigest()[:16])
        if _algorithm == 'MD5-SESS':
            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
        if qop is None:
            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
        elif qop == 'auth' or 'auth' in qop.split(','):
            noncebit = "%s:%s:%s:%s:%s" % (
                nonce, ncvalue, cnonce, 'auth', HA2
            )
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None
        self._thread_local.last_nonce = nonce
        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (self.username, realm, nonce, path, respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if algorithm:
            base += ', algorithm="%s"' % algorithm
        if entdig:
            base += ', digest="%s"' % entdig
        if qop:
            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return 'Digest %s' % (base)
    def handle_redirect(self, r, **kwargs):
        """Reset num_401_calls counter on redirects."""
        if r.is_redirect:
            self._thread_local.num_401_calls = 1
    def handle_401(self, r, **kwargs):
        """Takes the given response and tries digest-auth, if needed.

        Registered as a response hook; resends the request once with a
        Digest Authorization header when the server answers 401 with a
        digest challenge.
        """
        if self._thread_local.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self._thread_local.pos)
        s_auth = r.headers.get('www-authenticate', '')
        # Only retry once (num_401_calls < 2) to avoid an auth loop.
        if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
            self._thread_local.num_401_calls += 1
            pat = re.compile(r'digest ', flags=re.IGNORECASE)
            self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.close()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)
            prep.headers['Authorization'] = self.build_digest_header(
                prep.method, prep.url)
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep
            return _r
        self._thread_local.num_401_calls = 1
        return r
    def __call__(self, r):
        # Initialize per-thread state, if needed
        self.init_per_thread_state()
        # If we have a saved nonce, skip the 401
        if self._thread_local.last_nonce:
            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
        try:
            self._thread_local.pos = r.body.tell()
        except AttributeError:
            # In the case of HTTPDigestAuth being reused and the body of
            # the previous request was a file-like object, pos has the
            # file position of the previous body. Ensure it's set to
            # None.
            self._thread_local.pos = None
        r.register_hook('response', self.handle_401)
        r.register_hook('response', self.handle_redirect)
        self._thread_local.num_401_calls = 1
        return r
| |
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""LogView maintains a log pane's scrolling and searching state."""
from __future__ import annotations
import asyncio
import collections
import copy
from enum import Enum
import itertools
import logging
import operator
from pathlib import Path
import re
import time
from typing import Callable, Dict, List, Optional, Tuple, TYPE_CHECKING
from prompt_toolkit.data_structures import Point
from prompt_toolkit.formatted_text import StyleAndTextTuples
from pw_console.log_filter import (
DEFAULT_SEARCH_MATCHER,
LogFilter,
RegexValidator,
SearchMatcher,
preprocess_search_regex,
)
from pw_console.log_screen import ScreenLine, LogScreen
from pw_console.log_store import LogStore
from pw_console.text_formatting import remove_formatting
if TYPE_CHECKING:
from pw_console.console_app import ConsoleApp
from pw_console.log_line import LogLine
from pw_console.log_pane import LogPane
_LOG = logging.getLogger(__package__)
class FollowEvent(Enum):
    """Follow mode scroll event types."""
    # Scroll to the most recent search match.
    SEARCH_MATCH = 'scroll_to_bottom'
    # Scroll to the last log line and keep following new output.
    STICKY_FOLLOW = 'scroll_to_bottom_with_sticky_follow'
class LogView:
    """Viewing window into a LogStore."""
    # pylint: disable=too-many-instance-attributes,too-many-public-methods
    def __init__(
        self,
        log_pane: 'LogPane',
        application: 'ConsoleApp',
        log_store: Optional[LogStore] = None,
    ):
        # Parent LogPane reference. Updated by calling `set_log_pane()`.
        self.log_pane = log_pane
        self.log_store = log_store if log_store else LogStore(
            prefs=application.prefs)
        self.log_store.set_prefs(application.prefs)
        self.log_store.register_viewer(self)
        # Log indexes bounding the current visual selection (inclusive).
        self.marked_logs_start: Optional[int] = None
        self.marked_logs_end: Optional[int] = None
        # Search variables
        self.search_text: Optional[str] = None
        self.search_filter: Optional[LogFilter] = None
        self.search_highlight: bool = False
        self.search_matcher = DEFAULT_SEARCH_MATCHER
        self.search_validator = RegexValidator()
        # Container for each log_index matched by active searches.
        self.search_matched_lines: Dict[int, int] = {}
        # Background task to find historical matched lines.
        self.search_match_count_task: Optional[asyncio.Task] = None
        # Flag for automatically jumping to each new search match as they
        # appear.
        self.follow_search_match: bool = False
        self.last_search_matched_log: Optional[int] = None
        # Follow event flag. This is set to by the new_logs_arrived() function
        # as a signal that the log screen should be scrolled to the bottom.
        # This is read by render_content() whenever the screen is drawn.
        self.follow_event: Optional[FollowEvent] = None
        self.log_screen = LogScreen(
            get_log_source=self._get_log_lines,
            get_line_wrapping=self.wrap_lines_enabled,
            get_log_formatter=self._get_table_formatter,
            get_search_filter=lambda: self.search_filter,
            get_search_highlight=lambda: self.search_highlight,
        )
        # Filter
        self.filtering_on: bool = False
        self.filters: 'collections.OrderedDict[str, LogFilter]' = (
            collections.OrderedDict())
        self.filtered_logs: collections.deque = collections.deque()
        self.filter_existing_logs_task: Optional[asyncio.Task] = None
        # Current log line index state variables:
        self._last_log_index = -1
        self._log_index = 0
        self._filtered_log_index = 0
        self._last_start_index = 0
        self._last_end_index = 0
        self._current_start_index = 0
        self._current_end_index = 0
        self._scrollback_start_index = 0
        # LogPane prompt_toolkit container render size.
        self._window_height = 20
        self._window_width = 80
        self._reset_log_screen_on_next_render: bool = True
        self._user_scroll_event: bool = False
        # Max frequency in seconds of prompt_toolkit UI redraws triggered by new
        # log lines.
        self._ui_update_frequency = 0.05
        self._last_ui_update_time = time.time()
        self._last_log_store_index = 0
        self._new_logs_since_last_render = True
        # Should new log lines be tailed?
        self.follow: bool = True
        self.visual_select_mode: bool = False
        # Cache of formatted text tuples used in the last UI render.
        self._line_fragment_cache: List[StyleAndTextTuples] = []
    def view_mode_changed(self) -> None:
        """Force a full log screen redraw on the next render."""
        self._reset_log_screen_on_next_render = True
    @property
    def log_index(self):
        """Selected log index in the active (filtered or plain) view."""
        if self.filtering_on:
            return self._filtered_log_index
        return self._log_index
    @log_index.setter
    def log_index(self, new_log_index):
        # Save the old log_index
        self._last_log_index = self.log_index
        if self.filtering_on:
            self._filtered_log_index = new_log_index
        else:
            self._log_index = new_log_index
    def _reset_log_index_changed(self) -> None:
        """Mark the current log_index as already rendered."""
        self._last_log_index = self.log_index
    def log_index_changed_since_last_render(self) -> bool:
        """Return True if the selection moved since the last render."""
        return self._last_log_index != self.log_index
    def _set_match_position(self, position: int):
        """Jump the view to the search match at the given log index."""
        self.follow = False
        self.log_index = position
        self.save_search_matched_line(position)
        self.log_screen.reset_logs(log_index=self.log_index)
        self.log_screen.shift_selected_log_to_center()
        self._user_scroll_event = True
        self.log_pane.application.redraw_ui()
    def select_next_search_matcher(self):
        """Cycle to the next available search matcher type."""
        matchers = list(SearchMatcher)
        index = matchers.index(self.search_matcher)
        new_index = (index + 1) % len(matchers)
        self.search_matcher = matchers[new_index]
    def search_forwards(self):
        """Move selection to the next search match, wrapping at the end."""
        if not self.search_filter:
            return
        self.search_highlight = True
        log_beginning_index = self.hidden_line_count()
        starting_index = self.log_index + 1
        if starting_index > self.get_last_log_index():
            starting_index = log_beginning_index
        _, logs = self._get_log_lines()
        # From current position +1 and down
        for i in range(starting_index, self.get_last_log_index() + 1):
            if self.search_filter.matches(logs[i]):
                self._set_match_position(i)
                return
        # From the beginning to the original start
        for i in range(log_beginning_index, starting_index):
            if self.search_filter.matches(logs[i]):
                self._set_match_position(i)
                return
    def search_backwards(self):
        """Move selection to the previous search match, wrapping at the top."""
        if not self.search_filter:
            return
        self.search_highlight = True
        log_beginning_index = self.hidden_line_count()
        starting_index = self.log_index - 1
        if starting_index < 0:
            starting_index = self.get_last_log_index()
        _, logs = self._get_log_lines()
        # From current position - 1 and up
        for i in range(starting_index, log_beginning_index - 1, -1):
            if self.search_filter.matches(logs[i]):
                self._set_match_position(i)
                return
        # From the end to the original start
        for i in range(self.get_last_log_index(), starting_index, -1):
            if self.search_filter.matches(logs[i]):
                self._set_match_position(i)
                return
    def set_search_regex(self,
                         text,
                         invert,
                         field,
                         matcher: Optional[SearchMatcher] = None) -> bool:
        """Compile and install a search filter; return False on bad regex."""
        search_matcher = matcher if matcher else self.search_matcher
        _LOG.debug(search_matcher)
        regex_text, regex_flags = preprocess_search_regex(
            text, matcher=search_matcher)
        try:
            compiled_regex = re.compile(regex_text, regex_flags)
            self.search_filter = LogFilter(
                regex=compiled_regex,
                input_text=text,
                invert=invert,
                field=field,
            )
            _LOG.debug(self.search_filter)
        except re.error as error:
            _LOG.debug(error)
            return False
        self.search_highlight = True
        self.search_text = regex_text
        return True
    def new_search(
        self,
        text,
        invert=False,
        field: Optional[str] = None,
        search_matcher: Optional[str] = None,
        interactive: bool = True,
    ) -> bool:
        """Start a new search for the given text."""
        valid_matchers = list(s.name for s in SearchMatcher)
        selected_matcher: Optional[SearchMatcher] = None
        if (search_matcher is not None
                and search_matcher.upper() in valid_matchers):
            selected_matcher = SearchMatcher(search_matcher.upper())
        if not self.set_search_regex(text, invert, field, selected_matcher):
            return False
        # Clear matched lines
        self.search_matched_lines = {}
        if interactive:
            # Start count historical search matches task.
            self.search_match_count_task = asyncio.create_task(
                self.count_search_matches())
        # Default search direction when hitting enter in the search bar.
        if interactive:
            self.search_forwards()
        return True
    def save_search_matched_line(self, log_index: int) -> None:
        """Save the log_index at position as a matched line."""
        self.search_matched_lines[log_index] = 0
        # Keep matched lines sorted by position
        self.search_matched_lines = {
            # Save this log_index and its match number.
            log_index: match_number
            for match_number, log_index in enumerate(
                sorted(self.search_matched_lines.keys()))
        }
    def disable_search_highlighting(self):
        """Turn off search match highlighting in the log pane."""
        self.log_pane.log_view.search_highlight = False
    def _restart_filtering(self):
        """Clear filtered state and re-filter all existing log lines."""
        # Turn on follow
        if not self.follow:
            self.toggle_follow()
        # Reset filtered logs.
        self.filtered_logs.clear()
        # Reset scrollback start
        self._scrollback_start_index = 0
        # Start filtering existing log lines.
        self.filter_existing_logs_task = asyncio.create_task(
            self.filter_past_logs())
        # Reset existing search
        self.clear_search()
        # Trigger a main menu update to set log window menu titles.
        self.log_pane.application.update_menu_items()
        # Redraw the UI
        self.log_pane.application.redraw_ui()
    def install_new_filter(self):
        """Set a filter using the current search_regex."""
        if not self.search_filter:
            return
        self.filtering_on = True
        self.filters[self.search_text] = copy.deepcopy(self.search_filter)
        self.clear_search()
    def apply_filter(self):
        """Set new filter and schedule historical log filter asyncio task."""
        self.install_new_filter()
        self._restart_filtering()
    def clear_search_highlighting(self):
        """Turn off highlighting and redraw on the next render."""
        self.search_highlight = False
        self._reset_log_screen_on_next_render = True
    def clear_search(self):
        """Discard the active search state entirely."""
        self.search_matched_lines = {}
        self.search_text = None
        self.search_filter = None
        self.search_highlight = False
        self._reset_log_screen_on_next_render = True
    def _get_log_lines(self) -> Tuple[int, collections.deque[LogLine]]:
        """Return (scrollback_start_index, active log deque)."""
        logs = self.log_store.logs
        if self.filtering_on:
            logs = self.filtered_logs
        return self._scrollback_start_index, logs
    def _get_visible_log_lines(self):
        """Return the active logs with cleared-scrollback lines removed."""
        _, logs = self._get_log_lines()
        if self._scrollback_start_index > 0:
            return collections.deque(
                itertools.islice(logs, self.hidden_line_count(), len(logs)))
        return logs
    def _get_table_formatter(self) -> Optional[Callable]:
        """Return the table row formatter if table view is enabled."""
        table_formatter = None
        if self.log_pane.table_view:
            table_formatter = self.log_store.table.formatted_row
        return table_formatter
    def delete_filter(self, filter_text):
        """Remove one filter by its text; re-filter or stop filtering."""
        if filter_text not in self.filters:
            return
        # Delete this filter
        del self.filters[filter_text]
        # If no filters left, stop filtering.
        if len(self.filters) == 0:
            self.clear_filters()
        else:
            # Erase existing filtered lines.
            self._restart_filtering()
    def clear_filters(self):
        """Remove all filters and return to the unfiltered view."""
        if not self.filtering_on:
            return
        self.clear_search()
        self.filtering_on = False
        # Annotation fixed to LogFilter for consistency with __init__
        # (previously said re.Pattern).
        self.filters: 'collections.OrderedDict[str, LogFilter]' = (
            collections.OrderedDict())
        self.filtered_logs.clear()
        # Reset scrollback start
        self._scrollback_start_index = 0
        if not self.follow:
            self.toggle_follow()
    async def count_search_matches(self):
        """Count search matches and save their locations."""
        # Wait for any filter_existing_logs_task to finish.
        if self.filtering_on and self.filter_existing_logs_task:
            await self.filter_existing_logs_task
        # BUG FIX: the search may have been cleared (clear_search sets
        # search_filter to None) between scheduling this task and running
        # it; bail out instead of raising AttributeError below.
        if not self.search_filter:
            return
        starting_index = self.get_last_log_index()
        ending_index, logs = self._get_log_lines()
        # From the end of the log store to the beginning.
        for i in range(starting_index, ending_index - 1, -1):
            # Is this log a match?
            if self.search_filter.matches(logs[i]):
                self.save_search_matched_line(i)
            # Pause every 100 lines or so
            if i % 100 == 0:
                await asyncio.sleep(.1)
    async def filter_past_logs(self):
        """Filter past log lines."""
        starting_index = self.log_store.get_last_log_index()
        ending_index = -1
        # From the end of the log store to the beginning.
        for i in range(starting_index, ending_index, -1):
            # Is this log a match?
            if self.filter_scan(self.log_store.logs[i]):
                # Add to the beginning of the deque.
                self.filtered_logs.appendleft(self.log_store.logs[i])
            # TODO(tonymd): Tune these values.
            # Pause every 100 lines or so
            if i % 100 == 0:
                await asyncio.sleep(.1)
    def set_log_pane(self, log_pane: 'LogPane'):
        """Set the parent LogPane instance."""
        self.log_pane = log_pane
    def _update_log_index(self) -> ScreenLine:
        """Sync log_index with the screen cursor; return that screen line."""
        line_at_cursor = self.log_screen.get_line_at_cursor_position()
        if line_at_cursor.log_index is not None:
            self.log_index = line_at_cursor.log_index
        return line_at_cursor
    def get_current_line(self) -> int:
        """Return the currently selected log event index."""
        return self.log_index
    def get_total_count(self):
        """Total size of the logs store."""
        return (len(self.filtered_logs)
                if self.filtering_on else self.log_store.get_total_count())
    def get_last_log_index(self):
        """Index of the last log line, or 0 when there are no logs."""
        total = self.get_total_count()
        return 0 if total < 0 else total - 1
    def clear_scrollback(self):
        """Hide log lines before the max length of the stored logs."""
        # Enable follow and scroll to the bottom, then clear.
        if not self.follow:
            self.toggle_follow()
        self._scrollback_start_index = self.log_index
        self._reset_log_screen_on_next_render = True
    def hidden_line_count(self):
        """Return the number of hidden lines."""
        if self._scrollback_start_index > 0:
            return self._scrollback_start_index + 1
        return 0
    def undo_clear_scrollback(self):
        """Reset the current scrollback start index."""
        self._scrollback_start_index = 0
    def wrap_lines_enabled(self):
        """Get the parent log pane wrap lines setting."""
        if not self.log_pane:
            return False
        return self.log_pane.wrap_lines
    def toggle_follow(self):
        """Toggle auto line following."""
        self.follow = not self.follow
        if self.follow:
            # Disable search match follow mode.
            self.follow_search_match = False
            self.scroll_to_bottom()
    def filter_scan(self, log: 'LogLine'):
        """Return True if the log matches every active filter."""
        filter_match_count = 0
        for _filter_text, log_filter in self.filters.items():
            if log_filter.matches(log):
                filter_match_count += 1
            else:
                break
        if filter_match_count == len(self.filters):
            return True
        return False
    def new_logs_arrived(self):
        """Check newly arrived log messages.
        Depending on where log statements occur ``new_logs_arrived`` may be in a
        separate thread since it is triggerd by the Python log handler
        ``emit()`` function. In this case the log handler is the LogStore
        instance ``self.log_store``. This function should not redraw the screen
        or scroll.
        """
        latest_total = self.log_store.get_total_count()
        if self.filtering_on:
            # Scan newly arived log lines
            for i in range(self._last_log_store_index, latest_total):
                if self.filter_scan(self.log_store.logs[i]):
                    self.filtered_logs.append(self.log_store.logs[i])
        if self.search_filter:
            last_matched_log: Optional[int] = None
            # Scan newly arived log lines
            for i in range(self._last_log_store_index, latest_total):
                if self.search_filter.matches(self.log_store.logs[i]):
                    self.save_search_matched_line(i)
                    last_matched_log = i
            # BUG FIX: compare against None explicitly; a match at log
            # index 0 is falsy and was previously dropped.
            if last_matched_log is not None and self.follow_search_match:
                # Set the follow event flag for the next render_content call.
                self.follow_event = FollowEvent.SEARCH_MATCH
                self.last_search_matched_log = last_matched_log
        self._last_log_store_index = latest_total
        self._new_logs_since_last_render = True
        if self.follow:
            # Set the follow event flag for the next render_content call.
            self.follow_event = FollowEvent.STICKY_FOLLOW
        # Trigger a UI update
        self._update_prompt_toolkit_ui()
    def _update_prompt_toolkit_ui(self):
        """Update Prompt Toolkit UI if a certain amount of time has passed."""
        emit_time = time.time()
        # Has enough time passed since last UI redraw?
        if emit_time > self._last_ui_update_time + self._ui_update_frequency:
            # Update last log time
            self._last_ui_update_time = emit_time
            # Trigger Prompt Toolkit UI redraw.
            self.log_pane.application.redraw_ui()
    def get_cursor_position(self) -> Point:
        """Return the position of the cursor."""
        return Point(0, self.log_screen.cursor_position)
    def scroll_to_top(self):
        """Move selected index to the beginning."""
        # Stop following so cursor doesn't jump back down to the bottom.
        self.follow = False
        # First possible log index that should be displayed
        log_beginning_index = self.hidden_line_count()
        self.log_index = log_beginning_index
        self.log_screen.reset_logs(log_index=self.log_index)
        self.log_screen.shift_selected_log_to_top()
        self._user_scroll_event = True
    def move_selected_line_to_top(self):
        """Scroll so the selected log is at the top of the window."""
        self.follow = False
        # Update selected line
        self._update_log_index()
        self.log_screen.reset_logs(log_index=self.log_index)
        self.log_screen.shift_selected_log_to_top()
        self._user_scroll_event = True
    def center_log_line(self):
        """Scroll so the selected log is centered in the window."""
        self.follow = False
        # Update selected line
        self._update_log_index()
        self.log_screen.reset_logs(log_index=self.log_index)
        self.log_screen.shift_selected_log_to_center()
        self._user_scroll_event = True
    def scroll_to_bottom(self, with_sticky_follow: bool = True):
        """Move selected index to the end."""
        # Don't change following state like scroll_to_top.
        self.log_index = max(0, self.get_last_log_index())
        self.log_screen.reset_logs(log_index=self.log_index)
        # Sticky follow mode
        if with_sticky_follow:
            self.follow = True
        self._user_scroll_event = True
    def scroll(self, lines) -> None:
        """Scroll up or down by plus or minus lines.
        This method is only called by user keybindings.
        """
        # If the user starts scrolling, stop auto following.
        self.follow = False
        self.log_screen.scroll_subline(lines)
        self._user_scroll_event = True
        # Update the current log
        current_line = self._update_log_index()
        # Don't check for sticky follow mode if selecting lines.
        if self.visual_select_mode:
            return
        # Is the last log line selected?
        if self.log_index == self.get_last_log_index():
            # Is the last line of the current log selected?
            if current_line.subline + 1 == current_line.height:
                # Sticky follow mode
                self.follow = True
    def visual_selected_log_count(self) -> int:
        """Number of logs in the visual selection (0 when none)."""
        if self.marked_logs_start is None or self.marked_logs_end is None:
            return 0
        return (self.marked_logs_end - self.marked_logs_start) + 1
    def clear_visual_selection(self) -> None:
        """Drop the visual selection and leave visual select mode."""
        self.marked_logs_start = None
        self.marked_logs_end = None
        self.visual_select_mode = False
        self._user_scroll_event = True
        self.log_pane.application.redraw_ui()
    def visual_select_all(self) -> None:
        """Visually select every visible log line."""
        self.marked_logs_start = self._scrollback_start_index
        self.marked_logs_end = self.get_total_count() - 1
        self.visual_select_mode = True
        self._user_scroll_event = True
        self.log_pane.application.redraw_ui()
    def visual_select_up(self) -> None:
        """Extend the visual selection one line upward."""
        # Select the current line
        self.visual_select_line(self.get_cursor_position(), autoscroll=False)
        # Move the cursor by 1
        self.scroll_up(1)
        # Select the new line
        self.visual_select_line(self.get_cursor_position(), autoscroll=False)
    def visual_select_down(self) -> None:
        """Extend the visual selection one line downward."""
        # Select the current line
        self.visual_select_line(self.get_cursor_position(), autoscroll=False)
        # Move the cursor by 1
        self.scroll_down(1)
        # Select the new line
        self.visual_select_line(self.get_cursor_position(), autoscroll=False)
    def visual_select_line(self,
                           mouse_position: Point,
                           autoscroll: bool = True) -> None:
        """Mark the log under mouse_position as visually selected."""
        # Check mouse_position is valid
        if not 0 <= mouse_position.y < len(self.log_screen.line_buffer):
            return
        # Update mode flags
        self.visual_select_mode = True
        self.follow = False
        # Get the ScreenLine for the cursor position
        screen_line = self.log_screen.line_buffer[mouse_position.y]
        if screen_line.log_index is None:
            return
        if self.marked_logs_start is None:
            self.marked_logs_start = screen_line.log_index
        if self.marked_logs_end is None:
            self.marked_logs_end = screen_line.log_index
        if screen_line.log_index < self.marked_logs_start:
            self.marked_logs_start = screen_line.log_index
        elif screen_line.log_index > self.marked_logs_end:
            self.marked_logs_end = screen_line.log_index
        # Update cursor position
        self.log_screen.move_cursor_to_position(mouse_position.y)
        # Autoscroll when mouse dragging on the top or bottom of the window.
        if autoscroll:
            if mouse_position.y == 0:
                self.scroll_up(1)
            elif mouse_position.y == self._window_height - 1:
                self.scroll_down(1)
        # Trigger a rerender.
        self._user_scroll_event = True
        self.log_pane.application.redraw_ui()
    def scroll_to_position(self, mouse_position: Point):
        """Set the selected log line to the mouse_position."""
        # Disable follow mode when the user clicks or mouse drags on a log line.
        self.follow = False
        self.log_screen.move_cursor_to_position(mouse_position.y)
        self._update_log_index()
        self._user_scroll_event = True
    def scroll_up_one_page(self):
        """Move the selected log index up by one window height."""
        lines = 1
        if self._window_height > 0:
            lines = self._window_height
        self.scroll(-1 * lines)
    def scroll_down_one_page(self):
        """Move the selected log index down by one window height."""
        lines = 1
        if self._window_height > 0:
            lines = self._window_height
        self.scroll(lines)
    def scroll_down(self, lines=1):
        """Move the selected log index down by one or more lines."""
        self.scroll(lines)
    def scroll_up(self, lines=1):
        """Move the selected log index up by one or more lines."""
        self.scroll(-1 * lines)
    def log_start_end_indexes_changed(self) -> bool:
        """Return True if the rendered start/end window moved."""
        return (self._last_start_index != self._current_start_index
                or self._last_end_index != self._current_end_index)
    def render_table_header(self):
        """Get pre-formatted table header."""
        return self.log_store.render_table_header()
    def render_content(self) -> list:
        """Return logs to display on screen as a list of FormattedText tuples.
        This function determines when the log screen requires re-rendeing based
        on user scroll events, follow mode being on, or log pane being
        empty. The FormattedText tuples passed to prompt_toolkit are cached if
        no updates are required.
        """
        screen_update_needed = False
        # Check window size
        if self.log_pane.pane_resized():
            self._window_width = self.log_pane.current_log_pane_width
            self._window_height = self.log_pane.current_log_pane_height
            self.log_screen.resize(self._window_width, self._window_height)
            self._reset_log_screen_on_next_render = True
        if self.follow_event is not None:
            if (self.follow_event == FollowEvent.SEARCH_MATCH
                    and self.last_search_matched_log):
                self.log_index = self.last_search_matched_log
                self.last_search_matched_log = None
                self._reset_log_screen_on_next_render = True
            elif self.follow_event == FollowEvent.STICKY_FOLLOW:
                # Jump to the last log message
                self.log_index = max(0, self.get_last_log_index())
            self.follow_event = None
            screen_update_needed = True
        if self._reset_log_screen_on_next_render or self.log_screen.empty():
            # Clear the reset flag.
            self._reset_log_screen_on_next_render = False
            self.log_screen.reset_logs(log_index=self.log_index)
            screen_update_needed = True
        elif self.follow and self._new_logs_since_last_render:
            # Follow mode is on so add new logs to the screen
            self._new_logs_since_last_render = False
            current_log_index = self.log_index
            last_rendered_log_index = self.log_screen.last_appended_log_index
            # If so many logs have arrived than can fit on the screen, redraw
            # the whole screen from the new position.
            if (current_log_index -
                    last_rendered_log_index) > self.log_screen.height:
                self.log_screen.reset_logs(log_index=self.log_index)
            # A small amount of logs have arrived, append them one at a time
            # without redrawing the whole screen.
            else:
                for i in range(last_rendered_log_index + 1,
                               current_log_index + 1):
                    self.log_screen.append_log(i)
            screen_update_needed = True
        if self.follow:
            # Select the last line for follow mode.
            self.log_screen.move_cursor_to_bottom()
            screen_update_needed = True
        if self._user_scroll_event:
            self._user_scroll_event = False
            screen_update_needed = True
        if screen_update_needed:
            self._line_fragment_cache = self.log_screen.get_lines(
                marked_logs_start=self.marked_logs_start,
                marked_logs_end=self.marked_logs_end,
            )
        return self._line_fragment_cache
    def _logs_to_text(
        self,
        use_table_formatting: bool = True,
        selected_lines_only: bool = False,
    ) -> str:
        """Convert all or selected log messages to plaintext."""
        def get_table_string(log: LogLine) -> str:
            return remove_formatting(self.log_store.table.formatted_row(log))
        formatter: Callable[[LogLine],
                            str] = operator.attrgetter('ansi_stripped_log')
        if use_table_formatting:
            formatter = get_table_string
        _start_log_index, log_source = self._get_log_lines()
        log_index_range = range(self._scrollback_start_index,
                                self.get_total_count())
        if (selected_lines_only and self.marked_logs_start is not None
                and self.marked_logs_end is not None):
            log_index_range = range(self.marked_logs_start,
                                    self.marked_logs_end + 1)
        text_output = ''
        for i in log_index_range:
            log_text = formatter(log_source[i])
            text_output += log_text
            if not log_text.endswith('\n'):
                text_output += '\n'
        return text_output
    def export_logs(
        self,
        use_table_formatting: bool = True,
        selected_lines_only: bool = False,
        file_name: Optional[str] = None,
        to_clipboard: bool = False,
        add_markdown_fence: bool = False,
    ) -> bool:
        """Export log lines to file or clipboard."""
        text_output = self._logs_to_text(use_table_formatting,
                                         selected_lines_only)
        if file_name:
            target_path = Path(file_name).expanduser()
            with target_path.open('w') as output_file:
                output_file.write(text_output)
            _LOG.debug('Saved to file: %s', file_name)
        elif to_clipboard:
            if add_markdown_fence:
                text_output = '```\n' + text_output + '```\n'
            self.log_pane.application.application.clipboard.set_text(
                text_output)
            _LOG.debug('Copied logs to clipboard.')
        return True
| |
import json
import re
from django.conf import settings
from django.core.urlresolvers import reverse
from oscar.core.loading import get_model
from oscarapi.tests.utils import APITest
# Resolve the Basket model through Oscar's dynamic model loader.
Basket = get_model('basket', 'Basket')
class BasketTest(APITest):
fixtures = [
'product', 'productcategory', 'productattribute', 'productclass',
'productattributevalue', 'category', 'attributeoptiongroup', 'attributeoption',
'stockrecord', 'partner'
]
    def setUp(self):
        """Disable the basket quantity threshold before each test runs."""
        # make sure we have this disabled for most of the tests
        settings.OSCAR_MAX_BASKET_QUANTITY_THRESHOLD = None
        super(BasketTest, self).setUp()
    def test_basket_api_create(self):
        "The basket api create command should work with regular cookie based login"
        url = reverse('basket-list')
        empty = Basket.objects.all()
        self.assertFalse(empty.exists(), "There should be no baskets yet.")
        # anonymous
        data = {}
        self.response = self.client.post(url, json.dumps(data), content_type='application/json')
        self.response.assertStatusEqual(403, "Anonymous users can not use the basket api to create baskets.")
        # authenticated
        self.login('nobody', 'nobody')
        data = {'owner': "http://testserver%s" % reverse('user-detail', args=[2])}
        self.response = self.client.post(url, json.dumps(data), content_type='application/json')
        self.response.assertStatusEqual(403, "Authenticated regular users can not use the basket api to create baskets.")
        # admin
        self.login('admin', 'admin')
        data = {'owner': "http://testserver%s" % reverse('user-detail', args=[1])}
        self.response = self.client.post(url, json.dumps(data), content_type='application/json')
        self.response.assertStatusEqual(201, "It should be possible for a basket to be created, for a specific user.")
        self.response.assertObjectIdEqual('owner', 1)
        # When we created a basket, it should be listed in the basket-list view
        self.response = self.client.get(url, content_type='application/json')
        self.assertEqual(len(self.response.data), 1)
        # admin creating a basket with no owner specified
        data = {}
        self.response = self.client.post(url, json.dumps(data), content_type='application/json')
        self.response.assertStatusEqual(201, "It should be possible for a basket to be created for an anonymous user.")
        self.assertEqual(Basket.objects.count(), 2, "2 baskets should after creating 2 baskets.")
    def test_basket_api_create_header(self):
        "The basket api create command should work with header based login."
        empty = Basket.objects.all()
        self.assertFalse(empty.exists(), "There should be no baskets yet.")
        # regular user: creation via the api is forbidden
        if self.hlogin('nobody', 'nobody', session_id='nobody'):
            self.response = self.post('basket-list', session_id='nobody', authenticated=True,
                owner="http://testserver%s" % reverse('user-detail', args=[2])
            )
            self.response.assertStatusEqual(403, "Authenticated regular users can not use the basket api to create baskets.")
        # admin user: creation via the api is allowed
        if self.hlogin('admin', 'admin', session_id='admin'):
            self.response = self.post('basket-list', session_id='admin', authenticated=True,
                owner="http://testserver%s" % reverse('user-detail', args=[1])
            )
            self.response.assertStatusEqual(201, "It should be possible for a basket to be created, for a specific user.")
            self.response.assertObjectIdEqual('owner', 1)
        self.assertEqual(Basket.objects.count(), 3, "There should be 2 baskets from loging in and 1 is created with the api.")
def test_retrieve_basket(self):
    "A user can fetch their own basket with the basket API and gets the same basket every time."
    # anonymous user: basket has no owner, and a second fetch returns the same id
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('owner', None)
    basket_id = self.response['id']
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    # authenticated user: basket is owned by user 2 and stable across fetches
    self.login('nobody', 'nobody')
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    self.response.assertObjectIdEqual('owner', 2)
    basket_id = self.response['id']
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    # admin: only reachable when admin api access is not blocked
    with self.settings(OSCARAPI_BLOCK_ADMIN_API_ACCESS=False):
        self.login('admin', 'admin')
        self.response = self.get('api-basket')
        self.response.assertStatusEqual(200)
        self.response.assertObjectIdEqual('owner', 1)
        basket_id = self.response['id']
        self.response = self.get('api-basket')
        self.response.assertStatusEqual(200)
        self.response.assertValueEqual('id', basket_id)
    self.assertEqual(Basket.objects.count(), 3, "There should be 3 baskets open after 3 users accessed a basket.")
def test_retrieve_basket_header(self):
    "Using header authentication the basket api should also work perfectly."
    # anonymous: identified by the session-id header instead of a cookie
    self.response = self.get('api-basket', session_id='anonymous')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('owner', None)
    basket_id = self.response['id']
    self.response = self.get('api-basket', session_id='anonymous')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    # authenticated user: basket owned by user 2, stable across fetches
    self.hlogin('nobody', 'nobody', session_id='nobody')
    self.response = self.get('api-basket', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    self.response.assertObjectIdEqual('owner', 2)
    basket_id = self.response['id']
    self.response = self.get('api-basket', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    # admin: only reachable when admin api access is not blocked
    with self.settings(OSCARAPI_BLOCK_ADMIN_API_ACCESS=False):
        self.hlogin('admin', 'admin', session_id='admin')
        self.response = self.get('api-basket', session_id='admin', authenticated=True)
        self.response.assertStatusEqual(200)
        self.response.assertObjectIdEqual('owner', 1)
        basket_id = self.response['id']
        self.response = self.get('api-basket', session_id='admin', authenticated=True)
        self.response.assertStatusEqual(200)
        self.response.assertValueEqual('id', basket_id)
    self.assertEqual(Basket.objects.count(), 3, "There should be 3 baskets open after 3 users accessed a basket.")
def test_basket_read_permissions(self):
    "A regular or anonymous user should not be able to fetch someone else's basket."
    # anonymous user can retrieve their own basket.
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    # the urls embedded in the response must be accessible to their owner.
    basket_url = self.response['url']
    basket_lines = self.response['lines']
    self.response = self.client.get(basket_url)
    self.response.assertStatusEqual(200)
    self.response = self.client.get(basket_lines)
    self.response.assertStatusEqual(200)
    # create a basket for somebody else (user 2, 'nobody')
    b = Basket.objects.create(owner_id=2)
    self.assertEqual(str(b.owner), 'nobody')
    self.assertEqual(b.pk, 2)
    # try to access somebody else's basket by guessing the primary key.
    url = reverse('basket-detail', args=(2,))
    self.response = self.client.get(url)
    self.response.assertStatusEqual(403, "Script kiddies should fail to collect other users carts.")
    url = reverse('basket-lines-list', args=(2,))
    self.response = self.client.get(url)
    self.response.assertStatusEqual(403, "Script kiddies should fail to collect other users cart items.")
    # now try for authenticated user.
    self.login('nobody', 'nobody')
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    # own basket urls are accessible.
    basket_url = self.response['url']
    basket_lines = self.response['lines']
    self.response = self.client.get(basket_url)
    self.response.assertStatusEqual(200)
    self.response = self.client.get(basket_lines)
    self.response.assertStatusEqual(200)
    # try to access somebody else's basket (the admin's, pk 1).
    url = reverse('basket-detail', args=(1,))
    self.response = self.client.get(url)
    self.response.assertStatusEqual(403, "Script kiddies should fail to collect other users carts.")
    url = reverse('basket-lines-list', args=(1,))
    self.response = self.client.get(url)
    self.response.assertStatusEqual(403, "Script kiddies should fail to collect other users cart items.")
    # now let's show the power of the admin!
    with self.settings(OSCARAPI_BLOCK_ADMIN_API_ACCESS=False):
        self.login('admin', 'admin')
        self.response = self.get('api-basket')
        self.response.assertStatusEqual(200)
        # own basket urls are accessible.
        basket_url = self.response['url']
        basket_lines = self.response['lines']
        self.response = self.client.get(basket_url)
        self.response.assertStatusEqual(200)
        self.response = self.client.get(basket_lines)
        self.response.assertStatusEqual(200)
        # staff users may read anyone's basket.
        url = reverse('basket-detail', args=(1,))
        self.response = self.client.get(url)
        self.response.assertStatusEqual(200, "Staff users can access anything.")
        url = reverse('basket-lines-list', args=(1,))
        self.response = self.client.get(url)
        self.response.assertStatusEqual(200, "Staff users can access anything.")
    self.assertEqual(Basket.objects.count(), 3, "There should be 3 baskets open after 3 users accessed a basket.")
def test_basket_read_permissions_header(self):
    "A regular or anonymous user should not be able to fetch someone else's basket, even when authenticating with a session header."
    # anonymous user can retrieve a basket.
    self.response = self.get('api-basket', session_id='anonymous')
    self.response.assertStatusEqual(200)
    # the urls embedded in the response must be accessible to their owner.
    basket_url = self.response['url']
    basket_lines = self.response['lines']
    self.response = self.client.get(basket_url, HTTP_SESSION_ID='SID:ANON:testserver:anonymous')
    self.response.assertStatusEqual(200)
    self.response = self.client.get(basket_lines, HTTP_SESSION_ID='SID:ANON:testserver:anonymous')
    self.response.assertStatusEqual(200)
    # create a basket for somebody else (user 2, 'nobody')
    b = Basket.objects.create(owner_id=2)
    self.assertEqual(str(b.owner), 'nobody')
    self.assertEqual(b.pk, 2)
    # try to access somebody else's basket by guessing the primary key.
    url = reverse('basket-detail', args=(2,))
    self.response = self.client.get(url, HTTP_SESSION_ID='SID:ANON:testserver:anonymous')
    self.response.assertStatusEqual(403, "Script kiddies should fail to collect other users carts.")
    url = reverse('basket-lines-list', args=(2,))
    self.response = self.client.get(url, HTTP_SESSION_ID='SID:ANON:testserver:anonymous')
    self.response.assertStatusEqual(403, "Script kiddies should fail to collect other users cart items.")
    # now try for authenticated user.
    self.hlogin('nobody', 'nobody', session_id='nobody')
    self.response = self.get('api-basket', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    # own basket urls are accessible.
    basket_url = self.response['url']
    basket_lines = self.response['lines']
    self.response = self.client.get(basket_url, HTTP_SESSION_ID='SID:AUTH:testserver:nobody')
    self.response.assertStatusEqual(200)
    self.response = self.client.get(basket_lines, HTTP_SESSION_ID='SID:AUTH:testserver:nobody')
    self.response.assertStatusEqual(200)
    # try to access somebody else's basket (the admin's, pk 1).
    url = reverse('basket-detail', args=(1,))
    self.response = self.client.get(url, HTTP_SESSION_ID='SID:AUTH:testserver:nobody')
    self.response.assertStatusEqual(403, "Script kiddies should fail to collect other users carts.")
    url = reverse('basket-lines-list', args=(1,))
    self.response = self.client.get(url, HTTP_SESSION_ID='SID:AUTH:testserver:nobody')
    self.response.assertStatusEqual(403, "Script kiddies should fail to collect other users cart items.")
    # now let's show the power of the admin!
    with self.settings(OSCARAPI_BLOCK_ADMIN_API_ACCESS=False):
        self.hlogin('admin', 'admin', session_id='admin')
        self.response = self.get('api-basket', session_id='admin', authenticated=True)
        self.response.assertStatusEqual(200)
        # own basket urls are accessible.
        basket_url = self.response['url']
        basket_lines = self.response['lines']
        self.response = self.client.get(basket_url, HTTP_SESSION_ID='SID:AUTH:testserver:admin')
        self.response.assertStatusEqual(200)
        self.response = self.client.get(basket_lines, HTTP_SESSION_ID='SID:AUTH:testserver:admin')
        self.response.assertStatusEqual(200)
        # staff users may read anyone's basket.
        url = reverse('basket-detail', args=(1,))
        self.response = self.client.get(url, HTTP_SESSION_ID='SID:AUTH:testserver:admin')
        self.response.assertStatusEqual(200, "Staff users can access anything.")
        url = reverse('basket-lines-list', args=(1,))
        self.response = self.client.get(url, HTTP_SESSION_ID='SID:AUTH:testserver:admin')
        self.response.assertStatusEqual(200, "Staff users can access anything.")
    self.assertEqual(Basket.objects.count(), 3, "There should be 3 baskets open after 3 users accessed a basket.")
def test_basket_write_permissions_anonymous(self):
    "An anonymous user should not be able to change someone else's basket."
    # anonymous user
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('status', 'Open')
    # remember the id and url of our own basket.
    basket_id = self.response['id']
    basket_url = self.response['url']
    # change status to saved
    url = reverse('basket-detail', args=(basket_id,))
    self.response = self.put(url, status='Saved')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    self.response.assertValueEqual('status', 'Saved')
    # and back to open again
    self.response = self.put(url, status='Open')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    self.response.assertValueEqual('status', 'Open')
    # write a line to the basket
    line_data = {
        "basket": basket_url,
        "line_reference": "234_345",
        "product": "http://testserver/api/products/1/",
        "stockrecord": "http://testserver/api/stockrecords/1/",
        "quantity": 3,
        "price_currency": "EUR",
        "price_excl_tax": "100.0",
        "price_incl_tax": "121.0",
    }
    line_url = reverse('basket-lines-list', args=(basket_id,))
    self.response = self.post(line_url, **line_data)
    self.response.assertStatusEqual(201)
    # a user may delete their own basket
    self.response = self.delete(url)
    self.response.assertStatusEqual(204)
    # now let's start messing around: get a fresh basket first.
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    basket_id = self.response['id']
    # create a basket for another user (user 2, 'nobody').
    b = Basket.objects.create(owner_id=2)
    self.assertEqual(str(b.owner), 'nobody')
    self.assertEqual(Basket.objects.count(), 2)
    nobody_basket_id = b.pk
    # remember the id and url of our own basket.
    basket_id = self.response['id']
    basket_url = self.response['url']
    url = reverse('basket-detail', args=(basket_id,))
    self.response.assertValueEqual('status', 'Open')
    # try to write to someone else's basket by sending the primary key
    # along; the id in the payload must be ignored.
    self.response = self.put(url, status='Saved', id=nobody_basket_id)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id, 'Primary key value can not be changed.')
    self.response.assertValueEqual('status', 'Saved')
    # try to write to someone else's basket directly
    url = reverse('basket-detail', args=(nobody_basket_id,))
    self.response = self.put(url, status='Saved')
    self.response.assertStatusEqual(403)
    # try to delete someone else's basket
    self.response = self.delete(url)
    self.response.assertStatusEqual(403)
    # try adding lines to someone else's basket: the payload references
    # the other user's basket even though it is posted to our own
    # lines endpoint.
    line_data = {
        "basket": "http://testserver/api/baskets/%s/" % nobody_basket_id,
        "line_reference": "234_345",
        "product": "http://testserver/api/products/1/",
        "stockrecord": "http://testserver/api/stockrecords/1/",
        "quantity": 3,
        "price_currency": "EUR",
        "price_excl_tax": "100.0",
        "price_incl_tax": "121.0"
    }
    url = reverse('basket-lines-list', args=(basket_id,))
    self.response = self.post(url, **line_data)
    self.response.assertStatusEqual(403)
def test_basket_write_permissions_authenticated(self):
    "An authenticated user should not be able to change someone else's basket"
    # now try for authenticated user.
    self.login('nobody', 'nobody')
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    # remember the id and url of our own basket and check ownership (user 2).
    basket_id = self.response['id']
    basket_url = self.response['url']
    owner_url = self.response['owner']
    self.assertIn(reverse('user-detail', args=(2,)), owner_url)
    self.response.assertValueEqual('status', 'Open')
    # change status to saved
    url = reverse('basket-detail', args=(basket_id,))
    self.response = self.put(url, status='Saved')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    self.response.assertValueEqual('status', 'Saved')
    # and back to open again
    self.response = self.put(url, status='Open')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    self.response.assertValueEqual('status', 'Open')
    # write a line to the basket
    line_data = {
        "basket": basket_url,
        "line_reference": "234_345",
        "product": "http://testserver/api/products/1/",
        "stockrecord": "http://testserver/api/stockrecords/1/",
        "quantity": 3,
        "price_currency": "EUR",
        "price_excl_tax": "100.0",
        "price_incl_tax": "121.0",
    }
    line_url = reverse('basket-lines-list', args=(basket_id,))
    self.response = self.post(line_url, **line_data)
    self.response.assertStatusEqual(201)
    # a user may delete their own basket
    self.response = self.delete(url)
    self.response.assertStatusEqual(204)
    # now let's start messing around: get a fresh basket first.
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    basket_id = self.response['id']
    # create a basket for another user (user 3, 'somebody').
    b = Basket.objects.create(owner_id=3)
    self.assertEqual(str(b.owner), 'somebody')
    self.assertEqual(Basket.objects.count(), 2)
    somebody_basket_id = b.pk
    # remember the id and url of our own basket.
    basket_id = self.response['id']
    basket_url = self.response['url']
    url = reverse('basket-detail', args=(basket_id,))
    self.response.assertValueEqual('status', 'Open')
    # try to write to someone else's basket by sending the primary key
    # along; the id in the payload must be ignored.
    self.response = self.put(url, status='Saved', id=somebody_basket_id)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id, 'Primary key value can not be changed.')
    self.response.assertValueEqual('status', 'Saved')
    # try to write to someone else's basket directly
    url = reverse('basket-detail', args=(somebody_basket_id,))
    self.response = self.put(url, status='Saved')
    self.response.assertStatusEqual(403)
    # try to delete someone else's basket
    self.response = self.delete(url)
    self.response.assertStatusEqual(403)
    # try adding lines to someone else's basket: the payload references
    # the other user's basket even though it is posted to our own
    # lines endpoint.
    line_data = {
        "basket": "http://testserver/api/baskets/%s/" % somebody_basket_id,
        "line_reference": "234_345",
        "product": "http://testserver/api/products/1/",
        "stockrecord": "http://testserver/api/stockrecords/1/",
        "quantity": 3,
        "price_currency": "EUR",
        "price_excl_tax": "100.0",
        "price_incl_tax": "121.0"
    }
    url = reverse('basket-lines-list', args=(basket_id,))
    self.response = self.post(url, **line_data)
    self.response.assertStatusEqual(403)
def test_basket_write_permissions_header_authenticated(self):
    "An authenticated user should not be able to change someone else's basket, when authenticating with a session header."
    # now try for authenticated user.
    self.hlogin('nobody', 'nobody', session_id='nobody')
    self.response = self.get('api-basket', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    # remember the id and url of our own basket and check ownership (user 2).
    basket_id = self.response['id']
    basket_url = self.response['url']
    owner_url = self.response['owner']
    self.assertIn(reverse('user-detail', args=(2,)), owner_url)
    self.response.assertValueEqual('status', 'Open')
    # change status to saved
    url = reverse('basket-detail', args=(basket_id,))
    self.response = self.put(url, status='Saved', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    self.response.assertValueEqual('status', 'Saved')
    # and back to open again
    self.response = self.put(url, status='Open', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id)
    self.response.assertValueEqual('status', 'Open')
    # write a line to the basket
    line_data = {
        "basket": basket_url,
        "line_reference": "234_345",
        "product": "http://testserver/api/products/1/",
        "stockrecord": "http://testserver/api/stockrecords/1/",
        "quantity": 3,
        "price_currency": "EUR",
        "price_excl_tax": "100.0",
        "price_incl_tax": "121.0",
    }
    line_url = reverse('basket-lines-list', args=(basket_id,))
    self.response = self.post(line_url, session_id='nobody', authenticated=True, **line_data)
    self.response.assertStatusEqual(201)
    # a user may delete their own basket
    self.response = self.delete(url, session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(204)
    # now let's start messing around: get a fresh basket first.
    self.response = self.get('api-basket', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    basket_id = self.response['id']
    # create a basket for another user (user 3, 'somebody').
    b = Basket.objects.create(owner_id=3)
    self.assertEqual(str(b.owner), 'somebody')
    self.assertEqual(Basket.objects.count(), 2)
    somebody_basket_id = b.pk
    # remember the id and url of our own basket.
    basket_id = self.response['id']
    basket_url = self.response['url']
    url = reverse('basket-detail', args=(basket_id,))
    self.response.assertValueEqual('status', 'Open')
    # try to write to someone else's basket by sending the primary key
    # along; the id in the payload must be ignored.
    self.response = self.put(url, status='Saved', session_id='nobody', authenticated=True, id=somebody_basket_id)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('id', basket_id, 'Primary key value can not be changed.')
    self.response.assertValueEqual('status', 'Saved')
    # try to write to someone else's basket directly
    url = reverse('basket-detail', args=(somebody_basket_id,))
    self.response = self.put(url, status='Saved', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(403)
    # try to delete someone else's basket
    self.response = self.delete(url, session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(403)
    # try adding lines to someone else's basket: the payload references
    # the other user's basket even though it is posted to our own
    # lines endpoint.
    line_data = {
        "basket": "http://testserver/api/baskets/%s/" % somebody_basket_id,
        "line_reference": "234_345",
        "product": "http://testserver/api/products/1/",
        "stockrecord": "http://testserver/api/stockrecords/1/",
        "quantity": 3,
        "price_currency": "EUR",
        "price_excl_tax": "100.0",
        "price_incl_tax": "121.0"
    }
    url = reverse('basket-lines-list', args=(basket_id,))
    self.response = self.post(url, session_id='nobody', authenticated=True, **line_data)
    self.response.assertStatusEqual(403)
def test_basket_write_permissions_admin(self):
    "An admin user can change someone else's basket."
    with self.settings(OSCARAPI_BLOCK_ADMIN_API_ACCESS=False):
        # now try for authenticated user.
        self.login('admin', 'admin')
        self.response = self.get('api-basket')
        self.response.assertStatusEqual(200)
        # remember the id and url of our own basket and check ownership (user 1).
        basket_id = self.response['id']
        basket_url = self.response['url']
        owner_url = self.response['owner']
        self.assertIn(reverse('user-detail', args=(1,)), owner_url)
        self.response.assertValueEqual('status', 'Open')
        # change status to saved
        url = reverse('basket-detail', args=(basket_id,))
        self.response = self.put(url, status='Saved')
        self.response.assertStatusEqual(200)
        self.response.assertValueEqual('id', basket_id)
        self.response.assertValueEqual('status', 'Saved')
        # and back to open again
        self.response = self.put(url, status='Open')
        self.response.assertStatusEqual(200)
        self.response.assertValueEqual('id', basket_id)
        self.response.assertValueEqual('status', 'Open')
        # write a line to the basket
        line_data = {
            "basket": basket_url,
            "line_reference": "234_345",
            "product": "http://testserver/api/products/1/",
            "stockrecord": "http://testserver/api/stockrecords/1/",
            "quantity": 3,
            "price_currency": "EUR",
            "price_excl_tax": "100.0",
            "price_incl_tax": "121.0",
        }
        line_url = reverse('basket-lines-list', args=(basket_id,))
        self.response = self.post(line_url, **line_data)
        self.response.assertStatusEqual(201)
        # a user may delete their own basket
        self.response = self.delete(url)
        self.response.assertStatusEqual(204)
        # now let's start messing around: get a fresh basket first.
        self.response = self.get('api-basket')
        self.response.assertStatusEqual(200)
        basket_id = self.response['id']
        # create a basket for another user (user 3, 'somebody').
        b = Basket.objects.create(owner_id=3)
        self.assertEqual(str(b.owner), 'somebody')
        self.assertEqual(Basket.objects.count(), 2)
        somebody_basket_id = b.pk
        # remember the id and url of our own basket.
        basket_id = self.response['id']
        basket_url = self.response['url']
        url = reverse('basket-detail', args=(basket_id,))
        self.response.assertValueEqual('status', 'Open')
        # try to write to someone else's basket by sending the primary key
        # along; even for admins the id in the payload is ignored.
        self.response = self.put(url, status='Saved', id=somebody_basket_id)
        self.response.assertStatusEqual(200)
        self.response.assertValueEqual('id', basket_id, 'Primary key value can not be changed.')
        self.response.assertValueEqual('status', 'Saved')
        # admins may write to someone else's basket directly
        url = reverse('basket-detail', args=(somebody_basket_id,))
        self.response = self.put(url, status='Saved')
        self.response.assertStatusEqual(200)
        # try adding lines referencing someone else's basket to our own
        # lines endpoint.
        line_data = {
            "basket": "http://testserver/api/baskets/%s/" % somebody_basket_id,
            "line_reference": "234_345",
            "product": "http://testserver/api/products/1/",
            "stockrecord": "http://testserver/api/stockrecords/1/",
            "quantity": 3,
            "price_currency": "EUR",
            "price_excl_tax": "100.0",
            "price_incl_tax": "121.0"
        }
        zurl = reverse('basket-lines-list', args=(basket_id,))
        self.response = self.post(zurl, **line_data)
        # NOTE(review): admin gets 406 here rather than 403 — presumably the
        # mismatched basket reference is rejected as not acceptable; confirm.
        self.response.assertStatusEqual(406)
        # admins may delete someone else's basket
        self.response = self.delete(url)
        self.response.assertStatusEqual(204)
def test_basket_write_permissions_header_admin(self):
    "An admin user can change someone else's basket, when authenticating with a session header."
    with self.settings(OSCARAPI_BLOCK_ADMIN_API_ACCESS=False):
        # now try for authenticated user.
        self.hlogin('admin', 'admin', session_id='admin')
        self.response = self.get('api-basket', session_id='admin', authenticated=True)
        self.response.assertStatusEqual(200)
        # remember the id and url of our own basket and check ownership (user 1).
        basket_id = self.response['id']
        basket_url = self.response['url']
        owner_url = self.response['owner']
        self.assertIn(reverse('user-detail', args=(1,)), owner_url)
        self.response.assertValueEqual('status', 'Open')
        # change status to saved
        url = reverse('basket-detail', args=(basket_id,))
        self.response = self.put(url, status='Saved', session_id='admin', authenticated=True)
        self.response.assertStatusEqual(200)
        self.response.assertValueEqual('id', basket_id)
        self.response.assertValueEqual('status', 'Saved')
        # and back to open again
        self.response = self.put(url, status='Open', session_id='admin', authenticated=True)
        self.response.assertStatusEqual(200)
        self.response.assertValueEqual('id', basket_id)
        self.response.assertValueEqual('status', 'Open')
        # write a line to the basket
        line_data = {
            "basket": basket_url,
            "line_reference": "234_345",
            "product": "http://testserver/api/products/1/",
            "stockrecord": "http://testserver/api/stockrecords/1/",
            "quantity": 3,
            "price_currency": "EUR",
            "price_excl_tax": "100.0",
            "price_incl_tax": "121.0",
        }
        line_url = reverse('basket-lines-list', args=(basket_id,))
        self.response = self.post(line_url, session_id='admin', authenticated=True, **line_data)
        self.response.assertStatusEqual(201)
        # a user may delete their own basket
        self.response = self.delete(url, session_id='admin', authenticated=True)
        self.response.assertStatusEqual(204)
        # now let's start messing around: get a fresh basket first.
        self.response = self.get('api-basket', session_id='admin', authenticated=True)
        self.response.assertStatusEqual(200)
        basket_id = self.response['id']
        # create a basket for another user (user 3, 'somebody').
        b = Basket.objects.create(owner_id=3)
        self.assertEqual(str(b.owner), 'somebody')
        self.assertEqual(Basket.objects.count(), 2)
        somebody_basket_id = b.pk
        # remember the id and url of our own basket.
        basket_id = self.response['id']
        basket_url = self.response['url']
        url = reverse('basket-detail', args=(basket_id,))
        self.response.assertValueEqual('status', 'Open')
        # try to write to someone else's basket by sending the primary key
        # along; even for admins the id in the payload is ignored.
        self.response = self.put(url, status='Saved', session_id='admin', authenticated=True, id=somebody_basket_id)
        self.response.assertStatusEqual(200)
        self.response.assertValueEqual('id', basket_id, 'Primary key value can not be changed.')
        self.response.assertValueEqual('status', 'Saved')
        # admins may write to someone else's basket directly
        url = reverse('basket-detail', args=(somebody_basket_id,))
        self.response = self.put(url, status='Saved', session_id='admin', authenticated=True)
        self.response.assertStatusEqual(200)
        # try adding lines referencing someone else's basket to our own
        # lines endpoint.
        line_data = {
            "basket": "http://testserver/api/baskets/%s/" % somebody_basket_id,
            "line_reference": "234_345",
            "product": "http://testserver/api/products/1/",
            "stockrecord": "http://testserver/api/stockrecords/1/",
            "quantity": 3,
            "price_currency": "EUR",
            "price_excl_tax": "100.0",
            "price_incl_tax": "121.0"
        }
        zurl = reverse('basket-lines-list', args=(basket_id,))
        self.response = self.post(zurl, session_id='admin', authenticated=True, **line_data)
        # NOTE(review): admin gets 406 here rather than 403 — presumably the
        # mismatched basket reference is rejected as not acceptable; confirm.
        self.response.assertStatusEqual(406)
        # admins may delete someone else's basket
        self.response = self.delete(url, session_id='admin', authenticated=True)
        self.response.assertStatusEqual(204)
def test_add_product_anonymous(self):
    """An anonymous user can add a product to their basket."""
    product_url = "http://testserver/api/products/1/"
    self.response = self.post('api-basket-add-product', url=product_url, quantity=5)
    self.response.assertStatusEqual(200)
    # the new line must show up with the requested product and quantity
    self.response = self.get(self.response['lines'])
    lines = self.response.body
    self.assertEqual(len(lines), 1)
    first_line = lines[0]
    self.assertEqual(first_line['product'], product_url)
    self.assertEqual(first_line['quantity'], 5)
def test_add_product_authenticated(self):
    """An authenticated user can add a product to their basket."""
    self.login('nobody', 'nobody')
    product_url = "http://testserver/api/products/1/"
    self.response = self.post('api-basket-add-product', url=product_url, quantity=5)
    self.response.assertStatusEqual(200)
    # the new line must show up with the requested product and quantity
    self.response = self.get(self.response['lines'])
    lines = self.response.body
    self.assertEqual(len(lines), 1)
    first_line = lines[0]
    self.assertEqual(first_line['product'], product_url)
    self.assertEqual(first_line['quantity'], 5)
def test_basket_line_permissions(self):
    """A user's Basket lines can not be viewed by another user in any way (except admins).

    Covers the owner view, another regular user (404), the admin-only
    line-detail url (200 for staff, 403 for regular users).
    """
    self.login('nobody', 'nobody')
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    self.response = self.post('api-basket-add-product', url="http://testserver/api/products/1/", quantity=5)
    self.response = self.get(self.response['lines'])
    line0 = self.response.body[0]
    line0url = line0['url']
    # the owner can read their own line
    self.response = self.get(line0url)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('product', "http://testserver/api/products/1/")
    self.response.assertValueEqual('quantity', 5)
    # now let's try to cheat: another regular user must not see the line
    self.login('somebody', 'somebody')
    self.response = self.get(line0url)
    self.response.assertStatusEqual(404)
    # admin can cheat, but he uses a different url.
    # raw string: '\d' in a plain string literal is an invalid escape
    # sequence (DeprecationWarning, later SyntaxWarning).
    line0id = re.search(r'(?P<id>\d+)/$', line0url).group('id')
    admin_line0url = reverse('line-detail', args=(line0id,))
    with self.settings(OSCARAPI_BLOCK_ADMIN_API_ACCESS=False):
        self.login('admin', 'admin')
        self.response = self.get(admin_line0url)
        self.response.assertStatusEqual(200)
    # nobody can not cheat like admin
    self.login('somebody', 'somebody')
    self.response = self.get(admin_line0url)
    self.response.assertStatusEqual(403)
def test_basket_line_permissions_header(self):
    """A user's Basket lines can not be viewed by another user in any way (except admins), even with header authentication.

    Header-session variant of ``test_basket_line_permissions``.
    """
    self.hlogin('nobody', 'nobody', session_id='nobody')
    self.response = self.get('api-basket', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    self.response = self.post('api-basket-add-product', url="http://testserver/api/products/1/", quantity=5, session_id='nobody', authenticated=True)
    self.response = self.get(self.response['lines'], session_id='nobody', authenticated=True)
    line0 = self.response.body[0]
    line0url = line0['url']
    # the owner can read their own line
    self.response = self.get(line0url, session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('product', "http://testserver/api/products/1/")
    self.response.assertValueEqual('quantity', 5)
    # now let's try to cheat: another regular user must not see the line
    self.hlogin('somebody', 'somebody', session_id='somebody')
    self.response = self.get(line0url, session_id='somebody', authenticated=True)
    self.response.assertStatusEqual(404)
    # admin can cheat, but he uses a different url.
    # raw string: '\d' in a plain string literal is an invalid escape
    # sequence (DeprecationWarning, later SyntaxWarning).
    line0id = re.search(r'(?P<id>\d+)/$', line0url).group('id')
    admin_line0url = reverse('line-detail', args=(line0id,))
    with self.settings(OSCARAPI_BLOCK_ADMIN_API_ACCESS=False):
        self.hlogin('admin', 'admin', session_id='admin')
        self.response = self.get(admin_line0url, session_id='admin', authenticated=True)
        self.response.assertStatusEqual(200)
    # nobody can not cheat like admin (cookie login is fine here; we only
    # need a non-staff user)
    self.login('somebody', 'somebody')
    self.response = self.get(admin_line0url)
    self.response.assertStatusEqual(403)
def test_frozen_basket_can_not_be_accessed(self):
    """Once a basket has been frozen, its owner can no longer access it."""
    self.login('nobody', 'nobody')
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('status', 'Open')
    # freeze the basket through the detail endpoint
    basket_url = reverse('basket-detail', args=(self.response['id'],))
    self.response = self.put(basket_url, status='Frozen')
    self.response.assertValueEqual('status', 'Frozen')
    # a frozen basket is off limits, even for its owner
    self.response = self.get(basket_url)
    self.response.assertStatusEqual(403)
def test_frozen_basket_can_not_be_accessed_header(self):
    """Once a basket has been frozen, its owner can no longer access it, even with header authentication."""
    self.hlogin('nobody', 'nobody', session_id='nobody')
    self.response = self.get('api-basket', session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('status', 'Open')
    # freeze the basket through the detail endpoint
    basket_url = reverse('basket-detail', args=(self.response['id'],))
    self.response = self.put(basket_url, status='Frozen', session_id='nobody', authenticated=True)
    self.response.assertValueEqual('status', 'Frozen')
    # a frozen basket is off limits, even for its owner
    self.response = self.get(basket_url, session_id='nobody', authenticated=True)
    self.response.assertStatusEqual(403)
def test_header_login_does_not_cause_regular_login(self):
    "Prove that there is not a bug in the test client that logs a user in when doing hlogin."
    # log in via the session header only
    self.hlogin('nobody', 'nobody', session_id='nobody')
    with self.settings(DEBUG=True):
        # without the session header the login view responds 204 (no
        # authenticated user in the regular cookie session)
        self.response = self.get('api-login')
        self.response.assertStatusEqual(204)
        # with the session header the authenticated user is visible
        self.response = self.get('api-login', session_id='nobody', authenticated=True)
        self.response.assertStatusEqual(200)
        self.response.assertValueEqual('username', 'nobody')
def test_add_product_limit_basket(self):
    """Test if an anonymous user cannot add more than two products to his
    basket when amount of baskets is limited.

    Uses ``self.settings`` instead of assigning to the settings module
    directly: the original mutation was never restored and leaked into
    every test that ran afterwards.
    """
    with self.settings(OSCAR_MAX_BASKET_QUANTITY_THRESHOLD=2):
        self.response = self.post(
            'api-basket-add-product',
            url="http://testserver/api/products/1/",
            quantity=3)
        self.response.assertStatusEqual(406)
def test_total_prices_anonymous(self):
    """The totals calculated by the basket should be correct."""
    product_url = "http://testserver/api/products/1/"
    # 5 items at 10.00 each
    self.response = self.post('api-basket-add-product', url=product_url, quantity=5)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('total_incl_tax', '50.00')
def test_add_product_above_stock(self):
    """Test if an anonymous user cannot add more products to his
    basket when stock is not sufficient.
    """
    # 25 exceeds the fixture's available stock — TODO confirm the
    # stock level configured for product 1
    self.response = self.post(
        'api-basket-add-product',
        url="http://testserver/api/products/1/",
        quantity=25)
    self.response.assertStatusEqual(406)
def test_adjust_basket_line_quantity(self):
    """Test if we can update the quantity of a line."""
    # put 5 items of product 1 in the anonymous basket
    self.response = self.post(
        'api-basket-add-product',
        url="http://testserver/api/products/1/",
        quantity=5)
    self.response.assertStatusEqual(200)
    self.response = self.get('api-basket')
    self.response.assertStatusEqual(200)
    # Get the basket lines, and update the quantity to 4
    self.response = self.get(self.response['lines'])
    basket_line_url = self.response.data[0]['url']
    self.response = self.put(basket_line_url, quantity=4)
    self.response.assertStatusEqual(200)
    # re-fetch the line to see if the change was persisted
    self.response = self.get(basket_line_url)
    self.response.assertStatusEqual(200)
    self.response.assertValueEqual('quantity', 4)
| |
# -*- coding: utf-8 -*-
"""
pyvisa-sim.component
~~~~~~~~~~~~~~~~~~~~
Base classes for devices parts.
:copyright: 2014 by PyVISA-sim Authors, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
import stringparser
from .common import logger
# Sentinel returned when a query should produce no response at all.
NoResponse = object()


def to_bytes(val):
    """Encode a text message to bytes, translating escaped CR/LF.

    The ``NoResponse`` sentinel is passed through untouched so callers
    can keep using identity checks on it.
    """
    if val is NoResponse:
        return val
    return val.replace("\\r", "\r").replace("\\n", "\n").encode()
class Property(object):
    """A device property with optional type, range and validity specs."""

    def __init__(self, name, value, specs):
        """
        :param name: name of the property
        :param value: default value (as string)
        :param specs: specification dictionary; may contain the keys
            "type", "min", "max" and "valid"
        :return:
        """
        # Resolve a textual type name ("float"/"int"/"str") into the
        # corresponding constructor, updating the specs in place.
        spec_type = specs.get("type", None)
        if spec_type:
            type_map = {"float": float, "int": int, "str": str}
            if spec_type in type_map:
                spec_type = specs["type"] = type_map[spec_type]

        # Convert the declared bounds with the resolved type.
        for bound in ("min", "max"):
            if bound in specs:
                specs[bound] = spec_type(specs[bound])

        # Convert the set of explicitly allowed values as well.
        if "valid" in specs:
            specs["valid"] = set(spec_type(v) for v in specs["valid"])

        self.name = name
        self.specs = specs
        self._value = None
        self.init_value(value)

    def init_value(self, string_value):
        """Initialize the value held by the Property."""
        self.set_value(string_value)

    def get_value(self):
        """Return the value currently stored by the Property."""
        return self._value

    def set_value(self, string_value):
        """Validate *string_value* against the specs and store it."""
        self._value = self.validate_value(string_value)

    def validate_value(self, string_value):
        """Convert a value and check it against the Property specs.

        Raises ValueError when any constraint is violated.
        """
        specs = self.specs
        value = specs["type"](string_value) if "type" in specs else string_value
        if "min" in specs and value < specs["min"]:
            raise ValueError
        if "max" in specs and value > specs["max"]:
            raise ValueError
        if "valid" in specs and value not in specs["valid"]:
            raise ValueError
        return value
class Component(object):
    """A component of a device."""

    def __init__(self):
        #: Fixed query/response pairs accepted by the device.
        #: :type: dict[bytes, bytes]
        self._dialogues = {}

        #: Named properties (value, type, validator) of the device.
        #: :type: dict[str, Property]
        self._properties = {}

        #: Getter queries: maps query -> (property_name, response template).
        #: :type: dict[bytes, (str, str)]
        self._getters = {}

        #: Setter entries:
        #: (property_name, string parser query, response, error response).
        #: :type: list[(str, stringparser.Parser, bytes, bytes)]
        self._setters = []

    def add_dialogue(self, query, response):
        """Register a fixed query/response pair on this device.

        :param query: query string
        :param response: response string
        """
        self._dialogues[to_bytes(query)] = to_bytes(response)

    def add_property(self, name, default_value, getter_pair, setter_triplet, specs):
        """Register a property together with its getter/setter commands.

        :param name: property name
        :param default_value: default value as string
        :param getter_pair: (query, response)
        :param setter_triplet: (query, response, error)
        :param specs: specification of the Property
        """
        self._properties[name] = Property(name, default_value, specs)

        if getter_pair:
            getter_query, getter_response = getter_pair
            self._getters[to_bytes(getter_query)] = name, getter_response

        if setter_triplet:
            setter_query, ok_response, err_response = setter_triplet
            self._setters.append(
                (name,
                 stringparser.Parser(setter_query),
                 to_bytes(ok_response),
                 to_bytes(err_response))
            )

    def match(self, query):
        """Try to find a match for a query in the instrument commands."""
        raise NotImplementedError()

    def _match_dialog(self, query, dialogues=None):
        """Tries to match in dialogues

        :param query: message tuple
        :type query: Tuple[bytes]
        :return: response if found or None
        :rtype: Tuple[bytes] | None
        """
        if dialogues is None:
            dialogues = self._dialogues

        try:
            response = dialogues[query]
        except KeyError:
            return None
        logger.debug("Found response in queries: %s" % repr(response))
        return response

    def _match_getters(self, query, getters=None):
        """Tries to match in getters

        :param query: message tuple
        :type query: Tuple[bytes]
        :return: response if found or None
        :rtype: Tuple[bytes] | None
        """
        if getters is None:
            getters = self._getters

        try:
            name, response = getters[query]
        except KeyError:
            return None
        logger.debug("Found response in getter of %s" % name)
        # fill the response template with the property's current value
        filled = response.format(self._properties[name].get_value())
        return filled.encode("utf-8")

    def _match_setters(self, query):
        """Tries to match in setters

        :param query: message tuple
        :type query: Tuple[bytes]
        :return: response if found or None
        :rtype: Tuple[bytes] | None
        """
        text = query.decode("utf-8")
        for prop_name, parser, ok_response, err_response in self._setters:
            try:
                value = parser(text)
            except ValueError:
                # this setter's pattern does not match; try the next one
                continue
            logger.debug("Found response in setter of %s" % prop_name)
            try:
                self._properties[prop_name].set_value(value)
            except ValueError:
                # invalid value: use the per-command error if provided,
                # otherwise fall back to the device-level error response
                if isinstance(err_response, bytes):
                    return err_response
                return self.error_response("command_error")
            return ok_response

        return None
| |
"""Create a mosaic plot from a contingency table.
It allows to visualize multivariate categorical data in a rigorous
and informative way.
see the docstring of the mosaic function for more information.
"""
# Author: Enrico Giampieri - 21 Jan 2013
from __future__ import division
from statsmodels.compat.python import (iteritems, iterkeys, lrange, string_types, lzip,
itervalues, zip, range)
import numpy as np
from statsmodels.compat.collections import OrderedDict
from itertools import product
from numpy import iterable, r_, cumsum, array
from statsmodels.graphics import utils
from pandas import DataFrame
__all__ = ["mosaic"]
def _normalize_split(proportion):
"""
return a list of proportions of the available space given the division
if only a number is given, it will assume a split in two pieces
"""
if not iterable(proportion):
if proportion == 0:
proportion = array([0.0, 1.0])
elif proportion >= 1:
proportion = array([1.0, 0.0])
elif proportion < 0:
raise ValueError("proportions should be positive,"
"given value: {}".format(proportion))
else:
proportion = array([proportion, 1.0 - proportion])
proportion = np.asarray(proportion, dtype=float)
if np.any(proportion < 0):
raise ValueError("proportions should be positive,"
"given value: {}".format(proportion))
if np.allclose(proportion, 0):
raise ValueError("at least one proportion should be"
"greater than zero".format(proportion))
# ok, data are meaningful, so go on
if len(proportion) < 2:
return array([0.0, 1.0])
left = r_[0, cumsum(proportion)]
left /= left[-1] * 1.0
return left
def _split_rect(x, y, width, height, proportion, horizontal=True, gap=0.05):
    """
    Split the given rectangle in n segments whose proportion is specified
    along the given axis if a gap is inserted, they will be separated by a
    certain amount of space, retaining the relative proportion between them
    a gap of 1 correspond to a plot that is half void and the remaining half
    space is proportionally divided among the pieces.
    """
    x, y, w, h = float(x), float(y), float(width), float(height)
    if (w < 0) or (h < 0):
        # Bug fix: the second placeholder was written "h=()" so the height
        # never reached the message (and .format silently dropped it);
        # also add the missing space after "than".
        raise ValueError("dimension of the square less than "
                         "zero w={} h={}".format(w, h))
    proportions = _normalize_split(proportion)

    # extract the starting point and the dimension of each subdivision
    # in respect to the unit square
    starting = proportions[:-1]
    amplitude = proportions[1:] - starting

    # how much each extrema is going to be displaced due to gaps
    starting += gap * np.arange(len(proportions) - 1)
    # how much the squares plus the gaps are extended
    extension = starting[-1] + amplitude[-1] - starting[0]
    # normalize everything for fit again in the original dimension
    starting /= extension
    amplitude /= extension
    # bring everything to the original square
    starting = (x if horizontal else y) + starting * (w if horizontal else h)
    amplitude = amplitude * (w if horizontal else h)
    # create each 4-tuple (x, y, width, height) for each new block
    results = [(s, y, a, h) if horizontal else (x, s, w, a)
               for s, a in zip(starting, amplitude)]
    return results
def _reduce_dict(count_dict, partial_key):
"""
Make partial sum on a counter dict.
Given a match for the beginning of the category, it will sum each value.
"""
L = len(partial_key)
count = sum(v for k, v in iteritems(count_dict) if k[:L] == partial_key)
return count
def _key_splitting(rect_dict, keys, values, key_subset, horizontal, gap):
    """
    Given a dictionary where each entry is a rectangle, a list of key and
    value (count of elements in each category), split each rectangle whose
    key starts with the tuple key_subset according to the given values.
    The remaining entries are returned without modification.
    """
    result = OrderedDict()
    prefix_len = len(key_subset)
    for name, rect in rect_dict.items():
        if name[:prefix_len] != key_subset:
            # not under the requested subtree: keep as-is
            result[name] = rect
            continue
        # split the rectangle according to the given counts
        x, y, w, h = rect
        pieces = _split_rect(x, y, w, h, values, horizontal, gap)
        for key, piece in zip(keys, pieces):
            result[name + (key,)] = piece
    return result
def _tuplify(obj):
    """Convert an object into a tuple of strings (even when it is not
    iterable, like a single integer number, but keep strings whole).
    """
    if np.iterable(obj) and not isinstance(obj, string_types):
        return tuple(str(element) for element in obj)
    return (str(obj),)
def _categories_level(keys):
    """Use an OrderedDict as a simple ordered set to return the distinct
    values of each category level, preserving insertion order:
    [[key_1_level_1, key_2_level_1], [key_1_level_2, key_2_level_2]]
    """
    levels = []
    for level_values in zip(*keys):
        # dict keys deduplicate while keeping first-seen order
        seen = OrderedDict((value, None) for value in _tuplify(level_values))
        levels.append(list(seen))
    return levels
def _hierarchical_split(count_dict, horizontal=True, gap=0.05):
    """
    Split a square in a hierarchical way given a contingency table.

    Hierarchically split the unit square in alternate directions
    in proportion to the subdivision contained in the contingency table
    count_dict. This is the function that actually perform the tiling
    for the creation of the mosaic plot. If the gap array has been specified
    it will insert a corresponding amount of space (proportional to the
    unit length), while retaining the proportionality of the tiles.

    Parameters
    ----------
    count_dict : dict
        Dictionary containing the contingency table.
        Each category should contain a non-negative number
        with a tuple as index. It expects that all the combination
        of keys to be represented; if that is not true, will
        automatically consider the missing values as 0
    horizontal : bool
        The starting direction of the split (by default along
        the horizontal axis)
    gap : float or array of floats
        The list of gaps to be applied on each subdivision.
        If the length of the given array is less of the number
        of subcategories (or if it's a single number) it will extend
        it with exponentially decreasing gaps

    Returns
    ----------
    base_rect : dict
        A dictionary containing the result of the split.
        To each key is associated a 4-tuple of coordinates
        that are required to create the corresponding rectangle:
            0 - x position of the lower left corner
            1 - y position of the lower left corner
            2 - width of the rectangle
            3 - height of the rectangle
    """
    # this is the unit square that we are going to divide
    base_rect = OrderedDict([(tuple(), (0, 0, 1, 1))])
    # get the list of each possible value for each level
    categories_levels = _categories_level(list(iterkeys(count_dict)))
    L = len(categories_levels)

    # recreate the gaps vector starting from an int
    if not np.iterable(gap):
        gap = [gap / 1.5 ** idx for idx in range(L)]
    # extend if it's too short
    if len(gap) < L:
        last = gap[-1]
        # Bug fix: this was ``list(*gap)``, which unpacks the gap sequence
        # as positional arguments to list() and raises a TypeError whenever
        # this branch runs; ``list(gap)`` is the intended copy.
        gap = list(gap) + [last / 1.5 ** idx for idx in range(L)]
    # trim if it's too long
    gap = gap[:L]
    # put the count dictionary in order for the keys
    # this will allow some code simplification
    count_ordered = OrderedDict([(k, count_dict[k])
                                 for k in list(product(*categories_levels))])
    for cat_idx, cat_enum in enumerate(categories_levels):
        # get the partial key up to the actual level
        base_keys = list(product(*categories_levels[:cat_idx]))
        for key in base_keys:
            # for each partial and each value calculate how many
            # observation we have in the counting dictionary
            part_count = [_reduce_dict(count_ordered, key + (partial,))
                          for partial in cat_enum]
            # reduce the gap for subsequents levels
            new_gap = gap[cat_idx]
            # split the given subkeys in the rectangle dictionary
            base_rect = _key_splitting(base_rect, cat_enum, part_count, key,
                                       horizontal, new_gap)
        # alternate the split direction at each level
        horizontal = not horizontal
    return base_rect
def _single_hsv_to_rgb(hsv):
    """Transform a single color from the hsv space to the rgb."""
    from matplotlib.colors import hsv_to_rgb

    # hsv_to_rgb expects an image-shaped array, so wrap and unwrap it
    as_image = array(hsv).reshape(1, 1, 3)
    return hsv_to_rgb(as_image).reshape(3)
def _create_default_properties(data):
    """Create the default properties of the mosaic given the data.

    first it will varies the color hue (first category) then the color
    saturation (second category) and then the color value
    (third category). If a fourth category is found, it will put
    decoration on the rectangle. Doesn't manage more than four
    level of categories
    """
    categories_levels = _categories_level(list(iterkeys(data)))
    Nlevels = len(categories_levels)
    # first level, the hue
    L = len(categories_levels[0])
    # hue = np.linspace(1.0, 0.0, L+1)[:-1]
    hue = np.linspace(0.0, 1.0, L + 2)[:-2]
    # second level, the saturation
    L = len(categories_levels[1]) if Nlevels > 1 else 1
    saturation = np.linspace(0.5, 1.0, L + 1)[:-1]
    # third level, the value
    L = len(categories_levels[2]) if Nlevels > 2 else 1
    value = np.linspace(0.5, 1.0, L + 1)[:-1]
    # fourth level, the hatch
    L = len(categories_levels[3]) if Nlevels > 3 else 1
    hatch = ['', '/', '-', '|', '+'][:L + 1]
    # convert in list and merge with the levels
    hue = lzip(list(hue), categories_levels[0])
    saturation = lzip(list(saturation),
                      categories_levels[1] if Nlevels > 1 else [''])
    value = lzip(list(value),
                 categories_levels[2] if Nlevels > 2 else [''])
    hatch = lzip(list(hatch),
                 categories_levels[3] if Nlevels > 3 else [''])
    # create the properties dictionary
    properties = {}
    for h, s, v, t in product(hue, saturation, value, hatch):
        hv, hn = h
        sv, sn = s
        vv, vn = v
        tv, tn = t
        # the key is built only from the level names that are present
        # (missing levels were filled with the empty string above)
        level = (hn,) + ((sn,) if sn else tuple())
        level = level + ((vn,) if vn else tuple())
        level = level + ((tn,) if tn else tuple())
        hsv = array([hv, sv, vv])
        prop = {'color': _single_hsv_to_rgb(hsv), 'hatch': tv, 'lw': 0}
        properties[level] = prop
    return properties
def _normalize_data(data, index):
    """normalize the data to a dict with tuples of strings as keys

    right now it works with:
        0 - dictionary (or equivalent mappable)
        1 - pandas.Series with simple or hierarchical indexes
        2 - numpy.ndarrays
        3 - everything that can be converted to a numpy array
        4 - pandas.DataFrame (via the _normalize_dataframe function)
    """
    # if data is a dataframe we need to take a completely new road
    # before coming back here. Use the hasattr to avoid importing
    # pandas explicitly
    if hasattr(data, 'pivot') and hasattr(data, 'groupby'):
        data = _normalize_dataframe(data, index)
        # the dataframe path already applied the column ordering
        index = None
    # can it be used as a dictionary?
    try:
        items = list(iteritems(data))
    except AttributeError:
        # ok, I cannot use the data as a dictionary
        # Try to convert it to a numpy array, or die trying
        data = np.asarray(data)
        temp = OrderedDict()
        for idx in np.ndindex(data.shape):
            name = tuple(i for i in idx)
            temp[name] = data[idx]
        data = temp
        items = list(iteritems(data))
    # make all the keys a tuple, even if simple numbers
    data = OrderedDict([_tuplify(k), v] for k, v in items)
    categories_levels = _categories_level(list(iterkeys(data)))
    # fill the void in the counting dictionary: every combination of
    # levels gets an entry, missing ones default to 0
    indexes = product(*categories_levels)
    contingency = OrderedDict([(k, data.get(k, 0)) for k in indexes])
    data = contingency
    # reorder the keys order according to the one specified by the user
    # or if the index is None convert it into a simple list
    # right now it doesn't do any check, but can be modified in the future
    index = lrange(len(categories_levels)) if index is None else index
    contingency = OrderedDict()
    for key, value in iteritems(data):
        new_key = tuple(key[i] for i in index)
        contingency[new_key] = value
    data = contingency
    return data
def _normalize_dataframe(dataframe, index):
"""Take a pandas DataFrame and count the element present in the
given columns, return a hierarchical index on those columns
"""
#groupby the given keys, extract the same columns and count the element
# then collapse them with a mean
data = dataframe[index].dropna()
grouped = data.groupby(index, sort=False)
counted = grouped[index].count()
averaged = counted.mean(axis=1)
return averaged
def _statistical_coloring(data):
    """evaluate colors from the independence properties of the matrix
    It will encounter problem if one category has all zeros
    """
    data = _normalize_data(data, None)
    categories_levels = _categories_level(list(iterkeys(data)))
    Nlevels = len(categories_levels)
    total = 1.0 * sum(v for v in itervalues(data))
    # count the proportion of observation
    # for each level that has the given name
    # at each level
    levels_count = []
    for level_idx in range(Nlevels):
        proportion = {}
        for level in categories_levels[level_idx]:
            proportion[level] = 0.0
            for key, value in iteritems(data):
                if level == key[level_idx]:
                    proportion[level] += value
            proportion[level] /= total
        levels_count.append(proportion)
    # for each key I obtain the expected value
    # and it's standard deviation from a binomial distribution
    # under the hypothesis of independence
    expected = {}
    for key, value in iteritems(data):
        # expected proportion is the product of the marginal proportions
        base = 1.0
        for i, k in enumerate(key):
            base *= levels_count[i][k]
        expected[key] = base * total, np.sqrt(total * base * (1.0 - base))
    # now we have the standard deviation of distance from the
    # expected value for each tile. We create the colors from this
    sigmas = dict((k, (data[k] - m) / s) for k, (m, s) in iteritems(expected))
    props = {}
    for key, dev in iteritems(sigmas):
        # red saturates with positive deviation, blue with negative
        red = 0.0 if dev < 0 else (dev / (1 + dev))
        blue = 0.0 if dev > 0 else (dev / (-1 + dev))
        green = (1.0 - red - blue) / 2.0
        # hatch marks tiles beyond two standard deviations
        hatch = 'x' if dev > 2 else 'o' if dev < -2 else ''
        props[key] = {'color': [red, green, blue], 'hatch': hatch}
    return props
def _create_labels(rects, horizontal, ax, rotation):
    """find the position of the label for each value of each category

    right now it supports only up to the four categories

    ax: the axis on which the label should be applied
    rotation: the rotation list for each side

    NOTE(review): the returned ``labels`` dict is never populated below,
    so callers always receive an empty dict — confirm whether this is
    intentional before relying on the return value.
    """
    categories = _categories_level(list(iterkeys(rects)))
    if len(categories) > 4:
        msg = ("maximum of 4 level supported for axes labeling..and 4"
               "is alreay a lot of level, are you sure you need them all?")
        raise NotImplementedError(msg)
    labels = {}
    #keep it fixed as will be used a lot of times
    items = list(iteritems(rects))
    vertical = not horizontal
    #get the axis ticks and labels locator to put the correct values!
    ax2 = ax.twinx()
    ax3 = ax.twiny()
    #this is the order of execution for horizontal disposition
    ticks_pos = [ax.set_xticks, ax.set_yticks, ax3.set_xticks, ax2.set_yticks]
    ticks_lab = [ax.set_xticklabels, ax.set_yticklabels,
                 ax3.set_xticklabels, ax2.set_yticklabels]
    #for the vertical one, rotate it by one
    if vertical:
        ticks_pos = ticks_pos[1:] + ticks_pos[:1]
        ticks_lab = ticks_lab[1:] + ticks_lab[:1]
    #clean them
    for pos, lab in zip(ticks_pos, ticks_lab):
        pos([])
        lab([])
    #for each level, for each value in the level, take the mean of all
    #the sublevel that correspond to that partial key
    for level_idx, level in enumerate(categories):
        #this dictionary keep the labels only for this level
        level_ticks = dict()
        for value in level:
            #to which level it should refer to get the preceding
            #values of labels? it's rather a tricky question...
            #this is dependent on the side. It's a very crude management
            #but I couldn't think a more general way...
            if horizontal:
                if level_idx == 3:
                    index_select = [-1, -1, -1]
                else:
                    index_select = [+0, -1, -1]
            else:
                if level_idx == 3:
                    index_select = [+0, -1, +0]
                else:
                    index_select = [-1, -1, -1]
            #now I create the base key name and append the current value
            #It will search on all the rects to find the corresponding one
            #and use them to evaluate the mean position
            basekey = tuple(categories[i][index_select[i]]
                            for i in range(level_idx))
            basekey = basekey + (value,)
            subset = dict((k, v) for k, v in items
                          if basekey == k[:level_idx + 1])
            #now I extract the center of all the tiles and make a weighted
            #mean of all these center on the area of the tile
            #this should give me the (more or less) correct position
            #of the center of the category
            vals = list(itervalues(subset))
            W = sum(w * h for (x, y, w, h) in vals)
            x_lab = sum((x + w / 2.0) * w * h / W for (x, y, w, h) in vals)
            y_lab = sum((y + h / 2.0) * w * h / W for (x, y, w, h) in vals)
            #now base on the ordering, select which position to keep
            #needs to be written in a more general form of 4 level are enough?
            #should give also the horizontal and vertical alignment
            side = (level_idx + vertical) % 4
            level_ticks[value] = y_lab if side % 2 else x_lab
        #now we add the labels of this level to the correct axis
        ticks_pos[level_idx](list(itervalues(level_ticks)))
        ticks_lab[level_idx](list(iterkeys(level_ticks)),
                             rotation=rotation[level_idx])
    return labels
def mosaic(data, index=None, ax=None, horizontal=True, gap=0.005,
           properties=lambda key: None, labelizer=None,
           title='', statistic=False, axes_label=True,
           label_rotation=0.0):
    """Create a mosaic plot from a contingency table.

    It allows to visualize multivariate categorical data in a rigorous
    and informative way.

    Parameters
    ----------
    data : dict, pandas.Series, np.ndarray, pandas.DataFrame
        The contingency table that contains the data.
        Each category should contain a non-negative number
        with a tuple as index. It expects that all the combination
        of keys to be represented; if that is not true, will
        automatically consider the missing values as 0. The order
        of the keys will be the same as the one of insertion.
        If a dict of a Series (or any other dict like object)
        is used, it will take the keys as labels. If a
        np.ndarray is provided, it will generate a simple
        numerical labels.
    index: list, optional
        Gives the preferred order for the category ordering. If not specified
        will default to the given order. It doesn't support named indexes
        for hierarchical Series. If a DataFrame is provided, it expects
        a list with the name of the columns.
    ax : matplotlib.Axes, optional
        The graph where display the mosaic. If not given, will
        create a new figure
    horizontal : bool, optional (default True)
        The starting direction of the split (by default along
        the horizontal axis)
    gap : float or array of floats
        The list of gaps to be applied on each subdivision.
        If the length of the given array is less of the number
        of subcategories (or if it's a single number) it will extend
        it with exponentially decreasing gaps
    labelizer : function (key) -> string, optional
        A function that generate the text to display at the center of
        each tile base on the key of that tile
    properties : function (key) -> dict, optional
        A function that for each tile in the mosaic take the key
        of the tile and returns the dictionary of properties
        of the generated Rectangle, like color, hatch or similar.
        A default properties set will be provided for the keys whose
        color has not been defined, and will use color variation to help
        visually separates the various categories. It should return None
        to indicate that it should use the default property for the tile.
        A dictionary of the properties for each key can be passed,
        and it will be internally converted to the correct function
    statistic: bool, optional (default False)
        if true will use a crude statistical model to give colors to the plot.
        If the tile has a content that is more than 2 standard deviation
        from the expected value under independence hypothesis, it will
        go from green to red (for positive deviations, blue otherwise) and
        will acquire an hatching when crosses the 3 sigma.
    title: string, optional
        The title of the axis
    axes_label: boolean, optional
        Show the name of each value of each category
        on the axis (default) or hide them.
    label_rotation: float or list of float
        the rotation of the axis label (if present). If a list is given
        each axis can have a different rotation

    Returns
    ----------
    fig : matplotlib.Figure
        The generated figure
    rects : dict
        A dictionary that has the same keys of the original
        dataset, that holds a reference to the coordinates of the
        tile and the Rectangle that represent it

    See Also
    ----------
    A Brief History of the Mosaic Display
        Michael Friendly, York University, Psychology Department
        Journal of Computational and Graphical Statistics, 2001

    Mosaic Displays for Loglinear Models.
        Michael Friendly, York University, Psychology Department
        Proceedings of the Statistical Graphics Section, 1992, 61-68.

    Mosaic displays for multi-way contingency tables.
        Michael Friendly, York University, Psychology Department
        Journal of the american statistical association
        March 1994, Vol. 89, No. 425, Theory and Methods

    Examples
    ----------
    The most simple use case is to take a dictionary and plot the result

    >>> data = {'a': 10, 'b': 15, 'c': 16}
    >>> mosaic(data, title='basic dictionary')
    >>> pylab.show()

    A more useful example is given by a dictionary with multiple indices.
    In this case we use a wider gap to a better visual separation of the
    resulting plot

    >>> data = {('a', 'b'): 1, ('a', 'c'): 2, ('d', 'b'): 3, ('d', 'c'): 4}
    >>> mosaic(data, gap=0.05, title='complete dictionary')
    >>> pylab.show()

    The same data can be given as a simple or hierarchical indexed Series

    >>> rand = np.random.random
    >>> from itertools import product
    >>>
    >>> tuples = list(product(['bar', 'baz', 'foo', 'qux'], ['one', 'two']))
    >>> index = pd.MultiIndex.from_tuples(tuples, names=['first', 'second'])
    >>> data = pd.Series(rand(8), index=index)
    >>> mosaic(data, title='hierarchical index series')
    >>> pylab.show()

    The third accepted data structure is the np array, for which a
    very simple index will be created.

    >>> rand = np.random.random
    >>> data = 1+rand((2,2))
    >>> mosaic(data, title='random non-labeled array')
    >>> pylab.show()

    If you need to modify the labeling and the coloring you can give
    a function to create the labels and one with the graphical properties
    starting from the key tuple

    >>> data = {'a': 10, 'b': 15, 'c': 16}
    >>> props = lambda key: {'color': 'r' if 'a' in key else 'gray'}
    >>> labelizer = lambda k: {('a',): 'first', ('b',): 'second',
                               ('c',): 'third'}[k]
    >>> mosaic(data, title='colored dictionary',
                properties=props, labelizer=labelizer)
    >>> pylab.show()

    Using a DataFrame as source, specifying the name of the columns of interest

    >>> gender = ['male', 'male', 'male', 'female', 'female', 'female']
    >>> pet = ['cat', 'dog', 'dog', 'cat', 'dog', 'cat']
    >>> data = pandas.DataFrame({'gender': gender, 'pet': pet})
    >>> mosaic(data, ['pet', 'gender'])
    >>> pylab.show()
    """
    if isinstance(data, DataFrame) and index is None:
        raise ValueError("You must pass an index if data is a DataFrame."
                         " See examples.")
    from pylab import Rectangle
    fig, ax = utils.create_mpl_ax(ax)
    # normalize the data to a dict with tuple of strings as keys
    data = _normalize_data(data, index)
    # split the graph into different areas
    rects = _hierarchical_split(data, horizontal=horizontal, gap=gap)
    # if there is no specified way to create the labels
    # create a default one
    if labelizer is None:
        labelizer = lambda k: "\n".join(k)
    if statistic:
        default_props = _statistical_coloring(data)
    else:
        default_props = _create_default_properties(data)
    # a plain dict of properties is wrapped into a lookup function
    if isinstance(properties, dict):
        color_dict = properties
        properties = lambda key: color_dict.get(key, None)
    for k, v in iteritems(rects):
        # create each rectangle and put a label on it
        x, y, w, h = v
        conf = properties(k)
        props = conf if conf else default_props[k]
        text = labelizer(k)
        Rect = Rectangle((x, y), w, h, label=text, **props)
        ax.add_patch(Rect)
        ax.text(x + w / 2, y + h / 2, text, ha='center',
                va='center', size='smaller')
    # creating the labels on the axis
    # or clearing them
    if axes_label:
        if np.iterable(label_rotation):
            rotation = label_rotation
        else:
            rotation = [label_rotation] * 4
        labels = _create_labels(rects, horizontal, ax, rotation)
    else:
        ax.set_xticks([])
        ax.set_xticklabels([])
        ax.set_yticks([])
        ax.set_yticklabels([])
    ax.set_title(title)
    return fig, rects
| |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import netaddr
import testtools
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest.common.utils.linux import remote_client
from tempest import config
from tempest import test
CONF = config.CONF
class ServersTestJSON(base.BaseV2ComputeTest):
disk_config = 'AUTO'
@classmethod
def resource_setup(cls):
    # Boot one server (with metadata, access IPs and a personality
    # file) that is shared by every test in this class.
    cls.prepare_instance_network()
    super(ServersTestJSON, cls).resource_setup()
    cls.meta = {'hello': 'world'}
    cls.accessIPv4 = '1.1.1.1'
    # deliberately non-canonical IPv6: a test below checks the API
    # returns the RFC 5952 canonicalized form
    cls.accessIPv6 = '0000:0000:0000:0000:0000:babe:220.12.22.2'
    cls.name = data_utils.rand_name('server')
    file_contents = 'This is a test file.'
    personality = [{'path': '/test.txt',
                    'contents': base64.b64encode(file_contents)}]
    cls.client = cls.servers_client
    cls.network_client = cls.os.network_client
    cli_resp = cls.create_test_server(name=cls.name,
                                      meta=cls.meta,
                                      accessIPv4=cls.accessIPv4,
                                      accessIPv6=cls.accessIPv6,
                                      personality=personality,
                                      disk_config=cls.disk_config)
    cls.resp, cls.server_initial = cli_resp
    # keep the admin password around for the SSH-based tests
    cls.password = cls.server_initial['adminPass']
    cls.client.wait_for_server_status(cls.server_initial['id'], 'ACTIVE')
    resp, cls.server = cls.client.get_server(cls.server_initial['id'])
@test.attr(type='smoke')
def test_verify_server_details(self):
    # Verify the specified server attributes are set correctly
    self.assertEqual(self.accessIPv4, self.server['accessIPv4'])
    # NOTE(maurosr): See http://tools.ietf.org/html/rfc5952 (section 4)
    # Here we compare directly with the canonicalized format.
    self.assertEqual(self.server['accessIPv6'],
                     str(netaddr.IPAddress(self.accessIPv6)))
    self.assertEqual(self.name, self.server['name'])
    self.assertEqual(self.image_ref, self.server['image']['id'])
    self.assertEqual(self.flavor_ref, self.server['flavor']['id'])
    self.assertEqual(self.meta, self.server['metadata'])
@test.attr(type='smoke')
def test_list_servers(self):
    # The created server should be in the list of all servers
    resp, body = self.client.list_servers()
    matching = [srv for srv in body['servers']
                if srv['id'] == self.server['id']]
    self.assertTrue(any(matching))
@test.attr(type='smoke')
def test_list_servers_with_detail(self):
    # The created server should be in the detailed list of all servers
    resp, body = self.client.list_servers_with_detail()
    matching = [srv for srv in body['servers']
                if srv['id'] == self.server['id']]
    self.assertTrue(any(matching))
@testtools.skipUnless(CONF.compute.run_ssh,
                      'Instance validation tests are disabled.')
@test.attr(type='gate')
def test_verify_created_server_vcpus(self):
    # Verify that the number of vcpus reported by the instance matches
    # the amount stated by the flavor
    resp, flavor = self.flavors_client.get_flavor_details(self.flavor_ref)
    # SSH into the guest and count its CPUs directly
    linux_client = remote_client.RemoteClient(self.server, self.ssh_user,
                                              self.password)
    self.assertEqual(flavor['vcpus'], linux_client.get_number_of_vcpus())
    @testtools.skipUnless(CONF.compute.run_ssh,
                          'Instance validation tests are disabled.')
    @test.attr(type='gate')
    def test_host_name_is_same_as_server_name(self):
        # Verify the instance host name is the same as the server name
        linux_client = remote_client.RemoteClient(self.server, self.ssh_user,
                                                  self.password)
        self.assertTrue(linux_client.hostname_equals_servername(self.name))
    @test.attr(type='gate')
    def test_create_server_with_scheduler_hint_group(self):
        # Create a server with the scheduler hint "group".
        name = data_utils.rand_name('server_group')
        policies = ['affinity']
        resp, body = self.client.create_server_group(name=name,
                                                     policies=policies)
        self.assertEqual(200, resp.status)
        group_id = body['id']
        self.addCleanup(self.client.delete_server_group, group_id)
        # Boot a server scheduled into the group created above.
        hints = {'group': group_id}
        resp, server = self.create_test_server(sched_hints=hints,
                                               wait_until='ACTIVE')
        self.assertEqual(202, resp.status)
        # Check a server is in the group
        resp, server_group = self.client.get_server_group(group_id)
        self.assertEqual(200, resp.status)
        self.assertIn(server['id'], server_group['members'])
    @testtools.skipUnless(CONF.service_available.neutron,
                          'Neutron service must be available.')
    def test_verify_multiple_nics_order(self):
        # Verify that the networks order given at the server creation is
        # preserved within the server.
        name_net1 = data_utils.rand_name(self.__class__.__name__)
        net1 = self.network_client.create_network(name=name_net1)
        self.addCleanup(self.network_client.delete_network,
                        net1['network']['id'])
        name_net2 = data_utils.rand_name(self.__class__.__name__)
        net2 = self.network_client.create_network(name=name_net2)
        self.addCleanup(self.network_client.delete_network,
                        net2['network']['id'])
        # One subnet per network, with distinct CIDRs so the addresses
        # below unambiguously identify which network each NIC landed on.
        subnet1 = self.network_client.create_subnet(
            network_id=net1['network']['id'],
            cidr='19.80.0.0/24',
            ip_version=4)
        self.addCleanup(self.network_client.delete_subnet,
                        subnet1['subnet']['id'])
        subnet2 = self.network_client.create_subnet(
            network_id=net2['network']['id'],
            cidr='19.86.0.0/24',
            ip_version=4)
        self.addCleanup(self.network_client.delete_subnet,
                        subnet2['subnet']['id'])
        networks = [{'uuid': net1['network']['id']},
                    {'uuid': net2['network']['id']}]
        _, server_multi_nics = self.create_test_server(
            networks=networks, wait_until='ACTIVE')
        # Cleanup server; this is needed in the test case because with the LIFO
        # nature of the cleanups, if we don't delete the server first, the port
        # will still be part of the subnet and we'll get a 409 from Neutron
        # when trying to delete the subnet. The tear down in the base class
        # will try to delete the server and get a 404 but it's ignored so
        # we're OK.
        def cleanup_server():
            self.client.delete_server(server_multi_nics['id'])
            self.client.wait_for_server_termination(server_multi_nics['id'])
        self.addCleanup(cleanup_server)
        _, addresses = self.client.list_addresses(server_multi_nics['id'])
        # First DHCP address on each subnet (.1 is the gateway, so .2).
        expected_addr = ['19.80.0.2', '19.86.0.2']
        addr = [addresses[name_net1][0]['addr'],
                addresses[name_net2][0]['addr']]
        self.assertEqual(expected_addr, addr)
class ServersWithSpecificFlavorTestJSON(base.BaseV2ComputeAdminTest):
    """Tests that boot servers on purpose-built flavors (admin required)."""
    disk_config = 'AUTO'

    @classmethod
    def resource_setup(cls):
        cls.prepare_instance_network()
        super(ServersWithSpecificFlavorTestJSON, cls).resource_setup()
        # Admin flavors client: flavor creation is an admin-only API.
        cls.flavor_client = cls.os_adm.flavors_client
        cls.client = cls.servers_client

    @testtools.skipUnless(CONF.compute.run_ssh,
                          'Instance validation tests are disabled.')
    @test.attr(type='gate')
    def test_verify_created_server_ephemeral_disk(self):
        # Verify that the ephemeral disk is created when creating server
        def create_flavor_with_extra_specs():
            # Flavor with a 1 GB ephemeral disk.
            flavor_with_eph_disk_name = data_utils.rand_name('eph_flavor')
            flavor_with_eph_disk_id = data_utils.rand_int_id(start=1000)
            ram = 64
            vcpus = 1
            disk = 0
            # Create a flavor with extra specs
            resp, flavor = (self.flavor_client.
                            create_flavor(flavor_with_eph_disk_name,
                                          ram, vcpus, disk,
                                          flavor_with_eph_disk_id,
                                          ephemeral=1))
            self.addCleanup(flavor_clean_up, flavor['id'])
            self.assertEqual(200, resp.status)
            return flavor['id']

        def create_flavor_without_extra_specs():
            # Identical flavor but with no ephemeral disk, used as baseline.
            flavor_no_eph_disk_name = data_utils.rand_name('no_eph_flavor')
            flavor_no_eph_disk_id = data_utils.rand_int_id(start=1000)
            ram = 64
            vcpus = 1
            disk = 0
            # Create a flavor without extra specs
            resp, flavor = (self.flavor_client.
                            create_flavor(flavor_no_eph_disk_name,
                                          ram, vcpus, disk,
                                          flavor_no_eph_disk_id))
            self.addCleanup(flavor_clean_up, flavor['id'])
            self.assertEqual(200, resp.status)
            return flavor['id']

        def flavor_clean_up(flavor_id):
            # Delete the flavor and wait until it is really gone.
            resp, body = self.flavor_client.delete_flavor(flavor_id)
            self.assertEqual(resp.status, 202)
            self.flavor_client.wait_for_resource_deletion(flavor_id)

        flavor_with_eph_disk_id = create_flavor_with_extra_specs()
        flavor_no_eph_disk_id = create_flavor_without_extra_specs()
        admin_pass = self.image_ssh_password
        resp, server_no_eph_disk = (self.create_test_server(
                                    wait_until='ACTIVE',
                                    adminPass=admin_pass,
                                    flavor=flavor_no_eph_disk_id))
        resp, server_with_eph_disk = (self.create_test_server(
                                      wait_until='ACTIVE',
                                      adminPass=admin_pass,
                                      flavor=flavor_with_eph_disk_id))
        # Get partition number of server without extra specs.
        _, server_no_eph_disk = self.client.get_server(
            server_no_eph_disk['id'])
        linux_client = remote_client.RemoteClient(server_no_eph_disk,
                                                  self.ssh_user, admin_pass)
        partition_num = len(linux_client.get_partitions().split('\n'))
        _, server_with_eph_disk = self.client.get_server(
            server_with_eph_disk['id'])
        linux_client = remote_client.RemoteClient(server_with_eph_disk,
                                                  self.ssh_user, admin_pass)
        partition_num_emph = len(linux_client.get_partitions().split('\n'))
        # The ephemeral disk should show up as exactly one extra partition.
        self.assertEqual(partition_num + 1, partition_num_emph)
class ServersTestManualDisk(ServersTestJSON):
    """Re-run the ServersTestJSON suite with MANUAL disk partitioning."""
    disk_config = 'MANUAL'

    @classmethod
    def resource_setup(cls):
        # The whole class is meaningless without the DiskConfig extension.
        if not CONF.compute_feature_enabled.disk_config:
            raise cls.skipException("DiskConfig extension not enabled.")
        super(ServersTestManualDisk, cls).resource_setup()
| |
from itertools import chain
import datetime
import pulsar
from pulsar.utils.pep import to_string
from pulsar.utils.structures import mapping_iterator, Zset
from pulsar.apps.ds import COMMANDS_INFO, CommandError
from .pubsub import RedisPubSub
# Types accepted wherever a redis key may be given as text or raw bytes.
str_or_bytes = (bytes, str)
# Map python method names (e.g. "lpush") back to redis command names,
# inverted from the COMMANDS_INFO table.
INVERSE_COMMANDS_INFO = dict(((i.method_name, i.name)
                              for i in COMMANDS_INFO.values()))
class Executor:
    """Callable that binds a single redis command name to a client.

    Instances are returned by ``RedisClient.__getattr__`` so that
    ``client.lpush(...)`` works for every known command.
    """
    __slots__ = ('client', 'command')

    def __init__(self, client, command):
        self.client = client
        self.command = command

    def __call__(self, *args, **options):
        # Delegate straight to the owning client's execute method.
        return self.client.execute(self.command, *args, **options)
class ResponseError:
    """Wrapper marking that a redis reply was an error.

    Returned in place of a result so callers can tell an error reply
    apart from regular data.
    """
    __slots__ = ('exception',)

    def __init__(self, exception):
        # The exception object produced by the reply parser.
        self.exception = exception
def dict_merge(*dicts):
    """Merge any number of dicts into a new dict.

    Later dicts win on duplicate keys.  Returns ``{}`` for no arguments.
    """
    merged = {}
    # Plain loop instead of a side-effect-only list comprehension.
    for d in dicts:
        merged.update(d)
    return merged
def pairs_to_object(response, factory=None):
    """Fold a flat ``[k1, v1, k2, v2, ...]`` reply into a mapping.

    ``factory`` defaults to ``dict``.
    """
    stream = iter(response)
    builder = factory or dict
    return builder(zip(stream, stream))
def values_to_object(response, fields=None, factory=None):
    """Zip ``fields`` with the reply values into a mapping.

    When ``fields`` is ``None`` the raw response is passed through.
    """
    if fields is None:
        return response
    return (factory or dict)(zip(fields, response))
def string_keys_to_dict(key_string, callback):
    """Map every whitespace-separated command in *key_string* to *callback*."""
    return {name: callback for name in key_string.split()}
def parse_info(response):
    """Parse the bulk reply of the redis INFO command into a dict.

    Numeric values are coerced to int/float; ``k=v,k=v`` values become
    nested dicts.  Lines starting with ``#`` (section headers) and blank
    lines are skipped.
    """
    def coerce(raw):
        # A value without both ',' and '=' is a plain scalar.
        if ',' not in raw or '=' not in raw:
            try:
                return float(raw) if '.' in raw else int(raw)
            except ValueError:
                return raw
        # Otherwise split the comma-separated k=v pairs recursively.
        parsed = {}
        for chunk in raw.split(','):
            name, val = chunk.rsplit('=', 1)
            parsed[name] = coerce(val)
        return parsed

    info = {}
    for row in to_string(response).splitlines():
        if row and not row.startswith('#'):
            name, val = row.split(':', 1)
            info[name] = coerce(val)
    return info
def values_to_zset(response, withscores=False, **kw):
    """Build a Zset from a flat ``[member, score, ...]`` reply.

    Without ``withscores`` the raw response is returned unchanged.
    """
    if not withscores:
        return response
    pairs = iter(response)
    return Zset((float(score), member)
                for member, score in zip(pairs, pairs))
def sort_return_tuples(response, groups=None, **options):
    """
    If ``groups`` is specified, return the response as a list of
    n-element tuples with n being the value found in options['groups']
    """
    if not groups or not response:
        return response
    # Slice the flat reply into one column per group position, then
    # transpose the columns into tuples.
    columns = [response[start::groups] for start in range(groups)]
    return list(zip(*columns))
def pubsub_callback(response, subcommand=None):
    """Normalise PUBSUB replies according to the *subcommand* used.

    ``numsub`` replies are flat ``[channel, count, ...]`` lists and are
    folded into a ``{channel: int(count)}`` dict; ``numpat`` replies are
    a single integer; anything else is passed through unchanged.
    """
    if subcommand == 'numsub':
        it = iter(response)
        return dict(((k, int(v)) for k, v in zip(it, it)))
        # Removed an unreachable ``return pairs_to_object(response)``
        # that followed the return above (dead code).
    elif subcommand == 'numpat':
        return int(response)
    else:
        return response
class Consumer(pulsar.ProtocolConsumer):
    """Protocol consumer that writes redis commands and parses replies.

    Handles both single commands (request of length 2: args + options)
    and pipelines (request of length 3: commands, raise_on_error,
    accumulated responses).
    """
    # Per-command post-processors applied to raw redis replies before
    # they are handed back to the caller.
    RESPONSE_CALLBACKS = dict_merge(
        string_keys_to_dict(
            'BGSAVE FLUSHALL FLUSHDB HMSET LSET LTRIM MSET RENAME RESTORE '
            'SAVE SELECT SHUTDOWN SLAVEOF SET WATCH UNWATCH',
            lambda r: r == b'OK'
        ),
        string_keys_to_dict('SORT', sort_return_tuples),
        string_keys_to_dict('BLPOP BRPOP', lambda r: r and tuple(r) or None),
        string_keys_to_dict('SMEMBERS SDIFF SINTER SUNION', set),
        string_keys_to_dict('INCRBYFLOAT HINCRBYFLOAT ZINCRBY ZSCORE',
                            lambda v: float(v) if v is not None else v),
        string_keys_to_dict('ZRANGE ZRANGEBYSCORE ZREVRANGE ZREVRANGEBYSCORE',
                            values_to_zset),
        string_keys_to_dict('EXISTS EXPIRE EXPIREAT PEXPIRE PEXPIREAT '
                            'PERSIST RENAMENX',
                            lambda r: bool(r)),
        {
            'PING': lambda r: r == b'PONG',
            'PUBSUB': pubsub_callback,
            'INFO': parse_info,
            'TIME': lambda x: (int(float(x[0])), int(float(x[1]))),
            'HGETALL': pairs_to_object,
            'HMGET': values_to_object,
            'TYPE': lambda r: r.decode('utf-8')
        }
    )

    def start_request(self):
        # Serialise the request and write it to the transport.  A
        # 2-element request is a single command; anything else is a
        # pipeline of commands packed together.
        conn = self._connection
        args = self._request[0]
        if len(self._request) == 2:
            chunk = conn.parser.pack_command(args)
        else:
            chunk = conn.parser.pack_pipeline(args)
        conn._transport.write(chunk)

    def parse_response(self, response, command, options):
        # Apply the per-command callback, if any, to the raw reply.
        callback = self.RESPONSE_CALLBACKS.get(command.upper())
        return callback(response, **options) if callback else response

    def data_received(self, data):
        # Feed incoming bytes to the reply parser and finish the request
        # once a complete reply (or full set of pipeline replies) is in.
        conn = self._connection
        parser = conn.parser
        parser.feed(data)
        response = parser.get()
        request = self._request
        try:
            if len(request) == 2:
                # Single command: parse and finish as soon as the
                # (non-False) reply is available.
                if response is not False:
                    if not isinstance(response, Exception):
                        cmnd = request[0][0]
                        response = self.parse_response(response, cmnd,
                                                       request[1])
                    else:
                        response = ResponseError(response)
                    self.finished(response)
            else:  # pipeline
                commands, raise_on_error, responses = request
                # Drain every reply currently buffered in the parser.
                while response is not False:
                    responses.append(response)
                    response = parser.get()
                if len(responses) == len(commands):
                    error = None
                    result = responses[-1]
                    response = []
                    if isinstance(result, Exception):
                        # EXEC itself failed; fall back to the queued
                        # per-command replies.
                        error = result
                        result = responses[1:-1]
                    # Skip the MULTI/EXEC wrappers at both ends.
                    for cmds, resp in zip(commands[1:-1], result):
                        args, options = cmds
                        if isinstance(resp, Exception) and not error:
                            error = resp
                        resp = self.parse_response(resp, args[0], options)
                        response.append(resp)
                    if error and raise_on_error:
                        response = ResponseError(error)
                    self.finished(response)
        except Exception as exc:
            self.finished(exc=exc)
class RedisClient(object):
    '''Client for :class:`.RedisStore`.

    .. attribute:: store

        The :class:`.RedisStore` for this client.

    Most commands are dispatched generically through :meth:`__getattr__`;
    the methods below exist only where argument massaging is needed.
    '''
    def __init__(self, store):
        self.store = store

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self.store)
    __str__ = __repr__

    def pubsub(self, **kw):
        '''Return a publish/subscribe handler for this store.'''
        return RedisPubSub(self.store, **kw)

    def pipeline(self):
        '''Create a :class:`.Pipeline` for pipelining commands
        '''
        return Pipeline(self.store)

    def execute(self, command, *args, **options):
        '''Execute ``command`` against the underlying store.'''
        return self.store.execute(command, *args, **options)
    execute_command = execute

    # special commands
    # STRINGS
    def decrby(self, key, ammount=None):
        '''DECR by 1 when ``ammount`` is None, otherwise DECRBY.'''
        if ammount is None:
            return self.execute('decr', key)
        else:
            return self.execute('decrby', key, ammount)
    decr = decrby

    def incrby(self, key, ammount=None):
        '''INCR by 1 when ``ammount`` is None, otherwise INCRBY.'''
        if ammount is None:
            return self.execute('incr', key)
        else:
            return self.execute('incrby', key, ammount)
    incr = incrby

    def incrbyfloat(self, key, ammount=None):
        '''INCRBYFLOAT, defaulting the increment to 1.'''
        if ammount is None:
            ammount = 1
        return self.execute('incrbyfloat', key, ammount)

    def set(self, name, value, ex=None, px=None, nx=False, xx=False):
        """
        Set the value at key ``name`` to ``value``

        ``ex`` sets an expire flag on key ``name`` for ``ex`` seconds.

        ``px`` sets an expire flag on key ``name`` for ``px`` milliseconds.

        ``nx`` if set to True, set the value at key ``name`` to ``value`` if it
            does not already exist.

        ``xx`` if set to True, set the value at key ``name`` to ``value`` if it
            already exists.
        """
        pieces = [name, value]
        if ex:
            pieces.append('EX')
            # timedelta expiries are converted to whole seconds.
            if isinstance(ex, datetime.timedelta):
                ex = ex.seconds + ex.days * 24 * 3600
            pieces.append(ex)
        if px:
            pieces.append('PX')
            # timedelta expiries are converted to whole milliseconds.
            if isinstance(px, datetime.timedelta):
                ms = int(px.microseconds / 1000)
                px = (px.seconds + px.days * 24 * 3600) * 1000 + ms
            pieces.append(px)
        if nx:
            pieces.append('NX')
        if xx:
            pieces.append('XX')
        return self.execute('set', *pieces)

    # HASHES
    def hmget(self, key, *fields):
        '''HMGET; ``fields`` is also passed as an option so the reply
        callback can zip values back to field names.'''
        return self.execute('hmget', key, *fields, fields=fields)

    def hmset(self, key, iterable):
        '''HMSET from a dict or iterable of (field, value) pairs.'''
        args = []
        [args.extend(pair) for pair in mapping_iterator(iterable)]
        return self.execute('hmset', key, *args)

    # LISTS
    def blpop(self, keys, timeout=0):
        '''BLPOP on one key or several; ``timeout`` defaults to 0 (block).'''
        if timeout is None:
            timeout = 0
        if isinstance(keys, str_or_bytes):
            keys = [keys]
        else:
            keys = list(keys)
        keys.append(timeout)
        return self.execute_command('BLPOP', *keys)

    def brpop(self, keys, timeout=0):
        '''BRPOP on one key or several; ``timeout`` defaults to 0 (block).'''
        if timeout is None:
            timeout = 0
        if isinstance(keys, str_or_bytes):
            keys = [keys]
        else:
            keys = list(keys)
        keys.append(timeout)
        return self.execute_command('BRPOP', *keys)

    def brpoplpush(self, src, dst, timeout=0):
        '''BRPOPLPUSH; ``timeout`` defaults to 0 (block forever).'''
        if timeout is None:
            timeout = 0
        return self.execute_command('BRPOPLPUSH', src, dst, timeout)

    # SORTED SETS
    def zadd(self, name, *args, **kwargs):
        """
        Set any number of score, element-name pairs to the key ``name``. Pairs
        can be specified in two ways:

        As ``*args``, in the form of::

            score1, name1, score2, name2, ...

        or as ``**kwargs``, in the form of::

            name1=score1, name2=score2, ...

        The following example would add four values to the 'my-key' key::

            client.zadd('my-key', 1.1, 'name1', 2.2, 'name2',
                        name3=3.3, name4=4.4)
        """
        pieces = []
        if args:
            if len(args) % 2 != 0:
                raise ValueError("ZADD requires an equal number of "
                                 "values and scores")
            pieces.extend(args)
        # kwargs arrive as name=score; redis wants score first.
        for pair in kwargs.items():
            pieces.append(pair[1])
            pieces.append(pair[0])
        return self.execute_command('ZADD', name, *pieces)

    def zinterstore(self, des, keys, weights=None, aggregate=None):
        '''ZINTERSTORE ``keys`` into ``des``.'''
        numkeys = len(keys)
        pieces = list(keys)
        if weights:
            pieces.append(b'WEIGHTS')
            pieces.extend(weights)
        if aggregate:
            pieces.append(b'AGGREGATE')
            pieces.append(aggregate)
        return self.execute_command('ZINTERSTORE', des, numkeys, *pieces)

    def zunionstore(self, des, keys, weights=None, aggregate=None):
        '''ZUNIONSTORE ``keys`` into ``des``.'''
        numkeys = len(keys)
        pieces = list(keys)
        if weights:
            pieces.append(b'WEIGHTS')
            pieces.extend(weights)
        if aggregate:
            pieces.append(b'AGGREGATE')
            pieces.append(aggregate)
        return self.execute_command('ZUNIONSTORE', des, numkeys, *pieces)

    def zrange(self, key, start, stop, withscores=False):
        '''ZRANGE, optionally requesting scores.'''
        if withscores:
            return self.execute_command('ZRANGE', key, start, stop,
                                        b'WITHSCORES', withscores=True)
        else:
            return self.execute_command('ZRANGE', key, start, stop)

    def zrangebyscore(self, key, min, max, withscores=False, offset=None,
                      count=None):
        '''ZRANGEBYSCORE with optional WITHSCORES and LIMIT clauses.

        NOTE: when ``offset`` is given, ``count`` must be given too or an
        invalid command is sent.
        '''
        pieces = []
        if withscores:
            pieces.append(b'WITHSCORES')
        if offset:
            pieces.append(b'LIMIT')
            pieces.append(offset)
            pieces.append(count)
        return self.execute_command('ZRANGEBYSCORE', key, min, max, *pieces,
                                    withscores=withscores)

    def zrevrange(self, key, start, stop, withscores=False):
        '''ZREVRANGE, optionally requesting scores.'''
        if withscores:
            return self.execute_command('ZREVRANGE', key, start, stop,
                                        'WITHSCORES', withscores=True)
        else:
            # Bug fix: this branch previously issued ZRANGE, silently
            # returning ascending order instead of the reverse range.
            return self.execute_command('ZREVRANGE', key, start, stop)

    def zrevrangebyscore(self, key, min, max, withscores=False, offset=None,
                         count=None):
        '''ZREVRANGEBYSCORE with optional WITHSCORES and LIMIT clauses.'''
        pieces = []
        if withscores:
            pieces.append(b'WITHSCORES')
        if offset:
            pieces.append(b'LIMIT')
            pieces.append(offset)
            pieces.append(count)
        return self.execute_command('ZREVRANGEBYSCORE', key, min, max, *pieces,
                                    withscores=withscores)

    def eval(self, script, keys=None, args=None):
        '''EVAL a lua ``script`` with ``keys`` and ``args``.'''
        return self._eval('eval', script, keys, args)

    def evalsha(self, sha, keys=None, args=None):
        '''EVALSHA a cached script by ``sha``.'''
        return self._eval('evalsha', sha, keys, args)

    def sort(self, key, start=None, num=None, by=None, get=None,
             desc=False, alpha=False, store=None, groups=False):
        '''Sort and return the list, set or sorted set at ``key``.

        ``start`` and ``num`` allow for paging through the sorted data

        ``by`` allows using an external key to weight and sort the items.
            Use an "*" to indicate where in the key the item value is located

        ``get`` allows for returning items from external keys rather than the
            sorted data itself.  Use an "*" to indicate where int he key
            the item value is located

        ``desc`` allows for reversing the sort

        ``alpha`` allows for sorting lexicographically rather than numerically

        ``store`` allows for storing the result of the sort into
            the key ``store``

        ``groups`` if set to True and if ``get`` contains at least two
            elements, sort will return a list of tuples, each containing the
            values fetched from the arguments to ``get``.
        '''
        if ((start is not None and num is None) or
                (num is not None and start is None)):
            raise CommandError("``start`` and ``num`` must both be specified")
        pieces = [key]
        if by is not None:
            pieces.append('BY')
            pieces.append(by)
        if start is not None and num is not None:
            pieces.append('LIMIT')
            pieces.append(start)
            pieces.append(num)
        if get is not None:
            # If get is a string assume we want to get a single value.
            # Otherwise assume it's an interable and we want to get multiple
            # values. We can't just iterate blindly because strings are
            # iterable.
            if isinstance(get, str):
                pieces.append('GET')
                pieces.append(get)
            else:
                for g in get:
                    pieces.append('GET')
                    pieces.append(g)
        if desc:
            pieces.append('DESC')
        if alpha:
            pieces.append('ALPHA')
        if store is not None:
            pieces.append('STORE')
            pieces.append(store)
        if groups:
            if not get or isinstance(get, str) or len(get) < 2:
                raise CommandError('when using "groups" the "get" argument '
                                   'must be specified and contain at least '
                                   'two keys')
        options = {'groups': len(get) if groups else None}
        return self.execute_command('SORT', *pieces, **options)

    def __getattr__(self, name):
        # Fall back to generic command dispatch for every known command.
        command = INVERSE_COMMANDS_INFO.get(name)
        if command:
            return Executor(self, command)
        else:
            raise AttributeError("'%s' object has no attribute '%s'" %
                                 (type(self), name))

    def _eval(self, command, script, keys, args):
        # Build "script numkeys key... arg..." as redis expects.
        all = keys if keys is not None else ()
        num_keys = len(all)
        if args:
            all = tuple(chain(all, args))
        return self.execute(command, script, num_keys, *all)
class Pipeline(RedisClient):
    '''A :class:`.RedisClient` that buffers commands and sends them to
    redis as a single MULTI/EXEC transaction on :meth:`commit`.
    '''
    def __init__(self, store):
        self.store = store
        self.reset()

    def execute(self, *args, **kwargs):
        # Queue the command; nothing is sent until commit().
        self.command_stack.append((args, kwargs))
    execute_command = execute

    def reset(self):
        # Drop any queued commands.
        self.command_stack = []

    def commit(self, raise_on_error=True):
        '''Send commands to redis.
        '''
        commands = [(('multi',), {})]
        commands.extend(self.command_stack)
        commands.append((('exec',), {}))
        self.reset()
        return self.store.execute_pipeline(commands, raise_on_error)
| |
"""
List output formatting
TODO: messy, rewrite
Copyright (c) 2010-2012 Mika Eloranta
See LICENSE for details.
"""
import os
import sys
from . import colors
from . import core
from . import util
# Python 2/3 compatibility: on py2 integers can be ``int`` or ``long``
# and text may be ``unicode``; on py3 there is a single ``int`` type and
# no separate unicode type (``unicode_types = None`` disables the py2
# re-encoding path in ListOutput.value_repr).
if sys.version_info[0] == 2:
    int_types = (int, long)  # pylint: disable=E0602
    unicode_types = unicode  # pylint: disable=E0602
else:
    int_types = int
    unicode_types = None
class ListOutput(colors.Output):
    """Render config-manager items (systems, nodes, configs, properties,
    settings, ...) as a colorized textual listing."""
    def __init__(self, tool, confman, show_nodes=False, show_systems=False,
                 show_config=False, show_tree=False, show_inherits=False,
                 pattern=False, full_match=False, show_node_prop=False,
                 show_cloud_prop=False, show_config_prop=False,
                 list_props=False, show_layers=False, color="auto",
                 show_controls=False, exclude=None,
                 query_status=False, show_settings=False, **kwargs):
        colors.Output.__init__(self, sys.stdout, color=color)
        self.exclude = exclude or []
        self.show_nodes = show_nodes
        self.show_systems = show_systems
        self.show_config = show_config
        self.show_tree = show_tree
        self.show_inherits = show_inherits
        self.show_node_prop = show_node_prop
        self.show_cloud_prop = show_cloud_prop
        self.show_config_prop = show_config_prop
        self.show_settings = show_settings
        self.show_layers = show_layers
        self.show_controls = show_controls
        self.list_props = list_props
        self.query_status = query_status
        self.pattern = pattern
        self.full_match = full_match
        self.tool = tool
        self.confman = confman
        # One hierarchy-indent step; tree mode indents by depth.
        self.hindent = 4 * " "
        self.indent = int(show_tree) * self.hindent
        # Dispatch table: entry type -> formatter generator.
        self.formatters = {
            "node": self.format_node,
            "system": self.format_system,
            "config": self.format_config,
            "prop": self.format_prop,
            "cloud": self.format_prop,
            "confprop": self.format_prop,
            "controls": self.format_controls,
            "status": self.format_status,
            "setting": self.format_setting,
            "layer": self.format_layer,
        }

    def value_repr(self, value, top_level=False):
        """Yield (text, color) pairs representing *value*.

        Dicts are rendered recursively and sorted by key; at
        ``top_level`` the surrounding braces are omitted.
        """
        if unicode_types and isinstance(value, unicode_types):
            try:
                value = repr(value.encode("ascii"))
            except UnicodeEncodeError:
                pass
        if isinstance(value, dict):
            if not value:
                yield "none", "gray"
                # Bug fix: was ``raise StopIteration()``, which inside a
                # generator becomes RuntimeError under PEP 479 (py3.7+).
                # A plain return ends the generator safely everywhere.
                return
            if not top_level:
                yield "{", None
            # Renamed the inner loop variable so it no longer shadows
            # the ``value`` argument being iterated.
            for i, (key, sub_value) in enumerate(sorted(value.items())):
                if i > 0:
                    yield ", ", None
                yield key, "key"
                yield ":", None
                for output in self.value_repr(sub_value):
                    yield output
            if not top_level:
                yield "}", None
        elif isinstance(value, str):
            yield value, "str"
        elif isinstance(value, bool):
            # bool must be tested before int_types: bool subclasses int.
            yield str(value), "bool"
        elif isinstance(value, int_types):
            yield str(value), "int"
        else:
            # Unknown type: highlight in red to make it stand out.
            yield repr(value), "red"

    def format_setting(self, entry):
        yield entry["setting"], "setting"
        yield " = ", None
        for output in self.value_repr(entry["value"]):
            yield output

    def format_status(self, entry):
        yield entry["status"], "status"

    def format_layer(self, entry):
        yield "#%d: %s: %s" % (entry["index"], entry["layer"],
                               os.path.basename(entry["file_path"])), "layer"

    def format_prop(self, entry):
        return self.value_repr(entry["prop"], top_level=True)

    def format_controls(self, entry):
        yield ", ".join(sorted(entry["controls"])), "controls"

    def format_system(self, entry):
        name = entry["item"].name
        if self.show_tree:
            # Tree mode shows only the leaf name; depth gives the path.
            name = name.rsplit("/", 1)[-1]
        yield name, "system"

    def format_node(self, entry):
        system = entry["item"].system
        if not self.show_tree and system.name:
            # Flat mode: prefix the node with its system path.
            for output in self.format_system(dict(type="system", item=system)):
                yield output
            yield "/", None
        node_name = entry["item"].name.rsplit("/", 1)[-1]
        yield node_name, "node"
        parent_name = entry["item"].get("parent")
        if parent_name and self.show_inherits:
            yield "(", None
            yield parent_name, "nodeparent"
            yield ")", None

    def format_config(self, entry):
        if not self.show_tree:
            node = False
            for output in self.format_node(entry):
                node = True
                yield output
            if node:
                yield "/", None
        yield entry["config"].name, "config"
        parent_name = entry["config"].get("parent")
        if parent_name and self.show_inherits:
            yield "(", None
            yield parent_name, "configparent"
            yield ")", None
        if entry["inherited"]:
            yield " [inherited]", None

    def format_unknown(self, entry):
        yield "UNKNOWN: %r" % entry["type"], "red"

    def output(self):
        """Yields formatted strings ready for writing to the output file"""
        for text, color_code in self.output_pairs():
            yield self.color(text, color_code)

    def output_pairs(self):
        """Yield (text, color) pairs for every entry in the tree."""
        for entry in self.iter_tree():
            indent = (entry["item"].get("depth", 0) - 1) * self.indent
            if entry["type"] not in ["system", "node", "config"]:
                indent += self.hindent
            if entry["type"] == "config":
                indent += self.indent
            type_code = "%stype" % entry["type"]
            if type_code not in colors.CODES:
                type_code = "reset"
            yield "%8s" % entry["type"], type_code
            yield " ", None
            yield indent, None
            for output in self.formatters.get(entry["type"],
                                              self.format_unknown)(entry):
                yield output
            yield "\n", None

    def iter_tree(self):
        """
        Yields every system, node, config, etc. that needs to be produced to
        the output.
        """
        for item in self.confman.find(self.pattern, systems=self.show_systems,
                                      full_match=self.full_match,
                                      exclude=self.exclude):
            if isinstance(item, core.Node):
                if self.show_nodes:
                    yield dict(type="node", item=item)
            elif self.show_systems:
                yield dict(type="system", item=item)

            if self.show_node_prop:
                items = dict(item.showable())
                if self.list_props:
                    # One entry per leaf property path.
                    for key_path, value in util.path_iter_dict(items):
                        yield dict(type="prop", item=item,
                                   prop={key_path: value})
                else:
                    yield dict(type="prop", item=item, prop=items)

            if isinstance(item, core.Node):
                for conf in sorted(item.iter_all_configs(),
                                   key=lambda x: x.name):
                    if self.show_config:
                        yield dict(type="config", item=item, config=conf,
                                   inherited=(conf.node != item))
                    if self.show_layers:
                        for i, (sort_key, layer_name, file_path) \
                                in enumerate(conf.settings.layers):
                            yield dict(type="layer", item=item, config=conf,
                                       layer=layer_name, file_path=file_path,
                                       index=i)
                    if self.show_config_prop:
                        yield dict(type="confprop", item=item, config=conf,
                                   prop=conf)
                    plugin = conf.get_plugin()
                    if plugin and self.show_controls and plugin.controls:
                        yield dict(type="controls", item=item, config=conf,
                                   controls=plugin.controls)
                    if self.show_settings:
                        for key, value in util.path_iter_dict(conf.settings):
                            yield dict(type="setting", item=item, config=conf,
                                       setting=key, value=value)

            cloud_prop = item.get("cloud", {})
            if self.show_cloud_prop and cloud_prop:
                if self.list_props:
                    for key_path, value in util.path_iter_dict(cloud_prop):
                        yield dict(type="cloud", item=item,
                                   prop={key_path: value})
                else:
                    yield dict(type="cloud", cloud=cloud_prop, item=item,
                               prop=cloud_prop)
            # Querying live provider status requires a provisioned instance.
            if self.query_status and cloud_prop.get("instance"):
                provider = self.tool.sky.get_provider(cloud_prop)
                status = provider.get_instance_status(cloud_prop)
                yield dict(type="status", item=item, status=status)
| |
import docker
from docker.types import IPAMConfig, IPAMPool
import pytest
from ..helpers import random_name, requires_api_version
from .base import BaseAPIIntegrationTest, TEST_IMG
class TestNetworks(BaseAPIIntegrationTest):
    def tearDown(self):
        # Make sure the daemon is not left in swarm mode between tests;
        # force=True succeeds even if this node is a manager.
        self.client.leave_swarm(force=True)
        super().tearDown()
def create_network(self, *args, **kwargs):
net_name = random_name()
net_id = self.client.create_network(net_name, *args, **kwargs)['Id']
self.tmp_networks.append(net_id)
return (net_name, net_id)
    def test_list_networks(self):
        # A created network must show up in networks(), and be findable
        # by exact name and by partial id.
        # NOTE(review): the result of this first networks() call is
        # immediately overwritten below — presumably only exercising the
        # endpoint before any network exists; confirm intent.
        networks = self.client.networks()
        net_name, net_id = self.create_network()
        networks = self.client.networks()
        assert net_id in [n['Id'] for n in networks]
        networks_by_name = self.client.networks(names=[net_name])
        assert [n['Id'] for n in networks_by_name] == [net_id]
        networks_by_partial_id = self.client.networks(ids=[net_id[:8]])
        assert [n['Id'] for n in networks_by_partial_id] == [net_id]
    def test_inspect_network(self):
        # inspect_network() returns the defaults for a plain network:
        # local bridge with the default IPAM driver.
        net_name, net_id = self.create_network()
        net = self.client.inspect_network(net_id)
        assert net['Id'] == net_id
        assert net['Name'] == net_name
        assert net['Driver'] == 'bridge'
        assert net['Scope'] == 'local'
        assert net['IPAM']['Driver'] == 'default'
    def test_create_network_with_ipam_config(self):
        # A custom IPAM pool (subnet, iprange, gateway, aux addresses)
        # must be reflected verbatim in the inspected network.
        _, net_id = self.create_network(
            ipam=IPAMConfig(
                driver='default',
                pool_configs=[
                    IPAMPool(
                        subnet="172.28.0.0/16",
                        iprange="172.28.5.0/24",
                        gateway="172.28.5.254",
                        aux_addresses={
                            "a": "172.28.1.5",
                            "b": "172.28.1.6",
                            "c": "172.28.1.7",
                        },
                    ),
                ],
            ),
        )
        net = self.client.inspect_network(net_id)
        ipam = net['IPAM']
        # No IPAM options were supplied, so none should come back.
        assert ipam.pop('Options', None) is None
        assert ipam['Driver'] == 'default'
        assert ipam['Config'] == [{
            'Subnet': "172.28.0.0/16",
            'IPRange': "172.28.5.0/24",
            'Gateway': "172.28.5.254",
            'AuxiliaryAddresses': {
                "a": "172.28.1.5",
                "b": "172.28.1.6",
                "c": "172.28.1.7",
            },
        }]
    def test_create_network_with_host_driver_fails(self):
        # Only one host network may exist, so creating another with the
        # 'host' driver is rejected by the daemon.
        with pytest.raises(docker.errors.APIError):
            self.client.create_network(random_name(), driver='host')
    def test_remove_network(self):
        # remove_network() should make the network disappear from the list.
        net_name, net_id = self.create_network()
        assert net_name in [n['Name'] for n in self.client.networks()]
        self.client.remove_network(net_id)
        assert net_name not in [n['Name'] for n in self.client.networks()]
    def test_connect_and_disconnect_container(self):
        # Connect a running container to a network, verify membership,
        # then disconnect and verify it is gone; repeating either
        # operation must raise an APIError.
        net_name, net_id = self.create_network()
        container = self.client.create_container(TEST_IMG, 'top')
        self.tmp_containers.append(container)
        self.client.start(container)
        network_data = self.client.inspect_network(net_id)
        assert not network_data.get('Containers')
        self.client.connect_container_to_network(container, net_id)
        network_data = self.client.inspect_network(net_id)
        assert list(network_data['Containers'].keys()) == [
            container['Id']
        ]
        # Connecting an already-connected container is an error.
        with pytest.raises(docker.errors.APIError):
            self.client.connect_container_to_network(container, net_id)
        self.client.disconnect_container_from_network(container, net_id)
        network_data = self.client.inspect_network(net_id)
        assert not network_data.get('Containers')
        # Disconnecting a container that is not connected is an error.
        with pytest.raises(docker.errors.APIError):
            self.client.disconnect_container_from_network(container, net_id)
    @requires_api_version('1.22')
    def test_connect_and_force_disconnect_container(self):
        # Same as the plain connect/disconnect test, but using the
        # force=True disconnect introduced in API 1.22.
        net_name, net_id = self.create_network()
        container = self.client.create_container(TEST_IMG, 'top')
        self.tmp_containers.append(container)
        self.client.start(container)
        network_data = self.client.inspect_network(net_id)
        assert not network_data.get('Containers')
        self.client.connect_container_to_network(container, net_id)
        network_data = self.client.inspect_network(net_id)
        assert list(network_data['Containers'].keys()) == \
            [container['Id']]
        self.client.disconnect_container_from_network(container, net_id, True)
        network_data = self.client.inspect_network(net_id)
        assert not network_data.get('Containers')
        # Even a forced disconnect fails once the container is detached.
        with pytest.raises(docker.errors.APIError):
            self.client.disconnect_container_from_network(
                container, net_id, force=True
            )
    @requires_api_version('1.22')
    def test_connect_with_aliases(self):
        # Aliases passed at connect time must appear in the container's
        # per-network settings.
        net_name, net_id = self.create_network()
        container = self.client.create_container(TEST_IMG, 'top')
        self.tmp_containers.append(container)
        self.client.start(container)
        self.client.connect_container_to_network(
            container, net_id, aliases=['foo', 'bar'])
        container_data = self.client.inspect_container(container)
        aliases = (
            container_data['NetworkSettings']['Networks'][net_name]['Aliases']
        )
        assert 'foo' in aliases
        assert 'bar' in aliases
    def test_connect_on_container_create(self):
        # Using the network as network_mode at create time should attach
        # the container on start, and disconnect must detach it.
        net_name, net_id = self.create_network()
        container = self.client.create_container(
            image=TEST_IMG,
            command='top',
            host_config=self.client.create_host_config(network_mode=net_name),
        )
        self.tmp_containers.append(container)
        self.client.start(container)
        network_data = self.client.inspect_network(net_id)
        assert list(network_data['Containers'].keys()) == \
            [container['Id']]
        self.client.disconnect_container_from_network(container, net_id)
        network_data = self.client.inspect_network(net_id)
        assert not network_data.get('Containers')
    @requires_api_version('1.22')
    def test_create_with_aliases(self):
        # Aliases supplied via networking_config at container-create time
        # must appear in the container's per-network settings.
        net_name, net_id = self.create_network()
        container = self.client.create_container(
            image=TEST_IMG,
            command='top',
            host_config=self.client.create_host_config(
                network_mode=net_name,
            ),
            networking_config=self.client.create_networking_config({
                net_name: self.client.create_endpoint_config(
                    aliases=['foo', 'bar'],
                ),
            }),
        )
        self.tmp_containers.append(container)
        self.client.start(container)
        container_data = self.client.inspect_container(container)
        aliases = (
            container_data['NetworkSettings']['Networks'][net_name]['Aliases']
        )
        assert 'foo' in aliases
        assert 'bar' in aliases
    @requires_api_version('1.22')
    def test_create_with_ipv4_address(self):
        # A static IPv4 address requested through the endpoint config
        # must be recorded in the container's IPAMConfig.
        net_name, net_id = self.create_network(
            ipam=IPAMConfig(
                driver='default',
                pool_configs=[IPAMPool(subnet="132.124.0.0/16")],
            ),
        )
        container = self.client.create_container(
            image=TEST_IMG, command='top',
            host_config=self.client.create_host_config(network_mode=net_name),
            networking_config=self.client.create_networking_config({
                net_name: self.client.create_endpoint_config(
                    ipv4_address='132.124.0.23'
                )
            })
        )
        self.tmp_containers.append(container)
        self.client.start(container)
        net_settings = self.client.inspect_container(container)[
            'NetworkSettings'
        ]
        assert net_settings['Networks'][net_name]['IPAMConfig']['IPv4Address']\
            == '132.124.0.23'
    @requires_api_version('1.22')
    def test_create_with_ipv6_address(self):
        # Same as the IPv4 variant above, for a static IPv6 address.
        net_name, net_id = self.create_network(
            ipam=IPAMConfig(
                driver='default',
                pool_configs=[IPAMPool(subnet="2001:389::1/64")],
            ),
        )
        container = self.client.create_container(
            image=TEST_IMG, command='top',
            host_config=self.client.create_host_config(network_mode=net_name),
            networking_config=self.client.create_networking_config({
                net_name: self.client.create_endpoint_config(
                    ipv6_address='2001:389::f00d'
                )
            })
        )
        self.tmp_containers.append(container)
        self.client.start(container)
        net_settings = self.client.inspect_container(container)[
            'NetworkSettings'
        ]
        assert net_settings['Networks'][net_name]['IPAMConfig']['IPv6Address']\
            == '2001:389::f00d'
    @requires_api_version('1.24')
    def test_create_with_linklocal_ips(self):
        """Endpoint link-local IPs appear under ``IPAMConfig.LinkLocalIPs``."""
        container = self.client.create_container(
            TEST_IMG, 'top',
            networking_config=self.client.create_networking_config(
                {
                    'bridge': self.client.create_endpoint_config(
                        link_local_ips=['169.254.8.8']
                    )
                }
            ),
            host_config=self.client.create_host_config(network_mode='bridge')
        )
        self.tmp_containers.append(container)
        self.client.start(container)
        container_data = self.client.inspect_container(container)
        net_cfg = container_data['NetworkSettings']['Networks']['bridge']
        assert 'IPAMConfig' in net_cfg
        assert 'LinkLocalIPs' in net_cfg['IPAMConfig']
        assert net_cfg['IPAMConfig']['LinkLocalIPs'] == ['169.254.8.8']
    @requires_api_version('1.32')
    def test_create_with_driveropt(self):
        """Per-endpoint driver options round-trip through ``DriverOpts``."""
        container = self.client.create_container(
            TEST_IMG, 'top',
            networking_config=self.client.create_networking_config(
                {
                    'bridge': self.client.create_endpoint_config(
                        driver_opt={'com.docker-py.setting': 'on'}
                    )
                }
            ),
            host_config=self.client.create_host_config(network_mode='bridge')
        )
        self.tmp_containers.append(container)
        self.client.start(container)
        container_data = self.client.inspect_container(container)
        net_cfg = container_data['NetworkSettings']['Networks']['bridge']
        assert 'DriverOpts' in net_cfg
        assert 'com.docker-py.setting' in net_cfg['DriverOpts']
        assert net_cfg['DriverOpts']['com.docker-py.setting'] == 'on'
    @requires_api_version('1.22')
    def test_create_with_links(self):
        """Links declared in an endpoint config at create time resolve by alias."""
        net_name, net_id = self.create_network()
        container = self.create_and_start(
            host_config=self.client.create_host_config(network_mode=net_name),
            networking_config=self.client.create_networking_config({
                net_name: self.client.create_endpoint_config(
                    links=[('docker-py-test-upstream', 'bar')],
                ),
            }),
        )
        net_settings = self.client.inspect_container(container)[
            'NetworkSettings'
        ]
        assert net_settings['Networks'][net_name]['Links'] == [
            'docker-py-test-upstream:bar'
        ]
        # The linked-to container is started *after* the linking one; the
        # alias must still resolve through the network's embedded DNS.
        self.create_and_start(
            name='docker-py-test-upstream',
            host_config=self.client.create_host_config(network_mode=net_name),
        )
        self.execute(container, ['nslookup', 'bar'])
def test_create_check_duplicate(self):
net_name, net_id = self.create_network()
with pytest.raises(docker.errors.APIError):
self.client.create_network(net_name, check_duplicate=True)
net_id = self.client.create_network(net_name, check_duplicate=False)
self.tmp_networks.append(net_id['Id'])
    @requires_api_version('1.22')
    def test_connect_with_links(self):
        """Links supplied when (re)connecting a container to a network work."""
        net_name, net_id = self.create_network()
        container = self.create_and_start(
            host_config=self.client.create_host_config(network_mode=net_name))
        # Disconnect first so we can reconnect with an explicit link.
        self.client.disconnect_container_from_network(container, net_name)
        self.client.connect_container_to_network(
            container, net_name,
            links=[('docker-py-test-upstream', 'bar')])
        net_settings = self.client.inspect_container(container)[
            'NetworkSettings'
        ]
        assert net_settings['Networks'][net_name]['Links'] == [
            'docker-py-test-upstream:bar'
        ]
        self.create_and_start(
            name='docker-py-test-upstream',
            host_config=self.client.create_host_config(network_mode=net_name),
        )
        self.execute(container, ['nslookup', 'bar'])
    @requires_api_version('1.22')
    def test_connect_with_ipv4_address(self):
        """A static IPv4 address supplied at connect time is honoured."""
        # Pool/iprange/gateway chosen so the fixed address below is valid.
        net_name, net_id = self.create_network(
            ipam=IPAMConfig(
                driver='default',
                pool_configs=[
                    IPAMPool(
                        subnet="172.28.0.0/16", iprange="172.28.5.0/24",
                        gateway="172.28.5.254"
                    )
                ]
            )
        )
        container = self.create_and_start(
            host_config=self.client.create_host_config(network_mode=net_name))
        self.client.disconnect_container_from_network(container, net_name)
        self.client.connect_container_to_network(
            container, net_name, ipv4_address='172.28.5.24'
        )
        container_data = self.client.inspect_container(container)
        net_data = container_data['NetworkSettings']['Networks'][net_name]
        assert net_data['IPAMConfig']['IPv4Address'] == '172.28.5.24'
    @requires_api_version('1.22')
    def test_connect_with_ipv6_address(self):
        """A static IPv6 address supplied at connect time is honoured."""
        net_name, net_id = self.create_network(
            ipam=IPAMConfig(
                driver='default',
                pool_configs=[
                    IPAMPool(
                        subnet="2001:389::1/64", iprange="2001:389::0/96",
                        gateway="2001:389::ffff"
                    )
                ]
            )
        )
        container = self.create_and_start(
            host_config=self.client.create_host_config(network_mode=net_name))
        self.client.disconnect_container_from_network(container, net_name)
        self.client.connect_container_to_network(
            container, net_name, ipv6_address='2001:389::f00d'
        )
        container_data = self.client.inspect_container(container)
        net_data = container_data['NetworkSettings']['Networks'][net_name]
        assert net_data['IPAMConfig']['IPv6Address'] == '2001:389::f00d'
    @requires_api_version('1.23')
    def test_create_internal_networks(self):
        """``internal=True`` is reflected in the inspected network."""
        _, net_id = self.create_network(internal=True)
        net = self.client.inspect_network(net_id)
        assert net['Internal'] is True
    @requires_api_version('1.23')
    def test_create_network_with_labels(self):
        """Labels passed as a dict round-trip unchanged through inspect."""
        _, net_id = self.create_network(labels={
            'com.docker.py.test': 'label'
        })
        net = self.client.inspect_network(net_id)
        assert 'Labels' in net
        assert len(net['Labels']) == 1
        assert net['Labels'] == {
            'com.docker.py.test': 'label'
        }
    @requires_api_version('1.23')
    def test_create_network_with_labels_wrong_type(self):
        """Labels must be a dict; a list is rejected client-side."""
        with pytest.raises(TypeError):
            self.create_network(labels=['com.docker.py.test=label', ])
    @requires_api_version('1.23')
    def test_create_network_ipv6_enabled(self):
        """``enable_ipv6=True`` with an IPv6 pool yields EnableIPv6 on inspect."""
        _, net_id = self.create_network(
            enable_ipv6=True, ipam=IPAMConfig(
                driver='default',
                pool_configs=[
                    IPAMPool(
                        subnet="2001:389::1/64", iprange="2001:389::0/96",
                        gateway="2001:389::ffff"
                    )
                ]
            )
        )
        net = self.client.inspect_network(net_id)
        assert net['EnableIPv6'] is True
    @requires_api_version('1.25')
    def test_create_network_attachable(self):
        """Overlay networks can be created attachable (requires swarm mode)."""
        assert self.init_swarm()
        _, net_id = self.create_network(driver='overlay', attachable=True)
        net = self.client.inspect_network(net_id)
        assert net['Attachable'] is True
    @requires_api_version('1.29')
    def test_create_network_ingress(self):
        """An overlay network can be created as the swarm ingress network."""
        assert self.init_swarm()
        # Remove the default ingress network first; only one may exist.
        self.client.remove_network('ingress')
        _, net_id = self.create_network(driver='overlay', ingress=True)
        net = self.client.inspect_network(net_id)
        assert net['Ingress'] is True
    @requires_api_version('1.25')
    def test_prune_networks(self):
        """An unused network is reported in ``NetworksDeleted`` by prune."""
        net_name, _ = self.create_network()
        result = self.client.prune_networks()
        assert net_name in result['NetworksDeleted']
    @requires_api_version('1.31')
    def test_create_inspect_network_with_scope(self):
        """Inspecting with a ``scope`` filter only matches same-scope networks."""
        assert self.init_swarm()
        net_name_loc, net_id_loc = self.create_network(scope='local')
        assert self.client.inspect_network(net_name_loc)
        assert self.client.inspect_network(net_name_loc, scope='local')
        # A local network must not be visible under the 'global' scope.
        with pytest.raises(docker.errors.NotFound):
            self.client.inspect_network(net_name_loc, scope='global')
        net_name_swarm, net_id_swarm = self.create_network(
            driver='overlay', scope='swarm'
        )
        assert self.client.inspect_network(net_name_swarm)
        assert self.client.inspect_network(net_name_swarm, scope='swarm')
        with pytest.raises(docker.errors.NotFound):
            self.client.inspect_network(net_name_swarm, scope='local')
def test_create_remove_network_with_space_in_name(self):
net_id = self.client.create_network('test 01')
self.tmp_networks.append(net_id)
assert self.client.inspect_network('test 01')
assert self.client.remove_network('test 01') is None # does not raise
| |
# -*- coding: utf-8 -*-
"""
molvs.fragment
~~~~~~~~~~~~~~
This module contains tools for dealing with molecules with more than one covalently bonded unit. The main classes are
:class:`~molvs.fragment.LargestFragmentChooser`, which returns the largest covalent unit in a molecule, and
:class:`~molvs.fragment.FragmentRemover`, which filters out fragments from a molecule using SMARTS patterns.
:copyright: Copyright 2016 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
import logging
from rdkit import Chem
from rdkit.Chem import rdMolDescriptors
from .utils import memoized_property
log = logging.getLogger(__name__)
class FragmentPattern(object):
    """A named SMARTS pattern describing one removable fragment.

    The SMARTS string is only compiled (via ``Chem.MolFromSmarts``) on first
    access of :attr:`smarts`, and the compiled query is cached thereafter.
    """

    def __init__(self, name, smarts):
        """Create a FragmentPattern.

        :param name: A name for this FragmentPattern.
        :param smarts: A SMARTS pattern.
        """
        self.smarts_str = smarts
        self.name = name

    @memoized_property
    def smarts(self):
        # Lazily compiled RDKit query molecule for ``smarts_str``.
        return Chem.MolFromSmarts(self.smarts_str)

    def __str__(self):
        return self.name

    def __repr__(self):
        return f'FragmentPattern({self.name!r}, {self.smarts_str!r})'
#: The default list of :class:`FragmentPatterns <molvs.fragment.FragmentPattern>` to be used by
#: :class:`~molvs.fragment.FragmentRemover`.
#: Order matters: with ``leave_last=True`` fragments are removed in list
#: order, so patterns nearer the end are more likely to survive.
REMOVE_FRAGMENTS = (
    FragmentPattern('hydrogen', '[H]'),
    FragmentPattern('fluorine', '[F]'),
    FragmentPattern('chlorine', '[Cl]'),
    FragmentPattern('bromine', '[Br]'),
    FragmentPattern('iodine', '[I]'),
    FragmentPattern('lithium', '[Li]'),
    FragmentPattern('sodium', '[Na]'),
    FragmentPattern('potassium', '[K]'),
    FragmentPattern('calcium', '[Ca]'),
    FragmentPattern('magnesium', '[Mg]'),
    FragmentPattern('aluminium', '[Al]'),
    FragmentPattern('barium', '[Ba]'),
    FragmentPattern('bismuth', '[Bi]'),
    FragmentPattern('silver', '[Ag]'),
    FragmentPattern('strontium', '[Sr]'),
    FragmentPattern('zinc', '[Zn]'),
    FragmentPattern('ammonia/ammonium', '[#7]'),
    FragmentPattern('water/hydroxide', '[#8]'),
    FragmentPattern('methyl amine', '[#6]-[#7]'),
    FragmentPattern('sulfide', 'S'),
    FragmentPattern('nitrate', '[#7](=[#8])(-[#8])-[#8]'),
    FragmentPattern('phosphate', '[P](=[#8])(-[#8])(-[#8])-[#8]'),
    FragmentPattern('hexafluorophosphate', '[P](-[#9])(-[#9])(-[#9])(-[#9])(-[#9])-[#9]'),
    FragmentPattern('sulfate', '[S](=[#8])(=[#8])(-[#8])-[#8]'),
    FragmentPattern('methyl sulfonate', '[#6]-[S](=[#8])(=[#8])(-[#8])'),
    FragmentPattern('trifluoromethanesulfonic acid',
                    '[#8]-[S](=[#8])(=[#8])-[#6](-[#9])(-[#9])-[#9]'),
    FragmentPattern('trifluoroacetic acid', '[#9]-[#6](-[#9])(-[#9])-[#6](=[#8])-[#8]'),
    FragmentPattern('1,2-dichloroethane', '[Cl]-[#6]-[#6]-[Cl]'),
    FragmentPattern('1,2-dimethoxyethane', '[#6]-[#8]-[#6]-[#6]-[#8]-[#6]'),
    FragmentPattern('1,4-dioxane', '[#6]-1-[#6]-[#8]-[#6]-[#6]-[#8]-1'),
    FragmentPattern('1-methyl-2-pyrrolidinone', '[#6]-[#7]-1-[#6]-[#6]-[#6]-[#6]-1=[#8]'),
    FragmentPattern('2-butanone', '[#6]-[#6]-[#6](-[#6])=[#8]'),
    FragmentPattern('acetate/acetic acid', '[#8]-[#6](-[#6])=[#8]'),
    FragmentPattern('acetone', '[#6]-[#6](-[#6])=[#8]'),
    FragmentPattern('acetonitrile', '[#6]-[#6]#[N]'),
    FragmentPattern('benzene', '[#6]1[#6][#6][#6][#6][#6]1'),
    FragmentPattern('butanol', '[#8]-[#6]-[#6]-[#6]-[#6]'),
    FragmentPattern('t-butanol', '[#8]-[#6](-[#6])(-[#6])-[#6]'),
    FragmentPattern('chloroform', '[Cl]-[#6](-[Cl])-[Cl]'),
    FragmentPattern('cycloheptane', '[#6]-1-[#6]-[#6]-[#6]-[#6]-[#6]-[#6]-1'),
    FragmentPattern('cyclohexane', '[#6]-1-[#6]-[#6]-[#6]-[#6]-[#6]-1'),
    FragmentPattern('dichloromethane', '[Cl]-[#6]-[Cl]'),
    FragmentPattern('diethyl ether', '[#6]-[#6]-[#8]-[#6]-[#6]'),
    FragmentPattern('diisopropyl ether', '[#6]-[#6](-[#6])-[#8]-[#6](-[#6])-[#6]'),
    FragmentPattern('dimethyl formamide', '[#6]-[#7](-[#6])-[#6]=[#8]'),
    FragmentPattern('dimethyl sulfoxide', '[#6]-[S](-[#6])=[#8]'),
    FragmentPattern('ethanol', '[#8]-[#6]-[#6]'),
    FragmentPattern('ethyl acetate', '[#6]-[#6]-[#8]-[#6](-[#6])=[#8]'),
    FragmentPattern('formic acid', '[#8]-[#6]=[#8]'),
    FragmentPattern('heptane', '[#6]-[#6]-[#6]-[#6]-[#6]-[#6]-[#6]'),
    FragmentPattern('hexane', '[#6]-[#6]-[#6]-[#6]-[#6]-[#6]'),
    FragmentPattern('isopropanol', '[#8]-[#6](-[#6])-[#6]'),
    FragmentPattern('methanol', '[#8]-[#6]'),
    FragmentPattern('N,N-dimethylacetamide', '[#6]-[#7](-[#6])-[#6](-[#6])=[#8]'),
    FragmentPattern('pentane', '[#6]-[#6]-[#6]-[#6]-[#6]'),
    FragmentPattern('propanol', '[#8]-[#6]-[#6]-[#6]'),
    FragmentPattern('pyridine', '[#6]-1=[#6]-[#6]=[#7]-[#6]=[#6]-1'),
    FragmentPattern('t-butyl methyl ether', '[#6]-[#8]-[#6](-[#6])(-[#6])-[#6]'),
    FragmentPattern('tetrahydrofurane', '[#6]-1-[#6]-[#6]-[#8]-[#6]-1'),
    FragmentPattern('toluene', '[#6]-[#6]~1~[#6]~[#6]~[#6]~[#6]~[#6]~1'),
    FragmentPattern('xylene', '[#6]-[#6]~1~[#6](-[#6])~[#6]~[#6]~[#6]~[#6]~1')
)
#: The default value for whether to ensure at least one fragment is left after FragmentRemover is applied.
LEAVE_LAST = True
#: The default value for whether LargestFragmentChooser sees organic fragments as "larger" than inorganic fragments.
PREFER_ORGANIC = False
def is_organic(fragment):
    """Return True if fragment contains at least one carbon atom.

    :param fragment: The fragment as an RDKit Mol object.
    """
    # TODO: Consider a different definition?
    # Could allow only H, C, N, O, S, P, F, Cl, Br, I
    for atom in fragment.GetAtoms():
        if atom.GetAtomicNum() == 6:  # atomic number 6 == carbon
            return True
    return False
class FragmentRemover(object):
    """A class for filtering out fragments using SMARTS patterns."""
    def __init__(self, fragments=REMOVE_FRAGMENTS, leave_last=LEAVE_LAST):
        """Initialize a FragmentRemover with an optional custom list of :class:`~molvs.fragment.FragmentPattern`.
        Setting leave_last to True will ensure at least one fragment is left in the molecule, even if it is matched by a
        :class:`~molvs.fragment.FragmentPattern`. Fragments are removed in the order specified in the list, so place
        those you would prefer to be left towards the end of the list. If all the remaining fragments match the same
        :class:`~molvs.fragment.FragmentPattern`, they will all be left.
        :param fragments: A list of :class:`~molvs.fragment.FragmentPattern` to remove.
        :param bool leave_last: Whether to ensure at least one fragment is left.
        """
        log.debug('Initializing FragmentRemover')
        self.fragments = fragments
        self.leave_last = leave_last
    def __call__(self, mol):
        """Calling a FragmentRemover instance like a function is the same as calling its remove(mol) method."""
        return self.remove(mol)
    def remove(self, mol):
        """Return the molecule with specified fragments removed.
        :param mol: The molecule to remove fragments from.
        :type mol: :rdkit:`Mol <Chem.rdchem.Mol-class.html>`
        :return: The molecule with fragments removed.
        :rtype: :rdkit:`Mol <Chem.rdchem.Mol-class.html>`
        """
        log.debug('Running FragmentRemover')
        # Iterate FragmentPatterns and remove matching fragments
        for frag in self.fragments:
            # If nothing is left or leave_last and only one fragment, end here.
            if mol.GetNumAtoms() == 0 or (self.leave_last and len(Chem.GetMolFrags(mol)) <= 1):
                break
            # Apply removal for this FragmentPattern.
            # onlyFrags=True restricts deletion to matches that make up an
            # entire disconnected fragment, never part of a larger unit.
            removed = Chem.DeleteSubstructs(mol, frag.smarts, onlyFrags=True)
            # An atom-count change means this pattern actually removed something.
            if mol.GetNumAtoms() != removed.GetNumAtoms():
                log.info(f'Removed fragment: {frag.name}')
            if self.leave_last and removed.GetNumAtoms() == 0:
                # All the remaining fragments match this pattern - leave them all
                break
            mol = removed
        return mol
class LargestFragmentChooser(object):
    """A class for selecting the largest covalent unit in a molecule with multiple fragments."""

    def __init__(self, prefer_organic=PREFER_ORGANIC):
        """
        If prefer_organic is set to True, any organic fragment will be considered larger than any inorganic fragment. A
        fragment is considered organic if it contains a carbon atom.
        :param bool prefer_organic: Whether to prioritize organic fragments above all others.
        """
        log.debug('Initializing LargestFragmentChooser')
        self.prefer_organic = prefer_organic

    def __call__(self, mol):
        """Calling a LargestFragmentChooser instance like a function is the same as calling its choose(mol) method."""
        return self.choose(mol)

    def choose(self, mol):
        """Return the largest covalent unit.
        The largest fragment is determined by number of atoms (including hydrogens). Ties are broken by taking the
        fragment with the higher molecular weight, and then by taking the first alphabetically by SMILES if needed.
        :param mol: The molecule to choose the largest fragment from.
        :type mol: :rdkit:`Mol <Chem.rdchem.Mol-class.html>`
        :return: The largest fragment (or the input molecule itself when it has no fragments).
        :rtype: :rdkit:`Mol <Chem.rdchem.Mol-class.html>`
        """
        log.debug('Running LargestFragmentChooser')
        # TODO: Alternatively allow a list of fragments to be passed as the mol parameter
        fragments = Chem.GetMolFrags(mol, asMols=True)
        largest = None
        for f in fragments:
            smiles = Chem.MolToSmiles(f, isomericSmiles=True)
            log.debug(f'Fragment: {smiles}')
            organic = is_organic(f)
            if self.prefer_organic:
                # Skip this fragment if not organic and we already have an organic fragment as the largest so far
                if largest and largest['organic'] and not organic:
                    continue
                # Reset largest if it wasn't organic and this fragment is organic
                if largest and organic and not largest['organic']:
                    largest = None
            # Count atoms, including implicit/explicit hydrogens
            atoms = 0
            for a in f.GetAtoms():
                atoms += 1 + a.GetTotalNumHs()
            # Skip this fragment if fewer atoms than the largest
            if largest and atoms < largest['atoms']:
                continue
            # Skip this fragment if equal number of atoms but weight is lower
            weight = rdMolDescriptors.CalcExactMolWt(f)
            if largest and atoms == largest['atoms'] and weight < largest['weight']:
                continue
            # Skip this fragment if equal atoms and equal weight but smiles comes last alphabetically
            if largest and atoms == largest['atoms'] and weight == largest['weight'] and smiles > largest['smiles']:
                continue
            # Otherwise this is the largest so far
            log.debug(f'New largest fragment: {smiles} ({atoms})')
            largest = {'smiles': smiles, 'fragment': f,
                       'atoms': atoms, 'weight': weight, 'organic': organic}
        # BUG FIX: an atom-free molecule yields no fragments, which previously
        # raised ``TypeError: 'NoneType' object is not subscriptable`` here.
        # Return the input unchanged in that degenerate case.
        if largest is None:
            return mol
        return largest['fragment']
| |
#============================================================================
# Copyright 2012 Citrix Systems, Inc. Licensed under the
# Apache License, Version 2.0 (the "License"); you may not use this
# file except in compliance with the License. Citrix Systems, Inc.
# reserves all rights not expressly granted by the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Automatically generated by addcopyright.py at 04/03/2012
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
#============================================================================
# prior permission.
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
import gettext
import xmlrpclib
import httplib
import socket
# Message catalogue for xen-xm; silently falls back to the untranslated
# strings when no catalogue is installed.
translation = gettext.translation('xen-xm', fallback = True)
# Known Xen-API protocol versions.
API_VERSION_1_1 = '1.1'
API_VERSION_1_2 = '1.2'
class Failure(Exception):
    """Raised when a Xen-API call returns a Status other than 'Success'.

    ``details`` is the ErrorDescription list from the server response.
    """
    def __init__(self, details):
        self.details = details
    def __str__(self):
        try:
            return str(self.details)
        except Exception, exn:
            # str() on arbitrary server data can itself fail; log and fall
            # back to a generic message rather than raising from __str__.
            import sys
            print >>sys.stderr, exn
            return "Xen-API failure: %s" % str(self.details)
    def _details_map(self):
        # Expose details positionally as {'0': ..., '1': ...}.
        return dict([(str(i), self.details[i])
                     for i in range(len(self.details))])
# Sentinel returned by _parse_result when the server reports SESSION_INVALID;
# callers re-login and retry.  A lambda is used purely to obtain a unique
# object that compares unequal to any genuine result value.
_RECONNECT_AND_RETRY = (lambda _ : ())
class UDSHTTPConnection(httplib.HTTPConnection):
    """HTTPConnection subclass to allow HTTP over Unix domain sockets. """
    def connect(self):
        # The socket path is encoded in the "host" with "_" standing in
        # for "/" (e.g. "_var_xapi_xapi" -> "/var/xapi/xapi").
        path = self.host.replace("_", "/")
        self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.sock.connect(path)
class UDSHTTP(httplib.HTTP):
    """Old-style httplib.HTTP wrapper bound to the Unix-socket connection."""
    _connection_class = UDSHTTPConnection
class UDSTransport(xmlrpclib.Transport):
    """xmlrpclib transport that tunnels XML-RPC over a Unix domain socket."""
    def __init__(self, use_datetime=0):
        self._use_datetime = use_datetime
        # Extra (key, value) HTTP headers sent with every request.
        self._extra_headers=[]
    def add_extra_header(self, key, value):
        self._extra_headers += [ (key,value) ]
    def make_connection(self, host):
        # "host" carries the encoded socket path; see UDSHTTPConnection.
        return UDSHTTP(host)
    def send_request(self, connection, handler, request_body):
        connection.putrequest("POST", handler)
        for key, value in self._extra_headers:
            connection.putheader(key, value)
class Session(xmlrpclib.ServerProxy):
    """A server proxy and session manager for communicating with xapi using
    the Xen-API.
    Example:
    session = Session('http://localhost/')
    session.login_with_password('me', 'mypassword')
    session.xenapi.VM.start(vm_uuid)
    session.xenapi.session.logout()
    """
    def __init__(self, uri, transport=None, encoding=None, verbose=0,
                 allow_none=1):
        xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding,
                                       verbose, allow_none)
        self.transport = transport
        # Opaque session reference returned by the server at login time.
        self._session = None
        # Login method/params are remembered so an expired session can be
        # transparently re-established from xenapi_request().
        self.last_login_method = None
        self.last_login_params = None
        self.API_version = API_VERSION_1_1
    def xenapi_request(self, methodname, params):
        # Session management calls are intercepted locally; any other call is
        # forwarded with the session handle prepended, retrying (up to 3
        # attempts) after re-login when the server reports SESSION_INVALID.
        if methodname.startswith('login'):
            self._login(methodname, params)
            return None
        elif methodname == 'logout' or methodname == 'session.logout':
            self._logout()
            return None
        else:
            retry_count = 0
            while retry_count < 3:
                full_params = (self._session,) + params
                result = _parse_result(getattr(self, methodname)(*full_params))
                if result == _RECONNECT_AND_RETRY:
                    retry_count += 1
                    if self.last_login_method:
                        self._login(self.last_login_method,
                                    self.last_login_params)
                    else:
                        raise xmlrpclib.Fault(401, 'You must log in')
                else:
                    return result
            raise xmlrpclib.Fault(
                500, 'Tried 3 times to get a valid session, but failed')
    def _login(self, method, params):
        # Perform a "session.<method>" call and store the returned handle.
        result = _parse_result(getattr(self, 'session.%s' % method)(*params))
        if result == _RECONNECT_AND_RETRY:
            raise xmlrpclib.Fault(
                500, 'Received SESSION_INVALID when logging in')
        self._session = result
        self.last_login_method = method
        self.last_login_params = params
        self.API_version = self._get_api_version()
    def _logout(self):
        try:
            if self.last_login_method.startswith("slave_local"):
                return _parse_result(self.session.local_logout(self._session))
            else:
                return _parse_result(self.session.logout(self._session))
        finally:
            # Always drop local state, even if the server-side logout failed.
            self._session = None
            self.last_login_method = None
            self.last_login_params = None
            self.API_version = API_VERSION_1_1
    def _get_api_version(self):
        # Ask the pool master for its major/minor Xen-API version.
        pool = self.xenapi.pool.get_all()[0]
        host = self.xenapi.pool.get_master(pool)
        major = self.xenapi.host.get_API_version_major(host)
        minor = self.xenapi.host.get_API_version_minor(host)
        return "%s.%s"%(major,minor)
    def __getattr__(self, name):
        if name == 'handle':
            return self._session
        elif name == 'xenapi':
            # Entry point for dotted API calls, e.g. session.xenapi.VM.start.
            return _Dispatcher(self.API_version, self.xenapi_request, None)
        elif name.startswith('login') or name.startswith('slave_local'):
            return lambda *params: self._login(name, params)
        else:
            return xmlrpclib.ServerProxy.__getattr__(self, name)
def xapi_local():
    """Return a Session talking to the local xapi over its Unix socket."""
    return Session("http://_var_xapi_xapi/", transport=UDSTransport())
def _parse_result(result):
if type(result) != dict or 'Status' not in result:
raise xmlrpclib.Fault(500, 'Missing Status in response from server' + result)
if result['Status'] == 'Success':
if 'Value' in result:
return result['Value']
else:
raise xmlrpclib.Fault(500,
'Missing Value in response from server')
else:
if 'ErrorDescription' in result:
if result['ErrorDescription'][0] == 'SESSION_INVALID':
return _RECONNECT_AND_RETRY
else:
raise Failure(result['ErrorDescription'])
else:
raise xmlrpclib.Fault(
500, 'Missing ErrorDescription in response from server')
# Based upon _Method from xmlrpclib.
class _Dispatcher:
    """Accumulates a dotted method name (e.g. "VM.start") one attribute
    access at a time, then dispatches the call through ``send``."""
    def __init__(self, API_version, send, name):
        self.__API_version = API_version
        self.__send = send
        self.__name = name
    def __repr__(self):
        if self.__name:
            return '<XenAPI._Dispatcher for %s>' % self.__name
        else:
            return '<XenAPI._Dispatcher>'
    def __getattr__(self, name):
        # Each attribute access extends the dotted path by one segment.
        if self.__name is None:
            return _Dispatcher(self.__API_version, self.__send, name)
        else:
            return _Dispatcher(self.__API_version, self.__send, "%s.%s" % (self.__name, name))
    def __call__(self, *args):
        return self.__send(self.__name, args)
| |
from django.db import connection
from django.template import Context, Template
from mezzanine.conf import settings
from mezzanine.core.models import CONTENT_STATUS_PUBLISHED
from mezzanine.core.request import current_request
from mezzanine.pages.models import Page, RichTextPage
from mezzanine.urls import PAGES_SLUG
from mezzanine.utils.tests import TestCase
class PagesTests(TestCase):
    """Tests for the pages app: ascendant lookup efficiency, parent/slug
    management, menu rendering and URL overriding.

    Note: ``assertEquals`` (a deprecated alias) has been replaced with
    ``assertEqual`` throughout.
    """

    def test_page_ascendants(self):
        """
        Test the methods for looking up ascendants efficiently
        behave as expected.
        """
        # Create related pages.
        primary, created = RichTextPage.objects.get_or_create(title="Primary")
        secondary, created = primary.children.get_or_create(title="Secondary")
        tertiary, created = secondary.children.get_or_create(title="Tertiary")
        # Force a site ID to avoid the site query when measuring queries.
        setattr(current_request(), "site_id", settings.SITE_ID)
        # Test that get_ascendants() returns the right thing.
        page = Page.objects.get(id=tertiary.id)
        ascendants = page.get_ascendants()
        self.assertEqual(ascendants[0].id, secondary.id)
        self.assertEqual(ascendants[1].id, primary.id)
        # Test ascendants are returned in order for slug, using
        # a single DB query.
        connection.queries = []
        pages_for_slug = Page.objects.with_ascendants_for_slug(tertiary.slug)
        self.assertEqual(len(connection.queries), 1)
        self.assertEqual(pages_for_slug[0].id, tertiary.id)
        self.assertEqual(pages_for_slug[1].id, secondary.id)
        self.assertEqual(pages_for_slug[2].id, primary.id)
        # Test page.get_ascendants uses the cached attribute,
        # without any more queries.
        connection.queries = []
        ascendants = pages_for_slug[0].get_ascendants()
        self.assertEqual(len(connection.queries), 0)
        self.assertEqual(ascendants[0].id, secondary.id)
        self.assertEqual(ascendants[1].id, primary.id)
        # Use a custom slug in the page path, and test that
        # Page.objects.with_ascendants_for_slug fails, but
        # correctly falls back to recursive queries.
        secondary.slug += "custom"
        secondary.save()
        pages_for_slug = Page.objects.with_ascendants_for_slug(tertiary.slug)
        self.assertEqual(len(pages_for_slug[0]._ascendants), 0)
        connection.queries = []
        ascendants = pages_for_slug[0].get_ascendants()
        self.assertEqual(len(connection.queries), 2)  # 2 parent queries
        self.assertEqual(pages_for_slug[0].id, tertiary.id)
        self.assertEqual(ascendants[0].id, secondary.id)
        self.assertEqual(ascendants[1].id, primary.id)

    def test_set_parent(self):
        """
        Test that reassigning a page's parent updates ``parent_id`` and
        rebuilds its slug, and that parent cycles raise ``AttributeError``.
        """
        old_parent, _ = RichTextPage.objects.get_or_create(title="Old parent")
        new_parent, _ = RichTextPage.objects.get_or_create(title="New parent")
        child, _ = RichTextPage.objects.get_or_create(title="Child",
                                                      slug="kid")
        self.assertTrue(child.parent is None)
        self.assertTrue(child.slug == "kid")
        child.set_parent(old_parent)
        child.save()
        self.assertEqual(child.parent_id, old_parent.id)
        self.assertTrue(child.slug == "old-parent/kid")
        child = RichTextPage.objects.get(id=child.id)
        self.assertEqual(child.parent_id, old_parent.id)
        self.assertTrue(child.slug == "old-parent/kid")
        child.set_parent(new_parent)
        child.save()
        self.assertEqual(child.parent_id, new_parent.id)
        self.assertTrue(child.slug == "new-parent/kid")
        child = RichTextPage.objects.get(id=child.id)
        self.assertEqual(child.parent_id, new_parent.id)
        self.assertTrue(child.slug == "new-parent/kid")
        child.set_parent(None)
        child.save()
        self.assertTrue(child.parent is None)
        self.assertTrue(child.slug == "kid")
        child = RichTextPage.objects.get(id=child.id)
        self.assertTrue(child.parent is None)
        self.assertTrue(child.slug == "kid")
        child = RichTextPage(title="child2")
        child.set_parent(new_parent)
        self.assertEqual(child.slug, "new-parent/child2")
        # Assert that cycles are detected.
        p1, _ = RichTextPage.objects.get_or_create(title="p1")
        p2, _ = RichTextPage.objects.get_or_create(title="p2")
        p2.set_parent(p1)
        with self.assertRaises(AttributeError):
            p1.set_parent(p1)
        with self.assertRaises(AttributeError):
            p1.set_parent(p2)
        p2c = RichTextPage.objects.get(title="p2")
        with self.assertRaises(AttributeError):
            p1.set_parent(p2c)

    def test_set_slug(self):
        """
        Test that changing a parent's slug cascades into its children's
        slugs.
        """
        parent, _ = RichTextPage.objects.get_or_create(title="Parent",
                                                       slug="parent")
        child, _ = RichTextPage.objects.get_or_create(title="Child",
                                                      slug="parent/child",
                                                      parent_id=parent.id)
        parent.set_slug("new-parent-slug")
        parent.save()
        self.assertTrue(parent.slug == "new-parent-slug")
        parent = RichTextPage.objects.get(id=parent.id)
        self.assertTrue(parent.slug == "new-parent-slug")
        child = RichTextPage.objects.get(id=child.id)
        self.assertTrue(child.slug == "new-parent-slug/child")

    def test_page_menu_queries(self):
        """
        Test that rendering a page menu executes the same number of
        queries regardless of the number of pages or levels of
        children.
        """
        template = ('{% load pages_tags %}'
                    '{% page_menu "pages/menus/tree.html" %}')
        before = self.queries_used_for_template(template)
        self.assertTrue(before > 0)
        self.create_recursive_objects(RichTextPage, "parent", title="Page",
                                      status=CONTENT_STATUS_PUBLISHED)
        after = self.queries_used_for_template(template)
        self.assertEqual(before, after)

    def test_page_menu_flags(self):
        """
        Test that pages only appear in the menu templates they've been
        assigned to show in.
        """
        menus = []
        pages = []
        template = "{% load pages_tags %}"
        for i, label, path in settings.PAGE_MENU_TEMPLATES:
            menus.append(i)
            pages.append(RichTextPage.objects.create(in_menus=list(menus),
                         title="Page for %s" % unicode(label),
                         status=CONTENT_STATUS_PUBLISHED))
            template += "{%% page_menu '%s' %%}" % path
        rendered = Template(template).render(Context({}))
        for page in pages:
            self.assertEqual(rendered.count(page.title), len(page.in_menus))

    def test_page_menu_default(self):
        """
        Test that the default value for the ``in_menus`` field is used
        and that it doesn't get forced to unicode.
        """
        old_menu_temp = settings.PAGE_MENU_TEMPLATES
        old_menu_temp_def = settings.PAGE_MENU_TEMPLATES_DEFAULT
        try:
            # MenusField initializes choices and default during model
            # loading, so we can't just override settings.
            from mezzanine.pages.models import BasePage
            from mezzanine.pages.fields import MenusField
            settings.PAGE_MENU_TEMPLATES = ((8, 'a', 'a'), (9, 'b', 'b'))
            settings.PAGE_MENU_TEMPLATES_DEFAULT = None
            class P1(BasePage):
                in_menus = MenusField(blank=True, null=True)
            self.assertEqual(P1().in_menus[0], 8)
            settings.PAGE_MENU_TEMPLATES_DEFAULT = tuple()
            class P2(BasePage):
                in_menus = MenusField(blank=True, null=True)
            self.assertEqual(P2().in_menus, None)
            settings.PAGE_MENU_TEMPLATES_DEFAULT = [9]
            class P3(BasePage):
                in_menus = MenusField(blank=True, null=True)
            self.assertEqual(P3().in_menus[0], 9)
        finally:
            settings.PAGE_MENU_TEMPLATES = old_menu_temp
            settings.PAGE_MENU_TEMPLATES_DEFAULT = old_menu_temp_def

    def test_overridden_page(self):
        """
        Test that a page with a slug matching a non-page urlpattern
        return ``True`` for its overridden property.
        """
        # BLOG_SLUG is empty then urlpatterns for pages are prefixed
        # with PAGE_SLUG, and generally won't be overridden. In this
        # case, there aren't any overridding URLs by default, so bail
        # on the test.
        if PAGES_SLUG:
            return
        page, created = RichTextPage.objects.get_or_create(slug="edit")
        self.assertTrue(page.overridden())
| |
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import socket
import sys
import traceback
from oslo.config import cfg
import six
from six import moves
_PY26 = sys.version_info[0:2] == (2, 6)
from oslo.log import _local
from oslo.log import _options
from oslo.log.openstack.common.gettextutils import _
from oslo.log.openstack.common import importutils
from oslo.log.openstack.common import jsonutils
# Global oslo.config object; register all logging-related options up
# front so they exist before setup() reads them.
CONF = cfg.CONF
CONF.register_cli_opts(_options.common_cli_opts)
CONF.register_cli_opts(_options.logging_cli_opts)
CONF.register_opts(_options.generic_log_opts)
CONF.register_opts(_options.log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')

try:
    NullHandler = logging.NullHandler
except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
    class NullHandler(logging.Handler):
        """Backport of logging.NullHandler: silently discards records."""

        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            # No lock is needed since this handler never does any work.
            self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
    """Resolve the log file path from CONF.log_file / CONF.log_dir.

    :param binary: program name used when only a directory is configured;
                   defaults to the running binary's name.
    :returns: a path string, or None when file logging is not configured.
    """
    logfile = CONF.log_file
    logdir = CONF.log_dir

    if logfile:
        # A bare log_file is used as-is; with log_dir it is joined.
        return os.path.join(logdir, logfile) if logdir else logfile

    if logdir:
        name = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, name),)

    return None
class BaseLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter with the synthesized AUDIT level and a 2.6 shim."""

    def audit(self, msg, *args, **kwargs):
        """Log *msg* at the AUDIT level (between INFO and WARNING)."""
        self.log(logging.AUDIT, msg, *args, **kwargs)

    def isEnabledFor(self, level):
        # LoggerAdapter.isEnabledFor() only exists on Python 2.7+; on 2.6
        # delegate straight to the wrapped logger (identical semantics).
        if not _PY26:
            return super(BaseLoggerAdapter, self).isEnabledFor(level)
        return self.logger.isEnabledFor(level)
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers creating the real logger until first use."""

    def __init__(self, name='unknown', version='unknown'):
        # Deliberately does NOT call BaseLoggerAdapter.__init__; the real
        # logger is built lazily by the ``logger`` property below.
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version

    @property
    def logger(self):
        # Build (and cache) the real ContextAdapter on first access.
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
            if six.PY3:
                # In Python 3, the code fails because the 'manager' attribute
                # cannot be found when using a LoggerAdapter as the
                # underlying logger. Work around this issue.
                self._logger.manager = self._logger.logger.manager
        return self._logger
class ContextAdapter(BaseLoggerAdapter):
    """LoggerAdapter that folds request-context data into each record."""

    # Keep the legacy ``warn`` spelling as an alias for ``warning``.
    warn = logging.LoggerAdapter.warning

    def __init__(self, logger, project_name, version_string):
        # LoggerAdapter.__init__ is intentionally skipped: the ``extra``
        # dict is rebuilt per call in process().
        self.logger = logger
        self.project = project_name
        self.version = version_string
        self._deprecated_messages_sent = dict()

    @property
    def handlers(self):
        # Expose the wrapped logger's handlers (LoggerAdapter hides them).
        return self.logger.handlers

    def deprecated(self, msg, *args, **kwargs):
        """Call this method when a deprecated feature is used.

        If the system is configured for fatal deprecations then the message
        is logged at the 'critical' level and :class:`DeprecatedConfig` will
        be raised.

        Otherwise, the message will be logged (once) at the 'warn' level.

        :raises: :class:`DeprecatedConfig` if the system is configured for
                 fatal deprecations.
        """
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)

        # Using a list because a tuple with dict can't be stored in a set.
        sent_args = self._deprecated_messages_sent.setdefault(msg, list())

        if args in sent_args:
            # Already logged this message, so don't log it again.
            return

        sent_args.append(args)
        self.warn(stdmsg, *args, **kwargs)

    def process(self, msg, kwargs):
        """Inject context, instance and project info into kwargs['extra']."""
        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
        #                before it can get to the python logging and
        #                possibly cause string encoding trouble
        if not isinstance(msg, six.text_type):
            msg = six.text_type(msg)

        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']

        # An explicit context kwarg wins; otherwise use thread-local store.
        context = kwargs.pop('context', None)
        if not context:
            context = getattr(_local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))

        instance = kwargs.pop('instance', None)
        instance_uuid = (extra.get('instance_uuid') or
                         kwargs.pop('instance_uuid', None))
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        elif instance_uuid:
            instance_extra = (CONF.instance_uuid_format
                              % {'uuid': instance_uuid})
        extra['instance'] = instance_extra

        extra.setdefault('user_identity', kwargs.pop('user_identity', None))

        extra['project'] = self.project
        extra['version'] = self.version
        # Formatters also expect the full dict nested under 'extra'.
        extra['extra'] = extra.copy()
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Formatter that renders every log record as a JSON document."""

    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt

    def formatException(self, ei, strip_newlines=True):
        """Return the traceback for *ei* as a list of lines."""
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            # Drop empty lines and flatten the per-entry line lists.
            lines = [moves.filter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines

    def format(self, record):
        """Serialize *record* (plus any traceback) to a JSON string."""
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}

        if hasattr(record, 'extra'):
            message['extra'] = record.extra

        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)

        return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    """Build a sys.excepthook that logs uncaught exceptions as CRITICAL."""
    def logging_excepthook(exc_type, value, tb):
        summary = "".join(traceback.format_exception_only(exc_type, value))
        # Pass the full exc_info so formatters can render the traceback.
        getLogger(product_name).critical(summary,
                                         exc_info=(exc_type, value, tb))
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging config file cannot be loaded."""

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
def _load_log_config(log_config_append):
    """Load a fileConfig-style logging config, wrapping failures.

    :raises: LogConfigError when the file is missing or malformed.
    """
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except (moves.configparser.Error, KeyError) as exc:
        raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
    """Setup logging.

    A config file given via CONF.log_config_append takes precedence over
    the individual CONF logging options; either way, uncaught exceptions
    are routed to the product's logger.
    """
    if CONF.log_config_append:
        _load_log_config(CONF.log_config_append)
    else:
        _setup_logging_from_conf(product_name, version)
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string=None,
                 default_log_levels=None):
    """Override the built-in defaults of the logging options.

    Each argument is only applied when it is not None.
    """
    # Just in case the caller is not setting the
    # default_log_level. This is insurance because
    # we introduced the default_log_level parameter
    # later in a backwards in-compatible change
    if default_log_levels is not None:
        cfg.set_defaults(
            _options.log_opts,
            default_log_levels=default_log_levels)
    if logging_context_format_string is not None:
        cfg.set_defaults(
            _options.log_opts,
            logging_context_format_string=logging_context_format_string)
def _find_facility_from_conf():
    """Map CONF.syslog_log_facility to a SysLogHandler facility code.

    Accepts either a LOG_* constant name or one of the lower-case names
    from SysLogHandler.facility_names.

    :raises: TypeError when the configured facility name is unknown.
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)

    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)

    if facility is None:
        # dict.keys() is a view on Python 3 and has no extend(); make a
        # real list before appending the LOG_* constant spellings.
        valid_facilities = list(facility_names.keys())
        # NOTE: 'LOG_AUTH' used to appear twice here, duplicating it in
        # the user-facing error message; list each constant once.
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))

    return facility
class RFCSysLogHandler(logging.handlers.SysLogHandler):
    """SysLogHandler that prefixes each message with the binary name."""

    def __init__(self, *args, **kwargs):
        # Capture the program name once at construction time.
        self.binary_name = _get_binary_name()
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        # is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)

    def format(self, record):
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        # is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        msg = logging.handlers.SysLogHandler.format(self, record)
        msg = self.binary_name + ' ' + msg
        return msg
def _setup_logging_from_conf(project, version):
    """Wire up root-logger handlers, formatters and levels from CONF."""
    log_root = getLogger(None).logger
    # Start from a clean slate: drop any pre-existing handlers.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)

    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)

    if CONF.use_stderr:
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)

    elif not logpath:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)

    if CONF.publish_errors:
        # Prefer the local handler; fall back to oslo.messaging's.
        try:
            handler = importutils.import_object(
                "oslo.log.openstack.common.log_handler.PublishErrorsHandler",
                logging.ERROR)
        except ImportError:
            handler = importutils.import_object(
                "oslo.messaging.notify.log_handler.PublishErrorsHandler",
                logging.ERROR)
        log_root.addHandler(handler)

    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(project=project,
                                                  version=version,
                                                  datefmt=datefmt))

    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)

    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        logger = logging.getLogger(mod)
        # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
        # to integer code.
        if sys.version_info < (2, 7):
            level = logging.getLevelName(level_name)
            logger.setLevel(level)
        else:
            logger.setLevel(level_name)

    if CONF.use_syslog:
        try:
            facility = _find_facility_from_conf()
            # TODO(bogdando) use the format provided by RFCSysLogHandler
            # after existing syslog format deprecation in J
            if CONF.use_syslog_rfc_format:
                syslog = RFCSysLogHandler(facility=facility)
            else:
                syslog = logging.handlers.SysLogHandler(facility=facility)
            log_root.addHandler(syslog)
        except socket.error:
            log_root.error('Unable to add syslog handler. Verify that syslog'
                           'is running.')
# Cache of ContextAdapter instances, keyed by logger name.
_loggers = {}


def getLogger(name='unknown', version='unknown'):
    """Return the cached ContextAdapter for *name*, creating it on demand.

    Note: *version* only matters the first time a name is seen; later
    calls reuse the cached adapter unchanged.
    """
    try:
        return _loggers[name]
    except KeyError:
        adapter = ContextAdapter(logging.getLogger(name), name, version)
        _loggers[name] = adapter
        return adapter
def getLazyLogger(name='unknown', version='unknown'):
    """Returns lazy logger.

    Creates a pass-through logger that does not create the real logger
    until it is really needed and delegates all calls to the real logger
    once it is created.
    """
    return LazyAdapter(name, version)
class WritableLogger(object):
    """A thin wrapper that responds to `write` and logs."""

    def __init__(self, logger, level=logging.INFO):
        # Target logger and the level every write() is logged at.
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Strip trailing whitespace/newlines that file-like callers append,
        # so no blank log lines are emitted.
        self.logger.log(self.level, msg.rstrip())
class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string. You can also specify
    logging_debug_format_suffix to append extra formatting if the log level is
    debug.

    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter

    If available, uses the context value stored in TLS - local.store.context
    """

    def __init__(self, *args, **kwargs):
        """Initialize ContextFormatter instance

        Takes additional keyword arguments which can be used in the message
        format string.

        :keyword project: project name
        :type project: string
        :keyword version: project version
        :type version: string
        """
        self.project = kwargs.pop('project', 'unknown')
        self.version = kwargs.pop('version', 'unknown')

        logging.Formatter.__init__(self, *args, **kwargs)

    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""
        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
        #                before it can get to the python logging and
        #                possibly cause string encoding trouble
        if not isinstance(record.msg, six.text_type):
            record.msg = six.text_type(record.msg)

        # store project info
        record.project = self.project
        record.version = self.version

        # store request info
        context = getattr(_local.store, 'context', None)
        if context:
            d = _dictify_context(context)
            for k, v in d.items():
                setattr(record, k, v)

        # NOTE(sdague): default the fancier formatting params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color', 'user_identity'):
            if key not in record.__dict__:
                record.__dict__[key] = ''

        # Context-aware format only when a request_id is present.
        if record.__dict__.get('request_id'):
            fmt = CONF.logging_context_format_string
        else:
            fmt = CONF.logging_default_format_string

        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            fmt += " " + CONF.logging_debug_format_suffix

        # Python 3.2+ formatters keep the format in a style object, so the
        # per-record format has to be injected differently per version.
        if sys.version_info < (3, 2):
            self._fmt = fmt
        else:
            self._style = logging.PercentStyle(fmt)
            self._fmt = self._style._fmt
        # Cache this on the record, Logger will respect our formatted copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)

    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        if not record:
            return logging.Formatter.formatException(self, exc_info)

        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()

        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)

        formatted_lines = []
        for line in lines:
            # Prefix every traceback line with the configured prefix.
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
    """StreamHandler that exposes an ANSI color code as ``record.color``."""

    # One ANSI escape per supported level; levels not listed here would
    # raise KeyError in format() below.
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised when a deprecated config option is used fatally.

    :param msg: description of the deprecated feature, interpolated into
                ``message``.
    """

    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        # Was ``super(Exception, self)``, which starts the MRO search
        # *past* Exception (hitting BaseException). Target this class so
        # the normal Exception initializer runs.
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))
| |
"""
JUnitTestLoader
---------------
Test loader for nose to be able to register not only native nose's tests, but
compiled unit tests with JUnit-compatible definitions.
"""
import logging, os, os.path, subprocess, re, unittest, types, sys, time, timeout_monitor
from nose.loader import TestLoader
from JUnitSelector import JUnitSelector
from testsupport import TESTROOT, SBROOT
from sandbox import Sandbox
from nose.failure import Failure
from booleansimplifier import process_expression, ValidateBooleanException
_timeout_monitor = None
class _ProcAbort:
def __init__(self):
self.proc = None
def __call__(self):
if self.proc:
self.proc.terminate()
log = logging.getLogger('nose')
def install_all_modules(fullPath, className):
    """Register every package level of dotted *className* in sys.modules.

    Synthesizes missing intermediate modules (pointing their __file__ /
    __path__ at *fullPath*) and links each level into its parent's
    namespace, so the compiled test's module path can be imported.
    Returns the innermost module created/found.
    """
    log.debug("install_all_modules(%s, %s)", fullPath, className)
    module = None
    prevModule = None
    moduleNameParts = className.split('.')
    log.debug("install_all_modules() moduleNameParts = %s", moduleNameParts)
    # Walk the prefixes: 'a', 'a.b', ... (skipping the empty prefix).
    for n in range(len(moduleNameParts))[1:]:
        moduleName = '.'.join(moduleNameParts[:n])
        log.debug("install_all_modules() moduleName = %s", moduleName)
        if moduleName in sys.modules.keys():
            module = sys.modules[moduleName]
        else:
            # Create a stub module, register it, then re-import so the
            # canonical sys.modules entry is used from here on.
            module = types.ModuleType(moduleName)
            module.__path__ = module.__file__ = os.path.normpath(fullPath)
            sys.modules.setdefault(moduleName, module)
            log.debug("registerModule(): module is %s", repr(module))
            __import__(moduleName)
            module = sys.modules[moduleName]
        log.debug("install_all_modules() prevModule is %s", prevModule)
        log.debug("install_all_modules() module to add is %s",
                  moduleNameParts[n - 1])
        if prevModule:
            # Attach this level as an attribute of its parent package.
            prevModule.__dict__[moduleNameParts[n - 1]] = module
        prevModule = module
        log.debug("install_all_modules(): module is: %s (full name is %s)",
                  repr(module), sys.modules[moduleName].__dict__)
    return module
class JUnitTestLoader(TestLoader):
    """Test loader that extends native nose.TestLoader to load and process
    execution results of compiled unit tests designed for JUnit.
    """
    # Root directory of the compiled JUnit test tree (set from config).
    compiledTestsRoot = None
    exclude = None
    ignoreFiles = None
    include = None

    def __init__(self, config = None, importer = None,
                 workingDir = None, selector = JUnitSelector):
        """Initialize a test loader
        """
        TestLoader.__init__(self, config, importer, workingDir, selector)
        log.debug("JUnitTestLoader.__init__(): config = %s", config)
        if hasattr(config.options, 'compiled_root'):
            self.compiledTestsRoot = config.options.compiled_root
            if os.name == "nt":
                self.compiledTestsRoot = self.compiledTestsRoot.replace("/", "\\")
            log.debug("Compiled root is %s, os is %s", self.compiledTestsRoot, os.name)
        # Each element of config.options.attr is a comma-separated list of
        # enabled or disabled test tags; AND them all together.
        target_expression = ''
        for expression in config.options.eval_attr:
            if target_expression:
                target_expression = target_expression + '&'
            target_expression = target_expression + '(' + expression + ')'
        self.tags = process_expression(target_expression)
        if not self.tags:
            self.tags = 'checkin&!interactive'
            log.debug("JUnitTestLoader.__init__(): Set default test tasgs")
        log.debug("JUnitTestLoader.__init__(): self.tags: %s", self.tags)

    def makeTestCase(self, singleTestOutput, testExceptions):
        """Creates test cases according to passed test output

        Synthesizes a unittest.TestCase subclass (as source text, then
        exec'd into the test's module namespace) with one method per
        "Testcase:" entry found in *singleTestOutput*; failed entries
        become methods that call self.fail() with the captured output.
        """
        outputLines = singleTestOutput.splitlines()
        pathSep = '\\' if os.name == 'nt' else '/'
        # First line is the dotted class name; map it to a file path.
        fileName = outputLines[0].replace('.', pathSep)
        log.debug("JUnitTestLoader.makeTestCase(): fileName = %s", fileName)
        testName = outputLines[0].split('.')[-1]
        log.debug("JUnitTestLoader.makeTestCase(): testName = %s", testName)
        # NOTE(review): the generated source below uses one-space
        # indentation per level (matching the ' pass' literal later on);
        # confirm against the original file.
        classDefinition = """
import unittest, subprocess, logging, re, tempfile, os, re, unittest
from nose import SkipTest
if 'log' not in globals().keys():
 log = logging.getLogger('nose')
class %s(unittest.TestCase):
""" % testName.replace('-', '_')
        log.debug("JUnitTestLoader.makeTestCase(): before install_all_modules()")
        module = install_all_modules(os.path.join(self.compiledTestsRoot,
                                                  fileName),
                                     outputLines[0])
        log.debug("JUnitTestLoader.makeTestCase(): module is %s", module)
        log.debug("JUnitTestLoader.makeTestCase(): module.__dict__ is %s",
                  module.__dict__)
        log.debug("JUnitTestLoader.makeTestCase(): module.__dict__ finished")
        testCases = []
        outputLines = outputLines[1:]
        outputLinesLen = len(outputLines)
        found_test_defs = False
        for n, testDefinitionString in enumerate(outputLines):
            # Skipping lines not related to test cases
            if not testDefinitionString.startswith("Testcase: "):
                continue
            ## handle test results
            # Collect all lines up to the next "Testcase:" marker.
            i = n + 1
            currentTestOutputLines = []
            while i < outputLinesLen:
                if outputLines[i].startswith("Testcase: "):
                    break
                i = i + 1
            currentTestOutputLines = filter(None, outputLines[n + 1:i])
            for i, line in enumerate(currentTestOutputLines):
                currentTestOutputLines[i] = line.strip()
            failedStatus = False
            log.debug("JUnitTestLoader.makeTestCase(): currentTestOutputLines:")
            for line in currentTestOutputLines:
                log.debug(line)
            log.debug("")
            log.debug("")
            # FAILED/ERROR on the first output line marks a failing test.
            if currentTestOutputLines and (currentTestOutputLines[0] == 'FAILED' or 'ERROR' in currentTestOutputLines[0]):
                failedStatus = True
                currentTestOutputLines[0] = ''
            testMethodName = testDefinitionString.split(' ')[1]
            currentTestOutput = '\n'.join(currentTestOutputLines).strip()
            found_test_defs = True
            classDefinition += """ def %s(self):
  testOutput = '''%s'''""" % (testMethodName, currentTestOutput)
            if failedStatus:
                classDefinition += """
  self.fail(msg=testOutput)
"""
            else:
                classDefinition += """
  if testOutput:
   print testOutput
"""
        if not found_test_defs:
            # Keep the generated class syntactically valid when empty.
            classDefinition += ' pass\n'
        print('classDefinition =\n' + classDefinition)
        log.debug("JUnitTestLoader.makeTestCase(): classDefinition is %s",
                  classDefinition)
        # Execute the generated class inside the compiled test's module
        # namespace (Python 2 exec statement).
        context = module.__dict__
        exec classDefinition in context
        for testDefinitionString in outputLines[1:]:
            if not testDefinitionString.startswith("Testcase: "):
                continue
            testMethodName = testDefinitionString.split(' ')[1]
            testCase = context[testName](methodName=testMethodName)
            log.debug("JUnitTestLoader.makeTestCase() testCase is %s",
                      testCase)
            log.debug("JUnitTestLoader.makeTestCase() testCase contents is %s",
                      testCase.__dict__)
            testCases.append(testCase)
        log.debug("JUnitTestLoader.makeTestCase(): %s ", repr(testCases))
        return testCases

    def prepareCommandLine(self):
        """Build the build.py invocation that compiles and runs the suite."""
        log.debug("JUnitTestLoader.prepareCommandLine()")
        build_py = SBROOT + '/code/buildscripts/build.py'
        if os.name == 'nt':
            build_py = build_py.replace('/', '\\')
        ##TODO add parameter to select tests by tags
        commandLine = ['python', build_py,
                       # Next line is a hack: initial space changes option into
                       # target to build (from the point of view of build.py
                       # script)
                       ##FIXME
                       ' -Drun.test.categories="%s"' % self.tags,
                       'test', '-v']
        return commandLine

    def executeTests(self, commandLine):
        """Run *commandLine* under the sandbox timeout; return its output.

        :raises: Exception when the build itself (not an individual JUnit
                 test) fails.
        """
        log.debug("JUnitTestLoader.executeTests() with commandLine=%s", commandLine)
        # Initialize our state.
        start = time.time()
        sb = Sandbox(SBROOT)
        sb.set_last_test_date(start)
        global _timeout_monitor
        _timeout_monitor = None
        testOutput = ""
        err = 0
        try:
            # Start up a thread that will force us to exit if we hang.
            pabrt = _ProcAbort()
            _timeout_monitor = timeout_monitor.start(sb.get_test_timeout_seconds(), killfunc=pabrt)
            # Always run tests in alphabetical order, for predictability
            # and ease of explanation.
            proc = subprocess.Popen(commandLine,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            _timeout_monitor.last_status = time.time()
            pabrt.proc = proc
            testOutput, stderr = proc.communicate()
            err = proc.returncode
        except Exception as e:
            log.debug("JUnitTestLoader.executeTests(): Got exception: %s", str(e))
            err = 1
        finally:
            if _timeout_monitor:
                _timeout_monitor.stop()
        # Ant reports BUILD FAILED when a JUnit test fails; that is not a
        # build error, so clear the error flag in that case.
        if "[junit] '-classpath" in testOutput and 'BUILD FAILED' in testOutput:
            err = 0
            log.debug("JUnitTestLoader.executeTests(): Actually it's JUnit test failed, all is fine.")
        if err != 0:
            raise Exception("Building compiled test suite failed!")
        return testOutput

    def filterTestOutput(self, testOutput):
        """Split raw ant output into per-testsuite chunks.

        Returns (per-suite output strings, exceptions); the exceptions
        list is currently always empty (see TODO below).
        """
        # Keep only the '[junit]'-tagged lines.
        singleTestOutput = []
        for line in filter(None, testOutput.splitlines()):
            prepared = line.strip()
            if prepared.startswith('[junit]'):
                singleTestOutput.append(prepared)
        testOutput = '\n'.join(singleTestOutput).strip()
        # Split into one chunk per test suite, then strip the junit tags.
        singleTestOutput = re.split(r'\[junit\] Testsuite: ', testOutput)[1:]
        re_junit_mark = re.compile(r'^\[junit\] ?')
        for n, output in enumerate(singleTestOutput):
            lines = output.splitlines()
            for m, line in enumerate(lines):
                lines[m] = re.sub(re_junit_mark, '', line)
            singleTestOutput[n] = '\n'.join(filter(None, lines)).strip()
        singleTestExceptions = []
        ##TODO Extract exceptions from the last element of singleTestOutput and
        ## populate singleTestExceptions
        return filter(None, singleTestOutput), singleTestExceptions

    def makeTestCases(self, testOutput, testExceptions):
        """Turn every per-suite output chunk into TestCase instances."""
        tests = []
        for singleTestOutput in testOutput:
            testCases = self.makeTestCase(singleTestOutput, testExceptions)
            if testCases:
                for test in testCases:
                    tests.append(test)
        return tests

    def loadTestsFromFile(self, filename):
        """
        We'll exploit the knowledge of the selector implementation details:
        since the selector 'wants' only root build.xml file for JUnit-based
        tests, we can safely check initials and then fall back to inherited
        behavior or collect output for all tests.
        """
        log.debug("JUnitTestLoader.loadTestsFromFile(%s)", filename)
        if not filename.startswith(self.compiledTestsRoot):
            log.debug("JUnitTestLoader.loadTestsFromFile(): fall back to predefined behavior")
            log.debug("JUnitTestLoader.loadTestsFromFile(): compiledTestsRoot is %s", self.compiledTestsRoot)
            return TestLoader.loadTestsFromFile(self, filename)
        commandLine = self.prepareCommandLine()
        try:
            testOutput = self.executeTests(commandLine)
        except Exception as e:
            log.debug("Building JUnit test suite failed with message: %s",
                      str(e))
            # Surface the build failure as a single failing "test".
            return self.suiteClass(Failure(Exception,"Couldn't build compiled test suite."))
        log.debug("JUnitTestLoader.loadTestsFromFile(): got ant output:\n%s",
                  testOutput)
        singleTestOutput, singleTestExceptions = self.filterTestOutput(testOutput)
        log.debug("JUnitTestLoader.loadTestsFromFile(): JUnit filtered output:")
        for line in singleTestOutput:
            log.debug("%s\n", line)
        log.debug("JUnitTestLoader.loadTestsFromFile(): %s",
                  "JUnit filtered output finished.")
        loadedTests = self.makeTestCases(singleTestOutput,
                                         singleTestExceptions)
        log.debug("JUnitTestLoader.loadTestsFromFile(): loaded tests %s",
                  loadedTests)
        log.debug("JUnitTestLoader.loadTestsFromFile(): suite class is %s",
                  repr(self.suiteClass))
        return self.suiteClass(loadedTests)
| |
from enum import Enum
import itertools
import copy
# shapes must be regularly sized
# each unique rotation has a separate entry here
class Shapes(Enum):
    """Tetromino shapes, one member per distinct rotation.

    Each value is a list of rows; each row is a tuple of booleans
    marking occupied cells. All rows of a shape have the same length.
    """

    # L piece (4 rotations)
    L0 = [(True, False, False),
          (True, True, True)]
    L1 = [(False, True),
          (False, True),
          (True, True)]
    L2 = [(True, True, True),
          (False, False, True)]
    L3 = [(True, True),
          (True, False),
          (True, False)]
    # J piece (4 rotations)
    J0 = [(True, True, True),
          (True, False, False)]
    J1 = [(True, False),
          (True, False),
          (True, True)]
    J2 = [(False, False, True),
          (True, True, True)]
    J3 = [(True, True),
          (False, True),
          (False, True)]
    # O piece (square, rotation-invariant)
    O = [(True, True),
         (True, True)]
    # I piece (2 rotations)
    I0 = [(True, True, True, True)]
    I1 = [(True,), (True,), (True,), (True,)]
    # S piece (2 rotations)
    S0 = [(True, True, False),
          (False, True, True)]
    S1 = [(False, True),
          (True, True),
          (True, False)]
    # Z piece (2 rotations)
    Z0 = [(False, True, True),
          (True, True, False)]
    Z1 = [(True, False),
          (True, True),
          (False, True)]
    # T piece (4 rotations)
    T0 = [(False, True, False),
          (True, True, True)]
    T1 = [(True, False),
          (True, True),
          (True, False)]
    T2 = [(False, True),
          (True, True),
          (False, True)]
    T3 = [(True, True, True),
          (False, True, False)]
# characters to differentiate shapes in the grid
characters = "*#+x%&"


def pack_shape(grid, shape, target_col, char_index):
    """Drop *shape* into *grid* at column *target_col*.

    Returns a new grid (deep copy) with the shape's cells stamped with
    ``characters[char_index]``, or None when the shape does not fit.
    """
    cells = shape.value
    rows_needed = len(cells)
    cols_needed = len(cells[0])
    total_rows = len(grid)
    total_cols = len(grid[0])

    # Shape larger than the grid, or spilling past the right edge: no fit.
    if rows_needed > total_rows or cols_needed > total_cols:
        return None
    if target_col + cols_needed > total_cols:
        return None

    landing_row = find_bottom_row(grid, shape, target_col)
    if landing_row is None:
        return None
    if landing_row + rows_needed - 1 >= total_rows:
        # Shape would extend past the top of the grid.
        return None

    stamped = copy.deepcopy(grid)
    mark = characters[char_index]
    for row_offset, shape_row in enumerate(cells):
        for col_offset, occupied in enumerate(shape_row):
            if occupied:
                stamped[landing_row + row_offset][target_col + col_offset] = mark
    return stamped
def find_bottom_row(grid, shape, target_col):
    """Row index where *shape* comes to rest in column *target_col*.

    Scans candidate rows from the top index downward; the first row that
    collides with existing cells pushes the shape one row up. Returns 0
    when the column is completely clear, or None when the first
    collision would push the shape off the grid.
    """
    height = len(grid)
    for candidate in range(height - 1, -1, -1):
        collision = any(
            cell and occupied
            for grid_row, shape_row in zip(grid[candidate:], shape.value)
            for cell, occupied in zip(grid_row[target_col:], shape_row)
        )
        if collision:
            above = candidate + 1
            return above if above < height else None
    return 0
def covered(grid):
    """True when any column has an occupied cell above an empty one.

    Such an overhang can never be filled by dropping further shapes, so
    the packing search prunes these grids.
    """
    for column in zip(*grid):
        hole_seen = False
        for cell in column:
            if cell is False:
                hole_seen = True
            elif hole_seen:
                return True
    return False
def recursive_pack(grid, filled_shapes):
    """Yield every completely filled grid reachable from *grid*.

    Tries each shape at the leftmost empty column of the last row and
    recurses until the grid is full; *filled_shapes* records the
    (shape, column) placements so far and selects the marker character
    for the next shape. Grids with unfillable overhangs are pruned.
    """
    try:
        target_col = grid[-1].index(False)
    except ValueError:
        return  # last row is already full: nothing to place from here
    for shape in Shapes:
        candidate = pack_shape(grid, shape, target_col, len(filled_shapes))
        if candidate is None or covered(candidate):
            continue
        if all(itertools.chain.from_iterable(candidate)):
            yield candidate
        else:
            placements = filled_shapes + [(shape, target_col)]
            for packed in recursive_pack(candidate, placements):
                yield packed
if __name__ == "__main__":
    width = 10
    height = 2
    # Start from an empty width x height board of False cells.
    grid = [[False] * width for _ in range(height)]
    count = 0
    for solution in recursive_pack(grid, []):
        count += 1
        # Row 0 is the bottom of the board, so print rows top-down.
        for row in reversed(solution):
            print("".join(row))
        print(count)
"""
solution:
*###+xxx%%
***#+++x%%
1
*###++x%%%
***#++xxx%
2
*###++xx%%
***#++xx%%
3
*###++%%%%
***#++xxxx
4
*###xxxx%%
***#++++%%
5
**#+++x%%%
**###+xxx%
6
**#+++xx%%
**###+xx%%
7
**#+++%%%%
**###+xxxx
8
**##+xxx%%
**##+++x%%
9
**##++x%%%
**##++xxx%
10
**##++xx%%
**##++xx%%
11
**##++%%%%
**##++xxxx
12
**##xxxx%%
**##++++%%
13
**++++x%%%
**####xxx%
14
**++++xx%%
**####xx%%
15
**++++%%%%
**####xxxx
16
####+xxx%%
****+++x%%
17
####++x%%%
****++xxx%
18
####++xx%%
****++xx%%
19
####++%%%%
****++xxxx
20
####xxxx%%
****++++%%
21
"""
| |
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging as real_logging
import os
import sys
import time
from telemetry.core import local_server
from telemetry.core import memory_cache_http_server
from telemetry.core import network_controller
from telemetry.core import tracing_controller
from telemetry.core import util
from telemetry.internal.platform import (platform_backend as
platform_backend_module)
from py_utils import discover
# Host platform singleton, created lazily by _InitHostPlatformIfNeeded().
_HOST_PLATFORM = None
# Remote platform is a dictionary from device ids to remote platform instances.
_REMOTE_PLATFORMS = {}


def _InitHostPlatformIfNeeded():
    """Create the module-level host Platform singleton on first use.

    :raises NotImplementedError: when no backend claims the host.
    """
    global _HOST_PLATFORM  # pylint: disable=global-statement
    if _HOST_PLATFORM:
        return
    backend = None
    for backend_class in _IterAllPlatformBackendClasses():
        if backend_class.IsPlatformBackendForHost():
            backend = backend_class()
            break
    if not backend:
        raise NotImplementedError()
    _HOST_PLATFORM = Platform(backend)
def GetHostPlatform():
    """Return the (lazily created) Platform for the machine running this code."""
    _InitHostPlatformIfNeeded()
    return _HOST_PLATFORM
def _IterAllPlatformBackendClasses():
    """Iterate over all PlatformBackend subclasses discoverable on disk."""
    platform_dir = os.path.dirname(os.path.realpath(
        platform_backend_module.__file__))
    # DiscoverClasses returns a dict; itervalues() keeps this Python 2-only.
    return discover.DiscoverClasses(
        platform_dir, util.GetTelemetryDir(),
        platform_backend_module.PlatformBackend).itervalues()
def GetPlatformForDevice(device, finder_options, logging=real_logging):
    """ Returns a platform instance for the device.

    Results are cached per device.guid in _REMOTE_PLATFORMS; returns None
    when no backend supports the device.

    Args:
      device: a device.Device instance.
      finder_options: options passed through to the backend factory.
      logging: logging module, injectable for tests.
    """
    if device.guid in _REMOTE_PLATFORMS:
        return _REMOTE_PLATFORMS[device.guid]
    try:
        for platform_backend_class in _IterAllPlatformBackendClasses():
            if platform_backend_class.SupportsDevice(device):
                _REMOTE_PLATFORMS[device.guid] = (
                    platform_backend_class.CreatePlatformForDevice(device,
                                                                   finder_options))
                return _REMOTE_PLATFORMS[device.guid]
        return None
    except Exception:
        current_exception = sys.exc_info()
        logging.error('Fail to create platform instance for %s.', device.name)
        # Python 2 three-expression raise: re-raise with original traceback.
        raise current_exception[0], current_exception[1], current_exception[2]
class Platform(object):
"""The platform that the target browser is running on.
Provides a limited interface to interact with the platform itself, where
possible. It's important to note that platforms may not provide a specific
API, so check with IsFooBar() for availability.
"""
def __init__(self, platform_backend):
    """Wrap *platform_backend* and build the shared controllers.

    The backend is initialized and given a back-reference to this
    Platform; network, tracing and local-server controllers are layered
    on the backend's corresponding sub-backends.
    """
    self._platform_backend = platform_backend
    self._platform_backend.InitPlatformBackend()
    self._platform_backend.SetPlatform(self)
    self._network_controller = network_controller.NetworkController(
        self._platform_backend.network_controller_backend)
    self._tracing_controller = tracing_controller.TracingController(
        self._platform_backend.tracing_controller_backend)
    self._local_server_controller = local_server.LocalServerController(
        self._platform_backend)
    # Set up lazily by whatever needs port forwarding.
    self._forwarder = None
@property
def is_host_platform(self):
return self == GetHostPlatform()
@property
def network_controller(self):
"""Control network settings and servers to simulate the Web."""
return self._network_controller
@property
def tracing_controller(self):
return self._tracing_controller
def Initialize(self):
pass
def CanMonitorThermalThrottling(self):
"""Platforms may be able to detect thermal throttling.
Some fan-less computers go into a reduced performance mode when their heat
exceeds a certain threshold. Performance tests in particular should use this
API to detect if this has happened and interpret results accordingly.
"""
return self._platform_backend.CanMonitorThermalThrottling()
def GetSystemLog(self):
return self._platform_backend.GetSystemLog()
def IsThermallyThrottled(self):
"""Returns True if the device is currently thermally throttled."""
return self._platform_backend.IsThermallyThrottled()
def HasBeenThermallyThrottled(self):
"""Returns True if the device has been thermally throttled."""
return self._platform_backend.HasBeenThermallyThrottled()
def GetDeviceTypeName(self):
"""Returns a string description of the Platform device, or None.
Examples: Nexus 7, Nexus 6, Desktop"""
return self._platform_backend.GetDeviceTypeName()
def GetArchName(self):
"""Returns a string description of the Platform architecture.
Examples: x86_64 (posix), AMD64 (win), armeabi-v7a, x86"""
return self._platform_backend.GetArchName()
def GetOSName(self):
"""Returns a string description of the Platform OS.
Examples: WIN, MAC, LINUX, CHROMEOS"""
return self._platform_backend.GetOSName()
def GetDeviceId(self):
"""Returns a string identifying the device.
Examples: 0123456789abcdef"""
return self._platform_backend.GetDeviceId()
def GetOSVersionName(self):
"""Returns a logically sortable, string-like description of the Platform OS
version.
Examples: VISTA, WIN7, LION, MOUNTAINLION"""
return self._platform_backend.GetOSVersionName()
def GetOSVersionDetailString(self):
"""Returns more detailed information about the OS version than
GetOSVersionName, if available. Otherwise returns the empty string.
Examples: '10.12.4' on macOS."""
return self._platform_backend.GetOSVersionDetailString()
def GetSystemTotalPhysicalMemory(self):
"""Returns an integer with the total physical memory in bytes."""
return self._platform_backend.GetSystemTotalPhysicalMemory()
def CanFlushIndividualFilesFromSystemCache(self):
"""Returns true if the disk cache can be flushed for individual files."""
return self._platform_backend.CanFlushIndividualFilesFromSystemCache()
def SupportFlushEntireSystemCache(self):
"""Returns true if entire system cache can be flushed.
Also checks that platform has required privilegues to flush system caches.
"""
return self._platform_backend.SupportFlushEntireSystemCache()
def _WaitForPageCacheToBeDropped(self):
# There seems to be no reliable way to wait for all pages to be dropped from
# the OS page cache (also known as 'file cache'). There is no guaranteed
# moment in time when everything is out of page cache. A number of pages
# will likely be reused before other pages are evicted. While individual
# files can be watched in limited ways, we choose not to be clever.
time.sleep(2)
def FlushEntireSystemCache(self):
"""Flushes the OS's file cache completely.
This function may require root or administrator access. Clients should
call SupportFlushEntireSystemCache to check first.
"""
self._platform_backend.FlushEntireSystemCache()
self._WaitForPageCacheToBeDropped()
def FlushSystemCacheForDirectories(self, directories):
"""Flushes the OS's file cache for the specified directory.
This function does not require root or administrator access."""
for path in directories:
self._platform_backend.FlushSystemCacheForDirectory(path)
self._WaitForPageCacheToBeDropped()
def FlushDnsCache(self):
"""Flushes the OS's DNS cache completely.
This function may require root or administrator access."""
return self._platform_backend.FlushDnsCache()
def LaunchApplication(self,
application,
parameters=None,
elevate_privilege=False):
""""Launches the given |application| with a list of |parameters| on the OS.
Set |elevate_privilege| to launch the application with root or admin rights.
Returns:
A popen style process handle for host platforms.
"""
return self._platform_backend.LaunchApplication(
application,
parameters,
elevate_privilege=elevate_privilege)
def StartActivity(self, intent, blocking=False):
"""Starts an activity for the given intent on the device."""
return self._platform_backend.StartActivity(intent, blocking)
def CanLaunchApplication(self, application):
"""Returns whether the platform can launch the given application."""
return self._platform_backend.CanLaunchApplication(application)
def InstallApplication(self, application, **kwargs):
"""Installs the given application."""
return self._platform_backend.InstallApplication(application, **kwargs)
def IsCooperativeShutdownSupported(self):
"""Indicates whether CooperativelyShutdown, below, is supported.
It is not necessary to implement it on all platforms."""
return self._platform_backend.IsCooperativeShutdownSupported()
def CooperativelyShutdown(self, proc, app_name):
"""Cooperatively shut down the given process from subprocess.Popen.
Currently this is only implemented on Windows. See
crbug.com/424024 for background on why it was added.
Args:
proc: a process object returned from subprocess.Popen.
app_name: on Windows, is the prefix of the application's window
class name that should be searched for. This helps ensure
that only the application's windows are closed.
Returns True if it is believed the attempt succeeded.
"""
return self._platform_backend.CooperativelyShutdown(proc, app_name)
def CanTakeScreenshot(self):
return self._platform_backend.CanTakeScreenshot()
# TODO(nednguyen): Implement this on Mac, Linux & Win. (crbug.com/369490)
def TakeScreenshot(self, file_path):
""" Takes a screenshot of the platform and save to |file_path|.
Note that this method may not be supported on all platform, so check with
CanTakeScreenshot before calling this.
Args:
file_path: Where to save the screenshot to. If the platform is remote,
|file_path| is the path on the host platform.
Returns True if it is believed the attempt succeeded.
"""
return self._platform_backend.TakeScreenshot(file_path)
def CanRecordVideo(self):
return self._platform_backend.CanRecordVideo()
def StartVideoRecording(self):
"""Starts recording a video on the device.
Note that this method may not be supported on all platforms, so the caller
must check with CanRecordVideo before calling this. Once the caller starts
recording a video using this call, the caller must stop recording the video
by calling StopVideoRecording() before attempting to start recording another
video.
"""
self._platform_backend.StartVideoRecording()
def StopVideoRecording(self, video_path):
"""Stops recording a video on the device and saves to |video_path|.
This method must be called only if recording a video had started using a
call to StartVideoRecording(), and it was not already stopped using a call
to StopVideoRecording().
Args:
video_path: Where to save the video to. If the platform is remote,
|video_path| is the path on the host platform.
"""
self._platform_backend.StopVideoRecording(video_path)
def SetFullPerformanceModeEnabled(self, enabled):
""" Set full performance mode on the platform.
Note: this can be no-op on certain platforms.
"""
return self._platform_backend.SetFullPerformanceModeEnabled(enabled)
def StartLocalServer(self, server):
"""Starts a LocalServer and associates it with this platform.
|server.Close()| should be called manually to close the started server.
"""
self._local_server_controller.StartServer(server)
@property
def http_server(self):
# TODO(crbug.com/799490): Ownership of the local server should be moved
# to the network_controller.
server = self._local_server_controller.GetRunningServer(
memory_cache_http_server.MemoryCacheDynamicHTTPServer, None)
if server:
return server
return self._local_server_controller.GetRunningServer(
memory_cache_http_server.MemoryCacheHTTPServer, None)
def SetHTTPServerDirectories(self, paths, handler_class=None):
"""Returns True if the HTTP server was started, False otherwise."""
# pylint: disable=redefined-variable-type
if isinstance(paths, basestring):
paths = set([paths])
paths = set(os.path.realpath(p) for p in paths)
# If any path is in a subdirectory of another, remove the subdirectory.
duplicates = set()
for parent_path in paths:
for sub_path in paths:
if parent_path == sub_path:
continue
if os.path.commonprefix((parent_path, sub_path)) == parent_path:
duplicates.add(sub_path)
paths -= duplicates
if self.http_server:
old_handler_class = getattr(self.http_server,
"dynamic_request_handler_class", None)
if not old_handler_class and not handler_class and \
self.http_server.paths == paths:
return False
if old_handler_class and handler_class \
and old_handler_class.__name__ == handler_class.__name__ \
and self.http_server.paths == paths:
return False
self.http_server.Close()
if not paths:
return False
if handler_class:
server = memory_cache_http_server.MemoryCacheDynamicHTTPServer(
paths, handler_class)
real_logging.info('MemoryCacheDynamicHTTPServer created')
else:
server = memory_cache_http_server.MemoryCacheHTTPServer(paths)
real_logging.info('MemoryCacheHTTPServer created')
self.StartLocalServer(server)
# For now, Fuchsia needs to do port forwarding due to --proxy-server
# flag not being supported in its browser.
# TODO(https://crbug.com/1014670): Remove once debug flags supported in
# Fuchsia browsers.
if self._platform_backend.GetOSName() == 'fuchsia':
self._platform_backend.forwarder_factory.Create(server.port, server.port)
return True
def StopAllLocalServers(self):
self._local_server_controller.Close()
if self._forwarder:
self._forwarder.Close()
@property
def local_servers(self):
"""Returns the currently running local servers."""
return self._local_server_controller.local_servers
def WaitForBatteryTemperature(self, temp):
"""Waits for the battery on the device under test to cool down to temp.
Args:
temp: temperature target in degrees C.
"""
return self._platform_backend.WaitForBatteryTemperature(temp)
def WaitForCpuTemperature(self, temp):
"""Waits for the CPU temperature to be less than temp.
Args:
temp: A float containing the maximum temperature to allow
in degrees c.
"""
return self._platform_backend.WaitForCpuTemperature(temp)
def GetTypExpectationsTags(self):
return self._platform_backend.GetTypExpectationsTags()
| |
import psycopg2
import socket
import json
import random
import threading
import sys
import os
import time
# Controls whether the program prints debug/trace output.
verbose = False

# Host interface for all sockets.
# The empty string binds to all network interfaces of the machine.
common_host = ""

# Candidate ports for the main listening socket (tried in order).
main_thread_port_list = [8888]

# Inclusive range of ports handed out to worker threads.
sockets_min_range = 30000
sockets_max_range = 40000
# This is the worker function.
# It is used by the threading process to build a socket,
# communicate with one API Fetch client, parse the request,
# dispatch it to the matching _save_* handler and send a JSON reply.

# Shared DSN for all database work done by the handlers.
_DB_CONN_STRING = "host='localhost' dbname='cs585' user='cs585' "


def _recv_all(conn):
    """Read from conn until the peer closes; return the payload as UTF-8 text."""
    chunks = []
    while True:
        chunk = conn.recv(1024)
        if not chunk:
            break
        chunks.append(chunk)
    return b"".join(chunks).decode("UTF-8")


def _save_blogs(json_data):
    """Insert blog (name, link) pairs into the blog table.

    Returns the JSON-serializable response dict for the client.
    """
    try:
        blog_list = json_data["blogs"]
        link_list = json_data["links"]
        if len(blog_list) != len(link_list):
            raise ValueError("blogs and links lists differ in length")
        db_conn = psycopg2.connect(_DB_CONN_STRING)
        try:
            cursor = db_conn.cursor()
            for name, link in zip(blog_list, link_list):
                try:
                    cursor.execute("insert into blog values(%s,%s);",
                                   (name, link))
                    db_conn.commit()
                except Exception:
                    # Bad/duplicate row: skip it, keep the rest of the batch.
                    db_conn.rollback()
            cursor.close()
        finally:
            db_conn.close()
        return {"worked": True, "request_type": "save_blogs"}
    except Exception as e:
        print("WOW: " + str(e))
        return {"worked": False, "request_type": "save_blogs"}


def _save_posts(json_data):
    """Insert posts (and their tags) into the post and tag tables.

    Returns the JSON-serializable response dict for the client.
    """
    try:
        post_list = json_data["posts"]
        db_conn = psycopg2.connect(_DB_CONN_STRING)
        try:
            cursor = db_conn.cursor()
            for post in post_list:
                try:
                    t = time.gmtime(int(post["timestamp"]))
                    # Columns have fixed widths; truncate oversized fields.
                    if post["title"] != None:
                        post["title"] = post["title"][:100]
                    cursor.execute(
                        "insert into post values(%s,%s,%s,%s,%s,%s,%s,%s);",
                        (post["post_id"],
                         post["post_link"][:300],
                         post["blog_name"],
                         post["type"],
                         post["content"][:500],
                         psycopg2.Timestamp(t.tm_year, t.tm_mon, t.tm_mday,
                                            t.tm_hour, t.tm_min, t.tm_sec),
                         post["note_count"],
                         post["title"],
                         )
                    )
                    db_conn.commit()
                except Exception as e:
                    print("DB Fail - ", str(e))
                    db_conn.rollback()
                # Tags are inserted even if the post row itself failed
                # (matches the original behavior).
                if "tags" in post:
                    for tag in post["tags"]:
                        try:
                            cursor.execute("insert into tag values(%s,%s);",
                                           (tag, post["post_id"]))
                            db_conn.commit()
                        except Exception:
                            db_conn.rollback()
            cursor.close()
        finally:
            db_conn.close()
        # BUG FIX: response used to echo "save_blogs" (copy-paste error).
        return {"worked": True, "request_type": "save_posts"}
    except Exception as e:
        print("WOW: " + str(e))
        return {"worked": False, "request_type": "save_posts"}


def _save_notes(json_data):
    """Insert notes into the note table.

    Returns the JSON-serializable response dict for the client.
    """
    try:
        note_list = json_data["notes"]
        db_conn = psycopg2.connect(_DB_CONN_STRING)
        try:
            cursor = db_conn.cursor()
            for note in note_list:
                try:
                    t = time.gmtime(int(note["timestamp"]))
                    cursor.execute(
                        "insert into note values(%s,%s,%s,%s);",
                        (note["post_id"],
                         note["type"],
                         psycopg2.Timestamp(t.tm_year, t.tm_mon, t.tm_mday,
                                            t.tm_hour, t.tm_min, t.tm_sec),
                         note["blog_name"]
                         )
                    )
                    db_conn.commit()
                except Exception as e:
                    print("DB Fail - ", str(e))
                    db_conn.rollback()
            cursor.close()
        finally:
            db_conn.close()
        # BUG FIX: response used to echo "save_blogs" (copy-paste error).
        return {"worked": True, "request_type": "save_notes"}
    except Exception as e:
        print("WOW: " + str(e))
        return {"worked": False, "request_type": "save_notes"}


def worker(thread_number, socket_number):
    """Serve exactly one client connection on socket_number, then exit.

    Args:
        thread_number: integer used only for error reporting.
        socket_number: port to bind the per-client listening socket on.
    """
    handlers = {
        "save_blogs": _save_blogs,
        "save_posts": _save_posts,
        "save_notes": _save_notes,
    }
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    conn = None
    try:
        s.bind((common_host, socket_number))
        s.listen(1)
        conn, addr = s.accept()
        if verbose:
            print('Connected by', addr)
        data = _recv_all(conn)
        if verbose:
            print("Data Recieved: ", data)
        # BUG FIX: previously a parse failure left json_data undefined and
        # the code fell through into a NameError; now we answer gracefully.
        try:
            json_data = json.loads(data)
        except Exception:
            print("Failed to parse JSON")
            json_data = {}
        handler = handlers.get(json_data.get("request_type"))
        if handler is not None:
            send_data = handler(json_data)
        else:
            # Unrecognized (or missing) request type; consistent with main().
            send_data = {"worked": False, "request_type": "NOT RECOGNIZED"}
        conn.send(str.encode(json.dumps(send_data)))
        conn.shutdown(socket.SHUT_WR)
    # catch all thread exceptions so that we know what happened
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print("Error in Thread " + str(thread_number) + ": " + str(e)
              + " on line " + str(exc_tb.tb_lineno))
    finally:
        # BUG FIX: close the listening socket too (it used to leak).
        if conn is not None:
            conn.close()
        s.close()
# Probe for a free worker port inside the configured range.
# A single random port is tried per call; failures simply report False.
def get_open_socket():
    """Try to bind one random port in [sockets_min_range, sockets_max_range].

    Returns:
        (True, port) when the port could be bound, (False, None) otherwise.
        The probe socket is always closed so the port stays available.
    """
    probe = None
    try:
        candidate = random.randint(sockets_min_range, sockets_max_range)
        print("Trying Port " + str(candidate))
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        probe.bind((common_host, candidate))
        return True, candidate
    except Exception as e:
        print(e)
        return False, None
    finally:
        if probe is not None:
            probe.close()
# Build the main listening socket.
def main_socket_get():
    """Bind the first available port from main_thread_port_list.

    Returns:
        (True, port, socket) on success, (False, None, None) otherwise.
    """
    for candidate in main_thread_port_list:
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.bind((common_host, candidate))
        except Exception:
            print("Could Not Link To Socket " + str(candidate))
            continue
        print("Linked To Socket " + str(candidate))
        return True, candidate, sock
    return False, None, None
# This is the main function. It handles the main thread.
# It holds the main socket that receives socket requests,
# then returns an opened worker socket port to the client.
# Worker threads are written so they cannot kill the program;
# only this function is allowed to terminate the process.
def main(socket_num):
    """Run the dispatcher loop: accept requests, hand out worker ports.

    Args:
        socket_num: kept for backward compatibility; the ports actually
            tried come from main_thread_port_list.
    """
    conn = None
    s = None
    # BUG FIX: initialized up front so the OSError handler below can never
    # hit a NameError when the failure happens before a port was chosen.
    opened_port = None
    try:
        # document each thread by a number (informational only)
        thread_count = 0
        # this is the loop that keeps the main socket open
        while True:
            # open the socket and start looking for a request
            return_val, port_num, s = main_socket_get()
            if not return_val:
                continue
            s.listen(1)
            conn, addr = s.accept()
            # build a bytestring from the incoming message, then parse it
            data = bytes([])
            while True:
                new_data = conn.recv(1024)
                if not new_data:
                    break
                data += new_data
            data = str(data, 'UTF-8')
            try:
                json_data = json.loads(data)
            except Exception:
                print("Failed to parse JSON")
                conn.shutdown(socket.SHUT_WR)
                conn.close()
                conn = None
                # BUG FIX: explicitly close the listening socket instead of
                # dropping the reference and relying on garbage collection.
                s.close()
                s = None
                continue
            # check to make sure there is a request
            if "request_type" in json_data:
                # check to make sure it is a socket request
                if json_data["request_type"] == "socket_request":
                    # Look for an open socket until we find one
                    while True:
                        ret, opened_port = get_open_socket()
                        if ret:
                            # build the socket thread and then start it off
                            thread_count += 1
                            t = threading.Thread(
                                target=worker,
                                args=(thread_count, opened_port,))
                            t.start()
                            send_data = {"worked": True,
                                         "request_type": "socket_request",
                                         "socket_number": opened_port,
                                         }
                            # send the json to the API FETCHER
                            conn.send(str.encode(json.dumps(send_data)))
                            break
                        else:
                            print("WAT")
                # let the API Fetcher know it sent an unknown request
                else:
                    send_data = {"worked": False,
                                 "request_type": "NOT RECOGNIZED",
                                 }
                    conn.send(str.encode(json.dumps(send_data)))
            # close the connection and the listening socket
            conn.shutdown(socket.SHUT_WR)
            conn.close()
            conn = None
            s.close()
            s = None
    # make sure we document a main-socket error before dying
    except OSError as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print("---------------\nMain Thread Locked Up\n----------------\n")
        print(str(e) + " on Line " + str(exc_tb.tb_lineno))
        print("\n----------------\n")
        print(str(opened_port))
        print("\n----------------\n")
        conn = None
        sys.exit()
    # Close the connection (and socket) at the end of the program.
    finally:
        if conn != None:
            conn.close()
        if s is not None:
            s.close()
# Script entry point: run the dispatcher on the default port.
if __name__ == "__main__":
    main(8888)
| |
#!/usr/bin/python
#
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for looking up symbolic debugging information.
The information can include symbol names, offsets, and source locations.
"""
import glob
import itertools
import os
import re
import subprocess
import zipfile
# Root of the Chromium checkout: four directories above this script.
CHROME_SRC = os.path.join(os.path.realpath(os.path.dirname(__file__)),
                          os.pardir, os.pardir, os.pardir, os.pardir)
ANDROID_BUILD_TOP = CHROME_SRC  # Android build root; same as the checkout here.
SYMBOLS_DIR = CHROME_SRC  # Prefix prepended to symbol-file paths.
CHROME_SYMBOLS_DIR = CHROME_SRC  # Root searched for Chrome symbol files.

# Target CPU architecture; callers may overwrite (e.g. "arm64", "x86").
ARCH = "arm"

# Cached (label, platform, target) triple, filled lazily by FindToolchain().
TOOLCHAIN_INFO = None
def Uname():
  """'uname' for constructing prebuilt/<...> and out/host/<...> paths."""
  system = os.uname()[0]
  if system == "Linux":
    return "linux-x86"
  if system == "Darwin":
    machine = os.uname()[-1]
    return "darwin-x86" if machine in ("i386", "x86_64") else "darwin-ppc"
  # Unknown host OS: fall back to the raw uname value.
  return system
def ToolPath(tool, toolchain_info=None):
  """Return a full qualified path to the specified tool"""
  # ToolPath looks for the tools in the completely incorrect directory.
  # This looks in the checked in android_tools.
  toolchain_by_arch = {
      "arm": ("arm-linux-androideabi-4.9", "arm-linux-androideabi"),
      "arm64": ("aarch64-linux-android-4.9", "aarch64-linux-android"),
      "x86": ("x86-4.9", "i686-linux-android"),
      "x86_64": ("x86_64-4.9", "x86_64-linux-android"),
      "x64": ("x86_64-4.9", "x86_64-linux-android"),
      "mips": ("mipsel-linux-android-4.9", "mipsel-linux-android"),
  }
  if ARCH not in toolchain_by_arch:
    raise Exception("Could not find tool chain")
  toolchain_source, toolchain_prefix = toolchain_by_arch[ARCH]
  ndk = "ndk"
  toolchain_subdir = (
      "third_party/android_tools/%s/toolchains/%s/prebuilt/linux-x86_64/bin" %
      (ndk, toolchain_source))
  return os.path.join(CHROME_SRC,
                      toolchain_subdir,
                      toolchain_prefix + "-" + tool)
def FindToolchain():
  """Look for the latest available toolchain

  Args:
    None

  Returns:
    A pair of strings containing toolchain label and target prefix.

  Raises:
    Exception: if no toolchain with a working addr2line is found for ARCH.
  """
  global TOOLCHAIN_INFO
  # Lazily computed and cached for the lifetime of the process.
  if TOOLCHAIN_INFO is not None:
    return TOOLCHAIN_INFO

  ## Known toolchains, newer ones in the front.
  gcc_version = "4.9"
  if ARCH == "arm64":
    known_toolchains = [
        ("aarch64-linux-android-" + gcc_version, "aarch64",
         "aarch64-linux-android")
    ]
  elif ARCH == "arm":
    known_toolchains = [
        ("arm-linux-androideabi-" + gcc_version, "arm",
         "arm-linux-androideabi")
    ]
  elif ARCH == "x86":
    known_toolchains = [
        ("x86-" + gcc_version, "x86", "i686-linux-android")
    ]
  elif ARCH == "x86_64" or ARCH == "x64":
    known_toolchains = [
        ("x86_64-" + gcc_version, "x86_64", "x86_64-linux-android")
    ]
  elif ARCH == "mips":
    known_toolchains = [
        ("mipsel-linux-android-" + gcc_version, "mips",
         "mipsel-linux-android")
    ]
  else:
    known_toolchains = []

  # Look for addr2line to check for valid toolchain path.
  for (label, platform, target) in known_toolchains:
    toolchain_info = (label, platform, target)
    if os.path.exists(ToolPath("addr2line", toolchain_info)):
      TOOLCHAIN_INFO = toolchain_info
      # BUG FIX: was a Python 2-only print statement, which is a syntax
      # error under Python 3; the parenthesized form works on both.
      print("Using toolchain from :" + ToolPath("", TOOLCHAIN_INFO))
      return toolchain_info

  raise Exception("Could not find tool chain")
def GetAapt():
  """Returns the path to aapt.

  Returns:
    The pathname of the newest 'aapt' executable found, or None.
  """
  default_home = os.path.join('third_party', 'android_tools', 'sdk')
  sdk_home = os.environ.get('SDK_HOME', default_home)
  candidates = glob.glob(os.path.join(sdk_home, 'build-tools', '*', 'aapt'))
  if not candidates:
    return None
  # Prefer the most recently modified build-tools install.
  return max(candidates, key=os.path.getmtime)
def ApkMatchPackageName(aapt, apk_path, package_name):
  """Returns true the APK's package name matches package_name.

  Args:
    aapt: pathname for the 'aapt' executable.
    apk_path: pathname of the APK file.
    package_name: package name to match.

  Returns:
    True if the package name matches or aapt is None, False otherwise.
  """
  if not aapt:
    # Without aapt we cannot inspect the APK; allow false positives.
    return True
  name_pattern = re.compile(r'package: .*name=\'(\S*)\'')
  badging = subprocess.check_output(
      [aapt, 'dump', 'badging', apk_path]).split('\n')
  for line in badging:
    found = name_pattern.match(line)
    if found:
      return package_name == found.group(1)
  return False
def PathListJoin(prefix_list, suffix_list):
  """Returns each prefix in prefix_list joined with each suffix in suffix list.

  Args:
    prefix_list: list of path prefixes.
    suffix_list: list of path suffixes.

  Returns:
    List of paths each of which joins a prefix with a suffix, ordered by
    prefix first, then suffix.
  """
  joined = []
  for prefix in prefix_list:
    for suffix in suffix_list:
      joined.append(os.path.join(prefix, suffix))
  return joined
def GetCandidates(dirs, filepart, candidate_fun):
  """Returns a list of candidate filenames.

  Args:
    dirs: a list of the directory part of the pathname.
    filepart: the file part of the pathname.
    candidate_fun: a function to apply to each candidate, returns a list.

  Returns:
    A list of candidate files ordered by modification time, newest first.
  """
  out_dir = os.path.join(CHROME_SYMBOLS_DIR,
                         os.environ.get('CHROMIUM_OUT_DIR', 'out'))
  buildtype = os.environ.get('BUILDTYPE')
  if buildtype:
    buildtype_list = [buildtype]
  else:
    buildtype_list = ['Debug', 'Release']
  # Search each out/<buildtype> directory plus the symbols root itself.
  roots = PathListJoin([out_dir], buildtype_list) + [CHROME_SYMBOLS_DIR]
  patterns = PathListJoin(PathListJoin(roots, dirs), [filepart])
  found = []
  for pattern in patterns:
    found.extend(candidate_fun(pattern))
  found.sort(key=os.path.getmtime, reverse=True)
  return found
def GetCandidateApks():
  """Returns a list of APKs which could contain the library.

  Returns:
    List of APK filenames which could contain the library, newest first.
  """
  return GetCandidates(['apks'], '*.apk', candidate_fun=glob.glob)
def GetCrazyLib(apk_filename):
  """Returns the name of the first crazy library from this APK.

  Args:
    apk_filename: name of an APK file.

  Returns:
    Name of the first library which would be crazy loaded from this APK,
    or None if the APK contains no such library.
  """
  # BUG FIX: the zip handle used to leak; close it deterministically.
  zip_file = zipfile.ZipFile(apk_filename, 'r')
  try:
    for filename in zip_file.namelist():
      # BUG FIX: the dots are now escaped -- the old pattern's bare '.'
      # after 'crazy' matched any character, not just a literal dot.
      match = re.match(r'lib/[^/]*/crazy\.(lib.*\.so)', filename)
      if match:
        return match.group(1)
  finally:
    zip_file.close()
  return None
def GetMatchingApks(device_apk_name):
  """Find any APKs which match the package indicated by the device_apk_name.

  Args:
    device_apk_name: name of the APK on the device
        (expected form: <package_name>-<number>.apk).

  Returns:
    A list of APK filenames which could contain the desired library, or
    None if the name does not follow the expected form.
  """
  parsed = re.match('(.*)-[0-9]+[.]apk$', device_apk_name)
  if parsed is None:
    return None
  package_name = parsed.group(1)
  return [candidate_apk for candidate_apk in GetCandidateApks()
          if ApkMatchPackageName(GetAapt(), candidate_apk, package_name)]
def MapDeviceApkToLibrary(device_apk_name):
  """Provide a library name which corresponds with device_apk_name.

  Args:
    device_apk_name: name of the APK on the device.

  Returns:
    Name of the library which corresponds to that APK, or None if no
    matching APK contains a crazy-loaded library.
  """
  for candidate in GetMatchingApks(device_apk_name):
    crazy_lib = GetCrazyLib(candidate)
    if crazy_lib:
      return crazy_lib
def GetCandidateLibraries(library_name):
  """Returns a list of candidate library filenames.

  Args:
    library_name: basename of the library to match.

  Returns:
    A list of matching, existing library filenames, newest first.
  """
  return GetCandidates(
      ['lib', 'lib.target'], library_name,
      lambda path: [path] if os.path.exists(path) else [])
def TranslateLibPath(lib):
  """Map a symbols-root-relative lib path to the best local .so path.

  SymbolInformation(lib, addr) receives lib as the path from symbols root
  to the symbols file; translate it to the correct .so. The most recently
  updated candidate in the known directories wins. If nothing is found under
  CHROME_SYMBOLS_DIR, return lib unchanged, in case it is an Android symbol
  living in SYMBOLS_DIR.
  """
  library_name = os.path.basename(lib)

  # The stack trace may name an APK instead of a library: libraries can be
  # loaded directly from inside the APK, whose on-device name has the form
  # <package_name>-<number>.apk. Map such names back to the library they
  # contain by matching the host APK's badging against the package name.
  if re.search('-[0-9]+[.]apk$', library_name):
    mapped = MapDeviceApkToLibrary(library_name)
    if mapped:
      library_name = mapped

  candidates = GetCandidateLibraries(library_name)
  if not candidates:
    return lib

  return '/' + os.path.relpath(candidates[0], SYMBOLS_DIR)
def SymbolInformation(lib, addr, get_detailed_info):
  """Look up symbol information about an address.

  Args:
    lib: library (or executable) pathname containing symbols
    addr: string hexidecimal address
    get_detailed_info: also run objdump for object symbol/offset info.

  Returns:
    A list of the form [(source_symbol, source_location,
    object_symbol_with_offset)]. If the function has been inlined the list
    may contain more than one element, with the most deeply nested inlined
    location first. The list is always non-empty, even if no information is
    available; callers usually want the last element's source_location and
    object_symbol_with_offset.
  """
  translated = TranslateLibPath(lib)
  info = SymbolInformationForSet(translated, set([addr]), get_detailed_info)
  if info:
    records = info.get(addr)
    if records:
      return records
  return [(None, None, None)]
def SymbolInformationForSet(lib, unique_addrs, get_detailed_info):
  """Look up symbol information for a set of addresses from the given library.

  Args:
    lib: library (or executable) pathname containing symbols
    unique_addrs: set of hexidecimal addresses
    get_detailed_info: also run objdump for object symbol/offset info.

  Returns:
    A dictionary of the form {addr: [(source_symbol, source_location,
    object_symbol_with_offset)]} where each address has a non-empty list of
    associated symbols and locations. Inlined functions may yield several
    entries, most deeply nested first; callers usually want the last
    element's source_location and object_symbol_with_offset. Returns None
    when the library is missing or no line information could be obtained.
  """
  if not lib:
    return None

  addr_to_line = CallAddr2LineForSet(lib, unique_addrs)
  if not addr_to_line:
    return None

  if get_detailed_info:
    addr_to_objdump = CallObjdumpForSet(lib, unique_addrs)
    if not addr_to_objdump:
      return None
  else:
    # Placeholder entries so every address formats the same way below.
    addr_to_objdump = dict((addr, ("", 0)) for addr in unique_addrs)

  result = {}
  for addr in unique_addrs:
    source_info = addr_to_line.get(addr) or [(None, None)]
    if addr in addr_to_objdump:
      (object_symbol, object_offset) = addr_to_objdump.get(addr)
      combined = FormatSymbolWithOffset(object_symbol, object_offset)
    else:
      combined = None
    result[addr] = [(source_symbol, source_location, combined)
                    for (source_symbol, source_location) in source_info]

  return result
class MemoizedForSet(object):
  """Decorator memoizing fn(lib, addrs) results per (lib, addr) pair.

  Addresses that produced a falsy result are cached as misses and omitted
  from later results, so the wrapped function is asked about each address
  at most once per library.
  """
  def __init__(self, fn):
    self.fn = fn
    self.cache = {}

  def __call__(self, lib, unique_addrs):
    lib_cache = self.cache.setdefault(lib, {})
    # BUG FIX: materialize to a list. Under Python 3, filter() returns a
    # lazy iterator which the old code both truth-tested (always truthy)
    # and consumed twice, so fn was invoked with an exhausted iterator and
    # memoization silently returned empty results.
    no_cache = [addr for addr in unique_addrs if addr not in lib_cache]
    if no_cache:
      # Pre-mark as misses; real results overwrite these below.
      lib_cache.update((k, None) for k in no_cache)
      result = self.fn(lib, no_cache)
      if result:
        lib_cache.update(result)
    return dict((k, lib_cache[k]) for k in unique_addrs if lib_cache[k])
@MemoizedForSet
def CallAddr2LineForSet(lib, unique_addrs):
  """Look up line and symbol information for a set of addresses.

  Runs a single addr2line process and feeds it all addresses over stdin,
  reading symbol/location pairs back from stdout in lockstep.

  Args:
    lib: library (or executable) pathname containing symbols
    unique_addrs: set of string hexidecimal addresses look up.

  Returns:
    A dictionary of the form {addr: [(symbol, file:line)]} where
    each address has a list of associated symbols and locations
    or an empty list if no symbol information was found.

    If the function has been inlined then the list may contain
    more than one element with the symbols for the most deeply
    nested inlined location appearing first.
  """
  if not lib:
    return None

  symbols = SYMBOLS_DIR + lib
  if not os.path.isfile(symbols):
    return None

  # Validates the toolchain as a side effect; the triple itself is unused
  # here since ToolPath consults the cached TOOLCHAIN_INFO/ARCH globals.
  (label, platform, target) = FindToolchain()
  cmd = [ToolPath("addr2line"), "--functions", "--inlines",
         "--demangle", "--exe=" + symbols]
  child = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  result = {}
  addrs = sorted(unique_addrs)
  for addr in addrs:
    child.stdin.write("0x%s\n" % addr)
    child.stdin.flush()
    records = []
    first = True
    while True:
      # addr2line emits pairs of lines: symbol, then file:line.
      # "??" / "??:0" mean "unknown".
      symbol = child.stdout.readline().strip()
      if symbol == "??":
        symbol = None
      location = child.stdout.readline().strip()
      if location == "??:0":
        location = None
      # Both unknown: this is the sentinel response (see below); stop.
      if symbol is None and location is None:
        break
      records.append((symbol, location))
      if first:
        # Write a blank line as a sentinel so we know when to stop
        # reading inlines from the output.
        # The blank line will cause addr2line to emit "??\n??:0\n".
        child.stdin.write("\n")
        first = False
    result[addr] = records
  child.stdin.close()
  child.stdout.close()
  return result
def StripPC(addr):
  """Strips the Thumb bit from a program counter address when appropriate.
  Args:
    addr: the program counter address
  Returns:
    The stripped program counter address.
  """
  global ARCH
  # On ARM, bit 0 of the PC is the Thumb-mode flag, not part of the address.
  return (addr & ~1) if ARCH == "arm" else addr
@MemoizedForSet
def CallObjdumpForSet(lib, unique_addrs):
  """Use objdump to find out the names of the containing functions.
  Args:
    lib: library (or executable) pathname containing symbols
    unique_addrs: set of string hexadecimal addresses to find the functions
      for.
  Returns:
    A dictionary of the form {addr: (string symbol, offset)}.
  """
  if not lib:
    return None
  # (Fix: this existence check was duplicated verbatim in the original.)
  symbols = SYMBOLS_DIR + lib
  if not os.path.exists(symbols):
    return None
  result = {}
  # Function lines look like:
  #   000177b0 <android::IBinder::~IBinder()+0x2c>:
  # We pull out the address and function first. Then we check for an optional
  # offset. This is tricky due to functions that look like "operator+(..)+0x2c"
  func_regexp = re.compile(r"(^[a-f0-9]*) \<(.*)\>:$")
  offset_regexp = re.compile(r"(.*)\+0x([a-f0-9]*)")
  # A disassembly line looks like:
  #   177b2:       b510            push    {r4, lr}
  # (Fix: the character class read "[ a-f0-0]" — a typo for "[ a-f0-9]".)
  asm_regexp = re.compile(r"(^[ a-f0-9]*):[ a-f0-9]*.*$")
  for target_addr in unique_addrs:
    # Disassemble just a small window around the target address.
    start_addr_dec = str(StripPC(int(target_addr, 16)))
    stop_addr_dec = str(StripPC(int(target_addr, 16)) + 8)
    cmd = [ToolPath("objdump"),
           "--section=.text",
           "--demangle",
           "--disassemble",
           "--start-address=" + start_addr_dec,
           "--stop-address=" + stop_addr_dec,
           symbols]
    current_symbol = None    # The current function symbol in the disassembly.
    current_symbol_addr = 0  # The address of the current function.
    stream = subprocess.Popen(cmd, stdout=subprocess.PIPE).stdout
    for line in stream:
      # Is it a function line like:
      #   000177b0 <android::IBinder::~IBinder()>:
      components = func_regexp.match(line)
      if components:
        # This is a new function, so record the current function and its
        # address.
        current_symbol_addr = int(components.group(1), 16)
        current_symbol = components.group(2)
        # If the symbol carries an offset like "foo(..)+0x2c", subtract it
        # so current_symbol_addr is the function's true start address.
        components = offset_regexp.match(current_symbol)
        if components:
          current_symbol = components.group(1)
          offset = components.group(2)
          if offset:
            current_symbol_addr -= int(offset, 16)
      # Is it a disassembly line like:
      #   177b2:       b510            push    {r4, lr}
      components = asm_regexp.match(line)
      if components:
        addr = components.group(1)
        i_addr = int(addr, 16)
        i_target = StripPC(int(target_addr, 16))
        if i_addr == i_target:
          result[target_addr] = (current_symbol, i_target - current_symbol_addr)
    stream.close()
  return result
def CallCppFilt(mangled_symbol):
  """Demangle a single C++ symbol name by piping it through c++filt."""
  proc = subprocess.Popen([ToolPath("c++filt")],
                          stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  # Send the symbol terminated by a newline, then close stdin so the
  # tool flushes its single-line answer.
  proc.stdin.write(mangled_symbol)
  proc.stdin.write("\n")
  proc.stdin.close()
  demangled = proc.stdout.readline().strip()
  proc.stdout.close()
  return demangled
def FormatSymbolWithOffset(symbol, offset):
  """Render a symbol as "name" at offset zero, otherwise "name+offset"."""
  return symbol if offset == 0 else "%s+%d" % (symbol, offset)
| |
"""Blocking and non-blocking HTTP client interfaces.
This module defines a common interface shared by two implementations,
``simple_httpclient`` and ``curl_httpclient``. Applications may either
instantiate their chosen implementation class directly or use the
`AsyncHTTPClient` class from this module, which selects an implementation
that can be overridden with the `AsyncHTTPClient.configure` method.
The default implementation is ``simple_httpclient``, and this is expected
to be suitable for most users' needs. However, some applications may wish
to switch to ``curl_httpclient`` for reasons such as the following:
* ``curl_httpclient`` has some features not found in ``simple_httpclient``,
including support for HTTP proxies and the ability to use a specified
network interface.
* ``curl_httpclient`` is more likely to be compatible with sites that are
not-quite-compliant with the HTTP spec, or sites that use little-exercised
features of HTTP.
* ``curl_httpclient`` is faster.
Note that if you are using ``curl_httpclient``, it is highly
recommended that you use a recent version of ``libcurl`` and
``pycurl``. Currently the minimum supported version of libcurl is
7.22.0, and the minimum version of pycurl is 7.18.2. It is highly
recommended that your ``libcurl`` installation is built with
asynchronous DNS resolver (threaded or c-ares), otherwise you may
encounter various problems with request timeouts (for more
information, see
http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS
and comments in curl_httpclient.py).
To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup::
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
"""
import datetime
import functools
from io import BytesIO
import ssl
import time
import weakref
from tornado.concurrent import (
Future,
future_set_result_unless_cancelled,
future_set_exception_unless_cancelled,
)
from tornado.escape import utf8, native_str
from tornado import gen, httputil
from tornado.ioloop import IOLoop
from tornado.util import Configurable
from typing import Type, Any, Union, Dict, Callable, Optional, cast
class HTTPClient(object):
    """A blocking HTTP client.
    This interface is provided to make it easier to share code between
    synchronous and asynchronous applications. Applications that are
    running an `.IOLoop` must use `AsyncHTTPClient` instead.
    Typical usage looks like this::
        http_client = httpclient.HTTPClient()
        try:
            response = http_client.fetch("http://www.google.com/")
            print(response.body)
        except httpclient.HTTPError as e:
            # HTTPError is raised for non-200 responses; the response
            # can be found in e.response.
            print("Error: " + str(e))
        except Exception as e:
            # Other errors are possible, such as IOError.
            print("Error: " + str(e))
        http_client.close()
    .. versionchanged:: 5.0
       Due to limitations in `asyncio`, it is no longer possible to
       use the synchronous ``HTTPClient`` while an `.IOLoop` is running.
       Use `AsyncHTTPClient` instead.
    """
    def __init__(
        self,
        async_client_class: "Optional[Type[AsyncHTTPClient]]" = None,
        **kwargs: Any
    ) -> None:
        # Initialize self._closed at the beginning of the constructor
        # so that an exception raised here doesn't lead to confusing
        # failures in __del__.
        self._closed = True
        # Private IOLoop dedicated to this client; never installed as the
        # thread's current loop (make_current=False).
        self._io_loop = IOLoop(make_current=False)
        if async_client_class is None:
            async_client_class = AsyncHTTPClient
        # Create the client while our IOLoop is "current", without
        # clobbering the thread's real current IOLoop (if any).
        async def make_client() -> "AsyncHTTPClient":
            # Yield once so this coroutine is running on self._io_loop,
            # making it "current" when the AsyncHTTPClient is constructed.
            await gen.sleep(0)
            assert async_client_class is not None
            return async_client_class(**kwargs)
        self._async_client = self._io_loop.run_sync(make_client)
        self._closed = False
    def __del__(self) -> None:
        # Best-effort cleanup; close() is a no-op when already closed.
        self.close()
    def close(self) -> None:
        """Closes the HTTPClient, freeing any resources used."""
        if not self._closed:
            self._async_client.close()
            self._io_loop.close()
            self._closed = True
    def fetch(
        self, request: Union["HTTPRequest", str], **kwargs: Any
    ) -> "HTTPResponse":
        """Executes a request, returning an `HTTPResponse`.
        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``
        If an error occurs during the fetch, we raise an `HTTPError` unless
        the ``raise_error`` keyword argument is set to False.
        """
        # Drive the asynchronous fetch to completion on the private IOLoop.
        response = self._io_loop.run_sync(
            functools.partial(self._async_client.fetch, request, **kwargs)
        )
        return response
class AsyncHTTPClient(Configurable):
    """A non-blocking HTTP client.
    Example usage::
        async def f():
            http_client = AsyncHTTPClient()
            try:
                response = await http_client.fetch("http://www.google.com")
            except Exception as e:
                print("Error: %s" % e)
            else:
                print(response.body)
    The constructor for this class is magic in several respects: It
    actually creates an instance of an implementation-specific
    subclass, and instances are reused as a kind of pseudo-singleton
    (one per `.IOLoop`). The keyword argument ``force_instance=True``
    can be used to suppress this singleton behavior. Unless
    ``force_instance=True`` is used, no arguments should be passed to
    the `AsyncHTTPClient` constructor. The implementation subclass as
    well as arguments to its constructor can be set with the static
    method `configure()`
    All `AsyncHTTPClient` implementations support a ``defaults``
    keyword argument, which can be used to set default values for
    `HTTPRequest` attributes. For example::
        AsyncHTTPClient.configure(
            None, defaults=dict(user_agent="MyUserAgent"))
        # or with force_instance:
        client = AsyncHTTPClient(force_instance=True,
            defaults=dict(user_agent="MyUserAgent"))
    .. versionchanged:: 5.0
       The ``io_loop`` argument (deprecated since version 4.1) has been removed.
    """
    # Which cache (if any) this instance belongs to; set in __new__.
    _instance_cache = None  # type: Dict[IOLoop, AsyncHTTPClient]
    @classmethod
    def configurable_base(cls) -> Type[Configurable]:
        return AsyncHTTPClient
    @classmethod
    def configurable_default(cls) -> Type[Configurable]:
        from tornado.simple_httpclient import SimpleAsyncHTTPClient
        return SimpleAsyncHTTPClient
    @classmethod
    def _async_clients(cls) -> Dict[IOLoop, "AsyncHTTPClient"]:
        # Per-subclass cache attribute (keyed by class name) so different
        # implementations don't share cached instances; weak keys let an
        # IOLoop's entry disappear when the loop is garbage-collected.
        attr_name = "_async_client_dict_" + cls.__name__
        if not hasattr(cls, attr_name):
            setattr(cls, attr_name, weakref.WeakKeyDictionary())
        return getattr(cls, attr_name)
    def __new__(cls, force_instance: bool = False, **kwargs: Any) -> "AsyncHTTPClient":
        # Pseudo-singleton: at most one shared instance per IOLoop unless
        # force_instance=True bypasses the cache entirely.
        io_loop = IOLoop.current()
        if force_instance:
            instance_cache = None
        else:
            instance_cache = cls._async_clients()
        if instance_cache is not None and io_loop in instance_cache:
            return instance_cache[io_loop]
        instance = super(AsyncHTTPClient, cls).__new__(cls, **kwargs)  # type: ignore
        # Make sure the instance knows which cache to remove itself from.
        # It can't simply call _async_clients() because we may be in
        # __new__(AsyncHTTPClient) but instance.__class__ may be
        # SimpleAsyncHTTPClient.
        instance._instance_cache = instance_cache
        if instance_cache is not None:
            instance_cache[instance.io_loop] = instance
        return instance
    def initialize(self, defaults: Optional[Dict[str, Any]] = None) -> None:
        self.io_loop = IOLoop.current()
        # Copy the class-level defaults so per-instance updates below
        # don't mutate HTTPRequest._DEFAULTS.
        self.defaults = dict(HTTPRequest._DEFAULTS)
        if defaults is not None:
            self.defaults.update(defaults)
        self._closed = False
    def close(self) -> None:
        """Destroys this HTTP client, freeing any file descriptors used.
        This method is **not needed in normal use** due to the way
        that `AsyncHTTPClient` objects are transparently reused.
        ``close()`` is generally only necessary when either the
        `.IOLoop` is also being closed, or the ``force_instance=True``
        argument was used when creating the `AsyncHTTPClient`.
        No other methods may be called on the `AsyncHTTPClient` after
        ``close()``.
        """
        if self._closed:
            return
        self._closed = True
        if self._instance_cache is not None:
            cached_val = self._instance_cache.pop(self.io_loop, None)
            # If there's an object other than self in the instance
            # cache for our IOLoop, something has gotten mixed up. A
            # value of None appears to be possible when this is called
            # from a destructor (HTTPClient.__del__) as the weakref
            # gets cleared before the destructor runs.
            if cached_val is not None and cached_val is not self:
                raise RuntimeError("inconsistent AsyncHTTPClient cache")
    def fetch(
        self,
        request: Union[str, "HTTPRequest"],
        raise_error: bool = True,
        **kwargs: Any
    ) -> "Future[HTTPResponse]":
        """Executes a request, asynchronously returning an `HTTPResponse`.
        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``
        This method returns a `.Future` whose result is an
        `HTTPResponse`. By default, the ``Future`` will raise an
        `HTTPError` if the request returned a non-200 response code
        (other errors may also be raised if the server could not be
        contacted). Instead, if ``raise_error`` is set to False, the
        response will always be returned regardless of the response
        code.
        If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
        In the callback interface, `HTTPError` is not automatically raised.
        Instead, you must check the response's ``error`` attribute or
        call its `~HTTPResponse.rethrow` method.
        .. versionchanged:: 6.0
           The ``callback`` argument was removed. Use the returned
           `.Future` instead.
           The ``raise_error=False`` argument only affects the
           `HTTPError` raised when a non-200 response code is used,
           instead of suppressing all errors.
        """
        if self._closed:
            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        else:
            if kwargs:
                raise ValueError(
                    "kwargs can't be used if request is an HTTPRequest object"
                )
        # We may modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object. This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)
        request_proxy = _RequestProxy(request, self.defaults)
        future = Future()  # type: Future[HTTPResponse]
        def handle_response(response: "HTTPResponse") -> None:
            # Completion callback: turn the HTTPResponse into either a
            # Future result or a Future exception, honoring raise_error.
            if response.error:
                if raise_error or not response._error_is_response_code:
                    future_set_exception_unless_cancelled(future, response.error)
                    return
            future_set_result_unless_cancelled(future, response)
        self.fetch_impl(cast(HTTPRequest, request_proxy), handle_response)
        return future
    def fetch_impl(
        self, request: "HTTPRequest", callback: Callable[["HTTPResponse"], None]
    ) -> None:
        # Subclass hook: perform the actual request and invoke `callback`
        # exactly once with the completed HTTPResponse.
        raise NotImplementedError()
    @classmethod
    def configure(
        cls, impl: "Union[None, str, Type[Configurable]]", **kwargs: Any
    ) -> None:
        """Configures the `AsyncHTTPClient` subclass to use.
        ``AsyncHTTPClient()`` actually creates an instance of a subclass.
        This method may be called with either a class object or the
        fully-qualified name of such a class (or ``None`` to use the default,
        ``SimpleAsyncHTTPClient``)
        If additional keyword arguments are given, they will be passed
        to the constructor of each subclass instance created. The
        keyword argument ``max_clients`` determines the maximum number
        of simultaneous `~AsyncHTTPClient.fetch()` operations that can
        execute in parallel on each `.IOLoop`. Additional arguments
        may be supported depending on the implementation class in use.
        Example::
           AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
        """
        super(AsyncHTTPClient, cls).configure(impl, **kwargs)
class HTTPRequest(object):
    """HTTP client request object."""
    # Backing store for the `headers` property below.
    _headers = None  # type: Union[Dict[str, str], httputil.HTTPHeaders]
    # Default values for HTTPRequest parameters.
    # Merged with the values on the request object by AsyncHTTPClient
    # implementations.
    _DEFAULTS = dict(
        connect_timeout=20.0,
        request_timeout=20.0,
        follow_redirects=True,
        max_redirects=5,
        decompress_response=True,
        proxy_password="",
        allow_nonstandard_methods=False,
        validate_cert=True,
    )
    def __init__(
        self,
        url: str,
        method: str = "GET",
        headers: Optional[Union[Dict[str, str], httputil.HTTPHeaders]] = None,
        body: Optional[Union[bytes, str]] = None,
        auth_username: Optional[str] = None,
        auth_password: Optional[str] = None,
        auth_mode: Optional[str] = None,
        connect_timeout: Optional[float] = None,
        request_timeout: Optional[float] = None,
        if_modified_since: Optional[Union[float, datetime.datetime]] = None,
        follow_redirects: Optional[bool] = None,
        max_redirects: Optional[int] = None,
        user_agent: Optional[str] = None,
        use_gzip: Optional[bool] = None,
        network_interface: Optional[str] = None,
        streaming_callback: Optional[Callable[[bytes], None]] = None,
        header_callback: Optional[Callable[[str], None]] = None,
        prepare_curl_callback: Optional[Callable[[Any], None]] = None,
        proxy_host: Optional[str] = None,
        proxy_port: Optional[int] = None,
        proxy_username: Optional[str] = None,
        proxy_password: Optional[str] = None,
        proxy_auth_mode: Optional[str] = None,
        allow_nonstandard_methods: Optional[bool] = None,
        validate_cert: Optional[bool] = None,
        ca_certs: Optional[str] = None,
        allow_ipv6: Optional[bool] = None,
        client_key: Optional[str] = None,
        client_cert: Optional[str] = None,
        body_producer: Optional[
            Callable[[Callable[[bytes], None]], "Future[None]"]
        ] = None,
        expect_100_continue: bool = False,
        decompress_response: Optional[bool] = None,
        ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
    ) -> None:
        r"""All parameters except ``url`` are optional.
        :arg str url: URL to fetch
        :arg str method: HTTP method, e.g. "GET" or "POST"
        :arg headers: Additional HTTP headers to pass on the request
        :type headers: `~tornado.httputil.HTTPHeaders` or `dict`
        :arg body: HTTP request body as a string (byte or unicode; if unicode
           the utf-8 encoding will be used)
        :type body: `str` or `bytes`
        :arg collections.abc.Callable body_producer: Callable used for
           lazy/asynchronous request bodies.
           It is called with one argument, a ``write`` function, and should
           return a `.Future`. It should call the write function with new
           data as it becomes available. The write function returns a
           `.Future` which can be used for flow control.
           Only one of ``body`` and ``body_producer`` may
           be specified. ``body_producer`` is not supported on
           ``curl_httpclient``. When using ``body_producer`` it is recommended
           to pass a ``Content-Length`` in the headers as otherwise chunked
           encoding will be used, and many servers do not support chunked
           encoding on requests. New in Tornado 4.0
        :arg str auth_username: Username for HTTP authentication
        :arg str auth_password: Password for HTTP authentication
        :arg str auth_mode: Authentication mode; default is "basic".
           Allowed values are implementation-defined; ``curl_httpclient``
           supports "basic" and "digest"; ``simple_httpclient`` only supports
           "basic"
        :arg float connect_timeout: Timeout for initial connection in seconds,
           default 20 seconds (0 means no timeout)
        :arg float request_timeout: Timeout for entire request in seconds,
           default 20 seconds (0 means no timeout)
        :arg if_modified_since: Timestamp for ``If-Modified-Since`` header
        :type if_modified_since: `datetime` or `float`
        :arg bool follow_redirects: Should redirects be followed automatically
           or return the 3xx response? Default True.
        :arg int max_redirects: Limit for ``follow_redirects``, default 5.
        :arg str user_agent: String to send as ``User-Agent`` header
        :arg bool decompress_response: Request a compressed response from
           the server and decompress it after downloading. Default is True.
           New in Tornado 4.0.
        :arg bool use_gzip: Deprecated alias for ``decompress_response``
           since Tornado 4.0.
        :arg str network_interface: Network interface or source IP to use for request.
           See ``curl_httpclient`` note below.
        :arg collections.abc.Callable streaming_callback: If set, ``streaming_callback`` will
           be run with each chunk of data as it is received, and
           ``HTTPResponse.body`` and ``HTTPResponse.buffer`` will be empty in
           the final response.
        :arg collections.abc.Callable header_callback: If set, ``header_callback`` will
           be run with each header line as it is received (including the
           first line, e.g. ``HTTP/1.0 200 OK\r\n``, and a final line
           containing only ``\r\n``. All lines include the trailing newline
           characters). ``HTTPResponse.headers`` will be empty in the final
           response. This is most useful in conjunction with
           ``streaming_callback``, because it's the only way to get access to
           header data while the request is in progress.
        :arg collections.abc.Callable prepare_curl_callback: If set, will be called with
           a ``pycurl.Curl`` object to allow the application to make additional
           ``setopt`` calls.
        :arg str proxy_host: HTTP proxy hostname. To use proxies,
           ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username``,
           ``proxy_pass`` and ``proxy_auth_mode`` are optional. Proxies are
           currently only supported with ``curl_httpclient``.
        :arg int proxy_port: HTTP proxy port
        :arg str proxy_username: HTTP proxy username
        :arg str proxy_password: HTTP proxy password
        :arg str proxy_auth_mode: HTTP proxy Authentication mode;
           default is "basic". supports "basic" and "digest"
        :arg bool allow_nonstandard_methods: Allow unknown values for ``method``
           argument? Default is False.
        :arg bool validate_cert: For HTTPS requests, validate the server's
           certificate? Default is True.
        :arg str ca_certs: filename of CA certificates in PEM format,
           or None to use defaults. See note below when used with
           ``curl_httpclient``.
        :arg str client_key: Filename for client SSL key, if any. See
           note below when used with ``curl_httpclient``.
        :arg str client_cert: Filename for client SSL certificate, if any.
           See note below when used with ``curl_httpclient``.
        :arg ssl.SSLContext ssl_options: `ssl.SSLContext` object for use in
           ``simple_httpclient`` (unsupported by ``curl_httpclient``).
           Overrides ``validate_cert``, ``ca_certs``, ``client_key``,
           and ``client_cert``.
        :arg bool allow_ipv6: Use IPv6 when available? Default is True.
        :arg bool expect_100_continue: If true, send the
           ``Expect: 100-continue`` header and wait for a continue response
           before sending the request body. Only supported with
           ``simple_httpclient``.
        .. note::
            When using ``curl_httpclient`` certain options may be
            inherited by subsequent fetches because ``pycurl`` does
            not allow them to be cleanly reset. This applies to the
            ``ca_certs``, ``client_key``, ``client_cert``, and
            ``network_interface`` arguments. If you use these
            options, you should pass them on every request (you don't
            have to always use the same values, but it's not possible
            to mix requests that specify these options with ones that
            use the defaults).
        .. versionadded:: 3.1
           The ``auth_mode`` argument.
        .. versionadded:: 4.0
           The ``body_producer`` and ``expect_100_continue`` arguments.
        .. versionadded:: 4.2
           The ``ssl_options`` argument.
        .. versionadded:: 4.5
           The ``proxy_auth_mode`` argument.
        """
        # Note that some of these attributes go through property setters
        # defined below.
        self.headers = headers  # type: ignore
        if if_modified_since:
            self.headers["If-Modified-Since"] = httputil.format_timestamp(
                if_modified_since
            )
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_username = proxy_username
        self.proxy_password = proxy_password
        self.proxy_auth_mode = proxy_auth_mode
        self.url = url
        self.method = method
        self.body = body  # type: ignore
        self.body_producer = body_producer
        self.auth_username = auth_username
        self.auth_password = auth_password
        self.auth_mode = auth_mode
        self.connect_timeout = connect_timeout
        self.request_timeout = request_timeout
        self.follow_redirects = follow_redirects
        self.max_redirects = max_redirects
        self.user_agent = user_agent
        # Honor the deprecated use_gzip alias only when decompress_response
        # was not given explicitly.
        if decompress_response is not None:
            self.decompress_response = decompress_response  # type: Optional[bool]
        else:
            self.decompress_response = use_gzip
        self.network_interface = network_interface
        self.streaming_callback = streaming_callback
        self.header_callback = header_callback
        self.prepare_curl_callback = prepare_curl_callback
        self.allow_nonstandard_methods = allow_nonstandard_methods
        self.validate_cert = validate_cert
        self.ca_certs = ca_certs
        self.allow_ipv6 = allow_ipv6
        self.client_key = client_key
        self.client_cert = client_cert
        self.ssl_options = ssl_options
        self.expect_100_continue = expect_100_continue
        self.start_time = time.time()
    @property
    def headers(self) -> httputil.HTTPHeaders:
        # TODO: headers may actually be a plain dict until fairly late in
        # the process (AsyncHTTPClient.fetch), but practically speaking,
        # whenever the property is used they're already HTTPHeaders.
        return self._headers  # type: ignore
    @headers.setter
    def headers(self, value: Union[Dict[str, str], httputil.HTTPHeaders]) -> None:
        if value is None:
            self._headers = httputil.HTTPHeaders()
        else:
            self._headers = value  # type: ignore
    @property
    def body(self) -> bytes:
        return self._body
    @body.setter
    def body(self, value: Union[bytes, str]) -> None:
        # Normalize str input to utf-8-encoded bytes.
        self._body = utf8(value)
class HTTPResponse(object):
    """HTTP Response object.
    Attributes:
    * ``request``: HTTPRequest object
    * ``code``: numeric HTTP status code, e.g. 200 or 404
    * ``reason``: human-readable reason phrase describing the status code
    * ``headers``: `tornado.httputil.HTTPHeaders` object
    * ``effective_url``: final location of the resource after following any
      redirects
    * ``buffer``: ``cStringIO`` object for response body
    * ``body``: response body as bytes (created on demand from ``self.buffer``)
    * ``error``: Exception object, if any
    * ``request_time``: seconds from request start to finish. Includes all
      network operations from DNS resolution to receiving the last byte of
      data. Does not include time spent in the queue (due to the
      ``max_clients`` option). If redirects were followed, only includes
      the final request.
    * ``start_time``: Time at which the HTTP operation started, based on
      `time.time` (not the monotonic clock used by `.IOLoop.time`). May
      be ``None`` if the request timed out while in the queue.
    * ``time_info``: dictionary of diagnostic timing information from the
      request. Available data are subject to change, but currently uses timings
      available from http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html,
      plus ``queue``, which is the delay (if any) introduced by waiting for
      a slot under `AsyncHTTPClient`'s ``max_clients`` setting.
    .. versionadded:: 5.1
       Added the ``start_time`` attribute.
    .. versionchanged:: 5.1
       The ``request_time`` attribute previously included time spent in the queue
       for ``simple_httpclient``, but not in ``curl_httpclient``. Now queueing time
       is excluded in both implementations. ``request_time`` is now more accurate for
       ``curl_httpclient`` because it uses a monotonic clock when available.
    """
    # I'm not sure why these don't get type-inferred from the references in __init__.
    error = None  # type: Optional[BaseException]
    _error_is_response_code = False
    request = None  # type: HTTPRequest
    def __init__(
        self,
        request: HTTPRequest,
        code: int,
        headers: Optional[httputil.HTTPHeaders] = None,
        buffer: Optional[BytesIO] = None,
        effective_url: Optional[str] = None,
        error: Optional[BaseException] = None,
        request_time: Optional[float] = None,
        time_info: Optional[Dict[str, float]] = None,
        reason: Optional[str] = None,
        start_time: Optional[float] = None,
    ) -> None:
        if isinstance(request, _RequestProxy):
            # Unwrap the proxy so self.request is the caller's original object.
            self.request = request.request
        else:
            self.request = request
        self.code = code
        self.reason = reason or httputil.responses.get(code, "Unknown")
        if headers is not None:
            self.headers = headers
        else:
            self.headers = httputil.HTTPHeaders()
        self.buffer = buffer
        self._body = None  # type: Optional[bytes]
        if effective_url is None:
            self.effective_url = request.url
        else:
            self.effective_url = effective_url
        self._error_is_response_code = False
        if error is None:
            if self.code < 200 or self.code >= 300:
                # Synthesize an HTTPError for non-2xx codes; the flag lets
                # fetch(raise_error=False) return the response anyway.
                self._error_is_response_code = True
                self.error = HTTPError(self.code, message=self.reason, response=self)
            else:
                self.error = None
        else:
            self.error = error
        self.start_time = start_time
        self.request_time = request_time
        self.time_info = time_info or {}
    @property
    def body(self) -> bytes:
        # Materialized lazily from the buffer and cached in self._body.
        if self.buffer is None:
            return b""
        elif self._body is None:
            self._body = self.buffer.getvalue()
        return self._body
    def rethrow(self) -> None:
        """If there was an error on the request, raise an `HTTPError`."""
        if self.error:
            raise self.error
    def __repr__(self) -> str:
        args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items()))
        return "%s(%s)" % (self.__class__.__name__, args)
class HTTPClientError(Exception):
    """Exception thrown for an unsuccessful HTTP request.
    Attributes:
    * ``code`` - HTTP error integer error code, e.g. 404. Error code 599 is
      used when no HTTP response was received, e.g. for a timeout.
    * ``response`` - `HTTPResponse` object, if any.
    Note that if ``follow_redirects`` is False, redirects become HTTPErrors,
    and you can look at ``error.response.headers['Location']`` to see the
    destination of the redirect.
    .. versionchanged:: 5.1
       Renamed from ``HTTPError`` to ``HTTPClientError`` to avoid collisions with
       `tornado.web.HTTPError`. The name ``tornado.httpclient.HTTPError`` remains
       as an alias.
    """
    def __init__(
        self,
        code: int,
        message: Optional[str] = None,
        response: Optional[HTTPResponse] = None,
    ) -> None:
        # Pass the raw arguments (message possibly None) to Exception so
        # pickling/args stay faithful to what the caller supplied.
        super().__init__(code, message, response)
        self.code = code
        self.response = response
        self.message = message or httputil.responses.get(code, "Unknown")
    def __str__(self) -> str:
        return "HTTP %d: %s" % (self.code, self.message)
    # The default __repr__ would recurse through the cyclic reference
    # between self and self.response (especially on PyPy, which lacks
    # CPython's recursion detection), so reuse the simple string form.
    __repr__ = __str__
HTTPError = HTTPClientError
class _RequestProxy(object):
    """Attribute lookup that falls back from a request object to defaults.
    Used internally by AsyncHTTPClient implementations.
    """
    def __init__(
        self, request: HTTPRequest, defaults: Optional[Dict[str, Any]]
    ) -> None:
        self.request = request
        self.defaults = defaults
    def __getattr__(self, name: str) -> Any:
        # A value of None on the request means "unset" and falls through
        # to the defaults dictionary (when one was provided).
        value = getattr(self.request, name)
        if value is not None:
            return value
        if self.defaults is None:
            return None
        return self.defaults.get(name, None)
def main() -> None:
    """Command-line URL fetcher: fetch each argument and print the result."""
    from tornado.options import define, options, parse_command_line
    define("print_headers", type=bool, default=False)
    define("print_body", type=bool, default=True)
    define("follow_redirects", type=bool, default=True)
    define("validate_cert", type=bool, default=True)
    define("proxy_host", type=str)
    define("proxy_port", type=int)
    urls = parse_command_line()
    client = HTTPClient()
    for url in urls:
        try:
            response = client.fetch(
                url,
                follow_redirects=options.follow_redirects,
                validate_cert=options.validate_cert,
                proxy_host=options.proxy_host,
                proxy_port=options.proxy_port,
            )
        except HTTPError as e:
            # A non-2xx status still carries a printable HTTPResponse;
            # errors without one (e.g. timeouts) are re-raised.
            if e.response is None:
                raise
            response = e.response
        if options.print_headers:
            print(response.headers)
        if options.print_body:
            print(native_str(response.body))
    client.close()
if __name__ == "__main__":
    main()
| |
from mks_structure_analysis import MKSStructureAnalysis
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import Pipeline
class MKSHomogenizationModel(MKSStructureAnalysis):
    """
    The `MKSHomogenizationModel` takes in microstructures and their
    associated macroscopic property, and creates a low dimensional structure
    property linkage. The `MKSHomogenizationModel` model is designed to
    integrate with dimensionality reduction techniques and predictive models.
    Attributes:
        degree: Degree of the polynomial used by
            `property_linker`.
        n_components: Number of components used by `dimension_reducer`.
        dimension_reducer: Instance of a dimensionality reduction class.
        property_linker: Instance of class that maps materials property to the
            microstructures.
        correlations: spatial correlations to be computed
        basis: instance of a basis class
        reduced_fit_data: Low dimensionality representation of spatial
            correlations used to fit the model.
        reduced_predict_data: Low dimensionality representation of spatial
            correlations predicted by the model.
    Below is an example of using MKSHomogenizationModel to predict (or
    classify) the type of microstructure using PCA and Logistic Regression.
    >>> import numpy as np
    >>> n_states = 3
    >>> domain = [-1, 1]
    >>> from pymks.bases import LegendreBasis
    >>> leg_basis = LegendreBasis(n_states=n_states, domain=domain)
    >>> from sklearn.decomposition import PCA
    >>> from sklearn.linear_model import LogisticRegression
    >>> reducer = PCA(n_components=3)
    >>> linker = LogisticRegression()
    >>> model = MKSHomogenizationModel(
    ...     basis=leg_basis, dimension_reducer=reducer, property_linker=linker)
    >>> from pymks.datasets import make_cahn_hilliard
    >>> X0, X1 = make_cahn_hilliard(n_samples=50)
    >>> y0 = np.zeros(X0.shape[0])
    >>> y1 = np.ones(X1.shape[0])
    >>> X = np.concatenate((X0, X1))
    >>> y = np.concatenate((y0, y1))
    >>> model.fit(X, y)
    >>> X0_test, X1_test = make_cahn_hilliard(n_samples=3)
    >>> y0_test = model.predict(X0_test)
    >>> y1_test = model.predict(X1_test)
    >>> assert np.allclose(y0_test, [0, 0, 0])
    >>> assert np.allclose(y1_test, [1, 1, 1])
    """
    def __init__(self, basis=None, dimension_reducer=None, n_components=None,
                 property_linker=None, degree=1, correlations=None,
                 compute_correlations=True, store_correlations=False,
                 mean_center=True):
        """
        Create an instance of a `MKSHomogenizationModel`.
        Args:
            basis (class, optional): an instance of a bases class.
            dimension_reducer (class, optional): an instance of a
                dimensionality reduction class with a fit_transform method. The
                default class is RandomizedPCA.
            property_linker (class, optional): an instance for a machine
                learning class with fit and predict methods.
            n_components (int, optional): number of components kept by the
                dimension_reducer
            degree (int, optional): degree of the polynomial used by
                property_linker.
            correlations (list, optional): list of spatial correlations to
                compute, default is the autocorrelation with the first local
                state and all of its cross correlations. For example if basis
                has n_states=3, correlation would be [(0, 0), (0, 1), (0, 2)]
            compute_correlations (boolean, optional): If false spatial
                correlations will not be calculated as part of the fit and
                predict methods. The spatial correlations can be passed as `X`
                to both methods, default is True.
            mean_center (boolean, optional): If true the data will be mean
                centered before dimensionality reduction is computed.
        """
        if property_linker is None:
            property_linker = LinearRegression()
        # Polynomial feature expansion feeds the user-supplied estimator; the
        # 'connector' step is kept in sync through the property setters below.
        self._linker = Pipeline([('poly', PolynomialFeatures(degree=degree)),
                                 ('connector', property_linker)])
        self.degree = degree
        self.property_linker = property_linker
        # Fail early if the supplied estimator cannot be used for regression.
        if not callable(getattr(self.property_linker, "fit", None)):
            raise RuntimeError(
                "property_linker does not have fit() method.")
        if not callable(getattr(self.property_linker, "predict", None)):
            raise RuntimeError(
                "property_linker does not have predict() method.")
        self.compute_correlations = compute_correlations
        if self.compute_correlations:
            if basis is None:
                raise RuntimeError(('a basis is need to compute spatial ') +
                                   ('correlations'))
        super(MKSHomogenizationModel,
              self).__init__(store_correlations=store_correlations,
                             dimension_reducer=dimension_reducer,
                             correlations=correlations,
                             n_components=n_components, basis=basis,
                             mean_center=mean_center)

    @property
    def degree(self):
        # Degree of the PolynomialFeatures expansion used by the linker.
        return self._degree

    @degree.setter
    def degree(self, value):
        """Setter for the polynomial degree for property_linker.
        """
        self._degree = value
        self._linker.set_params(poly__degree=value)

    @property
    def property_linker(self):
        # The user-facing estimator (the 'connector' step of the pipeline).
        return self._property_linker

    @property_linker.setter
    def property_linker(self, prop_linker):
        """Setter for the property_linker class.
        """
        self._property_linker = prop_linker
        self._linker.set_params(connector=prop_linker)

    def fit(self, X, y, reduce_labels=None,
            periodic_axes=None, confidence_index=None, size=None):
        """
        Fits data by calculating 2-point statistics from X, performing
        dimension reduction using dimension_reducer, and fitting the reduced
        data with the property_linker.
        Args:
            X (ND array): The microstructures or spatial correlations, a
                `(n_samples, n_x, ...)` shaped array where `n_samples` is the
                number of samples and `n_x` is the spatial discretization.
            y (1D array): The material property associated with `X`.
            reduce_labels (1D array, optional): label for X used during the
                fit_transform method for the `dimension_reducer`.
            periodic_axes (list, optional): axes that are periodic. (0, 2)
                would indicate that axes x and z are periodic in a 3D
                microstructure.
            confidence_index (ND array, optional): array with same shape as X
                used to assign a confidence value for each data point.
        Example
        >>> import numpy as np
        >>> from sklearn.decomposition import PCA
        >>> from sklearn.linear_model import LinearRegression
        >>> from pymks.bases import PrimitiveBasis
        >>> from pymks.stats import correlate
        >>> reducer = PCA(n_components=2)
        >>> linker = LinearRegression()
        >>> prim_basis = PrimitiveBasis(n_states=2, domain=[0, 1])
        >>> correlations = [(0, 0), (1, 1), (0, 1)]
        >>> model = MKSHomogenizationModel(prim_basis,
        ...                                dimension_reducer=reducer,
        ...                                property_linker=linker,
        ...                                correlations=correlations)
        >>> np.random.seed(99)
        >>> X = np.random.randint(2, size=(3, 15))
        >>> y = np.array([1, 2, 3])
        >>> model.fit(X, y)
        >>> X_ = prim_basis.discretize(X)
        >>> X_stats = correlate(X_)
        >>> X_reshaped = X_stats.reshape((X_stats.shape[0], X_stats[0].size))
        >>> X_pca = reducer.fit_transform(X_reshaped - np.mean(X_reshaped,
        ...                               axis=1)[:, None])
        >>> assert np.allclose(model.fit_data, X_pca)
        Now let's use the same method with spatial correlations instead of
        microstructures.
        >>> from sklearn.decomposition import PCA
        >>> from sklearn.linear_model import LinearRegression
        >>> from pymks.bases import PrimitiveBasis
        >>> from pymks.stats import correlate
        >>> reducer = PCA(n_components=2)
        >>> linker = LinearRegression()
        >>> prim_basis = PrimitiveBasis(n_states=2, domain=[0, 1])
        >>> correlations = [(0, 0), (1, 1), (0, 1)]
        >>> model = MKSHomogenizationModel(dimension_reducer=reducer,
        ...                                property_linker=linker,
        ...                                compute_correlations=False)
        >>> np.random.seed(99)
        >>> X = np.random.randint(2, size=(3, 15))
        >>> y = np.array([1, 2, 3])
        >>> X_ = prim_basis.discretize(X)
        >>> X_stats = correlate(X_, correlations=correlations)
        >>> model.fit(X_stats, y)
        >>> X_reshaped = X_stats.reshape((X_stats.shape[0], X_stats[0].size))
        >>> X_pca = reducer.fit_transform(X_reshaped - np.mean(X_reshaped,
        ...                               axis=1)[:, None])
        >>> assert np.allclose(model.fit_data, X_pca)
        """
        if self.compute_correlations:
            if periodic_axes is None:
                periodic_axes = []
            if size is not None:
                # Restore the spatial shape when X arrives flattened.
                new_shape = (X.shape[0],) + size
                X = X.reshape(new_shape)
            X = self._compute_stats(X, periodic_axes, confidence_index)
        X_reshape = self._reduce_shape(X)
        X_reduced = self._fit_transform(X_reshape, reduce_labels)
        self._linker.fit(X_reduced, y)

    def predict(self, X, periodic_axes=None, confidence_index=None):
        """Predicts macroscopic property for the microstructures `X`.
        Args:
            X (ND array): The microstructure, an `(n_samples, n_x, ...)`
                shaped array where `n_samples` is the number of samples and
                `n_x` is the spatial discretization.
            periodic_axes (list, optional): axes that are periodic. (0, 2)
                would indicate that axes x and z are periodic in a 3D
                microstructure.
            confidence_index (ND array, optional): array with same shape as X
                used to assign a confidence value for each data point.
        Returns:
            The predicted macroscopic property for `X`.
        Example
        >>> import numpy as np
        >>> from sklearn.manifold import LocallyLinearEmbedding
        >>> from sklearn.linear_model import BayesianRidge
        >>> from pymks.bases import PrimitiveBasis
        >>> np.random.seed(99)
        >>> X = np.random.randint(2, size=(50, 100))
        >>> y = np.random.random(50)
        >>> reducer = LocallyLinearEmbedding()
        >>> linker = BayesianRidge()
        >>> prim_basis = PrimitiveBasis(2, domain=[0, 1])
        >>> model = MKSHomogenizationModel(prim_basis, n_components=2,
        ...                                dimension_reducer=reducer,
        ...                                property_linker=linker)
        >>> model.fit(X, y)
        >>> X_test = np.random.randint(2, size=(1, 100))
        Predict with microstructures
        >>> y_pred = model.predict(X_test)
        Predict with spatial correlations
        >>> from pymks.stats import correlate
        >>> model.compute_correlations = False
        >>> X_ = prim_basis.discretize(X_test)
        >>> X_corr = correlate(X_, correlations=[(0, 0), (0, 1)])
        >>> y_pred_stats = model.predict(X_corr)
        >>> assert y_pred_stats == y_pred
        """
        # The connector estimator only grows a coef_ attribute once it has
        # been fitted.  (A leftover Python-2 debug print of the connector was
        # removed here.)
        if not hasattr(self._linker.get_params()['connector'], "coef_"):
            raise RuntimeError('fit() method must be run before predict().')
        if self.compute_correlations is True:
            if periodic_axes is None:
                periodic_axes = []
            X = self._compute_stats(X, periodic_axes, confidence_index)
        X_reduced = self._transform(X)
        self.reduced_predict_data = X_reduced
        return self._linker.predict(X_reduced)

    def score(self, X, y, periodic_axes=None, confidence_index=None):
        """
        The score function for the MKSHomogenizationModel. It formats the
        data and uses the score method from the property_linker.
        Args:
            X (ND array): The microstructure, an `(n_samples, n_x, ...)`
                shaped array where `n_samples` is the number of samples and
                `n_x` is the spatial discretization.
            y (1D array): The material property associated with `X`.
            periodic_axes (list, optional): axes that are periodic. (0, 2)
                would indicate that axes x and z are periodic in a 3D
                microstructure.
            confidence_index (ND array, optional): array with same shape as X
                used to assign a confidence value for each data point.
        Returns:
            Score for MKSHomogenizationModel from the selected
            property_linker.
        """
        if periodic_axes is None:
            periodic_axes = []
        if not callable(getattr(self._linker, "score", None)):
            raise RuntimeError(
                "property_linker does not have score() method.")
        if self.compute_correlations:
            # NOTE(review): fit()/predict() use self._compute_stats while
            # score() uses self._correlate -- presumably equivalent helpers on
            # MKSStructureAnalysis; confirm against the superclass.
            X = self._correlate(X, periodic_axes, confidence_index)
        X_reduced = self._transform(X)
        return self._linker.score(X_reduced, y)
| |
from django.utils.safestring import mark_safe
from django.contrib.gis.geos import fromstr, Point, LineString, LinearRing, Polygon
class GEvent(object):
    """
    A Python wrapper for the Google GEvent object.
    Events can be attached to any object derived from GOverlayBase with the
    add_event() call.
    For more information please see the Google Maps API Reference:
    http://code.google.com/apis/maps/documentation/reference.html#GEvent
    Example:
      from django.shortcuts import render_to_response
      from django.contrib.gis.maps.google import GoogleMap, GEvent, GPolyline
      def sample_request(request):
          polyline = GPolyline('LINESTRING(101 26, 112 26, 102 31)')
          event = GEvent('click',
                         'function() { location.href = "http://www.google.com"}')
          polyline.add_event(event)
          return render_to_response('mytemplate.html',
                                    {'google' : GoogleMap(polylines=[polyline])})
    """
    def __init__(self, event, action):
        """
        Initializes a GEvent object.
        Parameters:
            event:
              A string naming the event to listen for, e.g. 'click'.  It must
              be an event the Google Maps API understands for the overlay it
              is attached to; Django performs no validation of it.
            action:
              A string holding a JavaScript function, e.g.
              'function() { location.href = "newurl";}'.  It must be valid
              JavaScript; again Django performs no validation.
        """
        self.event = event
        self.action = action

    def __unicode__(self):
        "Returns the parameter part of a GEvent."
        params = '"%s", %s' % (self.event, self.action)
        return mark_safe(params)
class GOverlayBase(object):
    """Common base for the Google overlay wrappers: tracks attached GEvent
    objects and renders itself as the corresponding JavaScript API call."""

    def __init__(self):
        # GEvent instances attached via add_event().
        self.events = []

    def latlng_from_coords(self, coords):
        "Generates a JavaScript array of GLatLng objects for the given coordinates."
        # Note the (x, y) -> GLatLng(y, x) swap: GLatLng takes latitude first.
        latlngs = ['new GLatLng(%s,%s)' % (y, x) for x, y in coords]
        return '[%s]' % ','.join(latlngs)

    def add_event(self, event):
        "Attaches a GEvent to the overlay object."
        self.events.append(event)

    def __unicode__(self):
        "The string representation is the JavaScript API call."
        js_call = '%s(%s)' % (self.__class__.__name__, self.js_params)
        return mark_safe(js_call)
class GPolygon(GOverlayBase):
    """
    A Python wrapper for the Google GPolygon object. For more information
    please see the Google Maps API Reference:
    http://code.google.com/apis/maps/documentation/reference.html#GPolygon
    """
    def __init__(self, poly,
                 stroke_color='#0000ff', stroke_weight=2, stroke_opacity=1,
                 fill_color='#0000ff', fill_opacity=0.4):
        """
        The GPolygon object initializes on a GEOS Polygon or a parameter that
        may be instantiated into GEOS Polygon. Please note that this will not
        depict a Polygon's internal rings.
        Keyword Options:
          stroke_color:
            The color of the polygon outline. Defaults to '#0000ff' (blue).
          stroke_weight:
            The width of the polygon outline, in pixels. Defaults to 2.
          stroke_opacity:
            The opacity of the polygon outline, between 0 and 1. Defaults to 1.
          fill_color:
            The color of the polygon fill. Defaults to '#0000ff' (blue).
          fill_opacity:
            The opacity of the polygon fill. Defaults to 0.4.
        """
        # Coerce a WKT string or a coordinate sequence into a GEOS Polygon.
        if isinstance(poly, basestring):
            poly = fromstr(poly)
        if isinstance(poly, (tuple, list)):
            poly = Polygon(poly)
        if not isinstance(poly, Polygon):
            raise TypeError('GPolygon may only initialize on GEOS Polygons.')
        # The envelope is used for automatically determining the zoom level.
        self.envelope = poly.envelope
        # JavaScript array of GLatLng objects for the exterior ring only.
        self.points = self.latlng_from_coords(poly.shell.coords)
        # Stroke settings.
        self.stroke_color = stroke_color
        self.stroke_opacity = stroke_opacity
        self.stroke_weight = stroke_weight
        # Fill settings.
        self.fill_color = fill_color
        self.fill_opacity = fill_opacity
        super(GPolygon, self).__init__()

    @property
    def js_params(self):
        "Constructor parameters for the GPolygon JavaScript object."
        return '%s, "%s", %s, %s, "%s", %s' % (
            self.points, self.stroke_color, self.stroke_weight,
            self.stroke_opacity, self.fill_color, self.fill_opacity)
class GPolyline(GOverlayBase):
    """
    A Python wrapper for the Google GPolyline object. For more information
    please see the Google Maps API Reference:
    http://code.google.com/apis/maps/documentation/reference.html#GPolyline
    """
    def __init__(self, geom, color='#0000ff', weight=2, opacity=1):
        """
        The GPolyline object may be initialized on GEOS LineString,
        LinearRing, and Polygon objects (internal rings not supported) or a
        parameter that may be instantiated into one of the above geometries.
        Keyword Options:
          color:
            The color to use for the polyline. Defaults to '#0000ff' (blue).
          weight:
            The width of the polyline, in pixels. Defaults to 2.
          opacity:
            The opacity of the polyline, between 0 and 1. Defaults to 1.
        """
        # Try to construct a GEOS geometry when a WKT string or a coordinate
        # sequence is passed in.
        if isinstance(geom, basestring):
            geom = fromstr(geom)
        if isinstance(geom, (tuple, list)):
            geom = Polygon(geom)
        # Generate the lat/lng coordinate pairs for each supported geometry.
        if isinstance(geom, (LineString, LinearRing)):
            self.latlngs = self.latlng_from_coords(geom.coords)
        elif isinstance(geom, Polygon):
            # Only the exterior ring of a polygon is depicted.
            self.latlngs = self.latlng_from_coords(geom.shell.coords)
        else:
            raise TypeError('GPolyline may only initialize on GEOS LineString, LinearRing, and/or Polygon geometries.')
        # The envelope is used for automatic zoom determination.
        self.envelope = geom.envelope
        self.color = color
        self.weight = weight
        self.opacity = opacity
        super(GPolyline, self).__init__()

    @property
    def js_params(self):
        "Constructor parameters for the GPolyline JavaScript object."
        return '%s, "%s", %s, %s' % (self.latlngs, self.color, self.weight, self.opacity)
class GMarker(GOverlayBase):
    """
    A Python wrapper for the Google GMarker object. For more information
    please see the Google Maps API Reference:
    http://code.google.com/apis/maps/documentation/reference.html#GMarker
    Example:
      from django.shortcuts import render_to_response
      from django.contrib.gis.maps.google.overlays import GMarker, GEvent
      def sample_request(request):
          marker = GMarker('POINT(101 26)')
          event = GEvent('click',
                         'function() { location.href = "http://www.google.com"}')
          marker.add_event(event)
          return render_to_response('mytemplate.html',
                                    {'google' : GoogleMap(markers=[marker])})
    """
    def __init__(self, geom, title=None):
        """
        The GMarker object may initialize on GEOS Points or a parameter
        that may be instantiated into a GEOS point. Keyword options map to
        GMarkerOptions -- so far only the title option is supported.
        Keyword Options:
          title:
            Title option for GMarker, will be displayed as a tooltip.
        """
        # Coerce a WKT string or coordinate pair into a GEOS Point.
        if isinstance(geom, basestring):
            geom = fromstr(geom)
        if isinstance(geom, (tuple, list)):
            geom = Point(geom)
        if not isinstance(geom, Point):
            raise TypeError('GMarker may only initialize on GEOS Point geometry.')
        self.latlng = self.latlng_from_coords(geom.coords)
        # The envelope is used for automatic zoom determination.
        self.envelope = geom.envelope
        # TODO: Add support for more GMarkerOptions
        self.title = title
        super(GMarker, self).__init__()

    def latlng_from_coords(self, coords):
        "Returns a single GLatLng for the marker's (x, y) coordinates."
        return 'new GLatLng(%s,%s)' % (coords[1], coords[0])

    def options(self):
        "JavaScript object literal of the GMarkerOptions for this marker."
        opts = []
        if self.title:
            opts.append('title: "%s"' % self.title)
        return '{%s}' % ','.join(opts)

    @property
    def js_params(self):
        "Constructor parameters for the GMarker JavaScript object."
        return '%s, %s' % (self.latlng, self.options())
| |
#from otp.ai.AIBaseGlobal import *
from direct.directnotify import DirectNotifyGlobal
from direct.showbase.DirectObject import DirectObject
from ConnectionRepository import *
from panda3d.core import ConfigVariableDouble, ConfigVariableInt, ConfigVariableBool
# Default number of seconds a request may wait before its timeout fires.
ASYNC_REQUEST_DEFAULT_TIMEOUT_IN_SECONDS = 8.0
# Sentinel for numRetries meaning "retry forever".
ASYNC_REQUEST_INFINITE_RETRIES = -1
# By default a timed-out request is not retried.
ASYNC_REQUEST_DEFAULT_NUM_RETRIES = 0
if __debug__:
    # Debug-only config overrides applied to every AsyncRequest; a negative
    # value leaves the per-request settings untouched.
    _overrideTimeoutTimeForAllAsyncRequests = ConfigVariableDouble("async-request-timeout", -1.0)
    _overrideNumRetriesForAllAsyncRequests = ConfigVariableInt("async-request-num-retries", -1)
    # When set, drop into pdb when a request finally times out.
    _breakOnTimeout = ConfigVariableBool("async-request-break-on-timeout", False)
class AsyncRequest(DirectObject):
    """
    This class is used to make asynchronous reads and creates to a database.
    You can create a list of self.neededObjects and then ask for each to be
    read or created, or if you only have one object that you need you can
    skip the self.neededObjects because calling askForObject or createObject
    will set the self.neededObjects value for you.
    Once all the objects have been read or created, the self.finish() method
    will be called. You may override this function to run your code in a
    derived class.
    If you wish to queue up several items that you all need before the finish
    method is called, you can put items in self.neededObjects and then call
    askForObject or createObject afterwards. That way the _checkCompletion
    will not call finish until after all the requests have been done.
    If you need to chain several object reads or creates, just add more
    entries to the self.neededObjects dictionary in the self.finish function
    and return without calling AsyncRequest.finish(). Your finish method
    will be called again when the new self.neededObjects is complete. You
    may repeat this as necessary.
    """
    # Registry of all live requests keyed by id(self); consumed by
    # cleanupAsyncRequests() at application shutdown.
    _asyncRequests = {}

    notify = DirectNotifyGlobal.directNotify.newCategory('AsyncRequest')

    def __init__(self, air, replyToChannelId = None,
                 timeoutTime = ASYNC_REQUEST_DEFAULT_TIMEOUT_IN_SECONDS,
                 numRetries = ASYNC_REQUEST_DEFAULT_NUM_RETRIES):
        """
        air is the AI Repository.
        replyToChannelId may be an avatarId, an accountId, or a channelId.
        timeoutTime is how many seconds to wait before aborting the request.
        numRetries is the number of times to retry the request before giving up.
        """
        assert AsyncRequest.notify.debugCall()
        if __debug__:
            # Apply the module-wide debug overrides, if set (>= 0).
            if _overrideTimeoutTimeForAllAsyncRequests.getValue() >= 0.0:
                timeoutTime = _overrideTimeoutTimeForAllAsyncRequests.getValue()
            if _overrideNumRetriesForAllAsyncRequests.getValue() >= 0:
                numRetries = _overrideNumRetriesForAllAsyncRequests.getValue()
        # Register in the class-wide live-request registry.
        AsyncRequest._asyncRequests[id(self)] = self
        # Event name broadcast from delete(); the trailing comma in id(self,)
        # is harmless (still a one-argument call).
        self.deletingMessage = "AsyncRequest-deleting-%s"%(id(self,))
        self.air = air
        self.replyToChannelId = replyToChannelId
        # doMethodLater handle for the timeout timer; created on first request.
        self.timeoutTask = None
        # Maps key -> distObj (or None while the answer is outstanding).
        self.neededObjects = {}
        self._timeoutTime = timeoutTime
        self._initialNumRetries = numRetries

    def delete(self):
        # Tear down: deregister, stop listening for events, cancel the timeout
        # task, announce the deletion, then drop references.
        assert AsyncRequest.notify.debugCall()
        del AsyncRequest._asyncRequests[id(self)]
        self.ignoreAll()
        self._resetTimeoutTask(False)
        # NOTE: `messenger` is a showbase builtin/global, not defined here.
        messenger.send(self.deletingMessage, [])
        del self.neededObjects
        del self.air
        del self.replyToChannelId

    def askForObjectField(
            self, dclassName, fieldName, doId, key = None, context = None):
        """
        Request an already created object, i.e. read from database.
        """
        assert AsyncRequest.notify.debugCall()
        if key is None:
            # default the dictionary key to the fieldName
            key = fieldName
        assert doId
        if context is None:
            context = self.air.allocateContext()
        self.air.contextToClassName[context] = dclassName
        # The reply arrives as a "doFieldResponse-<context>" event.
        self.acceptOnce(
            "doFieldResponse-%s"%(context,),
            self._checkCompletion, [key])
        # Mark the answer as outstanding so finish() waits for it.
        self.neededObjects[key] = None
        self.air.queryObjectField(dclassName, fieldName, doId, context)
        self._resetTimeoutTask()

    def askForObjectFields(
            self, dclassName, fieldNames, doId, key = None, context = None):
        """
        Request an already created object, i.e. read from database.
        """
        assert AsyncRequest.notify.debugCall()
        if key is None:
            # default the dictionary key to the fieldName
            key = fieldNames[0]
        assert doId
        if context is None:
            context = self.air.allocateContext()
        self.air.contextToClassName[context] = dclassName
        self.acceptOnce(
            "doFieldResponse-%s"%(context,),
            self._checkCompletion, [key])
        # NOTE(review): unlike askForObjectField(), this does not pre-register
        # `key` in self.neededObjects before issuing the query -- confirm
        # whether that asymmetry is intentional.
        self.air.queryObjectFields(dclassName, fieldNames, doId, context)
        self._resetTimeoutTask()

    def askForObjectFieldsByString(self, dbId, dclassName, objString, fieldNames, key=None, context=None):
        # Same as askForObjectFields(), but the object is identified by a
        # database id plus a string rather than a doId.
        assert AsyncRequest.notify.debugCall()
        assert dbId
        if key is None:
            # default the dictionary key to the fieldNames
            key = fieldNames
        if context is None:
            context=self.air.allocateContext()
        self.air.contextToClassName[context]=dclassName
        self.acceptOnce(
            "doFieldResponse-%s"%(context,),
            self._checkCompletion, [key])
        self.air.queryObjectStringFields(dbId,dclassName,objString,fieldNames,context)
        self._resetTimeoutTask()

    def askForObject(self, doId, context = None):
        """
        Request an already created object, i.e. read from database.
        """
        assert AsyncRequest.notify.debugCall()
        assert doId
        if context is None:
            context = self.air.allocateContext()
        # With a None key, _checkCompletion stores the reply under its doId.
        self.acceptOnce(
            "doRequestResponse-%s"%(context,),
            self._checkCompletion, [None])
        self.air.queryObjectAll(doId, context)
        self._resetTimeoutTask()

    def createObject(self, name, className,
                     databaseId = None, values = None, context = None):
        """
        Create a new database object. You can get the doId from within
        your self.finish() function.
        This function is different from createObjectId in that it does
        generate the object when the response comes back. The object is
        added to the doId2do and so forth and treated as a full regular
        object (which it is). This is useful on the AI where we really
        do want the object on the AI.
        """
        assert AsyncRequest.notify.debugCall()
        assert name
        assert className
        self.neededObjects[name] = None
        if context is None:
            context = self.air.allocateContext()
        self.accept(
            self.air.getDatabaseGenerateResponseEvent(context),
            self._doCreateObject, [name, className, values])
        self.air.requestDatabaseGenerate(
            className, context, databaseId = databaseId, values = values)
        self._resetTimeoutTask()

    def createObjectId(self, name, className, values = None, context = None):
        """
        Create a new database object. You can get the doId from within
        your self.finish() function.
        This function is different from createObject in that it does not
        generate the object when the response comes back. It only tells you
        the doId. This is useful on the UD where we don't really want the
        object on the UD, we just want the object created and the UD wants
        to send messages to it using the ID.
        """
        assert AsyncRequest.notify.debugCall()
        assert name
        assert className
        self.neededObjects[name] = None
        if context is None:
            context = self.air.allocateContext()
        self.accept(
            self.air.getDatabaseGenerateResponseEvent(context),
            self._checkCompletion, [name, None])
        self.air.requestDatabaseGenerate(className, context, values = values)
        self._resetTimeoutTask()

    def finish(self):
        """
        This is the function that gets called when all of the needed objects
        are in (i.e. all the askForObject and createObject requests have
        been satisfied).
        If the other requests timeout, finish will not be called.
        """
        assert self.notify.debugCall()
        self.delete()

    def _doCreateObject(self, name, className, values, doId):
        # Response handler for createObject(): generate the distributed
        # object locally, then record it as satisfied.
        isInDoId2do = doId in self.air.doId2do
        distObj = self.air.generateGlobalObject(doId, className, values)
        # NOTE: `game` is a global supplied by the runtime environment.
        if not isInDoId2do and game.name == 'uberDog':
            # only remove doId if this is the uberdog?, in pirates this was
            # causing traded inventory objects to be generated twice, once
            # here and again later when it was noticed the doId was not in
            # the doId2do list yet.
            self.air.doId2do.pop(doId, None)
        self._checkCompletion(name, None, distObj)

    def _checkCompletion(self, name, context, distObj):
        """
        This checks whether we have all the needed objects and calls
        finish() if we do.
        """
        # A None name means "store the reply under its doId" (askForObject).
        if name is not None:
            self.neededObjects[name] = distObj
        else:
            self.neededObjects[distObj.doId] = distObj
        # Any entry still None means an answer is outstanding -- keep waiting.
        for i in self.neededObjects.values():
            if i is None:
                return
        self.finish()

    def _resetTimeoutTask(self, createAnew = True):
        # Cancel any running timeout timer; when createAnew is true, restart
        # it and reset the retry budget.  `taskMgr` is a showbase global.
        if self.timeoutTask:
            taskMgr.remove(self.timeoutTask)
            self.timeoutTask = None
        if createAnew:
            self.numRetries = self._initialNumRetries
            self.timeoutTask = taskMgr.doMethodLater(
                self._timeoutTime, self.timeout,
                "AsyncRequestTimer-%s"%(id(self,)))

    def timeout(self, task):
        # doMethodLater callback: either retry (return Task.again) or give up
        # and delete this request.  `Task` is expected as a global from
        # direct.task -- TODO confirm it is in scope in this module.
        assert AsyncRequest.notify.debugCall(
            "neededObjects: %s"%(self.neededObjects,))
        if self.numRetries > 0:
            assert AsyncRequest.notify.debug(
                'Timed out. Trying %d more time(s) : %s' %
                (self.numRetries + 1, repr(self.neededObjects)))
            self.numRetries -= 1
            return Task.again
        else:
            if __debug__:
                # Optional debugging aid: dump state and break into pdb.
                # (Python 2 print statements.)
                if _breakOnTimeout:
                    if hasattr(self, "avatarId"):
                        print "\n\nself.avatarId =", self.avatarId
                    print "\nself.neededObjects =", self.neededObjects
                    print "\ntimed out after %s seconds.\n\n"%(task.delayTime,)
                    import pdb; pdb.set_trace()
            self.delete()
            return Task.done
def cleanupAsyncRequests():
    """
    Abort and delete every outstanding AsyncRequest.
    Only call this when the application is shutting down.
    """
    # Iterate over a snapshot of the request objects: AsyncRequest.delete()
    # removes its entry from _asyncRequests, so the live dict must not be
    # iterated while deleting.  (The original looped over the dict directly,
    # which yields the id() keys -- plain ints with no delete() method -- and
    # mutated the dict mid-iteration.)
    for asyncRequest in list(AsyncRequest._asyncRequests.values()):
        asyncRequest.delete()
    assert AsyncRequest._asyncRequests == {}
| |
# pylint: disable=W0611
'''
Utils
=====
The Utils module provides a selection of general utility functions and classes
that may be useful for various applications. These include maths, color,
algebraic and platform functions.
.. versionchanged:: 1.6.0
The OrderedDict class has been removed. Use collections.OrderedDict
instead.
'''
# Public names exported by ``from kivy.utils import *``; keep this in sync
# with the definitions in this module.
__all__ = ('intersection', 'difference', 'strtotuple',
           'get_color_from_hex', 'get_hex_from_color', 'get_random_color',
           'is_color_transparent', 'hex_colormap', 'colormap', 'boundary',
           'deprecated', 'SafeList',
           'interpolate', 'QueryDict',
           'platform', 'escape_markup', 'reify', 'rgba')
from os import environ
from sys import platform as _sys_platform
from re import match, split
from kivy.compat import string_types
def boundary(value, minvalue, maxvalue):
    '''Limit a value between a minvalue and maxvalue.'''
    # Clamp from below first, then from above (same as min(max(...), ...)).
    lower_clamped = max(value, minvalue)
    return min(lower_clamped, maxvalue)
def intersection(set1, set2):
    '''Return the intersection of 2 lists.

    Order and duplicates of `set1` are preserved.
    '''
    return list(filter(lambda element: element in set2, set1))
def difference(set1, set2):
    '''Return the difference between 2 lists.

    Order and duplicates of `set1` are preserved.
    '''
    remaining = []
    for element in set1:
        if element not in set2:
            remaining.append(element)
    return remaining
def interpolate(value_from, value_to, step=10):
    '''Interpolate between two values. This can be useful for smoothing some
    transitions. For example::

        # instead of setting directly
        self.pos = pos

        # use interpolate, and you'll have a nicer transition
        self.pos = interpolate(self.pos, new_pos)

    .. warning::
        These interpolations work only on lists/tuples/doubles with the same
        dimensions. No test is done to check the dimensions are the same.
    '''
    # type() (not isinstance) on purpose: subclasses fall through to the
    # scalar branch, exactly as the original behaved.
    if type(value_from) in (list, tuple):
        return [interpolate(vf, vt, step)
                for vf, vt in zip(value_from, value_to)]
    return value_from + (value_to - value_from) / float(step)
def strtotuple(s):
    '''Convert a tuple string into a tuple
    with some security checks. Designed to be used
    with the eval() function::

        a = (12, 54, 68)
        b = str(a)         # return '(12, 54, 68)'
        c = strtotuple(b)  # return (12, 54, 68)
    '''
    # security: only digits, dots, separators and brackets may appear.
    # (raw string; same pattern bytes as before)
    if not match(r'^[,.0-9 ()\[\]]*$', s):
        raise Exception('Invalid characters in string for tuple conversion')
    # fast syntax check
    if s.count('(') != s.count(')'):
        raise Exception('Invalid count of ( and )')
    if s.count('[') != s.count(']'):
        raise Exception('Invalid count of [ and ]')
    # ast.literal_eval only evaluates literal syntax, so even input that
    # slips through the character whitelist cannot execute code (the
    # previous implementation used eval(), which runs any expression).
    import ast
    r = ast.literal_eval(s)
    if type(r) not in (list, tuple):
        raise Exception('Conversion failed')
    return r
def rgba(s, *args):
    '''Return a Kivy color (4 value from 0-1 range) from either a hex string
    or a list of 0-255 values.

    .. versionadded:: 1.10.0
    '''
    if isinstance(s, string_types):
        # Hex string, e.g. '#ff00ff'.
        return get_color_from_hex(s)
    if isinstance(s, (list, tuple)):
        # Sequence of 0-255 channel values; alpha defaults to opaque.
        channels = [component / 255. for component in s]
        if len(channels) == 3:
            channels.append(1)
        return channels
    if isinstance(s, (int, float)):
        # Channels given as separate positional arguments.
        channels = [s / 255.]
        for component in args:
            channels.append(component / 255.)
        if len(channels) == 3:
            channels.append(1)
        return channels
    raise Exception('Invalid value (not a string / list / tuple)')
def get_color_from_hex(s):
    '''Transform a hex string color to a kivy
    :class:`~kivy.graphics.Color`.
    '''
    # Strip any leading '#' characters (the original did this recursively).
    while s.startswith('#'):
        s = s[1:]
    # Split into two-hex-digit chunks and convert each channel to 0-1.
    chunks = [piece for piece in split('([0-9a-f]{2})', s.lower())
              if piece != '']
    value = [int(piece, 16) / 255. for piece in chunks]
    if len(value) == 3:
        # No alpha supplied: default to fully opaque.
        value.append(1)
    return value
def get_hex_from_color(color):
    '''Transform a kivy :class:`~kivy.graphics.Color` to a hex value::

        >>> get_hex_from_color((0, 1, 0))
        '#00ff00'
        >>> get_hex_from_color((.25, .77, .90, .5))
        '#3fc4e57f'

    .. versionadded:: 1.5.0
    '''
    # Scale each 0-1 channel to 0-255 and render as two lowercase hex digits.
    pieces = ['{0:02x}'.format(int(channel * 255)) for channel in color]
    return '#' + ''.join(pieces)
def get_random_color(alpha=1.0):
    '''Returns a random color (4 tuple).

    :Parameters:
        `alpha`: float, defaults to 1.0
            If alpha == 'random', a random alpha value is generated.
    '''
    from random import random
    channels = [random(), random(), random()]
    if alpha == 'random':
        channels.append(random())
    else:
        channels.append(alpha)
    return channels
def is_color_transparent(c):
    '''Return True if the color's alpha channel (4th component) is 0.
    Colors without an alpha channel are never considered transparent.'''
    return len(c) >= 4 and float(c[3]) == 0.
# Mapping of CSS/X11 color names to their '#rrggbb' hex representation.
hex_colormap = {
    'aliceblue': '#f0f8ff',
    'antiquewhite': '#faebd7',
    'aqua': '#00ffff',
    'aquamarine': '#7fffd4',
    'azure': '#f0ffff',
    'beige': '#f5f5dc',
    'bisque': '#ffe4c4',
    'black': '#000000',
    'blanchedalmond': '#ffebcd',
    'blue': '#0000ff',
    'blueviolet': '#8a2be2',
    'brown': '#a52a2a',
    'burlywood': '#deb887',
    'cadetblue': '#5f9ea0',
    'chartreuse': '#7fff00',
    'chocolate': '#d2691e',
    'coral': '#ff7f50',
    'cornflowerblue': '#6495ed',
    'cornsilk': '#fff8dc',
    'crimson': '#dc143c',
    'cyan': '#00ffff',
    'darkblue': '#00008b',
    'darkcyan': '#008b8b',
    'darkgoldenrod': '#b8860b',
    'darkgray': '#a9a9a9',
    'darkgrey': '#a9a9a9',
    'darkgreen': '#006400',
    'darkkhaki': '#bdb76b',
    'darkmagenta': '#8b008b',
    'darkolivegreen': '#556b2f',
    'darkorange': '#ff8c00',
    'darkorchid': '#9932cc',
    'darkred': '#8b0000',
    'darksalmon': '#e9967a',
    'darkseagreen': '#8fbc8f',
    'darkslateblue': '#483d8b',
    'darkslategray': '#2f4f4f',
    'darkslategrey': '#2f4f4f',
    'darkturquoise': '#00ced1',
    'darkviolet': '#9400d3',
    'deeppink': '#ff1493',
    'deepskyblue': '#00bfff',
    'dimgray': '#696969',
    'dimgrey': '#696969',
    'dodgerblue': '#1e90ff',
    'firebrick': '#b22222',
    'floralwhite': '#fffaf0',
    'forestgreen': '#228b22',
    'fuchsia': '#ff00ff',
    'gainsboro': '#dcdcdc',
    'ghostwhite': '#f8f8ff',
    'gold': '#ffd700',
    'goldenrod': '#daa520',
    'gray': '#808080',
    'grey': '#808080',
    'green': '#008000',
    'greenyellow': '#adff2f',
    'honeydew': '#f0fff0',
    'hotpink': '#ff69b4',
    'indianred': '#cd5c5c',
    'indigo': '#4b0082',
    'ivory': '#fffff0',
    'khaki': '#f0e68c',
    'lavender': '#e6e6fa',
    'lavenderblush': '#fff0f5',
    'lawngreen': '#7cfc00',
    'lemonchiffon': '#fffacd',
    'lightblue': '#add8e6',
    'lightcoral': '#f08080',
    'lightcyan': '#e0ffff',
    'lightgoldenrodyellow': '#fafad2',
    'lightgreen': '#90ee90',
    'lightgray': '#d3d3d3',
    'lightgrey': '#d3d3d3',
    'lightpink': '#ffb6c1',
    'lightsalmon': '#ffa07a',
    'lightseagreen': '#20b2aa',
    'lightskyblue': '#87cefa',
    'lightslategray': '#778899',
    'lightslategrey': '#778899',
    'lightsteelblue': '#b0c4de',
    'lightyellow': '#ffffe0',
    'lime': '#00ff00',
    'limegreen': '#32cd32',
    'linen': '#faf0e6',
    'magenta': '#ff00ff',
    'maroon': '#800000',
    'mediumaquamarine': '#66cdaa',
    'mediumblue': '#0000cd',
    'mediumorchid': '#ba55d3',
    'mediumpurple': '#9370db',
    'mediumseagreen': '#3cb371',
    'mediumslateblue': '#7b68ee',
    'mediumspringgreen': '#00fa9a',
    'mediumturquoise': '#48d1cc',
    'mediumvioletred': '#c71585',
    'midnightblue': '#191970',
    'mintcream': '#f5fffa',
    'mistyrose': '#ffe4e1',
    'moccasin': '#ffe4b5',
    'navajowhite': '#ffdead',
    'navy': '#000080',
    'oldlace': '#fdf5e6',
    'olive': '#808000',
    'olivedrab': '#6b8e23',
    'orange': '#ffa500',
    'orangered': '#ff4500',
    'orchid': '#da70d6',
    'palegoldenrod': '#eee8aa',
    'palegreen': '#98fb98',
    'paleturquoise': '#afeeee',
    'palevioletred': '#db7093',
    'papayawhip': '#ffefd5',
    'peachpuff': '#ffdab9',
    'peru': '#cd853f',
    'pink': '#ffc0cb',
    'plum': '#dda0dd',
    'powderblue': '#b0e0e6',
    'purple': '#800080',
    'red': '#ff0000',
    'rosybrown': '#bc8f8f',
    'royalblue': '#4169e1',
    'saddlebrown': '#8b4513',
    'salmon': '#fa8072',
    'sandybrown': '#f4a460',
    'seagreen': '#2e8b57',
    'seashell': '#fff5ee',
    'sienna': '#a0522d',
    'silver': '#c0c0c0',
    'skyblue': '#87ceeb',
    'slateblue': '#6a5acd',
    'slategray': '#708090',
    'slategrey': '#708090',
    'snow': '#fffafa',
    'springgreen': '#00ff7f',
    'steelblue': '#4682b4',
    'tan': '#d2b48c',
    'teal': '#008080',
    'thistle': '#d8bfd8',
    'tomato': '#ff6347',
    'turquoise': '#40e0d0',
    'violet': '#ee82ee',
    'wheat': '#f5deb3',
    'white': '#ffffff',
    'whitesmoke': '#f5f5f5',
    'yellow': '#ffff00',
    'yellowgreen': '#9acd32',
}
# Same mapping with the hex values converted to kivy 4-component colors.
colormap = {k: get_color_from_hex(v) for k, v in hex_colormap.items()}
DEPRECATED_CALLERS = []
def deprecated(func=None, msg=''):
    '''Decorator marking a function as deprecated. The first call made
    from each distinct call site emits a warning through the Kivy
    logger; later calls from the same site stay silent.'''
    import inspect
    import functools
    if func is None:
        # Used as @deprecated(msg='...'): return a decorator bound to msg.
        return functools.partial(deprecated, msg=msg)

    @functools.wraps(func)
    def new_func(*args, **kwargs):
        # Identify the immediate caller so each site warns only once.
        src_file, src_line, src_func = inspect.stack()[1][1:4]
        caller_id = "%s:%s:%s" % (src_file, src_line, src_func)
        if caller_id not in DEPRECATED_CALLERS:
            DEPRECATED_CALLERS.append(caller_id)
            warning = (
                'Call to deprecated function %s in %s line %d.'
                'Called from %s line %d'
                ' by %s().' % (
                    func.__name__,
                    func.__code__.co_filename,
                    func.__code__.co_firstlineno + 1,
                    src_file, src_line, src_func))
            if msg:
                warning = '{}: {}'.format(msg, warning)
            from kivy.logger import Logger
            Logger.warning('Deprecated: ' + warning)
            if func.__doc__:
                Logger.warning(func.__doc__)
        return func(*args, **kwargs)
    return new_func
class SafeList(list):
    '''List with a clear() method.
    .. warning::
        Usage of the iterate() function will decrease your performance.
    '''
    def clear(self):
        # Empty the list in place (Python 2 lists have no clear()).
        del self[:]
    @deprecated
    def iterate(self, reverse=False):
        # Deprecated: equivalent to iter(self) / iter(reversed(self)).
        if reverse:
            return iter(reversed(self))
        return iter(self)
class QueryDict(dict):
    '''A dict() whose keys can also be read and written as attributes.
    ::
        d = QueryDict()
        # create a key named toto, with the value 1
        d.toto = 1
        # it's the same as
        d['toto'] = 1
    .. versionadded:: 1.0.4
    '''
    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            # Unknown key: defer to the parent; dict defines no
            # __getattr__, so this raises AttributeError as expected.
            return super(QueryDict, self).__getattr__(attr)

    def __setattr__(self, attr, value):
        # Attribute assignment always stores into the mapping itself.
        self[attr] = value
def format_bytes_to_human(size, precision=2):
    '''Format a byte value to a human readable representation (B, KB, MB...).
    .. versionadded:: 1.0.8
    :Parameters:
        `size`: int
            Number that represents the bytes value
        `precision`: int, defaults to 2
            Precision after the comma
    Examples::
        >>> format_bytes_to_human(6463)
        '6.31 KB'
        >>> format_bytes_to_human(646368746541)
        '601.98 GB'
    '''
    size = int(size)
    fmt = '%%1.%df %%s' % precision
    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
        if size < 1024.0:
            return fmt % (size, unit)
        size /= 1024.0
    # size >= 1024 TB: report it in PB. The original fell off the end of
    # the loop here and implicitly returned None.
    return fmt % (size, 'PB')
def _get_platform():
    # Resolve the current OS name. Android is checked first through the
    # python-for-android environment variables (ANDROID_ARGUMENT or
    # ANDROID_PRIVATE), because sys.platform reports 'linux2' there.
    if 'ANDROID_ARGUMENT' in environ:
        return 'android'
    if environ.get('KIVY_BUILD', '') == 'ios':
        return 'ios'
    if _sys_platform in ('win32', 'cygwin'):
        return 'win'
    if _sys_platform == 'darwin':
        return 'macosx'
    # FreeBSD is deliberately reported as 'linux' as well.
    if _sys_platform.startswith(('linux', 'freebsd')):
        return 'linux'
    return 'unknown'
platform = _get_platform()
'''
A string identifying the current operating system. It is one
of: `'win'`, `'linux'`, `'android'`, `'macosx'`, `'ios'` or `'unknown'`.
You can use it as follows::
from kivy.utils import platform
if platform == 'linux':
do_linux_things()
.. versionadded:: 1.3.0
.. versionchanged:: 1.8.0
platform is now a variable instead of a function.
'''
def escape_markup(text):
    '''
    Escape markup characters found in the text. Intended to be used when markup
    text is activated on the Label::
        untrusted_text = escape_markup('Look at the example [1]')
        text = '[color=ff0000]' + untrusted_text + '[/color]'
        w = Label(text=text, markup=True)
    .. versionadded:: 1.3.0
    '''
    # '&' must be escaped first so the ampersands introduced by the
    # '&bl;'/'&br;' replacements are not double-escaped. The original
    # replaced '&' with itself, a no-op that left ampersands unescaped.
    return text.replace('&', '&amp;').replace('[', '&bl;').replace(']', '&br;')
class reify(object):
    '''
    Non-data descriptor decorator: the wrapped method runs at most once
    per instance, and its result is stored in the instance dict under
    the same name, shadowing the descriptor for every later lookup.
    It behaves like @property, except the getter is only ever invoked a
    single time; afterwards the cached value is a plain attribute. This
    gives lazy attribute creation on objects meant to be immutable.
    Taken from the `Pyramid project <https://pypi.python.org/pypi/pyramid/>`_.
    To use this as a decorator::
        @reify
        def lazy(self):
            ...
            return hard_to_compute_int
        first_time = self.lazy   # lazy is reify obj, reify.__get__() runs
        second_time = self.lazy  # lazy is hard_to_compute_int
    '''
    def __init__(self, func):
        self.func = func
        self.__doc__ = func.__doc__

    def __get__(self, inst, cls):
        # Accessed on the class itself: hand back the descriptor.
        if inst is None:
            return self
        # First access on an instance: compute, then cache the result on
        # the instance so this descriptor is never consulted again.
        value = self.func(inst)
        setattr(inst, self.func.__name__, value)
        return value
| |
#!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option
# Directory this script lives in; the package name is derived from it.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)
# Parsed command line options; populated in main().
PARAMETERS = None
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
# Content root on the target device, and the package's directory there.
SRC_DIR = "/home/app/content"
PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)
def doCMD(cmd):
    # Run `cmd` in a shell, echoing each output line as it arrives, and
    # return a (exit_code, [output lines]) tuple. stderr is merged into
    # stdout.
    # Do not need handle timeout in this short script, let tool do it
    print "-->> \"%s\"" % cmd
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    while True:
        output_line = cmd_proc.stdout.readline().strip("\r\n")
        cmd_return_code = cmd_proc.poll()
        # poll() is None while the child still runs; stop once it has
        # exited and the pipe is drained.
        if output_line == '' and cmd_return_code != None:
            break
        sys.stdout.write("%s\n" % output_line)
        sys.stdout.flush()
        output.append(output_line)
    return (cmd_return_code, output)
def updateCMD(cmd=None):
    # pkgcmd only works inside the target user's session, so wrap such
    # commands in `su` with the session-bus environment exported; every
    # other command passes through unchanged.
    if "pkgcmd" not in cmd:
        return cmd
    return "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
def getUSERID():
    # Resolve the numeric uid of PARAMETERS.user on the target device
    # (over sdb shell or ssh depending on mode). Returns doCMD's
    # (exit_code, output) tuple; on success output[0] holds the uid.
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell id -u %s" % (
            PARAMETERS.device, PARAMETERS.user)
    else:
        cmd = "ssh %s \"id -u %s\"" % (
            PARAMETERS.device, PARAMETERS.user )
    return doCMD(cmd)
def getPKGID(pkg_name=None):
    # List the installed packages on the device (`pkgcmd -l`) and return
    # the package id whose name equals pkg_name, or None when the
    # listing fails or no entry matches.
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell %s" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))
    else:
        cmd = "ssh %s \"%s\"" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))
    (return_code, output) = doCMD(cmd)
    if return_code != 0:
        return None
    test_pkg_id = None
    for line in output:
        pkg_infos = line.split()
        # Assumes matching rows have 6+ columns with the bracketed name
        # in column 5 and the bracketed id in column 3; 4-column rows
        # are headers/other output -- TODO confirm against the actual
        # `pkgcmd -l` output format.
        if len(pkg_infos) == 4:
            continue
        name = pkg_infos[5]
        name = name.lstrip('[').rstrip(']')
        print "name is: %s" % name
        if pkg_name == name:
            test_pkg_id = pkg_infos[3]
            test_pkg_id = test_pkg_id.lstrip('[').rstrip(']')
            print test_pkg_id
            break
    return test_pkg_id
def doRemoteCMD(cmd=None):
    # Run cmd on the device via `sdb shell` or ssh (per mode) and return
    # doCMD's (exit_code, output) tuple.
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell %s" % (PARAMETERS.device, updateCMD(cmd))
    else:
        cmd = "ssh %s \"%s\"" % (PARAMETERS.device, updateCMD(cmd))
    return doCMD(cmd)
def doRemoteCopy(src=None, dest=None):
    # Copy src to dest on the device (`sdb push` or `scp -r`), then sync.
    # NOTE(review): the return value looks inverted -- True is returned
    # when the copy command exited non-zero (failure) and False when it
    # exited 0, yet callers treat a falsy result as failure. Confirm the
    # intended contract before changing either side.
    if PARAMETERS.mode == "SDB":
        cmd_prefix = "sdb -s %s push" % PARAMETERS.device
        cmd = "%s %s %s" % (cmd_prefix, src, dest)
    else:
        cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)
    (return_code, output) = doCMD(cmd)
    doRemoteCMD("sync")
    if return_code != 0:
        return True
    else:
        return False
def uninstPKGs():
    # Uninstall every .wgt package found beside this script (skipping
    # the mediasrc tree) and remove the on-device content directory.
    # Returns False if any step failed.
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        if root.endswith("mediasrc"):
            continue
        for file in files:
            if file.endswith(".wgt"):
                # Package name is the .wgt filename without extension.
                pkg_id = getPKGID(os.path.basename(os.path.splitext(file)[0]))
                if not pkg_id:
                    action_status = False
                    continue
                (return_code, output) = doRemoteCMD(
                    "pkgcmd -u -t wgt -q -n %s" % pkg_id)
                # pkgcmd reports errors in its output, not its exit code.
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break
    (return_code, output) = doRemoteCMD(
        "rm -rf %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    return action_status
def instPKGs():
    # Create the on-device package directories, push the test resources
    # and install every matching .wgt. Returns False if any step failed.
    action_status = True
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s/stablonglast3d" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    if not doRemoteCopy("stablonglast3d", "%s/stablonglast3d" % PKG_SRC_DIR):
        action_status = False
    for root, dirs, files in os.walk(SCRIPT_DIR):
        if root.endswith("mediasrc"):
            continue
        for file in files:
            if file.endswith("%s.wgt" % PKG_NAME):
                if not doRemoteCopy(os.path.join(root, file), "%s/%s" % (SRC_DIR, file)):
                    action_status = False
                # Install from the device-side copy, then delete it.
                (return_code, output) = doRemoteCMD(
                    "pkgcmd -i -t wgt -q -p %s/%s" % (SRC_DIR, file))
                doRemoteCMD("rm -rf %s/%s" % (SRC_DIR, file))
                # pkgcmd reports errors in its output, not its exit code.
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break
    # Do some special copy/delete... steps
    '''
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s/tests" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    if not doRemoteCopy("specname/tests", "%s/tests" % PKG_SRC_DIR):
        action_status = False
    '''
    # Push everything else sitting next to this script, except inst.py.
    for item in glob.glob("%s/*" % SCRIPT_DIR):
        if item.endswith("inst.py"):
            continue
        else:
            item_name = os.path.basename(item)
            if not doRemoteCopy(item, PKG_SRC_DIR):
                action_status = False
    return action_status
def main():
    # Parse options, discover the target device and user session, then
    # install (-i, default) or uninstall (-u) the widget package.
    try:
        usage = "usage: inst.py -i"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-m", dest="mode", action="store", help="Specify mode")
        opts_parser.add_option(
            "-s", dest="device", action="store", help="Specify device")
        opts_parser.add_option(
            "-i", dest="binstpkg", action="store_true", help="Install package")
        opts_parser.add_option(
            "-u", dest="buninstpkg", action="store_true", help="Uninstall package")
        opts_parser.add_option(
            "-a", dest="user", action="store", help="User name")
        global PARAMETERS
        (PARAMETERS, args) = opts_parser.parse_args()
    except Exception, e:
        print "Got wrong option: %s, exit ..." % e
        sys.exit(1)
    if not PARAMETERS.user:
        PARAMETERS.user = "app"
    if not PARAMETERS.mode:
        PARAMETERS.mode = "SDB"
    if PARAMETERS.mode == "SDB":
        # No device given: pick the first one `sdb devices` reports.
        if not PARAMETERS.device:
            (return_code, output) = doCMD("sdb devices")
            for line in output:
                if str.find(line, "\tdevice") != -1:
                    PARAMETERS.device = line.split("\t")[0]
                    break
    else:
        PARAMETERS.mode = "SSH"
    if not PARAMETERS.device:
        print "No device provided"
        sys.exit(1)
    # Resolve the target user's uid and point XW_ENV at that user's
    # session bus so pkgcmd can run inside the session (see updateCMD).
    user_info = getUSERID()
    re_code = user_info[0]
    if re_code == 0 :
        global XW_ENV
        userid = user_info[1][0]
        XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket"%str(userid)
    else:
        print "[Error] cmd commands error : %s"%str(user_info[1])
        sys.exit(1)
    if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
        print "-i and -u are conflict"
        sys.exit(1)
    if PARAMETERS.buninstpkg:
        if not uninstPKGs():
            sys.exit(1)
    else:
        if not instPKGs():
            sys.exit(1)
# Script entry point: exit non-zero from main() on failure, 0 otherwise.
if __name__ == "__main__":
    main()
    sys.exit(0)
| |
from socket import error as socket_error
from django import forms
from django.conf import settings
from django.conf.urls import url
from django.contrib import admin
from django.contrib import messages
from django.contrib.admin import SimpleListFilter
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
from django.core.urlresolvers import reverse
from django.db.models import Count, Q
from django.forms import ValidationError
from django.http import HttpResponseRedirect
from dal import autocomplete
from celery.task.sets import TaskSet
from functools import update_wrapper
from import_export.fields import Field
from import_export.resources import ModelResource
from sorl.thumbnail.admin import AdminImageMixin
from mozillians.common.mixins import MozilliansAdminExportMixin
from mozillians.common.templatetags.helpers import get_datetime
from mozillians.groups.admin import BaseGroupMembershipAutocompleteForm
from mozillians.groups.models import GroupMembership, Skill
from mozillians.users.models import get_languages_for_locale
from mozillians.users.models import (AbuseReport, ExternalAccount, Language, PUBLIC,
UserProfile, UsernameBlacklist, Vouch)
from mozillians.users.tasks import (check_celery, subscribe_user_to_basket,
unsubscribe_from_basket_task, index_all_profiles)
# Group is re-registered below with an export-enabled admin.
admin.site.unregister(Group)
# OR together one privacy_<field>=PUBLIC predicate per privacy field: a
# profile matching any of them exposes at least one field publicly.
Q_PUBLIC_PROFILES = Q()
for field in UserProfile.privacy_fields():
    key = 'privacy_%s' % field
    Q_PUBLIC_PROFILES |= Q(**{key: PUBLIC})
def subscribe_to_basket_action(newsletter):
    """Subscribe to Basket action."""
    def subscribe_to_basket(modeladmin, request, queryset):
        """Subscribe to Basket or update details of already subscribed."""
        # Fan out one celery subtask per selected profile.
        ts = [(subscribe_user_to_basket.subtask(args=[userprofile.id, [newsletter]]))
              for userprofile in queryset]
        TaskSet(ts).apply_async()
        messages.success(request, 'Basket update started.')
    subscribe_to_basket.short_description = 'Subscribe to or Update {0}'.format(newsletter)
    # Give each newsletter's action a unique __name__ so the admin
    # registers them as distinct actions instead of collapsing them.
    subscribe_to_basket.__name__ = 'subscribe_to_basket_{0}'.format(newsletter.replace('-', '_'))
    return subscribe_to_basket
def unsubscribe_from_basket_action(newsletter):
    """Unsubscribe from Basket action."""
    def unsubscribe_from_basket(modeladmin, request, queryset):
        """Unsubscribe from Basket."""
        # Fan out one celery subtask per selected profile's email.
        ts = [(unsubscribe_from_basket_task.subtask(args=[userprofile.user.email,
                                                          [newsletter]]))
              for userprofile in queryset]
        TaskSet(ts).apply_async()
        messages.success(request, 'Basket update started.')
    unsubscribe_from_basket.short_description = 'Unsubscribe from {0}'.format(newsletter)
    # Unique __name__ per newsletter so each action registers separately.
    func_name = 'unsubscribe_from_basket_{0}'.format(newsletter.replace('-', '_'))
    unsubscribe_from_basket.__name__ = func_name
    return unsubscribe_from_basket
def update_vouch_flags_action():
    """Update can_vouch, is_vouched flag action."""
    def update_vouch_flags(modeladmin, request, queryset):
        # Recompute both flags from the actual received-vouch count.
        for profile in queryset:
            vouches_received = profile.vouches_received.count()
            profile.can_vouch = vouches_received >= settings.CAN_VOUCH_THRESHOLD
            profile.is_vouched = vouches_received > 0
            profile.save()
    update_vouch_flags.short_description = 'Update vouch flags'
    return update_vouch_flags
class SuperUserFilter(SimpleListFilter):
    """Admin filter on whether the user may access the admin interface
    (i.e. ``user.is_staff``)."""
    title = 'has access to admin interface'
    parameter_name = 'superuser'

    def lookups(self, request, model_admin):
        return (('False', 'No'), ('True', 'Yes'))

    def queryset(self, request, queryset):
        selected = self.value()
        if selected is None:
            return queryset
        return queryset.filter(user__is_staff=(selected == 'True'))
class PublicProfileFilter(SimpleListFilter):
    """Admin filter for public profiles."""
    title = 'public profile'
    parameter_name = 'public_profile'
    def lookups(self, request, model_admin):
        return (('False', 'No'),
                ('True', 'Yes'))
    def queryset(self, request, queryset):
        # "Public" means at least one privacy_* field is PUBLIC; see the
        # module-level Q_PUBLIC_PROFILES.
        if self.value() is None:
            return queryset
        if self.value() == 'True':
            return queryset.filter(Q_PUBLIC_PROFILES)
        return queryset.exclude(Q_PUBLIC_PROFILES)
class CompleteProfileFilter(SimpleListFilter):
    """Admin filter for complete profiles (a non-empty full name)."""
    title = 'complete profile'
    parameter_name = 'complete_profile'

    def lookups(self, request, model_admin):
        return (('False', 'Incomplete'),
                ('True', 'Complete'))

    def queryset(self, request, queryset):
        choice = self.value()
        if choice is None:
            return queryset
        if choice == 'True':
            return queryset.exclude(full_name='')
        return queryset.filter(full_name='')
class DateJoinedFilter(SimpleListFilter):
    """Admin filter for date joined."""
    title = 'date joined'
    parameter_name = 'date_joined'

    def lookups(self, request, model_admin):
        # One option per distinct year in which any user joined.
        join_dates = User.objects.values_list('date_joined', flat=True)
        years = {d.year for d in join_dates}
        return [(str(year), year) for year in years]

    def queryset(self, request, queryset):
        if self.value() is None:
            return queryset
        # The original had an unreachable `return queryset` after this
        # if/else pair; both branches already return.
        return queryset.filter(user__date_joined__year=self.value())
class LastLoginFilter(SimpleListFilter):
    """Admin filter for last login."""
    title = 'last login'
    parameter_name = 'last_login'
    def lookups(self, request, model_admin):
        # Number is in days
        return (('<7', 'Less than a week'),
                ('<30', 'Less than a month'),
                ('<90', 'Less than 3 months'),
                ('<180', 'Less than 6 months'),
                ('>180', 'Between 6 and 12 months'),
                ('>360', 'More than a year'))
    def queryset(self, request, queryset):
        # get_datetime(-N) presumably yields "now minus N days" -- confirm
        # in mozillians.common.templatetags.helpers. '<' buckets are
        # open-ended (gte); '>180' is a bounded 180-360 day window.
        if self.value() == '<7':
            return queryset.filter(user__last_login__gte=get_datetime(-7))
        elif self.value() == '<30':
            return queryset.filter(user__last_login__gte=get_datetime(-30))
        elif self.value() == '<90':
            return queryset.filter(user__last_login__gte=get_datetime(-90))
        elif self.value() == '<180':
            return queryset.filter(user__last_login__gte=get_datetime(-180))
        elif self.value() == '>180':
            return queryset.filter(user__last_login__lt=get_datetime(-180),
                                   user__last_login__gt=get_datetime(-360))
        elif self.value() == '>360':
            return queryset.filter(user__last_login__lt=get_datetime(-360))
        return queryset
class AlternateEmailFilter(SimpleListFilter):
    """Admin filter for users with alternate emails."""
    title = 'alternate email'
    parameter_name = 'alternate_email'

    def lookups(self, request, model_admin):
        return (('False', 'No'), ('True', 'Yes'))

    def queryset(self, request, queryset):
        choice = self.value()
        if choice is None:
            return queryset
        # An alternate email is an ExternalAccount row of type EMAIL.
        if choice == 'True':
            return queryset.filter(externalaccount__type=ExternalAccount.TYPE_EMAIL)
        return queryset.exclude(externalaccount__type=ExternalAccount.TYPE_EMAIL)
class LegacyVouchFilter(SimpleListFilter):
    """Admin filter for profiles with new or legacy vouch type."""
    title = 'vouch type'
    parameter_name = 'vouch_type'
    def lookups(self, request, model_admin):
        return (('legacy', 'Legacy'),
                ('new', 'New'))
    def queryset(self, request, queryset):
        # Only vouched profiles are classified; a "new" vouch is one
        # that carries a non-empty description.
        vouched = queryset.filter(is_vouched=True)
        newvouches = (Vouch.objects
                      .exclude(description='')
                      .values_list('vouchee', flat=True)
                      .distinct())
        # Load into memory
        newvouches = list(newvouches)
        if self.value() == 'legacy':
            return vouched.exclude(pk__in=newvouches)
        elif self.value() == 'new':
            return vouched.filter(pk__in=newvouches)
        return queryset
class NDAMemberFilter(SimpleListFilter):
    """Admin filter for profiles member of the NDA group"""
    title = "NDA member"
    parameter_name = 'nda_member'
    def lookups(self, request, model_admin):
        return (('False', 'No'),
                ('True', 'Yes'))
    def queryset(self, request, queryset):
        # Local import -- presumably to dodge an import cycle with
        # groups.models; confirm before hoisting to module level.
        from mozillians.groups.models import Group, GroupMembership
        try:
            group = Group.objects.get(name=settings.NDA_GROUP)
        except Group.DoesNotExist:
            # No NDA group configured: the filter is a no-op.
            return queryset
        memberships = GroupMembership.objects.filter(group=group, status=GroupMembership.MEMBER)
        profile_ids = memberships.values_list('userprofile__id', flat=True)
        if self.value() == 'False':
            return queryset.exclude(id__in=profile_ids)
        elif self.value() == 'True':
            return queryset.filter(id__in=profile_ids)
        return queryset
class BasketTokenFilter(SimpleListFilter):
    """Admin filter on whether a profile has a (non-empty) basket token."""
    title = 'has basket token'
    parameter_name = 'basket_token'

    def lookups(self, request, model_admin):
        return (('yes', 'Yes'),
                ('no', 'No'))

    def queryset(self, request, queryset):
        choice = self.value()
        if choice == 'yes':
            return queryset.exclude(basket_token='')
        if choice == 'no':
            return queryset.filter(basket_token='')
        return queryset
class MissingCountry(SimpleListFilter):
    """Admin filter for profiles missing country information"""
    title = 'Missing country'
    parameter_name = 'missing_country'
    def lookups(self, request, model_admin):
        return (('both', 'Both geo_country/country'),
                ('geo_country', 'Only geo_country'),
                ('country', 'Only country'))
    def queryset(self, request, queryset):
        # NULL checks only; empty-string values still count as present.
        if self.value() == 'both':
            return queryset.filter(country__isnull=True, geo_country__isnull=True)
        elif self.value() == 'geo_country':
            return queryset.filter(geo_country__isnull=True)
        elif self.value() == 'country':
            return queryset.filter(country__isnull=True)
        return queryset
class MissingRegion(SimpleListFilter):
    """Admin filter for profiles missing region information"""
    title = 'Missing region'
    parameter_name = 'missing_region'
    def lookups(self, request, model_admin):
        return (('both', 'Both geo_region/region'),
                ('geo_region', 'Only geo_region'),
                ('region', 'Only region'))
    def queryset(self, request, queryset):
        # NULL checks only; empty-string values still count as present.
        if self.value() == 'both':
            return queryset.filter(region__isnull=True, geo_region__isnull=True)
        elif self.value() == 'geo_region':
            return queryset.filter(geo_region__isnull=True)
        elif self.value() == 'region':
            return queryset.filter(region__isnull=True)
        return queryset
class MissingCity(SimpleListFilter):
    """Admin filter for profiles missing city information"""
    title = 'Missing city'
    parameter_name = 'missing_city'
    def lookups(self, request, model_admin):
        return (('both', 'Both geo_city/city'),
                ('geo_city', 'Only geo_city'),
                ('city', 'Only city'))
    def queryset(self, request, queryset):
        # NULL checks only; empty-string values still count as present.
        if self.value() == 'both':
            return queryset.filter(city__isnull=True, geo_city__isnull=True)
        elif self.value() == 'geo_city':
            return queryset.filter(geo_city__isnull=True)
        elif self.value() == 'city':
            return queryset.filter(city__isnull=True)
        return queryset
class UsernameBlacklistAdmin(MozilliansAdminExportMixin, admin.ModelAdmin):
    """UsernameBlacklist Admin."""
    # Entries may be literal usernames or regexes (is_regex flag).
    save_on_top = True
    search_fields = ['value']
    list_filter = ['is_regex']
    list_display = ['value', 'is_regex']
admin.site.register(UsernameBlacklist, UsernameBlacklistAdmin)
class MissingLanguagesFilter(SimpleListFilter):
    # Filters Language rows by whether their code is known to Babel's
    # 'en' locale data ('True' => stored code unknown to Babel).
    title = 'Missing language'
    parameter_name = 'missing_language'
    def lookups(self, request, model_admin):
        return (('False', 'No'),
                ('True', 'Yes'))
    def queryset(self, request, queryset):
        current_language_codes = set(Language.objects.values_list('code', flat=True))
        babel_language_codes = set([code for code, lang in get_languages_for_locale('en')])
        if self.value() == 'True':
            # Codes stored in the DB that Babel cannot name.
            missing_language_codes = current_language_codes.difference(babel_language_codes)
            return queryset.filter(code__in=list(missing_language_codes))
        if self.value() == 'False':
            return queryset.filter(code__in=list(babel_language_codes))
        return queryset
class LanguageResource(ModelResource):
    """django-import-export Language resource."""
    # Export the owning profile's email alongside each language row.
    email = Field(attribute='userprofile__user__email')
    class Meta:
        model = Language
class LanguageAdmin(MozilliansAdminExportMixin, admin.ModelAdmin):
    """Admin for Language rows with export support."""
    resource_class = LanguageResource
    search_fields = ['userprofile__full_name', 'userprofile__user__email', 'code']
    list_display = ['get_code', 'get_language_name', 'userprofile']
    list_filter = ['code', MissingLanguagesFilter]
    def get_code(self, obj):
        # Raw language code column.
        return obj.code
    get_code.short_description = 'Code'
    def get_language_name(self, obj):
        # Human-readable name from the model's code choices.
        return obj.get_code_display()
    get_language_name.short_description = 'Name'
admin.site.register(Language, LanguageAdmin)
# Inline editor for skills attached to a profile.
class SkillInline(admin.TabularInline):
    model = Skill
    extra = 1
class UserMembershipAutocompleteForm(BaseGroupMembershipAutocompleteForm):
    # Same as the base form, but with an autocomplete widget for group.
    # NOTE(review): this Meta sets only widgets; model/fields are
    # presumably supplied by BaseGroupMembershipAutocompleteForm --
    # confirm, since ModelForm Meta options do not inherit implicitly.
    class Meta:
        widgets = {
            'group': autocomplete.ModelSelect2(url='groups:group-autocomplete'),
        }
# Inline membership editor with group autocomplete.
class GroupMembershipInline(admin.TabularInline):
    model = GroupMembership
    extra = 1
    form = UserMembershipAutocompleteForm
# Inline editor for a profile's languages.
class LanguageInline(admin.TabularInline):
    model = Language
    extra = 1
class ExternalAccountInline(admin.TabularInline):
    model = ExternalAccount
    extra = 1

    def get_queryset(self, request):
        """Exclude alternate emails from external accounts.

        Renamed from ``queryset``: on the Django versions this module
        targets (UserProfileAdmin below already overrides
        ``get_queryset``), a ``queryset`` method is never invoked, so
        the exclusion was silently skipped and alternate emails showed
        up in this inline too.
        """
        qs = super(ExternalAccountInline, self).get_queryset(request)
        return qs.exclude(type=ExternalAccount.TYPE_EMAIL)
class AlternateEmailForm(forms.ModelForm):
    """ExternalAccount form that pins the account type to EMAIL."""
    def save(self, *args, **kwargs):
        # Force the type; the field is excluded from the form below, so
        # users cannot pick anything else.
        self.instance.type = ExternalAccount.TYPE_EMAIL
        return super(AlternateEmailForm, self).save(*args, **kwargs)
    class Meta:
        model = ExternalAccount
        exclude = ['type']
class AlternateEmailInline(admin.TabularInline):
    form = AlternateEmailForm
    model = ExternalAccount
    extra = 1
    verbose_name = 'Alternate Email'
    verbose_name_plural = 'Alternate Emails'

    def get_queryset(self, request):
        """Limit queryset to alternate emails.

        Renamed from ``queryset``: on the Django versions this module
        targets (UserProfileAdmin below already overrides
        ``get_queryset``), a ``queryset`` method is never invoked, so
        this inline showed all external accounts instead of only
        EMAIL-typed ones.
        """
        qs = super(AlternateEmailInline, self).get_queryset(request)
        return qs.filter(type=ExternalAccount.TYPE_EMAIL)
class UserProfileAdminForm(forms.ModelForm):
    """UserProfile admin form that also edits the related User's
    username and email, validating both for uniqueness across Users."""
    username = forms.CharField()
    email = forms.CharField()
    last_login = forms.DateTimeField(required=False)
    date_joined = forms.DateTimeField(required=False)

    def __init__(self, *args, **kwargs):
        super(UserProfileAdminForm, self).__init__(*args, **kwargs)
        # Seed the initial values on self.fields (per-instance copies).
        # The original wrote to self.base_fields, which is shared class
        # state: one request's initial values leaked into every other
        # instance of this form class.
        if self.instance and self.instance.pk:
            self.fields['username'].initial = self.instance.user.username
            self.fields['email'].initial = self.instance.user.email

    def clean_username(self):
        """Reject usernames already taken by a different User."""
        username = self.cleaned_data['username']
        if (User.objects.exclude(pk=self.instance.user.pk)
                .filter(username=username).exists()):
            raise ValidationError('Username already exists')
        return username

    def clean_email(self):
        """Reject emails already used by a different User."""
        email = self.cleaned_data['email']
        if (User.objects.exclude(pk=self.instance.user.pk)
                .filter(email=email).exists()):
            raise ValidationError('Email already exists')
        return email

    def save(self, *args, **kwargs):
        # Propagate the edited username/email to the related User before
        # saving the profile itself.
        if self.instance:
            self.instance.user.username = self.cleaned_data.get('username')
            self.instance.user.email = self.cleaned_data.get('email')
            self.instance.user.save()
        return super(UserProfileAdminForm, self).save(*args, **kwargs)

    class Meta:
        model = UserProfile
        fields = '__all__'
class UserProfileResource(ModelResource):
    """django-import-export UserProfile Resource."""
    # Flatten related User and geo lookups into export columns.
    username = Field(attribute='user__username')
    email = Field(attribute='user__email')
    country_name = Field(attribute='geo_country__name')
    country_code = Field(attribute='geo_country__code')
    region_name = Field(attribute='geo_region__name')
    region_code = Field(attribute='geo_region__code')
    city_name = Field(attribute='geo_city__name')
    city_code = Field(attribute='geo_city__code')
    class Meta:
        model = UserProfile
class UserProfileAdmin(AdminImageMixin, MozilliansAdminExportMixin, admin.ModelAdmin):
    """Admin for UserProfile.

    Adds related-User columns (email, username, login dates), Basket
    newsletter bulk actions, vouch-flag maintenance, and two custom
    admin views (search-index rebuild and a celery health check).
    """
    resource_class = UserProfileResource
    inlines = [LanguageInline, GroupMembershipInline, ExternalAccountInline,
               AlternateEmailInline]
    search_fields = ['full_name', 'user__email', 'user__username', 'ircname',
                     'geo_country__name', 'geo_region__name', 'geo_city__name']
    readonly_fields = ['date_vouched', 'vouched_by', 'user', 'date_joined', 'last_login',
                       'is_vouched', 'can_vouch', 'referral_source']
    form = UserProfileAdminForm
    list_filter = ['is_vouched', 'can_vouch', DateJoinedFilter,
                   LastLoginFilter, LegacyVouchFilter, SuperUserFilter,
                   CompleteProfileFilter, PublicProfileFilter, AlternateEmailFilter,
                   NDAMemberFilter, BasketTokenFilter, MissingCountry, MissingRegion,
                   MissingCity, 'externalaccount__type']
    save_on_top = True
    list_display = ['full_name', 'email', 'username', 'geo_country', 'is_vouched', 'can_vouch',
                    'number_of_vouchees', 'date_joined']
    list_display_links = ['full_name', 'email', 'username']
    actions = [subscribe_to_basket_action(settings.BASKET_VOUCHED_NEWSLETTER),
               unsubscribe_from_basket_action(settings.BASKET_VOUCHED_NEWSLETTER),
               subscribe_to_basket_action(settings.BASKET_NDA_NEWSLETTER),
               unsubscribe_from_basket_action(settings.BASKET_NDA_NEWSLETTER),
               update_vouch_flags_action()]
    fieldsets = (
        ('Account', {
            'fields': ('full_name', 'full_name_local', 'username', 'email', 'photo',)
        }),
        (None, {
            'fields': ('title', 'bio', 'tshirt', 'ircname', 'date_mozillian',)
        }),
        ('Important dates', {
            'fields': ('date_joined', 'last_login')
        }),
        ('Vouch Info', {
            'fields': ('date_vouched', 'is_vouched', 'can_vouch')
        }),
        ('Location', {
            'fields': ('geo_country', 'geo_region', 'geo_city',
                       'lng', 'lat', 'timezone')
        }),
        ('Privacy Settings', {
            'fields': ('privacy_photo', 'privacy_full_name', 'privacy_full_name_local',
                       'privacy_ircname', 'privacy_email', 'privacy_bio',
                       'privacy_geo_city', 'privacy_geo_region', 'privacy_geo_country',
                       'privacy_groups', 'privacy_skills', 'privacy_languages',
                       'privacy_date_mozillian', 'privacy_timezone',
                       'privacy_tshirt', 'privacy_title'),
            'classes': ('collapse',)
        }),
        ('Basket', {
            'fields': ('basket_token',),
            'classes': ('collapse',)
        }),
        ('Skills', {
            'fields': ('skills',)
        }),
    )

    def get_queryset(self, request):
        # Annotate the vouches-made count so number_of_vouchees is
        # sortable without a per-row query.
        qs = super(UserProfileAdmin, self).get_queryset(request)
        qs = qs.annotate(vouches_made_count=Count('vouches_made'))
        return qs

    def email(self, obj):
        """Related User's email (sortable list column)."""
        return obj.user.email
    email.admin_order_field = 'user__email'

    def username(self, obj):
        """Related User's username (sortable list column)."""
        return obj.user.username
    username.admin_order_field = 'user__username'

    def is_vouched(self, obj):
        # obj is a UserProfile, so read the flag directly. The original
        # read obj.userprofile.is_vouched, which raises AttributeError:
        # `userprofile` is the reverse accessor on User, not UserProfile.
        return obj.is_vouched
    is_vouched.boolean = True
    is_vouched.admin_order_field = 'is_vouched'

    def vouched_by(self, obj):
        """Link to the voucher's admin change page (None if unvouched)."""
        voucher = obj.vouched_by
        if voucher:
            voucher_url = reverse('admin:auth_user_change', args=[voucher.id])
            return '<a href="%s">%s</a>' % (voucher_url, voucher)
    vouched_by.admin_order_field = 'vouched_by'
    vouched_by.allow_tags = True

    def number_of_vouchees(self, obj):
        """Return the number of vouchees for obj."""
        return obj.vouches_made_count
    number_of_vouchees.admin_order_field = 'vouches_made_count'

    def last_login(self, obj):
        return obj.user.last_login

    def date_joined(self, obj):
        return obj.user.date_joined

    def get_actions(self, request):
        """Return bulk actions for UserAdmin without bulk delete."""
        actions = super(UserProfileAdmin, self).get_actions(request)
        actions.pop('delete_selected', None)
        return actions

    def index_profiles(self, request):
        # Rebuild the search index, then return to the changelist.
        index_all_profiles()
        messages.success(request, 'Rebuilding index.')
        return HttpResponseRedirect(reverse('admin:users_userprofile_changelist'))

    def check_celery(self, request):
        """Round-trip a trivial task through celery and report health."""
        try:
            investigator = check_celery.delay()
        except socket_error as e:
            messages.error(request, 'Cannot connect to broker: %s' % e)
            return HttpResponseRedirect(reverse('admin:users_userprofile_changelist'))
        try:
            investigator.get(timeout=5)
        except investigator.TimeoutError as e:
            messages.error(request, 'Worker timeout: %s' % e)
        except Exception:
            # Bare raise keeps the original traceback; the original
            # `raise e` discarded it (Python 2 semantics).
            raise
        else:
            messages.success(request, 'Celery is OK')
        return HttpResponseRedirect(reverse('admin:users_userprofile_changelist'))

    def get_urls(self):
        """Return custom and UserProfileAdmin urls."""
        def wrap(view):
            # Route through admin_view so admin permission checks apply.
            def wrapper(*args, **kwargs):
                return self.admin_site.admin_view(view)(*args, **kwargs)
            return update_wrapper(wrapper, view)
        urls = super(UserProfileAdmin, self).get_urls()
        urls += [
            url(r'index_profiles', wrap(self.index_profiles), name='users_index_profiles'),
            url(r'check_celery', wrap(self.check_celery), name='users_check_celery')
        ]
        return urls
admin.site.register(UserProfile, UserProfileAdmin)
class NullProfileFilter(SimpleListFilter):
    """Admin filter on whether the User has a UserProfile row."""
    title = 'has user profile'
    parameter_name = 'has_user_profile'

    def lookups(self, request, model_admin):
        return (('False', 'No'),
                ('True', 'Yes'))

    def queryset(self, request, queryset):
        choice = self.value()
        if not choice:
            return queryset
        # 'True' => profile present => userprofile__isnull=False.
        return queryset.filter(userprofile__isnull=(choice != 'True'))
# Deliberately shadows django.contrib.auth's UserAdmin: same admin with
# the profile-presence filter added.
class UserAdmin(UserAdmin):
    list_filter = [NullProfileFilter]
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
# Group admin re-registered with export support (Group was unregistered
# at the top of this module). Shadows the imported GroupAdmin on purpose.
class GroupAdmin(MozilliansAdminExportMixin, GroupAdmin):
    pass
admin.site.register(Group, GroupAdmin)
class VouchAutocompleteForm(forms.ModelForm):
    """Vouch form with autocomplete widgets for both profile ends."""
    class Meta:
        model = Vouch
        fields = '__all__'
        widgets = {
            'vouchee': autocomplete.ModelSelect2(url='users:vouchee-autocomplete'),
            'voucher': autocomplete.ModelSelect2(url='users:voucher-autocomplete')
        }
class VouchAdmin(admin.ModelAdmin):
    """Admin for Vouch rows, searchable by either side's name/email."""
    save_on_top = True
    search_fields = ['voucher__user__username', 'voucher__full_name',
                     'vouchee__user__username', 'vouchee__full_name',
                     'voucher__user__email', 'vouchee__user__email']
    list_display = ['vouchee', 'voucher', 'date', 'autovouch']
    list_filter = ['autovouch']
    form = VouchAutocompleteForm
admin.site.register(Vouch, VouchAdmin)
class AbuseReportAutocompleteForm(forms.ModelForm):
    """AbuseReport form with profile autocomplete widgets."""
    class Meta:
        model = AbuseReport
        fields = '__all__'
        widgets = {
            'profile': autocomplete.ModelSelect2(url='users:vouchee-autocomplete'),
            # NOTE(review): reporter reuses the vouchee autocomplete
            # endpoint; presumably any profile endpoint works -- confirm.
            'reporter': autocomplete.ModelSelect2(url='users:vouchee-autocomplete'),
        }
class AbuseReportAdmin(admin.ModelAdmin):
    """Admin for abuse reports filed against profiles."""
    form = AbuseReportAutocompleteForm
    list_display = ['profile', 'reporter', 'type', 'created', 'updated']
    list_filter = ['type', 'is_akismet']
admin.site.register(AbuseReport, AbuseReportAdmin)
| |
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import re
import time
from datetime import datetime, timedelta
import platform
from ..core.fileoperations import _marker
from cement.utils.misc import minimal_logger
from cement.utils.shell import exec_cmd
from botocore.compat import six
from ebcli.operations import buildspecops
from ..core import fileoperations, io
from ..containers import dockerrun
from ..lib import aws, ec2, elasticbeanstalk, heuristics, s3, utils, codecommit
from ..lib.aws import InvalidParameterValueError
from ..objects.exceptions import *
from ..objects.solutionstack import SolutionStack
from ..objects.sourcecontrol import SourceControl
from ..resources.strings import strings, responses, prompts
LOG = minimal_logger(__name__)
def wait_for_success_events(request_id, timeout_in_minutes=None,
                            sleep_time=5, stream_events=True, can_abort=False,
                            streamer=None, app_name=None, env_name=None, version_label=None):
    """Poll Beanstalk events for *request_id* until a terminal event arrives.

    Streams each event through *streamer* (defaults to io.get_event_streamer()).
    Returns as soon as _is_success_string recognizes a success message;
    failure messages make _is_success_string raise ServiceError, which
    propagates.  Raises TimeoutError after *timeout_in_minutes* (default 10);
    a timeout of 0 returns immediately without polling.
    """
    if timeout_in_minutes == 0:
        return
    if timeout_in_minutes is None:
        timeout_in_minutes = 10
    start = datetime.now()
    timediff = timedelta(seconds=timeout_in_minutes * 60)
    # default to now, will update if request_id is provided
    last_time = start
    if streamer is None:
        streamer = io.get_event_streamer()
    if can_abort:
        streamer.prompt += strings['events.abortmessage']
    events = []
    # If the even stream is terminated before we finish streaming application version events we will not
    # be able to continue the command so we must warn the user it is not safe to quit.
    safe_to_quit = True
    if version_label is not None and request_id is None:
        safe_to_quit = False
    try:
        # Get first event in order to get start time
        while not events:
            events = elasticbeanstalk.get_new_events(
                None, None, request_id, last_event_time=None, version_label=version_label
            )
            if len(events) > 0:
                # Most recent event carries the app/env names used for later polls.
                event = events[-1]
                app_name = event.app_name
                env_name = event.environment_name
                if stream_events:
                    streamer.stream_event(get_event_string(event), safe_to_quit=safe_to_quit)
                # Test event message for success string
                if _is_success_string(event.message):
                    return
                last_time = event.event_date
            else:
                time.sleep(sleep_time)
        # Get remaining events without request id
        while (datetime.now() - start) < timediff:
            time.sleep(sleep_time)
            events = elasticbeanstalk.get_new_events(
                app_name, env_name, None, last_event_time=last_time
            )
            for event in reversed(events):
                if stream_events:
                    streamer.stream_event(get_event_string(event), safe_to_quit=safe_to_quit)
                    # We dont need to update last_time if we are not printing.
                    # This can solve timing issues
                    last_time = event.event_date
                # Test event message for success string
                if _is_success_string(event.message):
                    return
    finally:
        # Always tear the streamer down, whether we return, raise, or time out.
        streamer.end_stream()
    # We have timed out
    raise TimeoutError('Timed out while waiting for command to Complete. The timeout can be set using the --timeout option.')
def wait_for_multiple_success_events(request_ids, timeout_in_minutes=None,
                                     sleep_time=5, stream_events=True,
                                     can_abort=False):
    """Poll events for several requests (one per environment) in parallel.

    Per-index lists (last_times, app_names, env_names, events_matrix,
    successes) track each request's progress.  Returns when every request
    has produced a success message; failure messages raise via
    _is_success_string.  Raises TimeoutError after *timeout_in_minutes*
    (default 10); 0 returns immediately.
    """
    if timeout_in_minutes == 0:
        return
    if timeout_in_minutes is None:
        timeout_in_minutes = 10
    start = datetime.now()
    timediff = timedelta(seconds=timeout_in_minutes * 60)
    last_times = []
    events_matrix = []
    app_names = []
    env_names = []
    successes = []
    for i in range(len(request_ids)):
        # Like indices of last_times and events_matrix correspond
        # to the same environment
        last_times.append(None)
        app_names.append(None)
        env_names.append(None)
        events_matrix.append([])
        successes.append(False)
    streamer = io.get_event_streamer()
    if can_abort:
        streamer.prompt += strings['events.abortmessage']
    try:
        # Get first events from all requests for start times
        for index in range(len(request_ids)):
            while not events_matrix[index]:
                events_matrix[index] = elasticbeanstalk.get_new_events(
                    None, None, request_ids[index], last_event_time=None
                )
                if len(events_matrix[index]) > 0:
                    event = events_matrix[index][-1]
                    app_name = event.app_name
                    env_name = event.environment_name
                    app_names[index] = app_name
                    env_names[index] = env_name
                    if stream_events:
                        streamer.stream_event(get_env_event_string(event))
                    if _is_success_string(event.message):
                        successes[index] = True
                    last_times[index] = event.event_date
                else:
                    time.sleep(sleep_time)
        # Poll for events from all environments
        while (datetime.now() - start) < timediff:
            # Check for success from all environments
            if all(successes):
                return
            for index in range(len(env_names)):
                # Skip environments that already reported success.
                if successes[index]:
                    continue
                time.sleep(sleep_time)
                events_matrix[index] = elasticbeanstalk.get_new_events(
                    app_names[index], env_names[index], None,
                    last_event_time=last_times[index]
                )
                for event in reversed(events_matrix[index]):
                    if stream_events:
                        streamer.stream_event(get_env_event_string(event))
                    last_times[index] = event.event_date
                    if _is_success_string(event.message):
                        successes[index] = True
    finally:
        streamer.end_stream()
    raise TimeoutError('Timed out while waiting for commands to Complete')
def wait_for_compose_events(request_id, app_name, grouped_envs, timeout_in_minutes=None,
                            sleep_time=5, stream_events=True,
                            can_abort=False):
    """Poll events for a ComposeEnvironments request plus every grouped env.

    The compose-level request is tracked via *request_id* while each
    environment in *grouped_envs* is polled separately.  Returns when all
    environments report success; failure messages raise via
    _is_success_string.  Raises TimeoutError after *timeout_in_minutes*
    (default 15); 0 returns immediately.
    """
    if timeout_in_minutes == 0:
        return
    if timeout_in_minutes is None:
        timeout_in_minutes = 15
    start = datetime.now()
    timediff = timedelta(seconds=timeout_in_minutes * 60)
    last_times = []
    events_matrix = []
    successes = []
    last_time_compose = datetime.utcnow()
    compose_events = []
    for i in range(len(grouped_envs)):
        # Like indices of last_times and events_matrix correspond
        # to the same environment
        last_times.append(datetime.utcnow())
        events_matrix.append([])
        successes.append(False)
    streamer = io.get_event_streamer()
    if can_abort:
        streamer.prompt += strings['events.abortmessage']
    try:
        # Poll for events from all environments
        while (datetime.now() - start) < timediff:
            # Check for success from all environments
            if all(successes):
                return
            # Poll for ComposeEnvironments events
            compose_events = elasticbeanstalk.get_new_events(app_name=app_name,
                                                             env_name=None,
                                                             request_id=request_id,
                                                             last_event_time=last_time_compose)
            for event in reversed(compose_events):
                if stream_events:
                    streamer.stream_event(get_compose_event_string(event))
                last_time_compose = event.event_date
            for index in range(len(grouped_envs)):
                # Skip environments that already reported success.
                if successes[index]:
                    continue
                time.sleep(sleep_time)
                events_matrix[index] = elasticbeanstalk.get_new_events(
                    app_name, grouped_envs[index], None,
                    last_event_time=last_times[index]
                )
                for event in reversed(events_matrix[index]):
                    if stream_events:
                        streamer.stream_event(get_env_event_string(event))
                    last_times[index] = event.event_date
                    if _is_success_string(event.message):
                        successes[index] = True
    finally:
        streamer.end_stream()
    raise TimeoutError('Timed out while waiting for commands to Complete')
def _is_success_string(message):
    """Classify a Beanstalk event message.

    Returns True when the message marks successful completion, False for an
    ordinary progress message.  Raises ServiceError for failure messages and
    NotSupportedError for a v1 Dockerrun hitting a multicontainer
    environment.  The patterns are tested in the same order as the
    historical if-chain, so any overlapping message resolves identically.
    """
    starts = message.startswith
    r = responses
    if message == r['event.greenmessage'] or starts(r['event.launchsuccess']):
        return True
    if (message == r['event.redmessage']
            or starts(r['event.launchbad'])
            or starts(r['event.updatebad'])
            or message == r['event.failedlaunch']
            or message == r['event.faileddeploy']
            or message == r['event.failedupdate']):
        raise ServiceError(message)
    if message == r['logs.pulled']:
        return True
    if starts(r['logs.fail']):
        raise ServiceError(message)
    if (message == r['env.terminated']
            or message == r['env.updatesuccess']
            or message == r['env.configsuccess']
            or message == r['app.deletesuccess']
            or r['logs.successtail'] in message
            or r['logs.successbundle'] in message
            or starts(r['swap.success'])):
        return True
    if starts(r['create.ecsdockerrun1']):
        raise NotSupportedError(prompts['create.dockerrunupgrade'])
    if message == r['event.updatefailed']:
        raise ServiceError(message)
    if starts(r['appversion.finished']):
        if message.endswith('FAILED.'):
            raise ServiceError(message)
        if message.endswith('PROCESSED.'):
            return True
    return False
def get_event_string(event, long_format=False):
    """Render one event for display.

    Short form: '<severity>: <message>'.  Long form: timestamp padded to 22
    columns, severity padded to 7, then the message.
    """
    if not long_format:
        return u'{0}: {1}'.format(event.severity, event.message)
    stamp = event.event_date.strftime("%Y-%m-%d %H:%M:%S")
    return u'{0} {1} {2}'.format(stamp.ljust(22),
                                 event.severity.ljust(7),
                                 event.message)
def get_compose_event_string(event, long_format=False):
    """Render a ComposeEnvironments event, prefixed with its application name.

    Short form: '<app> - <severity>: <message>'.  Long form pads the
    timestamp to 22 columns and the severity to 7.
    """
    if not long_format:
        return u'{0} - {1}: {2}'.format(event.application_name,
                                        event.severity, event.message)
    stamp = event.event_date.strftime("%Y-%m-%d %H:%M:%S")
    return u'{0} - {1} {2} {3}'.format(
        event.application_name,
        stamp.ljust(22),
        event.severity.ljust(7),
        event.message
    )
def get_env_event_string(event, long_format=False):
    """Render an event prefixed with its environment name right-justified
    to a 40-column gutter so parallel env streams line up."""
    env_column = event.environment_name.rjust(40)
    if not long_format:
        return u'{0} - {1}: {2}'.format(env_column, event.severity,
                                        event.message)
    stamp = event.event_date.strftime("%Y-%m-%d %H:%M:%S")
    return u'{0} - {1} {2} {3}'.format(
        env_column,
        stamp.ljust(22),
        event.severity.ljust(7),
        event.message)
def get_all_env_names():
    """Return the name of every environment in the current account/region."""
    return [environment.name for environment in
            elasticbeanstalk.get_all_environments()]
def get_env_names(app_name):
    """Return the names of all environments belonging to *app_name*."""
    return [environment.name for environment in
            elasticbeanstalk.get_app_environments(app_name)]
def get_app_version_labels(app_name):
    """Return every version label registered for *app_name*."""
    described = elasticbeanstalk.get_application_versions(app_name)
    return [version['VersionLabel']
            for version in described['ApplicationVersions']]
def get_app_version_s3_location(app_name, version_label):
    """Return the (s3_bucket, s3_key) of an existing application version.

    Looks *version_label* up among the application's versions; returns
    (None, None) when no matching version exists so the caller knows a new
    bundle must be uploaded.
    """
    s3_key = None
    s3_bucket = None
    # BUG FIX: this previously passed tuple(version_label,), which is
    # tuple(version_label) — exploding the label string into a tuple of
    # single characters.  A one-element sequence is what the filter needs.
    app_versions = elasticbeanstalk.get_application_versions(
        app_name, [version_label])['ApplicationVersions']
    app_version = {}
    for v in app_versions:
        if v['VersionLabel'] == version_label:
            app_version = v
            break
    if app_version:
        s3_bucket = app_version['SourceBundle']['S3Bucket']
        s3_key = app_version['SourceBundle']['S3Key']
        io.log_info("Application Version '{0}' exists. Source from S3: {1}/{2}.".format(version_label, s3_bucket, s3_key))
    return s3_bucket, s3_key
def prompt_for_solution_stack(module_name=None):
    """Interactively choose a solution stack.

    Tries to auto-detect the project's platform via heuristics (with a
    special case to distinguish single- vs multi-container Docker by the
    Dockerrun version), asks the user to confirm, falls back to a platform
    menu, then a version menu when several exist, and finally resolves to
    the latest stack for the chosen version.
    """
    solution_stacks = elasticbeanstalk.get_available_solution_stacks()
    # get list of platforms
    platforms = []
    for stack in solution_stacks:
        if stack.platform not in platforms:
            platforms.append(stack.platform)
    cwd = os.getcwd()
    # First check to see if we know what language the project is in
    try:
        fileoperations._traverse_to_project_root()
        platform = heuristics.find_language_type()
        if platform == 'Docker':
            # Check to see if dockerrun is version one or two
            dockerrun_file = dockerrun.get_dockerrun(
                os.path.join(os.getcwd(), 'Dockerrun.aws.json'))
            if dockerrun_file:
                if dockerrun_file.get('AWSEBDockerrunVersion') in (1, '1'):
                    platform = 'Docker'
                else:
                    platform = 'Multi-container Docker'
    finally:
        # Restore the caller's working directory no matter what.
        os.chdir(cwd)
    if platform is not None:
        io.echo()
        io.echo(prompts['platform.validate'].replace('{platform}', platform))
        correct = io.get_boolean_response()
    # NOTE: `correct` is only read when platform is truthy, so the
    # short-circuit below never hits it unbound.
    if not platform or not correct:
        # ask for platform
        io.echo()
        io.echo(prompts['platform.prompt']
                if not module_name
                else prompts['platform.prompt.withmodule'].replace('{module_name}',
                                                                   module_name))
        platform = utils.prompt_for_item_in_list(platforms)
    # filter
    solution_stacks = [x for x in solution_stacks if x.platform == platform]
    #get Versions
    versions = []
    for stack in solution_stacks:
        if stack.version not in versions:
            versions.append(stack.version)
    #now choose a version (if applicable)
    if len(versions) > 1:
        io.echo()
        io.echo(prompts['sstack.version'])
        version = utils.prompt_for_item_in_list(versions)
    else:
        version = versions[0]
    return get_latest_solution_stack(version, stack_list=solution_stacks)
def get_latest_solution_stack(platform_version, stack_list=None):
    """Return the newest solution stack matching *platform_version*.

    Filters *stack_list* (or a fresh describe call) down to the requested
    version, picks the first server type — the service lists latest first —
    and expects exactly one remaining stack.  Raises NotFoundError when the
    version matches nothing.
    """
    if stack_list:
        solution_stacks = stack_list
    else:
        solution_stacks = elasticbeanstalk.\
            get_available_solution_stacks()
    #filter
    solution_stacks = [x for x in solution_stacks
                       if x.version == platform_version]
    #Lastly choose a server type
    servers = []
    for stack in solution_stacks:
        if stack.server not in servers:
            servers.append(stack.server)
    # Default to latest version of server
    # We are assuming latest is always first in list.
    if len(servers) < 1:
        raise NotFoundError(strings['sstacks.notaversion'].
                            replace('{version}', platform_version))
    server = servers[0]
    #filter
    solution_stacks = [x for x in solution_stacks if x.server == server]
    #should have 1 and only have 1 result
    # NOTE(review): assert is stripped under `python -O`; relies on the
    # service never returning duplicate (version, server) pairs.
    assert len(solution_stacks) == 1, 'Filtered Solution Stack list ' \
                                      'contains multiple results'
    return solution_stacks[0]
def create_app(app_name, default_env=None):
    """Create the Beanstalk application, or adopt it if it already exists.

    On creation, clears the branch's saved environment/group-suffix and
    returns (None, None).  If the app exists, delegates to
    pull_down_app_info and returns its (platform_name, keyname) result.
    """
    # Attempt to create app
    try:
        io.log_info('Creating application: ' + app_name)
        elasticbeanstalk.create_application(
            app_name,
            strings['app.description']
        )
        set_environment_for_current_branch(None)
        set_group_suffix_for_current_branch(None)
        io.echo('Application', app_name,
                'has been created.')
        return None, None
    except AlreadyExistsError:
        io.log_info('Application already exists.')
        return pull_down_app_info(app_name, default_env=default_env)
def pull_down_app_info(app_name, default_env=None):
    """Pick a default environment for an existing app and pull its defaults.

    Saves the chosen environment on the current branch, then returns
    (platform_name, keyname).  keyname is -1 when the environment has no
    EC2 key configured (sentinel consumed by the caller).
    """
    # App exists, set up default environment
    envs = elasticbeanstalk.get_app_environments(app_name)
    if len(envs) == 0:
        # no envs, set None as default to override
        set_environment_for_current_branch(None)
        return None, None
    elif len(envs) == 1:
        # Set only env as default
        env = envs[0]
        io.log_info('Setting only environment "' +
                    env.name + '" as default')
    elif len(envs) > 1:
        if default_env:
            # '/ni' means non-interactive: silently take the first env.
            if default_env == '/ni':
                env = envs[0]
            else:
                env = next((env for env in envs if env.name == default_env),
                           None)
        if not default_env or env is None:
            # Prompt for default
            io.echo(prompts['init.selectdefaultenv'])
            env = utils.prompt_for_item_in_list(envs)
    set_environment_for_current_branch(env.name)
    io.log_info('Pulling down defaults from environment ' + env.name)
    # Get keyname
    keyname = elasticbeanstalk.get_specific_configuration_for_env(
        app_name, env.name, 'aws:autoscaling:launchconfiguration',
        'EC2KeyName'
    )
    if keyname is None:
        keyname = -1
    return env.platform.name, keyname
def open_webpage_in_browser(url, ssl=False):
    """Open *url* in the user's default browser.

    Prepends http(s):// when the scheme is missing.  On ssh sessions and
    Windows the webbrowser module is invoked inline; elsewhere the browser
    is launched via a forked subprocess so its output is squashed and the
    terminal never hangs waiting for the browser to exit.
    """
    io.log_info('Opening webpage with default browser.')
    if not url.startswith('http'):
        url = ('https://' if ssl else 'http://') + url
    LOG.debug('url={}'.format(url))
    if utils.is_ssh() or platform.system().startswith('Win'):
        # Prefered way for ssh or windows
        # Windows cant do a fork so we have to do inline
        LOG.debug('Running webbrowser inline.')
        import webbrowser
        webbrowser.open_new_tab(url)
    else:
        # this is the prefered way to open a web browser on *nix.
        # It squashes all output which can be typical on *nix.
        LOG.debug('Running webbrowser as subprocess.')
        # BUG FIX: `sys` was referenced below but never imported at file
        # level; import it locally alongside the other lazy imports.
        import sys
        from subprocess import Popen, PIPE
        p = Popen(['{python} -m webbrowser \'{url}\''
                   .format(python=sys.executable, url=url)],
                  stderr=PIPE, stdout=PIPE, shell=True)
        '''
        We need to fork the process for various reasons
            1. Calling p.communicate waits for the thread. Some browsers
                (if opening a new window) dont return to the thread until
                 the browser closes. We dont want the terminal to hang in
                 this case
            2. If we dont call p.communicate, there is a race condition. If
                the main process terminates before the browser call is made,
                the call never gets made and the browser doesn't open.
            Therefor the solution is to fork, then wait for the child
            in the backround.
        '''
        pid = os.fork()
        if pid == 0:  # Is child
            p.communicate()
        # Else exit
def get_application_names():
    """Return the names of all Beanstalk applications in the region."""
    return [application.name for application in
            elasticbeanstalk.get_all_applications()]
def print_env_details(env, health=True):
    """Echo a human-readable summary of *env* (region, version, platform,
    tier, CNAME, links, and — when *health* is True — status/health)."""
    region = aws.get_region_name()
    io.echo('Environment details for:', env.name)
    io.echo('  Application name:', env.app_name)
    io.echo('  Region:', region)
    io.echo('  Deployed Version:', env.version_label)
    io.echo('  Environment ID:', env.id)
    io.echo('  Platform:', env.platform)
    io.echo('  Tier:', env.tier)
    io.echo('  CNAME:', env.cname)
    io.echo('  Updated:', env.date_updated)
    print_env_links(env)
    if health:
        io.echo('  Status:', env.status)
        io.echo('  Health:', env.health)
def print_env_links(env):
    """Echo the environment's links: CNAME for WebServer-tier targets,
    worker SQS queue URL for Worker-tier targets.  No-op without links."""
    if env.environment_links is not None and len(env.environment_links) > 0:
        links = {}
        linked_envs = []
        # Process information returned in EnvironmentLinks
        for link in env.environment_links:
            link_data = dict(link_name=link['LinkName'], env_name=link['EnvironmentName'])
            links[link_data['env_name']] = link_data
            linked_envs.append(link_data['env_name'])
        # Call DescribeEnvironments for linked environments
        linked_env_descriptions = elasticbeanstalk.get_environments(linked_envs)
        for linked_env in linked_env_descriptions:
            if linked_env.tier.name == 'WebServer':
                links[linked_env.name]['value'] = linked_env.cname
            elif linked_env.tier.name == 'Worker':
                links[linked_env.name]['value'] = get_worker_sqs_url(linked_env.name)
                # brief pause between per-env API calls (throttle avoidance)
                time.sleep(.5)
        io.echo('  Environment Links:')
        for link in links.values():
            io.echo('    {}:'.format(link['env_name']))
            io.echo('      {}: {}'.format(link['link_name'],
                                          link['value']))
def get_worker_sqs_url(env_name):
    """Return the URL of the queue named 'WorkerQueue' for a worker env.

    Raises WorkerQueueNotFound when no queue has that name.  Should several
    share the name, the last one listed wins — same as the historical scan.
    """
    resources = elasticbeanstalk.get_environment_resources(env_name)['EnvironmentResources']
    matching = [queue for queue in resources['Queues']
                if queue['Name'] == 'WorkerQueue']
    if not matching:
        raise WorkerQueueNotFound
    return matching[-1]['URL']
def create_envvars_list(var_list, as_option_settings=True):
    """Split 'KEY=value' strings into settings to add and names to clear.

    Entries with a non-empty value are additions; a bare 'KEY=' marks KEY
    for removal.  With as_option_settings=True (the default) both results
    are shaped as Beanstalk option-setting dicts in the
    aws:elasticbeanstalk:application:environment namespace; otherwise a
    plain (dict, set) pair is returned.

    Raises InvalidOptionsError for entries not of the KEY=value form.
    """
    namespace = 'aws:elasticbeanstalk:application:environment'
    additions = dict()
    removals = set()
    for entry in var_list:
        # validate the KEY=value shape before splitting
        if not re.match('^[\w\\_.:/+@-][^=]*=.*$', entry):
            raise InvalidOptionsError(strings['setenv.invalidformat'])
        try:
            name, value = entry.split('=', 1)
        except ValueError:
            raise InvalidOptionsError(strings['setenv.invalidformat'])
        if value:
            additions[name] = value
        else:
            removals.add(name)
    if not as_option_settings:
        return additions, removals
    option_settings = [
        dict(Namespace=namespace, OptionName=name, Value=value)
        for name, value in additions.items()
    ]
    options_to_remove = [
        dict(Namespace=namespace, OptionName=name)
        for name in removals
    ]
    return option_settings, options_to_remove
def create_dummy_app_version(app_name):
    """Register the built-in 'Sample Application' version (no source bundle,
    no already-exists warning)."""
    return _create_application_version(
        app_name, 'Sample Application', None, None, None, warning=False)
def create_app_version(app_name, process=False, label=None, message=None, staged=False, build_config=None):
    """Create an application version from the local project.

    Resolves a version label (from *label*, or source control — with a
    timestamp suffix when *staged*), a description (*message* or the last
    commit message, truncated to 200 chars), then either reuses a
    configured artifact, reuses an existing S3 bundle, or zips the project
    and uploads it.  Returns the version label, or None when the project
    directory is empty.
    """
    cwd = os.getcwd()
    fileoperations._traverse_to_project_root()
    try:
        if heuristics.directory_is_empty():
            io.log_warning(strings['appversion.none'])
            return None
    finally:
        os.chdir(cwd)
    source_control = SourceControl.get_source_control()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])
    #get version_label
    if label:
        version_label = label
    else:
        version_label = source_control.get_version_label()
        if staged:
            # Make a unique version label
            timestamp = datetime.now().strftime("%y%m%d_%H%M%S")
            version_label = version_label + '-stage-' + timestamp
    # get description
    if message:
        description = message
    else:
        description = source_control.get_message()
    if len(description) > 200:
        description = description[:195] + '...'
    # Check for zip or artifact deploy
    artifact = fileoperations.get_config_setting('deploy', 'artifact')
    if artifact:
        file_name, file_extension = os.path.splitext(artifact)
        file_name = version_label + file_extension
        file_path = artifact
        s3_key = None
        s3_bucket = None
    else:
        # Check if the app version already exists
        s3_bucket, s3_key = get_app_version_s3_location(app_name, version_label)
        # Create zip file if the application version doesn't exist
        if s3_bucket is None and s3_key is None:
            file_name, file_path = _zip_up_project(
                version_label, source_control, staged=staged)
        else:
            # Existing bundle: nothing local to upload.
            file_name = None
            file_path = None
    # Get s3 location
    bucket = elasticbeanstalk.get_storage_location() if s3_bucket is None else s3_bucket
    key = app_name + '/' + file_name if s3_key is None else s3_key
    # Upload to S3 if needed
    try:
        s3.get_object_info(bucket, key)
        io.log_info('S3 Object already exists. Skipping upload.')
    except NotFoundError:
        # If we got the bucket/key from the app version describe call and it doesn't exist then
        # the application version must have been deleted out-of-band and we should throw an exception
        if file_name is None and file_path is None:
            raise NotFoundError('Application Version does not exist in the S3 bucket.'
                                ' Try uploading the Application Version again.')
        # Otherwise attempt to upload the local application version
        io.log_info('Uploading archive to s3 location: ' + key)
        s3.upload_application_version(bucket, key, file_path)
    fileoperations.delete_app_versions()
    io.log_info('Creating AppVersion ' + version_label)
    return _create_application_version(app_name, version_label, description,
                                       bucket, key, process, build_config=build_config)
def create_codecommit_app_version(app_name, process=False, label=None, message=None, build_config=None):
    """Create an application version sourced from a CodeCommit repository.

    Ensures at least one commit exists (creating an empty one if needed),
    pushes the current branch to CodeCommit, then registers a version
    pointing at the pushed commit instead of an S3 bundle.  Returns the
    version label.
    """
    cwd = os.getcwd()
    fileoperations._traverse_to_project_root()
    source_control = SourceControl.get_source_control()
    if source_control.get_current_commit() is None:
        io.log_warning('There are no commits for the current branch, attempting to create an empty commit and launching with the sample application')
        source_control.create_initial_commit()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])
    #get version_label
    if label:
        version_label = label
    else:
        version_label = source_control.get_version_label()
    # get description
    if message:
        description = message
    else:
        description = source_control.get_message()
    if len(description) > 200:
        description = description[:195] + '...'
    # Push code with git
    try:
        source_control.push_codecommit_code()
    except CommandError as e:
        io.echo("Could not push code to the CodeCommit repository:")
        raise e
    # Get additional arguments for deploying code commit and poll
    # for the commit to propagate to code commit.
    from . import gitops
    repository = gitops.get_default_repository()
    commit_id = source_control.get_current_commit()
    if repository is None or commit_id is None:
        raise ServiceError("Could not find repository or commit id to create an application version")
    # Deploy Application version with freshly pushed git commit
    io.log_info('Creating AppVersion ' + version_label)
    return _create_application_version(app_name, version_label, description,
                                       None, None, process, repository=repository, commit_id=commit_id,
                                       build_config=build_config)
def create_app_version_from_source(app_name, source, process=False, label=None, message=None, build_config=None):
    """Create an application version from an external source specifier.

    *source* is parsed into (location, repository, branch); only
    'codecommit' locations are supported — the named branch's head commit
    becomes the version source.  Returns the version label, or None when
    the project directory is empty.  Raises InvalidOptionsError for
    unsupported source locations.
    """
    cwd = os.getcwd()
    fileoperations._traverse_to_project_root()
    try:
        if heuristics.directory_is_empty():
            io.log_warning(strings['appversion.none'])
            return None
    finally:
        os.chdir(cwd)
    source_control = SourceControl.get_source_control()
    if source_control.untracked_changes_exist():
        io.log_warning(strings['sc.unstagedchanges'])
    # get version_label
    if label:
        version_label = label
    else:
        version_label = source_control.get_version_label()
    # get description
    if message:
        description = message
    else:
        description = source_control.get_message()
    if len(description) > 200:
        description = description[:195] + '...'
    # Parse the source and attempt to push via code commit
    source_location, repository, branch = utils.parse_source(source)
    if source_location == "codecommit":
        try:
            result = codecommit.get_branch(repository, branch)
        except ServiceError as ex:
            io.log_error("Could not get branch '{0}' for the repository '{1}' because of this error: {2}".format(branch,
                                                                                                                 repository,
                                                                                                                 ex.code))
            raise ex
        commit_id = result['branch']['commitId']
        if repository is None or commit_id is None:
            raise ServiceError("Could not find repository or commit id to create an application version")
    else:
        LOG.debug("Source location '{0}' is not supported".format(source_location))
        raise InvalidOptionsError("This command does not support the given source location: {0}".format(source_location))
    # Deploy Application version with freshly pushed git commit
    io.log_info('Creating AppVersion ' + version_label)
    return _create_application_version(app_name, version_label, description,
                                       None, None, process, repository=repository, commit_id=commit_id,
                                       build_config=build_config)
def _create_application_version(app_name, version_label, description,
                                bucket, key, process=False, warning=True,
                                repository=None, commit_id=None,
                                build_config=None):
    """
    A wrapper around elasticbeanstalk.create_application_version that
    handles certain error cases:
     * application doesnt exist -> creates it and retries (the while loop)
     * version already exists -> returns the label, optionally warning
     * validates BuildSpec files for CodeBuild

    Returns the version label on success.
    """
    if build_config is not None:
        buildspecops.validate_build_config(build_config)
    # Loop so that after creating a missing application we retry the
    # version creation with the same arguments.
    while True:
        try:
            elasticbeanstalk.create_application_version(
                app_name, version_label, description, bucket, key, process, repository, commit_id, build_config
            )
            return version_label
        except InvalidParameterValueError as e:
            if e.message.startswith('Application Version ') and \
                    e.message.endswith(' already exists.'):
                # we must be deploying with an existing app version
                if warning:
                    io.log_warning('Deploying a previously deployed commit.')
                return version_label
            elif e.message == responses['app.notexists'].replace(
                    '{app-name}', '\'' + app_name + '\''):
                # App doesnt exist, must be a new region.
                ## Lets create the app in the region
                create_app(app_name)
            else:
                raise
def _zip_up_project(version_label, source_control, staged=False):
    """Create (or reuse) the source-bundle zip for *version_label*.

    Returns (file_name, file_path).  A zip left over from a previous
    attempt is reused as-is.  When an .ebignore list exists the project is
    zipped by fileoperations honoring it; otherwise source control builds
    the archive (optionally from the staged tree).
    """
    archive_name = version_label + '.zip'
    archive_path = fileoperations.get_zip_location(archive_name)
    if fileoperations.file_exists(archive_path):
        # Reuse the artifact produced by a previous (possibly failed) attempt.
        return archive_name, archive_path
    io.echo(strings['appversion.create'].replace('{version}',
                                                 version_label))
    ignore_files = fileoperations.get_ebignore_list()
    if ignore_files is None:
        source_control.do_zip(archive_path, staged)
    else:
        io.log_info('Found .ebignore, using system zip.')
        fileoperations.zip_up_project(archive_path, ignore_list=ignore_files)
    return archive_name, archive_path
def update_environment(env_name, changes, nohang, remove=None,
                       template=None, timeout=None, template_body=None,
                       solution_stack_name=None):
    """Apply configuration *changes* (and *remove* list) to an environment.

    Logs and returns early on invalid-state/invalid-syntax service errors.
    Unless *nohang*, streams events until the update completes, logging a
    timeout message instead of raising when it runs past *timeout* minutes.
    """
    try:
        request_id = elasticbeanstalk.update_environment(
            env_name, changes, remove=remove, template=template,
            template_body=template_body,
            solution_stack_name=solution_stack_name)
    except InvalidStateError:
        io.log_error(prompts['update.invalidstate'])
        return
    except InvalidSyntaxError as e:
        io.log_error(prompts['update.invalidsyntax'] +
                     '\nError = ' + e.message)
        return
    if nohang:
        return
    io.echo('Printing Status:')
    try:
        wait_for_success_events(request_id, timeout_in_minutes=timeout,
                                can_abort=True)
    except TimeoutError:
        io.log_error(strings['timeout.error'])
# BRANCH-DEFAULTS FOR CONFIG FILE
def write_setting_to_current_branch(keyname, value):
    """Persist {keyname: value} in the branch-defaults section for the
    currently checked-out git branch."""
    branch = SourceControl.get_source_control().get_current_branch()
    fileoperations.write_config_setting(
        'branch-defaults',
        branch,
        {keyname: value}
    )
def set_environment_for_current_branch(value):
    """Record *value* as the default environment for the current branch."""
    write_setting_to_current_branch('environment', value)
def set_group_suffix_for_current_branch(value):
    """Record *value* as the compose group suffix for the current branch."""
    write_setting_to_current_branch('group_suffix', value)
def get_current_branch_environment():
    """Return the branch's saved default environment, or None."""
    return get_setting_from_current_branch('environment')
def get_current_branch_group_suffix():
    """Return the branch's saved compose group suffix, or None."""
    return get_setting_from_current_branch('group_suffix')
def get_default_keyname():
    """Return the configured EC2 keypair name (branch setting wins over global)."""
    return get_config_setting_from_branch_or_default('default_ec2_keyname')
def get_default_profile(require_default=False):
    """Return the configured AWS profile name.

    Falls back to the literal "default" when unset and *require_default*;
    returns None outside an initialized EB workspace.
    """
    try:
        profile = get_config_setting_from_branch_or_default('profile')
        if profile is None and require_default:
            return "default"
        return profile
    except NotInitializedError:
        return None
def get_default_region():
    """Return the configured region, or None outside an initialized workspace."""
    try:
        return get_config_setting_from_branch_or_default('default_region')
    except NotInitializedError:
        return None
def get_default_solution_stack():
    """Return the configured default platform string (branch wins over global)."""
    return get_config_setting_from_branch_or_default('default_platform')
def get_setting_from_current_branch(keyname):
    """Look up *keyname* in the branch-defaults for the current git branch.

    Returns None when git is unavailable, the branch has no saved
    defaults, or the key is absent.
    """
    try:
        source_control = SourceControl.get_source_control()
        branch_name = source_control.get_current_branch()
    except CommandError:
        # BUG FIX: the old message used a %s placeholder with str.format(),
        # so the key name was never interpolated into the log line.
        LOG.debug("Git is not installed returning None for setting: {0}".format(keyname))
        return None
    branch_dict = fileoperations.get_config_setting('branch-defaults', branch_name)
    if branch_dict is None:
        return None
    try:
        return branch_dict[keyname]
    except KeyError:
        return None
def get_config_setting_from_branch_or_default(key_name, default=_marker):
    """Return *key_name* from the branch defaults when set, otherwise from
    the global config section (with *default* as the final fallback)."""
    branch_value = get_setting_from_current_branch(key_name)
    if branch_value is None:
        return fileoperations.get_config_setting('global', key_name, default=default)
    return branch_value
def get_solution_stack(solution_string):
    """Resolve a user-supplied platform string to a SolutionStack.

    Tries, in order: a fully-explicit stack string (no service call), an
    exact case-insensitive name match, a version match, then a platform
    match — normalizing dashes and letter/digit boundaries first.  Version
    and platform matches take the first (latest) result.  Raises
    NotFoundError when nothing matches.
    """
    #If string is explicit, do not check
    if re.match(r'^\d\dbit [\w\s]+[0-9.]* v[0-9.]+ running .*$',
                solution_string):
        return SolutionStack(solution_string)
    solution_string = solution_string.lower()
    solution_stacks = elasticbeanstalk.get_available_solution_stacks()
    # check for exact string
    stacks = [x for x in solution_stacks if x.name.lower() == solution_string]
    if len(stacks) == 1:
        return stacks[0]
    #should only have 1 result
    if len(stacks) > 1:
        LOG.error('Platform list contains '
                  'multiple results')
        return None
    # No exact match, check for versions
    string = solution_string.replace('-', ' ')
    # put dash back in preconfigured types
    string = re.sub('preconfigured\\s+docker', 'preconfigured - docker', string)
    # put dash back in multi-container types
    string = re.sub('multi\\s+container', 'multi-container', string)
    # insert a space between a letter and a following digit (e.g. php5 -> php 5)
    string = re.sub(r'([a-z])([0-9])', '\\1 \\2', string)
    stacks = [x for x in solution_stacks if x.version.lower() == string]
    if len(stacks) > 0:
        # Give the latest version. Latest is always first in list
        return stacks[0]
    # No exact match, check for platforms
    stacks = [x for x in solution_stacks if x.platform.lower() == string]
    if len(stacks) > 0:
        # Give the latest version. Latest is always first in list
        return stacks[0]
    raise NotFoundError(prompts['sstack.invalidkey'].replace('{string}',
                                                             solution_string))
def is_cname_available(cname):
    """Return True when *cname* is free (service-side availability check)."""
    return elasticbeanstalk.is_cname_available(cname)
def get_instance_ids(app_name, env_name):
    """Return the EC2 instance ids attached to *env_name*.

    *app_name* is unused but kept for signature compatibility with callers.
    """
    described = elasticbeanstalk.get_environment_resources(env_name)
    return [instance['Id'] for instance in
            described['EnvironmentResources']['Instances']]
def upload_keypair_if_needed(keyname):
    """Ensure *keyname* exists in EC2, importing the local public key if not.

    No-ops when the key pair is already registered (either found in the
    initial listing or reported as existing during import); otherwise warns
    that the key was uploaded to the current region.
    """
    keys = [k['KeyName'] for k in ec2.get_key_pairs()]
    if keyname in keys:
        return
    key_material = _get_public_ssh_key(keyname)
    try:
        ec2.import_key_pair(keyname, key_material)
    except AlreadyExistsError:
        # Raced with another process/tool registering the same key — fine.
        return
    region = aws.get_region_name()
    io.log_warning(strings['ssh.uploaded'].replace('{keyname}', keyname)
                   .replace('{region}', region))
def _get_public_ssh_key(keypair_name):
    """Derive the public key for *keypair_name* via `ssh-keygen -y`.

    Looks for <ssh_folder>/<name> or <name>.pem.  Raises NotSupportedError
    when the private key file is missing, and CommandError when ssh-keygen
    fails or is not installed.
    """
    key_file = fileoperations.get_ssh_folder() + keypair_name
    if os.path.exists(key_file):
        file_name = key_file
    elif os.path.exists(key_file + '.pem'):
        file_name = key_file + '.pem'
    else:
        raise NotSupportedError(strings['ssh.filenotfound'].replace(
            '{key-name}', keypair_name))
    try:
        stdout, stderr, returncode = exec_cmd(['ssh-keygen', '-y', '-f',
                                               file_name])
        if returncode != 0:
            raise CommandError('An error occurred while trying '
                               'to get ssh public key')
        key_material = stdout
        return key_material
    except OSError:
        # BUG FIX: the exception was constructed but never raised, so a
        # missing ssh-keygen silently made this function return None.
        raise CommandError(strings['ssh.notpresent'])
def wait_for_processed_app_versions(app_name, version_labels, timeout=5):
    """Poll until every application version in *version_labels* finishes
    pre-processing.

    Returns True when all versions reach PROCESSED; False when any version
    FAILED or when *timeout* (minutes) elapses first.
    """
    versions_to_check = list(version_labels)
    processed = {}
    failed = {}
    io.echo('--- Waiting for Application Versions to be pre-processed ---')
    for version in version_labels:
        processed[version] = False
        failed[version] = False
    start_time = datetime.utcnow()
    while not all([(processed[version] or failed[version]) for version in versions_to_check]):
        if datetime.utcnow() - start_time >= timedelta(minutes=timeout):
            io.log_error(strings['appversion.processtimeout'])
            return False
        io.LOG.debug('Retrieving app versions.')
        app_versions = elasticbeanstalk.get_application_versions(
            app_name, versions_to_check)["ApplicationVersions"]
        for v in app_versions:
            if v['Status'] == 'PROCESSED':
                processed[v['VersionLabel']] = True
                io.echo('Finished processing application version {}'
                        .format(v['VersionLabel']))
                versions_to_check.remove(v['VersionLabel'])
            elif v['Status'] == 'FAILED':
                # Bug fix: previously this set ``failed[version]`` using the
                # stale loop variable left over from the setup loop above,
                # so the version that actually failed was never recorded and
                # the loop could hang or misreport.
                failed[v['VersionLabel']] = True
                io.log_error(strings['appversion.processfailed'].replace(
                    '{app_version}', v['VersionLabel']))
                versions_to_check.remove(v['VersionLabel'])
        if all(processed.values()):
            return True
        time.sleep(4)
    if any(failed.values()):
        io.log_error(strings['appversion.cannotdeploy'])
        return False
    return True
| |
# Copyright 2011 Gilt Groupe, INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
cobbler functionality
"""
import os
import re
import sys
import types
import xmlrpclib
import subprocess
import mothership.validate
import mothership.network_mapper
from mothership.transkey import transkey
class CobblerAPI:
    """Thin wrapper around one or more cobbler XML-RPC servers.

    Translates mothership host/network dictionaries to and from cobbler
    system records using the transkey maps built in ``__init__``.  When
    ``cfg.coblive`` is false, methods only print the API calls they would
    have made (dry-run mode).
    """
    def __init__(self, cfg, site_id=None):
        # Builds field-translation maps and connects to the primary cobbler
        # server, plus an optional per-site sub-server when site_id matches
        # an entry in cfg.cobconfig['sites'].
        # transkey map for cobbler interface to mothership hardware
        self.map_hardware = {
            'power_address': 'power_switch',
            # 'hw_tag' : 'hw_tag',
        }
        # transkey map for cobbler interface to mothership servers
        self.map_server = {
            'name' : 'hostname',
            'profile' : 'cobbler_profile',
            'virtual' : 'virtual',
            'virt_cpus' : 'cores',
            'virt_ram' : 'ram',
            'virt_file_size': 'disk',
            'virt_path' : 'storage',
            # 'site_id' : 'site_id',
            # 'realm' : 'realm',
            # 'hw_tag' : 'hw_tag',
        }
        # transkey map for cobbler interface to mothership network
        self.map_network = {
            'mac_address' : 'mac',
            'dhcp_tag' : 'vlan',
            'subnet' : 'netmask',
            'ip_address' : 'ip',
            'bonding_opts' : 'bond_options',
            'static_route' : 'static_route',
            'interface' : 'interface',
            'dns_name' : 'dns',
            # 'server_id' : 'server_id',
            # 'site_id' : 'site_id',
            # 'realm' : 'realm',
            # 'hw_tag' : 'hw_tag',
        }
        # transkey map for mothership to cobbler interface
        self.map_interface = {
            'mac' : 'macaddress',
            'ip' : 'ipaddress',
            'vlan' : 'dhcptag',
            'netmask' : 'subnet',
            'dns_name' : 'dnsname',
            'bonding' : 'bonding',
            'bond_master' : 'bondingmaster',
            'bond_options' : 'bondingopts',
            'static' : 'static',
            'static_routes' : 'staticroutes',
        }
        # transkey map for mothership to cobbler system
        # NOTE(review): unlike the other maps, some values here are literals
        # (False, 'crashkernel=128M@16M') rather than mothership key names —
        # presumably transkey passes them through as fixed values; confirm
        # against mothership.transkey.
        self.map_system = {
            'hostname' :'hostname',
            'netboot_enabled' : False,
            'power_type' : 'power_type',
            'power_switch' : 'power_address',
            'power_port' : 'power_id',
            'name' : [ 'hostname', 'realm', 'site_id' ],
            'cobbler_profile' : 'profile',
            'virtual' : 'virtual',
            'cores' : 'virt_cpus',
            'ram' : 'virt_ram',
            'disk' : 'virt_file_size',
            'storage' : 'virt_path',
            'kernel_options_post' : 'crashkernel=128M@16M',
            # 'gateway' : 'gateway',
            # '' :'comment',
            # '' :'ctime',
            # '' :'depth',
            # '' :'kernel_options',
            # '' :'kernel_options_post',
            # '' :'ks_meta',
            # '' :'mgmt_classes',
            # '' :'mtime',
            # '' :'name_servers',
            # '' :'name_servers_search',
            # '' :'owners',
            # '' :'template_remote_kickstarts',
            # '' :'uid',
        }
        # set up primary cobbler control
        self.cfg = cfg
        self.coblive = cfg.coblive
        self.cobremote = None
        self.cobtoken = None
        if self.coblive:
            try:
                self.cobremote = xmlrpclib.Server('http://%s/cobbler_api' % cfg.cobconfig['host'])
                self.cobtoken = self.cobremote.login(cfg.cobconfig['user'], cfg.cobconfig['pass'])
            except:
                # Broad except: any login/connect failure leaves cobremote at
                # its partially-initialized value and only warns on stderr.
                sys.stderr.write('Cobbler configuration error. Check cobbler API server')
        # set up remote cobbler control
        self.subremote = None
        self.subtoken = None
        if site_id and 'sites' in cfg.cobconfig.keys():
            for s in cfg.cobconfig['sites']:
                if s['id'] == site_id:
                    try:
                        self.subremote = xmlrpclib.Server('http://%s/cobbler_api' % s['host'])
                        self.subtoken = self.subremote.login(s['user'], s['pass'])
                    except:
                        # On subsite failure the constructor returns early,
                        # so osdict below is never populated.
                        sys.stderr.write('Cobbler could not configure subsite. Check cobbler API server')
                        return
        # retrieve os dict
        self.osdict = self.get_os_dict(cfg)
    def add_system(self, cfg, host_dict):
        """Add host_dict as a cobbler system on the primary (and, if
        configured, the subsite) server.  Always returns True."""
        self._add_system(cfg, host_dict, self.cobremote, self.cobtoken)
        if self.subremote is not None:
            self._add_system(cfg, host_dict, self.subremote, self.subtoken)
        return True
    def _add_system(self, cfg, host_dict, remote, token):
        """Create/replace one cobbler system record on *remote*, including
        all of its network interfaces.  Returns False on interface-modify
        failure, otherwise None."""
        hostname = '%s.%s.%s' % (host_dict['hostname'],
            host_dict['realm'], host_dict['site_id'])
        if not host_dict['cobbler_profile']:
            print '%s has empty profile: system not provisioned for cobbler!' % hostname
            return
        if self.coblive:
            # Replace any existing record of the same name before re-adding.
            if self._find_system_by_hostname(hostname, remote):
                print 'System already exists: %s' % hostname
                self._delete_system(hostname, remote, token)
            print 'Adding system to %s: %s' % (remote._ServerProxy__host, hostname)
            handle = remote.new_system(token)
        else:
            print 'API: remote.find_system({\'name\':hostname})'
            print 'API: if found: remote.remove_system(hostname, token)'
            print 'API: set new handle = remote.new_system(token)'
        sysdict = transkey(host_dict, self.map_system)
        # Adjust power management values after transkey
        if sysdict['virtual']:
            sysdict['power_user'] = 'root'
            sysdict['power_pass'] = ''
        else:
            sysdict['power_user'] = cfg.puser
            sysdict['power_pass'] = cfg.ppass
        if self.coblive:
            for k in sysdict.keys():
                if '-xen-' not in sysdict['profile'] and 'virt_' in k:
                    continue # do not set cpu,ram,disk for baremetal
                if sysdict[k] is not None:
                    if k == 'virt_ram':
                        # convert mothership GB to cobbler MB
                        sysdict[k] = int(sysdict[k]) * 1024
                    #print 'Modifying %s system values: %s' % (hostname, k)
                    remote.modify_system(handle, k, sysdict[k], token)
        else:
            from pprint import pprint
            print 'API: sysdict = { \'key\':\'value\', }'
            pprint(sysdict, indent=4)
            print 'API: loop through all the cobbler \'system\' values:'
            print 'API: remote.modify_system(handle, key, sysdict[key], token)'
            print 'API: then loop through all interfaces...'
        ifbond = False
        for k in sorted(host_dict['interfaces'].keys()):
            # Work on a copy so the caller's interface dicts stay untouched.
            x = host_dict['interfaces'][k].copy()
            # remove ip if world
            if 'ip' in x and x['ip'] == '0.0.0.0':
                del x['ip']
            # if valid ip, set static to True
            if 'ip' in x:
                x['static'] = 'True'
            # set static_routes
            if x['static_route']:
                x['static_routes'] = [ '0.0.0.0/0:%s' % x['static_route'] ]
            # add appropriate dns_names for each interface
            domain = None
            # NOTE(review): x['ip'] raises KeyError here if the key was
            # deleted above ('0.0.0.0' case) — presumably interfaces always
            # carry an 'ip' key; verify upstream.
            if x['ip']:
                domain = mothership.network_mapper.remap(cfg, 'dom', nic=x['interface'], siteid=x['site_id'], ip=x['ip'])[0]
            if domain:
                x['dns_name'] = '%s%s' % (host_dict['hostname'], domain)
            # set the bond0 master interface
            if not ifbond and x['bond_options']:
                ifbond = 'bond0'
                ifdict = x.copy()
                ifdict['bonding'] = 'master'
                del ifdict['mac']
                # modify system interface bond0
                if self.coblive:
                    #print 'Modifying %s network values: %s' % (hostname, ifbond)
                    remote.modify_system(handle, 'modify_interface',
                        self.append_value_to_keyname(cfg, transkey(ifdict,
                        self.map_interface, True), '-'+ifbond), token)
                else:
                    print 'API: since bond_options are set:'
                    print 'API: remote.modify_system(handle, \'modify_interface\', ifbond-dict-map, token)'
                # if xenserver, then add template for bond0
                if 'xenserver' in sysdict['profile']:
                    if self.coblive:
                        #print 'Modifying %s templates values' % hostname
                        remote.modify_system(handle, 'template_files', cfg.cobconfig['xentemplates'], token)
                    else:
                        print 'API: if \'xenserver\' profile:'
                        print 'API: remote.modify_system(handle, \'template_files\', {template-path:alias}, token)'
            # set the bond0 slave interfaces
            if x['bond_options']:
                x['bonding'] = 'slave'
                x['bond_master'] = ifbond
                x['ip'] = ifdict['ip']
                del x['bond_options']
            # modify system interface 'k'
            if self.coblive and host_dict['interfaces'][k]['mac']:
                try:
                    remote.modify_system(handle, 'modify_interface',
                        self.append_value_to_keyname(cfg, transkey(x,
                        self.map_interface, True), '-'+k), token)
                except xmlrpclib.Fault, err:
                    print 'Aborting cobbler add, failed to modify %s %s' % (hostname, k)
                    print '    ' + str(err)
                    return False
            else:
                print 'API: remote.modify_system(handle, \'modify_interface\', %s-dict-map, token)' % k
        # save all system changes
        if self.coblive:
            remote.save_system(handle, token)
        else:
            print 'API: remote.save_system(handle, token)'
    def append_value_to_keyname(self, cfg, olddict, suffix):
        """Return a copy of olddict with *suffix* appended to every key.
        Entries with falsy values are dropped."""
        # loop through all keys and append the suffix to the keyname
        newdict = {}
        for k in olddict.keys():
            if olddict[k]:
                newdict['%s%s' % (k,suffix)] = olddict[k]
        return newdict
    def delete_system(self, hostname):
        """Remove the system from the primary (and any subsite) server."""
        self._delete_system(hostname, self.cobremote, self.cobtoken)
        if self.subremote is not None:
            self._delete_system(hostname, self.subremote, self.subtoken)
    def _delete_system(self, hostname, remote, token):
        # No-op (with a message) when the system is unknown to *remote*.
        if self.coblive:
            if not self._find_system_by_hostname(hostname, remote):
                print 'Skipping cobbler delete, system does not exist: %s' % hostname
                return
            print 'Deleting cobbler system from %s: %s' % (remote._ServerProxy__host, hostname)
            remote.remove_system(hostname, token)
        else:
            print 'API: remote.remove_system(hostname, token)'
    def extract_system_by_hostname(self, hostname):
        """Fetch a cobbler system and convert it back into mothership's
        {'server': [...], 'hardware': [...], 'network': [...]} shape."""
        return self._extract_system_by_hostname(self.cobremote, hostname)
    def _extract_system_by_hostname(self, remote, hostname):
        info = {}
        print 'Extracting cobbler system: %s' % hostname
        system = remote.get_system(hostname)
        # Profiles containing '-xen-' are treated as virtual guests.
        if '-xen-' in system['profile']:
            system['virtual'] = True
        else:
            system['virtual'] = False
        info['server'] = [ transkey(system, self.map_server) ]
        info['hardware'] = [ transkey(system, self.map_hardware) ]
        info['network'] = []
        sysif = system['interfaces']
        for k in sysif.keys():
            # Skip bond pseudo-interfaces; only physical NICs are mapped back.
            if k.startswith('bond'): continue
            sysif[k]['interface'] = k
            if sysif[k]['static_routes']:
                sysif[k]['static_route'] = sysif[k]['static_routes'][0].split(':')[1]
            info['network'].append(transkey(system['interfaces'][k], self.map_network, True))
        return info
    def append_kickstart_info(self, info):
        """Attach kickstart repo/ks URLs to *info*, preferring the subsite
        server when one is configured."""
        if self.subremote is not None:
            return self._append_kickstart_info(info, self.subremote)
        else:
            return self._append_kickstart_info(info, self.cobremote)
    def _append_kickstart_info(self, info, remote):
        # Walk up the profile inheritance chain to find the root distro.
        profile = remote.get_profile(info['server'][0]['cobbler_profile'])
        while profile['parent'] != '':
            profile = remote.get_profile(profile['parent'])
        distro = remote.get_distro(profile['distro'])['name']
        cobblerip = remote.get_settings()['server']
        info['kick'] = {}
        info['kick']['repo'] = 'http://%s/cblr/links/%s/' % (cobblerip, distro)
        info['kick']['ks'] = 'http://%s/cblr/svc/op/ks/system' % cobblerip
        return info
    def _find_system_by_hostname(self, hostname, remote):
        # Returns cobbler's find_system result (truthy when the name exists).
        return remote.find_system({'name':hostname})
    def get_os_dict(self, cfg):
        """Build {'profile': {name: comment}, 'default': {dhcp_tag: name}}
        from all profiles on the primary server."""
        osdict = { 'profile':{}, 'default':{} }
        for profile in self.list_all_profiles(cfg):
            if profile['comment'] != '':
                osdict['profile'][profile['name']] = profile['comment']
            if profile['dhcp_tag'] != 'default':
                osdict['default'][profile['dhcp_tag']] = profile['name']
        return osdict
    def list_all_profiles(self, remote):
        # NOTE(review): the *remote* argument is ignored; always queries the
        # primary server.
        return self._list_all_profiles(self.cobremote)
    def _list_all_profiles(self, remote):
        return remote.get_profiles()
    def list_all_systems(self, remote):
        # NOTE(review): the *remote* argument is ignored; always queries the
        # primary server.
        return self._list_all_systems(self.cobremote)
    def _list_all_systems(self, remote):
        return remote.get_systems()
    def set_system_netboot(self, hostname, state=False):
        """Toggle netboot_enabled on the subsite server if configured,
        otherwise on the primary."""
        if self.subremote is not None:
            self._set_system_netboot(hostname, self.subremote, self.subtoken, state)
        else:
            self._set_system_netboot(hostname, self.cobremote, self.cobtoken, state)
    def _set_system_netboot(self, hostname, remote, token, state=False):
        if self.coblive:
            if not self._find_system_by_hostname(hostname, remote):
                print 'Skipping netboot setting, system does not exist: %s' % hostname
                return
            print 'Setting netboot "%s" for %s on %s' % (state, hostname,
                remote._ServerProxy__host)
            handle = remote.get_system_handle(hostname, token)
            remote.modify_system(handle, 'netboot_enabled', state, token)
        else:
            print 'API: set handle = remote.get_system_handle(hostname, token)'
            print 'API: remote.modify_system(handle, \'netboot_enabled\', state, token)'
    def set_system_power(self, hostname, state='reboot', virtual=False):
        """Issue a power action; physical hosts go through the subsite server
        when one is configured, virtual hosts always use the primary."""
        if self.subremote is not None and not virtual:
            self._set_system_power(hostname, self.subremote, self.subtoken, state)
        else:
            self._set_system_power(hostname, self.cobremote, self.cobtoken, state)
    def _set_system_power(self, hostname, remote, token, state='reboot'):
        if self.coblive:
            if not self._find_system_by_hostname(hostname, remote):
                print 'Skipping power setting, system does not exist: %s' % hostname
                return
            print 'Setting power "%s" for %s on %s' % (state, hostname,
                remote._ServerProxy__host)
            handle = remote.get_system_handle(hostname, token)
            try:
                remote.power_system(handle, state, token)
            except xmlrpclib.Fault, err:
                print 'ERROR occurred during cobbler power: %s' \
                    % str(err).replace('\n', ' ')
                return
        else:
            print 'API: set handle = remote.get_system_handle(hostname, token)'
            print 'API: remote.power_system(handle, state, token)'
    def sync_cobbler(self, hostname):
        """Regenerate DNS output for the host's domain and sync all
        configured cobbler servers."""
        # Throwaway options object mimicking the CLI flags dns code expects.
        class opts:
            all = False
            system = True
        fqdn = mothership.validate.v_get_fqn(self.cfg, hostname)
        domain = re.sub('^\w+\.', '', fqdn)
        # for the rewrite: make this only happen if it's a new box
        # if it's just a reinstall the DNS isn't changing.
        # NOTE(review): mothership.dns is not imported at the top of this
        # module — presumably loaded by one of the mothership imports; verify.
        mothership.dns.generate_dns_output(self.cfg, domain, opts)
        self._sync_cobbler(self.cobremote, self.cobtoken)
        if self.subremote is not None:
            self._sync_cobbler(self.subremote, self.subtoken)
    def _sync_cobbler(self, remote, token):
        if self.coblive:
            print 'Syncing cobbler configurations on %s' % remote._ServerProxy__host
            try:
                remote.sync(token)
            except xmlrpclib.Fault, err:
                print 'ERROR occurred during cobbler sync: %s' % str(err)
                return
        else:
            print 'API: remote.sync(token)'
    def abort_kick(self, name, host):
        """Return True if a kickstart of *host* should be aborted."""
        if self.subremote is not None:
            self._abort_kick(name, host, self.subremote)
        else:
            self._abort_kick(name, host, self.cobremote)
    def _abort_kick(self, name, host, remote):
        # abort kick if host.realm.site_id not defined for current system
        try:
            if str(self._extract_system_by_hostname(remote,
                host)).find(host) < 0:
                print '!! IP Address for %s not defined' % host
                print 'Please run: %s mod_vlan %s <vlan#>' % (name, host)
                print 'or equivalent command before kickstarting'
                return True
        except:
            return False
        # confirm kick if host.realm.site_id responds to pings
        pingcheck = os.popen("ping -q -c2 -t5 "+host,"r")
        while 1:
            line = pingcheck.readline()
            if not line: break
            match = re.findall(re.compile(r"(\d)( packets)? received"),line)
            if match:
                if int(match[0][0]) == 2:
                    # Host answered both pings: demand explicit confirmation
                    # before allowing a re-kickstart of a live machine.
                    print '%s is an existing host, responsive to pings' % host
                    ans = raw_input('If you still want to continue, type "cobbler_forcekick_%s": ' % host)
                    if ans != 'cobbler_forcekick_%s' % host:
                        print 'cobbler force kick of %s aborted.' % host
                        return True
        return False
    def clear_puppetca(self, host):
        """Best-effort removal of the puppet CA entry for *host*."""
        try:
            # clear out the puppet sign CA for host
            os.system('sudo /usr/sbin/puppetca --clean %s' % host)
            print 'Removed puppet ssl for %s' % host
            return True
        except OSError:
            print 'Failed to clear puppet ssl for %s (are you root?)' % host
            return False
    def _check_known_hosts(self, host, remote, user='~'):
        """Ensure the host's power controller address is present in the
        system/user ssh known_hosts files, scanning its key if missing."""
        print 'Checking known_hosts for %s' % host
        # From here on, *host* is the power controller address, not the name.
        system = remote.get_system(host)
        host = system['power_address']
        # check for ssh host key
        hasKey = False
        userknownhosts = os.path.expanduser('%s/.ssh/known_hosts' % user)
        for file in [ '/etc/ssh/ssh_known_hosts', userknownhosts ]:
            if os.path.exists(file):
                for line in open(file):
                    if host in line:
                        hasKey = True
                        break
        if not hasKey:
            print '+=== Adding %s to known_hosts' % host
            key = subprocess.Popen(['ssh-keyscan', host],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE).\
                communicate()[0]
            # NOTE(review): file handle is never closed/flushed explicitly;
            # relies on interpreter cleanup.
            f = open(userknownhosts, 'a')
            f.write(key)
| |
#!/usr/bin/env python
# Copyright 2009 Jay Reding
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from xml.etree import ElementTree # for Python 2.5 users
except:
from elementtree import ElementTree
import os
import base64
import time
import string
import gtk
import gtk.glade
import gnome.ui
import gobject
import feedparser
import gdata
import atom
# Internal Libraries
import config
import editor
import firstrun
class FileHandler():
    """Save/load BloGTK posts as Atom-flavoured XML files via GTK dialogs.

    A "postStruct" is a 4-element list: [title, content, extended, keywords]
    where keywords is a comma-separated string.
    """
    def __init__(self):
        return None
    def main(self):
        return True
    def saveFile(self, postStruct, parentWin):
        """Prompt for a filename and write postStruct as XML.

        Returns (True, filename) on success, (False, 'cancel') when the
        dialog is dismissed, or (False, error-string) on write failure.
        """
        # Our first task is to create an XML file with our post data
        postAsXML = self.postStructToXML(postStruct)
        # Now, we need to create our dialog for saving our file.
        saveFileDialog = gtk.FileChooserDialog('Save File', parentWin, gtk.FILE_CHOOSER_ACTION_SAVE)
        # We need to establish some file filters to help users narrow their choices
        saveFilter = gtk.FileFilter()
        saveFilter.add_pattern('*.blogtk')
        saveFilter.set_name('BloGTK Saved Posts')
        allFilter = gtk.FileFilter()
        allFilter.add_pattern('*')
        allFilter.set_name('All Files')
        saveFileDialog.add_filter(saveFilter)
        saveFileDialog.add_filter(allFilter)
        # Set our defaults for the save dialog
        saveFileDialog.set_do_overwrite_confirmation(True)
        saveFileDialog.add_buttons(gtk.STOCK_SAVE, gtk.RESPONSE_ACCEPT, gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT)
        saveFileDialog.set_current_name('.blogtk')
        response = saveFileDialog.run()
        if response == gtk.RESPONSE_ACCEPT:
            filename = saveFileDialog.get_filename()
            try:
                saveFile = open(filename, "w")
                saveFile.write(postAsXML)
                saveFile.close()
            except Exception, e:
                saveFileDialog.destroy()
                return False, str(e)
            saveFileDialog.destroy()
            return True, filename
        else:
            saveFileDialog.destroy()
            return False, 'cancel'
    def resaveFile(self, postStruct, filename):
        """Re-save postStruct to an already-known filename (no dialog).

        Returns (True, filename) or (False, error-string).
        """
        postAsXML = self.postStructToXML(postStruct)
        try:
            saveFile = open(filename, "w")
            saveFile.write(postAsXML)
            saveFile.close()
        except Exception, e:
            return False, str(e)
        return True, filename
    def openFile(self, parentWin):
        """Prompt for a saved post and parse it.

        Returns (True, filename, postStruct) on success, (False, 'cancel')
        when dismissed, or (False, error-string) on read/parse failure.
        NOTE(review): the fall-through at the end returns a 2-tuple
        (True, filename) even on the accept path — looks unreachable in
        practice because both try branches return first; verify callers.
        """
        # Now, we need to create our dialog for loading our file.
        openFileDialog = gtk.FileChooserDialog('Open File', parentWin, gtk.FILE_CHOOSER_ACTION_OPEN)
        # We need to establish some file filters to help users narrow their choices
        openFilter = gtk.FileFilter()
        openFilter.add_pattern('*.blogtk')
        openFilter.set_name('BloGTK Saved Posts')
        allFilter = gtk.FileFilter()
        allFilter.add_pattern('*')
        allFilter.set_name('All Files')
        openFileDialog.add_filter(openFilter)
        openFileDialog.add_filter(allFilter)
        # Set our defaults for the open dialog
        openFileDialog.add_buttons(gtk.STOCK_OPEN, gtk.RESPONSE_ACCEPT, gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT)
        response = openFileDialog.run()
        if response == gtk.RESPONSE_ACCEPT:
            filename = openFileDialog.get_filename()
            try:
                openFile = open(filename, "r")
                postXML = openFile.read()
                openFile.close()
                try:
                    # Re-parses from the filename; postXML above is unused
                    # beyond validating the file is readable.
                    postStruct = self.postXMLToStruct(filename)
                    openFileDialog.destroy()
                    return True, filename, postStruct
                except Exception, e:
                    print str(Exception), str(e)
                    openFileDialog.destroy()
                    return False, 'Not a valid BloGTK save file'
            except Exception, e:
                print str(Exception), str(e)
                openFileDialog.destroy()
                return False, str(e)
            openFileDialog.destroy()
            return True, filename
        else:
            openFileDialog.destroy()
            return False, 'cancel'
    def postXMLToStruct(self, filename):
        """Parse a saved .blogtk XML file back into a postStruct list."""
        #parser = feedparser.parse(postXML)
        tree = ElementTree.parse(filename).getroot()
        postStruct = []
        entry = tree.find('entry')
        title = entry.find('title').text
        # When we pull the XML data, it contains extra whitespace that
        # should be stripped
        content = entry.findall('content')[0].text.lstrip('\n\t\t\t').rstrip('\n\n\t\t\t')
        extended = entry.findall('content')[1].text.lstrip('\n\t\t\t').rstrip('\n\n\t\t\t')
        keywords = ''
        # Re-join category terms into a comma-separated keyword string.
        for index, keyword in enumerate(entry.findall('category')):
            if index != (len(entry.findall('category')) -1):
                keywords = keywords + keyword.attrib['term'] + ','
            else:
                keywords = keywords + keyword.attrib['term']
        postStruct.append(title)
        postStruct.append(content)
        postStruct.append(extended)
        postStruct.append(keywords)
        return postStruct
    def postStructToXML(self, postStruct):
        """Serialize a postStruct into the Atom-style XML save format."""
        timestamp = time.strftime( "%Y-%m-%dT%H:%M:%S", time.gmtime())
        postAsXML = '<?xml version="1.0" encoding="utf-8" standalone="yes"?>\n\
        <feed version="0.3" xml:lang="en-US">\n\
        <title mode="escaped" type="text/html">BloGTK Generated Feed</title>\n\
        <id>BloGTK2SaveFile</id>\n\
        <modified>' + timestamp + '</modified>\n\
        <generator url="http://blogtk.sourceforge.net/" version="2.0">BloGTK</generator>\n\
        <entry>\n\
        <link href="blogtk://" rel="service.edit" title="BloGTK Saved Post File" type="application/atom+xml"/>\n\
        <author>\n\
        <name>BloGTK Saved Listing</name>\n\
        </author>\n\
        <issued>' + timestamp + '</issued>\n\
        <modified>' + timestamp + '</modified>\n\
        <created>' + timestamp + '</created>\n'
        try:
            for tag in postStruct[3].split(','):
                postAsXML = postAsXML + ' <category scheme="keyword" term="' + tag + '" />'
        except:
            pass
        # Make entry text XML safe. Yes, this is a kludge - any better ideas
        # appreciated.
        no_amps = string.replace(postStruct[1], '&', '&amp;')
        no_lts = string.replace(no_amps, '<', '&lt;')
        entry_text = string.replace(no_lts, '>', '&gt;')
        no_amps2 = string.replace(postStruct[2], '&', '&amp;')
        no_lts2 = string.replace(no_amps2, '<', '&lt;')
        extended_text = string.replace(no_lts2, '>', '&gt;')
        postAsXML = postAsXML + '<id>1</id>\n\
        <title mode="escaped" type="text/html">' + postStruct[0] + '</title>\n\
        <summary type="text/plain" mode="escaped"> </summary>\n\
        <content type="application/xhtml+xml" xml:space="preserve">\n\
        ' + entry_text + '\n\
        </content>\n\
        <content type="application/xhtml+xml" xml:space="preserve">\n\
        ' + extended_text + '\n\
        </content>\n\
        </entry>\n\
        </feed>'
        return postAsXML
| |
"""
============================
``ctypes`` Utility Functions
============================
See Also
---------
load_library : Load a C library.
ndpointer : Array restype/argtype with verification.
as_ctypes : Create a ctypes array from an ndarray.
as_array : Create an ndarray from a ctypes array.
References
----------
.. [1] "SciPy Cookbook: ctypes", http://www.scipy.org/Cookbook/Ctypes
Examples
--------
Load the C library:
>>> _lib = np.ctypeslib.load_library('libmystuff', '.') #doctest: +SKIP
Our result type, an ndarray that must be of type double, be 1-dimensional
and is C-contiguous in memory:
>>> array_1d_double = np.ctypeslib.ndpointer(
... dtype=np.double,
... ndim=1, flags='CONTIGUOUS') #doctest: +SKIP
Our C-function typically takes an array and updates its values
in-place. For example::
void foo_func(double* x, int length)
{
int i;
for (i = 0; i < length; i++) {
x[i] = i*i;
}
}
We wrap it using:
>>> _lib.foo_func.restype = None #doctest: +SKIP
>>> _lib.foo_func.argtypes = [array_1d_double, c_int] #doctest: +SKIP
Then, we're ready to call ``foo_func``:
>>> out = np.empty(15, dtype=np.double)
>>> _lib.foo_func(out, len(out)) #doctest: +SKIP
"""
from __future__ import division, absolute_import, print_function
__all__ = ['load_library', 'ndpointer', 'test', 'ctypes_load_library',
'c_intp', 'as_ctypes', 'as_array']
import sys, os
from numpy import integer, ndarray, dtype as _dtype, deprecate, array
from numpy.core.multiarray import _flagdict, flagsobj
try:
import ctypes
except ImportError:
ctypes = None
# When the interpreter was built without ctypes, expose dummies that fail
# loudly at call time instead of at import time.
if ctypes is None:
    def _dummy(*args, **kwds):
        """
        Dummy object that raises an ImportError if ctypes is not available.
        Raises
        ------
        ImportError
            If ctypes is not available.
        """
        raise ImportError("ctypes is not available.")
    ctypes_load_library = _dummy
    load_library = _dummy
    as_ctypes = _dummy
    as_array = _dummy
    from numpy import intp as c_intp
    _ndptr_base = object
else:
    import numpy.core._internal as nic
    c_intp = nic._getintp_ctype()
    del nic
    # ndpointer classes subclass c_void_p so ctypes accepts them as argtypes.
    _ndptr_base = ctypes.c_void_p
# Adapted from Albert Strasheim
def load_library(libname, loader_path):
    """Load a shared library and return the ctypes library object.

    Wraps ``ctypes.cdll[path]`` with cross-platform extension handling:
    when *libname* has no extension, the platform's shared-library suffix
    (and, where different, the Python-extension suffix) is tried.

    Parameters
    ----------
    libname : str
        Library name, optionally with a 'lib' prefix, with or without an
        extension.
    loader_path : str
        Directory (or a file inside the directory) where the library lives.

    Returns
    -------
    library object
        The loaded ``ctypes`` library.

    Raises
    ------
    OSError
        If no candidate file exists, or the library is defective and
        cannot be loaded.
    """
    if ctypes.__version__ < '1.0.1':
        import warnings
        warnings.warn("All features of ctypes interface may not work " \
                "with ctypes < 1.0.1", stacklevel=2)

    # Build the list of candidate filenames to probe, most specific first.
    if os.path.splitext(libname)[1]:
        # Caller supplied an explicit extension: try that name only.
        candidates = [libname]
    else:
        from numpy.distutils.misc_util import get_shared_lib_extension
        base_ext = get_shared_lib_extension()
        candidates = [libname + base_ext]
        # Shared libraries and loadable Python modules can differ in
        # extension (mac, windows, linux >= py3.2) — probe both.
        py_ext = get_shared_lib_extension(is_python_ext=True)
        if py_ext != base_ext:
            candidates.insert(0, libname + py_ext)

    loader_path = os.path.abspath(loader_path)
    if os.path.isdir(loader_path):
        libdir = loader_path
    else:
        libdir = os.path.dirname(loader_path)

    for candidate in candidates:
        libpath = os.path.join(libdir, candidate)
        if os.path.exists(libpath):
            # A defective library surfaces here as OSError from ctypes.
            return ctypes.cdll[libpath]
    raise OSError("no file with expected extension")
# Deprecated alias kept for backward compatibility with old callers.
ctypes_load_library = deprecate(load_library, 'ctypes_load_library',
                                'load_library')
def _num_fromflags(flaglist):
    """Combine a list of flag names into a single numeric flags value."""
    return sum(_flagdict[flagname] for flagname in flaglist)
# Canonical flag-name order used when reconstructing names from a bitmask.
_flagnames = ['C_CONTIGUOUS', 'F_CONTIGUOUS', 'ALIGNED', 'WRITEABLE',
              'OWNDATA', 'UPDATEIFCOPY', 'WRITEBACKIFCOPY']
def _flags_fromnum(num):
    """Return the list of flag names whose bits are set in *num*."""
    return [flagname for flagname in _flagnames if num & _flagdict[flagname]]
class _ndptr(_ndptr_base):
    """Base class for the type objects generated by ndpointer().

    Subclasses carry _dtype_/_shape_/_ndim_/_flags_ class attributes and
    validate ndarray arguments in from_param when used as ctypes argtypes.
    """
    def _check_retval_(self):
        """This method is called when this class is used as the .restype
        attribute for a shared-library function. It constructs a numpy
        array from a void pointer."""
        return array(self)
    @property
    def __array_interface__(self):
        # Exposes the pointed-to memory as a scalar (shape from _shape_)
        # so numpy can wrap it without copying.
        return {'descr': self._dtype_.descr,
                '__ref': self,
                'strides': None,
                'shape': self._shape_,
                'version': 3,
                'typestr': self._dtype_.descr[0][1],
                'data': (self.value, False),
                }
    @classmethod
    def from_param(cls, obj):
        # Called by ctypes on every call: verify dtype/ndim/shape/flags
        # against the restrictions baked into this generated class.
        if not isinstance(obj, ndarray):
            raise TypeError("argument must be an ndarray")
        if cls._dtype_ is not None \
               and obj.dtype != cls._dtype_:
            raise TypeError("array must have data type %s" % cls._dtype_)
        if cls._ndim_ is not None \
               and obj.ndim != cls._ndim_:
            raise TypeError("array must have %d dimension(s)" % cls._ndim_)
        if cls._shape_ is not None \
               and obj.shape != cls._shape_:
            raise TypeError("array must have shape %s" % str(cls._shape_))
        if cls._flags_ is not None \
               and ((obj.flags.num & cls._flags_) != cls._flags_):
            raise TypeError("array must have flags %s" %
                    _flags_fromnum(cls._flags_))
        return obj.ctypes
# Factory for an array-checking class with from_param defined for
# use with ctypes argtypes mechanism
# Cache of type objects generated by ndpointer(), so repeated calls with
# the same restrictions return the same class.
_pointer_type_cache = {}
def ndpointer(dtype=None, ndim=None, shape=None, flags=None):
    """
    Array-checking restype/argtypes.

    An ndpointer instance is used to describe an ndarray in restypes
    and argtypes specifications.  Several restrictions can be specified,
    which are verified upon calling the ctypes function: data type,
    number of dimensions, shape and flags.  If a given array does not
    satisfy the specified restrictions, a ``TypeError`` is raised.

    Parameters
    ----------
    dtype : data-type, optional
        Array data-type.
    ndim : int, optional
        Number of array dimensions.
    shape : tuple of ints, optional
        Array shape.
    flags : str or tuple of str
        Array flags; may be one or more of:
        - C_CONTIGUOUS / C / CONTIGUOUS
        - F_CONTIGUOUS / F / FORTRAN
        - OWNDATA / O
        - WRITEABLE / W
        - ALIGNED / A
        - WRITEBACKIFCOPY / X
        - UPDATEIFCOPY / U

    Returns
    -------
    klass : ndpointer type object
        A type object, which is an ``_ndptr`` instance containing
        dtype, ndim, shape and flags information.

    Raises
    ------
    TypeError
        If a given array does not satisfy the specified restrictions.

    Examples
    --------
    >>> clib.somefunc.argtypes = [np.ctypeslib.ndpointer(dtype=np.float64,
    ...                                                  ndim=1,
    ...                                                  flags='C_CONTIGUOUS')]
    ... #doctest: +SKIP
    >>> clib.somefunc(np.array([1, 2, 3], dtype=np.float64))
    ... #doctest: +SKIP
    """
    if dtype is not None:
        dtype = _dtype(dtype)
    # Normalize the flags argument to a numeric bitmask plus name list.
    num = None
    if flags is not None:
        if isinstance(flags, str):
            flags = flags.split(',')
        elif isinstance(flags, (int, integer)):
            num = flags
            flags = _flags_fromnum(num)
        elif isinstance(flags, flagsobj):
            num = flags.num
            flags = _flags_fromnum(num)
        if num is None:
            try:
                flags = [x.strip().upper() for x in flags]
            except Exception:
                raise TypeError("invalid flags specification")
            num = _num_fromflags(flags)
    # Normalize *shape* to a tuple BEFORE the cache lookup.  Previously the
    # lookup used the raw shape (so a list shape raised TypeError from the
    # unhashable key) and normalization happened only afterwards.
    if shape is not None:
        try:
            shape = tuple(shape)
        except TypeError:
            # Single integer -> 1-tuple.
            shape = (shape,)
    cache_key = (dtype, ndim, shape, num)
    try:
        return _pointer_type_cache[cache_key]
    except KeyError:
        pass
    # Build a descriptive class name from the restrictions.
    if dtype is None:
        name = 'any'
    elif dtype.names:
        # Structured dtypes have no short string form; use the object id.
        name = str(id(dtype))
    else:
        name = dtype.str
    if ndim is not None:
        name += "_%dd" % ndim
    if shape is not None:
        name += "_"+"x".join(str(x) for x in shape)
    if flags is not None:
        name += "_"+"_".join(flags)
    else:
        flags = []
    klass = type("ndpointer_%s"%name, (_ndptr,),
                 {"_dtype_": dtype,
                  "_shape_" : shape,
                  "_ndim_" : ndim,
                  "_flags_" : num})
    # Bug fix: the cache was previously stored under (dtype, shape, ndim, num)
    # but looked up under (dtype, ndim, shape, num), so cached classes were
    # never reused whenever ndim != shape.  Store under the lookup key.
    _pointer_type_cache[cache_key] = klass
    return klass
if ctypes is not None:
ct = ctypes
################################################################
# simple types
# maps the numpy typecodes like '<f8' to simple ctypes types like
# c_double. Filled in by prep_simple.
_typecodes = {}
def prep_simple(simple_type, dtype):
"""Given a ctypes simple type, construct and attach an
__array_interface__ property to it if it does not yet have one.
"""
try: simple_type.__array_interface__
except AttributeError: pass
else: return
typestr = _dtype(dtype).str
_typecodes[typestr] = simple_type
def __array_interface__(self):
return {'descr': [('', typestr)],
'__ref': self,
'strides': None,
'shape': (),
'version': 3,
'typestr': typestr,
'data': (ct.addressof(self), False),
}
simple_type.__array_interface__ = property(__array_interface__)
simple_types = [
((ct.c_byte, ct.c_short, ct.c_int, ct.c_long, ct.c_longlong), "i"),
((ct.c_ubyte, ct.c_ushort, ct.c_uint, ct.c_ulong, ct.c_ulonglong), "u"),
((ct.c_float, ct.c_double), "f"),
]
# Prep that numerical ctypes types:
for types, code in simple_types:
for tp in types:
prep_simple(tp, "%c%d" % (code, ct.sizeof(tp)))
################################################################
# array types
_ARRAY_TYPE = type(ct.c_int * 1)
def prep_array(array_type):
    """Attach an ``__array_interface__`` property to a ctypes array type.

    A no-op when *array_type* already exposes the interface.
    """
    if hasattr(array_type, '__array_interface__'):
        return

    # Walk nested array types to collect the dimensions, stopping at the
    # underlying element type.
    dims = []
    element = array_type
    while type(element) is _ARRAY_TYPE:
        dims.append(element._length_)
        element = element._type_
    dims = tuple(dims)

    # Borrow descr/typestr from a throwaway instance of the element type.
    element_ai = element().__array_interface__
    descr = element_ai['descr']
    typestr = element_ai['typestr']

    def _interface(self):
        return {
            'descr': descr,
            '__ref': self,
            'strides': None,
            'shape': dims,
            'version': 3,
            'typestr': typestr,
            'data': (ct.addressof(self), False),
        }

    array_type.__array_interface__ = property(_interface)
def prep_pointer(pointer_obj, shape):
    """Attach an ``__array_interface__`` dict to a ctypes pointer instance.

    A no-op when *pointer_obj* already carries the interface.  The caller
    supplies *shape* because a bare pointer has no length of its own.
    """
    if hasattr(pointer_obj, '__array_interface__'):
        return

    target = pointer_obj.contents
    target_dtype = _dtype(type(target))

    # Unlike the simple/array preps, this annotates the *instance* rather
    # than the type, since the shape can differ per pointer.
    pointer_obj.__array_interface__ = {
        'version': 3,
        'typestr': target_dtype.str,
        'data': (ct.addressof(target), False),
        'shape': shape,
    }
################################################################
# public functions
def as_array(obj, shape=None):
    """Create a numpy array from a ctypes array or a ctypes POINTER.

    The numpy array shares the memory with the ctypes object.

    The shape parameter must be given if converting from a ctypes POINTER.
    The shape parameter is ignored if converting from a ctypes array.
    """
    tp = type(obj)
    try: tp.__array_interface__
    except AttributeError:
        if hasattr(obj, 'contents'):
            # POINTER instance: annotate the instance with the given shape.
            prep_pointer(obj, shape)
        else:
            # ctypes array: annotate the type itself (shape is implicit).
            prep_array(tp)
    return array(obj, copy=False)
def as_ctypes(obj):
    """Create and return a ctypes object from a numpy array.

    Anything that exposes the ``__array_interface__`` is accepted.  The
    returned object shares memory with *obj* and keeps a reference to the
    interface dict so the buffer stays alive.
    """
    ai = obj.__array_interface__

    # Only contiguous, version-3, writable buffers can be wrapped.
    if ai["strides"]:
        raise TypeError("strided arrays not supported")
    if ai["version"] != 3:
        raise TypeError("only __array_interface__ version 3 supported")
    addr, readonly = ai["data"]
    if readonly:
        raise TypeError("readonly arrays unsupported")

    # Build the nested ctypes array type from the innermost dimension out.
    ctype = _typecodes[ai["typestr"]]
    for dim in reversed(ai["shape"]):
        ctype = ctype * dim

    wrapped = ctype.from_address(addr)
    # Pin the interface dict (and thus the source buffer) to the result.
    wrapped.__keep = ai
    return wrapped
| |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Main entry point into the assignment service."""
import abc
import six
from keystone import clean
from keystone.common import cache
from keystone.common import dependency
from keystone.common import manager
from keystone import config
from keystone import exception
from keystone import notifications
from keystone.openstack.common import log as logging
CONF = config.CONF
LOG = logging.getLogger(__name__)

# Predicate deciding whether a given assignment ref should be cached.
SHOULD_CACHE = cache.should_cache_fn('assignment')

# Reference definition of the v2-compatible default domain.
DEFAULT_DOMAIN = {'description':
                  (u'Owns users and tenants (i.e. projects)'
                   ' available on Identity API v2.'),
                  'enabled': True,
                  'id': CONF.identity.default_domain_id,
                  'name': u'Default'}
@dependency.provider('assignment_api')
@dependency.requires('identity_api')
class Manager(manager.Manager):
    """Default pivot point for the Assignment backend.

    See :mod:`keystone.common.manager.Manager` for more details on how this
    dynamically calls the backend.

    assignment.Manager() and identity.Manager() have a circular dependency.
    The late import works around this.  The if block prevents creation of the
    api object by both managers.
    """

    def __init__(self):
        assignment_driver = CONF.assignment.driver

        if assignment_driver is None:
            # No explicit assignment driver configured: let the identity
            # driver pick the assignment backend it pairs with by default.
            identity_driver = dependency.REGISTRY['identity_api'].driver
            assignment_driver = identity_driver.default_assignment_driver()

        super(Manager, self).__init__(assignment_driver)

    @notifications.created('project')
    def create_project(self, tenant_id, tenant_ref):
        """Create a project after normalizing 'enabled'/'description'."""
        tenant = tenant_ref.copy()
        tenant.setdefault('enabled', True)
        tenant['enabled'] = clean.project_enabled(tenant['enabled'])
        tenant.setdefault('description', '')
        # Pass the sanitized copy to the driver -- previously the raw
        # tenant_ref was passed, silently discarding the defaults above.
        ret = self.driver.create_project(tenant_id, tenant)
        if SHOULD_CACHE(ret):
            self.get_project.set(ret, self, tenant_id)
            self.get_project_by_name.set(ret, self, ret['name'],
                                         ret['domain_id'])
        return ret

    @notifications.updated('project')
    def update_project(self, tenant_id, tenant_ref):
        """Update a project and invalidate affected cache entries."""
        tenant = tenant_ref.copy()
        if 'enabled' in tenant:
            tenant['enabled'] = clean.project_enabled(tenant['enabled'])
        # Pass the sanitized copy to the driver (see create_project).
        ret = self.driver.update_project(tenant_id, tenant)
        self.get_project.invalidate(self, tenant_id)
        self.get_project_by_name.invalidate(self, ret['name'],
                                            ret['domain_id'])
        return ret

    @notifications.deleted('project')
    def delete_project(self, tenant_id):
        """Delete a project and invalidate affected cache entries."""
        # Look the project up first; its name is needed to invalidate the
        # by-name cache entry after deletion.
        project = self.driver.get_project(tenant_id)
        ret = self.driver.delete_project(tenant_id)
        self.get_project.invalidate(self, tenant_id)
        self.get_project_by_name.invalidate(self, project['name'],
                                            project['domain_id'])
        return ret

    def get_roles_for_user_and_project(self, user_id, tenant_id):
        """Get the roles associated with a user within given project.

        This includes roles directly assigned to the user on the
        project, as well as those by virtue of group membership. If
        the OS-INHERIT extension is enabled, then this will also
        include roles inherited from the domain.

        :returns: a list of role ids.
        :raises: keystone.exception.UserNotFound,
                 keystone.exception.ProjectNotFound

        """
        def _get_group_project_roles(user_id, project_ref):
            role_list = []
            group_refs = self.identity_api.list_groups_for_user(user_id)
            for x in group_refs:
                try:
                    metadata_ref = self._get_metadata(
                        group_id=x['id'], tenant_id=project_ref['id'])
                    role_list += self._roles_from_role_dicts(
                        metadata_ref.get('roles', {}), False)
                except exception.MetadataNotFound:
                    # no group grant, skip
                    pass

                if CONF.os_inherit.enabled:
                    # Now get any inherited group roles for the owning domain
                    try:
                        metadata_ref = self._get_metadata(
                            group_id=x['id'],
                            domain_id=project_ref['domain_id'])
                        role_list += self._roles_from_role_dicts(
                            metadata_ref.get('roles', {}), True)
                    except (exception.MetadataNotFound,
                            exception.NotImplemented):
                        pass

            return role_list

        def _get_user_project_roles(user_id, project_ref):
            role_list = []
            try:
                metadata_ref = self._get_metadata(user_id=user_id,
                                                  tenant_id=project_ref['id'])
                role_list = self._roles_from_role_dicts(
                    metadata_ref.get('roles', {}), False)
            except exception.MetadataNotFound:
                pass

            if CONF.os_inherit.enabled:
                # Now get any inherited roles for the owning domain
                try:
                    metadata_ref = self._get_metadata(
                        user_id=user_id, domain_id=project_ref['domain_id'])
                    role_list += self._roles_from_role_dicts(
                        metadata_ref.get('roles', {}), True)
                except (exception.MetadataNotFound, exception.NotImplemented):
                    pass

            return role_list

        project_ref = self.get_project(tenant_id)
        user_role_list = _get_user_project_roles(user_id, project_ref)
        group_role_list = _get_group_project_roles(user_id, project_ref)

        # Use set() to process the list to remove any duplicates
        return list(set(user_role_list + group_role_list))

    def get_roles_for_user_and_domain(self, user_id, domain_id):
        """Get the roles associated with a user within given domain.

        :returns: a list of role ids.
        :raises: keystone.exception.UserNotFound,
                 keystone.exception.DomainNotFound

        """
        def _get_group_domain_roles(user_id, domain_id):
            role_list = []
            group_refs = self.identity_api.list_groups_for_user(user_id)
            for x in group_refs:
                try:
                    metadata_ref = self._get_metadata(group_id=x['id'],
                                                      domain_id=domain_id)
                    role_list += self._roles_from_role_dicts(
                        metadata_ref.get('roles', {}), False)
                except (exception.MetadataNotFound, exception.NotImplemented):
                    # MetadataNotFound implies no group grant, so skip.
                    # Ignore NotImplemented since not all backends support
                    # domains.
                    pass
            return role_list

        def _get_user_domain_roles(user_id, domain_id):
            metadata_ref = {}
            try:
                metadata_ref = self._get_metadata(user_id=user_id,
                                                  domain_id=domain_id)
            except (exception.MetadataNotFound, exception.NotImplemented):
                # MetadataNotFound implies no user grants.
                # Ignore NotImplemented since not all backends support
                # domains
                pass
            return self._roles_from_role_dicts(
                metadata_ref.get('roles', {}), False)

        # Validate the domain exists before collecting roles.
        self.get_domain(domain_id)
        user_role_list = _get_user_domain_roles(user_id, domain_id)
        group_role_list = _get_group_domain_roles(user_id, domain_id)

        # Use set() to process the list to remove any duplicates
        return list(set(user_role_list + group_role_list))

    def add_user_to_project(self, tenant_id, user_id):
        """Add user to a tenant by creating a default role relationship.

        :raises: keystone.exception.ProjectNotFound,
                 keystone.exception.UserNotFound

        """
        try:
            self.driver.add_role_to_user_and_project(
                user_id,
                tenant_id,
                CONF.member_role_id)
        except exception.RoleNotFound:
            # Lazily create the default member role on first use; use
            # lazy %-style args so the message is only rendered if logged.
            LOG.info(_("Creating the default role %s "
                       "because it does not exist."),
                     CONF.member_role_id)
            role = {'id': CONF.member_role_id,
                    'name': CONF.member_role_name}
            self.driver.create_role(CONF.member_role_id, role)

            # Now that the default role exists, the add should succeed.
            self.driver.add_role_to_user_and_project(
                user_id,
                tenant_id,
                CONF.member_role_id)

    def remove_user_from_project(self, tenant_id, user_id):
        """Remove user from a tenant

        :raises: keystone.exception.ProjectNotFound,
                 keystone.exception.UserNotFound

        """
        roles = self.get_roles_for_user_and_project(user_id, tenant_id)
        if not roles:
            raise exception.NotFound(tenant_id)
        for role_id in roles:
            self.driver.remove_role_from_user_and_project(user_id, tenant_id,
                                                          role_id)

    def list_projects_for_user(self, user_id):
        # NOTE(henry-nash): In order to get a complete list of user projects,
        # the driver will need to look at group assignments. To avoid cross
        # calling between the assignment and identity driver we get the group
        # list here and pass it in. The rest of the detailed logic of listing
        # projects for a user is pushed down into the driver to enable
        # optimization with the various backend technologies (SQL, LDAP etc.).
        group_ids = [x['id'] for
                     x in self.identity_api.list_groups_for_user(user_id)]
        return self.driver.list_projects_for_user(user_id, group_ids)

    @cache.on_arguments(should_cache_fn=SHOULD_CACHE,
                        expiration_time=CONF.assignment.cache_time)
    def get_domain(self, domain_id):
        return self.driver.get_domain(domain_id)

    @cache.on_arguments(should_cache_fn=SHOULD_CACHE,
                        expiration_time=CONF.assignment.cache_time)
    def get_domain_by_name(self, domain_name):
        return self.driver.get_domain_by_name(domain_name)

    def create_domain(self, domain_id, domain):
        ret = self.driver.create_domain(domain_id, domain)
        if SHOULD_CACHE(ret):
            self.get_domain.set(ret, self, domain_id)
            self.get_domain_by_name.set(ret, self, ret['name'])
        return ret

    def update_domain(self, domain_id, domain):
        ret = self.driver.update_domain(domain_id, domain)
        self.get_domain.invalidate(self, domain_id)
        self.get_domain_by_name.invalidate(self, ret['name'])
        return ret

    def delete_domain(self, domain_id):
        # Look the domain up first; its name is needed to invalidate the
        # by-name cache entry after deletion.
        domain = self.driver.get_domain(domain_id)
        self.driver.delete_domain(domain_id)
        self.get_domain.invalidate(self, domain_id)
        self.get_domain_by_name.invalidate(self, domain['name'])

    @cache.on_arguments(should_cache_fn=SHOULD_CACHE,
                        expiration_time=CONF.assignment.cache_time)
    def get_project(self, project_id):
        return self.driver.get_project(project_id)

    @cache.on_arguments(should_cache_fn=SHOULD_CACHE,
                        expiration_time=CONF.assignment.cache_time)
    def get_project_by_name(self, tenant_name, domain_id):
        return self.driver.get_project_by_name(tenant_name, domain_id)

    @cache.on_arguments(should_cache_fn=SHOULD_CACHE,
                        expiration_time=CONF.assignment.cache_time)
    def get_role(self, role_id):
        return self.driver.get_role(role_id)

    @notifications.created('role')
    def create_role(self, role_id, role):
        ret = self.driver.create_role(role_id, role)
        if SHOULD_CACHE(ret):
            self.get_role.set(ret, self, role_id)
        return ret

    @notifications.updated('role')
    def update_role(self, role_id, role):
        ret = self.driver.update_role(role_id, role)
        self.get_role.invalidate(self, role_id)
        return ret

    @notifications.deleted('role')
    def delete_role(self, role_id):
        self.driver.delete_role(role_id)
        self.get_role.invalidate(self, role_id)

    def list_role_assignments_for_role(self, role_id=None):
        # NOTE(henry-nash): Currently the efficiency of the key driver
        # implementation (SQL) of list_role_assignments is severely hampered by
        # the existence of the multiple grant tables - hence there is little
        # advantage in pushing the logic of this method down into the driver.
        # Once the single assignment table is implemented, then this situation
        # will be different, and this method should have its own driver
        # implementation.
        return [r for r in self.driver.list_role_assignments()
                if r['role_id'] == role_id]
@six.add_metaclass(abc.ABCMeta)
class Driver(object):
    """Interface description for an assignment driver."""

    def _role_to_dict(self, role_id, inherited):
        """Return the role-dict form stored in metadata 'roles' lists."""
        role_dict = {'id': role_id}
        if inherited:
            role_dict['inherited_to'] = 'projects'
        return role_dict

    def _roles_from_role_dicts(self, dict_list, inherited):
        """Extract role IDs from role dicts, filtered by inheritance flag."""
        role_list = []
        for d in dict_list:
            if ((not d.get('inherited_to') and not inherited) or
                    (d.get('inherited_to') == 'projects' and inherited)):
                role_list.append(d['id'])
        return role_list

    def _add_role_to_role_dicts(self, role_id, inherited, dict_list,
                                allow_existing=True):
        # There is a difference in error semantics when trying to
        # assign a role that already exists between the coded v2 and v3
        # API calls.  v2 will error if the assignment already exists,
        # while v3 is silent. Setting the 'allow_existing' parameter
        # appropriately lets this call be used for both.
        role_set = set([frozenset(r.items()) for r in dict_list])
        key = frozenset(self._role_to_dict(role_id, inherited).items())
        if not allow_existing and key in role_set:
            raise KeyError
        role_set.add(key)
        return [dict(r) for r in role_set]

    def _remove_role_from_role_dicts(self, role_id, inherited, dict_list):
        """Remove one role dict; raises KeyError if it is not present."""
        role_set = set([frozenset(r.items()) for r in dict_list])
        role_set.remove(frozenset(self._role_to_dict(role_id,
                                                     inherited).items()))
        return [dict(r) for r in role_set]

    @abc.abstractmethod
    def get_project_by_name(self, tenant_name, domain_id):
        """Get a tenant by name.

        :returns: tenant_ref
        :raises: keystone.exception.ProjectNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def list_user_ids_for_project(self, tenant_id):
        """Lists all user IDs with a role assignment in the specified project.

        :returns: a list of user_ids or an empty set.
        :raises: keystone.exception.ProjectNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def add_role_to_user_and_project(self, user_id, tenant_id, role_id):
        """Add a role to a user within given tenant.

        :raises: keystone.exception.UserNotFound,
                 keystone.exception.ProjectNotFound,
                 keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def remove_role_from_user_and_project(self, user_id, tenant_id, role_id):
        """Remove a role from a user within given tenant.

        :raises: keystone.exception.UserNotFound,
                 keystone.exception.ProjectNotFound,
                 keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    # assignment/grant crud

    @abc.abstractmethod
    def create_grant(self, role_id, user_id=None, group_id=None,
                     domain_id=None, project_id=None,
                     inherited_to_projects=False):
        """Creates a new assignment/grant.

        If the assignment is to a domain, then optionally it may be
        specified as inherited to owned projects (this requires
        the OS-INHERIT extension to be enabled).

        :raises: keystone.exception.UserNotFound,
                 keystone.exception.GroupNotFound,
                 keystone.exception.ProjectNotFound,
                 keystone.exception.DomainNotFound,
                 keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def list_grants(self, user_id=None, group_id=None,
                    domain_id=None, project_id=None,
                    inherited_to_projects=False):
        """Lists assignments/grants.

        :raises: keystone.exception.UserNotFound,
                 keystone.exception.GroupNotFound,
                 keystone.exception.ProjectNotFound,
                 keystone.exception.DomainNotFound,
                 keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def get_grant(self, role_id, user_id=None, group_id=None,
                  domain_id=None, project_id=None,
                  inherited_to_projects=False):
        """Gets an assignment/grant.

        :raises: keystone.exception.UserNotFound,
                 keystone.exception.GroupNotFound,
                 keystone.exception.ProjectNotFound,
                 keystone.exception.DomainNotFound,
                 keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def delete_grant(self, role_id, user_id=None, group_id=None,
                     domain_id=None, project_id=None,
                     inherited_to_projects=False):
        """Deletes an assignment/grant.

        :raises: keystone.exception.UserNotFound,
                 keystone.exception.GroupNotFound,
                 keystone.exception.ProjectNotFound,
                 keystone.exception.DomainNotFound,
                 keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def list_role_assignments(self):
        raise exception.NotImplemented()

    # domain crud
    @abc.abstractmethod
    def create_domain(self, domain_id, domain):
        """Creates a new domain.

        :raises: keystone.exception.Conflict

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def list_domains(self):
        """List all domains in the system.

        :returns: a list of domain_refs or an empty list.

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def get_domain(self, domain_id):
        """Get a domain by ID.

        :returns: domain_ref
        :raises: keystone.exception.DomainNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def get_domain_by_name(self, domain_name):
        """Get a domain by name.

        :returns: domain_ref
        :raises: keystone.exception.DomainNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def update_domain(self, domain_id, domain):
        """Updates an existing domain.

        :raises: keystone.exception.DomainNotFound,
                 keystone.exception.Conflict

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def delete_domain(self, domain_id):
        """Deletes an existing domain.

        :raises: keystone.exception.DomainNotFound

        """
        raise exception.NotImplemented()

    # project crud
    @abc.abstractmethod
    def create_project(self, project_id, project):
        """Creates a new project.

        :raises: keystone.exception.Conflict

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def list_projects(self, domain_id=None):
        """List all projects in the system.

        :returns: a list of project_refs or an empty list.

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def list_projects_for_user(self, user_id, group_ids):
        """List all projects associated with a given user.

        :param user_id: the user in question
        :param group_ids: the groups this user is a member of.  This list is
                          built in the Manager, so that the driver itself
                          does not have to call across to identity.
        :returns: a list of project_refs or an empty list.

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def get_project(self, project_id):
        """Get a project by ID.

        :returns: project_ref
        :raises: keystone.exception.ProjectNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def update_project(self, project_id, project):
        """Updates an existing project.

        :raises: keystone.exception.ProjectNotFound,
                 keystone.exception.Conflict

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def delete_project(self, project_id):
        """Deletes an existing project.

        :raises: keystone.exception.ProjectNotFound

        """
        raise exception.NotImplemented()

    # role crud

    @abc.abstractmethod
    def create_role(self, role_id, role):
        """Creates a new role.

        :raises: keystone.exception.Conflict

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def list_roles(self):
        """List all roles in the system.

        :returns: a list of role_refs or an empty list.

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def get_role(self, role_id):
        """Get a role by ID.

        :returns: role_ref
        :raises: keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def update_role(self, role_id, role):
        """Updates an existing role.

        :raises: keystone.exception.RoleNotFound,
                 keystone.exception.Conflict

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def delete_role(self, role_id):
        """Deletes an existing role.

        :raises: keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    # TODO(ayoung): determine what else these two functions raise
    @abc.abstractmethod
    def delete_user(self, user_id):
        """Deletes all assignments for a user.

        :raises: keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    @abc.abstractmethod
    def delete_group(self, group_id):
        """Deletes all assignments for a group.

        :raises: keystone.exception.RoleNotFound

        """
        raise exception.NotImplemented()

    # domain management functions for backends that only allow a single
    # domain.  Currently, this is only LDAP, but might be used by PAM or other
    # backends as well.  This is used by both identity and assignment drivers.
    def _set_default_domain(self, ref):
        """If the domain ID has not been set, set it to the default."""
        if isinstance(ref, dict):
            if 'domain_id' not in ref:
                ref = ref.copy()
                ref['domain_id'] = CONF.identity.default_domain_id
            return ref
        elif isinstance(ref, list):
            return [self._set_default_domain(x) for x in ref]
        else:
            raise ValueError(_('Expected dict or list: %s') % type(ref))

    def _validate_default_domain(self, ref):
        """Validate that either the default domain or nothing is specified.

        Also removes the domain from the ref so that LDAP doesn't have to
        persist the attribute.

        """
        ref = ref.copy()
        domain_id = ref.pop('domain_id', CONF.identity.default_domain_id)
        self._validate_default_domain_id(domain_id)
        return ref

    def _validate_default_domain_id(self, domain_id):
        """Validate that the domain ID belongs to the default domain."""
        if domain_id != CONF.identity.default_domain_id:
            raise exception.DomainNotFound(domain_id=domain_id)
| |
"""Test report state."""
from unittest.mock import patch
from homeassistant import core
from homeassistant.components.alexa import state_report
from . import DEFAULT_CONFIG, TEST_URL
async def test_report_state(hass, aioclient_mock):
    """Test proactive state reports."""
    aioclient_mock.post(TEST_URL, text="", status=202)

    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )

    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)

    hass.states.async_set(
        "binary_sensor.test_contact",
        "off",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )

    # Flush the state-changed listener so the report is posted.
    await hass.async_block_till_done()

    assert len(aioclient_mock.mock_calls) == 1
    payload = aioclient_mock.mock_calls[0][2]

    header = payload["event"]["header"]
    assert header["namespace"] == "Alexa"
    assert header["name"] == "ChangeReport"

    properties = payload["event"]["payload"]["change"]["properties"]
    assert properties[0]["value"] == "NOT_DETECTED"
    assert payload["event"]["endpoint"]["endpointId"] == "binary_sensor#test_contact"
async def test_report_state_instance(hass, aioclient_mock):
    """Test proactive state reports with instance."""
    aioclient_mock.post(TEST_URL, text="", status=202)

    hass.states.async_set(
        "fan.test_fan",
        "off",
        {
            "friendly_name": "Test fan",
            "supported_features": 15,
            "oscillating": False,
            "preset_mode": None,
            "preset_modes": ["auto", "smart"],
            "percentage": None,
        },
    )

    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)

    hass.states.async_set(
        "fan.test_fan",
        "on",
        {
            "friendly_name": "Test fan",
            "supported_features": 15,
            "oscillating": True,
            "preset_mode": "smart",
            "preset_modes": ["auto", "smart"],
            "percentage": 90,
        },
    )

    # Flush the state-changed listener so the report is posted.
    await hass.async_block_till_done()

    assert len(aioclient_mock.mock_calls) == 1
    payload = aioclient_mock.mock_calls[0][2]

    header = payload["event"]["header"]
    assert header["namespace"] == "Alexa"
    assert header["name"] == "ChangeReport"

    # Expected capability reports keyed by property name:
    # name -> (value, instance, namespace)
    expected = {
        "toggleState": ("ON", "fan.oscillating", "Alexa.ToggleController"),
        "mode": ("preset_mode.smart", "fan.preset_mode", "Alexa.ModeController"),
        "rangeValue": (90, "fan.percentage", "Alexa.RangeController"),
    }
    seen = 0
    for report in payload["event"]["payload"]["change"]["properties"]:
        if report["name"] in expected:
            value, instance, namespace = expected[report["name"]]
            assert report["value"] == value
            assert report["instance"] == instance
            assert report["namespace"] == namespace
            seen += 1
    # All three instanced capabilities must have been reported.
    assert seen == 3

    assert payload["event"]["endpoint"]["endpointId"] == "fan#test_fan"
async def test_send_add_or_update_message(hass, aioclient_mock):
    """Test sending an AddOrUpdateReport message."""
    aioclient_mock.post(TEST_URL, text="")

    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )

    await state_report.async_send_add_or_update_message(
        hass, DEFAULT_CONFIG, ["binary_sensor.test_contact", "zwave.bla"]
    )

    assert len(aioclient_mock.mock_calls) == 1
    payload = aioclient_mock.mock_calls[0][2]

    header = payload["event"]["header"]
    assert header["namespace"] == "Alexa.Discovery"
    assert header["name"] == "AddOrUpdateReport"

    # The unknown "zwave.bla" entity must be filtered out of the report.
    endpoints = payload["event"]["payload"]["endpoints"]
    assert len(endpoints) == 1
    assert endpoints[0]["endpointId"] == "binary_sensor#test_contact"
async def test_send_delete_message(hass, aioclient_mock):
    """Test sending a DeleteReport message."""
    aioclient_mock.post(TEST_URL, json={"data": "is irrelevant"})

    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )

    await state_report.async_send_delete_message(
        hass, DEFAULT_CONFIG, ["binary_sensor.test_contact", "zwave.bla"]
    )

    assert len(aioclient_mock.mock_calls) == 1
    payload = aioclient_mock.mock_calls[0][2]

    header = payload["event"]["header"]
    assert header["namespace"] == "Alexa.Discovery"
    assert header["name"] == "DeleteReport"

    # The unknown "zwave.bla" entity must be filtered out of the report.
    endpoints = payload["event"]["payload"]["endpoints"]
    assert len(endpoints) == 1
    assert endpoints[0]["endpointId"] == "binary_sensor#test_contact"
async def test_doorbell_event(hass, aioclient_mock):
    """Test doorbell press reports."""
    aioclient_mock.post(TEST_URL, text="", status=202)

    hass.states.async_set(
        "binary_sensor.test_doorbell",
        "off",
        {"friendly_name": "Test Doorbell Sensor", "device_class": "occupancy"},
    )

    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)

    hass.states.async_set(
        "binary_sensor.test_doorbell",
        "on",
        {"friendly_name": "Test Doorbell Sensor", "device_class": "occupancy"},
    )

    # Flush the state-changed listener so the event is posted.
    await hass.async_block_till_done()

    assert len(aioclient_mock.mock_calls) == 1
    payload = aioclient_mock.mock_calls[0][2]

    header = payload["event"]["header"]
    assert header["namespace"] == "Alexa.DoorbellEventSource"
    assert header["name"] == "DoorbellPress"
    assert payload["event"]["payload"]["cause"]["type"] == "PHYSICAL_INTERACTION"
    assert payload["event"]["endpoint"]["endpointId"] == "binary_sensor#test_doorbell"

    # A fresh off -> on transition must raise a second, separate press event.
    hass.states.async_set(
        "binary_sensor.test_doorbell",
        "off",
        {"friendly_name": "Test Doorbell Sensor", "device_class": "occupancy"},
    )
    hass.states.async_set(
        "binary_sensor.test_doorbell",
        "on",
        {"friendly_name": "Test Doorbell Sensor", "device_class": "occupancy"},
    )
    await hass.async_block_till_done()

    assert len(aioclient_mock.mock_calls) == 2
async def test_proactive_mode_filter_states(hass, aioclient_mock):
    """Test all the cases that filter states."""
    aioclient_mock.post(TEST_URL, text="", status=202)
    await state_report.async_enable_proactive_mode(hass, DEFAULT_CONFIG)
    # First state should report
    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )
    await hass.async_block_till_done()
    assert len(aioclient_mock.mock_calls) == 1
    # Reset the mock so each case below starts from zero recorded calls.
    aioclient_mock.clear_requests()
    # Second one shouldn't (state unchanged, so nothing to report)
    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )
    assert len(aioclient_mock.mock_calls) == 0
    # hass not running should not report
    hass.states.async_set(
        "binary_sensor.test_contact",
        "off",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )
    with patch.object(hass, "state", core.CoreState.stopping):
        # NOTE(review): the double block_till_done presumably lets the
        # listener fire and then the report task settle -- confirm.
        await hass.async_block_till_done()
        await hass.async_block_till_done()
    assert len(aioclient_mock.mock_calls) == 0
    # unsupported entity (no adapter registered) should not report
    hass.states.async_set(
        "binary_sensor.test_contact",
        "on",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )
    with patch.dict(
        "homeassistant.components.alexa.state_report.ENTITY_ADAPTERS", {}, clear=True
    ):
        await hass.async_block_till_done()
        await hass.async_block_till_done()
    assert len(aioclient_mock.mock_calls) == 0
    # Not exposed by config should not report
    hass.states.async_set(
        "binary_sensor.test_contact",
        "off",
        {"friendly_name": "Test Contact Sensor", "device_class": "door"},
    )
    with patch.object(DEFAULT_CONFIG, "should_expose", return_value=False):
        await hass.async_block_till_done()
        await hass.async_block_till_done()
    assert len(aioclient_mock.mock_calls) == 0
    # Removing an entity should not trigger a change report
    hass.states.async_remove("binary_sensor.test_contact")
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert len(aioclient_mock.mock_calls) == 0
    # If serializes to same properties, it should report only once
    # (the second, identical serialization is deduplicated).
    aioclient_mock.post(TEST_URL, text="", status=202)
    with patch(
        "homeassistant.components.alexa.entities.AlexaEntity.serialize_properties",
        return_value=[{"same": "info"}],
    ):
        hass.states.async_set(
            "binary_sensor.same_serialize",
            "off",
            {"friendly_name": "Test Contact Sensor", "device_class": "door"},
        )
        await hass.async_block_till_done()
        hass.states.async_set(
            "binary_sensor.same_serialize",
            "off",
            {"friendly_name": "Test Contact Sensor", "device_class": "door"},
        )
        await hass.async_block_till_done()
    assert len(aioclient_mock.mock_calls) == 1
| |
from games_puzzles_algorithms.puzzles.sliding_tile_puzzle import SlidingTilePuzzle
from games_puzzles_algorithms.puzzles.solvable_sliding_tile_puzzle import SolvableSlidingTilePuzzle
import unittest
import random
class TestSlidingTilePuzzle(unittest.TestCase):
def test_init(self):
    """A freshly built square puzzle has consistent dimensions and state."""
    side = 3
    puzzle = SlidingTilePuzzle(side)
    self.assertEqual(puzzle.size1, side)
    self.assertEqual(puzzle.size2, side)
    self.assertEqual(puzzle.puzzle.size(), (side, side))
    # The tracked blank square must actually hold the blank (0) marker.
    self.assertEqual(puzzle.puzzle[puzzle.blank_index], 0)
    # The solved-tile counter stays within [0, number of squares].
    self.assertTrue(0 <= puzzle.num_correct_tiles <= side * side)
def test_seed_init(self):
    """Two puzzles built from the same seed are identical."""
    side, seed = 3, 1
    first = SlidingTilePuzzle(side, seed)
    second = SlidingTilePuzzle(side, seed)
    self.assertEqual(first.size1, second.size1)
    self.assertEqual(first.size2, second.size2)
    # Every square must agree between the two boards.
    for row in range(side):
        for col in range(side):
            self.assertEqual(first.puzzle[(row, col)],
                             second.puzzle[(row, col)])
    self.assertEqual(first.blank_index, second.blank_index)
    self.assertEqual(first.num_correct_tiles, second.num_correct_tiles)
def test_init_rectangle(self):
    """A non-square puzzle keeps its two dimensions distinct."""
    rows, cols = 2, 3
    puzzle = SlidingTilePuzzle(size1=rows, size2=cols)
    self.assertEqual(puzzle.size1, rows)
    self.assertEqual(puzzle.size2, cols)
    self.assertEqual(puzzle.puzzle.size(), (rows, cols))
    # The tracked blank square must actually hold the blank (0) marker.
    self.assertEqual(puzzle.puzzle[puzzle.blank_index], 0)
    # The solved-tile counter stays within [0, number of squares].
    self.assertTrue(0 <= puzzle.num_correct_tiles <= rows * cols)
def test_is_solved_false(self):
    """A scrambled puzzle (seed 10) is not reported as solved."""
    puzzle = SlidingTilePuzzle(2, 10)
    self.assertFalse(puzzle.is_solved())
def test_is_solved_true(self):
    """Applying the known solving sequence for seed 10 solves the puzzle."""
    puzzle = SlidingTilePuzzle(2, 10)
    for direction in ("down", "right", "up", "left", "down", "right"):
        puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS[direction])
    self.assertTrue(puzzle.is_solved())
def test_correct_num(self):
size = 3
puzzle = SlidingTilePuzzle(size)
number = 0
for i in range(size):
for j in range(size):
self.assertEqual(puzzle.correct_num((i, j)), number)
number += 1
def test_correct_num_rectangle(self):
size1 = 3
size2 = 2
puzzle = SlidingTilePuzzle(size1=size1, size2=size2)
number = 0
for i in range(size1):
for j in range(size2):
self.assertEqual(puzzle.correct_num((i, j)), number)
number += 1
def test_correct_tile(self):
size = 3
puzzle = SlidingTilePuzzle(size)
number = 0
for i in range(size):
for j in range(size):
self.assertEqual(puzzle.correct_tile(number), (i, j))
number += 1
def test_correct_tile_rectangle(self):
size1 = 2
size2 = 3
puzzle = SlidingTilePuzzle(size1=size1, size2=size2)
number = 0
for i in range(size1):
for j in range(size2):
self.assertEqual(puzzle.correct_tile(number), (i, j))
number += 1
def test_apply_move_up(self):
size = 2
seed = 5
puzzle = SlidingTilePuzzle(size, seed)
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS['up'])
self.assertEqual(puzzle.puzzle[(0, 0)], 3)
self.assertEqual(puzzle.puzzle[(0, 1)], 1)
self.assertEqual(puzzle.puzzle[(1, 0)], 0)
self.assertEqual(puzzle.puzzle[(1, 1)], 2)
self.assertEqual(puzzle.puzzle[puzzle.blank_index], 0)
def test_apply_move_down(self):
size = 2
seed = 10
puzzle = SlidingTilePuzzle(size, seed)
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS['down'])
self.assertEqual(puzzle.puzzle[(0, 0)], 3)
self.assertEqual(puzzle.puzzle[(0, 1)], 0)
self.assertEqual(puzzle.puzzle[(1, 0)], 1)
self.assertEqual(puzzle.puzzle[(1, 1)], 2)
self.assertEqual(puzzle.puzzle[puzzle.blank_index], 0)
def test_apply_move_left(self):
size = 2
seed = 5
puzzle = SlidingTilePuzzle(size, seed)
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS['left'])
self.assertEqual(puzzle.puzzle[(0, 0)], 1)
self.assertEqual(puzzle.puzzle[(0, 1)], 0)
self.assertEqual(puzzle.puzzle[(1, 0)], 3)
self.assertEqual(puzzle.puzzle[(1, 1)], 2)
self.assertEqual(puzzle.puzzle[puzzle.blank_index], 0)
def test_apply_move_right(self):
size = 2
seed = 10
puzzle = SlidingTilePuzzle(size, seed)
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS['right'])
self.assertEqual(puzzle.puzzle[(0, 0)], 3)
self.assertEqual(puzzle.puzzle[(0, 1)], 2)
self.assertEqual(puzzle.puzzle[(1, 0)], 0)
self.assertEqual(puzzle.puzzle[(1, 1)], 1)
self.assertEqual(puzzle.puzzle[puzzle.blank_index], 0)
def test_valid_moves_some(self):
size = 2
seed = 1
puzzle = SlidingTilePuzzle(size, seed)
moves = puzzle.valid_moves()
moves.sort()
expected_moves = [SlidingTilePuzzle.DIRECTIONS['up'],
SlidingTilePuzzle.DIRECTIONS['right']]
expected_moves.sort()
self.assertEqual(moves, expected_moves)
def test_valid_moves_all(self):
size = 4
seed = 2
puzzle = SlidingTilePuzzle(size, seed)
moves = puzzle.valid_moves()
moves.sort()
expected_moves = [SlidingTilePuzzle.DIRECTIONS['up'],
SlidingTilePuzzle.DIRECTIONS['left'],
SlidingTilePuzzle.DIRECTIONS['down'],
SlidingTilePuzzle.DIRECTIONS['right']]
expected_moves.sort()
self.assertEqual(moves, expected_moves)
def test_str_moves(self):
size = 4
seed = 2
puzzle = SlidingTilePuzzle(size, seed)
moves = puzzle.valid_moves()
string_moves = puzzle.str_moves(moves)
string_moves.sort()
expected_moves = ['up', 'down', 'left', 'right']
expected_moves.sort()
self.assertEqual(string_moves, expected_moves)
def test_copy(self):
size = 3
a = SlidingTilePuzzle(size)
b = a.copy()
self.assertEqual(a.size1, b.size1)
self.assertEqual(a.size2, b.size2)
for i in range(size):
for j in range(size):
self.assertEqual(a.puzzle[(i, j)], b.puzzle[(i, j)])
self.assertEqual(a.blank_index, b.blank_index)
self.assertEqual(a.num_correct_tiles, b.num_correct_tiles)
def test_value(self):
size = 2
seed = 1
puzzle = SlidingTilePuzzle(size, seed)
puzzle_value = puzzle.value()
expected_value = (3, 0, 2, 1)
self.assertEqual(puzzle_value, expected_value)
def test_equals_true(self):
size = 3
seed = 1
a = SlidingTilePuzzle(size, seed)
b = SlidingTilePuzzle(size, seed)
self.assertTrue(a.equals(b))
def test_equals_false(self):
size = 3
seed1 = 1
seed2 = 2
a = SlidingTilePuzzle(size, seed1)
b = SlidingTilePuzzle(size, seed2)
self.assertFalse(a.equals(b))
def test_misplaced_tiles_some(self):
size = 3
seed = 4
puzzle = SlidingTilePuzzle(size, seed)
num_tiles = puzzle.misplaced_tiles()
expected_num = 8
self.assertEqual(num_tiles, expected_num)
def test_misplaced_tiles_none(self):
size = 2
seed = 10
puzzle = SlidingTilePuzzle(size, seed)
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["down"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["right"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["up"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["left"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["down"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["right"])
num_tiles = puzzle.misplaced_tiles()
expected_num = 0
self.assertEqual(num_tiles, expected_num)
def test_misplaced_tiles_rectangle(self):
size1 = 2
size2 = 3
seed = 1
puzzle = SlidingTilePuzzle(size1, seed, size2)
num_tiles = puzzle.misplaced_tiles()
expected_num = 5
self.assertEqual(num_tiles, expected_num)
def test_manhattan_distance_incomplete(self):
size = 3
seed = 1
puzzle = SlidingTilePuzzle(size, seed)
distance = puzzle.manhattan_distance()
expected_distance = 3 + 2 + 2 + 1 + 1 + 3 + 3 + 3 + 2
self.assertEqual(distance, expected_distance)
def test_manhattan_distance_complete(self):
size = 2
seed = 10
puzzle = SlidingTilePuzzle(size, seed)
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["down"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["right"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["up"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["left"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["down"])
puzzle.apply_move(SlidingTilePuzzle.DIRECTIONS["right"])
num_tiles = puzzle.manhattan_distance()
expected_num = 0
self.assertEqual(num_tiles, expected_num)
def test_manhattan_distance_rectangle(self):
size1 = 2
size2 = 3
seed = 1
puzzle = SlidingTilePuzzle(size1, seed, size2)
num_tiles = puzzle.manhattan_distance()
expected_num = 1 + 2 + 2 + 2 + 0 + 1
self.assertEqual(num_tiles, expected_num)
def test_heuristic_misplaced(self):
size = 3
seed = 4
puzzle = SlidingTilePuzzle(size, seed)
num_tiles = puzzle.heuristic('misplaced tiles')
expected_num = 8
self.assertEqual(num_tiles, expected_num)
def test_heuristic_manhattan(self):
size = 3
seed = 1
puzzle = SlidingTilePuzzle(size, seed)
distance = puzzle.heuristic('manhattan distance')
expected_distance = 3 + 2 + 2 + 1 + 1 + 3 + 3 + 3 + 2
self.assertEqual(distance, expected_distance)
def test_str_single_digits(self):
size = 2
seed = 1
puzzle = SlidingTilePuzzle(size, seed)
expected_str = '\n 3 _ \n 2 1 \n'
self.assertEqual(str(puzzle), expected_str)
def test_str_double_digits(self):
size = 4
seed = 1
puzzle = SlidingTilePuzzle(size, seed)
expected_str = ('\n 2 10 _ 14 '
'\n 6 5 3 8 '
'\n 7 11 15 1 '
'\n 12 13 9 4 \n')
self.assertEqual(str(puzzle), expected_str)
def test_str_rectangle(self):
size1 = 2
size2 = 3
seed = 1
puzzle = SlidingTilePuzzle(size1, seed, size2)
expected_str = '\n 2 3 5 \n _ 4 1 \n'
self.assertEqual(str(puzzle), expected_str)
def reverse_move(move):
    """Return the move that undoes `move` (up<->down, left<->right)."""
    directions = SlidingTilePuzzle.DIRECTIONS
    inverse_pairs = (
        (directions['up'], directions['down']),
        (directions['down'], directions['up']),
        (directions['left'], directions['right']),
    )
    for forward, backward in inverse_pairs:
        if move == forward:
            return backward
    # Anything else (i.e. 'right') undoes with 'left'.
    return directions['left']
class TestSolvableSlidingTilePuzzle(unittest.TestCase):
    """Tests for SolvableSlidingTilePuzzle, which is expected to produce
    only solvable boards."""

    def test_init(self):
        size = 3
        puzzle = SolvableSlidingTilePuzzle(size)
        self.assertEqual(puzzle.size1, size)
        self.assertEqual(puzzle.size2, size)
        self.assertEqual(puzzle.puzzle.size(), (size, size))
        # The blank is encoded as tile 0.
        self.assertEqual(puzzle.puzzle[puzzle.blank_index], 0)
        self.assertLessEqual(puzzle.num_correct_tiles, size * size)
        self.assertGreaterEqual(puzzle.num_correct_tiles, 0)

    def test_seed_init(self):
        # Same seed must give identical boards; additionally, rebuilding the
        # shuffle from the goal state with the same RNG seed and replaying
        # the inverted moves in reverse order must solve the puzzle,
        # demonstrating the instance really is solvable.
        size = 3
        seed = 1
        a = SolvableSlidingTilePuzzle(size, seed)
        b = SolvableSlidingTilePuzzle(size, seed)
        self.assertEqual(a.size1, b.size1)
        self.assertEqual(a.size2, b.size2)
        for i in range(size):
            for j in range(size):
                self.assertEqual(a.puzzle[(i, j)], b.puzzle[(i, j)])
        self.assertEqual(a.blank_index, b.blank_index)
        self.assertEqual(a.num_correct_tiles, b.num_correct_tiles)
        # Reset `b` to the goal layout, then re-apply the same seeded
        # random walk, recording the inverse of every move taken.
        random.seed(seed)
        moves = []
        number = 0
        for i in range(size):
            for j in range(size):
                b.puzzle[(i, j)] = number
                number += 1
        b.blank_index = (0, 0)
        for i in range(size * size * 10):
            move_choice = b.valid_moves()
            move = random.choice(move_choice)
            b.apply_move(move)
            moves.append(reverse_move(move))
        # Undo the walk on `a` by applying the inverses in reverse order.
        moves.reverse()
        for move in moves:
            a.apply_move(move)
        self.assertTrue(a.is_solved())

    def test_init_rectangle(self):
        size1 = 2
        size2 = 3
        puzzle = SolvableSlidingTilePuzzle(size1=size1, size2=size2)
        self.assertEqual(puzzle.size1, size1)
        self.assertEqual(puzzle.size2, size2)
        self.assertEqual(puzzle.puzzle.size(), (size1, size2))
        self.assertEqual(puzzle.puzzle[puzzle.blank_index], 0)
        self.assertLessEqual(puzzle.num_correct_tiles, size1 * size2)
        self.assertGreaterEqual(puzzle.num_correct_tiles, 0)
if __name__ == '__main__':
    try:
        unittest.main()
    except SystemExit:
        # unittest.main() always calls sys.exit(); swallow it so the
        # script can be run without terminating the hosting process.
        pass
| |
from cattle import ApiError
from common_fixtures import * # NOQA
def test_agent_unique(super_client):
    """An agent URI must be unique: creating a second agent with the same
    `sim://unique` URI must fail with a NotUnique API error.

    Fix: `except ApiError, e` is Python-2-only syntax; `except ApiError as e`
    is accepted by both Python 2.6+ and Python 3. The trailing dead `pass`
    after the assertion is dropped.
    """
    agents = super_client.list_agent(uri='sim://unique')

    if len(agents) == 0:
        # First run against this deployment: create and activate the agent,
        # then deactivate it -- uniqueness must hold regardless of state.
        agent = super_client.create_agent(uri='sim://unique')
        agent = super_client.wait_success(agent)
        assert agent.state == 'active'
        agent.deactivate()

    try:
        super_client.create_agent(uri='sim://unique')
        assert False
    except ApiError as e:
        assert e.error.code == 'NotUnique'
def test_list_sort(super_client, context):
    """Listing by name returns creation order by default and the reverse
    when sorted by 'created' descending."""
    name = random_str()
    containers = []
    for _ in range(2):
        containers.append(
            context.create_container_no_success(name=name,
                                                startOnCreate=False))

    listed = super_client.list_container(name=name)
    for idx, item in enumerate(listed):
        assert containers[idx].id == item.id

    listed = super_client.list_container(name=name, sort='created',
                                         order='desc')
    containers.reverse()
    for idx, item in enumerate(listed):
        assert containers[idx].id == item.id
def test_pagination(context):
    """Paging through four containers with limit=2 must visit every
    container exactly once, and the final page must carry no `next` link."""
    client = context.client
    name = random_str()
    containers = []
    for i in range(4):
        c = client.create_container(imageUuid=context.image_uuid, name=name)
        containers.append(c)

    for c in containers:
        client.wait_success(c)

    r = client.list_container(name=name)

    assert len(r) == 4
    try:
        # Some responses omit `pagination.next` entirely instead of setting
        # it to None; both mean "no further pages".
        assert r.pagination.next is None
    except AttributeError:
        pass

    collected = {}
    r = client.list_container(name=name, limit=2)
    assert len(r) == 2
    assert r.pagination.next is not None

    for i in r:
        collected[i.id] = True

    r = r.next()

    assert len(r) == 2
    try:
        assert r.pagination.next is None
    except AttributeError:
        pass

    for i in r:
        collected[i.id] = True

    # Both pages together must cover all four distinct containers.
    assert len(collected) == 4
def test_pagination_include(super_client, client, context):
    """Paging combined with include='instanceHostMaps' must keep the
    embedded maps correct on every page, and includes on a paged host
    listing must return at most `limit` embedded rows per resource."""
    host = context.host
    name = random_str()
    container_ids = []
    containers = []
    for i in range(5):
        c = client.create_container(imageUuid=context.image_uuid,
                                    name=name,
                                    requestedHostId=host.id)
        c = super_client.reload(c)
        containers.append(c)
        container_ids.append(c.id)

    for c in containers:
        client.wait_success(c)

    assert len(containers[0].instanceHostMaps()) == 1
    assert host.id == containers[0].instanceHostMaps()[0].host().id

    r = super_client.list_container(name=name)
    assert len(r) == 5
    for c in r:
        assert len(c.instanceHostMaps()) == 1
        assert c.instanceHostMaps()[0].hostId == host.id

    # Page through with include=; every page's embedded maps must point at
    # the right host.
    collected = {}
    r = super_client.list_container(name=name, include='instanceHostMaps',
                                    limit=2)
    assert len(r) == 2
    for c in r:
        collected[c.id] = True
        assert len(c.instanceHostMaps) == 1
        assert c.instanceHostMaps[0].hostId == host.id

    r = r.next()

    assert len(r) == 2
    for c in r:
        collected[c.id] = True
        assert len(c.instanceHostMaps) == 1
        assert c.instanceHostMaps[0].hostId == host.id

    r = r.next()

    assert len(r) == 1
    for c in r:
        collected[c.id] = True
        assert len(c.instanceHostMaps) == 1
        assert c.instanceHostMaps[0].hostId == host.id

    # The last page must not be marked as truncated.
    assert not r.pagination.partial

    maps = []
    for id in container_ids:
        maps.extend(super_client.list_instanceHostMap(hostId=host.id,
                                                      instanceId=id))

    assert len(maps) == 5

    # With include= on a host listing, the embedded instanceHostMaps are
    # themselves limited per page; walk all pages and collect ours.
    maps_from_include = []
    r = super_client.list_host(include='instanceHostMaps', limit=2)
    while True:
        for h in r:
            if h.id == host.id:
                assert len(h.instanceHostMaps) <= 2
                for m in h.instanceHostMaps:
                    if m.instanceId in container_ids:
                        maps_from_include.append(m)

        try:
            r = r.next()
        except AttributeError:
            # No `next` link: the last page has been consumed.
            break

    assert len(maps) == len(maps_from_include)
def test_include_left_join(super_client, context):
    """by_id with include= must still return a container that has no
    instanceHostMaps rows (left join, not inner join)."""
    stopped = context.create_container_no_success(startOnCreate=False)
    stopped = context.wait_for_state(stopped, 'stopped')
    fetched = super_client.by_id('container', stopped.id,
                                 include='instanceHostMaps')
    assert stopped.id == fetched.id
def test_include_left_join_sort(super_client, context):
    """Sorting must still work when combined with include= (the join must
    not disturb the ordering)."""
    client = context.client
    name = random_str()
    containers = [client.create_container(imageUuid=context.image_uuid,
                                          name=name)
                  for _ in range(2)]
    for c in containers:
        client.wait_success(c)

    ascending = super_client.list_container(name=name,
                                            include='instanceHostMaps',
                                            sort='created', order='asc')
    for idx, item in enumerate(ascending):
        assert containers[idx].id == item.id

    descending = super_client.list_container(name=name,
                                             include='instanceHostMaps',
                                             sort='created', order='desc')
    containers.reverse()
    for idx, item in enumerate(descending):
        assert containers[idx].id == item.id
def test_include(super_client, context):
    """`include=` works with both camelCase and lowercase link names: with
    it the link comes back as an embedded list, without it the link stays a
    callable."""
    container = context.create_container(name='include_test')
    container = super_client.reload(container)

    for link_name in ['instanceHostMaps', 'instancehostmaps']:
        # Without include: instanceHostMaps is a callable link.
        found = False
        for c in super_client.list_container(name_like='include_test%'):
            if c.id == container.id:
                found = True
                assert len(c.instanceHostMaps()) == 1
                assert callable(c.instanceHostMaps)
        assert found

        # With include: the maps come back embedded as a plain list.
        found = False
        for c in super_client.list_container(include=link_name,
                                             name_like='include_test%'):
            if c.id == container.id:
                found = True
                assert len(c.instanceHostMaps) == 1
        assert found

        c = super_client.by_id('container', container.id)
        assert callable(c.instanceHostMaps)

        c = super_client.by_id('container', container.id, include=link_name)
        assert len(c.instanceHostMaps) == 1
def test_limit(super_client):
    """Default page limit is 100; explicit limits are honoured up to a hard
    cap of 10000."""
    default_page = super_client.list_container()
    assert default_page.pagination.limit == 100

    explicit_page = super_client.list_container(limit=105)
    assert explicit_page.pagination.limit == 105

    capped_page = super_client.list_container(limit=10005)
    assert capped_page.pagination.limit == 10000
def test_schema_boolean_default(client):
    """startOnCreate's schema default must be a real boolean, not a
    stringified one."""
    container_schema = client.schema.types['container']
    default = container_schema.resourceFields.startOnCreate.default
    assert isinstance(default, bool)
def test_schema_self_link(client):
    """The container schema must publish an absolute `self` link."""
    container_schema = client.schema.types['container']
    self_link = container_schema.links.self
    assert self_link is not None
    assert self_link.startswith("http")
def test_child_map_include(super_client, context):
    """include='hosts' on a container embeds its single host, and
    include='instances' on that host embeds the container back."""
    container = context.create_container()

    containers = super_client.list_container(uuid=container.uuid,
                                             include='hosts')
    embedded_hosts = containers[0].hosts
    assert embedded_hosts[0].uuid is not None
    assert len(embedded_hosts) == 1

    host_page = super_client.list_host(uuid=embedded_hosts[0].uuid,
                                       include='instances')
    assert any(instance.uuid == containers[0].uuid
               for instance in host_page[0].instances)
def test_child_map(super_client, context):
    """A container's hosts() child map resolves to exactly one resource of
    type 'host'."""
    refreshed = super_client.reload(context.create_container())
    host_list = refreshed.hosts()
    assert len(host_list) == 1
    assert host_list[0].type == 'host'
def test_fields_on_include(super_client, context):
    """Resources embedded via `include=` must be fully materialized:
    fields, actions and links are all present on the nested instance."""
    c = context.create_container()

    host = super_client.by_id_host(context.host.id,
                                   include='instances')
    assert host is not None

    found = False
    for instance in host.instances:
        if instance.id == c.id:
            assert instance.transitioning == 'no'
            # 'stop' is both a key on the resource and a callable action.
            assert 'stop' in instance
            assert callable(instance.stop)
            assert len(instance.links) > 1
            found = True
            break

    assert found
def test_state_enum(super_client):
    """The container 'state' field is an enum covering exactly the known
    lifecycle states."""
    expected_states = {
        'creating',
        'migrating',
        'purged',
        'purging',
        'removed',
        'removing',
        'requested',
        'restarting',
        'restoring',
        'running',
        'starting',
        'stopped',
        'stopping',
        'updating-running',
        'updating-stopped',
        'error',
        'erroring',
    }
    state_field = super_client.schema.types['container'].resourceFields['state']
    assert state_field.type == 'enum'
    assert expected_states == set(state_field.options)
def test_actions_based_on_state(context):
    """A freshly created container exposes exactly this action set."""
    expected_actions = {'migrate', 'restart', 'stop', 'update', 'execute',
                        'logs', 'proxy'}
    container = context.create_container()
    assert set(container.actions.keys()) == expected_actions
def test_include_user_not_auth_map(client):
    # A regular user may request include='instances' on hosts even though
    # it is not authorized for the underlying map records; the call simply
    # must not raise.
    client.list_host(include='instances')
def test_map_user_not_auth_map(context):
    """A regular user can traverse container -> hosts() even without direct
    access to the map records."""
    container = context.create_container()
    host_list = container.hosts()
    assert len(host_list) == 1
def test_role_option(admin_user_client, client, random_str, context):
    """The `_role` query option restricts results to what that role could
    see: the credential is hidden under the 'user' and 'superadmin' roles,
    and the externalHandler schema is hidden from 'project'.

    NOTE(review): `random_str` here is a fixture value used directly as the
    name (a string), not the helper function of the same name.
    """
    c = admin_user_client.create_api_key(name=random_str,
                                         accountId=context.account.id)
    c = admin_user_client.wait_success(c)
    assert c.state == 'active'

    creds = admin_user_client.list_credential(name=random_str)
    assert len(creds) == 1

    creds = admin_user_client.list_credential(name=random_str,
                                              _role='user')
    assert len(creds) == 0

    creds = client.list_credential(name=random_str, _role='superadmin')
    assert len(creds) == 0

    schemas = [x for x in admin_user_client.list_schema(_role='project')
               if x.id == 'externalHandler']
    assert len(schemas) == 0
def test_query_length(admin_user_client):
    """Very long query-string values must still be accepted (no 414 /
    server-side truncation error)."""
    for length in (8192, 16384 - 512):
        admin_user_client.list_account(name='a' * length)
| |
#!/usr/bin/env python
#
# Pucktada Treeratpituk (https://pucktada.github.io/)
# License: MIT
# 2017-05-01
#
# A recurrent neural network model (LSTM) for thai word segmentation
import logging
import re
import numpy as np
import tensorflow as tf
import tensorflow.contrib.layers as layers
import tensorflow.contrib.rnn as rnn
#from . import char_dictionary
def load_settings(sess):
    """Read the model hyper-parameters and the inference tensors back out
    of the (already imported) frozen graph attached to `sess`.

    Returns (model_settings, model_vars): the evaluated config constants
    and the placeholder/output tensors, all looked up under the 'prefix/'
    name scope that load_graph() imports into.
    """
    graph = tf.get_default_graph()
    #for v in sess.graph.get_operations():
    #    print('P:', v.name)

    config_keys = ['cell_sizes', 'look_ahead', 'num_layers', 'input_classes',
                   'label_classes', 'learning_rate', 'l2_regularization',
                   'cell_type']  # , 'direction']
    model_settings = {
        key: sess.run(graph.get_tensor_by_name('prefix/%s:0' % key))
        for key in config_keys
    }

    placeholder_names = ['inputs', 'fw_state', 'seq_lengths', 'keep_prob']
    model_vars = {
        name: graph.get_tensor_by_name('prefix/placeholder/%s:0' % name)
        for name in placeholder_names
    }
    model_vars['probs'] = graph.get_tensor_by_name('prefix/probs:0')
    return model_settings, model_vars
def load_graph(model_file):
    """Load a frozen GraphDef protobuf from `model_file` and import it into
    a new Graph under the 'prefix' name scope, so load_settings() can look
    tensors up by the fixed 'prefix/...' names.
    """
    # We load the protobuf file from the disk and parse it to retrieve the
    # unserialized graph_def
    with tf.gfile.GFile(model_file, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())

    # Then, we can use again a convenient built-in function to import a graph_def into the
    # current default Graph
    with tf.Graph().as_default() as graph:
        tf.import_graph_def(
            graph_def,
            input_map=None,
            return_elements=None,
            name='prefix',
            op_dict=None,
            producer_op_list=None)
    return graph
def load_model2(sess, meta_file, checkpoint_file):
    """ loading necessary configuration of the network from the meta file &
    the checkpoint file together with variables that are needed for the inferences
    """
    saver = tf.train.import_meta_graph(meta_file, clear_devices=True)
    saver.restore(sess, checkpoint_file)

    # Constants and placeholders stashed into these collections by
    # CkModel.__init__ / _create_placeholders.
    configs = tf.get_collection('configs')
    pvars = tf.get_collection('placeholders')

    model_settings = dict()
    for c in configs:
        # tensor names look like 'cell_sizes:0' -> key 'cell_sizes'
        name = c.name.split(':')[0]
        model_settings[name] = sess.run(c)

    model_vars = dict()
    for p in pvars:
        # placeholder names look like 'placeholder/inputs:0'
        scope, name, _ = re.split('[:/]', p.name)
        model_vars[name] = p
    model_vars['probs'] = tf.get_collection('probs')[0]
    return model_settings, model_vars
class CkModel:
    """ cutkum model: LSTM recurrent neural network model """

    def __init__(self, model_settings):
        """Store hyper-parameters from `model_settings` and register them
        as graph constants in the 'configs' collection so that a restored
        checkpoint can recover them later (see load_model2)."""
        logging.info('...init WordSegmentor')
        self.num_layers = model_settings["num_layers"]
        self.cell_sizes = model_settings["cell_sizes"] # list of cell_size, same length as num_layers
        self.total_cells = sum(self.cell_sizes)
        # start offset of each layer's cells within the flattened state
        self.cell_start = [sum(self.cell_sizes [:i]) for i in range(self.num_layers)]

        # keep number of look_ahead (not used in the training, but so that people know how to use the model)
        self.look_ahead = model_settings['look_ahead']
        #self.num_unroll = model_settings["num_unroll"]
        self.input_classes = model_settings['input_classes']
        self.label_classes = model_settings['label_classes']
        self.learning_rate = model_settings['learning_rate']
        self.l2_regularization = model_settings['l2_regularization'] # 0.1
        self.cell_type = model_settings['cell_type']
        #self.direction = model_settings['direction']
        #self.states = None

        tf.add_to_collection('configs', tf.constant(self.cell_sizes, name="cell_sizes"))
        tf.add_to_collection('configs', tf.constant(self.look_ahead, name="look_ahead"))
        #tf.add_to_collection('configs', tf.constant(self.num_unroll, name="num_unroll"))
        tf.add_to_collection('configs', tf.constant(self.num_layers, name="num_layers"))
        tf.add_to_collection('configs', tf.constant(self.input_classes, name="input_classes"))
        tf.add_to_collection('configs', tf.constant(self.label_classes, name="label_classes"))
        tf.add_to_collection('configs', tf.constant(self.learning_rate, name="learning_rate"))
        tf.add_to_collection('configs', tf.constant(self.l2_regularization, name="l2_regularization"))
        tf.add_to_collection('configs', tf.constant(self.cell_type, name="cell_type"))
        #tf.add_to_collection('configs', tf.constant(self.direction, name="direction"))

        self.global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')
        self.increment_global_step_op = tf.assign(self.global_step, self.global_step+1)

    def _create_placeholders(self):
        """Create input/output/state placeholders (time-major layout).

        NOTE(review): self.fw_state is NOT added to the 'placeholders'
        collection below, so load_model2() will not expose it -- only
        load_settings() (which looks it up by name) finds it. Confirm
        whether that is intentional.
        """
        logging.info('...create placeholder')
        with tf.name_scope("placeholder"):
            # (time, batch, in)
            self.inputs  = tf.placeholder(tf.float32, (None, None, self.input_classes), name="inputs")
            # (time, batch, out)
            self.outputs = tf.placeholder(tf.float32, (None, None, self.label_classes), name="outputs")
            # [batch]
            self.seq_lengths = tf.placeholder(tf.int32, [None], name="seq_lengths")

            # LSTM - [2, None, sum(cell_sizes)]   (c and h stacked)
            # GRU, RNN - [1, None, sum(cell_sizes)]
            if (self.cell_type == 'lstm'):
                self.fw_state = tf.placeholder(tf.float32, [2, None, self.total_cells], name="fw_state")
            else: # gru, rnn
                self.fw_state = tf.placeholder(tf.float32, [1, None, self.total_cells], name="fw_state")

            self.keep_prob = tf.placeholder(tf.float32, name="keep_prob")

        tf.add_to_collection('placeholders', self.inputs)
        tf.add_to_collection('placeholders', self.outputs)
        tf.add_to_collection('placeholders', self.seq_lengths)
        tf.add_to_collection('placeholders', self.keep_prob)

    #
    def init_fw_states(self, batch_size):
        """Return an all-zero initial state tensor of the flattened shape
        expected by the fw_state placeholder."""
        if (self.cell_type == 'lstm'):
            return np.zeros(shape=[2, batch_size, self.total_cells])
        else: # GRU, RNN
            return np.zeros(shape=[1, batch_size, self.total_cells])

    # state tuple to tensor
    def flatten_fw_states(self, fw_state_tuple):
        """Concatenate a per-layer state tuple into one dense ndarray of
        shape [2|1, batch, total_cells] (inverse of unstack_fw_states)."""
        if (self.cell_type == 'lstm'):
            # fw_state_tuple is tuple of LSTMStateTuple of lenghts 'num_layers'
            # states = [2, batch_size, self.total_cells]
            c_tensor = np.concatenate([fw_state_tuple[i].c for i in range(self.num_layers)], axis=1)
            h_tensor = np.concatenate([fw_state_tuple[i].h for i in range(self.num_layers)], axis=1)
            state = np.stack([c_tensor, h_tensor])
        else: # GRU, RNN
            # fw_state_tuple is tuple of ndarray of lenghts 'num_layers'
            c_tensor = np.concatenate([fw_state_tuple[i] for i in range(self.num_layers)], axis=1)
            state = np.expand_dims(c_tensor, axis=0)
        return state #.eval()

    # state tensor to tuple
    def unstack_fw_states(self, fw_state):
        """Slice the dense state tensor back into the per-layer tuple form
        that MultiRNNCell expects (inverse of flatten_fw_states)."""
        if (self.cell_type == 'lstm'):
            # states = [2, batch_size, self.total_cells]
            fw_state_tuple = tuple(
                [tf.contrib.rnn.LSTMStateTuple(
                    fw_state[0, :, self.cell_start[i]:self.cell_start[i]+self.cell_sizes[i]],
                    fw_state[1, :, self.cell_start[i]:self.cell_start[i]+self.cell_sizes[i]])
                for i in range(self.num_layers)])
        else: # GRU, RNN
            # states = [1, batch_size, self.total_cells]
            fw_state_tuple = tuple(
                [fw_state[0, :, self.cell_start[i]:self.cell_start[i]+self.cell_sizes[i]]
                for i in range(self.num_layers)])
        return fw_state_tuple

    def _inference(self):
        """Build the stacked RNN plus output projection; defines
        self.logits, self.probs (softmax, exported via the 'probs'
        collection) and self.states."""
        logging.info('...create inference')

        fw_state_tuple = self.unstack_fw_states(self.fw_state)

        fw_cells = list()
        for i in range(0, self.num_layers):
            if (self.cell_type == 'lstm'):
                cell = rnn.LSTMCell(num_units=self.cell_sizes[i], state_is_tuple=True)
            elif (self.cell_type == 'gru'):
                # change to GRU
                cell = rnn.GRUCell(num_units=self.cell_sizes[i])
            else:
                cell = rnn.BasicRNNCell(num_units=self.cell_sizes[i])
            # dropout applied to each layer's output
            cell = rnn.DropoutWrapper(cell, output_keep_prob=self.keep_prob)
            fw_cells.append(cell)
        self.fw_cells = rnn.MultiRNNCell(fw_cells, state_is_tuple=True)

        rnn_outputs, states = tf.nn.dynamic_rnn(
            self.fw_cells,
            self.inputs,
            initial_state=fw_state_tuple,
            sequence_length=self.seq_lengths,
            dtype=tf.float32, time_major=True)

        # project output from rnn output size to OUTPUT_SIZE. Sometimes it is worth adding
        # an extra layer here.
        self.projection = lambda x: layers.linear(x,
            num_outputs=self.label_classes, activation_fn=tf.nn.sigmoid)

        self.logits = tf.map_fn(self.projection, rnn_outputs, name="logits")
        self.probs = tf.nn.softmax(self.logits, name="probs")
        self.states = states
        tf.add_to_collection('probs', self.probs)

    def _create_loss(self):
        """Masked softmax cross-entropy plus an L2 weight penalty, and a
        masked accuracy metric; padded time steps are excluded by the
        mask."""
        logging.info('...create loss')
        with tf.name_scope("loss"):
            # shape=[Time * Batch, label_classes]
            outputs_flat = tf.reshape(self.outputs, [-1, self.label_classes])
            logits_flat = tf.reshape(self.logits, [-1, self.label_classes])

            # calculate the losses shape=[Time * Batch]
            # pre-tensorflow 1.5
            #losses = tf.nn.softmax_cross_entropy_with_logits_v2(labels=outputs_flat, logits=logits_flat)
            losses = tf.nn.softmax_cross_entropy_with_logits(labels=outputs_flat, logits=logits_flat)

            # create mask [Time * Batch] where 0: padded, 1: not-padded
            # (rows whose one-hot label has a 1 in column 0 are treated as
            # padding -- presumably label class 0 is the pad class; TODO
            # confirm)
            mask = outputs_flat[:,0]
            mask = tf.abs(tf.subtract(mask, tf.ones_like(mask)))

            # mask the losses
            masked_losses = mask * losses

            l2_reg = self.l2_regularization
            l2 = l2_reg * sum(tf.nn.l2_loss(tf_var) for tf_var in tf.trainable_variables()
                if not ("noreg" in tf_var.name or "Bias" in tf_var.name))

            self.losses = masked_losses + l2
            self.num_entries = tf.reduce_sum(mask)
            self.mean_loss = tf.reduce_sum(masked_losses) / self.num_entries

            # accuracy (per non-padded position, argmax agreement)
            correct_pred = tf.cast(tf.equal(tf.argmax(outputs_flat, 1), tf.argmax(logits_flat, 1)), tf.float32)
            mask_correct_pred = mask * correct_pred
            self.accuracy = tf.reduce_sum(mask_correct_pred) / self.num_entries

    def _create_optimizer(self):
        """Adam with global-norm gradient clipping (max norm 1.0).

        NOTE(review): apply_gradients is called without global_step, so
        self.global_step only advances via increment_global_step_op."""
        logging.info('...create optimizer')
        with tf.name_scope("train"):
            #self.optimizer = tf.train.AdamOptimizer(self.learning_rate).minimize(self.mean_loss, global_step=self.global_step)
            max_gradient_norm = 1.0
            params = tf.trainable_variables()
            gradients = tf.gradients(self.mean_loss, params)
            clipped_gradients, norm = tf.clip_by_global_norm(gradients, max_gradient_norm)

            #self.train_op = tf.train.AdamOptimizer(learning_rate=self.learning_rate)\
            #    .apply_gradients(zip(clipped_gradients, params), global_step=self.global_step)
            self.train_op = tf.train.AdamOptimizer(learning_rate=self.learning_rate)\
                .apply_gradients(zip(clipped_gradients, params))

    def _create_summary(self):
        """Scalar summaries for TensorBoard, merged into self.summary_op."""
        logging.info('...create summary')
        tf.summary.scalar("mean_loss", self.mean_loss)
        tf.summary.scalar("accuracy", self.accuracy)
        self.summary_op = tf.summary.merge_all()

    def build_graph(self):
        """Assemble the full training graph; call order matters (loss and
        optimizer depend on the inference ops)."""
        self._create_placeholders()
        self._inference()
        self._create_loss()
        self._create_optimizer()
        self._create_summary()
if __name__ == '__main__':
    print('create word segmentor model')

    # NOTE(review): CharDictionary is not imported anywhere in this module
    # (the `from . import char_dictionary` line above is commented out);
    # restore that import before running this script.
    char_dict = CharDictionary()

    # MODEL
    # CkModel.__init__ reads exactly these keys. The previous version
    # supplied 'cell_size'/'num_unroll' and omitted 'cell_sizes',
    # 'look_ahead', 'l2_regularization' and 'cell_type', which raised
    # KeyError before the graph was ever built.
    num_layers = 3
    model_settings = {
        'num_layers': num_layers,
        'cell_sizes': [64] * num_layers,   # one cell size per layer
        'look_ahead': 0,                   # metadata only; not used in training
        'input_classes': char_dict.num_char_classes() + 1,
        'label_classes': char_dict.num_label_classes() + 1,
        'learning_rate': 0.001,            # Initial learning rate
        'l2_regularization': 0.0,          # not using right now
        'cell_type': 'lstm',
    }

    model = CkModel(model_settings)
    model.build_graph()
| |
# -*- coding: utf-8 -*-
from lar import *
from scipy import *
import json
import scipy
import numpy as np
import time as tm
import gc
from pngstack2array3d import *
import struct
import getopt, sys
import traceback
#
import matplotlib.pyplot as plt
# threading
import multiprocessing
from multiprocessing import Process, Value, Lock
from Queue import Queue
# cython stuf. not used now
# pyximport compiles the Cython helper below on first import. The previous
# line (`import larVolumeToObjximport; pyximport.install()`) left
# `pyximport` undefined and raised ImportError/NameError at module load --
# it appears to be a corrupted form of the standard pyximport idiom.
import pyximport; pyximport.install()
import calc_chains_helper as cch
# ------------------------------------------------------------
# Logging & Timer
# ------------------------------------------------------------
# Verbosity threshold consumed by log() below.
logging_level = 2;
# 0 = no_logging
# 1 = few details
# 2 = many details
# 3 = many many details
def log(n, l):
    # Print each message in list `l` when running as a script and the
    # message level `n` is within the configured logging_level.
    if __name__=="__main__" and n <= logging_level:
        for s in l:
            print "Log:", s;
# ------------------------------------------------------------
# Configuration parameters
# ------------------------------------------------------------
# File-name extensions: PNG input image slices, packed binary output chunks.
PNG_EXTENSION = ".png"
BIN_EXTENSION = ".bin"
# ------------------------------------------------------------
# Utility toolbox
# ------------------------------------------------------------
def invertIndex(nx,ny,nz):
    # Return a function mapping a linear cell offset to its (x, y, z)
    # components within an (nx+1, ny+1, nz+1) grid. Relies on Python 2
    # integer division.
    nx,ny,nz = nx+1,ny+1,nz+1
    def invertIndex0(offset):
        # NOTE(review): a2 is computed but never used; only b0,b1,b2 are
        # returned -- presumably offset < nx*ny*nz so the final quotient
        # is always 0.
        a0, b0 = offset / nx, offset % nx
        a1, b1 = a0 / ny, a0 % ny
        a2, b2 = a1 / nz, a1 % nz
        return b0,b1,b2
    return invertIndex0
def countFilesInADir(directory):
    # Number of regular files directly inside `directory` (Python 2
    # iterator idiom). NOTE(review): `os` is not imported explicitly in
    # this module -- presumably provided by the star imports above; verify.
    return len(os.walk(directory).next()[2])
def isArrayEmpty(arr):
    """Return True when every entry of `arr` equals zero (vacuously True
    for an empty sequence)."""
    for element in arr:
        if element != 0:
            return False
    return True
# ------------------------------------------------------------
def writeOffsetToFile(file, offsetCurr):
    """Write the first three offset components to `file` as big-endian
    unsigned 32-bit integers (the per-block record header)."""
    for component in offsetCurr[:3]:
        file.write(struct.pack('>I', component))
# ------------------------------------------------------------
def computeChainsThread(startImage,endImage,imageHeight,imageWidth, imageDx,imageDy,imageDz, Nx,Ny,Nz, calculateout,BORDER_FILE, colors,pixelCalc,centroidsCalc, colorIdx, imageDir, DIR_O):
    """Pool worker: process the image slab [startImage, endImage) for one
    color class (colorIdx) in imageDx x imageDy blocks.

    When `calculateout` is True the boundary operator loaded from
    BORDER_FILE is applied and the boundary chain is written; otherwise the
    raw selector chain is written. Output goes to one .bin file per slab in
    DIR_O. Returns 0 on success, 2 on failure (errors are logged rather
    than raised, because this runs inside a multiprocessing pool).
    """
    log(2, [ "Working task: " +str(startImage) + "-" + str(endImage) + " [" + str( imageHeight) + "-" + str( imageWidth ) + "-" + str(imageDx) + "-" + str( imageDy) + "-" + str (imageDz) + "]" ])

    bordo3 = None
    if (calculateout == True):
        # Rebuild the sparse boundary operator from its JSON-serialized
        # CSR components (data / indices / indptr).
        with open(BORDER_FILE, "r") as file:
            bordo3_json = json.load(file)
            ROWCOUNT = bordo3_json['ROWCOUNT']
            COLCOUNT = bordo3_json['COLCOUNT']
            ROW = np.asarray(bordo3_json['ROW'], dtype=np.int32)
            COL = np.asarray(bordo3_json['COL'], dtype=np.int32)
            DATA = np.asarray(bordo3_json['DATA'], dtype=np.int8)
            bordo3 = csr_matrix((DATA,COL,ROW),shape=(ROWCOUNT,COLCOUNT));

    xEnd, yEnd = 0,0
    beginImageStack = 0
    # Sorted centroid colors; colorIdx selects which class this task emits.
    saveTheColors = centroidsCalc
    saveTheColors = np.array( sorted(saveTheColors.reshape(1,colors)[0]), dtype=np.int )

    fileName = "pselettori-"
    if (calculateout == True):
        fileName = "poutput-"
    fileName = fileName + str(startImage) + "_" + str(endImage) + "-"

    returnProcess = 0
    try:
        fileToWrite = open(DIR_O+'/'+fileName+str(saveTheColors[colorIdx])+BIN_EXTENSION, "wb")
        try:
            log(2, [ "Working task: " +str(startImage) + "-" + str(endImage) + " [loading colors]" ])
            theImage,colors,theColors = pngstack2array3d(imageDir, startImage, endImage, colors, pixelCalc, centroidsCalc)
            # theColors = theColors.reshape(1,colors)
            # if (sorted(theColors[0]) != saveTheColors):
            #     log(1, [ "Error: colors have changed"] )
            #     sys.exit(2)

            log(2, [ "Working task: " +str(startImage) + "-" + str(endImage) + " [comp loop]" ])
            # Walk the slab in imageDx x imageDy tiles.
            for xBlock in xrange(imageHeight/imageDx):
                # print "Working task: " +str(startImage) + "-" + str(endImage) + " [Xblock]"
                for yBlock in xrange(imageWidth/imageDy):
                    # print "Working task: " +str(startImage) + "-" + str(endImage) + " [Yblock]"
                    xStart, yStart = xBlock * imageDx, yBlock * imageDy
                    xEnd, yEnd = xStart+imageDx, yStart+imageDy

                    image = theImage[:, xStart:xEnd, yStart:yEnd]
                    nz,nx,ny = image.shape

                    # Compute a quotient complex of chains with constant field
                    # ------------------------------------------------------------
                    chains3D_old = [];
                    chains3D = None
                    hasSomeOne = False
                    if (calculateout != True):
                        chains3D = np.zeros(nx*ny*nz, dtype=np.int32)

                    zStart = startImage - beginImageStack;
                    if (calculateout == True):
                        chains3D_old = cch.setList(nx,ny,nz, colorIdx, image,saveTheColors)
                    else:
                        hasSomeOne,chains3D = cch.setListNP(nx,ny,nz, colorIdx, image,saveTheColors)
                    # print "Working task: " +str(startImage) + "-" + str(endImage) + " [hasSomeOne: " + str(hasSomeOne) +"]"

                    # Compute the boundary complex of the quotient cell
                    # ------------------------------------------------------------
                    objectBoundaryChain = None
                    if (calculateout == True) and (len(chains3D_old) > 0):
                        objectBoundaryChain = larBoundaryChain(bordo3,chains3D_old)

                    # Save
                    # Each record: (z, x, y) offset header (writeOffsetToFile)
                    # followed by the signed-byte chain payload.
                    if (calculateout == True):
                        if (objectBoundaryChain != None):
                            writeOffsetToFile( fileToWrite, np.array([zStart,xStart,yStart], dtype=int32) )
                            fileToWrite.write( bytearray( np.array(objectBoundaryChain.toarray().astype('b').flatten()) ) )
                    else:
                        if (hasSomeOne != False):
                            writeOffsetToFile( fileToWrite, np.array([zStart,xStart,yStart], dtype=int32) )
                            fileToWrite.write( bytearray( np.array(chains3D, dtype=np.dtype('b')) ) )
        except:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
            log(1, [ "Error: " + ''.join('!! ' + line for line in lines) ]) # Log it or whatever here
            returnProcess = 2
        finally:
            fileToWrite.close()
        # -------------------------------------------------------------------------
    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        log(1, [ "Error: " + ''.join('!! ' + line for line in lines) ]) # Log it or whatever here
        returnProcess = 2

    return returnProcess
# Per-task return codes collected by the pool callback. The callback runs
# in the parent process, so a plain list append is safe here.
processRes = []
def collectResult(resData):
    # apply_async callback: record one worker's return code.
    processRes.append(resData)
def startComputeChains(imageHeight,imageWidth,imageDepth, imageDx,imageDy,imageDz, Nx,Ny,Nz, calculateout,BORDER_FILE, colors,pixelCalc,centroidsCalc, colorIdx,INPUT_DIR,DIR_O):
    # Fan the image stack out over a process pool, one task per imageDz-thick
    # slab of images; worker return codes are gathered by collectResult.
    # Returns 0 if every worker returned 0, otherwise 2.
    beginImageStack = 0
    endImage = beginImageStack
    saveTheColors = centroidsCalc
    log(2, [ centroidsCalc ])
    # sort the centroid colors so all workers index them in the same order
    saveTheColors = np.array( sorted(saveTheColors.reshape(1,colors)[0]), dtype=np.int )
    log(2, [ saveTheColors ])
    # print str(imageHeight) + '-' + str(imageWidth) + '-' + str(imageDepth)
    # print str(imageDx) + '-' + str(imageDy) + '-' + str(imageDz)
    # print str(Nx) + '-' + str(Ny) + '-' + str(Nz)
    returnValue = 2
    # use half the available cores, but at least one (Python 2 int division)
    processPool = max(1, multiprocessing.cpu_count()/2)
    log(2, [ "Starting pool with: " + str(processPool) ])
    try:
        pool = multiprocessing.Pool(processPool)
        log(2, [ 'Start pool' ])
        for j in xrange(imageDepth/imageDz):
            # each task processes the slab [startImage, endImage)
            startImage = endImage
            endImage = startImage + imageDz
            log(2, [ "Added task: " + str(j) + " -- (" + str(startImage) + "," + str(endImage) + ")" ])
            pool.apply_async(computeChainsThread, args = (startImage,endImage,imageHeight,imageWidth, imageDx,imageDy,imageDz, Nx,Ny,Nz, calculateout,BORDER_FILE, colors,pixelCalc,centroidsCalc, colorIdx,INPUT_DIR,DIR_O, ), callback = collectResult)
        log(2, [ "Waiting for completion..." ])
        pool.close()
        pool.join()
        log(1, [ "Completed: " + str(processRes) ])
        # success only when every worker reported 0
        if (sum(processRes) == 0):
            returnValue = 0
    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        log(1, [ "Error: " + ''.join('!! ' + line for line in lines) ]) # Log it or whatever here
    return returnValue
def runComputation(imageDx,imageDy,imageDz, colors,coloridx,calculateout, V,FV, INPUT_DIR,BEST_IMAGE,BORDER_FILE,DIR_O):
    # Driver: derive the stack geometry from the best image and the file count,
    # compute centroids, run the parallel chain computation, then terminate the
    # interpreter with the resulting status (0 = success, 2 = failure).
    imageHeight,imageWidth = getImageData(INPUT_DIR+str(BEST_IMAGE)+PNG_EXTENSION)
    imageDepth = countFilesInADir(INPUT_DIR)
    # NOTE(review): Ny is derived from imageDx, not imageDy -- only correct
    # when the blocks are cubic. Looks like a copy/paste slip; confirm before
    # changing.
    Nx,Ny,Nz = imageHeight/imageDx, imageWidth/imageDx, imageDepth/imageDz
    returnValue = 2
    try:
        pixelCalc, centroidsCalc = centroidcalc(INPUT_DIR, BEST_IMAGE, colors)
        returnValue = startComputeChains(imageHeight,imageWidth,imageDepth, imageDx,imageDy,imageDz, Nx,Ny,Nz, calculateout,BORDER_FILE, colors,pixelCalc,centroidsCalc, coloridx,INPUT_DIR,DIR_O)
    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        log(1, [ "Error: " + ''.join('!! ' + line for line in lines) ]) # Log it or whatever here
        returnValue = 2
    # exits the whole process here; nothing after this call ever runs
    sys.exit(returnValue)
def main(argv):
    # Parse the command line, build the cuboidal cell complex (V, FV) for one
    # nx*ny*nz block of the image grid, then hand off to runComputation.
    ARGS_STRING = 'Args: -r -b <borderfile> -x <borderX> -y <borderY> -z <borderZ> -i <inputdirectory> -c <colors> -d <coloridx> -o <outputdir> -q <bestimage>'
    try:
        opts, args = getopt.getopt(argv,"rb:x:y:z:i:c:d:o:q:")
    except getopt.GetoptError:
        print ARGS_STRING
        sys.exit(2)
    # defaults; -x sets all three dimensions, -y/-z then override y/z only
    nx = ny = nz = imageDx = imageDy = imageDz = 64
    colors = 2
    coloridx = 0
    # number of required options that must still be seen (-x -i -b -o -c -d)
    mandatory = 6
    calculateout = False
    #Files
    BORDER_FILE = 'bordo3.json'
    BEST_IMAGE = ''
    DIR_IN = ''
    DIR_O = ''
    for opt, arg in opts:
        if opt == '-x':
            nx = ny = nz = imageDx = imageDy = imageDz = int(arg)
            mandatory = mandatory - 1
        elif opt == '-y':
            ny = nz = imageDy = imageDz = int(arg)
        elif opt == '-z':
            nz = imageDz = int(arg)
        elif opt == '-r':
            calculateout = True
        elif opt == '-i':
            DIR_IN = arg + '/'
            mandatory = mandatory - 1
        elif opt == '-b':
            BORDER_FILE = arg
            mandatory = mandatory - 1
        elif opt == '-o':
            mandatory = mandatory - 1
            DIR_O = arg
        elif opt == '-c':
            mandatory = mandatory - 1
            colors = int(arg)
        elif opt == '-d':
            mandatory = mandatory - 1
            coloridx = int(arg)
        elif opt == '-q':
            BEST_IMAGE = int(arg)
    if mandatory != 0:
        print 'Not all arguments where given'
        print ARGS_STRING
        sys.exit(2)
    if (coloridx >= colors):
        print 'Not all arguments where given (coloridx >= colors)'
        print ARGS_STRING
        sys.exit(2)
    # linearize grid coordinates into a vertex index on the (nx+1)x(ny+1)x(nz+1) lattice
    def ind(x,y,z): return x + (nx+1) * (y + (ny+1) * (z))
    chunksize = nx * ny + nx * nz + ny * nz + 3 * nx * ny * nz
    # V: all lattice vertices, z-major ordering (matches ind() above)
    V = [[x,y,z] for z in xrange(nz+1) for y in xrange(ny+1) for x in xrange(nx+1) ]
    v2coords = invertIndex(nx,ny,nz)
    # FV: the three families of axis-aligned unit faces of the lattice
    FV = []
    for h in xrange(len(V)):
        x,y,z = v2coords(h)
        if (x < nx) and (y < ny): FV.append([h,ind(x+1,y,z),ind(x,y+1,z),ind(x+1,y+1,z)])
        if (x < nx) and (z < nz): FV.append([h,ind(x+1,y,z),ind(x,y,z+1),ind(x+1,y,z+1)])
        if (y < ny) and (z < nz): FV.append([h,ind(x,y+1,z),ind(x,y,z+1),ind(x,y+1,z+1)])
    runComputation(imageDx, imageDy, imageDz, colors, coloridx, calculateout, V, FV, DIR_IN, BEST_IMAGE, BORDER_FILE, DIR_O)
if __name__ == "__main__":
    main(sys.argv[1:])
| |
#######################################################
# Copyright (c) 2015, ArrayFire
# All rights reserved.
#
# This file is distributed under 3-clause BSD license.
# The complete license agreement can be obtained at:
# http://arrayfire.com/licenses/BSD-3-Clause
########################################################
"""
Functions to create and manipulate sparse matrices.
"""
from .library import *
from .array import *
import numbers
from .interop import to_array
__to_sparse_enum = [STORAGE.DENSE,
STORAGE.CSR,
STORAGE.CSC,
STORAGE.COO]
def create_sparse(values, row_idx, col_idx, nrows, ncols, storage = STORAGE.CSR):
    """
    Construct a sparse matrix from its constituent parts.

    Parameters
    ----------
    values : af.Array.
         - Non-zero elements of the sparse array.
    row_idx : af.Array.
         - Row indices of the non-zero elements.
    col_idx : af.Array.
         - Column indices of the non-zero elements.
    nrows : int.
         - Number of rows of the sparse matrix.
    ncols : int.
         - Number of columns of the sparse matrix.
    storage : optional: arrayfire.STORAGE. default: arrayfire.STORAGE.CSR.
         - Can be one of arrayfire.STORAGE.CSR, arrayfire.STORAGE.COO.

    Returns
    -------
    A sparse matrix.
    """
    # all three components must already be device arrays
    for component in (values, row_idx, col_idx):
        assert(isinstance(component, Array))
    result = Array()
    safe_call(backend.get().af_create_sparse_array(
        c_pointer(result.arr), c_dim_t(nrows), c_dim_t(ncols),
        values.arr, row_idx.arr, col_idx.arr, storage.value))
    return result
def create_sparse_from_host(values, row_idx, col_idx, nrows, ncols, storage = STORAGE.CSR):
    """
    Construct a sparse matrix from host-side data.

    Parameters
    ----------
    values : Any datatype that can be converted to array.
         - Non-zero elements of the sparse array.
    row_idx : Any datatype that can be converted to array.
         - Row indices of the non-zero elements.
    col_idx : Any datatype that can be converted to array.
         - Column indices of the non-zero elements.
    nrows : int.
         - Number of rows of the sparse matrix.
    ncols : int.
         - Number of columns of the sparse matrix.
    storage : optional: arrayfire.STORAGE. default: arrayfire.STORAGE.CSR.
         - Can be one of arrayfire.STORAGE.CSR, arrayfire.STORAGE.COO.

    Returns
    -------
    A sparse matrix.
    """
    # move each host container onto the device, then delegate to create_sparse
    vals_arr = to_array(values)
    rows_arr = to_array(row_idx)
    cols_arr = to_array(col_idx)
    return create_sparse(vals_arr, rows_arr, cols_arr, nrows, ncols, storage)
def create_sparse_from_dense(dense, storage = STORAGE.CSR):
    """
    Convert a dense matrix into a sparse matrix.

    Parameters
    ----------
    dense : af.Array.
         - A dense matrix.
    storage : optional: arrayfire.STORAGE. default: arrayfire.STORAGE.CSR.
         - Can be one of arrayfire.STORAGE.CSR, arrayfire.STORAGE.COO.

    Returns
    -------
    A sparse matrix.
    """
    assert(isinstance(dense, Array))
    result = Array()
    safe_call(backend.get().af_create_sparse_array_from_dense(
        c_pointer(result.arr), dense.arr, storage.value))
    return result
def convert_sparse_to_dense(sparse):
    """
    Expand a sparse matrix into its dense equivalent.

    Parameters
    ----------
    sparse : af.Array.
         - A sparse matrix.

    Returns
    -------
    A dense matrix.
    """
    dense = Array()
    safe_call(backend.get().af_sparse_to_dense(c_pointer(dense.arr), sparse.arr))
    return dense
def sparse_get_info(sparse):
    """
    Unpack a sparse matrix into its constituent arrays and storage type.

    Parameters
    ----------
    sparse : af.Array.
         - A sparse matrix.

    Returns
    --------
    (values, row_idx, col_idx, storage) where
    values : arrayfire.Array containing non zero elements from sparse matrix
    row_idx : arrayfire.Array containing the row indices
    col_idx : arrayfire.Array containing the column indices
    storage : sparse storage
    """
    vals = Array()
    rows = Array()
    cols = Array()
    storage_kind = c_int_t(0)
    safe_call(backend.get().af_sparse_get_info(
        c_pointer(vals.arr), c_pointer(rows.arr),
        c_pointer(cols.arr), c_pointer(storage_kind),
        sparse.arr))
    # map the raw C enum value back to the Python STORAGE enum
    return (vals, rows, cols, __to_sparse_enum[storage_kind.value])
def sparse_get_values(sparse):
    """
    Extract the non-zero values of a sparse matrix.

    Parameters
    ----------
    sparse : af.Array.
         - A sparse matrix.

    Returns
    --------
    arrayfire array containing the non zero elements.
    """
    vals = Array()
    safe_call(backend.get().af_sparse_get_values(c_pointer(vals.arr), sparse.arr))
    return vals
def sparse_get_row_idx(sparse):
    """
    Get the row indices from sparse matrix.

    Parameters
    ----------
    sparse : af.Array.
         - A sparse matrix.

    Returns
    --------
    arrayfire array containing the row indices of the sparse matrix.
    """
    row_idx = Array()
    safe_call(backend.get().af_sparse_get_row_idx(c_pointer(row_idx.arr), sparse.arr))
    return row_idx
def sparse_get_col_idx(sparse):
    """
    Get the column indices from sparse matrix.

    Parameters
    ----------
    sparse : af.Array.
         - A sparse matrix.

    Returns
    --------
    arrayfire array containing the column indices of the sparse matrix.
    """
    col_idx = Array()
    safe_call(backend.get().af_sparse_get_col_idx(c_pointer(col_idx.arr), sparse.arr))
    return col_idx
def sparse_get_nnz(sparse):
    """
    Get the number of non zero elements in the sparse matrix.

    Parameters
    ----------
    sparse : af.Array.
         - A sparse matrix.

    Returns
    --------
    Number of non zero elements in the sparse matrix.
    """
    nnz = c_dim_t(0)
    safe_call(backend.get().af_sparse_get_nnz(c_pointer(nnz), sparse.arr))
    return nnz.value
def sparse_get_storage(sparse):
    """
    Get the storage type of the sparse matrix.

    Parameters
    ----------
    sparse : af.Array.
         - A sparse matrix.

    Returns
    --------
    Storage type (arrayfire.STORAGE) of the sparse matrix.
    """
    storage = c_int_t(0)
    safe_call(backend.get().af_sparse_get_storage(c_pointer(storage), sparse.arr))
    return __to_sparse_enum[storage.value]
def convert_sparse(sparse, storage):
    """
    Convert a sparse matrix from one storage format to another.

    Parameters
    ----------
    sparse : af.Array.
         - A sparse matrix.
    storage : arrayfire.STORAGE.
         - Target storage format.

    Returns
    -------
    Sparse matrix converted to the requested storage format.
    """
    converted = Array()
    safe_call(backend.get().af_sparse_convert_to(c_pointer(converted.arr), sparse.arr, storage.value))
    return converted
| |
# Test the Unicode versions of normal file functions
# open, os.open, os.stat. os.listdir, os.rename, os.remove, os.mkdir, os.chdir, os.rmdir
import sys, os, unittest
from unicodedata import normalize
from test import test_support
# Candidate filenames: plain ASCII plus a spread of scripts and code points
# whose Unicode normal forms differ in interesting ways. The numeric prefix
# keeps every entry distinct even after normalization.
filenames = [
    '1_abc',
    u'2_ascii',
    u'3_Gr\xfc\xdf-Gott',
    u'4_\u0393\u03b5\u03b9\u03ac-\u03c3\u03b1\u03c2',
    u'5_\u0417\u0434\u0440\u0430\u0432\u0441\u0442\u0432\u0443\u0439\u0442\u0435',
    u'6_\u306b\u307d\u3093',
    u'7_\u05d4\u05e9\u05e7\u05e6\u05e5\u05e1',
    u'8_\u66e8\u66e9\u66eb',
    u'9_\u66e8\u05e9\u3093\u0434\u0393\xdf',
    # Specific code points: fn, NFC(fn) and NFKC(fn) all differents
    u'10_\u1fee\u1ffd',
]

# Mac OS X decomposes Unicode names, using Normal Form D.
# http://developer.apple.com/mac/library/qa/qa2001/qa1173.html
# "However, most volume formats do not follow the exact specification for
# these normal forms. For example, HFS Plus uses a variant of Normal Form D
# in which U+2000 through U+2FFF, U+F900 through U+FAFF, and U+2F800 through
# U+2FAFF are not decomposed."
if sys.platform != 'darwin':
    extra = [
        # Specific code points: NFC(fn), NFD(fn), NFKC(fn) and NFKD(fn) all differents
        u'11_\u0385\u03d3\u03d4',
        u'12_\u00a8\u0301\u03d2\u0301\u03d2\u0308', # == NFD(u'\u0385\u03d3\u03d4')
        u'13_\u0020\u0308\u0301\u038e\u03ab', # == NFKC(u'\u0385\u03d3\u03d4')
        u'14_\u1e9b\u1fc1\u1fcd\u1fce\u1fcf\u1fdd\u1fde\u1fdf\u1fed',
        # Specific code points: fn, NFC(fn) and NFKC(fn) all differents
        u'15_\u1fee\u1ffd\ufad1',
        u'16_\u2000\u2000\u2000A',
        u'17_\u2001\u2001\u2001A',
        u'18_\u2003\u2003\u2003A', # == NFC(u'\u2001\u2001\u2001A')
        u'19_\u0020\u0020\u0020A', # u'\u0020' == u' ' == NFKC(u'\u2000') ==
                                   # NFKC(u'\u2001') == NFKC(u'\u2003')
    ]
    filenames.extend(extra)

# Is it Unicode-friendly? Skip the whole module when the filesystem
# encoding cannot represent the candidate names.
if not os.path.supports_unicode_filenames:
    fsencoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
    try:
        for name in filenames:
            name.encode(fsencoding)
    except UnicodeEncodeError:
        raise unittest.SkipTest("only NT+ and systems with "
                                "Unicode-friendly filesystem encoding")
# Destroy directory dirname and all files under it, to one level.
def deltree(dirname):
    """Remove dirname and every file directly inside it (one level only)."""
    # Don't hide legitimate errors: if one of these suckers exists, it's
    # an error if we can't remove it.
    if os.path.exists(dirname):
        # must pass unicode to os.listdir() so we get back unicode results.
        for fname in os.listdir(unicode(dirname)):
            os.unlink(os.path.join(dirname, fname))
        os.rmdir(dirname)
class UnicodeFileTests(unittest.TestCase):
    # Exercises open/os.* with Unicode filenames. Subclasses set normal_form
    # so the same checks run on NFC/NFD/NFKC/NFKD-normalized names.
    files = set(filenames)
    normal_form = None
    def setUp(self):
        # Create TESTFN and populate it with one file per candidate name,
        # normalized per self.normal_form. self.files becomes the full paths.
        try:
            os.mkdir(test_support.TESTFN)
        except OSError:
            pass
        files = set()
        for name in self.files:
            name = os.path.join(test_support.TESTFN, self.norm(name))
            with open(name, 'w') as f:
                f.write((name+'\n').encode("utf-8"))
            os.stat(name)
            files.add(name)
        self.files = files
    def tearDown(self):
        # remove the scratch directory and everything created in setUp
        deltree(test_support.TESTFN)
    def norm(self, s):
        # Apply this class's Unicode normal form (if any) to unicode names.
        if self.normal_form and isinstance(s, unicode):
            return normalize(self.normal_form, s)
        return s
    def _apply_failure(self, fn, filename, expected_exception,
                       check_fn_in_exception = True):
        # Assert fn(filename) raises, and that the exception carries the
        # filename back (unless check_fn_in_exception is False).
        with self.assertRaises(expected_exception) as c:
            fn(filename)
        exc_filename = c.exception.filename
        # the "filename" exception attribute may be encoded
        if isinstance(exc_filename, str):
            filename = filename.encode(sys.getfilesystemencoding())
        if check_fn_in_exception:
            self.assertEqual(exc_filename, filename, "Function '%s(%r) failed "
                             "with bad filename in the exception: %r" %
                             (fn.__name__, filename, exc_filename))
    def test_failures(self):
        # Pass non-existing Unicode filenames all over the place.
        for name in self.files:
            name = "not_" + name
            self._apply_failure(open, name, IOError)
            self._apply_failure(os.stat, name, OSError)
            self._apply_failure(os.chdir, name, OSError)
            self._apply_failure(os.rmdir, name, OSError)
            self._apply_failure(os.remove, name, OSError)
            # listdir may append a wildcard to the filename, so dont check
            self._apply_failure(os.listdir, name, OSError, False)
    def test_open(self):
        # re-opening for write and statting each created file must succeed
        for name in self.files:
            f = open(name, 'w')
            f.write((name+'\n').encode("utf-8"))
            f.close()
            os.stat(name)
    # Skip the test on darwin, because darwin does normalize the filename to
    # NFD (a variant of Unicode NFD form). Normalize the filename to NFC, NFKC,
    # NFKD in Python is useless, because darwin will normalize it later and so
    # open(), os.stat(), etc. don't raise any exception.
    @unittest.skipIf(sys.platform == 'darwin', 'irrelevant test on Mac OS X')
    def test_normalize(self):
        # Names in a *different* normal form than the created ones must fail.
        files = set(f for f in self.files if isinstance(f, unicode))
        others = set()
        for nf in set(['NFC', 'NFD', 'NFKC', 'NFKD']):
            others |= set(normalize(nf, file) for file in files)
        others -= files
        for name in others:
            self._apply_failure(open, name, IOError)
            self._apply_failure(os.stat, name, OSError)
            self._apply_failure(os.chdir, name, OSError)
            self._apply_failure(os.rmdir, name, OSError)
            self._apply_failure(os.remove, name, OSError)
            # listdir may append a wildcard to the filename, so dont check
            self._apply_failure(os.listdir, name, OSError, False)
    # Skip the test on darwin, because darwin uses a normalization different
    # than Python NFD normalization: filenames are different even if we use
    # Python NFD normalization.
    @unittest.skipIf(sys.platform == 'darwin', 'irrelevant test on Mac OS X')
    def test_listdir(self):
        # listdir with a unicode argument must return the same set of names
        # (as unicode) that were created, and the same count as the str form.
        sf0 = set(self.files)
        f1 = os.listdir(test_support.TESTFN)
        f2 = os.listdir(unicode(test_support.TESTFN,
                                sys.getfilesystemencoding()))
        sf2 = set(os.path.join(unicode(test_support.TESTFN), f) for f in f2)
        self.assertEqual(sf0, sf2)
        self.assertEqual(len(f1), len(f2))
    def test_rename(self):
        # round-trip rename: unicode -> ascii "tmp" -> unicode again
        for name in self.files:
            os.rename(name, "tmp")
            os.rename("tmp", name)
    def test_directory(self):
        # create/chdir-into/remove a directory whose name is itself unicode
        dirname = os.path.join(test_support.TESTFN,
                               u'Gr\xfc\xdf-\u66e8\u66e9\u66eb')
        filename = u'\xdf-\u66e8\u66e9\u66eb'
        oldwd = os.getcwd()
        os.mkdir(dirname)
        os.chdir(dirname)
        try:
            with open(filename, 'w') as f:
                f.write((filename + '\n').encode("utf-8"))
            os.access(filename,os.R_OK)
            os.remove(filename)
        finally:
            os.chdir(oldwd)
            os.rmdir(dirname)
# Re-run the whole suite once per Unicode normal form applied to the
# generated filenames (see UnicodeFileTests.norm).
class UnicodeNFCFileTests(UnicodeFileTests):
    normal_form = 'NFC'
class UnicodeNFDFileTests(UnicodeFileTests):
    normal_form = 'NFD'
class UnicodeNFKCFileTests(UnicodeFileTests):
    normal_form = 'NFKC'
class UnicodeNFKDFileTests(UnicodeFileTests):
    normal_form = 'NFKD'
def test_main():
    # Run the base suite plus every normalization variant; always clean up
    # the scratch tree, even if a test run leaves files behind.
    try:
        test_support.run_unittest(
            UnicodeFileTests,
            UnicodeNFCFileTests,
            UnicodeNFDFileTests,
            UnicodeNFKCFileTests,
            UnicodeNFKDFileTests,
        )
    finally:
        deltree(test_support.TESTFN)
if __name__ == "__main__":
    test_main()
| |
import numpy as np
import scipy.sparse as sp
# analyze my solution by comparing objective functions
from sklearn import linear_model
import sys
import pandas as pd
class SparseLasso:
    """Lasso regression solved by cyclic coordinate descent on a scipy
    sparse (CSC) design matrix.

    Minimizes  ||Xw + w0 - y||^2 + lam * ||w||_1  (see objective()).
    """
    def __init__(self, X, y, lam, w=None, w0=0, delta=0.01,
                 verbose = False, max_iter = 100000):
        """
        :param X: design matrix, (N, d); converted to CSC internally.
        :param y: response vector, shape (N,); don't pass in a transposed y
        :param lam: L1 penalty strength.
        :param w: optional initial weights, shape (d,); don't pass in a
            transposed w.  Defaults to all ones.
        :param w0: initial intercept.
        :param delta: convergence tolerance on max |w_new - w_old|.
        :param verbose: print objective and weights after run().
        :param max_iter: cap on coordinate-descent sweeps.
        """
        self.X = sp.csc_matrix(X)
        self.N, self.d = self.X.shape
        self.y = y
        assert self.y.shape == (self.N, )
        if w is None:
            self.w = np.ones(self.d)
        elif type(w) == np.ndarray:
            self.w = w
        else:
            assert False, "w is not None or a numpy array."
        assert self.w.shape == (self.d ,), \
            "shape of w is {}".format(self.w.shape)
        self.w0 = w0
        self.lam = lam
        self.delta = delta
        self.verbose = verbose
        self.max_iter = max_iter
        # a is twice the column-wise dot of X with itself
        # (precomputed denominator of each coordinate update)
        self.a = np.zeros(self.d)
        for k in range(0, self.d):
            self.a[k] = 2 * (self.X[:, k].T.dot(self.X[:, k]))[0,0]
        # row-sliceable transpose, used for the incremental yhat update in step()
        self.XT = sp.csr_matrix(X.T)
    def sse(self):
        # SSE is sum of residuals squared
        error_v = self.X.dot(self.w) + self.w0 - self.y
        return error_v.T.dot(error_v)
    def rmse(self):
        # RMSE = root mean square error
        # the square root of the variance of the residuals
        mse = self.sse()/self.N # /N for the M in RMSE
        return mse**0.5  # **0.5 for the R in the RMSE
    def objective(self):
        # lasso objective: SSE + lam * L1-norm of the weights
        return self.sse() + self.lam*np.linalg.norm(self.w,1)
    def step(self):
        # One full cyclic pass: update intercept, then each coordinate w[k]
        # by soft-thresholding, keeping the cached predictions yhat in sync.
        yhat = self.X.dot(self.w) + self.w0
        old_w0 = self.w0
        # closed-form intercept update: add the mean residual
        self.w0 += (self.y - yhat).sum()/self.N
        yhat += self.w0 - old_w0
        for k in range(0, self.d):
            # Un-clever version:
            # ck = 2 * self.extract_scalar(Xk.T.dot(self.y - yhat + Xk*self.w[k, 0]))
            ck = 2 * self.X[:, k].T.dot(self.y - yhat)[0] + self.a[k]*self.w[k]
            old_wk = self.w[k]
            # soft-threshold ck at +/- lam; a[k] is the quadratic coefficient
            if ck < - self.lam:
                self.w[k] = (ck + self.lam)/self.a[k]
            elif ck > self.lam:
                self.w[k] = (ck - self.lam)/self.a[k]
            else:
                self.w[k] = 0.
            # incremental update of yhat for the single changed coordinate
            yhat += (self.XT[k,:]*(self.w[k] - old_wk)).toarray()[0]
    def run(self):
        # Iterate coordinate-descent sweeps until the largest weight change
        # falls below delta (or max_iter sweeps elapse). The objective must
        # never increase significantly between sweeps.
        for s in range(0, self.max_iter):
            old_objective = self.objective()
            old_w = self.w.copy()
            sys.stdout.write(".")  # progress tick, one per sweep
            self.step()
            assert not self.has_increased_significantly(
                old_objective, self.objective()), \
                "objective: {} --> {}".format(old_objective, self.objective())
            if abs(old_w - self.w).max() < self.delta:
                break
        if self.verbose:
            print(self.objective())
            print(self.w)
    @staticmethod
    def has_increased_significantly(old, new, sig_fig=10**(-4)):
        """
        Return if new is larger than old in the `sig_fig` significant digit.
        """
        # NOTE(review): the log10 comparison is an unusual way to express a
        # relative-increase test -- confirm the intended tolerance semantics.
        return(new > old and np.log10(1.-old/new) > -sig_fig)
def sklearn_comparison(X, y, lam, sparse = False):
    # Fit scikit-learn's Lasso on the same problem so its objective can be
    # compared with SparseLasso's. sklearn minimizes
    # (1/(2N))*||y - Xw||^2 + alpha*||w||_1, hence alpha = lam/(2N).
    # NOTE(review): the `sparse` parameter is accepted but never used.
    alpha = lam/(2.*X.shape[0])
    clf = linear_model.Lasso(alpha)
    clf.fit(X, y)
    # store solutions in my Lasso class so I can look @ obj fun
    skl_lasso = SparseLasso(X, y, lam, w0=0, verbose = False)
    skl_lasso.w = clf.coef_
    skl_lasso.w0 = clf.intercept_
    skl_objective_fun_value = skl_lasso.objective()
    return({"objective": skl_objective_fun_value,
            "weights": clf.coef_,
            "intercept": clf.intercept_})
def generate_random_data(N, d, sigma, k=5):
    """
    Generate a random sparse-regression problem Y = Xw + e.

    :param N: number of samples (rows of X).
    :param d: number of features; must exceed N (under-determined setting).
    :param sigma: standard deviation of the Gaussian noise added to Y.
    :param k: number of nonzero entries in the true weight vector.
        FIX: k was previously accepted but ignored (the first five weights
        were hard coded). The weights now alternate +10/-10 over the first
        k entries, which reproduces the old 10,-10,10,-10,10 pattern for
        the default k=5.
    :return: (X, Y, w) with shapes (N, d), (N,), (d,).
    """
    assert(d > N)
    assert 0 <= k <= d, "k must lie in [0, d]"
    # true intercept
    w0 = 0
    # design matrix of standard normal entries
    X = np.reshape(np.random.normal(0, 1, N*d),
                   newshape = (N, d), order='C')
    assert X.shape == (N, d)
    # true weights: first k entries alternate +10, -10; the rest are zero
    w = np.zeros(d, dtype=float)
    for i in range(k):
        w[i] = 10. if i % 2 == 0 else -10.
    assert w.shape == (d, )
    # additive Gaussian noise
    e = np.random.normal(0, sigma, N)
    assert e.shape == (N, )
    # noisy response
    Y = X.dot(w) + w0 + e
    Y.reshape(N, )
    assert X.shape == (N, d)
    assert Y.shape == (N, )
    assert w.shape == (d, )
    return X, Y, w
class RegularizationPath:
    """Solve the lasso over a geometrically decreasing sequence of lambda
    values, warm-starting each solve with the previous solution.

    Results accumulate in self.results_df (columns: lam, weights, w0)."""
    def __init__(self, X, y, lam_max, frac_decrease, steps, delta,
                 initial_w=None):
        self.X = X
        self.y = y
        self.N, self.d = self.X.shape
        self.lam_max = lam_max
        self.frac_decrease = frac_decrease
        self.steps = steps
        self.delta = delta
        # optional warm start for the very first lambda on the path
        self.initial_w = initial_w
    def analyze_lam(self, lam, w):
        # Fit a single lasso at this lambda, warm-started from w (or None).
        sl = SparseLasso(self.X, self.y, lam, w=w, delta=self.delta)
        sl.run()
        print("")
        assert sl.w.shape == (self.d, )
        return sl.w.copy(), sl.w0
    def walk_path(self):
        # protect the first value of lambda.
        lam = self.lam_max/self.frac_decrease
        # BUG FIX: the original condition was inverted
        # ("if self.initial_w is None: w_prev = self.initial_w else: None"),
        # so a caller-supplied warm start was always discarded. Use it.
        w_prev = self.initial_w
        # initialize a dataframe to store results in
        results = pd.DataFrame()
        for c in range(0, self.steps):
            lam = lam*self.frac_decrease
            print("Loop {}: solving weights for lambda = {}.".format(c+1, lam))
            w, w0 = self.analyze_lam(lam, w=w_prev)
            one_val = pd.DataFrame({"lam":[lam],
                                    "weights":[w],
                                    "w0": [w0]})
            results = pd.concat([results, one_val])
            # warm-start the next (smaller) lambda with this solution
            w_prev = w.copy()
        self.results_df = results.reset_index(drop=True)
class SyntheticDataRegPath():
    """Regularization path on synthetic data with a known sparse truth,
    plus precision/recall of the recovered support at each lambda."""
    def __init__(self, N, d, sigma, lam_max, frac_decrease, delta,
                 k=5, steps=10):
        self.N = N
        self.d = d
        self.sigma = sigma
        self.k = k
        self.init_lam = lam_max
        self.frac_decrease = frac_decrease
        X, y, true_weights = generate_random_data(N=N, d=d,
                                                  sigma=sigma, k=k)
        self.X = X
        self.y = y
        self.true_weights = true_weights
        self.lam_max = lam_max
        # solve the whole path immediately; results land in self.results_df
        reg_path = RegularizationPath(X=self.X, y=self.y,
                                      lam_max=self.lam_max,
                                      frac_decrease=self.frac_decrease,
                                      steps=steps, delta=delta)
        reg_path.walk_path()
        self.results_df = reg_path.results_df
    def analyze_path(self):
        # Attach support-recovery metrics for every lambda on the path.
        self.results_df['precision'] = \
            self.results_df['weights'].apply(self.calc_precision)
        self.results_df['recall'] = \
            self.results_df['weights'].apply(self.calc_recall)
    def weight_agreement(self, regression_weights, z):
        # Compare the estimated support (|w| > z) against the true support.
        # Returns (disagreement mask, true-support mask, estimated-support mask).
        true_weights_array = self.true_weights.reshape(1, self.d)
        abs_weights = np.absolute(true_weights_array)
        nonzero_weights = abs_weights > z
        reg_weight_array = regression_weights.reshape(1, self.d)
        abs_reg_weights = np.absolute(reg_weight_array)
        nonzero_reg_weights = abs_reg_weights > z
        disagreement = np.bitwise_xor(nonzero_weights, nonzero_reg_weights)
        return (disagreement, nonzero_weights, nonzero_reg_weights)
    def calc_precision(self, regression_weights, z=0.001):
        disagreement, nonzero_weights, nonzero_reg_weights = \
            self.weight_agreement(regression_weights, z)
        # precision = (# correct nonzeros in w^hat)/(# nonzeros in w^hat)
        # BUG FIX: the original summed the XOR (disagreement) mask, which
        # counts mistakes, not correctly recovered nonzeros.
        true_positives = np.bitwise_and(nonzero_weights, nonzero_reg_weights)
        return true_positives.sum() / nonzero_reg_weights.sum()
    def calc_recall(self, regression_weights, z=0.001):
        disagreement, nonzero_weights, nonzero_reg_weights = \
            self.weight_agreement(regression_weights, z)
        # recall = (# correct nonzeros in w^hat)/k
        # BUG FIX: same as calc_precision -- count true positives, not XOR.
        true_positives = np.bitwise_and(nonzero_weights, nonzero_reg_weights)
        return true_positives.sum() / self.k
class RegularizationPathTrainTest:
    """Run a regularization path on a training set and score each lambda's
    solution on both the training and validation sets."""
    def __init__(self, X_train, y_train, lam_max,
                 X_val, y_val, feature_names,
                 steps=10, frac_decrease=0.1, delta=0.01,
                 initial_w=None):
        self.X_train = X_train
        self.y_train = y_train
        self.lam_max = lam_max
        self.X_val = X_val
        self.y_val = y_val
        self.steps = steps
        self.frac_decrease = frac_decrease
        # solve the full path up front; per-lambda rows land in results_df
        reg_path = RegularizationPath(X=self.X_train, y=self.y_train,
                                      lam_max=self.lam_max,
                                      frac_decrease=self.frac_decrease,
                                      steps=self.steps,
                                      initial_w=initial_w,
                                      delta=delta)
        reg_path.walk_path()
        self.results_df = reg_path.results_df
        self.feature_names = feature_names
    def analyze_path(self):
        # Add per-lambda diagnostics: RMSE on both splits, sparsity count,
        # and the highest-magnitude features.
        self.results_df['RMSE (training)'] = \
            self.results_df.apply(
                lambda x: self.rmse_train(x['weights'], x['w0']), axis=1)
        self.results_df['RMSE (validation)'] = \
            self.results_df.apply(
                lambda x: self.rmse_val(x['weights'], x['w0']), axis=1)
        self.results_df['# nonzero coefficients'] = \
            self.results_df['weights'].apply(self.num_nonzero_coefs)
        self.results_df['top_features'] = \
            self.results_df['weights'].apply(self.top_features)
    def calc_rmse(self, X, w, w0, y):
        # re-use the formula implemented in SparseLasso.
        # put in a random lam b/c it isn't used.
        sl = SparseLasso(X, y, lam=0, verbose=False)
        # store solutions in my Lasso class so I can look @ obj fun
        sl.w = w.copy()
        sl.w0 = w0
        return sl.rmse()
    def rmse_train(self, w, w0):
        # RMSE of (w, w0) on the training split
        return self.calc_rmse(X=self.X_train, w=w, w0=w0, y=self.y_train)
    def rmse_val(self, w, w0):
        # RMSE of (w, w0) on the validation split
        return self.calc_rmse(X=self.X_val, w=w, w0=w0, y=self.y_val)
    def num_nonzero_coefs(self, w, z=0.001):
        # count coefficients whose magnitude exceeds the tolerance z
        nonzero_weights = np.absolute(w) > z
        return nonzero_weights.sum()
    def top_features(self, w, n_features=10):
        # Return (names, weights) of the n_features largest-|weight| features.
        w = w.copy()
        abs_w = np.absolute(w.copy())
        feature_names = self.feature_names
        best_indices = abs_w.argsort()[-n_features:][::-1] # get the top n_f features. (They are at the back of the list.)
        print(np.argsort(best_indices))
        print(np.argsort(w[best_indices]))
        return feature_names[best_indices].tolist(), w[best_indices]
| |
# likelihoods based on window calls as input
# all plants for a scaff together in all*txt
# reports likelihood of observed states in forward_backward
#v2:: bounded optimization of error rates
#v3:: mask entire markers based on Geno.summary
from scipy import optimize
from scipy.optimize import minimize_scalar
from scipy.special import gammaln
from math import exp,log
import sys
# plant to analyze; selects the input file g.<plantID>.txt
plantID=sys.argv[1]
#genotyping error probs
zy=0.00001 # edge for bounds (keeps the optimizer off the 0/1 boundaries)
rbp = 0.1/1000000.0 # recombination rate per bp (morgans / megabase)
def calc_v0(e_rates):
    # Maximize the HMM likelihood over the three error parameters
    # (e1, e2, beta) with bounded L-BFGS-B, starting from e_rates.
    # Returns [e1, e2, beta, lnL] at the optimum.
    def scipy_ln_like0(x):
        # the optimizer minimizes, so negate the log-likelihood
        return -LL(x)
    # bounds: e1, e2 in (~0, 0.5]; beta in (~0, ~1)
    bounds = [ (zy,0.5), (zy,0.5), (zy,1.0-zy) ]
    best, val, d = optimize.fmin_l_bfgs_b(scipy_ln_like0, e_rates, approx_grad=True, bounds=bounds)
    solution = list(best)
    # re-evaluate to report the (positive) log-likelihood at the optimum
    ln_l = -scipy_ln_like0(solution)
    solution.append(ln_l)
    #zbob=ln_like0(parents,famStr,RRL,RAL,AAL,FLnum,1, list(best),matplant)
    return solution
def foward_backward(obs, states, start_p,transition_probability,er):
    # Scaled forward-backward pass over one scaffold's window calls.
    # obs: list of called genotypes; states: ('AA','AB','BB');
    # start_p: prior over states; transition_probability: per-interval
    # transition matrices; er: [e1, e2, beta] emission error parameters.
    # Returns (alpha, beta, log-likelihood of obs).
    alpha=[{} for j in range(len(obs))] # forward:: alpha[j][X] is probability that true genotye is X at marker j (starts at 0)
    lnFactor=0.0
    for y in states:
        alpha[0][y] = start_p[y] * emission_probability(y,obs[0],er)
    for t in xrange(1, len(obs)):
        for y in states:
            alpha[t][y] = 0.0
            for y0 in states: # y0 is state at t-1
                alpha[t][y] +=alpha[t-1][y0] * transition_probability[t-1][y0][y] * emission_probability(y,obs[t],er)
        # rescale by the max to avoid underflow; accumulate log scale factors
        normalizer = max(alpha[t]['AA'],alpha[t]['AB'],alpha[t]['BB'])
        #print alpha[t]['AA'],alpha[t]['AB'],alpha[t]['BB']
        lnFactor+=log(normalizer)
        for y in states:
            alpha[t][y] = alpha[t][y]/normalizer
    # Likelihood of observed states
    LLobs=lnFactor+log(alpha[len(obs)-1]['AA']+alpha[len(obs)-1]['AB']+alpha[len(obs)-1]['BB'])
    beta=[{} for j in range(len(obs))] # backward:: beta[j][X] is probability that true genotye is X at marker j (starts at 0)
    for y in states:
        beta[len(obs)-1][y] = 1.0 #start_p[y]
    for t in xrange(len(obs)-2,-1,-1):
        #beta.append({})
        for y in states:
            beta[t][y] = 0.0 # y is state at t
            for y0 in states: # y0 is state at t+1
                beta[t][y] +=beta[t+1][y0] * transition_probability[t][y][y0] * emission_probability(y0,obs[t+1],er)
        # same rescaling trick on the backward pass
        normalizer = max(beta[t]['AA'],beta[t]['AB'],beta[t]['BB'])
        for y in states:
            beta[t][y] = beta[t][y]/normalizer
    #print alpha
    #print beta
    return alpha,beta,LLobs
def emission_probability(genotype,calledG,x):
    """
    Probability of observing window call `calledG` given true genotype
    `genotype`, under error parameters x = [e1, e2, beta].

    'NN' (no call) is uninformative: probability 1 under every true state.
    """
    e1 = x[0] # probability of sequencing error to het
    e2 = x[1]
    beta = x[2]
    if calledG == 'NN':
        return 1.0
    # emission table, laid out as called-genotype -> true-genotype -> prob
    emit = {
        'AA': {'AA': 1 - e1 - e2, 'AB': beta/2, 'BB': e2},
        'AB': {'AA': e1, 'AB': 1-beta, 'BB': e1},
        'BB': {'AA': e2, 'AB': beta/2, 'BB': 1-e1-e2},
    }
    return emit[calledG][genotype]
def LL(x):
    # Total log-likelihood of the observed window calls for error
    # parameters x = [e1, e2, beta], summed over all scaffolds.
    # Reads module globals: v1scaffs, Position, plantID, Gcalls, obsA,
    # states, start_probability, rbp.
    Total_LL=0.0
    for v1s in v1scaffs:
        total_snps=v1scaffs[v1s] # updated for each scaff
        # transition probs a global
        transition_probability=[{} for j in xrange(total_snps-1)] # global that is updated within LL(x)
        for x1 in xrange(total_snps-1): # v1 scaff
            # recombination fraction from the physical distance between markers
            dist=abs(Position[plantID][v1s][x1+1]-Position[plantID][v1s][x1])
            r = rbp*float(dist)
            # F2-style transition matrix for one marker interval
            transition_probability[x1] ={'AA' : {'AA':(1-r)**2.0,'AB':2*r*(1-r),'BB':r**2.0}, 'AB' : {'AA':r*(1-r),'AB':(1-r)**2.0 + r**2.0,'BB':r*(1-r)}, 'BB' : {'AA':r**2.0,'AB':2*r*(1-r),'BB':(1-r)**2.0} }
        # scaffolds with no informative calls contribute nothing
        if Gcalls[v1s]>0:
            fprbs,rprbs,llx=foward_backward(obsA[plantID][v1s],states,start_probability,transition_probability,x)
            #print v1s,Gcalls[v1s],"LL= ",llx
            #print "forward ",fprbs
            #print "backward ",rprbs
            #postProb=[{} for j in range(len(obsA[plantID][v1s]))] # forward:: alpha[j][X] is probability that true genotye is X at marker j (starts at 0)
            #for j in range(len(fprbs)):
            #	denom=0.0
            #	for y in states:
            #		denom+=(fprbs[j][y]*rprbs[j][y])
            #	for y in states:
            #		postProb[j][y]=(fprbs[j][y]*rprbs[j][y])/denom
            #print postProb
        else:
            llx=0.0
        Total_LL+=llx
    #print x,Total_LL
    return Total_LL
####################################################################################################
### Main Program
# Load the masked-marker list and this plant's window calls, then fit the
# three error parameters by maximum likelihood and print the solution.
states = ('AA','AB','BB')
start_probability = {'AA':0.25,'AB':0.5,'BB':0.25}
# markers flagged bad in Geno.summary; these are suppressed entirely
inZ = open("bad.marks.txt","rU")
badmark={}
for line_idx, line in enumerate(inZ):
    cols = line.replace('\n', '').split('\t')
    # 103a	100000
    key=cols[0]+"_"+cols[1]
    badmark[key]=1
Position={}
obsA={}
v1scaffs={}
Gcalls={}
cscaff=''
calls_total=0
# per-plant window-call file: plant, scaffold, position, called genotype
src =open("g."+plantID+".txt", "rU")
for line_idx, line in enumerate(src):
    cols = line.replace('\n', '').split('\t')
    # isg480	1	400000	AB
    key=cols[1]+"_"+cols[2]
    try:
        # membership test via lookup: masked markers are skipped
        uck=badmark[key]
        #print "suppressing ", key
    except KeyError:
        if plantID!=cols[0]:
            print "Whoa",plantID,cols[0]
        if line_idx==0:
            Position[plantID]={}
            obsA[plantID]={}
        if cols[1] !=cscaff: # new scaff
            Position[plantID][cols[1]]=[]
            obsA[plantID][cols[1]]=[]
            cscaff=cols[1]
            v1scaffs[cols[1]]=0
            Gcalls[cols[1]]=0
        Position[plantID][cols[1]].append(int(cols[2]))
        obsA[plantID][cols[1]].append(cols[3])
        v1scaffs[cols[1]]+=1 # will need to be updated if you do more than one plant in a run
        if cols[3] != 'NN':
            Gcalls[cols[1]]+=1
            calls_total+=1
#initial values for e1,e2,beta
e_rates=[0.01, 0.01,0.01]
zsol= calc_v0(e_rates)
# plant, informative-call count, e1, e2, beta, lnL
print plantID,calls_total,zsol[0],zsol[1],zsol[2],zsol[3]
| |
'''
A custom robotframework parser that retains line numbers (though
it doesn't retain character positions for each cell)
Note: this only works on pipe-separated files. It uses part of
the TxtReader robot parser to divide a line into cells.
(probably works for space-separated too. I haven't tried. )
Performance is pretty spiffy! At the time I write this (where
admittedly I don't fully parse everything) it is about 3x-5x faster
than the official robot parser. It can read a file with 500
test cases and 500 keywords in about 30ms, compared to 150ms
for the robot parser. Sweet.
'''
import re
import sys
import os
import os.path
from robot.parsing.txtreader import TxtReader
from robot.errors import DataError
from robot.utils import Utf8Reader
from util import timeit, Matcher
from tables import *
from testcase import Testcase
from rfkeyword import Keyword
from common import Row, Statement
class SuiteFolder(object):
    '''A directory of robot files.

    children holds SuiteFolder, SuiteFile and ResourceFile objects found by
    scanning `path` (recursively, via nested SuiteFolder instances).
    '''
    def __init__(self, path, parent=None):
        self.parent = parent
        self.name = os.path.splitext(os.path.basename(path))[0]
        self.path = os.path.abspath(path)
        self.children = []
        self._add_folder(self.path, parent=None)
    def __repr__(self):
        return "<SuiteFolder(%s)>" % self.path
    def walk(self):
        '''Generator which traverses the tree of child objects
        This will return a list of SuiteFolder, SuiteFile and ResourceFile objects
        '''
        for child in self.children:
            yield child
            if isinstance(child, SuiteFolder):
                # BUG FIX: this previously called child.traverse(), but
                # SuiteFolder defines no traverse() method, so walking any
                # tree with a nested folder raised AttributeError. Recurse
                # through walk() itself.
                for grandchild in child.walk():
                    yield grandchild
    def _add_folder(self, path, parent=None):
        # Scan one directory level, appending readable robot-related entries.
        for filename in os.listdir(path):
            fullpath = os.path.join(path, filename)
            (basename, ext) = os.path.splitext(filename.lower())
            if not os.access(fullpath, os.R_OK):
                continue  # silently skip unreadable entries
            try:
                if (os.path.isdir(fullpath)):
                    # recurse into non-hidden directories
                    if not basename.startswith("."):
                        self.children.append(SuiteFolder(fullpath, parent=self))
                else:
                    if (ext in (".xml", ".robot", ".txt", ".py", ".tsv")):
                        rf = RobotFile.factory(fullpath, parent=self)
                        self.children.append(rf)
            except Exception as e:
                # I really need to get the logging situation figured out.
                # (print output unchanged; syntax modernized to py2.6+/py3)
                print("bummer: " + str(e))
class RobotFile(object):
'''
Terminology:
- A file is a set of tables
- A table begins with a heading and extends to the next table or EOF
- Each table may be made up of smaller tables that define test cases or keywords
- Each line of text in a table becomes a "Row".
- A Row object contains a list of cells.
- A cell is all of the data between pipes, stripped of leading and
trailing spaces
'''
TYPE_RESOURCE = "resource"
TYPE_SUITE = "suite"
@classmethod
def factory(cls, path, parent=None):
'''Return an instance of SuiteFile, ResourceFile, or SuiteFolder
Exactly which is returned depends on the contents of the
file. If there is a testcase table, this will return an
instance of SuiteFile, otherwise it will return an
instance of ResourceFile.
'''
basename = os.path.basename(path)
if (re.search(r'__init__.(txt|robot|html|tsv)$', basename)):
return None
if os.path.isdir(path):
rf = SuiteFolder(path, parent)
else:
rf = SuiteFile(path, parent)
for table in rf.tables:
if isinstance(table, TestcaseTable):
rf.__class__ = SuiteFile
return rf
rf.__class__ = ResourceFile
return rf
def __init__(self, path, parent=None):
self.parent = parent
self.name = os.path.splitext(os.path.basename(path))[0]
self.path = os.path.abspath(path)
self.tables = []
self.rows = []
self._load(path)
def _load(self, path):
'''
The general idea is to do a quick parse, creating a list of
tables. Each table is nothing more than a list of rows, with
each row being a list of cells. Additional parsing such as
combining rows into statements is done on demand. This first
pass is solely to read in the plain text and organize it by table.
'''
self.tables = []
current_table = DefaultTable(self)
with Utf8Reader(path) as f:
self.raw_text = f.read()
f._file.seek(0) # bleh; wish this wasn't a private property
matcher = Matcher(re.IGNORECASE)
for linenumber, raw_text in enumerate(f.readlines()):
linenumber += 1; # start counting at 1 rather than zero
# this mimics what the robot TSV reader does --
# it replaces non-breaking spaces with regular spaces,
# and strips trailing whitespace
raw_text = raw_text.replace(u'\xA0', ' ')
raw_text = raw_text.rstrip()
# FIXME: I'm keeping line numbers but throwing away
# where each cell starts. I should be preserving that
# (though to be fair, robot is throwing that away so
# I'll have to write my own splitter if I want to save
# the character position)
# cells = self._split_row(raw_text)
cells = TxtReader.split_row(raw_text)
_heading_regex = r'^\s*\*+\s*(.*?)[ *]*$'
if matcher(_heading_regex, cells[0]):
# we've found the start of a new table
table_name = matcher.group(1)
current_table = TableFactory(self, linenumber, table_name)
self.tables.append(current_table)
else:
current_table.append(Row(linenumber, raw_text, cells))
# N.B. this is a static method mainly because I want to be
# able to unit-test it without creating an instance of the class.
@staticmethod
def _split_row(line):
'''
DANGER WILL ROBINSON
this doesn't work yet. I need to write a unit test for it to
see how it differs from robot's splitter.
Based off of robot's row splitter, but it remembers the
character position of each cell
'''
_space_splitter = re.compile('( {2,})')
# oddly, robot uses a lookahead for a trailing space, but it
# seems like a greedy match of surrounding whitespace makes
# more sense (and gives me better data to remember column
# positions
_pipe_splitter = re.compile('( +\|(?: +|$))')
_pipe_starts = ('|', '| ')
if '\t' in line:
line = line.replace('\t', ' ')
if line[:2] in _pipe_starts:
line = line[1:-1] if line.endswith(' |') else line[1:]
cells = [cell.strip() for cell in _pipe_splitter.split(line)]
return cells[::2] # return every other element, skipping the separators
else:
return _space_splitter.split(line)
def __repr__(self):
return "<RobotFile(%s)>" % self.path
@property
def settings(self):
settings = {}
for table in self.tables:
if isinstance(table, SettingTable):
for statement in table.statements:
settings[statement[0].lower()] = statement[1:]
return settings
@property
def type(self):
'''Return 'suite' or 'resource' or None
This will return 'suite' if a testcase table is found;
It will return 'resource' if at least one robot table
is found. If no tables are found it will return None
'''
robot_tables = [table for table in self.tables if not isinstance(table, UnknownTable)]
if len(robot_tables) == 0:
# no robot tables were found
return self.TYPE_RESOURCE
for table in self.tables:
if isinstance(table, TestcaseTable):
return self.TYPE_SUITE
return self.TYPE_RESOURCE
@property
def keywords(self):
'''Generator which returns all keywords in the suite'''
for table in self.tables:
if isinstance(table, KeywordTable):
for keyword in table.keywords:
yield keyword
@property
def testcases(self):
'''Generator which returns all test cases in the suite'''
for table in self.tables:
if isinstance(table, TestcaseTable):
for testcase in table.testcases:
yield testcase
def dump(self):
'''Regurgitate the tables and rows'''
for table in self.tables:
print "*** %s ***" % table.name
table.dump()
def TableFactory(parent, linenumber, name):
    '''Map a table heading *name* to the matching *Table instance.

    Unrecognized (or missing) headings produce an UnknownTable.
    '''
    if name is None:
        return UnknownTable(parent, linenumber, name)
    match = Matcher(re.IGNORECASE)
    dispatch = (
        (r'settings?|metadata', SettingTable),
        (r'variables?', VariableTable),
        (r'test( cases?)', TestcaseTable),
        (r'(user )?keywords?', KeywordTable),
    )
    for pattern, table_class in dispatch:
        if match(pattern, name):
            return table_class(parent, linenumber, name)
    return UnknownTable(parent, linenumber, name)
class SuiteFile(RobotFile):
    # A robot file that contains a testcase table (see RobotFile.factory).
    def __repr__(self):
        return "<SuiteFile(%s)>" % self.path
class ResourceFile(RobotFile):
    # A robot file with no testcase table (see RobotFile.factory).
    def __repr__(self):
        return "<ResourceFile(%s)>" % self.path
class TestcaseTable(AbstractContainerTable):
    # Container table whose child rows form Testcase objects.
    _childClass = Testcase
    def __init__(self, parent, *args, **kwargs):
        super(TestcaseTable, self).__init__(parent, *args, **kwargs)
        # expose the generic _children list under a table-specific name
        self.testcases = self._children
class KeywordTable(AbstractContainerTable):
    # Container table whose child rows form Keyword objects.
    _childClass = Keyword
    def __init__(self, parent, *args, **kwargs):
        super(KeywordTable, self).__init__(parent, *args, **kwargs)
        # expose the generic _children list under a table-specific name
        self.keywords = self._children
@timeit
def dump(suite):
    '''Force a full parse of every test case in *suite*.

    Timing exercise only: building the steps forces the lazy parse,
    and the results are intentionally discarded.
    '''
    for table in suite.tables:
        if isinstance(table, TestcaseTable):
            for tc in table.testcases:
                # force parsing of individual steps
                for step in tc.steps:
                    pass
def try_to_run_it(suite):
    '''Experimental scratch code for running parsed tests.

    NOTE(review): the *suite* parameter is immediately shadowed by a
    fresh, empty TestSuite, so the loop below iterates that empty
    suite -- confirm this is intentional before relying on it.
    '''
    print "here we go!"
    # create a test suite object
    from robot.api import TestSuite
    suite = TestSuite('Autogenerated Suite')
    for testcase in suite.testcases:
        import pdb; pdb.set_trace()
if __name__ == "__main__":
    # Benchmark harness: parse the file named on the command line with
    # both the official robot parser and this module's parser.
    from robot.parsing import TestData, ResourceFile
    import sys
    # parse with the robot parser and this parser, to
    # see which is faster. Of course, this parser will
    # be faster :-)
    @timeit
    def test_robot():
        # Parse sys.argv[1] with the official robot parser.
        try:
            suite = TestData(parent=None, source=sys.argv[1])
        except DataError:
            # if loading the suite failed, assume it's a resource file
            # (bad assumption, but _whatever_)
            suite = ResourceFile(source=sys.argv[1])
        return suite
    @timeit
    def test_mine():
        # Parse the same file with this module's parser, forcing the
        # lazy per-testcase parse so the timing is comparable.
        suite = RobotFile(sys.argv[1])
        # force parsing of every line
        for tc in suite.testcases:
            statements = tc.statements
            tags = tc.tags
        return suite
    if len(sys.argv) == 1:
        print "give me a filename on the command line"
        sys.exit(1)
    suite1 = test_robot()
    suite2 = test_mine()
| |
from unittest.mock import call, patch, PropertyMock, Mock
from directory_cms_client import cms_api_client
import pytest
from django.core.urlresolvers import resolve, reverse
from directory_constants import slugs
from core.tests.helpers import create_response
from industry import constants, views
from industry.views import IndustryDetailCMSView
# URLs exercised by the parametrized CMS-page tests below.
details_cms_urls = (
    reverse('sector-article', kwargs={'slug': 'article'}),
)
list_cms_urls = (reverse('sector-list'),)
cms_urls = details_cms_urls + list_cms_urls
@pytest.fixture
def contact_page_data(breadcrumbs):
    '''CMS payload for the industry contact page.'''
    return {
        'breadcrumbs': breadcrumbs,
        'industry_options': [
            {
                'breadcrumbs_label': 'Agriculture',
                'meta': {'slug': 'agriculture'},
            },
            {
                'breadcrumbs_label': 'Technology',
                'meta': {'slug': 'industry'},
            },
        ],
        'meta': {
            'languages': ['en-gb'],
            'slug': 'industry-contact',
            'url': 'https://www.example.com/industries/contact-us/',
            'pk': 'industry',
        }
    }
@pytest.fixture
def industry_detail_data(breadcrumbs):
    '''CMS payload for an industry detail page (slug "industry").'''
    return {
        'search_filter_sector': ['value'],
        'search_filter_text': 'great',
        'search_filter_showcase_only': False,
        'breadcrumbs': breadcrumbs,
        'breadcrumbs_label': 'Technology',
        'meta': {
            'languages': ['en-gb'],
            'slug': 'industry',
            'url': 'https://www.example.com/1/slug/',
            'pk': 'industry',
        }
    }
@pytest.fixture
def industry_list_data(breadcrumbs):
    '''CMS payload for the industries landing page: two of the four
    industries are flagged for the showcase.'''
    return {
        'title': 'the page',
        'industries': [
            {'title': 'one', 'show_on_industries_showcase_page': False},
            {'title': 'two', 'show_on_industries_showcase_page': False},
            {'title': 'three', 'show_on_industries_showcase_page': True},
            {'title': 'four', 'show_on_industries_showcase_page': True}
        ],
        'breadcrumbs': breadcrumbs,
        'meta': {
            'languages': ['en-gb'],
            'slug': 'industries-landing-page',
            'pk': 'article',
        },
    }
@pytest.fixture
def industry_list_no_showcase_data(industry_list_data):
    '''Landing-page payload with 19 industries, none showcase-flagged.'''
    data = industry_list_data
    data['industries'] = [
        {'title': i, 'show_on_industries_showcase_page': False}
        for i in range(19)
    ]
    return data
@pytest.fixture
def industry_article_data(breadcrumbs):
    '''CMS payload for a sector article page (slug "article").'''
    return {
        'title': 'Hello world',
        'body': '<h2>Hello world</h2>',
        'date': '2018-01-01',
        'breadcrumbs': breadcrumbs,
        'meta': {
            'languages': ['en-gb'],
            'slug': 'article',
            'url': 'https://www.example.com/1/slug/',
            'pk': '3',
        }
    }
@pytest.fixture(autouse=True)
def mock_lookup_by_slug(
    industry_detail_data, industry_article_data, contact_page_data,
    industry_list_data, request
):
    '''Auto-patch cms_api_client.lookup_by_slug to serve the fixture
    payload whose meta.slug matches the requested slug.

    Tests marked "noautofixture" get None back instead, so they can
    install their own patches.
    '''
    def side_effect(slug, *args, **kwargs):
        if 'noautofixture' in request.keywords:
            return
        resources = [
            industry_detail_data,
            industry_article_data,
            contact_page_data,
            industry_list_data,
        ]
        # KeyError here means the test requested a slug no fixture provides
        return {
            resource['meta']['slug']: create_response(json_payload=resource)
            for resource in resources
        }[slug]
    stub = patch.object(
        cms_api_client, 'lookup_by_slug', side_effect=side_effect
    )
    yield stub.start()
    stub.stop()
@pytest.fixture(autouse=True)
def mock_get_showcase_companies():
    '''Auto-patch industry.views.get_showcase_companies to return [].'''
    with patch('industry.views.get_showcase_companies', return_value=[]) as stub:
        yield stub
@pytest.mark.parametrize('url', cms_urls)
def test_cms_pages(settings, client, url):
    '''Every CMS-backed page renders with the fixture data.'''
    response = client.get(url)
    assert response.status_code == 200
@pytest.mark.parametrize('url', details_cms_urls)
def test_cms_api_client_params(
    settings, client, url, mock_lookup_by_slug
):
    '''draft_token and lang query params are forwarded to the CMS client.'''
    response = client.get(url, {'draft_token': '123', 'lang': 'de'})
    assert response.status_code == 200
    assert mock_lookup_by_slug.call_count == 1
    assert mock_lookup_by_slug.call_args == call(
        slug=resolve(url).kwargs['slug'],
        draft_token='123',
        language_code='de',
    )
@pytest.mark.parametrize('url', (
    reverse('sector-article', kwargs={'slug': 'article'}),
))
def test_cms_pages_cms_slug(settings, client, url):
    '''An article page resolved by CMS slug renders successfully.'''
    response = client.get(url)
    assert response.status_code == 200
@pytest.mark.parametrize('url', details_cms_urls)
def test_cms_pages_cms_page_404(settings, client, url, mock_lookup_by_slug):
    '''A CMS 404 for the slug propagates as a 404 response.'''
    mock_lookup_by_slug.side_effect = None
    mock_lookup_by_slug.return_value = create_response(status_code=404)
    response = client.get(url)
    assert response.status_code == 404
@patch.object(IndustryDetailCMSView, 'international_industry_page',
              new_callable=PropertyMock)
def test_industry_page_context_curated_feature_enabled(
    mock_page_exists, mock_get_showcase_companies, client,
    industry_detail_data
):
    '''With showcase-only filtering on, the companies lookup is
    restricted to showcase companies.'''
    # no international page -> no redirect, page renders locally
    mock_page_exists.return_value = None
    industry_detail_data['search_filter_showcase_only'] = True
    url = reverse('sector-detail-verbose', kwargs={'slug': 'industry'})
    response = client.get(url)
    assert mock_get_showcase_companies.call_count == 1
    assert mock_get_showcase_companies.call_args == call(
        sectors=['value'], is_showcase_company=True, size=6, term='great'
    )
    assert response.context_data['page'] == industry_detail_data
    assert response.template_name == ['industry/detail.html']
@patch.object(IndustryDetailCMSView, 'international_industry_page',
              new_callable=PropertyMock)
def test_industry_page_context_curated_feature_disabled(
    mock_page_exists, mock_get_showcase_companies, settings, client,
    industry_detail_data
):
    '''With showcase-only filtering off, the companies lookup omits
    the is_showcase_company restriction.'''
    # no international page -> no redirect, page renders locally
    mock_page_exists.return_value = None
    industry_detail_data['search_filter_showcase_only'] = False
    url = reverse('sector-detail-verbose', kwargs={'slug': 'industry'})
    response = client.get(url)
    assert mock_get_showcase_companies.call_count == 1
    assert mock_get_showcase_companies.call_args == call(
        sectors=['value'], size=6, term='great'
    )
    assert response.context_data['page'] == industry_detail_data
    assert response.template_name == ['industry/detail.html']
def test_article_page_context(settings, client, industry_article_data):
    '''The article view exposes the CMS payload and uses its template.'''
    url = reverse('sector-article', kwargs={'slug': 'article'})
    response = client.get(url)
    assert response.context_data['page'] == industry_article_data
    assert response.template_name == ['industry/article.html']
@patch.object(cms_api_client, 'lookup_by_slug')
def test_industries_page_context(
    mock_get_industries_landing_page, settings, client, industry_list_data
):
    '''The landing page surfaces only the showcase-flagged industries.

    NOTE(review): the first parameter is the patched lookup_by_slug
    mock; its name is misleading -- consider renaming to match the
    sibling test below.
    '''
    mock_get_industries_landing_page.return_value = create_response(
        json_payload=industry_list_data,
    )
    response = client.get(reverse('sector-list'))
    assert response.status_code == 200
    assert response.context_data['page'] == industry_list_data
    assert response.context_data['showcase_industries'] == [
        industry_list_data['industries'][2],
        industry_list_data['industries'][3],
    ]
@patch.object(cms_api_client, 'lookup_by_slug')
def test_industries_page_context_no_showcase_industries(
    mock_lookup_by_slug, settings, client, industry_list_no_showcase_data
):
    '''With no showcase-flagged industries, the first 9 are shown instead.'''
    mock_lookup_by_slug.side_effect = None
    mock_lookup_by_slug.return_value = create_response(
        json_payload=industry_list_no_showcase_data,
    )
    response = client.get(reverse('sector-list'))
    assert response.status_code == 200
    assert response.context_data['page'] == industry_list_no_showcase_data
    assert response.context_data['showcase_industries'] == (
        industry_list_no_showcase_data['industries'][:9]
    )
@patch.object(cms_api_client, 'lookup_by_slug')
def test_industries_page_not_found(mock_lookup_by_slug, settings, client):
    '''A CMS 404 for the landing page yields a 404 response.'''
    mock_lookup_by_slug.return_value = create_response(status_code=404)
    response = client.get(reverse('sector-list'))
    assert response.status_code == 404
@patch.object(
    views.IndustryDetailContactCMSView.form_class.action_class, 'save'
)
def test_contact_form_submit_with_comment_forms_api(
    mock_save, client, captcha_stub
):
    '''A valid detail-page contact submission redirects to the "sent"
    page and saves twice with identical serialized payloads.'''
    mock_save.return_value = create_response(status_code=200)
    url = reverse('sector-detail-cms-contact', kwargs={'slug': 'industry'})
    data = {
        'full_name': 'Jeff',
        'email_address': 'jeff@example.com',
        'phone_number': '1231312',
        'sector': 'industry',
        'organisation_name': 'My name is Jeff',
        'organisation_size': '1-10',
        'country': 'United Kingdom',
        'body': 'hello',
        'source': constants.MARKETING_SOURCES[1][0],
        'terms_agreed': True,
        'g-recaptcha-response': captcha_stub,
    }
    response = client.post(url, data)
    assert response.status_code == 302
    assert response.url == (
        reverse('sector-detail-cms-contact-sent', kwargs={'slug': 'industry'})
    )
    # terms_agreed and the captcha token are consumed by the form and
    # do not appear in the saved payload
    assert mock_save.call_count == 2
    assert mock_save.call_args_list[0] == mock_save.call_args_list[1] == call({
        'sector': 'industry',
        'organisation_name': 'My name is Jeff',
        'source_other': '',
        'organisation_size': '1-10',
        'email_address': 'jeff@example.com',
        'phone_number': '1231312',
        'country': 'United Kingdom',
        'full_name': 'Jeff',
        'body': 'hello',
        'source': constants.MARKETING_SOURCES[1][0],
    })
def test_contact_form_prefills_sector(client, industry_detail_data):
    '''The contact form initial sector comes from the CMS page slug.'''
    url = reverse('sector-detail-cms-contact', kwargs={'slug': 'industry'})
    response = client.get(url)
    assert response.context_data['form'].initial['sector'] == (
        industry_detail_data['meta']['slug']
    )
@patch.object(views.IndustryLandingPageContactCMSView.form_class, 'save')
def test_sector_list_submit_with_comment_forms_api(
    mock_save, client, captcha_stub, settings
):
    '''A valid list-page contact submission redirects to "sent" and
    sends one agent notification plus one user confirmation.'''
    mock_save.return_value = create_response(status_code=200)
    url = reverse('sector-list-cms-contact')
    data = {
        'full_name': 'Jeff',
        'email_address': 'jeff@example.com',
        'phone_number': '3223232',
        'sector': 'industry',
        'organisation_name': 'My name is Jeff',
        'organisation_size': '1-10',
        'country': 'United Kingdom',
        'body': 'hello',
        'source': constants.MARKETING_SOURCES[1][0],
        'terms_agreed': True,
        'g-recaptcha-response': captcha_stub,
    }
    response = client.post(url, data)
    assert response.status_code == 302
    assert response.url == (
        reverse('sector-list-cms-contact-sent')
    )
    assert mock_save.call_count == 2
    # first save: notification to the agent inbox
    assert mock_save.call_args_list[0] == call(
        email_address='buying@example.com',
        form_url='/trade/industries/contact/',
        sender={
            'email_address': 'jeff@example.com',
            'country_code': 'United Kingdom'
        },
        spam_control={
            'contents': ['hello']},
        template_id=settings.CONTACT_INDUSTRY_AGENT_TEMPLATE_ID,
    )
    # second save: confirmation email back to the submitter
    assert mock_save.call_args_list[1] == call(
        email_address='jeff@example.com',
        form_url='/trade/industries/contact/',
        template_id=settings.CONTACT_INDUSTRY_USER_TEMPLATE_ID,
        email_reply_to_id=settings.CONTACT_INDUSTRY_USER_REPLY_TO_ID,
    )
def test_contact_industry_detail_sent_no_referer(client):
    '''Visiting the "sent" page without a referer redirects to the form.'''
    url = reverse(
        'sector-detail-cms-contact-sent', kwargs={'slug': 'industry'}
    )
    expected_url = reverse(
        'sector-detail-cms-contact', kwargs={'slug': 'industry'}
    )
    response = client.get(url, {})
    assert response.status_code == 302
    assert response.url == expected_url
def test_contact_industry_detail_sent_incorrect_referer(client):
    '''A foreign referer also redirects back to the contact form.'''
    url = reverse(
        'sector-detail-cms-contact-sent', kwargs={'slug': 'industry'}
    )
    expected_url = reverse(
        'sector-detail-cms-contact', kwargs={'slug': 'industry'}
    )
    referer_url = 'http://www.googe.com'
    response = client.get(url, {}, HTTP_REFERER=referer_url)
    assert response.status_code == 302
    assert response.url == expected_url
def test_contact_industry_detail_sent_correct_referer(client):
    '''Arriving from the contact form renders the "sent" template.'''
    url = reverse(
        'sector-detail-cms-contact-sent', kwargs={'slug': 'industry'}
    )
    referer_url = reverse(
        'sector-detail-cms-contact', kwargs={'slug': 'industry'}
    )
    response = client.get(url, {}, HTTP_REFERER=referer_url)
    assert response.status_code == 200
    assert response.template_name == [
        views.IndustryDetailContactCMSSentView.template_name
    ]
def test_contact_industry_list_sent_no_referer(client):
    '''List-page "sent" without a referer redirects to the contact form.'''
    url = reverse(
        'sector-list-cms-contact-sent',
        kwargs={'slug': slugs.FIND_A_SUPPLIER_INDUSTRY_CONTACT})
    expected_url = reverse(
        'sector-list-cms-contact',
        kwargs={'slug': slugs.FIND_A_SUPPLIER_INDUSTRY_CONTACT})
    response = client.get(url, {})
    assert response.status_code == 302
    assert response.url == expected_url
def test_contact_industry_list_sent_incorrect_referer(client):
    '''List-page "sent" with a foreign referer redirects to the form.'''
    url = reverse(
        'sector-list-cms-contact-sent',
        kwargs={'slug': slugs.FIND_A_SUPPLIER_INDUSTRY_CONTACT})
    expected_url = reverse(
        'sector-list-cms-contact',
        kwargs={'slug': slugs.FIND_A_SUPPLIER_INDUSTRY_CONTACT})
    referer_url = 'http://www.googe.com'
    response = client.get(url, {}, HTTP_REFERER=referer_url)
    assert response.status_code == 302
    assert response.url == expected_url
def test_contact_industry_list_sent_correct_referer(client):
    '''Arriving from the list contact form renders the "sent" template.'''
    url = reverse(
        'sector-list-cms-contact-sent',
        kwargs={'slug': slugs.FIND_A_SUPPLIER_INDUSTRY_CONTACT})
    referer_url = reverse(
        'sector-list-cms-contact',
        kwargs={'slug': slugs.FIND_A_SUPPLIER_INDUSTRY_CONTACT})
    response = client.get(url, {}, HTTP_REFERER=referer_url)
    assert response.status_code == 200
    assert response.template_name == [
        views.IndustryLandingPageContactCMSSentView.template_name
    ]
@pytest.mark.noautofixture
@patch('directory_cms_client.client.cms_api_client.lookup_by_slug')
def test_industry_page_exists_in_international(mock_get_page, client):
    '''If the page exists in Great International, redirect to it.'''
    mocked_response = Mock(status_code=200)
    mocked_response.json.return_value = {'full_url': 'http://test.com'}
    mock_get_page.return_value = mocked_response
    url = reverse('sector-detail-verbose', kwargs={'slug': 'foo'})
    response = client.get(url)
    assert mock_get_page.call_args == call(draft_token=None,
                                           language_code='en-gb',
                                           service_name='GREAT_INTERNATIONAL',
                                           slug='foo')
    assert response.status_code == 302
    assert response.url == 'http://test.com'
@patch.object(IndustryDetailCMSView, 'international_industry_page',
              new_callable=PropertyMock)
def test_industry_page_does_not_exist_in_international(mock_page_exists,
                                                       client):
    '''Without an international page, the local detail page renders.'''
    mock_page_exists.return_value = None
    url = reverse('sector-detail-verbose', kwargs={'slug': 'industry'})
    response = client.get(url)
    assert response.status_code == 200
| |
# Copyright 2013, Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import mock
from mock import patch
from oslo_utils import units
import six
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
from glance.common import exception
from glance.common import store_utils
import glance.quota
from glance.tests.unit import utils as unit_test_utils
from glance.tests import utils as test_utils
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
class FakeContext(object):
    # Minimal request-context stand-in: a non-admin owner.
    owner = 'someone'
    is_admin = False
class FakeImage(object):
    '''Bare-bones image double used by the quota tests.

    The defaults are class attributes; tests override them per
    instance as needed.
    '''
    size = None
    image_id = 'someid'
    locations = [{'url': 'file:///not/a/path', 'metadata': {}}]
    tags = set([])
    def __init__(self, **kwargs):
        self.extra_properties = kwargs.get('extra_properties', {})
    def set_data(self, data, size=None):
        # Record the total length of the chunks; the *size* hint is
        # accepted but ignored, mirroring how callers invoke this.
        self.size = 0
        for chunk in data:
            self.size += len(chunk)
class TestImageQuota(test_utils.BaseTestCase):
    '''Exercise user storage quota enforcement in glance.quota.ImageProxy.'''
    def setUp(self):
        super(TestImageQuota, self).setUp()
    def tearDown(self):
        super(TestImageQuota, self).tearDown()
    def _get_image(self, location_count=1, image_size=10):
        # Build an ImageProxy backed by fake db/store plumbing, with
        # location_count active locations of image_size bytes each.
        context = FakeContext()
        db_api = unit_test_utils.FakeDB()
        store_api = unit_test_utils.FakeStoreAPI()
        store = unit_test_utils.FakeStoreUtils(store_api)
        base_image = FakeImage()
        base_image.image_id = 'xyz'
        base_image.size = image_size
        image = glance.quota.ImageProxy(base_image, context, db_api, store)
        locations = []
        for i in range(location_count):
            locations.append({'url': 'file:///g/there/it/is%d' % i,
                              'metadata': {}, 'status': 'active'})
        image_values = {'id': 'xyz', 'owner': context.owner,
                        'status': 'active', 'size': image_size,
                        'locations': locations}
        db_api.image_create(context, image_values)
        return image
    def test_quota_allowed(self):
        # Uploading exactly quota bytes must succeed.
        quota = 10
        self.config(user_storage_quota=str(quota))
        context = FakeContext()
        db_api = unit_test_utils.FakeDB()
        store_api = unit_test_utils.FakeStoreAPI()
        store = unit_test_utils.FakeStoreUtils(store_api)
        base_image = FakeImage()
        base_image.image_id = 'id'
        image = glance.quota.ImageProxy(base_image, context, db_api, store)
        data = '*' * quota
        base_image.set_data(data, size=None)
        image.set_data(data)
        self.assertEqual(quota, base_image.size)
    def _test_quota_allowed_unit(self, data_length, config_quota):
        # Upload data_length bytes against a quota given with a unit
        # suffix (e.g. '1KB') and verify it is accepted.
        self.config(user_storage_quota=config_quota)
        context = FakeContext()
        db_api = unit_test_utils.FakeDB()
        store_api = unit_test_utils.FakeStoreAPI()
        store = unit_test_utils.FakeStoreUtils(store_api)
        base_image = FakeImage()
        base_image.image_id = 'id'
        image = glance.quota.ImageProxy(base_image, context, db_api, store)
        data = '*' * data_length
        base_image.set_data(data, size=None)
        image.set_data(data)
        self.assertEqual(data_length, base_image.size)
    def test_quota_allowed_unit_b(self):
        self._test_quota_allowed_unit(10, '10B')
    def test_quota_allowed_unit_kb(self):
        self._test_quota_allowed_unit(10, '1KB')
    def test_quota_allowed_unit_mb(self):
        self._test_quota_allowed_unit(10, '1MB')
    def test_quota_allowed_unit_gb(self):
        self._test_quota_allowed_unit(10, '1GB')
    def test_quota_allowed_unit_tb(self):
        self._test_quota_allowed_unit(10, '1TB')
    def _quota_exceeded_size(self, quota, data,
                             deleted=True, size=None):
        # Attempt an upload that exceeds the quota and assert that
        # StorageQuotaFull is raised. When deleted is True the image's
        # existing location is removed first, freeing its usage.
        self.config(user_storage_quota=quota)
        context = FakeContext()
        db_api = unit_test_utils.FakeDB()
        store_api = unit_test_utils.FakeStoreAPI()
        store = unit_test_utils.FakeStoreUtils(store_api)
        base_image = FakeImage()
        base_image.image_id = 'id'
        image = glance.quota.ImageProxy(base_image, context, db_api, store)
        if deleted:
            with patch.object(store_utils, 'safe_delete_from_backend'):
                store_utils.safe_delete_from_backend(
                    context,
                    image.image_id,
                    base_image.locations[0])
        self.assertRaises(exception.StorageQuotaFull,
                          image.set_data,
                          data,
                          size=size)
    def test_quota_exceeded_no_size(self):
        quota = 10
        data = '*' * (quota + 1)
        # NOTE(jbresnah) When the image size is None it means that it is
        # not known. In this case the only time we will raise an
        # exception is when there is no room left at all, thus we know
        # it will not fit.
        # That's why 'get_remaining_quota' is mocked with return_value = 0.
        # NOTE(review): patching glance.api.common assumes the glance
        # import above pulls in glance.api.common -- confirm.
        with patch.object(glance.api.common, 'get_remaining_quota',
                          return_value=0):
            self._quota_exceeded_size(str(quota), data)
    def test_quota_exceeded_with_right_size(self):
        quota = 10
        data = '*' * (quota + 1)
        self._quota_exceeded_size(str(quota), data, size=len(data),
                                  deleted=False)
    def test_quota_exceeded_with_right_size_b(self):
        quota = 10
        data = '*' * (quota + 1)
        self._quota_exceeded_size('10B', data, size=len(data),
                                  deleted=False)
    def test_quota_exceeded_with_right_size_kb(self):
        quota = units.Ki
        data = '*' * (quota + 1)
        self._quota_exceeded_size('1KB', data, size=len(data),
                                  deleted=False)
    def test_quota_exceeded_with_lie_size(self):
        # Caller claims a size under quota but streams more data.
        quota = 10
        data = '*' * (quota + 1)
        self._quota_exceeded_size(str(quota), data, deleted=False,
                                  size=quota - 1)
    def test_append_location(self):
        new_location = {'url': 'file:///a/path', 'metadata': {},
                        'status': 'active'}
        image = self._get_image()
        pre_add_locations = image.locations[:]
        image.locations.append(new_location)
        pre_add_locations.append(new_location)
        self.assertEqual(image.locations, pre_add_locations)
    def test_insert_location(self):
        new_location = {'url': 'file:///a/path', 'metadata': {},
                        'status': 'active'}
        image = self._get_image()
        pre_add_locations = image.locations[:]
        image.locations.insert(0, new_location)
        pre_add_locations.insert(0, new_location)
        self.assertEqual(image.locations, pre_add_locations)
    def test_extend_location(self):
        new_location = {'url': 'file:///a/path', 'metadata': {},
                        'status': 'active'}
        image = self._get_image()
        pre_add_locations = image.locations[:]
        image.locations.extend([new_location])
        pre_add_locations.extend([new_location])
        self.assertEqual(image.locations, pre_add_locations)
    def test_iadd_location(self):
        new_location = {'url': 'file:///a/path', 'metadata': {},
                        'status': 'active'}
        image = self._get_image()
        pre_add_locations = image.locations[:]
        image.locations += [new_location]
        pre_add_locations += [new_location]
        self.assertEqual(image.locations, pre_add_locations)
    def test_set_location(self):
        new_location = {'url': 'file:///a/path', 'metadata': {},
                        'status': 'active'}
        image = self._get_image()
        image.locations = [new_location]
        self.assertEqual(image.locations, [new_location])
    def _make_image_with_quota(self, image_size=10, location_count=2):
        # Image whose existing locations consume the quota exactly,
        # so adding any location must overflow.
        quota = image_size * location_count
        self.config(user_storage_quota=str(quota))
        return self._get_image(image_size=image_size,
                               location_count=location_count)
    def test_exceed_append_location(self):
        image = self._make_image_with_quota()
        self.assertRaises(exception.StorageQuotaFull,
                          image.locations.append,
                          {'url': 'file:///a/path', 'metadata': {},
                           'status': 'active'})
    def test_exceed_insert_location(self):
        image = self._make_image_with_quota()
        self.assertRaises(exception.StorageQuotaFull,
                          image.locations.insert,
                          0,
                          {'url': 'file:///a/path', 'metadata': {},
                           'status': 'active'})
    def test_exceed_extend_location(self):
        image = self._make_image_with_quota()
        self.assertRaises(exception.StorageQuotaFull,
                          image.locations.extend,
                          [{'url': 'file:///a/path', 'metadata': {},
                            'status': 'active'}])
    def test_set_location_under(self):
        image = self._make_image_with_quota(location_count=1)
        image.locations = [{'url': 'file:///a/path', 'metadata': {},
                            'status': 'active'}]
    def test_set_location_exceed(self):
        image = self._make_image_with_quota(location_count=1)
        try:
            image.locations = [{'url': 'file:///a/path', 'metadata': {},
                                'status': 'active'},
                               {'url': 'file:///a/path2', 'metadata': {},
                                'status': 'active'}]
            self.fail('Should have raised the quota exception')
        except exception.StorageQuotaFull:
            pass
    def test_iadd_location_exceed(self):
        image = self._make_image_with_quota(location_count=1)
        try:
            image.locations += [{'url': 'file:///a/path', 'metadata': {},
                                 'status': 'active'}]
            self.fail('Should have raised the quota exception')
        except exception.StorageQuotaFull:
            pass
    def test_append_location_for_queued_image(self):
        # Queued images (size None) look up the size from the backend;
        # self.stubs presumably comes from the base test class (mox).
        context = FakeContext()
        db_api = unit_test_utils.FakeDB()
        store_api = unit_test_utils.FakeStoreAPI()
        store = unit_test_utils.FakeStoreUtils(store_api)
        base_image = FakeImage()
        base_image.image_id = str(uuid.uuid4())
        image = glance.quota.ImageProxy(base_image, context, db_api, store)
        self.assertIsNone(image.size)
        self.stubs.Set(store_api, 'get_size_from_backend',
                       unit_test_utils.fake_get_size_from_backend)
        image.locations.append({'url': 'file:///fake.img.tar.gz',
                                'metadata': {}})
        self.assertIn({'url': 'file:///fake.img.tar.gz', 'metadata': {}},
                      image.locations)
    def test_insert_location_for_queued_image(self):
        context = FakeContext()
        db_api = unit_test_utils.FakeDB()
        store_api = unit_test_utils.FakeStoreAPI()
        store = unit_test_utils.FakeStoreUtils(store_api)
        base_image = FakeImage()
        base_image.image_id = str(uuid.uuid4())
        image = glance.quota.ImageProxy(base_image, context, db_api, store)
        self.assertIsNone(image.size)
        self.stubs.Set(store_api, 'get_size_from_backend',
                       unit_test_utils.fake_get_size_from_backend)
        image.locations.insert(0,
                               {'url': 'file:///fake.img.tar.gz',
                                'metadata': {}})
        self.assertIn({'url': 'file:///fake.img.tar.gz', 'metadata': {}},
                      image.locations)
    def test_set_location_for_queued_image(self):
        context = FakeContext()
        db_api = unit_test_utils.FakeDB()
        store_api = unit_test_utils.FakeStoreAPI()
        store = unit_test_utils.FakeStoreUtils(store_api)
        base_image = FakeImage()
        base_image.image_id = str(uuid.uuid4())
        image = glance.quota.ImageProxy(base_image, context, db_api, store)
        self.assertIsNone(image.size)
        self.stubs.Set(store_api, 'get_size_from_backend',
                       unit_test_utils.fake_get_size_from_backend)
        image.locations = [{'url': 'file:///fake.img.tar.gz', 'metadata': {}}]
        self.assertEqual([{'url': 'file:///fake.img.tar.gz', 'metadata': {}}],
                         image.locations)
    def test_iadd_location_for_queued_image(self):
        context = FakeContext()
        db_api = unit_test_utils.FakeDB()
        store_api = unit_test_utils.FakeStoreAPI()
        store = unit_test_utils.FakeStoreUtils(store_api)
        base_image = FakeImage()
        base_image.image_id = str(uuid.uuid4())
        image = glance.quota.ImageProxy(base_image, context, db_api, store)
        self.assertIsNone(image.size)
        self.stubs.Set(store_api, 'get_size_from_backend',
                       unit_test_utils.fake_get_size_from_backend)
        image.locations += [{'url': 'file:///fake.img.tar.gz', 'metadata': {}}]
        self.assertIn({'url': 'file:///fake.img.tar.gz', 'metadata': {}},
                      image.locations)
class TestImagePropertyQuotas(test_utils.BaseTestCase):
    """Enforcement of ``image_property_quota`` by the quota ImageRepoProxy."""

    def setUp(self):
        super(TestImagePropertyQuotas, self).setUp()
        self.base_image = FakeImage()
        self.image = glance.quota.ImageProxy(self.base_image,
                                             mock.Mock(),
                                             mock.Mock(),
                                             mock.Mock())
        # The repo mock hands back the unwrapped base image, mirroring
        # the real repository's add()/save() return contract.
        self.image_repo_mock = mock.Mock()
        self.image_repo_mock.add.return_value = self.base_image
        self.image_repo_mock.save.return_value = self.base_image
        self.image_repo_proxy = glance.quota.ImageRepoProxy(
            self.image_repo_mock,
            mock.Mock(),
            mock.Mock(),
            mock.Mock())

    def test_save_image_with_image_property(self):
        """Saving at exactly the quota delegates to the wrapped repo."""
        self.config(image_property_quota=1)
        self.image.extra_properties = {'foo': 'bar'}
        self.image_repo_proxy.save(self.image)
        self.image_repo_mock.save.assert_called_once_with(self.base_image,
                                                          from_state=None)

    def test_save_image_too_many_image_properties(self):
        """Saving above the quota raises ImagePropertyLimitExceeded."""
        self.config(image_property_quota=1)
        self.image.extra_properties = {'foo': 'bar', 'foo2': 'bar2'}
        exc = self.assertRaises(exception.ImagePropertyLimitExceeded,
                                self.image_repo_proxy.save, self.image)
        self.assertIn("Attempted: 2, Maximum: 1", six.text_type(exc))

    def test_save_image_unlimited_image_properties(self):
        """A quota of -1 disables the property limit on save."""
        self.config(image_property_quota=-1)
        self.image.extra_properties = {'foo': 'bar'}
        self.image_repo_proxy.save(self.image)
        self.image_repo_mock.save.assert_called_once_with(self.base_image,
                                                          from_state=None)

    def test_add_image_with_image_property(self):
        """Adding at exactly the quota delegates to the wrapped repo."""
        self.config(image_property_quota=1)
        self.image.extra_properties = {'foo': 'bar'}
        self.image_repo_proxy.add(self.image)
        self.image_repo_mock.add.assert_called_once_with(self.base_image)

    def test_add_image_too_many_image_properties(self):
        """Adding above the quota raises ImagePropertyLimitExceeded."""
        self.config(image_property_quota=1)
        self.image.extra_properties = {'foo': 'bar', 'foo2': 'bar2'}
        exc = self.assertRaises(exception.ImagePropertyLimitExceeded,
                                self.image_repo_proxy.add, self.image)
        self.assertIn("Attempted: 2, Maximum: 1", six.text_type(exc))

    def test_add_image_unlimited_image_properties(self):
        """A quota of -1 disables the property limit on add."""
        self.config(image_property_quota=-1)
        self.image.extra_properties = {'foo': 'bar'}
        self.image_repo_proxy.add(self.image)
        self.image_repo_mock.add.assert_called_once_with(self.base_image)

    def _quota_exceed_setup(self):
        # Build an image that already sits at a quota of 2 so tests can
        # lower the quota afterwards and exercise over-quota behavior.
        self.config(image_property_quota=2)
        self.base_image.extra_properties = {'foo': 'bar', 'spam': 'ham'}
        self.image = glance.quota.ImageProxy(self.base_image,
                                             mock.Mock(),
                                             mock.Mock(),
                                             mock.Mock())

    def test_modify_image_properties_when_quota_exceeded(self):
        """Existing properties may still be modified once over quota."""
        self._quota_exceed_setup()
        self.config(image_property_quota=1)
        self.image.extra_properties = {'foo': 'frob', 'spam': 'eggs'}
        self.image_repo_proxy.save(self.image)
        self.image_repo_mock.save.assert_called_once_with(self.base_image,
                                                          from_state=None)
        self.assertEqual('frob', self.base_image.extra_properties['foo'])
        self.assertEqual('eggs', self.base_image.extra_properties['spam'])

    def test_delete_image_properties_when_quota_exceeded(self):
        """Existing properties may still be deleted once over quota."""
        self._quota_exceed_setup()
        self.config(image_property_quota=1)
        del self.image.extra_properties['foo']
        self.image_repo_proxy.save(self.image)
        self.image_repo_mock.save.assert_called_once_with(self.base_image,
                                                          from_state=None)
        self.assertNotIn('foo', self.base_image.extra_properties)
        self.assertEqual('ham', self.base_image.extra_properties['spam'])

    def test_invalid_quota_config_parameter(self):
        """A non-numeric user_storage_quota raises InvalidOptionValue."""
        self.config(user_storage_quota='foo')
        location = {"url": "file:///fake.img.tar.gz", "metadata": {}}
        self.assertRaises(exception.InvalidOptionValue,
                          self.image.locations.append, location)

    def test_exceed_quota_during_patch_operation(self):
        """Adds beyond quota followed by deletes back under it still save."""
        self._quota_exceed_setup()
        self.image.extra_properties['frob'] = 'baz'
        self.image.extra_properties['lorem'] = 'ipsum'
        self.assertEqual('bar', self.base_image.extra_properties['foo'])
        self.assertEqual('ham', self.base_image.extra_properties['spam'])
        self.assertEqual('baz', self.base_image.extra_properties['frob'])
        self.assertEqual('ipsum', self.base_image.extra_properties['lorem'])
        del self.image.extra_properties['frob']
        del self.image.extra_properties['lorem']
        self.image_repo_proxy.save(self.image)
        call_args = mock.call(self.base_image, from_state=None)
        self.assertEqual(call_args, self.image_repo_mock.save.call_args)
        self.assertEqual('bar', self.base_image.extra_properties['foo'])
        self.assertEqual('ham', self.base_image.extra_properties['spam'])
        self.assertNotIn('frob', self.base_image.extra_properties)
        self.assertNotIn('lorem', self.base_image.extra_properties)

    def test_quota_exceeded_after_delete_image_properties(self):
        """Deleting down toward a lowered quota still saves cleanly."""
        self.config(image_property_quota=3)
        self.base_image.extra_properties = {'foo': 'bar',
                                            'spam': 'ham',
                                            'frob': 'baz'}
        self.image = glance.quota.ImageProxy(self.base_image,
                                             mock.Mock(),
                                             mock.Mock(),
                                             mock.Mock())
        self.config(image_property_quota=1)
        del self.image.extra_properties['foo']
        self.image_repo_proxy.save(self.image)
        self.image_repo_mock.save.assert_called_once_with(self.base_image,
                                                          from_state=None)
        self.assertNotIn('foo', self.base_image.extra_properties)
        self.assertEqual('ham', self.base_image.extra_properties['spam'])
        self.assertEqual('baz', self.base_image.extra_properties['frob'])
class TestImageTagQuotas(test_utils.BaseTestCase):
    """Tag quota enforcement on the quota-aware image proxy."""

    def setUp(self):
        super(TestImageTagQuotas, self).setUp()
        self.base_image = mock.Mock()
        self.base_image.tags = set()
        self.base_image.extra_properties = {}
        self.image = glance.quota.ImageProxy(
            self.base_image, mock.Mock(), mock.Mock(), mock.Mock())
        self.image_repo_mock = mock.Mock()
        self.image_repo_proxy = glance.quota.ImageRepoProxy(
            self.image_repo_mock, mock.Mock(), mock.Mock(), mock.Mock())

    def test_replace_image_tag(self):
        """Replacing the tag set within quota succeeds."""
        self.config(image_tag_quota=1)
        self.image.tags = ['foo']
        self.assertEqual(1, len(self.image.tags))

    def test_replace_too_many_image_tags(self):
        """Replacing beyond quota raises and leaves the tag set untouched."""
        self.config(image_tag_quota=0)
        exc = self.assertRaises(exception.ImageTagLimitExceeded,
                                setattr, self.image, 'tags', ['foo', 'bar'])
        self.assertIn('Attempted: 2, Maximum: 0', six.text_type(exc))
        self.assertEqual(0, len(self.image.tags))

    def test_replace_unlimited_image_tags(self):
        """A quota of -1 disables the limit for replacement."""
        self.config(image_tag_quota=-1)
        self.image.tags = ['foo']
        self.assertEqual(1, len(self.image.tags))

    def test_add_image_tag(self):
        """Adding a single tag within quota succeeds."""
        self.config(image_tag_quota=1)
        self.image.tags.add('foo')
        self.assertEqual(1, len(self.image.tags))

    def test_add_too_many_image_tags(self):
        """Adding a tag beyond quota raises with a descriptive message."""
        self.config(image_tag_quota=1)
        self.image.tags.add('foo')
        exc = self.assertRaises(exception.ImageTagLimitExceeded,
                                self.image.tags.add, 'bar')
        self.assertIn('Attempted: 2, Maximum: 1', six.text_type(exc))

    def test_add_unlimited_image_tags(self):
        """A quota of -1 disables the limit for additions."""
        self.config(image_tag_quota=-1)
        self.image.tags.add('foo')
        self.assertEqual(1, len(self.image.tags))

    def test_remove_image_tag_while_over_quota(self):
        """Tags may still be removed after the quota is lowered."""
        self.config(image_tag_quota=1)
        self.image.tags.add('foo')
        self.assertEqual(1, len(self.image.tags))
        self.config(image_tag_quota=0)
        self.image.tags.remove('foo')
        self.assertEqual(0, len(self.image.tags))
class TestQuotaImageTagsProxy(test_utils.BaseTestCase):
    """Behavioral checks for the QuotaImageTagsProxy set wrapper."""

    def setUp(self):
        super(TestQuotaImageTagsProxy, self).setUp()

    def test_add(self):
        """A tag added within quota is visible via containment."""
        tags = glance.quota.QuotaImageTagsProxy(set())
        tags.add('foo')
        self.assertIn('foo', tags)

    def test_add_too_many_tags(self):
        """Exceeding the tag quota raises with a descriptive message."""
        self.config(image_tag_quota=0)
        tags = glance.quota.QuotaImageTagsProxy(set())
        exc = self.assertRaises(exception.ImageTagLimitExceeded,
                                tags.add, 'bar')
        self.assertIn('Attempted: 1, Maximum: 0', six.text_type(exc))

    def test_equals(self):
        """An empty proxy compares equal to an empty set."""
        self.assertEqual(set(), glance.quota.QuotaImageTagsProxy(set()))

    def test_contains(self):
        """Membership checks fall through to the wrapped set."""
        self.assertIn('foo', glance.quota.QuotaImageTagsProxy({'foo'}))

    def test_len(self):
        """len() reports the wrapped set's size."""
        tags = {'foo', 'bar', 'baz', 'niz'}
        self.assertEqual(4, len(glance.quota.QuotaImageTagsProxy(tags)))

    def test_iter(self):
        """Iteration yields exactly the wrapped set's members."""
        remaining = {'foo', 'bar', 'baz', 'niz'}
        proxy = glance.quota.QuotaImageTagsProxy(remaining.copy())
        self.assertEqual(4, len(remaining))
        for tag in proxy:
            remaining.remove(tag)
        self.assertEqual(0, len(remaining))
class TestImageMemberQuotas(test_utils.BaseTestCase):
    """Member quota enforcement at the image-member factory proxy."""

    def setUp(self):
        super(TestImageMemberQuotas, self).setUp()
        fake_store_api = unit_test_utils.FakeStoreAPI()
        self.image = mock.Mock()
        self.base_image_member_factory = mock.Mock()
        self.image_member_factory = glance.quota.ImageMemberFactoryProxy(
            self.base_image_member_factory,
            FakeContext(),
            unit_test_utils.FakeDB(),
            unit_test_utils.FakeStoreUtils(fake_store_api))

    def test_new_image_member(self):
        """Creating a member within quota delegates to the wrapped factory."""
        self.config(image_member_quota=1)
        self.image_member_factory.new_image_member(self.image, 'fake_id')
        nim = self.base_image_member_factory.new_image_member
        nim.assert_called_once_with(self.image.base, 'fake_id')

    def test_new_image_member_unlimited_members(self):
        """A quota of -1 disables the member limit entirely."""
        self.config(image_member_quota=-1)
        self.image_member_factory.new_image_member(self.image, 'fake_id')
        nim = self.base_image_member_factory.new_image_member
        nim.assert_called_once_with(self.image.base, 'fake_id')

    def test_new_image_member_too_many_members(self):
        """A zero quota rejects any new member."""
        self.config(image_member_quota=0)
        self.assertRaises(exception.ImageMemberLimitExceeded,
                          self.image_member_factory.new_image_member,
                          self.image, 'fake_id')
class TestImageLocationQuotas(test_utils.BaseTestCase):
    """Enforcement of ``image_location_quota`` on the quota-aware proxy."""

    def setUp(self):
        super(TestImageLocationQuotas, self).setUp()
        self.base_image = mock.Mock()
        self.base_image.locations = []
        self.base_image.size = 1
        self.base_image.extra_properties = {}
        self.image = glance.quota.ImageProxy(self.base_image,
                                             mock.Mock(),
                                             mock.Mock(),
                                             mock.Mock())
        self.image_repo_mock = mock.Mock()
        self.image_repo_proxy = glance.quota.ImageRepoProxy(
            self.image_repo_mock,
            mock.Mock(),
            mock.Mock(),
            mock.Mock())

    def test_replace_image_location(self):
        """Replacing locations within quota succeeds."""
        self.config(image_location_quota=1)
        self.image.locations = [{"url": "file:///fake.img.tar.gz",
                                 "metadata": {}
                                 }]
        self.assertEqual(1, len(self.image.locations))

    def test_replace_too_many_image_locations(self):
        """Replacing beyond quota raises and keeps the prior locations."""
        self.config(image_location_quota=1)
        self.image.locations = [{"url": "file:///fake.img.tar.gz",
                                 "metadata": {}}
                                ]
        locations = [
            {"url": "file:///fake1.img.tar.gz", "metadata": {}},
            {"url": "file:///fake2.img.tar.gz", "metadata": {}},
            {"url": "file:///fake3.img.tar.gz", "metadata": {}}
        ]
        exc = self.assertRaises(exception.ImageLocationLimitExceeded,
                                setattr, self.image, 'locations', locations)
        self.assertIn('Attempted: 3, Maximum: 1', six.text_type(exc))
        self.assertEqual(1, len(self.image.locations))

    def test_replace_unlimited_image_locations(self):
        """A quota of -1 disables the location limit for replacement."""
        self.config(image_location_quota=-1)
        self.image.locations = [{"url": "file:///fake.img.tar.gz",
                                 "metadata": {}}
                                ]
        self.assertEqual(1, len(self.image.locations))

    def test_add_image_location(self):
        """Appending a location within quota succeeds."""
        self.config(image_location_quota=1)
        location = {"url": "file:///fake.img.tar.gz", "metadata": {}}
        self.image.locations.append(location)
        self.assertEqual(1, len(self.image.locations))

    def test_add_too_many_image_locations(self):
        """Appending beyond quota raises with a descriptive message."""
        self.config(image_location_quota=1)
        location1 = {"url": "file:///fake1.img.tar.gz", "metadata": {}}
        self.image.locations.append(location1)
        location2 = {"url": "file:///fake2.img.tar.gz", "metadata": {}}
        exc = self.assertRaises(exception.ImageLocationLimitExceeded,
                                self.image.locations.append, location2)
        self.assertIn('Attempted: 2, Maximum: 1', six.text_type(exc))

    def test_add_unlimited_image_locations(self):
        """A quota of -1 disables the location limit for appends."""
        self.config(image_location_quota=-1)
        location1 = {"url": "file:///fake1.img.tar.gz", "metadata": {}}
        self.image.locations.append(location1)
        self.assertEqual(1, len(self.image.locations))

    def test_remove_image_location_while_over_quota(self):
        """Locations may still be removed after the quota is lowered."""
        self.config(image_location_quota=1)
        location1 = {"url": "file:///fake1.img.tar.gz", "metadata": {}}
        self.image.locations.append(location1)
        self.assertEqual(1, len(self.image.locations))
        self.config(image_location_quota=0)
        self.image.locations.remove(location1)
        self.assertEqual(0, len(self.image.locations))
| |
# -*- coding: utf-8 -*-
"""
zenfig.variables
~~~~~~~~
Variables processor
:copyright: (c) 2016 by Alejandro Ricoveri
:license: MIT, see LICENSE for more details.
"""
import os
import re
import platform
import yaml
import psutil
import cpuinfo
from . import __name__ as pkg_name
from . import __version__ as pkg_version
from . import log
from . import util
from . import renderer
from .kit import get_kit
from .kits import Kit
from .util import autolog
# Sanity check regex for ZF_VAR_PATH
ZF_VAR_PATH_REGEX = "([^:]+:)*[^:]+$"
@autolog
def _get_search_path_from_env(var_path=None):
    """
    Read the variable search path from the ZF_VAR_PATH environment variable.

    :param var_path: optional colon-separated path string; when None,
        the value of ZF_VAR_PATH is consulted instead
    :returns: a list of path entries, or None when nothing usable is set
    """
    candidate = os.getenv("ZF_VAR_PATH") if var_path is None else var_path
    if candidate is None:
        return None
    # Reject malformed values (empty entries, trailing colon, ...)
    if not re.match(ZF_VAR_PATH_REGEX, candidate):
        return None
    log.msg_debug("ZF_VAR_PATH has been set!")
    return candidate.split(':')
@autolog
def _resolve_search_path(*, user_var_files, kit_var_dir=None, defaults_only=False):
    """
    Resolve variable search path.

    Locations are collected from highest to lowest precedence:
    user-supplied entries, ZF_VAR_PATH entries, the default user
    vars directory, and finally the kit's own vars directory.

    :param user_var_files: Raw list of variable locations set by the user
    :param kit_var_dir: optional kit-provided variables directory
    :param defaults_only: If True, variable locations set by the user won't be included.
    :returns:
        A list of variable locations/files, ordered by precedence
    """
    if not defaults_only:
        # 1 => variables set by the user: normalize every entry to an
        # absolute path (in place, like the original behavior)
        for idx, location in enumerate(user_var_files):
            user_var_files[idx] = os.path.abspath(location)

        # 2 => entries taken from ZF_VAR_PATH, when the environment
        # variable is set and well-formed
        env_entries = _get_search_path_from_env()
        if env_entries is not None:
            user_var_files.extend(env_entries)

    # 3 => the user's data-home "vars" directory
    user_var_files.append("{}/vars".format(util.get_data_home()))

    # 4 => variables bundled with the kit (lowest precedence)
    if kit_var_dir is not None:
        user_var_files.append(kit_var_dir)

    # Deduplicate while preserving first-occurrence order, then reverse
    # so that lower-precedence locations come first (later loads win).
    deduped = []
    for location in user_var_files:
        if location not in deduped:
            deduped.append(location)
    deduped.reverse()
    return deduped
@autolog
def _get_default_vars():
    """
    Get default variables

    Default variables are mutable global variables
    covering a great range of basics, from common terminal settings
    to color schemes. Since they are mutable, it means that they
    can be superseded by other definitions found along the variable
    resolution with set search paths.

    :return: A dictionary with a bunch of scavenged variables
    """
    # This holds the entire thing
    default_vars = {}
    # Browser settings: honor $BROWSER, fall back to firefox
    browser = os.getenv("BROWSER")
    if browser is None:
        browser = 'firefox'
    default_vars["browser"] = browser
    #################################
    # base16 colors (default palette)
    # 16 hex triplets (no leading '#'), base00..base0F
    #################################
    base16_colors = [
        "181818",
        "282828",
        "383838",
        "585858",
        "b8b8b8",
        "d8d8d8",
        "e8e8e8",
        "f8f8f8",
        "ab4642",
        "dc9656",
        "f7ca88",
        "a1b56c",
        "86c1b9",
        "7cafc2",
        "ba8baf",
        "a16946",
    ]
    # Insert on defaults: keys become color_base00 .. color_base0F
    # (list index rendered as two uppercase hex digits)
    for index, color in enumerate(base16_colors):
        index = "{:02x}".format(index).upper()
        default_vars["color_base{}".format(index)] = color
    ###############
    # Font settings
    ###############
    font_settings = {
        "font": "Sans",
        "font_icon": "FontAwesome",
        "font_size": 10,
        "font_antialiasing": True,
        "font_hinting": True,
        "font_hintstyle": "hintslight",
        "font_antialias": True
    }
    # Insert on defaults
    default_vars.update(font_settings)
    ##########################
    # Terminal common settings
    ##########################
    term_settings = {
        "term": os.getenv("TERM"),
        "term_font": "Mono",
        "term_scroll_on_output": True,
        "term_font_size": 10,
        ####################################################
        # Notice that terminal color values
        # depend ultimately on resolved base16 color palette
        # from variables, hence the reason why they are actual
        # string templates (rendered later by the renderer).
        ####################################################
        # general colors
        "term_color_background": "{{ @color_base00 }}",
        "term_color_foreground": "{{ @color_base07 }}",
        "term_color_cursor": "{{ @color_base0A }}",
        # 16-color space
        "term_color00": "{{ @color_base01 }}",
        "term_color01": "{{ @color_base09 }}",
        "term_color02": "{{ @color_base0B }}",
        "term_color03": "{{ @color_base0A }}",
        "term_color04": "{{ @color_base0C }}",
        "term_color05": "{{ @color_base0E }}",
        "term_color06": "{{ @color_base0C }}",
        "term_color07": "{{ @color_base07 }}",
        "term_color08": "{{ @color_base00 }}",
        "term_color09": "{{ @color_base08 }}",
        "term_color10": "{{ @color_base0B }}",
        "term_color11": "{{ @color_base0A }}",
        "term_color12": "{{ @color_base0D }}",
        "term_color13": "{{ @color_base0E }}",
        "term_color14": "{{ @color_base0C }}",
        "term_color15": "{{ @color_base06 }}",
        # 256-color space
        "term_color16": "{{ @color_base09 }}",
        "term_color17": "{{ @color_base0F }}",
        "term_color18": "{{ @color_base01 }}",
        "term_color19": "{{ @color_base02 }}",
        "term_color20": "{{ @color_base04 }}",
        "term_color21": "{{ @color_base06 }}",
    }
    # Insert on defaults
    default_vars.update(term_settings)
    # Give those variables already!
    return default_vars
def _create_fact(facts, key, value, *, prefix=None):
    """Register *value* in *facts* under a namespaced "<prefix>_<key>" key.

    :param facts: dict of facts to mutate in place
    :param key: bare fact name
    :param value: fact value
    :param prefix: key namespace; defaults to the package name
    """
    effective_prefix = pkg_name if prefix is None else prefix
    facts["{}_{}".format(effective_prefix, key)] = value
@autolog
def _get_facts(*, kit=None):
    """
    Get facts

    Facts are immutable global variables
    set at the very end of variable resolution.

    :param kit: A kit from which facts are going to be extracted
    :return: A dictionary with a bunch of scavenged variables
    """
    # these are the facts
    facts = {}

    # General facts that are available for every platform
    _create_fact(facts, 'version', pkg_version)
    _create_fact(facts, 'install_prefix', os.getenv('HOME'))

    # General system-related facts
    _create_fact(facts, 'sys_uid', os.getuid())
    _create_fact(facts, 'sys_gid', os.getgid())

    # A collection of current environment variables is held in here
    _create_fact(facts, 'env', dict(os.environ))

    # Executable search path. Default to "" so an unset PATH yields
    # [''] instead of crashing on None.split(":").
    _create_fact(facts, 'sys_path', os.getenv("PATH", "").split(":"))

    # Facts for *nix operating systems
    if os.name == 'posix':
        _create_fact(facts, 'sys_user', os.getenv('USER'))
        _create_fact(facts, 'sys_user_home', os.getenv('HOME'))

    ####################################################
    # System-related facts:
    # ---------------------
    # These facts collect characteristics of the current
    # platform zenfig is running on
    ####################################################

    # Operating System facts
    _system = platform.system()
    _create_fact(facts, 'system', _system)
    _create_fact(facts, 'sys_node', platform.node())

    # These are exclusive to linux-based systems
    if _system == 'Linux':
        # NOTE: platform.linux_distribution() was deprecated in 3.5 and
        # removed in Python 3.8 -- guard so fact collection keeps working
        # there (the linux_dist_* facts are simply absent in that case).
        if hasattr(platform, 'linux_distribution'):
            linux_distro = platform.linux_distribution()
            _create_fact(facts, 'linux_dist_name', linux_distro[0])
            _create_fact(facts, 'linux_dist_version', linux_distro[1])
            _create_fact(facts, 'linux_dist_id', linux_distro[2])
        # kernel version
        _create_fact(facts, 'linux_release', platform.release())

    # OSX-specific facts
    if _system == 'Darwin':
        _create_fact(facts, 'osx_ver', platform.mac_ver())

    # Hardware-related facts
    _create_fact(facts, 'sys_machine', platform.machine())

    # Low level CPU information (thanks to cpuinfo)
    # NOTE(review): key names below follow py-cpuinfo's pre-7.x schema
    # ('vendor_id', 'brand', 'hz_advertised_raw') -- confirm against the
    # pinned py-cpuinfo version.
    _cpu_info = cpuinfo.get_cpu_info()
    _create_fact(facts, 'cpu_vendor_id', _cpu_info['vendor_id'])
    _create_fact(facts, 'cpu_brand', _cpu_info['brand'])
    _create_fact(facts, 'cpu_cores', _cpu_info['count'])
    _create_fact(facts, 'cpu_hz', _cpu_info['hz_advertised_raw'][0])
    _create_fact(facts, 'cpu_arch', _cpu_info['arch'])
    _create_fact(facts, 'cpu_bits', _cpu_info['bits'])

    # RAM information (total physical memory, in bytes)
    _create_fact(facts, 'mem_total', psutil.virtual_memory()[0])

    ####################
    # Python information
    ####################
    _py_ver = platform.python_version_tuple()
    # BUG FIX: this fact previously called platform.python_revision(),
    # which returns the interpreter's VCS revision (usually an empty
    # string), not the implementation name ('CPython', 'PyPy', ...).
    _create_fact(facts, 'python_implementation',
                 platform.python_implementation())
    _create_fact(facts, 'python_version', platform.python_version())
    _create_fact(facts, 'python_version_major', _py_ver[0])
    _create_fact(facts, 'python_version_minor', _py_ver[1])
    _create_fact(facts, 'python_version_patch', _py_ver[2])

    # Kit index variables are taken as well as facts
    # so they can be referenced by other variables, also
    # this means that index variables from a kit can reference
    # other variables as well, because all these variables get
    # rendered as part of variable resolution.
    if kit is not None:
        for key, value in kit.index_data.items():
            _create_fact(facts, key, value,
                         prefix="{}_{}".format(pkg_name, "kit"))

    # Give those variables already!
    return facts
@autolog
def get_user_vars(*, user_var_files=None, kit=None, defaults_only=False):
    """
    Resolve variables from user environment

    This compiles all set variables to be applied
    on the template. These variables come from defaults,
    read-only built-ins (facts), kits (if specified),
    files found in default search paths and
    ultimately search paths set by the user.

    :param user_var_files: Variable search paths set by the user
    :param kit: Kit to be sourced (a Kit instance or a kit name string)
    :param defaults_only: If True, variable locations set by the user won't be included.
    :return: dict of fully-rendered variables keyed by name
    :raises TypeError: if kit is neither a str nor a Kit
    """
    # user var locations can be None
    if user_var_files is None:
        user_var_files = []
    # Kit variables directory
    kit_var_dir = None
    # Get kit (if any): a plain name is resolved into a Kit instance
    if kit is not None:
        if isinstance(kit, str):
            kit = get_kit(kit)
        elif not isinstance(kit, Kit):
            raise TypeError("kit must be either a str or a Kit")
        # Where is that kit's variable directory?
        kit_var_dir = kit.var_dir
    #######################################################
    # User variables get initialised with default variables
    # (every default starts with an unknown/None location)
    #######################################################
    user_vars = _get_default_vars()
    user_var_locations = {}
    # set locations
    for user_var in user_vars.keys():
        user_var_locations[user_var] = None
    ##########################
    # Get variable search path
    ##########################
    user_var_files = _resolve_search_path(
        user_var_files=user_var_files,
        kit_var_dir=kit_var_dir,
        defaults_only=defaults_only
    )
    log.msg_debug("Variables search path:")
    log.msg_debug("**********************")
    for user_var_file in user_var_files:
        log.msg_debug(user_var_file)
    log.msg_debug("**********************")
    ########################################
    # Set facts: they override defaults and
    # are flagged with the 'fact' location
    ########################################
    facts = _get_facts(kit=kit)
    fact_locations = {}
    for fact in facts.keys():
        fact_locations[fact] = 'fact'
    user_vars.update(facts)
    user_var_locations.update(fact_locations)
    ######################################################
    # Obtain variables from variable files set by the user
    # (these take precedence over defaults and facts)
    ######################################################
    _vars, locations = _get_vars(var_files=user_var_files)
    user_vars.update(_vars)
    # Variables whose values are strings may
    # have jinja2 logic within them as well
    # so we render those values through jinja
    # so, we merge defaults and facts with
    # user-set values to get the final picture
    user_vars.update(renderer.render_dict(**user_vars))
    # and we consolidate their locations (should they come from actual files)
    user_var_locations.update(locations)
    # Print vars
    _list_vars(vars=user_vars, locations=user_var_locations)
    # Give variables already!
    return user_vars
@autolog
def _list_vars(*, vars, locations):
    """Log every captured variable along with the location that set it."""
    log.msg("{} variable(s) captured".format(len(vars)))
    log.msg("**********************************")
    for key, value in sorted(vars.items()):
        # A None location means the variable kept its default value
        origin = locations[key] if locations[key] is not None else "default"
        if isinstance(value, list):
            log.msg("{:24} [list] [{}]".format(key, origin))
            for subvalue in value:
                log.msg(" => {}".format(subvalue))
        elif isinstance(value, dict):
            log.msg("{:24} [dict] [{}]".format(key, origin))
            for sub_key, sub_value in value.items():
                log.msg(" {:24} => {}".format(sub_key, sub_value))
        else:
            log.msg("{:24} = '{}' [{}]".format(key, value, origin))
    log.msg("**********************************")
@autolog
def _get_vars(*, var_files):
    """
    Collect all variables taken from all files in var_files

    :param var_files: list of files/directories to be sourced
    :returns:
        A tuple with two dicts, one containing variables
        and the other one containing locations where they were
        ultimately set (following precedence set by normalize_search_path)
    """
    ######################################
    # All merged variables will go in here
    ######################################
    tpl_vars = {}   # variables themselves
    tpl_files = {}  # locations in which these vars were set

    for var_file in var_files:
        # Normalize full path to file
        var_file = os.path.abspath(var_file)

        # Only regular files with a .yaml/.yml extension get sourced.
        # BUG FIX: the previous condition was
        #     isfile(f) and match(.yaml) or match(.yml)
        # which -- because "and" binds tighter than "or" -- skipped the
        # isfile() check for anything ending in ".yml" (including
        # directories) and then crashed trying to open() it.
        if os.path.isfile(var_file) and var_file.endswith((".yaml", ".yml")):
            with open(var_file, 'r') as f:
                # Update variables with those found on this file
                try:
                    # Load the YAML file.
                    # NOTE(review): yaml.load without an explicit Loader is
                    # deprecated (PyYAML 5+) and unsafe on untrusted input;
                    # consider yaml.safe_load if var files never use
                    # python-specific tags.
                    loaded = yaml.load(f)
                    # Check whether there is indeed something inside the file
                    if not isinstance(loaded, dict):
                        log.msg_err("Invalid document format on file '{}'. "
                                    "Root YAML structure must be a dictionary. "
                                    "This file has been discarded.".format(var_file))
                        continue
                    # Merge values and record the file that set each one
                    tpl_vars.update(loaded)
                    for var in loaded.keys():
                        tpl_files[var] = var_file
                    # Log the count
                    log.msg_debug("Found {} variable(s) in {}".format(
                        len(loaded), var_file)
                    )
                except yaml.YAMLError:
                    log.msg_err("Error loading variable file: {}".format(
                        var_file)
                    )
                    log.msg_err("{}: file discarded".format(var_file))

        # The entry is a directory: source every regular file directly
        # inside it and merge the results
        elif os.path.isdir(var_file):
            dir_entries = [
                os.path.join(var_file, entry)
                for entry in os.listdir(var_file)
            ]
            sub_files = [path for path in dir_entries if os.path.isfile(path)]
            # Get both variables and locations ...
            sub_vars, sub_locations = _get_vars(var_files=sub_files)
            # ... and merge them with the current ones
            tpl_vars.update(sub_vars)
            tpl_files.update(sub_locations)

    # Return the final result
    return tpl_vars, tpl_files
| |
from __future__ import annotations
import datetime
from functools import partial
from textwrap import dedent
import warnings
import numpy as np
from pandas._libs.tslibs import Timedelta
import pandas._libs.window.aggregations as window_aggregations
from pandas._typing import (
Axis,
FrameOrSeries,
FrameOrSeriesUnion,
TimedeltaConvertibleTypes,
)
from pandas.compat.numpy import function as nv
from pandas.util._decorators import doc
from pandas.core.dtypes.common import is_datetime64_ns_dtype
from pandas.core.dtypes.missing import isna
import pandas.core.common as common # noqa: PDF018
from pandas.core.util.numba_ import maybe_use_numba
from pandas.core.window.common import zsqrt
from pandas.core.window.doc import (
_shared_docs,
args_compat,
create_section_header,
kwargs_compat,
numba_notes,
template_header,
template_returns,
template_see_also,
window_agg_numba_parameters,
)
from pandas.core.window.indexers import (
BaseIndexer,
ExponentialMovingWindowIndexer,
GroupbyIndexer,
)
from pandas.core.window.numba_ import generate_numba_ewma_func
from pandas.core.window.rolling import (
BaseWindow,
BaseWindowGroupby,
)
def get_center_of_mass(
    comass: "float | None",
    span: "float | None",
    halflife: "float | None",
    alpha: "float | None",
) -> float:
    """
    Convert whichever decay parameter was supplied into a center of mass.

    Exactly one of ``comass``/``span``/``halflife``/``alpha`` may be given;
    the other three must be None.

    Raises
    ------
    ValueError
        If more than one parameter is supplied, none is supplied, or the
        supplied value is out of its valid domain.
    """
    if common.count_not_none(comass, span, halflife, alpha) > 1:
        raise ValueError("comass, span, halflife, and alpha are mutually exclusive")

    # Convert to center of mass; domain checks ensure 0 < alpha <= 1
    if comass is not None:
        if comass < 0:
            raise ValueError("comass must satisfy: comass >= 0")
        return float(comass)

    if span is not None:
        if span < 1:
            raise ValueError("span must satisfy: span >= 1")
        return float((span - 1) / 2)

    if halflife is not None:
        if halflife <= 0:
            raise ValueError("halflife must satisfy: halflife > 0")
        decay = 1 - np.exp(np.log(0.5) / halflife)
        return float(1 / decay - 1)

    if alpha is not None:
        if alpha <= 0 or alpha > 1:
            raise ValueError("alpha must satisfy: 0 < alpha <= 1")
        return float((1 - alpha) / alpha)

    raise ValueError("Must pass one of comass, span, halflife, or alpha")
def _calculate_deltas(
    times: "str | np.ndarray | FrameOrSeries | None",
    halflife: "float | TimedeltaConvertibleTypes | None",
) -> np.ndarray:
    """
    Return the diff of the times divided by the half-life. These values are used in
    the calculation of the ewm mean.

    Parameters
    ----------
    times : str, np.ndarray, Series, default None
        Times corresponding to the observations. Must be monotonically increasing
        and ``datetime64[ns]`` dtype.
    halflife : float, str, timedelta, optional
        Half-life specifying the decay

    Returns
    -------
    np.ndarray
        Diff of the times divided by the half-life
    """
    # error: Item "str" of "Union[str, ndarray, FrameOrSeries, None]" has no
    # attribute "view"
    # error: Item "None" of "Union[str, ndarray, FrameOrSeries, None]" has no
    # attribute "view"
    # Reinterpret the datetime64[ns] values as raw nanosecond counts
    nanos = np.asarray(
        times.view(np.int64), dtype=np.float64  # type: ignore[union-attr]
    )
    # Normalize the half-life to nanoseconds so the division is unitless
    halflife_nanos = float(Timedelta(halflife).value)
    return np.diff(nanos) / halflife_nanos
class ExponentialMovingWindow(BaseWindow):
    r"""
    Provide exponential weighted (EW) functions.
    Available EW functions: ``mean()``, ``var()``, ``std()``, ``corr()``, ``cov()``.
    Exactly one parameter: ``com``, ``span``, ``halflife``, or ``alpha`` must be
    provided.
    Parameters
    ----------
    com : float, optional
        Specify decay in terms of center of mass,
        :math:`\alpha = 1 / (1 + com)`, for :math:`com \geq 0`.
    span : float, optional
        Specify decay in terms of span,
        :math:`\alpha = 2 / (span + 1)`, for :math:`span \geq 1`.
    halflife : float, str, timedelta, optional
        Specify decay in terms of half-life,
        :math:`\alpha = 1 - \exp\left(-\ln(2) / halflife\right)`, for
        :math:`halflife > 0`.
        If ``times`` is specified, the time unit (str or timedelta) over which an
        observation decays to half its value. Only applicable to ``mean()``
        and halflife value will not apply to the other functions.
        .. versionadded:: 1.1.0
    alpha : float, optional
        Specify smoothing factor :math:`\alpha` directly,
        :math:`0 < \alpha \leq 1`.
    min_periods : int, default 0
        Minimum number of observations in window required to have a value
        (otherwise result is NA).
    adjust : bool, default True
        Divide by decaying adjustment factor in beginning periods to account
        for imbalance in relative weightings (viewing EWMA as a moving average).
        - When ``adjust=True`` (default), the EW function is calculated using weights
          :math:`w_i = (1 - \alpha)^i`. For example, the EW moving average of the series
          [:math:`x_0, x_1, ..., x_t`] would be:
        .. math::
            y_t = \frac{x_t + (1 - \alpha)x_{t-1} + (1 - \alpha)^2 x_{t-2} + ... + (1 -
            \alpha)^t x_0}{1 + (1 - \alpha) + (1 - \alpha)^2 + ... + (1 - \alpha)^t}
        - When ``adjust=False``, the exponentially weighted function is calculated
          recursively:
        .. math::
            \begin{split}
                y_0 &= x_0\\
                y_t &= (1 - \alpha) y_{t-1} + \alpha x_t,
            \end{split}
    ignore_na : bool, default False
        Ignore missing values when calculating weights; specify ``True`` to reproduce
        pre-0.15.0 behavior.
        - When ``ignore_na=False`` (default), weights are based on absolute positions.
          For example, the weights of :math:`x_0` and :math:`x_2` used in calculating
          the final weighted average of [:math:`x_0`, None, :math:`x_2`] are
          :math:`(1-\alpha)^2` and :math:`1` if ``adjust=True``, and
          :math:`(1-\alpha)^2` and :math:`\alpha` if ``adjust=False``.
        - When ``ignore_na=True`` (reproducing pre-0.15.0 behavior), weights are based
          on relative positions. For example, the weights of :math:`x_0` and :math:`x_2`
          used in calculating the final weighted average of
          [:math:`x_0`, None, :math:`x_2`] are :math:`1-\alpha` and :math:`1` if
          ``adjust=True``, and :math:`1-\alpha` and :math:`\alpha` if ``adjust=False``.
    axis : {0, 1}, default 0
        The axis to use. The value 0 identifies the rows, and 1
        identifies the columns.
    times : str, np.ndarray, Series, default None
        .. versionadded:: 1.1.0
        Times corresponding to the observations. Must be monotonically increasing and
        ``datetime64[ns]`` dtype.
        If str, the name of the column in the DataFrame representing the times.
        If 1-D array like, a sequence with the same shape as the observations.
        Only applicable to ``mean()``.
    Returns
    -------
    DataFrame
        A Window sub-classed for the particular operation.
    See Also
    --------
    rolling : Provides rolling window calculations.
    expanding : Provides expanding transformations.
    Notes
    -----
    More details can be found at:
    :ref:`Exponentially weighted windows <window.exponentially_weighted>`.
    Examples
    --------
    >>> df = pd.DataFrame({'B': [0, 1, 2, np.nan, 4]})
    >>> df
         B
    0  0.0
    1  1.0
    2  2.0
    3  NaN
    4  4.0
    >>> df.ewm(com=0.5).mean()
              B
    0  0.000000
    1  0.750000
    2  1.615385
    3  1.615385
    4  3.670213
    Specifying ``times`` with a timedelta ``halflife`` when computing mean.
    >>> times = ['2020-01-01', '2020-01-03', '2020-01-10', '2020-01-15', '2020-01-17']
    >>> df.ewm(halflife='4 days', times=pd.DatetimeIndex(times)).mean()
              B
    0  0.000000
    1  0.585786
    2  1.523889
    3  1.523889
    4  3.233686
    """
    # NOTE(review): presumably consumed by the shared BaseWindow machinery to
    # re-create this window from its constructor arguments - confirm in
    # pandas.core.window.rolling.
    _attributes = [
        "com",
        "span",
        "halflife",
        "alpha",
        "min_periods",
        "adjust",
        "ignore_na",
        "axis",
        "times",
    ]
    def __init__(
        self,
        obj: FrameOrSeries,
        com: float | None = None,
        span: float | None = None,
        halflife: float | TimedeltaConvertibleTypes | None = None,
        alpha: float | None = None,
        min_periods: int = 0,
        adjust: bool = True,
        ignore_na: bool = False,
        axis: Axis = 0,
        times: str | np.ndarray | FrameOrSeries | None = None,
        *,
        selection=None,
    ):
        # EWM always needs at least one observation: min_periods is floored at 1.
        super().__init__(
            obj=obj,
            min_periods=max(int(min_periods), 1),
            on=None,
            center=False,
            closed=None,
            method="single",
            axis=axis,
            selection=selection,
        )
        self.com = com
        self.span = span
        self.halflife = halflife
        self.alpha = alpha
        self.adjust = adjust
        self.ignore_na = ignore_na
        self.times = times
        if self.times is not None:
            # Time-based decay: halflife must be timedelta-convertible and
            # times must be a datetime64[ns] vector aligned with the data.
            if not self.adjust:
                raise NotImplementedError("times is not supported with adjust=False.")
            if isinstance(self.times, str):
                # A string names the column of the DataFrame holding the times.
                self.times = self._selected_obj[self.times]
            if not is_datetime64_ns_dtype(self.times):
                raise ValueError("times must be datetime64[ns] dtype.")
            # error: Argument 1 to "len" has incompatible type "Union[str, ndarray,
            # FrameOrSeries, None]"; expected "Sized"
            if len(self.times) != len(obj):  # type: ignore[arg-type]
                raise ValueError("times must be the same length as the object.")
            if not isinstance(self.halflife, (str, datetime.timedelta)):
                raise ValueError(
                    "halflife must be a string or datetime.timedelta object"
                )
            if isna(self.times).any():
                raise ValueError("Cannot convert NaT values to integer")
            self._deltas = _calculate_deltas(self.times, self.halflife)
            # Halflife is no longer applicable when calculating COM
            # But allow COM to still be calculated if the user passes other decay args
            if common.count_not_none(self.com, self.span, self.alpha) > 0:
                self._com = get_center_of_mass(self.com, self.span, None, self.alpha)
            else:
                self._com = 1.0
        else:
            # Without times, halflife must be a plain number (the timedelta
            # flavour only makes sense relative to actual timestamps).
            if self.halflife is not None and isinstance(
                self.halflife, (str, datetime.timedelta)
            ):
                raise ValueError(
                    "halflife can only be a timedelta convertible argument if "
                    "times is not None."
                )
            # Without times, points are equally spaced
            self._deltas = np.ones(max(len(self.obj) - 1, 0), dtype=np.float64)
            self._com = get_center_of_mass(
                # error: Argument 3 to "get_center_of_mass" has incompatible type
                # "Union[float, Any, None, timedelta64, signedinteger[_64Bit]]";
                # expected "Optional[float]"
                self.com,
                self.span,
                self.halflife,  # type: ignore[arg-type]
                self.alpha,
            )
    def _get_window_indexer(self) -> BaseIndexer:
        """
        Return an indexer class that will compute the window start and end bounds
        """
        return ExponentialMovingWindowIndexer()
    @doc(
        _shared_docs["aggregate"],
        see_also=dedent(
            """
        See Also
        --------
        pandas.DataFrame.rolling.aggregate
        """
        ),
        examples=dedent(
            """
        Examples
        --------
        >>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6], "C": [7, 8, 9]})
        >>> df
           A  B  C
        0  1  4  7
        1  2  5  8
        2  3  6  9
        >>> df.ewm(alpha=0.5).mean()
                  A         B         C
        0  1.000000  4.000000  7.000000
        1  1.666667  4.666667  7.666667
        2  2.428571  5.428571  8.428571
        """
        ),
        klass="Series/Dataframe",
        axis="",
    )
    def aggregate(self, func, *args, **kwargs):
        return super().aggregate(func, *args, **kwargs)
    agg = aggregate
    @doc(
        template_header,
        create_section_header("Parameters"),
        args_compat,
        window_agg_numba_parameters,
        kwargs_compat,
        create_section_header("Returns"),
        template_returns,
        create_section_header("See Also"),
        template_see_also,
        create_section_header("Notes"),
        numba_notes.replace("\n", "", 1),
        window_method="ewm",
        aggregation_description="(exponential weighted moment) mean",
        agg_method="mean",
    )
    def mean(self, *args, engine=None, engine_kwargs=None, **kwargs):
        if maybe_use_numba(engine):
            # Numba path: build a JIT-compiled EWMA kernel for this window's
            # parameters and register it under the "ewma" cache key.
            ewma_func = generate_numba_ewma_func(
                engine_kwargs, self._com, self.adjust, self.ignore_na, self._deltas
            )
            return self._apply(
                ewma_func,
                numba_cache_key=(lambda x: x, "ewma"),
            )
        elif engine in ("cython", None):
            # Default Cython path; engine_kwargs only apply to numba.
            if engine_kwargs is not None:
                raise ValueError("cython engine does not accept engine_kwargs")
            nv.validate_window_func("mean", args, kwargs)
            window_func = partial(
                window_aggregations.ewma,
                com=self._com,
                adjust=self.adjust,
                ignore_na=self.ignore_na,
                deltas=self._deltas,
            )
            return self._apply(window_func)
        else:
            raise ValueError("engine must be either 'numba' or 'cython'")
    @doc(
        template_header,
        create_section_header("Parameters"),
        dedent(
            """
        bias : bool, default False
            Use a standard estimation bias correction.
        """
        ).replace("\n", "", 1),
        args_compat,
        kwargs_compat,
        create_section_header("Returns"),
        template_returns,
        create_section_header("See Also"),
        template_see_also[:-1],
        window_method="ewm",
        aggregation_description="(exponential weighted moment) standard deviation",
        agg_method="std",
    )
    def std(self, bias: bool = False, *args, **kwargs):
        nv.validate_window_func("std", args, kwargs)
        # std is simply the (sign-safe) square root of the EW variance.
        return zsqrt(self.var(bias=bias, **kwargs))
    def vol(self, bias: bool = False, *args, **kwargs):
        # Deprecated alias of std(); kept only for backwards compatibility.
        warnings.warn(
            (
                "vol is deprecated will be removed in a future version. "
                "Use std instead."
            ),
            FutureWarning,
            stacklevel=2,
        )
        return self.std(bias, *args, **kwargs)
    @doc(
        template_header,
        create_section_header("Parameters"),
        dedent(
            """
        bias : bool, default False
            Use a standard estimation bias correction.
        """
        ).replace("\n", "", 1),
        args_compat,
        kwargs_compat,
        create_section_header("Returns"),
        template_returns,
        create_section_header("See Also"),
        template_see_also[:-1],
        window_method="ewm",
        aggregation_description="(exponential weighted moment) variance",
        agg_method="var",
    )
    def var(self, bias: bool = False, *args, **kwargs):
        nv.validate_window_func("var", args, kwargs)
        # Variance is computed as the EW covariance of the series with itself.
        window_func = window_aggregations.ewmcov
        wfunc = partial(
            window_func,
            com=self._com,
            adjust=self.adjust,
            ignore_na=self.ignore_na,
            bias=bias,
        )
        def var_func(values, begin, end, min_periods):
            # pair each column with itself to get a variance out of ewmcov
            return wfunc(values, begin, end, min_periods, values)
        return self._apply(var_func)
    @doc(
        template_header,
        create_section_header("Parameters"),
        dedent(
            """
        other : Series or DataFrame , optional
            If not supplied then will default to self and produce pairwise
            output.
        pairwise : bool, default None
            If False then only matching columns between self and other will be
            used and the output will be a DataFrame.
            If True then all pairwise combinations will be calculated and the
            output will be a MultiIndex DataFrame in the case of DataFrame
            inputs. In the case of missing elements, only complete pairwise
            observations will be used.
        bias : bool, default False
            Use a standard estimation bias correction.
        """
        ).replace("\n", "", 1),
        kwargs_compat,
        create_section_header("Returns"),
        template_returns,
        create_section_header("See Also"),
        template_see_also[:-1],
        window_method="ewm",
        aggregation_description="(exponential weighted moment) sample covariance",
        agg_method="cov",
    )
    def cov(
        self,
        other: FrameOrSeriesUnion | None = None,
        pairwise: bool | None = None,
        bias: bool = False,
        **kwargs,
    ):
        from pandas import Series
        def cov_func(x, y):
            # Runs once per column pair selected by _apply_pairwise.
            x_array = self._prep_values(x)
            y_array = self._prep_values(y)
            window_indexer = self._get_window_indexer()
            min_periods = (
                self.min_periods
                if self.min_periods is not None
                else window_indexer.window_size
            )
            start, end = window_indexer.get_window_bounds(
                num_values=len(x_array),
                min_periods=min_periods,
                center=self.center,
                closed=self.closed,
            )
            result = window_aggregations.ewmcov(
                x_array,
                start,
                end,
                # error: Argument 4 to "ewmcov" has incompatible type
                # "Optional[int]"; expected "int"
                self.min_periods,  # type: ignore[arg-type]
                y_array,
                self._com,
                self.adjust,
                self.ignore_na,
                bias,
            )
            return Series(result, index=x.index, name=x.name)
        return self._apply_pairwise(self._selected_obj, other, pairwise, cov_func)
    @doc(
        template_header,
        create_section_header("Parameters"),
        dedent(
            """
        other : Series or DataFrame, optional
            If not supplied then will default to self and produce pairwise
            output.
        pairwise : bool, default None
            If False then only matching columns between self and other will be
            used and the output will be a DataFrame.
            If True then all pairwise combinations will be calculated and the
            output will be a MultiIndex DataFrame in the case of DataFrame
            inputs. In the case of missing elements, only complete pairwise
            observations will be used.
        """
        ).replace("\n", "", 1),
        kwargs_compat,
        create_section_header("Returns"),
        template_returns,
        create_section_header("See Also"),
        template_see_also[:-1],
        window_method="ewm",
        aggregation_description="(exponential weighted moment) sample correlation",
        agg_method="corr",
    )
    def corr(
        self,
        other: FrameOrSeriesUnion | None = None,
        pairwise: bool | None = None,
        **kwargs,
    ):
        from pandas import Series
        def cov_func(x, y):
            # corr(x, y) = cov(x, y) / sqrt(var(x) * var(y)); the three
            # quantities are all computed with bias=True so the bias terms
            # cancel in the ratio.
            x_array = self._prep_values(x)
            y_array = self._prep_values(y)
            window_indexer = self._get_window_indexer()
            min_periods = (
                self.min_periods
                if self.min_periods is not None
                else window_indexer.window_size
            )
            start, end = window_indexer.get_window_bounds(
                num_values=len(x_array),
                min_periods=min_periods,
                center=self.center,
                closed=self.closed,
            )
            def _cov(X, Y):
                return window_aggregations.ewmcov(
                    X,
                    start,
                    end,
                    min_periods,
                    Y,
                    self._com,
                    self.adjust,
                    self.ignore_na,
                    True,
                )
            with np.errstate(all="ignore"):
                cov = _cov(x_array, y_array)
                x_var = _cov(x_array, x_array)
                y_var = _cov(y_array, y_array)
                result = cov / zsqrt(x_var * y_var)
            return Series(result, index=x.index, name=x.name)
        return self._apply_pairwise(self._selected_obj, other, pairwise, cov_func)
class ExponentialMovingWindowGroupby(BaseWindowGroupby, ExponentialMovingWindow):
    """
    Provide an exponential moving window groupby implementation.
    """
    _attributes = ExponentialMovingWindow._attributes + BaseWindowGroupby._attributes
    def __init__(self, obj, *args, _grouper=None, **kwargs):
        super().__init__(obj, *args, _grouper=_grouper, **kwargs)
        if not obj.empty and self.times is not None:
            # sort the times and recalculate the deltas according to the groups
            groupby_order = np.concatenate(list(self._grouper.indices.values()))
            self._deltas = _calculate_deltas(
                self.times.take(groupby_order),  # type: ignore[union-attr]
                self.halflife,
            )
    def _get_window_indexer(self) -> GroupbyIndexer:
        """
        Return an indexer class that will compute the window start and end bounds
        Returns
        -------
        GroupbyIndexer
        """
        # NOTE(review): "groupby_indicies" looks misspelled, but it must match
        # the keyword declared by GroupbyIndexer - confirm before renaming.
        window_indexer = GroupbyIndexer(
            groupby_indicies=self._grouper.indices,
            window_indexer=ExponentialMovingWindowIndexer,
        )
        return window_indexer
| |
#!/usr/bin/env python
#################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
"""
This script is a python implementation of the "boot.go" script in "beam-sdks-python-container"
project of Apache Beam, see in:
https://github.com/apache/beam/blob/release-2.14.0/sdks/python/container/boot.go
The original is implemented in Go and would introduce unnecessary dependencies if used in a
pure Python project, so we provide this Python implementation, which is used when the Python
worker runs in process mode. It downloads and installs the user's Python artifacts, then
launches the Python SDK harness of Apache Beam.
"""
import argparse
import hashlib
import os
from subprocess import call
import grpc
import logging
import sys
from apache_beam.portability.api.beam_provision_api_pb2_grpc import ProvisionServiceStub
from apache_beam.portability.api.beam_provision_api_pb2 import GetProvisionInfoRequest
from apache_beam.portability.api.beam_artifact_api_pb2_grpc import ArtifactRetrievalServiceStub
from apache_beam.portability.api.beam_artifact_api_pb2 import (GetManifestRequest,
GetArtifactRequest)
from apache_beam.portability.api.endpoints_pb2 import ApiServiceDescriptor
from distutils.dist import Distribution
from google.protobuf import json_format, text_format
from pkg_resources import get_distribution, parse_version
def check_not_empty(check_str, error_message):
    """Terminate the process when a required argument value is empty.

    Logs *error_message* at critical level and exits with status 1 when
    *check_str* is the empty string; otherwise does nothing.
    """
    if check_str == "":
        # logging.critical is the canonical spelling (fatal is just an alias).
        logging.critical(error_message)
        # sys.exit instead of the site-provided exit() builtin, which is not
        # guaranteed to exist in every interpreter configuration.
        sys.exit(1)
# Interpreter that launched this bootstrap; reused to spawn pip and the SDK
# harness so sub-processes run under the same Python.
python_exec = sys.executable
# Environment variable names that configure the requirements installation.
PYTHON_REQUIREMENTS_FILE = "_PYTHON_REQUIREMENTS_FILE"
PYTHON_REQUIREMENTS_CACHE = "_PYTHON_REQUIREMENTS_CACHE"
PYTHON_REQUIREMENTS_INSTALL_DIR = "_PYTHON_REQUIREMENTS_INSTALL_DIR"
def append_path_to_env(env, name, value):
    """Prepend *value* to the path-list variable *name* in the dict *env*.

    Creates the variable with *value* as its only entry when absent;
    otherwise *value* goes in front of the existing entries, joined with
    ``os.pathsep``.
    """
    parts = [value]
    if name in env:
        parts.append(env[name])
    env[name] = os.pathsep.join(parts)
def get_site_packages_paths(prefix):
install_obj = Distribution().get_command_obj('install', create=True)
install_obj.prefix = prefix
install_obj.finalize_options()
installed_dir = [install_obj.install_purelib]
if install_obj.install_purelib != install_obj.install_platlib:
installed_dir.append(install_obj.install_platlib)
return installed_dir
def get_prefix_option(requirements_install_path):
    """Build the pip arguments that redirect installation to the given path.

    pip understands ``--prefix`` only from version 8.0 onward; for older
    pips the equivalent ``--install-option --prefix=...`` spelling is used.
    """
    current_pip_version = get_distribution("pip").version
    if parse_version(current_pip_version) >= parse_version('8.0.0'):
        return ["--prefix", requirements_install_path]
    return ['--install-option', '--prefix=' + requirements_install_path]
def pip_install_requirements():
    """Install the user's requirements file with pip, if one is configured.

    Driven entirely by environment variables: does nothing unless both
    _PYTHON_REQUIREMENTS_FILE and _PYTHON_REQUIREMENTS_INSTALL_DIR are set.
    On success, PYTHONPATH and PATH of the current process are extended so
    the freshly installed packages and their scripts become visible.

    Raises an Exception when the spawned "pip install" exits non-zero.
    """
    if (PYTHON_REQUIREMENTS_FILE in os.environ
            and PYTHON_REQUIREMENTS_INSTALL_DIR in os.environ):
        requirements_file_path = os.environ[PYTHON_REQUIREMENTS_FILE]
        requirements_install_path = os.environ[PYTHON_REQUIREMENTS_INSTALL_DIR]
        # Optional local directory of pre-downloaded packages.
        if PYTHON_REQUIREMENTS_CACHE in os.environ:
            requirements_cache_path = os.environ[PYTHON_REQUIREMENTS_CACHE]
        else:
            requirements_cache_path = None
        # Run pip in a copy of the environment that already sees the target
        # install directory; only publish the paths back to os.environ after
        # the install succeeded.
        env = dict(os.environ)
        installed_python_path = os.pathsep.join(get_site_packages_paths(requirements_install_path))
        installed_python_script_path = os.path.join(requirements_install_path, "bin")
        append_path_to_env(env, "PYTHONPATH", installed_python_path)
        append_path_to_env(env, "PATH", installed_python_script_path)
        pip_install_commands = [python_exec, "-m", "pip", "install", "--ignore-installed", "-r",
                                requirements_file_path]
        pip_install_commands.extend(get_prefix_option(requirements_install_path))
        if requirements_cache_path is not None:
            # Prefer packages found in the cache over downloading them.
            pip_install_commands.extend(["--find-links", requirements_cache_path])
        logging.info("Run command: %s\n" % " ".join(pip_install_commands))
        exit_code = call(
            pip_install_commands, stdout=sys.stdout, stderr=sys.stderr, env=env)
        if exit_code > 0:
            raise Exception(
                "Run command: %s error! exit code: %d" %
                (" ".join(pip_install_commands), exit_code))
        os.environ["PYTHONPATH"] = env["PYTHONPATH"]
        os.environ["PATH"] = env["PATH"]
if __name__ == "__main__":
    # print INFO and higher level messages
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument("--id", default="", help="Local identifier (required).")
    parser.add_argument("--logging_endpoint", default="",
                        help="Logging endpoint (required).")
    parser.add_argument("--artifact_endpoint", default="",
                        help="Artifact endpoint (required).")
    parser.add_argument("--provision_endpoint", default="",
                        help="Provision endpoint (required).")
    parser.add_argument("--control_endpoint", default="",
                        help="Control endpoint (required).")
    parser.add_argument("--semi_persist_dir", default="/tmp",
                        help="Local semi-persistent directory (optional).")
    args = parser.parse_args()
    worker_id = args.id
    logging_endpoint = args.logging_endpoint
    artifact_endpoint = args.artifact_endpoint
    provision_endpoint = args.provision_endpoint
    control_endpoint = args.control_endpoint
    semi_persist_dir = args.semi_persist_dir
    # All endpoints except semi_persist_dir are mandatory; bail out early.
    check_not_empty(worker_id, "No id provided.")
    check_not_empty(logging_endpoint, "No logging endpoint provided.")
    check_not_empty(artifact_endpoint, "No artifact endpoint provided.")
    check_not_empty(provision_endpoint, "No provision endpoint provided.")
    check_not_empty(control_endpoint, "No control endpoint provided.")
    logging.info("Initializing python harness: %s" % " ".join(sys.argv))
    # Every gRPC call below identifies this worker via request metadata.
    metadata = [("worker_id", worker_id)]
    # read job information from provision stub
    with grpc.insecure_channel(provision_endpoint) as channel:
        client = ProvisionServiceStub(channel=channel)
        info = client.GetProvisionInfo(GetProvisionInfoRequest(), metadata=metadata).info
        options = json_format.MessageToJson(info.pipeline_options)
    staged_dir = os.path.join(semi_persist_dir, "staged")
    # download files
    with grpc.insecure_channel(artifact_endpoint) as channel:
        client = ArtifactRetrievalServiceStub(channel=channel)
        # get file list via retrieval token
        response = client.GetManifest(GetManifestRequest(retrieval_token=info.retrieval_token),
                                      metadata=metadata)
        artifacts = response.manifest.artifact
        # download files and check hash values
        for artifact in artifacts:
            name = artifact.name
            permissions = artifact.permissions
            sha256 = artifact.sha256
            file_path = os.path.join(staged_dir, name)
            if os.path.exists(file_path):
                # Reuse a previously staged file only when its hash matches.
                with open(file_path, "rb") as f:
                    sha256obj = hashlib.sha256()
                    sha256obj.update(f.read())
                    hash_value = sha256obj.hexdigest()
                if hash_value == sha256:
                    logging.info("The file: %s already exists and its sha256 hash value: %s is the "
                                 "same as the expected hash value, skipped." % (file_path, sha256))
                    continue
                else:
                    os.remove(file_path)
            if not os.path.exists(os.path.dirname(file_path)):
                os.makedirs(os.path.dirname(file_path), 0o755)
            stream = client.GetArtifact(
                GetArtifactRequest(name=name, retrieval_token=info.retrieval_token),
                metadata=metadata)
            # Stream the artifact to disk, hashing it on the fly.
            with open(file_path, "wb") as f:
                sha256obj = hashlib.sha256()
                for artifact_chunk in stream:
                    sha256obj.update(artifact_chunk.data)
                    f.write(artifact_chunk.data)
                hash_value = sha256obj.hexdigest()
            if hash_value != sha256:
                raise Exception("The sha256 hash value: %s of the downloaded file: %s is not the"
                                " same as the expected hash value: %s" %
                                (hash_value, file_path, sha256))
            # NOTE(review): permissions appear to be an int whose decimal
            # digits spell the octal mode (e.g. 755), hence the str()/base-8
            # round trip - confirm against the Beam provision protocol.
            os.chmod(file_path, int(str(permissions), 8))
    pip_install_requirements()
    # Hand the harness its configuration via environment variables.
    os.environ["WORKER_ID"] = worker_id
    os.environ["PIPELINE_OPTIONS"] = options
    os.environ["SEMI_PERSISTENT_DIRECTORY"] = semi_persist_dir
    os.environ["LOGGING_API_SERVICE_DESCRIPTOR"] = text_format.MessageToString(
        ApiServiceDescriptor(url=logging_endpoint))
    os.environ["CONTROL_API_SERVICE_DESCRIPTOR"] = text_format.MessageToString(
        ApiServiceDescriptor(url=control_endpoint))
    env = dict(os.environ)
    # Test hook: stop right before actually launching the harness.
    if "FLINK_BOOT_TESTING" in os.environ and os.environ["FLINK_BOOT_TESTING"] == "1":
        exit(0)
    call([python_exec, "-m", "pyflink.fn_execution.sdk_worker_main"],
         stdout=sys.stdout, stderr=sys.stderr, env=env)
| |
#!/usr/bin/env python
"""
Asynchronous Ping
(c)2013 Mladen Vasic, mladen.vasic0@gmail.com
- requires root/administrator privileges
"""
import gevent
import time, struct
from gevent import socket, Timeout
from gevent.queue import Queue
from gevent.event import AsyncResult
from gevent.pool import Pool
import logging
logging.basicConfig(level=logging.WARNING)
__all__ = ['asyncping']
__version__ = '0.1.0'
class ping(object):
    """Asynchronous ICMP echo sender/receiver built on gevent.

    Work items are taken from *queue* as ``(destination, AsyncResult)``
    pairs; the result is set to the round-trip time in seconds, or ``None``
    when no reply arrives within *ttl* seconds. Uses a raw socket, so
    root/administrator privileges are required.
    """
    def __init__(self, queue, ttl=8, MAX_COUNTER=65535):
        self.queue = queue
        self.ttl = ttl  # seconds to wait for an echo reply
        self.MAX_COUNTER = MAX_COUNTER  # size of the in-flight lookup table
        self.lookup_table = [None] * MAX_COUNTER  # packet id -> AsyncResult
        self.counter = 0
        self.log = logging.getLogger("ping")
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)
        self.__recv_greenlet = gevent.spawn(self.__recv)
        self.__run_greenlet = gevent.spawn(self.__run)
    def stop(self):
        """Kill the sender and receiver greenlets."""
        self.__recv_greenlet.kill()
        self.__run_greenlet.kill()
    def __checksum(self, source_string):
        """Compute the 16-bit ones'-complement internet checksum (RFC 1071)."""
        sum = 0
        countTo = (len(source_string) / 2) * 2
        count = 0
        while count < countTo:
            # fold 16-bit words, low byte first
            thisVal = ord(source_string[count + 1]) * 256 + ord(source_string[count])
            sum = sum + thisVal
            sum = sum & 0xffffffff
            count = count + 2
        if countTo < len(source_string):
            # odd trailing byte
            sum = sum + ord(source_string[len(source_string) - 1])
            sum = sum & 0xffffffff
        sum = (sum >> 16) + (sum & 0xffff)
        sum = sum + (sum >> 16)
        answer = ~sum
        answer = answer & 0xffff
        # swap bytes into network order
        answer = answer >> 8 | (answer << 8 & 0xff00)
        return answer
    def __run(self):
        """
        Main loop: take a task from the queue, send the ping and spawn a
        greenlet that waits for the response.
        """
        while True:
            destination, external_event = self.queue.get()
            # Advance the packet id, wrapping before it can index past the
            # end of lookup_table. (The previous "== MAX_COUNTER" test let
            # counter reach MAX_COUNTER and overflow the table by one.)
            if self.counter >= self.MAX_COUNTER - 1:
                self.counter = 0
            else:
                self.counter += 1
            internal_event = AsyncResult()
            gevent.spawn(self.__wait_for_event, internal_event, external_event)
            # save internal_event in lookup_table
            self.lookup_table[self.counter] = internal_event
            self.__send(destination, self.counter)
    def __wait_for_event(self, event_in, event_out):
        """
        Greenlet that waits for a response for ttl seconds or times out.
        """
        try:
            result = event_in.get(timeout=self.ttl)
            event_out.set(result)
        except:
            # Bare except on purpose: gevent.Timeout derives from
            # BaseException and would slip past "except Exception".
            event_out.set(None)
    def __send(self, destination, id):
        """
        Build an ICMP echo request (type 8, code 0, seq 1) and send it.
        """
        checksum = 0
        # First pack with a zero checksum placeholder...
        header = struct.pack("bbHHh", 8, 0, checksum, id, 1)
        bytesInDouble = struct.calcsize("d")
        data = (192 - bytesInDouble) * "Q"
        # The payload starts with the send timestamp so the RTT can be
        # computed from the reply alone.
        data = struct.pack("d", time.time()) + data
        checksum = self.__checksum(header + data)
        # ...then repack with the real checksum in network byte order.
        header = struct.pack(
            "bbHHh", 8, 0, socket.htons(checksum), id, 1
        )
        packet = header + data
        try:
            self.socket.sendto(packet, (destination, 0))
        except Exception as e:
            self.log.error(repr(e))
    def __recv(self):
        """
        Receive loop: match echo replies back to their waiters by packet id.
        """
        while True:
            recPacket, addr = self.socket.recvfrom(1024)
            timeReceived = time.time()
            # 20 bytes of IP header precede the 8-byte ICMP header.
            icmpHeader = recPacket[20:28]
            type, code, checksum, packetID, sequence = struct.unpack(
                "bbHHh", icmpHeader
            )
            if type == 0:  # ICMP echo reply
                bytesInDouble = struct.calcsize("d")
                timeSent = struct.unpack("d", recPacket[28:28 + bytesInDouble])[0]
                try:
                    # with received packetID try to find internal_event in lookup_table
                    event = self.lookup_table[packetID]
                    if event:
                        event.set(timeReceived - timeSent)
                        self.lookup_table[packetID] = None
                except Exception as e:
                    self.log.error(repr(e))
            else:
                self.log.debug("packet not recognized from %s" % addr)
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    def task(queue, destination, retries):
        """
        Greenlet task: ping *destination* until a reply arrives or *retries*
        attempts have been made, printing any successful result.
        """
        result = False
        _retries = 0
        while not result and _retries < retries:
            event = AsyncResult()
            queue.put((destination, event))
            result = event.get()  # RTT in seconds, or None on timeout
            _retries += 1
        if result:
            print destination, result, _retries
    def iprange(start, end):
        """
        IP range generator: yields dotted-quad addresses from *start*
        (inclusive) up to *end* (exclusive).
        """
        # NOTE(review): assumes start precedes end; if *end* is never hit the
        # loop never terminates, and the first octet is never carry-checked -
        # confirm this is acceptable for this tool.
        current = start
        parts = current.split(".")
        while current != end:
            yield current
            parts[3] = int(parts[3]) + 1
            increment = False
            # propagate octet overflow from right to left
            for x in range(3,0,-1):
                if increment:
                    parts[x] = int(parts[x]) + 1
                    increment = False
                if (int(parts[x]) == 256):
                    parts[x] = 0
                    increment = True
            current = "%s.%s.%s.%s" % tuple(parts)
    import argparse
    parser = argparse.ArgumentParser(description="Asynchronous PING")
    parser.add_argument('start', help="starting IP address")
    parser.add_argument('end', help="ending IP address")
    parser.add_argument('-r', '--retries', default=2, help="number of retries")
    parser.add_argument('-t', '--ttl', default=2, help="time to wait for answer in seconds")
    parser.add_argument('-p', '--pool', default=64, help="maximum number of greenlets")
    args = parser.parse_args()
    queue = Queue()
    p = ping(queue, ttl=int(args.ttl))
    # One greenlet per address, bounded by the pool size.
    pool = Pool(int(args.pool))
    for ip in iprange(args.start, args.end):
        pool.spawn(task, queue, ip, int(args.retries))
    pool.join()
    p.stop()
| |
import converters
import parser
import sys
import json
# // test bot logic
def bot_logic(game_state):
    """Placeholder strategy used for testing: always returns a zero bet."""
    return 0
# // todo: need parsing of game_state: is_someone_allin(), is_someone_raise(), get_active_players(), get_all_money_in_game()
# // todo: get_probability()
# // todo:
# fold = 0
# someone_allin = True  # is_someone_allin()
# noone_raise = False  # is_someone_raise()
# active_player_count =  # get_active_players()
# our_probability_to_win = get_probability()
# current_pot = get_all_money_in_game()
# if someone_allin:
#     if is_good_chance_to_win():
#         return all_in
#     else:
#         return fold
# elif noone_raise:
#     return big_blind * 3
# else:
#     amount_to_call = get_max_raise()
#     if is_probability_good_to_play():
#         return amount_to_call
class Player:
VERSION = "1.5"
NAME = "Kraken"
def __init__(self):
self.bet = 0
self.risk = 1.
self.accuracy = 1000
self.active_players = 0
self.our_player = None
def get_preflop_probability(self, hand, players_count):
preflop_probability_table = parser.Parser().parse_preflop("preflop.txt")
hand_converted = converters.server_to_table(hand)
print "HAND_CONVERTED:", hand_converted
try:
preflop_probability = preflop_probability_table[hand_converted][players_count - 1]
except:
print "START_CONVERTING"
hand_converted_2 = hand_converted[1] + hand_converted[0]
if len(hand_converted) == 3:
hand_converted_2 += hand_converted[2]
print
"HAND_CONVERTED_2:", hand_converted_2
try:
preflop_probability = preflop_probability_table[hand_converted_2][players_count - 1]
except:
preflop_probability = 100
print "PREFLOP_PROB_EXIT"
return preflop_probability
def checkBet(self):
print >> sys.stderr, "CHECK_BET"
current_buy_in = self.game_state["current_buy_in"]
if current_buy_in == self.our_player["bet"]:
self.bet = 0
else:
self.bet = self.our_player["bet"]
def foldBet(self):
print >> sys.stderr, "FOLD_BET"
self.bet = 0
def callBet(self):
print >> sys.stderr, "CALL_BET"
self.bet = self.game_state['current_buy_in'] - self.our_player['bet']
def raiseBet(self):
print >> sys.stderr, "RAISE_BET"
self.bet = self.game_state['current_buy_in'] - self.our_player['bet'] + self.game_state['minimum_raise']
def all_in(self):
print >> sys.stderr, "ALL_IN"
if self.our_player:
our_stack = self.our_player["stack"]
self.bet = our_stack
return self.bet
def maxStack(self):
players_list = self.game_state["players"]
maxStack = 0
for pl in players_list:
if pl["status"] == "active" and pl["name"] != Player.NAME:
if pl["stack"] > maxStack:
maxStack = pl["stack"]
return maxStack
def betRequest(self, game_state):
self.game_state = game_state
try:
players_list = game_state["players"]
for pl in players_list:
if pl["status"] == "active":
self.active_players += 1
for player in players_list:
if player["name"] == Player.NAME:
self.our_player = player
break
print "ACTIVE PLAYERS: ", self.active_players
hand = self.our_player["hole_cards"]
preflop_probability = self.get_preflop_probability(hand, self.active_players)
tmphand = []
lastProb = 0
if len(game_state["community_cards"]) > 0:
for cardOne in hand:
for cardTwo in game_state["community_cards"]:
tmphand = [cardOne, cardTwo]
preflop_probability = self.get_preflop_probability(tmphand, self.active_players)
if (preflop_probability > lastProb):
lastProb = preflop_probability
preflop_probability = lastProb
else:
preflop_probability = self.get_preflop_probability(hand, self.active_players)
print >> sys.stderr, "HAND:", hand, "PROBABILITY: " + str(preflop_probability)
if preflop_probability < 35.0:
self.foldBet()
# elif preflop_probability >= 35.0 and preflop_probability < 37.0:
# self.checkBet()
elif preflop_probability >= 37.0 and preflop_probability < 50.0:
self.callBet()
elif preflop_probability > 50.0:
maxUsersStack = self.maxStack()
if maxUsersStack != 0 and maxUsersStack < self.our_player["stack"]:
self.all_in()
else:
self.raiseBet()
elif preflop_probability == 100:
self.all_in()
except Exception as e:
print >> sys.stderr, "MAIN EXCEPTION: ", e.message
self.all_in()
finally:
print "FINAL_BET:", self.bet
return self.bet
    def showdown(self, game_state):
        """Round-end callback; no post-round bookkeeping is performed."""
        pass
# pl = Player()
# data = {
# "tournament_id":"550d1d68cd7bd10003000003",
# "game_id":"550da1cb2d909006e90004b1",
# "round":0,
# "bet_index":0,
# "small_blind": 10,
# "current_buy_in": 320,
# "pot": 400,
# "minimum_raise": 240,
# "dealer": 1,
# "orbits": 7,
# "in_action": 1,
# "players": [
# {
# "id": 0,
# "name": "bob",
# "status": "active",
# "version": "Default random player",
# "stack": 1010,
# "bet": 320
# },
# {
# "id": 1,
# "name": "Kraken",
# "status": "active",
# "version": "Default random player",
# "stack": 1590,
# "bet": 80,
# "hole_cards": [
# {
# "rank": "6",
# "suit": "hearts"
# },
# {
# "rank": "K",
# "suit": "spades"
# }
# ]
# },
# {
# "id": 2,
# "name": "Chuck",
# "status": "out",
# "version": "Default random player",
# "stack": 0,
# "bet": 0
# }
# ],
# "community_cards": [
# {
# "rank": "4",
# "suit": "spades"
# },
# {
# "rank": "A",
# "suit": "hearts"
# },
# {
# "rank": "6",
# "suit": "clubs"
# }
# ]
# }
# json_data = json.dumps(data)
# json_msg = json.loads(json_data)
# pl.betRequest(json_msg)
| |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Pytest configuration."""
from __future__ import absolute_import, print_function
import json
import os
import shutil
import tempfile
import pytest
from flask import Flask
from flask_babelex import Babel
from flask_mail import Mail
from flask_menu import Menu as FlaskMenu
from flask_oauthlib.client import OAuth as FlaskOAuth
from flask_oauthlib.client import OAuthResponse
from invenio_accounts import InvenioAccounts
from invenio_db import InvenioDB, db
from invenio_userprofiles import InvenioUserProfiles, UserProfile
from invenio_userprofiles.views import blueprint_ui_init
from sqlalchemy_utils.functions import create_database, database_exists, \
drop_database
from invenio_oauthclient import InvenioOAuthClient
from invenio_oauthclient.contrib.cern import REMOTE_APP as CERN_REMOTE_APP
from invenio_oauthclient.contrib.github import REMOTE_APP as GITHUB_REMOTE_APP
from invenio_oauthclient.contrib.orcid import REMOTE_APP as ORCID_REMOTE_APP
from invenio_oauthclient.views.client import blueprint as blueprint_client
from invenio_oauthclient.views.settings import blueprint as blueprint_settings
@pytest.fixture
def base_app(request):
    """Flask application fixture without OAuthClient initialized."""
    # Per-test instance directory; removed again by teardown().
    instance_path = tempfile.mkdtemp()
    base_app = Flask('testapp')
    base_app.config.update(
        TESTING=True,
        WTF_CSRF_ENABLED=False,
        LOGIN_DISABLED=False,
        CACHE_TYPE='simple',
        OAUTHCLIENT_REMOTE_APPS=dict(
            cern=CERN_REMOTE_APP,
            orcid=ORCID_REMOTE_APP,
            github=GITHUB_REMOTE_APP,
        ),
        GITHUB_APP_CREDENTIALS=dict(
            consumer_key='github_key_changeme',
            consumer_secret='github_secret_changeme',
        ),
        ORCID_APP_CREDENTIALS=dict(
            consumer_key='orcid_key_changeme',
            consumer_secret='orcid_secret_changeme',
        ),
        CERN_APP_CREDENTIALS=dict(
            consumer_key='cern_key_changeme',
            consumer_secret='cern_secret_changeme',
        ),
        # use local memory mailbox
        EMAIL_BACKEND='flask_email.backends.locmem.Mail',
        SQLALCHEMY_DATABASE_URI=os.getenv('SQLALCHEMY_DATABASE_URI',
                                          'sqlite://'),
        SERVER_NAME='localhost',
        DEBUG=False,
        SECRET_KEY='TEST',
        SECURITY_DEPRECATED_PASSWORD_SCHEMES=[],
        # Plaintext password hashing keeps test user creation fast.
        SECURITY_PASSWORD_HASH='plaintext',
        SECURITY_PASSWORD_SCHEMES=['plaintext'],
    )
    FlaskMenu(base_app)
    Babel(base_app)
    Mail(base_app)
    InvenioDB(base_app)
    InvenioAccounts(base_app)
    with base_app.app_context():
        # In-memory SQLite ('sqlite://') needs no explicit database creation.
        if str(db.engine.url) != 'sqlite://' and \
                not database_exists(str(db.engine.url)):
            create_database(str(db.engine.url))
        db.create_all()
    def teardown():
        # Drop any real database and remove the instance directory.
        with base_app.app_context():
            db.session.close()
            if str(db.engine.url) != 'sqlite://':
                drop_database(str(db.engine.url))
            shutil.rmtree(instance_path)
    request.addfinalizer(teardown)
    # Push a request context so tests can use request-bound helpers directly.
    base_app.test_request_context().push()
    return base_app
def _init_app(app_):
    """Install the OAuth client extension and its blueprints on *app_*."""
    FlaskOAuth(app_)
    InvenioOAuthClient(app_)
    for bp in (blueprint_client, blueprint_settings):
        app_.register_blueprint(bp)
    return app_
@pytest.fixture
def app(base_app):
    """Flask application fixture (CSRF protection disabled)."""
    base_app.config['WTF_CSRF_ENABLED'] = False
    return _init_app(base_app)
@pytest.fixture
def app_with_csrf(base_app):
    """Flask application fixture with CSRF protection turned on."""
    base_app.config['WTF_CSRF_ENABLED'] = True
    return _init_app(base_app)
def _init_userprofiles(app_):
    """Set up the Invenio userprofiles module on *app_* and return it."""
    InvenioUserProfiles(app_)
    app_.register_blueprint(blueprint_ui_init)
    return app_
@pytest.fixture
def app_with_userprofiles(app):
    """App fixture with userprofile registration forms and CSRF disabled."""
    app.config['USERPROFILES_EXTEND_SECURITY_FORMS'] = True
    app.config['WTF_CSRF_ENABLED'] = False
    return _init_userprofiles(app)
@pytest.fixture
def app_with_userprofiles_csrf(app):
    """App fixture with userprofile registration forms and CSRF enabled."""
    app.config['USERPROFILES_EXTEND_SECURITY_FORMS'] = True
    app.config['WTF_CSRF_ENABLED'] = True
    return _init_userprofiles(app)
@pytest.fixture
def models_fixture(app):
    """Flask app with example data used to test models."""
    emails = (
        'existing@inveniosoftware.org',
        'test2@inveniosoftware.org',
        'test3@inveniosoftware.org',
    )
    with app.app_context():
        datastore = app.extensions['security'].datastore
        # All example accounts share the same credentials/state.
        for email in emails:
            datastore.create_user(email=email, password='tester', active=True)
        datastore.commit()
    return app
@pytest.fixture
def params():
    """Fixture producing remote-app parameter dicts keyed by consumer key."""
    def params(x):
        return {
            'request_token_params': {'scope': ''},
            'base_url': 'https://foo.bar/',
            'request_token_url': None,
            'access_token_url': 'https://foo.bar/oauth/access_token',
            'authorize_url': 'https://foo.bar/oauth/authorize',
            'consumer_key': x,
            'consumer_secret': 'testsecret',
        }
    return params
@pytest.fixture
def remote():
    """Fixture building a minimal anonymous remote-application stub."""
    attrs = dict(
        name='example_remote',
        request_token_params={'scope': ''},
        base_url='https://foo.bar/',
        request_token_url=None,
        access_token_url='https://foo.bar/oauth/access_token',
        authorize_url='https://foo.bar/oauth/authorize',
        consumer_key='testkey',
        consumer_secret='testsecret',
    )
    # Instantiate an ad-hoc class so attribute access works like a remote app.
    return type('test_remote', (), attrs)()
@pytest.fixture
def views_fixture(base_app, params):
    """Flask application with example data used to test views."""
    with base_app.app_context():
        datastore = base_app.extensions['security'].datastore
        # Same credentials/state for every example account.
        for email in ('existing@inveniosoftware.org',
                      'test2@inveniosoftware.org',
                      'test3@inveniosoftware.org'):
            datastore.create_user(email=email, password='tester', active=True)
        datastore.commit()
        base_app.config['OAUTHCLIENT_REMOTE_APPS'].update(
            dict(
                test=dict(
                    authorized_handler=lambda *args, **kwargs: 'TEST',
                    params=params('testid'),
                    title='MyLinkedTestAccount',
                ),
                test_invalid=dict(
                    authorized_handler=lambda *args, **kwargs: 'TEST',
                    params=params('test_invalidid'),
                    title='Test Invalid',
                ),
                full=dict(
                    params=params('fullid'),
                    title='Full',
                ),
            )
        )
        FlaskOAuth(base_app)
        InvenioOAuthClient(base_app)
        base_app.register_blueprint(blueprint_client)
        base_app.register_blueprint(blueprint_settings)
    return base_app
@pytest.fixture
def example_github(request):
    """GitHub example data."""
    # NOTE: the docstring previously said "ORCID example data" -- a
    # copy-paste slip from the fixture below; this one is GitHub token data.
    return {
        'name': 'Josiah Carberry',
        'expires_in': 3599,
        'access_token': 'test_access_token',
        'refresh_token': 'test_refresh_token',
        'scope': '/authenticate',
        'token_type': 'bearer',
    }
@pytest.fixture
def example_orcid(request):
    """ORCID example data: (token payload, expected account info)."""
    token_payload = {
        'name': 'Josiah Carberry',
        'expires_in': 3599,
        'orcid': '0000-0002-1825-0097',
        'access_token': 'test_access_token',
        'refresh_token': 'test_refresh_token',
        'scope': '/authenticate',
        'token_type': 'bearer'
    }
    account_info = dict(
        external_id='0000-0002-1825-0097',
        external_method='orcid',
        user=dict(
            profile=dict(
                full_name='Josiah Carberry'
            )
        )
    )
    return token_payload, account_info
@pytest.fixture()
def example_cern(request):
    """CERN example data: (OAuth response, token, expected account info)."""
    file_path = os.path.join(os.path.dirname(__file__),
                             'data/oauth_response_content.json')
    with open(file_path) as response_file:
        json_data = response_file.read()
    oauth_response = OAuthResponse(
        resp=None,
        content=json_data,
        content_type='application/json'
    )
    token = dict(
        access_token='test_access_token',
        token_type='bearer',
        expires_in=1199,
        refresh_token='test_refresh_token'
    )
    account_info = dict(
        user=dict(
            email='test.account@cern.ch',
            profile=dict(username='taccount', full_name='Test Account'),
        ),
        external_id='123456', external_method='cern',
        active=True
    )
    return oauth_response, token, account_info
@pytest.fixture(scope='session')
def orcid_bio():
    """ORCID response fixture loaded once per test session."""
    file_path = os.path.join(os.path.dirname(__file__), 'data/orcid_bio.json')
    with open(file_path) as response_file:
        return json.load(response_file)
@pytest.fixture()
def user(app_with_userprofiles):
    """Create one user with an attached profile and return it."""
    datastore = app_with_userprofiles.extensions['security'].datastore
    with db.session.begin_nested():
        user1 = datastore.create_user(email='info@inveniosoftware.org',
                                      password='tester', active=True)
        db.session.add(UserProfile(username='mynick', user=user1))
    db.session.commit()
    return user1
@pytest.fixture()
def form_test_data():
    """Test data to fill a registration form."""
    return {
        'email': 'test@tester.com',
        'profile': {
            'full_name': 'Test Tester',
            'username': 'test123',
        },
    }
| |
#!/usr/bin/env python
"""These tests only test the aspects of the `fseq.SeqReader` that does not
involve running the encoding, as that is a complex behaviour
involving all aspects of `fseq`; those tests are performed by `test_fseq`.
Those are:
SeqReader.run()
for res in SeqReader
...
SeqReader.clearResults()
SeqReader.next()
SeqReader.reportDirectory
SeqReader.results
However the `SeqReader.run()` is tested for raising exception when no
encoder is present.
"""
import unittest
from fseq import SeqReader, SeqEncoder, ReportBuilderBase
class TestSeqReader(unittest.TestCase):
    """Configuration/accessor tests for ``SeqReader``; no encoding is run."""
    def setUp(self):
        # Fake paths only -- these tests never touch the file system.
        self._mockSources = ['foo/bar.fasta', 'foo/foo/bar.fastq']
        self._mockTargets = ['reports', 'Reports']
    def test_noDataSource(self):
        # A fresh reader starts with an empty job queue.
        s = SeqReader()
        self.assertEqual(len(s), 0)
    def test_addDataSourceStart(self):
        # Sources may be given as a list or as a single path string.
        # NOTE: zip(...)[0] subscripting assumes Python 2 (py3 zip is lazy).
        s = SeqReader(dataSourcePaths=self._mockSources)
        self.assertEqual(len(s), len(self._mockSources))
        self.assertEqual(zip(*s.jobQueue)[0], tuple(self._mockSources))
        s = SeqReader(dataSourcePaths=self._mockSources[0])
        self.assertEqual(len(s), 1)
    def test_addDataSource(self):
        # addData mirrors the constructor behaviour for lists and scalars.
        s = SeqReader()
        s.addData(self._mockSources)
        self.assertEqual(len(s), len(self._mockSources))
        self.assertEqual(zip(*s.jobQueue)[0], tuple(self._mockSources))
        s = SeqReader()
        s.addData(self._mockSources[0])
        self.assertEqual(zip(*s.jobQueue)[0], (self._mockSources[0], ))
    def test_addDataSourceTargetStart(self):
        # Sources and report targets are paired positionally in the queue.
        s = SeqReader(dataSourcePaths=self._mockSources,
                      dataTargetPaths=self._mockTargets)
        self.assertEqual(len(s), len(self._mockSources))
        sources, targets = zip(*s.jobQueue)
        self.assertEqual(sources, tuple(self._mockSources))
        self.assertEqual(targets, tuple(self._mockTargets))
    def test_encoder(self):
        e1 = SeqEncoder()
        s = SeqReader(seqEncoder=e1)
        self.assertEqual(s.seqEncoder, e1)
    def test_encoderFails(self):
        # Anything that is not a SeqEncoder instance must be rejected.
        s = SeqReader()
        with self.assertRaises(TypeError):
            s.seqEncoder = None
        with self.assertRaises(TypeError):
            s.seqEncoder = 1
        with self.assertRaises(TypeError):
            s.seqEncoder = u"dafjk"
        with self.assertRaises(TypeError):
            s.seqEncoder = s
        with self.assertRaises(TypeError):
            SeqReader(seqEncoder=u"asfs")
    def test_encoderReplace(self):
        e1 = SeqEncoder()
        e2 = SeqEncoder()
        s = SeqReader(seqEncoder=e1)
        s.seqEncoder = e2
        self.assertEqual(s.seqEncoder, e2)
    """TEST REMOVED SINCE ENCODER ALWAYS PRESENT
    def test_allRunFails(self):
        s = SeqReader(dataSourcePaths='anypath')
        with self.assertRaises(ValueError):
            for res in s:
                pass
        self.assertRaises(ValueError, s.run)
        self.assertRaises(ValueError, s.next)
    """
    def test_reportBulder(self):
        # Builders can be supplied one at a time or as a tuple.
        rb = ReportBuilderBase()
        rb2 = ReportBuilderBase()
        s = SeqReader(reportBuilders=rb)
        self.assertEqual(len(tuple(s.reportBuilders)), 1)
        self.assertEqual(tuple(s.reportBuilders)[0], rb)
        s.addReportBuilders(rb2)
        self.assertEqual(len(tuple(s.reportBuilders)), 2)
        s = SeqReader(reportBuilders=(rb, rb2))
        self.assertEqual(len(tuple(s.reportBuilders)), 2)
        self.assertIn(rb, s.reportBuilders)
        self.assertIn(rb2, s.reportBuilders)
    def test_ReportBuilderBaseRemoval(self):
        # removeReportBuilders(*builders): no arguments removes all of them.
        rb = ReportBuilderBase()
        rb2 = ReportBuilderBase()
        s = SeqReader(reportBuilders=rb)
        s.addReportBuilders(rb2)
        s.removeReportBuilders(rb)
        self.assertEqual(len(tuple(s.reportBuilders)), 1)
        self.assertEqual(tuple(s.reportBuilders)[0], rb2)
        s.addReportBuilders(rb)
        s.removeReportBuilders(rb, rb2)
        self.assertEqual(len(tuple(s.reportBuilders)), 0)
        s.addReportBuilders(rb, rb2)
        s.removeReportBuilders()
        self.assertEqual(len(tuple(s.reportBuilders)), 0)
    def test_reportBulderFail(self):
        # Builders must be passed individually, not wrapped in a tuple.
        s = SeqReader()
        rbs = (ReportBuilderBase(), ReportBuilderBase())
        self.assertRaises(TypeError, s.addReportBuilders, None)
        self.assertRaises(TypeError, s.addReportBuilders, True)
        self.assertRaises(TypeError, s.addReportBuilders, 1)
        self.assertRaises(TypeError, s.addReportBuilders, "adsf")
        self.assertRaises(TypeError, s.addReportBuilders, rbs)
    def test_popDataSources(self):
        s = SeqReader()
        self.assertEqual(s.popDataSources, True)
        s.popDataSources = False
        self.assertEqual(s.popDataSources, False)
        s = SeqReader(popDataSources=False)
        self.assertEqual(s.popDataSources, False)
    def test_resetSeqEncoder(self):
        s = SeqReader()
        self.assertEqual(s.resetSeqEncoder, True)
        s.resetSeqEncoder = False
        self.assertEqual(s.resetSeqEncoder, False)
        s = SeqReader(resetSeqEncoder=False)
        self.assertEqual(s.resetSeqEncoder, False)
    def test_popEncodingResultsDefault(self):
        # Default flips to False only for a single (scalar) data source.
        s = SeqReader()
        self.assertEqual(s.popEncodingResults, True)
        s = SeqReader(dataSourcePaths=self._mockSources)
        self.assertEqual(s.popEncodingResults, True)
        s = SeqReader(dataSourcePaths=self._mockSources[0])
        self.assertEqual(s.popEncodingResults, False)
    def test_popEncodingResultsManual(self):
        # An explicit value always overrides the source-count heuristic.
        s = SeqReader(popEncodingResults=False)
        self.assertEqual(s.popEncodingResults, False)
        s = SeqReader(popEncodingResults=False,
                      dataSourcePaths=self._mockSources)
        self.assertEqual(s.popEncodingResults, False)
        s = SeqReader(popEncodingResults=True,
                      dataSourcePaths=self._mockSources[0])
        self.assertEqual(s.popEncodingResults, True)
    def test_results(self):
        s = SeqReader()
        self.assertEqual(list(s.results), [])
    def test_reportDirectory(self):
        s = SeqReader()
        self.assertEqual(s.reportDirectory, '')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import io
import os
import re
import tempfile
import unittest
import warnings
from collections import OrderedDict
from unittest import mock
from airflow import configuration
from airflow.configuration import (
DEFAULT_CONFIG,
AirflowConfigException,
AirflowConfigParser,
conf,
expand_env_var,
get_airflow_config,
get_airflow_home,
parameterized_config,
run_command,
)
from tests.test_utils.config import conf_vars
from tests.test_utils.reset_warning_registry import reset_warning_registry
@unittest.mock.patch.dict(
'os.environ',
{
'AIRFLOW__TESTSECTION__TESTKEY': 'testvalue',
'AIRFLOW__TESTSECTION__TESTPERCENT': 'with%percent',
'AIRFLOW__TESTCMDENV__ITSACOMMAND_CMD': 'echo -n "OK"',
'AIRFLOW__TESTCMDENV__NOTACOMMAND_CMD': 'echo -n "NOT OK"',
},
)
class TestConf(unittest.TestCase):
def test_airflow_home_default(self):
with unittest.mock.patch.dict('os.environ'):
if 'AIRFLOW_HOME' in os.environ:
del os.environ['AIRFLOW_HOME']
self.assertEqual(get_airflow_home(), expand_env_var('~/airflow'))
def test_airflow_home_override(self):
with unittest.mock.patch.dict('os.environ', AIRFLOW_HOME='/path/to/airflow'):
self.assertEqual(get_airflow_home(), '/path/to/airflow')
def test_airflow_config_default(self):
with unittest.mock.patch.dict('os.environ'):
if 'AIRFLOW_CONFIG' in os.environ:
del os.environ['AIRFLOW_CONFIG']
self.assertEqual(get_airflow_config('/home/airflow'), expand_env_var('/home/airflow/airflow.cfg'))
def test_airflow_config_override(self):
with unittest.mock.patch.dict('os.environ', AIRFLOW_CONFIG='/path/to/airflow/airflow.cfg'):
self.assertEqual(get_airflow_config('/home//airflow'), '/path/to/airflow/airflow.cfg')
    @conf_vars({("core", "percent"): "with%%inside"})
    def test_case_sensitivity(self):
        """get() treats both section and key case-insensitively."""
        # section and key are case insensitive for get method
        # note: this is not the case for as_dict method
        self.assertEqual(conf.get("core", "percent"), "with%inside")
        self.assertEqual(conf.get("core", "PERCENT"), "with%inside")
        self.assertEqual(conf.get("CORE", "PERCENT"), "with%inside")
    def test_env_var_config(self):
        """AIRFLOW__<SECTION>__<KEY> env vars are readable via conf.get()."""
        opt = conf.get('testsection', 'testkey')
        self.assertEqual(opt, 'testvalue')
        opt = conf.get('testsection', 'testpercent')
        self.assertEqual(opt, 'with%percent')
        self.assertTrue(conf.has_option('testsection', 'testkey'))
        # Keys inside kubernetes_environment_variables keep their raw name,
        # including the AIRFLOW__ prefix.
        with unittest.mock.patch.dict(
            'os.environ', AIRFLOW__KUBERNETES_ENVIRONMENT_VARIABLES__AIRFLOW__TESTSECTION__TESTKEY='nested'
        ):
            opt = conf.get('kubernetes_environment_variables', 'AIRFLOW__TESTSECTION__TESTKEY')
            self.assertEqual(opt, 'nested')
    @mock.patch.dict(
        'os.environ', AIRFLOW__KUBERNETES_ENVIRONMENT_VARIABLES__AIRFLOW__TESTSECTION__TESTKEY='nested'
    )
    @conf_vars({("core", "percent"): "with%%inside"})
    def test_conf_as_dict(self):
        """as_dict() exposes file config but hides env-provided values."""
        cfg_dict = conf.as_dict()
        # test that configs are picked up
        self.assertEqual(cfg_dict['core']['unit_test_mode'], 'True')
        self.assertEqual(cfg_dict['core']['percent'], 'with%inside')
        # test env vars
        self.assertEqual(cfg_dict['testsection']['testkey'], '< hidden >')
        self.assertEqual(
            cfg_dict['kubernetes_environment_variables']['AIRFLOW__TESTSECTION__TESTKEY'], '< hidden >'
        )
    def test_conf_as_dict_source(self):
        """as_dict(display_source=True) pairs each value with its origin."""
        # test display_source
        cfg_dict = conf.as_dict(display_source=True)
        self.assertEqual(cfg_dict['core']['load_examples'][1], 'airflow.cfg')
        self.assertEqual(cfg_dict['core']['load_default_connections'][1], 'airflow.cfg')
        self.assertEqual(cfg_dict['testsection']['testkey'], ('< hidden >', 'env var'))
    def test_conf_as_dict_sensitive(self):
        """as_dict(display_sensitive=True) reveals env-var-backed values."""
        # test display_sensitive
        cfg_dict = conf.as_dict(display_sensitive=True)
        self.assertEqual(cfg_dict['testsection']['testkey'], 'testvalue')
        self.assertEqual(cfg_dict['testsection']['testpercent'], 'with%percent')
        # test display_source and display_sensitive
        cfg_dict = conf.as_dict(display_sensitive=True, display_source=True)
        self.assertEqual(cfg_dict['testsection']['testkey'], ('testvalue', 'env var'))
    @conf_vars({("core", "percent"): "with%%inside"})
    def test_conf_as_dict_raw(self):
        """as_dict(raw=True) keeps '%' escaped as '%%' for re-parsing."""
        # test display_sensitive
        cfg_dict = conf.as_dict(raw=True, display_sensitive=True)
        self.assertEqual(cfg_dict['testsection']['testkey'], 'testvalue')
        # Values with '%' in them should be escaped
        self.assertEqual(cfg_dict['testsection']['testpercent'], 'with%%percent')
        self.assertEqual(cfg_dict['core']['percent'], 'with%%inside')
    def test_conf_as_dict_exclude_env(self):
        """as_dict(include_env=False) drops env-only sections entirely."""
        # test display_sensitive
        cfg_dict = conf.as_dict(include_env=False, display_sensitive=True)
        # Since testsection is only created from env vars, it shouldn't be
        # present at all if we don't ask for env vars to be included.
        self.assertNotIn('testsection', cfg_dict)
    def test_command_precedence(self):
        """_cmd variants are executed only for sensitive options and never
        override an explicitly configured plain value."""
        test_config = '''[test]
key1 = hello
key2_cmd = printf cmd_result
key3 = airflow
key4_cmd = printf key4_result
'''
        test_config_default = '''[test]
key1 = awesome
key2 = airflow
[another]
key6 = value6
'''
        test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default))
        test_conf.read_string(test_config)
        # Only options registered as sensitive may be resolved via _cmd.
        test_conf.sensitive_config_values = test_conf.sensitive_config_values | {
            ('test', 'key2'),
            ('test', 'key4'),
        }
        self.assertEqual('hello', test_conf.get('test', 'key1'))
        self.assertEqual('cmd_result', test_conf.get('test', 'key2'))
        self.assertEqual('airflow', test_conf.get('test', 'key3'))
        self.assertEqual('key4_result', test_conf.get('test', 'key4'))
        self.assertEqual('value6', test_conf.get('another', 'key6'))
        # Fallbacks apply only when neither layer provides the option.
        self.assertEqual('hello', test_conf.get('test', 'key1', fallback='fb'))
        self.assertEqual('value6', test_conf.get('another', 'key6', fallback='fb'))
        self.assertEqual('fb', test_conf.get('another', 'key7', fallback='fb'))
        self.assertEqual(True, test_conf.getboolean('another', 'key8_boolean', fallback='True'))
        self.assertEqual(10, test_conf.getint('another', 'key8_int', fallback='10'))
        self.assertEqual(1.0, test_conf.getfloat('another', 'key8_float', fallback='1'))
        self.assertTrue(test_conf.has_option('test', 'key1'))
        self.assertTrue(test_conf.has_option('test', 'key2'))
        self.assertTrue(test_conf.has_option('test', 'key3'))
        self.assertTrue(test_conf.has_option('test', 'key4'))
        self.assertFalse(test_conf.has_option('test', 'key5'))
        self.assertTrue(test_conf.has_option('another', 'key6'))
        cfg_dict = test_conf.as_dict(display_sensitive=True)
        self.assertEqual('cmd_result', cfg_dict['test']['key2'])
        self.assertNotIn('key2_cmd', cfg_dict['test'])
        # If we exclude _cmds then we should still see the commands to run, not
        # their values
        cfg_dict = test_conf.as_dict(include_cmds=False, display_sensitive=True)
        self.assertNotIn('key4', cfg_dict['test'])
        self.assertEqual('printf key4_result', cfg_dict['test']['key4_cmd'])
    @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac")
    @conf_vars(
        {
            ("secrets", "backend"): "airflow.providers.hashicorp.secrets.vault.VaultBackend",
            ("secrets", "backend_kwargs"): '{"url": "http://127.0.0.1:8200", "token": "token"}',
        }
    )
    def test_config_from_secret_backend(self, mock_hvac):
        """Get Config Value from a Secret Backend"""
        # Fake the Vault KV v2 read so no real Vault server is needed.
        mock_client = mock.MagicMock()
        mock_hvac.Client.return_value = mock_client
        mock_client.secrets.kv.v2.read_secret_version.return_value = {
            'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56',
            'lease_id': '',
            'renewable': False,
            'lease_duration': 0,
            'data': {
                'data': {'value': 'sqlite:////Users/airflow/airflow/airflow.db'},
                'metadata': {
                    'created_time': '2020-03-28T02:10:54.301784Z',
                    'deletion_time': '',
                    'destroyed': False,
                    'version': 1,
                },
            },
            'wrap_info': None,
            'warnings': None,
            'auth': None,
        }
        # The *_secret suffix routes the lookup through the secrets backend.
        test_config = '''[test]
sql_alchemy_conn_secret = sql_alchemy_conn
'''
        test_config_default = '''[test]
sql_alchemy_conn = airflow
'''
        test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default))
        test_conf.read_string(test_config)
        test_conf.sensitive_config_values = test_conf.sensitive_config_values | {
            ('test', 'sql_alchemy_conn'),
        }
        self.assertEqual(
            'sqlite:////Users/airflow/airflow/airflow.db', test_conf.get('test', 'sql_alchemy_conn')
        )
    def test_getboolean(self):
        """Test AirflowConfigParser.getboolean"""
        test_config = """
        [type_validation]
        key1 = non_bool_value
        [true]
        key2 = t
        key3 = true
        key4 = 1
        [false]
        key5 = f
        key6 = false
        key7 = 0
        [inline-comment]
        key8 = true #123
        """
        test_conf = AirflowConfigParser(default_config=test_config)
        # A non-boolean value must raise with a descriptive message.
        with self.assertRaisesRegex(
            AirflowConfigException,
            re.escape(
                'Failed to convert value to bool. Please check "key1" key in "type_validation" section. '
                'Current value: "non_bool_value".'
            ),
        ):
            test_conf.getboolean('type_validation', 'key1')
        self.assertTrue(isinstance(test_conf.getboolean('true', 'key3'), bool))
        self.assertEqual(True, test_conf.getboolean('true', 'key2'))
        self.assertEqual(True, test_conf.getboolean('true', 'key3'))
        self.assertEqual(True, test_conf.getboolean('true', 'key4'))
        self.assertEqual(False, test_conf.getboolean('false', 'key5'))
        self.assertEqual(False, test_conf.getboolean('false', 'key6'))
        self.assertEqual(False, test_conf.getboolean('false', 'key7'))
        # Inline comments after the value are stripped before conversion.
        self.assertEqual(True, test_conf.getboolean('inline-comment', 'key8'))
    def test_getint(self):
        """Test AirflowConfigParser.getint"""
        test_config = """
        [invalid]
        key1 = str
        [valid]
        key2 = 1
        """
        test_conf = AirflowConfigParser(default_config=test_config)
        # A non-numeric value must raise with a descriptive message.
        with self.assertRaisesRegex(
            AirflowConfigException,
            re.escape(
                'Failed to convert value to int. Please check "key1" key in "invalid" section. '
                'Current value: "str".'
            ),
        ):
            test_conf.getint('invalid', 'key1')
        self.assertTrue(isinstance(test_conf.getint('valid', 'key2'), int))
        self.assertEqual(1, test_conf.getint('valid', 'key2'))
    def test_getfloat(self):
        """Test AirflowConfigParser.getfloat"""
        test_config = """
        [invalid]
        key1 = str
        [valid]
        key2 = 1.23
        """
        test_conf = AirflowConfigParser(default_config=test_config)
        # A non-numeric value must raise with a descriptive message.
        with self.assertRaisesRegex(
            AirflowConfigException,
            re.escape(
                'Failed to convert value to float. Please check "key1" key in "invalid" section. '
                'Current value: "str".'
            ),
        ):
            test_conf.getfloat('invalid', 'key1')
        self.assertTrue(isinstance(test_conf.getfloat('valid', 'key2'), float))
        self.assertEqual(1.23, test_conf.getfloat('valid', 'key2'))
    def test_has_option(self):
        """has_option() is True only for keys present in an existing section."""
        test_config = '''[test]
key1 = value1
'''
        test_conf = AirflowConfigParser()
        test_conf.read_string(test_config)
        self.assertTrue(test_conf.has_option('test', 'key1'))
        self.assertFalse(test_conf.has_option('test', 'key_not_exists'))
        self.assertFalse(test_conf.has_option('section_not_exists', 'key1'))
    def test_remove_option(self):
        """remove_option() can keep or also drop the default-layer value."""
        test_config = '''[test]
key1 = hello
key2 = airflow
'''
        test_config_default = '''[test]
key1 = awesome
key2 = airflow
'''
        test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default))
        test_conf.read_string(test_config)
        self.assertEqual('hello', test_conf.get('test', 'key1'))
        # Removing only the user value exposes the default again.
        test_conf.remove_option('test', 'key1', remove_default=False)
        self.assertEqual('awesome', test_conf.get('test', 'key1'))
        test_conf.remove_option('test', 'key2')
        self.assertFalse(test_conf.has_option('test', 'key2'))
    def test_getsection(self):
        """getsection() merges defaults, user config and env vars; a missing
        section yields None."""
        test_config = '''
[test]
key1 = hello
[new_section]
key = value
'''
        test_config_default = '''
[test]
key1 = awesome
key2 = airflow
[testsection]
key3 = value3
'''
        test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default))
        test_conf.read_string(test_config)
        self.assertEqual(OrderedDict([('key1', 'hello'), ('key2', 'airflow')]), test_conf.getsection('test'))
        # 'testkey'/'testpercent' come from the class-level env-var patch.
        self.assertEqual(
            OrderedDict([('key3', 'value3'), ('testkey', 'testvalue'), ('testpercent', 'with%percent')]),
            test_conf.getsection('testsection'),
        )
        self.assertEqual(
            OrderedDict([('key', 'value')]),
            test_conf.getsection('new_section'),
        )
        self.assertEqual(
            None,
            test_conf.getsection('non_existent_section'),
        )
    def test_get_section_should_respect_cmd_env_variable(self):
        """A *_CMD env var is executed when reading the whole section."""
        # Build a tiny executable script that prints the secret value.
        with tempfile.NamedTemporaryFile(delete=False) as cmd_file:
            cmd_file.write(b"#!/usr/bin/env bash\n")
            cmd_file.write(b"echo -n difficult_unpredictable_cat_password\n")
            cmd_file.flush()
            os.chmod(cmd_file.name, 0o0555)
            cmd_file.close()
        with mock.patch.dict("os.environ", {"AIRFLOW__WEBSERVER__SECRET_KEY_CMD": cmd_file.name}):
            content = conf.getsection("webserver")
        os.unlink(cmd_file.name)
        self.assertEqual(content["secret_key"], "difficult_unpredictable_cat_password")
    def test_kubernetes_environment_variables_section(self):
        """Keys in this section keep their exact case and raw names."""
        test_config = '''
[kubernetes_environment_variables]
key1 = hello
AIRFLOW_HOME = /root/airflow
'''
        test_config_default = '''
[kubernetes_environment_variables]
'''
        test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default))
        test_conf.read_string(test_config)
        self.assertEqual(
            OrderedDict([('key1', 'hello'), ('AIRFLOW_HOME', '/root/airflow')]),
            test_conf.getsection('kubernetes_environment_variables'),
        )
    def test_broker_transport_options(self):
        """getsection() coerces values to int/bool/float/str as appropriate."""
        section_dict = conf.getsection("celery_broker_transport_options")
        self.assertTrue(isinstance(section_dict['visibility_timeout'], int))
        self.assertTrue(isinstance(section_dict['_test_only_bool'], bool))
        self.assertTrue(isinstance(section_dict['_test_only_float'], float))
        self.assertTrue(isinstance(section_dict['_test_only_string'], str))
    @conf_vars(
        {
            ("celery", "worker_concurrency"): None,
            ("celery", "celeryd_concurrency"): None,
        }
    )
    def test_deprecated_options(self):
        """Reading a renamed option falls back to the old name with a warning."""
        # Guarantee we have a deprecated setting, so we test the deprecation
        # lookup even if we remove this explicit fallback
        conf.deprecated_options = {
            ('celery', 'worker_concurrency'): ('celery', 'celeryd_concurrency'),
        }
        # Remove it so we are sure we use the right setting
        conf.remove_option('celery', 'worker_concurrency')
        with self.assertWarns(DeprecationWarning):
            with mock.patch.dict('os.environ', AIRFLOW__CELERY__CELERYD_CONCURRENCY="99"):
                self.assertEqual(conf.getint('celery', 'worker_concurrency'), 99)
        with self.assertWarns(DeprecationWarning), conf_vars({('celery', 'celeryd_concurrency'): '99'}):
            self.assertEqual(conf.getint('celery', 'worker_concurrency'), 99)
    @conf_vars(
        {
            ('logging', 'logging_level'): None,
            ('core', 'logging_level'): None,
        }
    )
    def test_deprecated_options_with_new_section(self):
        """An option moved to a new section still resolves via the old one."""
        # Guarantee we have a deprecated setting, so we test the deprecation
        # lookup even if we remove this explicit fallback
        conf.deprecated_options = {
            ('logging', 'logging_level'): ('core', 'logging_level'),
        }
        # Remove it so we are sure we use the right setting
        conf.remove_option('core', 'logging_level')
        conf.remove_option('logging', 'logging_level')
        with self.assertWarns(DeprecationWarning):
            with mock.patch.dict('os.environ', AIRFLOW__CORE__LOGGING_LEVEL="VALUE"):
                self.assertEqual(conf.get('logging', 'logging_level'), "VALUE")
        with self.assertWarns(DeprecationWarning), conf_vars({('core', 'logging_level'): 'VALUE'}):
            self.assertEqual(conf.get('logging', 'logging_level'), "VALUE")
@conf_vars(
{
("celery", "result_backend"): None,
("celery", "celery_result_backend"): None,
("celery", "celery_result_backend_cmd"): None,
}
)
def test_deprecated_options_cmd(self):
# Guarantee we have a deprecated setting, so we test the deprecation
# lookup even if we remove this explicit fallback
conf.deprecated_options[('celery', "result_backend")] = ('celery', 'celery_result_backend')
conf.sensitive_config_values.add(('celery', 'celery_result_backend'))
conf.remove_option('celery', 'result_backend')
with conf_vars({('celery', 'celery_result_backend_cmd'): '/bin/echo 99'}):
with self.assertWarns(DeprecationWarning):
tmp = None
if 'AIRFLOW__CELERY__RESULT_BACKEND' in os.environ:
tmp = os.environ.pop('AIRFLOW__CELERY__RESULT_BACKEND')
self.assertEqual(conf.getint('celery', 'result_backend'), 99)
if tmp:
os.environ['AIRFLOW__CELERY__RESULT_BACKEND'] = tmp
    def test_deprecated_values(self):
        """Deprecated value patterns are rewritten with a FutureWarning, but a
        genuinely new user-supplied value passes through silently."""
        def make_config():
            test_conf = AirflowConfigParser(default_config='')
            # Guarantee we have a deprecated setting, so we test the deprecation
            # lookup even if we remove this explicit fallback
            test_conf.deprecated_values = {
                'core': {
                    'hostname_callable': (re.compile(r':'), r'.', '2.1'),
                },
            }
            test_conf.read_dict(
                {
                    'core': {
                        'executor': 'SequentialExecutor',
                        'sql_alchemy_conn': 'sqlite://',
                        'hostname_callable': 'socket:getfqdn',
                    },
                }
            )
            return test_conf
        with self.assertWarns(FutureWarning):
            test_conf = make_config()
            self.assertEqual(test_conf.get('core', 'hostname_callable'), 'socket.getfqdn')
        with self.assertWarns(FutureWarning):
            with unittest.mock.patch.dict('os.environ', AIRFLOW__CORE__HOSTNAME_CALLABLE='socket:getfqdn'):
                test_conf = make_config()
                self.assertEqual(test_conf.get('core', 'hostname_callable'), 'socket.getfqdn')
        with reset_warning_registry():
            with warnings.catch_warnings(record=True) as warning:
                with unittest.mock.patch.dict(
                    'os.environ',
                    AIRFLOW__CORE__HOSTNAME_CALLABLE='CarrierPigeon',
                ):
                    test_conf = make_config()
                    self.assertEqual(test_conf.get('core', 'hostname_callable'), 'CarrierPigeon')
                    # A value not matching the deprecated pattern must not
                    # trigger any warning at all.
                    self.assertListEqual([], warning)
def test_deprecated_funcs(self):
        """Module-level config helpers warn about deprecation and delegate."""
        deprecated_helpers = (
            'load_test_config',
            'get',
            'getboolean',
            'getfloat',
            'getint',
            'has_option',
            'remove_option',
            'as_dict',
            'set',
        )
        for name in deprecated_helpers:
            with mock.patch(f'airflow.configuration.conf.{name}') as delegate:
                with self.assertWarns(DeprecationWarning):
                    getattr(configuration, name)()
                delegate.assert_called_once()
def test_command_from_env(self):
        """``*_CMD`` env vars are honoured only for sensitive options."""
        # Deliberately dedented: this is the literal config file content.
        test_cmdenv_config = '''[testcmdenv]
itsacommand = NOT OK
notacommand = OK
'''
        test_cmdenv_conf = AirflowConfigParser()
        test_cmdenv_conf.read_string(test_cmdenv_config)
        # Only ('testcmdenv', 'itsacommand') is registered as sensitive.
        test_cmdenv_conf.sensitive_config_values.add(('testcmdenv', 'itsacommand'))
        with unittest.mock.patch.dict('os.environ'):
            # AIRFLOW__TESTCMDENV__ITSACOMMAND_CMD maps to ('testcmdenv', 'itsacommand') in
            # sensitive_config_values and therefore should return 'OK' from the environment variable's
            # echo command, and must not return 'NOT OK' from the configuration
            self.assertEqual(test_cmdenv_conf.get('testcmdenv', 'itsacommand'), 'OK')
            # AIRFLOW__TESTCMDENV__NOTACOMMAND_CMD maps to no entry in sensitive_config_values and therefore
            # the option should return 'OK' from the configuration, and must not return 'NOT OK' from
            # the environment variable's echo command
            self.assertEqual(test_cmdenv_conf.get('testcmdenv', 'notacommand'), 'OK')
def test_parameterized_config_gen(self):
        """Templating the default config keeps the basics and fills placeholders."""
        cfg = parameterized_config(DEFAULT_CONFIG)
        # making sure some basic building blocks are present:
        for expected in ("[core]", "dags_folder", "sql_alchemy_conn", "fernet_key"):
            self.assertIn(expected, cfg)
        # making sure replacement actually happened
        for placeholder in ("{AIRFLOW_HOME}", "{FERNET_KEY}"):
            self.assertNotIn(placeholder, cfg)
def test_config_use_original_when_original_and_fallback_are_present(self):
        """When both FERNET_KEY and FERNET_KEY_CMD exist, the plain value wins."""
        self.assertTrue(conf.has_option("core", "FERNET_KEY"))
        self.assertFalse(conf.has_option("core", "FERNET_KEY_CMD"))
        fernet_key = conf.get('core', 'FERNET_KEY')
        # Adding the _cmd fallback must not change the resolved value.
        with conf_vars({('core', 'FERNET_KEY_CMD'): 'printf HELLO'}):
            fallback_fernet_key = conf.get("core", "FERNET_KEY")
        self.assertEqual(fernet_key, fallback_fernet_key)
def test_config_throw_error_when_original_and_fallback_is_absent(self):
        """With neither fernet_key nor fernet_key_cmd set, get() must raise."""
        self.assertTrue(conf.has_option("core", "FERNET_KEY"))
        self.assertFalse(conf.has_option("core", "FERNET_KEY_CMD"))
        with conf_vars({('core', 'fernet_key'): None}):
            with self.assertRaises(AirflowConfigException) as ctx:
                conf.get("core", "FERNET_KEY")
            expected = "section/key [core/fernet_key] not found in config"
            self.assertEqual(expected, str(ctx.exception))
def test_config_override_original_when_non_empty_envvar_is_provided(self):
        """A non-empty AIRFLOW__CORE__FERNET_KEY env var overrides the config."""
        key = "AIRFLOW__CORE__FERNET_KEY"
        value = "some value"
        with mock.patch.dict('os.environ', {key: value}):
            fernet_key = conf.get('core', 'FERNET_KEY')
        self.assertEqual(value, fernet_key)
def test_config_override_original_when_empty_envvar_is_provided(self):
        """Even an *empty* env var must override the value from the config.

        The original body was a copy-paste of the non-empty test (it used
        ``"some value"``), so the empty-envvar case was never exercised.
        """
        key = "AIRFLOW__CORE__FERNET_KEY"
        value = ""
        with mock.patch.dict('os.environ', {key: value}):
            fernet_key = conf.get('core', 'FERNET_KEY')
        self.assertEqual(value, fernet_key)
@mock.patch.dict("os.environ", {"AIRFLOW__CORE__DAGS_FOLDER": "/tmp/test_folder"})
def test_write_should_respect_env_variable(self):
        """conf.write() output must reflect env-var overrides, not just the file."""
        with io.StringIO() as string_file:
            conf.write(string_file)
            content = string_file.getvalue()
        self.assertIn("dags_folder = /tmp/test_folder", content)
def test_run_command(self):
        """run_command runs a shell line, decodes UTF-8 output, raises on rc != 0."""
        snippet = r'sys.stdout.buffer.write("\u1000foo".encode("utf8"))'
        program = f'import sys; {snippet}; sys.stdout.flush()'
        # Non-ASCII bytes on stdout must decode cleanly.
        self.assertEqual(run_command(f"python -c '{program}'"), '\u1000foo')
        self.assertEqual(run_command('echo "foo bar"'), 'foo bar\n')
        # A non-zero exit status surfaces as AirflowConfigException.
        self.assertRaises(AirflowConfigException, run_command, 'bash -c "exit 1"')
def test_confirm_unittest_mod(self):
        """Sanity check: the test suite runs with unit_test_mode enabled."""
        self.assertTrue(conf.get('core', 'unit_test_mode'))
@conf_vars({("core", "store_serialized_dags"): "True"})
def test_store_dag_code_default_config(self):
        """Without an explicit store_dag_code, it falls back to store_serialized_dags."""
        store_serialized_dags = conf.getboolean('core', 'store_serialized_dags', fallback=False)
        store_dag_code = conf.getboolean("core", "store_dag_code", fallback=store_serialized_dags)
        # store_dag_code is absent, so the fallback (True) must be used.
        self.assertFalse(conf.has_option("core", "store_dag_code"))
        self.assertTrue(store_serialized_dags)
        self.assertTrue(store_dag_code)
@conf_vars({("core", "store_serialized_dags"): "True", ("core", "store_dag_code"): "False"})
def test_store_dag_code_config_when_set(self):
        """An explicit store_dag_code=False wins over the serialized-dags fallback."""
        serialized_enabled = conf.getboolean('core', 'store_serialized_dags', fallback=False)
        dag_code_enabled = conf.getboolean("core", "store_dag_code", fallback=serialized_enabled)
        self.assertTrue(conf.has_option("core", "store_dag_code"))
        self.assertTrue(serialized_enabled)
        self.assertFalse(dag_code_enabled)
| |
# Copyright (c) 2014 Alex Meade. All rights reserved.
# Copyright (c) 2014 Clinton Knight. All rights reserved.
# Copyright (c) 2014 Andrew Kerr. All rights reserved.
# Copyright (c) 2015 Tom Barron. All rights reserved.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Mock unit tests for the NetApp block storage library
"""
import copy
import uuid
import mock
from oslo_utils import units
from cinder import exception
from cinder.i18n import _
from cinder import test
from cinder.tests.unit.volume.drivers.netapp.dataontap.client import (
fake_api as netapp_api)
from cinder.tests.unit.volume.drivers.netapp.dataontap import fakes as fake
from cinder.volume.drivers.netapp.dataontap import block_base
from cinder.volume.drivers.netapp import utils as na_utils
from cinder.volume import utils as volume_utils
class NetAppBlockStorageLibraryTestCase(test.TestCase):
def setUp(self):
        """Build the library under test with a fully mocked ZAPI client."""
        super(NetAppBlockStorageLibraryTestCase, self).setUp()
        # Inject fake netapp_lib module classes.
        netapp_api.mock_netapp_lib([block_base])
        kwargs = {'configuration': mock.Mock()}
        self.library = block_base.NetAppBlockStorageLibrary(
            'driver', 'protocol', **kwargs)
        # Keep a direct handle on the mocked client for call assertions.
        self.library.zapi_client = mock.Mock()
        self.zapi_client = self.library.zapi_client
        self.mock_request = mock.Mock()
def tearDown(self):
        # No extra cleanup needed beyond the base class teardown.
        super(NetAppBlockStorageLibraryTestCase, self).tearDown()
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_lun_attr',
                   mock.Mock(return_value={'Volume': 'vol1'}))
def test_get_pool(self):
        """get_pool reports the LUN's containing volume as the pool name."""
        pool = self.library.get_pool({'name': 'volume-fake-uuid'})
        self.assertEqual('vol1', pool)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_lun_attr',
                   mock.Mock(return_value=None))
def test_get_pool_no_metadata(self):
        """get_pool returns None when the LUN has no attribute metadata."""
        pool = self.library.get_pool({'name': 'volume-fake-uuid'})
        # assertIsNone gives a clearer failure message than assertEqual(None, x).
        self.assertIsNone(pool)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_lun_attr',
                   mock.Mock(return_value=dict()))
def test_get_pool_volume_unknown(self):
        """get_pool returns None when the metadata lacks a 'Volume' entry."""
        pool = self.library.get_pool({'name': 'volume-fake-uuid'})
        # assertIsNone gives a clearer failure message than assertEqual(None, x).
        self.assertIsNone(pool)
def test_create_volume(self):
        """Happy path: LUN is created in the scheduler-chosen pool, no cleanup."""
        volume_size_in_bytes = int(fake.SIZE) * units.Gi
        self.mock_object(na_utils, 'get_volume_extra_specs')
        self.mock_object(na_utils, 'log_extra_spec_warnings')
        self.mock_object(block_base, 'LOG')
        self.mock_object(volume_utils, 'extract_host', mock.Mock(
            return_value=fake.POOL_NAME))
        self.mock_object(self.library, '_setup_qos_for_volume',
                         mock.Mock(return_value=None))
        self.mock_object(self.library, '_create_lun')
        self.mock_object(self.library, '_create_lun_handle')
        self.mock_object(self.library, '_add_lun_to_table')
        self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
        self.library.create_volume(fake.VOLUME)
        self.library._create_lun.assert_called_once_with(
            fake.POOL_NAME, fake.LUN_NAME, volume_size_in_bytes,
            fake.LUN_METADATA, None)
        # No QoS group was set up, so nothing is marked for deletion and no
        # error is logged.
        self.assertEqual(0, self.library.
                         _mark_qos_policy_group_for_deletion.call_count)
        self.assertEqual(0, block_base.LOG.error.call_count)
def test_create_volume_no_pool(self):
        """A host string without pool information must be rejected."""
        self.mock_object(volume_utils, 'extract_host',
                         mock.Mock(return_value=None))
        self.assertRaises(exception.InvalidHost,
                          self.library.create_volume, fake.VOLUME)
def test_create_volume_exception_path(self):
        """If LUN creation blows up, the QoS group is cleaned up and logged."""
        self.mock_object(block_base, 'LOG')
        self.mock_object(na_utils, 'get_volume_extra_specs')
        self.mock_object(self.library, '_setup_qos_for_volume',
                         mock.Mock(return_value=None))
        self.mock_object(self.library, '_create_lun', mock.Mock(
            side_effect=Exception))
        self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.library.create_volume, fake.VOLUME)
        # Failure path must both mark the QoS group for deletion and log once.
        self.assertEqual(1, self.library.
                         _mark_qos_policy_group_for_deletion.call_count)
        self.assertEqual(1, block_base.LOG.exception.call_count)
def test_create_volume_no_pool_provided_by_scheduler(self):
        """A real host string missing the '#pool' suffix is rejected too."""
        fake_volume = copy.deepcopy(fake.VOLUME)
        # Set up fake volume whose 'host' field is missing pool information.
        fake_volume['host'] = '%s@%s' % (fake.HOST_NAME, fake.BACKEND_NAME)
        self.assertRaises(exception.InvalidHost, self.library.create_volume,
                          fake_volume)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_lun_attr')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_or_create_igroup')
def test_map_lun(self, mock_get_or_create_igroup, mock_get_lun_attr):
        """Mapping a LUN resolves the igroup and returns the mapped LUN id."""
        # Renamed from `os` so the local does not shadow the os module name.
        lun_os = 'linux'
        protocol = 'fcp'
        self.library.host_type = 'linux'
        mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH,
                                          'OsType': lun_os}
        mock_get_or_create_igroup.return_value = (fake.IGROUP1_NAME, lun_os,
                                                  'iscsi')
        self.zapi_client.map_lun.return_value = '1'
        lun_id = self.library._map_lun('fake_volume',
                                       fake.FC_FORMATTED_INITIATORS,
                                       protocol, None)
        self.assertEqual('1', lun_id)
        mock_get_or_create_igroup.assert_called_once_with(
            fake.FC_FORMATTED_INITIATORS, protocol, lun_os)
        self.zapi_client.map_lun.assert_called_once_with(
            fake.LUN_PATH, fake.IGROUP1_NAME, lun_id=None)
@mock.patch.object(block_base.NetAppBlockStorageLibrary, '_get_lun_attr')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_or_create_igroup')
@mock.patch.object(block_base, 'LOG', mock.Mock())
def test_map_lun_mismatch_host_os(
        self, mock_get_or_create_igroup, mock_get_lun_attr):
        """If the LUN OsType differs from host_type, host_type wins + warn."""
        # NOTE: local `os` here is the LUN's OS type string, not the os module.
        os = 'windows'
        protocol = 'fcp'
        self.library.host_type = 'linux'
        mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH, 'OsType': os}
        mock_get_or_create_igroup.return_value = (fake.IGROUP1_NAME, os,
                                                  'iscsi')
        self.library._map_lun('fake_volume',
                              fake.FC_FORMATTED_INITIATORS,
                              protocol, None)
        # The igroup is created for the configured host_type, not the LUN OS.
        mock_get_or_create_igroup.assert_called_once_with(
            fake.FC_FORMATTED_INITIATORS, protocol,
            self.library.host_type)
        self.zapi_client.map_lun.assert_called_once_with(
            fake.LUN_PATH, fake.IGROUP1_NAME, lun_id=None)
        self.assertEqual(1, block_base.LOG.warning.call_count)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_lun_attr')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_or_create_igroup')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_find_mapped_lun_igroup')
def test_map_lun_preexisting(self, mock_find_mapped_lun_igroup,
                             mock_get_or_create_igroup, mock_get_lun_attr):
        """If map_lun fails but a mapping already exists, reuse its LUN id."""
        os = 'linux'
        protocol = 'fcp'
        mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH, 'OsType': os}
        mock_get_or_create_igroup.return_value = (fake.IGROUP1_NAME, os,
                                                  'iscsi')
        # The existing mapping is discovered after the API error.
        mock_find_mapped_lun_igroup.return_value = (fake.IGROUP1_NAME, '2')
        self.zapi_client.map_lun.side_effect = netapp_api.NaApiError
        lun_id = self.library._map_lun(
            'fake_volume', fake.FC_FORMATTED_INITIATORS, protocol, None)
        self.assertEqual('2', lun_id)
        mock_find_mapped_lun_igroup.assert_called_once_with(
            fake.LUN_PATH, fake.FC_FORMATTED_INITIATORS)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_lun_attr')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_or_create_igroup')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_find_mapped_lun_igroup')
def test_map_lun_api_error(self, mock_find_mapped_lun_igroup,
                           mock_get_or_create_igroup, mock_get_lun_attr):
        """If map_lun fails and no existing mapping is found, re-raise."""
        os = 'linux'
        protocol = 'fcp'
        mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH, 'OsType': os}
        mock_get_or_create_igroup.return_value = (fake.IGROUP1_NAME, os,
                                                  'iscsi')
        # No pre-existing mapping: the NaApiError must propagate.
        mock_find_mapped_lun_igroup.return_value = (None, None)
        self.zapi_client.map_lun.side_effect = netapp_api.NaApiError
        self.assertRaises(netapp_api.NaApiError, self.library._map_lun,
                          'fake_volume', fake.FC_FORMATTED_INITIATORS,
                          protocol, None)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_find_mapped_lun_igroup')
def test_unmap_lun(self, mock_find_mapped_lun_igroup):
        """_unmap_lun unmaps from the igroup found for the initiators."""
        mock_find_mapped_lun_igroup.return_value = (fake.IGROUP1_NAME, 1)
        self.library._unmap_lun(fake.LUN_PATH, fake.FC_FORMATTED_INITIATORS)
        self.zapi_client.unmap_lun.assert_called_once_with(fake.LUN_PATH,
                                                           fake.IGROUP1_NAME)
def test_find_mapped_lun_igroup(self):
        """Igroup lookup is protocol specific and unimplemented in the base."""
        lookup_args = (fake.LUN_PATH, fake.FC_FORMATTED_INITIATORS)
        self.assertRaises(NotImplementedError,
                          self.library._find_mapped_lun_igroup, *lookup_args)
def test_has_luns_mapped_to_initiators(self):
        """The library delegates the mapped-LUNs check to the ZAPI client."""
        self.zapi_client.has_luns_mapped_to_initiators.return_value = True
        self.assertTrue(self.library._has_luns_mapped_to_initiators(
            fake.FC_FORMATTED_INITIATORS))
        self.zapi_client.has_luns_mapped_to_initiators.assert_called_once_with(
            fake.FC_FORMATTED_INITIATORS)
def test_get_or_create_igroup_preexisting(self):
        """An existing igroup for the initiators is reused, not recreated."""
        self.zapi_client.get_igroup_by_initiators.return_value = [fake.IGROUP1]
        self.library._create_igroup_add_initiators = mock.Mock()
        igroup_name, host_os, ig_type = self.library._get_or_create_igroup(
            fake.FC_FORMATTED_INITIATORS, 'fcp', 'linux')
        self.assertEqual(fake.IGROUP1_NAME, igroup_name)
        self.assertEqual('linux', host_os)
        self.assertEqual('fcp', ig_type)
        self.zapi_client.get_igroup_by_initiators.assert_called_once_with(
            fake.FC_FORMATTED_INITIATORS)
        # No new igroup may be created when one already matches.
        self.assertEqual(
            0, self.library._create_igroup_add_initiators.call_count)
@mock.patch.object(uuid, 'uuid4', mock.Mock(return_value=fake.UUID1))
def test_get_or_create_igroup_none_preexisting(self):
        """This method also tests _create_igroup_add_initiators."""
        self.zapi_client.get_igroup_by_initiators.return_value = []
        # NOTE: local `os` is the host OS string here, not the os module.
        igroup_name, os, ig_type = self.library._get_or_create_igroup(
            fake.FC_FORMATTED_INITIATORS, 'fcp', 'linux')
        # New igroups are named openstack-<uuid4>.
        self.assertEqual('openstack-' + fake.UUID1, igroup_name)
        self.zapi_client.create_igroup.assert_called_once_with(
            igroup_name, 'fcp', 'linux')
        # Every initiator must be added to the fresh igroup.
        self.assertEqual(len(fake.FC_FORMATTED_INITIATORS),
                         self.zapi_client.add_igroup_initiator.call_count)
        self.assertEqual('linux', os)
        self.assertEqual('fcp', ig_type)
def test_get_fc_target_wwpns(self):
        """FC target WWPN discovery is unimplemented in the base library."""
        self.assertRaises(
            NotImplementedError, self.library._get_fc_target_wwpns)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_build_initiator_target_map')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_map_lun')
def test_initialize_connection_fc(self, mock_map_lun,
                                  mock_build_initiator_target_map):
        """FC attach maps the LUN and returns the expected target info dict."""
        self.maxDiff = None
        mock_map_lun.return_value = '1'
        mock_build_initiator_target_map.return_value = (fake.FC_TARGET_WWPNS,
                                                        fake.FC_I_T_MAP, 4)
        target_info = self.library.initialize_connection_fc(fake.FC_VOLUME,
                                                            fake.FC_CONNECTOR)
        self.assertDictEqual(target_info, fake.FC_TARGET_INFO)
        mock_map_lun.assert_called_once_with(
            'fake_volume', fake.FC_FORMATTED_INITIATORS, 'fcp', None)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_build_initiator_target_map')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_map_lun')
def test_initialize_connection_fc_no_wwpns(
        self, mock_map_lun, mock_build_initiator_target_map):
        """FC attach must fail when no target WWPNs can be built."""
        mock_map_lun.return_value = '1'
        mock_build_initiator_target_map.return_value = (None, None, 0)
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.library.initialize_connection_fc,
                          fake.FC_VOLUME,
                          fake.FC_CONNECTOR)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_has_luns_mapped_to_initiators')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_unmap_lun')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_lun_attr')
def test_terminate_connection_fc(self, mock_get_lun_attr, mock_unmap_lun,
                                 mock_has_luns_mapped_to_initiators):
        """FC detach unmaps the LUN; with LUNs left mapped, no zone removal."""
        mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH}
        mock_unmap_lun.return_value = None
        mock_has_luns_mapped_to_initiators.return_value = True
        target_info = self.library.terminate_connection_fc(fake.FC_VOLUME,
                                                           fake.FC_CONNECTOR)
        self.assertDictEqual(target_info, fake.FC_TARGET_INFO_EMPTY)
        mock_unmap_lun.assert_called_once_with(fake.LUN_PATH,
                                               fake.FC_FORMATTED_INITIATORS)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_build_initiator_target_map')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_has_luns_mapped_to_initiators')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_unmap_lun')
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_lun_attr')
def test_terminate_connection_fc_no_more_luns(
        self, mock_get_lun_attr, mock_unmap_lun,
        mock_has_luns_mapped_to_initiators,
        mock_build_initiator_target_map):
        """With no LUNs left mapped, detach returns unmap/zone-removal info."""
        mock_get_lun_attr.return_value = {'Path': fake.LUN_PATH}
        mock_unmap_lun.return_value = None
        mock_has_luns_mapped_to_initiators.return_value = False
        mock_build_initiator_target_map.return_value = (fake.FC_TARGET_WWPNS,
                                                        fake.FC_I_T_MAP, 4)
        target_info = self.library.terminate_connection_fc(fake.FC_VOLUME,
                                                           fake.FC_CONNECTOR)
        self.assertDictEqual(target_info, fake.FC_TARGET_INFO_UNMAP)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_fc_target_wwpns')
def test_build_initiator_target_map_no_lookup_service(
        self, mock_get_fc_target_wwpns):
        """Without a fabric lookup service, every initiator maps to all targets."""
        self.library.lookup_service = None
        mock_get_fc_target_wwpns.return_value = fake.FC_FORMATTED_TARGET_WWPNS
        (target_wwpns, init_targ_map, num_paths) = \
            self.library._build_initiator_target_map(fake.FC_CONNECTOR)
        self.assertSetEqual(set(fake.FC_TARGET_WWPNS), set(target_wwpns))
        self.assertDictEqual(fake.FC_I_T_MAP_COMPLETE, init_targ_map)
        # num_paths is 0 when no lookup service narrows the fabric.
        self.assertEqual(0, num_paths)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_fc_target_wwpns')
def test_build_initiator_target_map_with_lookup_service(
        self, mock_get_fc_target_wwpns):
        """With a fabric lookup service, the map comes from the fabric view."""
        self.library.lookup_service = mock.Mock()
        self.library.lookup_service.get_device_mapping_from_network.\
            return_value = fake.FC_FABRIC_MAP
        mock_get_fc_target_wwpns.return_value = fake.FC_FORMATTED_TARGET_WWPNS
        (target_wwpns, init_targ_map, num_paths) = \
            self.library._build_initiator_target_map(fake.FC_CONNECTOR)
        self.assertSetEqual(set(fake.FC_TARGET_WWPNS), set(target_wwpns))
        self.assertDictEqual(fake.FC_I_T_MAP, init_targ_map)
        self.assertEqual(4, num_paths)
@mock.patch.object(na_utils, 'check_flags')
def test_do_setup_san_configured(self, mock_check_flags):
        """Configured LUN ostype / host type are taken verbatim by do_setup."""
        self.library.configuration.netapp_lun_ostype = 'windows'
        self.library.configuration.netapp_host_type = 'solaris'
        self.library.configuration.netapp_lun_space_reservation = 'disabled'
        self.library.do_setup(mock.Mock())
        self.assertTrue(mock_check_flags.called)
        self.assertEqual('windows', self.library.lun_ostype)
        self.assertEqual('solaris', self.library.host_type)
@mock.patch.object(na_utils, 'check_flags')
def test_do_setup_san_unconfigured(self, mock_check_flags):
        """Unset LUN ostype / host type both default to 'linux'."""
        self.library.configuration.netapp_lun_ostype = None
        self.library.configuration.netapp_host_type = None
        self.library.configuration.netapp_lun_space_reservation = 'enabled'
        self.library.do_setup(mock.Mock())
        self.assertTrue(mock_check_flags.called)
        self.assertEqual('linux', self.library.lun_ostype)
        self.assertEqual('linux', self.library.host_type)
def test_do_setup_space_reservation_disabled(self):
        """netapp_lun_space_reservation='disabled' maps to the string 'false'."""
        self.mock_object(na_utils, 'check_flags')
        config = self.library.configuration
        config.netapp_lun_ostype = None
        config.netapp_host_type = None
        config.netapp_lun_space_reservation = 'disabled'
        self.library.do_setup(mock.Mock())
        self.assertEqual('false', self.library.lun_space_reservation)
def test_do_setup_space_reservation_enabled(self):
        """netapp_lun_space_reservation='enabled' maps to the string 'true'."""
        self.mock_object(na_utils, 'check_flags')
        self.library.configuration.netapp_lun_ostype = None
        self.library.configuration.netapp_host_type = None
        self.library.configuration.netapp_lun_space_reservation = 'enabled'
        self.library.do_setup(mock.Mock())
        self.assertEqual('true', self.library.lun_space_reservation)
def test_get_existing_vol_manage_missing_id_path(self):
        """An empty manage ref (no source-id / source-name) is invalid."""
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.library._get_existing_vol_with_manage_ref, {})
def test_get_existing_vol_manage_not_found(self):
        """If no LUN matches the manage ref, the ref is reported invalid."""
        self.zapi_client.get_lun_by_args.return_value = []
        self.assertRaises(exception.ManageExistingInvalidReference,
                          self.library._get_existing_vol_with_manage_ref,
                          {'source-id': 'src_id',
                           'source-name': 'lun_path'})
        self.assertEqual(1, self.zapi_client.get_lun_by_args.call_count)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_extract_lun_info',
                   mock.Mock(return_value=block_base.NetAppLun(
                             'lun0', 'lun0', '3', {'UUID': 'src_id'})))
def test_get_existing_vol_manage_lun(self):
        """The LUN whose UUID matches source-id is selected from the results."""
        self.zapi_client.get_lun_by_args.return_value = ['lun0', 'lun1']
        lun = self.library._get_existing_vol_with_manage_ref(
            {'source-id': 'src_id', 'path': 'lun_path'})
        self.assertEqual(1, self.zapi_client.get_lun_by_args.call_count)
        # Only the first (matching) LUN should be inspected.
        self.library._extract_lun_info.assert_called_once_with('lun0')
        self.assertEqual('lun0', lun.name)
@mock.patch.object(block_base.NetAppBlockStorageLibrary,
                   '_get_existing_vol_with_manage_ref',
                   mock.Mock(return_value=block_base.NetAppLun(
                             'handle', 'name', '1073742824', {})))
def test_manage_existing_get_size(self):
        """LUN byte size is rounded up to whole GiB (1073742824 B -> 2 GiB)."""
        size = self.library.manage_existing_get_size(
            {'id': 'vol_id'}, {'ref': 'ref'})
        self.assertEqual(2, size)
        self.library._get_existing_vol_with_manage_ref.assert_called_once_with(
            {'ref': 'ref'})
@mock.patch.object(block_base.LOG, 'info')
def test_unmanage(self, log):
        """Unmanage looks up the LUN and logs once; the LUN is left in place."""
        mock_lun = block_base.NetAppLun('handle', 'name', '1',
                                        {'Path': 'p', 'UUID': 'uuid'})
        self.library._get_lun_from_table = mock.Mock(return_value=mock_lun)
        self.library.unmanage({'name': 'vol'})
        self.library._get_lun_from_table.assert_called_once_with('vol')
        self.assertEqual(1, log.call_count)
def test_check_vol_type_for_lun(self):
        """Volume-type validation is protocol specific and unimplemented here."""
        self.assertRaises(
            NotImplementedError, self.library._check_volume_type_for_lun,
            'vol', 'lun', 'existing_ref', {})
def test_is_lun_valid_on_storage(self):
        """Base implementation accepts any LUN as valid."""
        self.assertTrue(self.library._is_lun_valid_on_storage('lun'))
def test_initialize_connection_iscsi(self):
        """iSCSI attach maps the LUN, picks a target and builds properties."""
        target_details_list = fake.ISCSI_TARGET_DETAILS_LIST
        volume = fake.ISCSI_VOLUME
        connector = fake.ISCSI_CONNECTOR
        self.mock_object(block_base.NetAppBlockStorageLibrary, '_map_lun',
                         mock.Mock(return_value=fake.ISCSI_LUN['lun_id']))
        self.zapi_client.get_iscsi_target_details.return_value = (
            target_details_list)
        self.mock_object(block_base.NetAppBlockStorageLibrary,
                         '_get_preferred_target_from_list',
                         mock.Mock(return_value=target_details_list[1]))
        self.zapi_client.get_iscsi_service_details.return_value = (
            fake.ISCSI_SERVICE_IQN)
        self.mock_object(
            na_utils, 'get_iscsi_connection_properties',
            mock.Mock(return_value=fake.ISCSI_CONNECTION_PROPERTIES))
        target_info = self.library.initialize_connection_iscsi(volume,
                                                               connector)
        self.assertEqual(fake.ISCSI_CONNECTION_PROPERTIES, target_info)
        # The LUN is mapped for the connector's single iSCSI initiator.
        block_base.NetAppBlockStorageLibrary._map_lun.assert_called_once_with(
            fake.ISCSI_VOLUME['name'], [fake.ISCSI_CONNECTOR['initiator']],
            'iscsi', None)
        self.zapi_client.get_iscsi_target_details.assert_called_once_with()
        block_base.NetAppBlockStorageLibrary._get_preferred_target_from_list\
            .assert_called_once_with(
                target_details_list)
        self.zapi_client.get_iscsi_service_details.assert_called_once_with()
def test_initialize_connection_iscsi_no_target_list(self):
        """iSCSI attach fails fast when the backend reports no targets."""
        volume = fake.ISCSI_VOLUME
        connector = fake.ISCSI_CONNECTOR
        self.mock_object(block_base.NetAppBlockStorageLibrary, '_map_lun',
                         mock.Mock(return_value=fake.ISCSI_LUN['lun_id']))
        self.zapi_client.get_iscsi_target_details.return_value = None
        self.mock_object(block_base.NetAppBlockStorageLibrary,
                         '_get_preferred_target_from_list')
        self.mock_object(
            na_utils, 'get_iscsi_connection_properties',
            mock.Mock(return_value=fake.ISCSI_CONNECTION_PROPERTIES))
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.library.initialize_connection_iscsi,
                          volume, connector)
        # None of the downstream steps may run after the missing target list.
        self.assertEqual(
            0, block_base.NetAppBlockStorageLibrary
            ._get_preferred_target_from_list.call_count)
        self.assertEqual(
            0, self.zapi_client.get_iscsi_service_details.call_count)
        self.assertEqual(
            0, na_utils.get_iscsi_connection_properties.call_count)
def test_initialize_connection_iscsi_no_preferred_target(self):
        """iSCSI attach fails when no preferred target can be selected."""
        volume = fake.ISCSI_VOLUME
        connector = fake.ISCSI_CONNECTOR
        self.mock_object(block_base.NetAppBlockStorageLibrary, '_map_lun',
                         mock.Mock(return_value=fake.ISCSI_LUN['lun_id']))
        self.zapi_client.get_iscsi_target_details.return_value = None
        self.mock_object(block_base.NetAppBlockStorageLibrary,
                         '_get_preferred_target_from_list',
                         mock.Mock(return_value=None))
        self.mock_object(na_utils, 'get_iscsi_connection_properties')
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.library.initialize_connection_iscsi,
                          volume, connector)
        # Service details / connection properties must not be requested.
        self.assertEqual(0, self.zapi_client
                         .get_iscsi_service_details.call_count)
        self.assertEqual(0, na_utils.get_iscsi_connection_properties
                         .call_count)
def test_initialize_connection_iscsi_no_iscsi_service_details(self):
        """iSCSI attach fails when the service IQN cannot be retrieved."""
        target_details_list = fake.ISCSI_TARGET_DETAILS_LIST
        volume = fake.ISCSI_VOLUME
        connector = fake.ISCSI_CONNECTOR
        self.mock_object(block_base.NetAppBlockStorageLibrary, '_map_lun',
                         mock.Mock(return_value=fake.ISCSI_LUN['lun_id']))
        self.zapi_client.get_iscsi_target_details.return_value = (
            target_details_list)
        self.mock_object(block_base.NetAppBlockStorageLibrary,
                         '_get_preferred_target_from_list',
                         mock.Mock(return_value=target_details_list[1]))
        self.zapi_client.get_iscsi_service_details.return_value = None
        self.mock_object(na_utils, 'get_iscsi_connection_properties')
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.library.initialize_connection_iscsi,
                          volume,
                          connector)
        # Mapping and target selection still happen before the failure.
        block_base.NetAppBlockStorageLibrary._map_lun.assert_called_once_with(
            fake.ISCSI_VOLUME['name'], [fake.ISCSI_CONNECTOR['initiator']],
            'iscsi', None)
        self.zapi_client.get_iscsi_target_details.assert_called_once_with()
        block_base.NetAppBlockStorageLibrary._get_preferred_target_from_list\
            .assert_called_once_with(target_details_list)
def test_get_target_details_list(self):
        """With every interface enabled, the first target is preferred."""
        details = fake.ISCSI_TARGET_DETAILS_LIST
        chosen = self.library._get_preferred_target_from_list(details)
        self.assertEqual(details[0], chosen)
def test_get_preferred_target_from_empty_list(self):
        """An empty target list yields no preferred target."""
        target_details_list = []
        result = self.library._get_preferred_target_from_list(
            target_details_list)
        # assertIsNone gives a clearer failure message than assertEqual(None, x).
        self.assertIsNone(result)
def test_get_preferred_target_from_list_with_one_interface_disabled(self):
        """A disabled interface is skipped in favour of an enabled one."""
        target_details_list = copy.deepcopy(fake.ISCSI_TARGET_DETAILS_LIST)
        target_details_list[0]['interface-enabled'] = 'false'
        result = self.library._get_preferred_target_from_list(
            target_details_list)
        self.assertEqual(target_details_list[1], result)
def test_get_preferred_target_from_list_with_all_interfaces_disabled(self):
        """With every interface disabled, fall back to the first target."""
        target_details_list = copy.deepcopy(fake.ISCSI_TARGET_DETAILS_LIST)
        for target in target_details_list:
            target['interface-enabled'] = 'false'
        result = self.library._get_preferred_target_from_list(
            target_details_list)
        self.assertEqual(target_details_list[0], result)
def test_get_preferred_target_from_list_with_filter(self):
        """Only targets whose address passes the filter may be preferred."""
        target_details_list = fake.ISCSI_TARGET_DETAILS_LIST
        # Renamed from `filter` so the local does not shadow the builtin.
        address_filter = [target_detail['address']
                          for target_detail in target_details_list[1:]]
        result = self.library._get_preferred_target_from_list(
            target_details_list, address_filter)
        self.assertEqual(target_details_list[1], result)
@mock.patch.object(na_utils, 'check_flags', mock.Mock())
@mock.patch.object(block_base, 'LOG', mock.Mock())
def test_setup_error_invalid_lun_os(self):
        """An unknown netapp_lun_ostype fails check_for_setup_error with a log."""
        self.library.configuration.netapp_lun_ostype = 'unknown'
        self.library.do_setup(mock.Mock())
        self.assertRaises(exception.NetAppDriverException,
                          self.library.check_for_setup_error)
        msg = _("Invalid value for NetApp configuration"
                " option netapp_lun_ostype.")
        block_base.LOG.error.assert_called_once_with(msg)
@mock.patch.object(na_utils, 'check_flags', mock.Mock())
@mock.patch.object(block_base, 'LOG', mock.Mock())
def test_setup_error_invalid_host_type(self):
        """An unknown netapp_host_type fails check_for_setup_error with a log."""
        self.library.configuration.netapp_lun_ostype = 'linux'
        self.library.configuration.netapp_host_type = 'future_os'
        self.library.do_setup(mock.Mock())
        self.assertRaises(exception.NetAppDriverException,
                          self.library.check_for_setup_error)
        msg = _("Invalid value for NetApp configuration"
                " option netapp_host_type.")
        block_base.LOG.error.assert_called_once_with(msg)
@mock.patch.object(na_utils, 'check_flags', mock.Mock())
def test_check_for_setup_error_both_config(self):
        """With valid os/host types, setup check populates the LUN table."""
        self.library.configuration.netapp_lun_ostype = 'linux'
        self.library.configuration.netapp_host_type = 'linux'
        self.library.do_setup(mock.Mock())
        self.zapi_client.get_lun_list.return_value = ['lun1']
        self.library._extract_and_populate_luns = mock.Mock()
        self.library.check_for_setup_error()
        self.library._extract_and_populate_luns.assert_called_once_with(
            ['lun1'])
@mock.patch.object(na_utils, 'check_flags', mock.Mock())
def test_check_for_setup_error_no_os_host(self):
        """Defaulted os/host types also pass the setup check."""
        self.library.configuration.netapp_lun_ostype = None
        self.library.configuration.netapp_host_type = None
        self.library.do_setup(mock.Mock())
        self.zapi_client.get_lun_list.return_value = ['lun1']
        self.library._extract_and_populate_luns = mock.Mock()
        self.library.check_for_setup_error()
        self.library._extract_and_populate_luns.assert_called_once_with(
            ['lun1'])
def test_delete_volume(self):
        """delete_volume destroys the LUN found through its metadata path."""
        mock_get_lun_attr = self.mock_object(self.library, '_get_lun_attr')
        mock_get_lun_attr.return_value = fake.LUN_METADATA
        self.library.zapi_client = mock.Mock()
        self.library.lun_table = fake.LUN_TABLE
        self.library.delete_volume(fake.VOLUME)
        mock_get_lun_attr.assert_called_once_with(
            fake.LUN_NAME, 'metadata')
        self.library.zapi_client.destroy_lun.assert_called_once_with(fake.PATH)
def test_delete_volume_no_metadata(self):
        """If LUN metadata can't be read, delete_volume must be a no-op."""
        self.mock_object(self.library, '_get_lun_attr', mock.Mock(
            return_value=None))
        # NOTE(review): zapi_client is replaced twice (direct assignment plus
        # mock_object); the second call looks redundant — confirm intent.
        self.library.zapi_client = mock.Mock()
        self.mock_object(self.library, 'zapi_client')
        self.library.delete_volume(fake.VOLUME)
        self.library._get_lun_attr.assert_called_once_with(
            fake.LUN_NAME, 'metadata')
        self.assertEqual(0, self.library.zapi_client.destroy_lun.call_count)
        self.assertEqual(0,
                         self.zapi_client.
                         mark_qos_policy_group_for_deletion.call_count)
def test_clone_source_to_destination(self):
        """Happy path: clone the LUN, extend it, keep the QoS policy group."""
        self.mock_object(na_utils, 'get_volume_extra_specs', mock.Mock(
            return_value=fake.EXTRA_SPECS))
        self.mock_object(self.library, '_setup_qos_for_volume', mock.Mock(
            return_value=fake.QOS_POLICY_GROUP_INFO))
        self.mock_object(self.library, '_clone_lun')
        self.mock_object(self.library, 'extend_volume')
        self.mock_object(self.library, 'delete_volume')
        self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
        self.library.lun_space_reservation = 'false'
        self.library._clone_source_to_destination(fake.CLONE_SOURCE,
                                                  fake.CLONE_DESTINATION)
        na_utils.get_volume_extra_specs.assert_called_once_with(
            fake.CLONE_DESTINATION)
        self.library._setup_qos_for_volume.assert_called_once_with(
            fake.CLONE_DESTINATION, fake.EXTRA_SPECS)
        self.library._clone_lun.assert_called_once_with(
            fake.CLONE_SOURCE_NAME, fake.CLONE_DESTINATION_NAME,
            space_reserved='false',
            qos_policy_group_name=fake.QOS_POLICY_GROUP_NAME)
        self.library.extend_volume.assert_called_once_with(
            fake.CLONE_DESTINATION, fake.CLONE_DESTINATION_SIZE,
            qos_policy_group_name=fake.QOS_POLICY_GROUP_NAME)
        # On success, neither rollback path may run.
        self.assertEqual(0, self.library.delete_volume.call_count)
        self.assertEqual(0, self.library.
                         _mark_qos_policy_group_for_deletion.call_count)
def test_clone_source_to_destination_exception_path(self):
    """Failure while extending the clone must raise
    VolumeBackendAPIException and trigger cleanup (delete + QoS unmark)."""
    mock_get_specs = self.mock_object(
        na_utils, 'get_volume_extra_specs',
        mock.Mock(return_value=fake.EXTRA_SPECS))
    mock_setup_qos = self.mock_object(
        self.library, '_setup_qos_for_volume',
        mock.Mock(return_value=fake.QOS_POLICY_GROUP_INFO))
    self.mock_object(self.library, '_clone_lun')
    self.mock_object(self.library, 'extend_volume',
                     mock.Mock(side_effect=Exception))
    self.mock_object(self.library, 'delete_volume')
    self.mock_object(self.library, '_mark_qos_policy_group_for_deletion')
    self.library.lun_space_reservation = 'true'

    self.assertRaises(exception.VolumeBackendAPIException,
                      self.library._clone_source_to_destination,
                      fake.CLONE_SOURCE, fake.CLONE_DESTINATION)

    mock_get_specs.assert_called_once_with(fake.CLONE_DESTINATION)
    mock_setup_qos.assert_called_once_with(fake.CLONE_DESTINATION,
                                           fake.EXTRA_SPECS)
    self.library._clone_lun.assert_called_once_with(
        fake.CLONE_SOURCE_NAME, fake.CLONE_DESTINATION_NAME,
        space_reserved='true',
        qos_policy_group_name=fake.QOS_POLICY_GROUP_NAME)
    self.library.extend_volume.assert_called_once_with(
        fake.CLONE_DESTINATION, fake.CLONE_DESTINATION_SIZE,
        qos_policy_group_name=fake.QOS_POLICY_GROUP_NAME)
    self.assertEqual(1, self.library.delete_volume.call_count)
    self.assertEqual(
        1, self.library._mark_qos_policy_group_for_deletion.call_count)
def test_create_lun(self):
    """_create_lun is abstract on the base library and must raise."""
    args = (fake.VOLUME_ID, fake.LUN_ID, fake.SIZE, fake.LUN_METADATA)
    self.assertRaises(NotImplementedError, self.library._create_lun, *args)
def test_clone_lun(self):
    """_clone_lun is abstract on the base library and must raise."""
    args = (fake.VOLUME_ID, 'new-' + fake.VOLUME_ID)
    self.assertRaises(NotImplementedError, self.library._clone_lun, *args)
def test_create_volume_from_snapshot(self):
    """Creating from a snapshot delegates to _clone_source_to_destination
    with the snapshot's name and size as the clone source."""
    mock_do_clone = self.mock_object(self.library,
                                     '_clone_source_to_destination')
    expected_source = {'name': fake.SNAPSHOT['name'],
                       'size': fake.SNAPSHOT['volume_size']}

    self.library.create_volume_from_snapshot(fake.VOLUME, fake.SNAPSHOT)

    mock_do_clone.assert_has_calls(
        [mock.call(expected_source, fake.VOLUME)])
def test_create_cloned_volume(self):
    """Cloning a volume clones from the reference volume's backing LUN."""
    source_lun = block_base.NetAppLun(fake.LUN_HANDLE, fake.LUN_ID,
                                      fake.LUN_SIZE, fake.LUN_METADATA)
    self.mock_object(self.library, '_get_lun_from_table',
                     mock.Mock(return_value=source_lun))
    mock_do_clone = self.mock_object(self.library,
                                     '_clone_source_to_destination')
    expected_source = {'name': source_lun.name,
                       'size': fake.VOLUME_REF['size']}

    self.library.create_cloned_volume(fake.VOLUME, fake.VOLUME_REF)

    mock_do_clone.assert_has_calls(
        [mock.call(expected_source, fake.VOLUME)])
| |
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
from decimal import Decimal, ROUND_DOWN
import json
import random
import shutil
import subprocess
import time
import re
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
def p2p_port(n):
    """Return the P2P listen port for node *n*.

    The PID-derived offset keeps parallel test runs from colliding
    on the same ports.
    """
    return 11000 + n + (os.getpid() % 999)
def rpc_port(n):
    """Return the RPC listen port for node *n* (PID-offset, see p2p_port)."""
    return 12000 + n + (os.getpid() % 999)
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    value = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(value)))
    if int(round_tripped * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def sync_blocks(rpc_connections):
    """
    Wait until everybody has the same block count
    """
    while True:
        heights = [conn.getblockcount() for conn in rpc_connections]
        # all nodes agree when there is at most one distinct height
        if len(set(heights)) <= 1:
            break
        time.sleep(1)
def sync_mempools(rpc_connections):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    while True:
        reference_pool = set(rpc_connections[0].getrawmempool())
        # compare every other node's mempool against node 0's
        if all(set(conn.getrawmempool()) == reference_pool
               for conn in rpc_connections[1:]):
            break
        time.sleep(1)
# Map of node index -> subprocess.Popen handle for every bitcoind we spawn;
# used by start_node/stop_node/wait_bitcoinds to track and reap processes.
bitcoind_processes = {}
def initialize_datadir(dirname, n):
    """Create node *n*'s datadir under *dirname*, write its bitcoin.conf
    (regtest credentials plus per-node ports) and return the path."""
    datadir = os.path.join(dirname, "node" + str(n))
    if not os.path.isdir(datadir):
        os.makedirs(datadir)
    conf_lines = [
        "regtest=1",
        "rpcuser=rt",
        "rpcpassword=rt",
        "port=" + str(p2p_port(n)),
        "rpcport=" + str(rpc_port(n)),
    ]
    with open(os.path.join(datadir, "bitcoin.conf"), 'w') as f:
        f.write("\n".join(conf_lines) + "\n")
    return datadir
def initialize_chain(test_dir):
    """
    Create (or copy from cache) a 200-block-long chain and
    4 wallets.
    bitcoind and bitcoin-cli must be in search path.
    """
    # Build the cache only once; later calls just copy it into test_dir.
    if not os.path.isdir(os.path.join("cache", "node0")):
        devnull = open("/dev/null", "w+")
        # Create cache directories, run bitcoinds:
        for i in range(4):
            datadir=initialize_datadir("cache", i)
            args = [ os.getenv("BITCOIND", "bitcoind"), "-keypool=1", "-datadir="+datadir, "-discover=0" ]
            if i > 0:
                args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
            bitcoind_processes[i] = subprocess.Popen(args)
            # Block until the node's RPC interface answers.
            subprocess.check_call([ os.getenv("BITCOINCLI", "bitcoin-cli"), "-datadir="+datadir,
                                    "-rpcwait", "getblockcount"], stdout=devnull)
        devnull.close()
        rpcs = []
        for i in range(4):
            try:
                url = "http://rt:rt@127.0.0.1:%d"%(rpc_port(i),)
                rpcs.append(AuthServiceProxy(url))
            # NOTE(review): bare except also swallows SystemExit and
            # KeyboardInterrupt; a narrower clause would be safer.
            except:
                sys.stderr.write("Error connecting to "+url+"\n")
                sys.exit(1)
        # Create a 200-block-long chain; each of the 4 nodes
        # gets 25 mature blocks and 25 immature.
        # blocks are created with timestamps 10 minutes apart, starting
        # at 1 Jan 2014
        block_time = 1388534400
        for i in range(2):
            for peer in range(4):
                for j in range(25):
                    set_node_times(rpcs, block_time)
                    rpcs[peer].generate(1)
                    block_time += 10*60
                # Must sync before next peer starts generating blocks
                sync_blocks(rpcs)
        # Shut them down, and clean up cache directories:
        stop_nodes(rpcs)
        wait_bitcoinds()
        for i in range(4):
            os.remove(log_filename("cache", i, "debug.log"))
            os.remove(log_filename("cache", i, "db.log"))
            os.remove(log_filename("cache", i, "peers.dat"))
            os.remove(log_filename("cache", i, "fee_estimates.dat"))
    # Copy the cached datadirs into the test directory and rewrite the
    # config so each node gets this process's ports.
    for i in range(4):
        from_dir = os.path.join("cache", "node"+str(i))
        to_dir = os.path.join(test_dir, "node"+str(i))
        shutil.copytree(from_dir, to_dir)
        initialize_datadir(test_dir, i) # Overwrite port/rpcport in bitcoin.conf
def initialize_chain_clean(test_dir, num_nodes):
    """
    Create an empty blockchain and num_nodes wallets.
    Useful if a test case wants complete control over initialization.
    """
    for node_index in range(num_nodes):
        initialize_datadir(test_dir, node_index)
def _rpchost_to_args(rpchost):
'''Convert optional IP:port spec to rpcconnect/rpcport args'''
if rpchost is None:
return []
match = re.match('(\[[0-9a-fA-f:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
if not match:
raise ValueError('Invalid RPC host spec ' + rpchost)
rpcconnect = match.group(1)
rpcport = match.group(2)
if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
rpcconnect = rpcconnect[1:-1]
rv = ['-rpcconnect=' + rpcconnect]
if rpcport:
rv += ['-rpcport=' + rpcport]
return rv
def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None):
    """
    Start a bitcoind and return RPC connection to it

    i: node index (selects datadir and ports)
    dirname: parent directory holding node<i> datadirs
    extra_args: extra bitcoind command-line arguments
    rpchost: optional host[:port] the RPC interface listens on
    timewait: optional RPC timeout passed to AuthServiceProxy
    """
    datadir = os.path.join(dirname, "node"+str(i))
    args = [ os.getenv("BITCOIND", "bitcoind"), "-datadir="+datadir, "-keypool=1", "-discover=0", "-rest" ]
    if extra_args is not None: args.extend(extra_args)
    bitcoind_processes[i] = subprocess.Popen(args)
    devnull = open("/dev/null", "w+")
    # -rpcwait makes bitcoin-cli block until the node's RPC server is up.
    subprocess.check_call([ os.getenv("BITCOINCLI", "bitcoin-cli"), "-datadir="+datadir] +
                          _rpchost_to_args(rpchost) +
                          ["-rpcwait", "getblockcount"], stdout=devnull)
    devnull.close()
    url = "http://rt:rt@%s:%d" % (rpchost or '127.0.0.1', rpc_port(i))
    if timewait is not None:
        proxy = AuthServiceProxy(url, timeout=timewait)
    else:
        proxy = AuthServiceProxy(url)
    proxy.url = url # store URL on proxy for info
    return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None):
    """
    Start multiple bitcoinds, return RPC connections to them
    """
    if extra_args is None:
        extra_args = [None] * num_nodes
    return [start_node(i, dirname, extra_args[i], rpchost)
            for i in range(num_nodes)]
def log_filename(dirname, n_node, logname):
    """Path to *logname* inside node *n_node*'s regtest directory."""
    node_dir = "node" + str(n_node)
    return os.path.join(dirname, node_dir, "regtest", logname)
def stop_node(node, i):
    """Stop node *i* via RPC, wait for its process to exit and forget it."""
    node.stop()
    process = bitcoind_processes[i]
    process.wait()
    del bitcoind_processes[i]
def stop_nodes(nodes):
    """Issue a stop RPC to every node, then drop all connections."""
    for node in nodes:
        node.stop()
    # Emptying the list closes the RPC connections as a side effect.
    del nodes[:]
def set_node_times(nodes, t):
    """Set the same mock time *t* on every node."""
    for node in nodes:
        node.setmocktime(t)
def wait_bitcoinds():
    """Wait for every spawned bitcoind to exit cleanly, then forget them."""
    for process in bitcoind_processes.values():
        process.wait()
    bitcoind_processes.clear()
def connect_nodes(from_connection, node_num):
    """One-shot connect *from_connection* to node *node_num* and wait for
    the handshake to finish."""
    ip_port = "127.0.0.1:" + str(p2p_port(node_num))
    from_connection.addnode(ip_port, "onetry")
    # poll until version handshake complete to avoid race conditions
    # with transaction relaying
    while any(peer['version'] == 0
              for peer in from_connection.getpeerinfo()):
        time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
    """Connect nodes *a* and *b* to each other (both directions)."""
    connect_nodes(nodes[a], b)
    connect_nodes(nodes[b], a)
def find_output(node, txid, amount):
    """
    Return index to output of txid with value amount
    Raises exception if there is none.
    """
    txdata = node.getrawtransaction(txid, 1)
    for index, vout in enumerate(txdata["vout"]):
        if vout["value"] == amount:
            return index
    raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
    """
    Return a random set of unspent txouts that are enough to pay amount_needed
    """
    assert confirmations_required >= 0
    utxo = from_node.listunspent(confirmations_required)
    random.shuffle(utxo)
    inputs = []
    total_in = Decimal("0.00000000")
    # keep pulling random coins until we have enough (or run out)
    while total_in < amount_needed and utxo:
        txout = utxo.pop()
        total_in += txout["amount"]
        inputs.append({"txid": txout["txid"],
                       "vout": txout["vout"],
                       "address": txout["address"]})
    if total_in < amount_needed:
        raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
    return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
    """
    Create change output(s), return them
    """
    outputs = {}
    spent = amount_out + fee
    change = amount_in - spent
    if change > spent * 2:
        # Create an extra change output to break up big inputs; split the
        # change in two, being careful of rounding:
        half = Decimal(change / 2).quantize(
            Decimal('0.00000001'), rounding=ROUND_DOWN)
        change_address = from_node.getnewaddress()
        outputs[change_address] = half
        change = amount_in - spent - half
    if change > 0:
        outputs[from_node.getnewaddress()] = change
    return outputs
def send_zeropri_transaction(from_node, to_node, amount, fee):
    """
    Create&broadcast a zero-priority transaction.
    Returns (txid, hex-encoded-txdata)
    Ensures transaction is zero-priority by first creating a send-to-self,
    then using it's output
    """
    # Create a send-to-self with confirmed inputs:
    self_address = from_node.getnewaddress()
    # fee*2 covers the fee of this send-to-self plus the fee of the final
    # transaction spent below.
    (total_in, inputs) = gather_inputs(from_node, amount+fee*2)
    outputs = make_change(from_node, total_in, amount+fee, fee)
    outputs[self_address] = float(amount+fee)
    self_rawtx = from_node.createrawtransaction(inputs, outputs)
    self_signresult = from_node.signrawtransaction(self_rawtx)
    self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)
    vout = find_output(from_node, self_txid, amount+fee)
    # Now immediately spend the output to create a 1-input, 1-output
    # zero-priority transaction:
    inputs = [ { "txid" : self_txid, "vout" : vout } ]
    outputs = { to_node.getnewaddress() : float(amount) }
    rawtx = from_node.createrawtransaction(inputs, outputs)
    signresult = from_node.signrawtransaction(rawtx)
    txid = from_node.sendrawtransaction(signresult["hex"], True)
    return (txid, signresult["hex"])
def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random zero-priority transaction.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    # random sender/receiver (possibly the same node) and a random fee tier
    from_node = random.choice(nodes)
    to_node = random.choice(nodes)
    fee = min_fee + fee_increment * random.randint(0, fee_variants)
    txid, txhex = send_zeropri_transaction(from_node, to_node, amount, fee)
    return (txid, txhex, fee)
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random transaction.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    # random sender/receiver (possibly the same node) and a random fee tier
    from_node = random.choice(nodes)
    to_node = random.choice(nodes)
    fee = min_fee + fee_increment * random.randint(0, fee_variants)
    # fund the transaction and compute change
    total_in, inputs = gather_inputs(from_node, amount + fee)
    outputs = make_change(from_node, total_in, amount, fee)
    outputs[to_node.getnewaddress()] = float(amount)
    # build, sign and broadcast
    raw_tx = from_node.createrawtransaction(inputs, outputs)
    signed = from_node.signrawtransaction(raw_tx)
    txid = from_node.sendrawtransaction(signed["hex"], True)
    return (txid, signed["hex"], fee)
def assert_equal(thing1, thing2):
    """Raise AssertionError unless the two values compare equal."""
    if thing1 != thing2:
        raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
def assert_greater_than(thing1, thing2):
    """Raise AssertionError unless thing1 is strictly greater than thing2."""
    if thing1 <= thing2:
        raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
    """Fail unless calling fun(*args, **kwds) raises an exception of type
    *exc*; a different Exception subclass or no exception is an error."""
    try:
        fun(*args, **kwds)
    except exc:
        return
    except Exception as e:
        raise AssertionError("Unexpected exception raised: "+type(e).__name__)
    raise AssertionError("No exception raised")
| |
from pyoptsparse import Optimization, SNOPT, pyOpt_solution
from os import path
import numpy as np
from numpy import sqrt,pi,sin,cos,fabs
import matplotlib.pyplot as plt
from matplotlib import rcParams
rcParams['font.family'] = 'Times New Roman'
import VAWT_Wake_Model as vwm
from ACsingle import actuatorcylinder
from sys import argv
from joblib import Parallel, delayed
import _vawtwake
import _bpmvawtacoustic
def obj_func(xdict):
    """pyOptSparse objective callback: returns negative total farm power
    (kW, to be minimized) plus the SPL and turbine-separation constraint
    values for the layout in xdict['xvars']/xdict['yvars'].

    'funcs' is deliberately kept global so the main script can read the
    final SPL values after the optimizer returns.
    """
    global dia
    global rot
    global chord
    global twist
    global delta
    global B
    global H
    global rho
    global mu
    global Vinf
    global windroseDirections
    global windFrequencies
    global af_data
    global cl_data
    global cd_data
    global coef0
    global coef1
    global coef2
    global coef3
    global coef4
    global coef5
    global coef6
    global coef7
    global coef8
    global coef9
    global funcs
    global power_iso_tot
    global ntheta
    global interp
    # Simpson's rule integration division
    m = 220
    n = 200
    x = xdict['xvars'] # turbine x-positions
    y = xdict['yvars'] # turbine y-positions
    funcs = {}
    nturb = np.size(x) # number of turbines
    nwind = np.size(windroseDirections) # number of wind directions
    power_turb = np.zeros(nturb)
    power_dir = np.zeros(nwind)
    # reordering rotation directions to a matrix of wind directions
    rotw = np.zeros((nwind,nturb))
    k = 0
    for i in range(nwind):
        for j in range(nturb):
            rotw[i,j] = rot[k]
            k += 1
    winddir_turb = np.zeros_like(windroseDirections)
    for d in range(0, nwind):
        # adjusting coordinate system for wind direction
        winddir_turb[d] = 270. - windroseDirections[d]
        if winddir_turb[d] < 0.:
            winddir_turb[d] += 360.
        winddir_turb_rad = pi*winddir_turb[d]/180.0
        # rotate turbine coordinates into the wind-aligned frame
        xw = x*cos(-winddir_turb_rad) - y*sin(-winddir_turb_rad)
        yw = x*sin(-winddir_turb_rad) + y*cos(-winddir_turb_rad)
        # calculating wake velocity components
        wakex,wakey = vawt_wake(xw,yw,dia,rotw[d],ntheta,chord,B,Vinf,coef0,coef1,coef2,coef3,coef4,coef5,coef6,coef7,coef8,coef9,m,n)
        # calculating power (W); one parallel job per turbine
        res = Parallel(n_jobs=-1)(delayed(vawt_power)(i,dia,rotw[d],ntheta,chord,H,B,Vinf,af_data,cl_data,cd_data,twist,delta,rho,interp,wakex,wakey) for i in range(nturb) )
        for i in range(nturb):
            power_turb[i] = res[i]
        power_dir[d] = np.sum(power_turb)*windFrequencies[d]
        # calculating noise (dB)
        SPL_d = bpm_noise(x,y,windroseDirections[d],rotw[d],wakex,wakey)
        SPL_dir = np.array(SPL_d)
        if d == 0:
            SPL = SPL_dir
        else:
            SPL = np.append(SPL,SPL_dir)
    power = np.sum(power_dir)
    # objective in kW (negated: the optimizer minimizes)
    funcs['obj'] = (-1.*power/1e3)
    # SPL constraint values are scaled by 1/10 (limit is SPLlim/10 too)
    funcs['SPL'] = (SPL)/10.
    print 'Power:',power,'W (Isolated: '+str(power_iso_tot)+' W; '+str(power/power_iso_tot)+') Max SPL:',max(SPL),'dB'
    # calculating separation between turbines
    sep = sep_func(np.append(x,y))
    funcs['sep'] = sep
    fail = False
    return funcs, fail
def vawt_wake(xw,yw,dia,rotw,ntheta,chord,B,Vinf,coef0,coef1,coef2,coef3,coef4,coef5,coef6,coef7,coef8,coef9,m,n):
    """Wake velocity components felt by each turbine.

    For every turbine, the wakes of all *other* turbines are overlapped at
    ntheta points around its blade path; per-turbine results are appended
    into two flat arrays (length nturb*ntheta) which are returned.
    """
    global wake_method
    t = np.size(xw) # number of turbines
    for i in range(t):
        # exclude turbine i itself from the wake-source set
        xt = np.delete(xw,i)
        yt = np.delete(yw,i)
        diat = np.delete(dia,i)
        rott = np.delete(rotw,i)
        # xtd = np.delete(xw,i)
        # ytd = np.delete(yw,i)
        # diatd = np.delete(dia,i)
        # rottd = np.delete(rotw,i)
        # xt,yt,diat,rott = vwm.wake_order(xw[i],yw[i],dia[i],xtd,ytd,diatd,rottd)
        # NOTE(review): if wake_method is neither 'simp' nor 'gskr', the
        # wakexd/wakeyd names are never bound and a NameError is raised below.
        if wake_method == 'simp':
            wakexd,wakeyd = _vawtwake.overlap(ntheta,xt,yt,diat,rott,chord,B,xw[i],yw[i],dia[i],Vinf,coef0,coef1,coef2,coef3,coef4,coef5,coef6,coef7,coef8,coef9,m,n,1,1)
        elif wake_method == 'gskr':
            wakexd,wakeyd = vwm.overlap(ntheta,xt,yt,diat,rott,chord,B,xw[i],yw[i],dia[i],Vinf,False)
        if i == 0:
            wakex = wakexd
            wakey = wakeyd
        else:
            wakex = np.append(wakex,wakexd)
            wakey = np.append(wakey,wakeyd)
    return wakex,wakey
def vawt_power(i,dia,rotw,ntheta,chord,H,B,Vinf,af_data,cl_data,cd_data,twist,delta,rho,interp,wakext,wakeyt):
    """Power (W) of turbine *i* given the concatenated wake arrays.

    Slices turbine i's ntheta wake samples out of wakext/wakeyt, then
    evaluates either the actuator-cylinder model (useAC True) or the
    precomputed velocity/power tables via _vawtwake.powercalc (useAC False).
    """
    global thetavec
    global Vnp
    global Vnn
    global Vtp
    global Vtn
    global Cpp
    global Cpn
    global useAC
    # extract this turbine's slice of the flattened wake arrays
    wakex = np.zeros(ntheta)
    wakey = np.zeros(ntheta)
    for j in range(ntheta):
        wakex[j] = wakext[j+ntheta*i]
        wakey[j] = wakeyt[j+ntheta*i]
    if useAC == True:
        Cp,_,_,_ = actuatorcylinder(ntheta,af_data,cl_data,cd_data,dia[i]/2.,chord,twist,delta,B,rotw[i],Vinf,rho,interp,wakex,wakey)
        power_turb = (0.5*rho*Vinf**3)*(dia[i]*H)*Cp
    elif useAC == False:
        power_turb,Cp = _vawtwake.powercalc(thetavec,Vinf,wakex,wakey,Vnp,Vnn,Vtp,Vtn,Cpp,Cpn,rotw[i],dia[i]/2.,H,af_data,cl_data,cd_data,twist,rho,interp)
    return power_turb
# SPL CALCULATION BASED ON BPM ACOUSTIC MODEL
def bpm_noise(turbineX,turbineY,winddir,rot,wakex,wakey):
    """Sound pressure level (one value per observer, via bpmnoise/Parallel)
    for the given turbine layout and a single wind direction."""
    global turb_dia
    global obs
    global B
    global Hub
    global H
    global chord
    global Vinf
    global ntheta
    nobs = np.size(obs[:,0])
    noise_corr = 1.        # empirical noise correction factor
    nu = 1.78e-5           # kinematic viscosity of air (m^2/s)
    c0 = 343.2             # speed of sound (m/s)
    psi = 14.0             # solid angle parameter (deg)
    AR = 5.                # blade aspect ratio
    rad = turb_dia/2.
    # discretize each blade into 'div' spanwise segments
    div = 5
    c = np.ones(div)*chord # chord lengths
    c1 = c*0.5             # distance to leading edge
    alpha = np.ones(div)*0.0 # angles of attack
    high = np.linspace(0,H,div+1) # segment heights
    # one parallel job per observer position
    SPL = Parallel(n_jobs=-1)(delayed(bpmnoise)(ntheta,turbineX,turbineY,obs[i],winddir,B,Hub,high,rad,c,c1,alpha,nu,c0,psi,AR,noise_corr,rot,Vinf,wakex,wakey) for i in range(nobs) )
    return SPL
def bpmnoise(ntheta,turbineX,turbineY,obs,winddir,B,Hub,high,rad,c,c1,alpha,nu,c0,psi,AR,noise_corr,rot,Vinf,wakex,wakey):
    """Module-level wrapper around _bpmvawtacoustic.turbinepos so joblib
    can pickle the call for parallel execution."""
    spl = _bpmvawtacoustic.turbinepos(
        ntheta, turbineX, turbineY, obs, winddir, B, Hub, high, rad, c, c1,
        alpha, nu, c0, psi, AR, noise_corr, rot, Vinf, wakex, wakey)
    return spl
def sep_func(loc):
    """Pairwise turbine-separation constraint values.

    Parameters
    ----------
    loc : array
        Concatenated coordinates [x_0..x_{n-1}, y_0..y_{n-1}].

    Returns
    -------
    array of length (n-1)*n/2
        Squared pair distance minus the squared minimum spacing;
        a value >= 0 means that pair is far enough apart.
    """
    global turb_dia
    space = 1.65 # minimum separation, in turbine diameters
    # FIX: use floor division so the sizes stay integers; under Python 3
    # true division yields floats, which breaks slicing and np.zeros.
    # (Identical result under Python 2 for these non-negative ints.)
    n = np.size(loc) // 2
    x = loc[0:n]
    y = loc[n:]
    sep = np.zeros((n - 1) * n // 2)
    k = 0
    for i in range(0, n):
        for j in range(i + 1, n):
            sep[k] = (x[j] - x[i])**2 + (y[j] - y[i])**2
            k += 1
    return sep - (space * turb_dia)**2
## Main
if __name__ == "__main__":
    # Main driver: set up the turbine farm, precompute isolated-turbine
    # power, optionally run the SNOPT layout optimization, then report,
    # save and plot the results.
    # RUN OPTIMIZATION
    optimize = True
    # optimize = False
    # PLOT RESULTS
    plot = True
    # plot = False
    plot_type = 'start'
    plot_type = 'start-finish'
    # plot_type = 'finish'
    # SAVE RESULTS
    saveresult = True
    # saveresult = False
    # NOTE(review): 'global' at module scope is a no-op; the statements are
    # kept to document which names the helper functions above share.
    global turb_dia
    global dia
    global rot
    global chord
    global twist
    global delta
    global B
    global H
    global rho
    global mu
    global Vinf
    global windroseDirections
    global windFrequencies
    global obs
    global Hub
    global af_data
    global cl_data
    global cd_data
    global coef0
    global coef1
    global coef2
    global coef3
    global coef4
    global coef5
    global coef6
    global coef7
    global coef8
    global coef9
    global funcs
    global power_iso_tot
    global Vnp
    global Vnn
    global Vtp
    global Vtn
    global Cpp
    global Cpn
    global ntheta
    global thetavec
    global useAC
    global wake_method
    # SPLlim = float(argv[1])
    # rotdir_spec = argv[2]
    # ntheta = int(argv[3])
    # wake_method = argv[4]
    # nRows = int(argv[5])
    # nCols = int(argv[6])
    SPLlim = 100. # sound pressure level limit of observers
    rotdir_spec = 'cn' # rotation direction (cn- counter-rotating, co- co-rotating)
    ntheta = 72 # number of points around blade flight path
    wake_method = 'simp' # wake model calculation using Simpson's rule
    wake_method = 'gskr' # wake model calculation using 21-point Gauss-Kronrod
    nRows = 2 # number of paired group rows
    nCols = 2 # number of paired group columns
    print '\nSPL Limit:',SPLlim
    if rotdir_spec == 'cn':
        print 'Rotation Specification: Counter-rotating'
    elif rotdir_spec == 'co':
        print 'Rotation Specification: Co-rotating'
    print 'Points around VAWT:',ntheta
    if wake_method == 'simp':
        print "Using Simpson's Rule for Wake Calculation"
    elif wake_method == 'gskr':
        print "Using 21-Point Gauss-Kronrod Quadrature for Wake Calculation"
    print 'Rows of Paired Groups:',nRows
    print 'Columns of Paired Groups:',nCols,'\n'
    # load airfoil polar and wake-model coefficients from the data folder
    basepath = path.join(path.dirname(path.realpath('__file__')), 'data')
    foildata = basepath + path.sep + 'airfoils/du06w200.dat'
    af_data,cl_data,cd_data = vwm.airfoil_data(foildata)
    coef0,coef1,coef2,coef3,coef4,coef5,coef6,coef7,coef8,coef9 = vwm.coef_val()
    # define wind specifications
    windroseDirections = np.array([205.,225.,245.])
    windFrequencies = np.array([0.25,0.50,0.25])
    nwind = np.size(windroseDirections)
    print 'wind:',windroseDirections
    # define turbine specifications
    Vinf = 8. # free stream velocity (m/s)
    turb_dia = 1.2 # turbine diameter (m)
    tsrd = 2.625 # tip-speed ratio
    turb_rot = tsrd*Vinf/(turb_dia/2.) # turbine rotation rate (rad/sec)
    twist = 0.0 # blade twist angle (rad)
    delta = 0.0 # blade curvature angle (rad)
    B = 3 # number of turbine blades
    chord = 0.128 # chord length (m)
    H = 6.1 # turbine blade height (m)
    Hub = 2. # turbine hub height (m)
    rho = 1.225 # air density (kg/m^3)
    mu = 1.7894e-5 # fluid viscosity (kg/ms)
    # interp = 1 # linear airfoil interpolation
    interp = 2 # cubic spline interpolation
    # blade-path angles, cell-centered around the circle
    thetavec = np.zeros(ntheta)
    for i in range(ntheta):
        thetavec[i] = (2.*pi/ntheta)*(i+1)-(2.*pi/ntheta)/2.
    # setup initial turbine positions
    grid_start = 2. # location of starting corner (m)
    pair_sep = 3. # separation distance between pairs (m)
    group_sep = pair_sep + 10. # separation distance between paired groups (m)
    x01 = np.zeros(nRows*nCols)
    y01 = np.zeros(nRows*nCols)
    x02 = np.zeros(nRows*nCols)
    y02 = np.zeros(nRows*nCols)
    x1 = np.linspace(grid_start,grid_start+group_sep*(nCols-1),nCols)
    y1 = np.linspace(grid_start,grid_start+group_sep*(nRows-1),nRows)
    x2 = np.linspace(grid_start+pair_sep,grid_start+pair_sep+group_sep*(nCols-1),nCols)
    # y2 = np.linspace(grid_start+pair_sep,grid_start+pair_sep+group_sep*(nRows-1),nRows) # use to angle the pairs
    k = 0
    for i in range(nRows):
        for j in range(nCols):
            x01[k] = x1[j]
            y01[k] = y1[i]
            x02[k] = x2[j]
            y02[k] = y1[i]
            # y02[k] = y2[i]
            k += 1
    x0 = np.concatenate((x01,x02))
    y0 = np.concatenate((y01,y02))
    nturb = np.size(x0)
    print 'x0:',x0.tolist()
    print 'y0:',y0.tolist()
    dia = np.ones(nturb)*turb_dia
    # specify turbine rotation direction
    if rotdir_spec == 'cn':
        rot1 = np.ones_like(x01)*turb_rot
        rot2 = np.ones_like(x02)*-turb_rot
        rot = np.concatenate((rot1,rot2))
    elif rotdir_spec == 'co':
        rot = np.ones(nturb)*turb_rot
    print 'rot:',rot.tolist(),'\n'
    # Duplicate the rotation array so obj_func can index one entry per
    # (wind direction, turbine) pair; since each copy equals the original,
    # the flattened indexing still yields the per-turbine rotation.
    for i in range(nwind-1):
        rot = np.append(rot,rot)
    # specifying boundary locations
    spaceval = 2.
    xlow = 0.
    xupp = max(x2[-1],y1[-1]) + 2.
    # xupp = max(x2[-1],y2[-1]) + 2.
    ylow = 0.
    yupp = max(x2[-1],y1[-1]) + 2.
    # yupp = max(x2[-1],y2[-1]) + 2.
    # specifying observer locations (ring of nobs points around the farm)
    grid_x = xupp/2.
    grid_y = yupp/2.
    grid_radius = int(sqrt((grid_x)**2 + (grid_y)**2)) + 4.
    nobs = 8
    obs_theta = np.linspace(-pi,pi,nobs+1)
    obs = np.zeros((nobs,3))
    for i in range(nobs):
        obs[i,0] = grid_x + grid_radius*cos(obs_theta[i])
        obs[i,1] = grid_y + grid_radius*sin(obs_theta[i])
        obs[i,2] = 2.
    print nobs,'observers around a radius of ',grid_radius,'\n'
    # power value precompute (for CCW and CW directions)
    Cp_iso,Tpp,Vnp,Vtp = actuatorcylinder(ntheta,af_data,cl_data,cd_data,turb_dia/2.,chord,twist,delta,B,fabs(turb_rot),Vinf,rho,interp,np.zeros(ntheta),np.zeros(ntheta)) # CCW
    Cpp = (fabs(turb_rot)*B/(2.*pi*rho*Vinf**3))*Tpp
    _,Tpn,Vnn,Vtn = actuatorcylinder(ntheta,af_data,cl_data,cd_data,turb_dia/2.,chord,twist,delta,B,-fabs(turb_rot),Vinf,rho,interp,np.zeros(ntheta),np.zeros(ntheta)) # CW
    Cpn = (fabs(turb_rot)*B/(2.*pi*rho*Vinf**3))*Tpn
    power_iso = (0.5*rho*Vinf**3)*(dia[0]*H)*Cp_iso # isolated power of a single turbine (W)
    power_iso_tot = power_iso*nturb # total power of isolated turbines (W)
    # option to use actuator cylinder or not (use a correction factor method)
    useAC = True
    useAC = False
    if optimize == True:
        # optimization setup
        optProb = Optimization('VAWT_Power', obj_func)
        optProb.addObj('obj')
        n = np.size(x0)
        optProb.addVarGroup('xvars', n, 'c', lower=xlow, upper=xupp, value=x0)
        optProb.addVarGroup('yvars', n, 'c', lower=ylow, upper=yupp, value=y0)
        num_cons_sep = (n-1)*n/2
        optProb.addConGroup('sep', num_cons_sep, lower=0, upper=None)
        num_cons_obs = nobs*nwind
        optProb.addConGroup('SPL', num_cons_obs, lower=0, upper=SPLlim/10.)
        opt = SNOPT()
        opt.setOption('Scale option',0)
        if rotdir_spec == 'cn':
            opt.setOption('Print file',basepath + path.sep + 'optimization_results/SNOPT_print_SPL'+str(SPLlim)+'_turb'+str(n)+'_counterrot.out')
            opt.setOption('Summary file',basepath + path.sep + 'optimization_results/SNOPT_summary_SPL'+str(SPLlim)+'_turb'+str(n)+'_counterrot.out')
        elif rotdir_spec == 'co':
            opt.setOption('Print file',basepath + path.sep + 'optimization_results/SNOPT_print_SPL'+str(SPLlim)+'_turb'+str(n)+'_corot.out')
            opt.setOption('Summary file',basepath + path.sep + 'optimization_results/SNOPT_summary_SPL'+str(SPLlim)+'_turb'+str(n)+'_corot.out')
        # run optimization
        res = opt(optProb)
        print res
        pow = np.array(-1*res.fStar)*1e3
        xf = res.xStar['xvars']
        yf = res.xStar['yvars']
        # 'funcs' holds the values from obj_func's last evaluation
        SPLd = funcs['SPL']*10.
        SPLw = np.zeros((nwind,nobs))
        k = 0
        for i in range(nwind):
            for j in range(nobs):
                SPLw[i,j] = SPLd[k]
                k += 1
    else:
        # evaluate the initial layout once without optimizing
        xf = x0
        yf = y0
        # xf = np.array([])
        # yf = np.array([])
        input = {'xvars':xf,'yvars':yf}
        funcs,_ = obj_func(input)
        pow = -1*funcs['obj']*1e3
        SPLd = funcs['SPL']*10.
        SPLw = np.zeros((nwind,nobs))
        k = 0
        for i in range(nwind):
            for j in range(nobs):
                SPLw[i,j] = SPLd[k]
                k += 1
    print 'Wind Directions:',windroseDirections
    print 'The power is:',pow,'W (',pow/(power_iso_tot),')'
    print 'The isolated power is:',power_iso_tot,'W'
    print 'The x-locations:',xf
    print 'The y-locations:',yf
    print 'SPL:',SPLw
    if saveresult == True:
        # writing results to text file
        if rotdir_spec == 'cn':
            filename = basepath + path.sep + 'optimization_results/Optrun_SPL'+str(SPLlim)+'_r'+str(nRows)+'_c'+str(nCols)+'_counterrot.txt'
        elif rotdir_spec == 'co':
            filename = basepath + path.sep + 'optimization_results/Optrun_SPL'+str(SPLlim)+'_r'+str(nRows)+'_c'+str(nCols)+'_corot.txt'
        target = open(filename,'w')
        target.write('\nThe power is: '+str(pow)+' W ('+str(pow/(power_iso_tot))+')\n')
        target.write('The isolated power is: '+str(power_iso_tot)+' W\n')
        target.write('Max SPL: '+str(np.max(SPLw))+' dB\n')
        target.write('\nWind Directions: '+str(windroseDirections.tolist())+' degrees\n')
        target.write('X-locations (initial): '+str(x0.tolist())+' m\n')
        target.write('X-locations (final): '+str(xf.tolist())+' m\n')
        target.write('Y-locations (initial): '+str(y0.tolist())+' m\n')
        target.write('Y-locations (final): '+str(yf.tolist())+' m\n')
        target.write('\nSPL: '+str(SPLw.tolist())+' dB\n')
        target.close()
    if plot == True:
        fs = 20
        ms = 10
        plt.figure(1,figsize=(11.5,8))
        plt.subplots_adjust(right=0.68)
        # off-plot dummy points (1e1000 == inf) used only to build the legend
        if plot_type == 'start':
            plt.plot(1e1000,1e1000,'o',color='k',markersize=ms,fillstyle='full',label='Turbines (CCW)')
            plt.plot(1e1000,1e1000,'o',color='silver',markersize=ms,fillstyle='full',label='Turbines (CW)')
        elif plot_type == 'start-finish':
            plt.plot(1e1000,1e1000,'o',color='k',markersize=ms,fillstyle='none',label='Original Turbines')
            plt.plot(1e1000,1e1000,'o',color='k',markersize=ms,fillstyle='full',label='Optimized (CCW)')
            plt.plot(1e1000,1e1000,'o',color='silver',markersize=ms,fillstyle='full',label='Optimized (CW)')
        elif plot_type == 'finish':
            plt.plot(1e1000,1e1000,'o',color='k',markersize=ms,fillstyle='full',label='Turbines (CCW)')
            plt.plot(1e1000,1e1000,'o',color='silver',markersize=ms,fillstyle='full',label='Turbines (CW)')
        # draw the starting layout (filled or hollow depending on plot type)
        for i in range(np.size(x0)):
            if rot[i] > 0.:
                if plot_type == 'start':
                    circ = plt.Circle((x0[i],y0[i]),dia[i]/2.,facecolor='k',edgecolor='k')
                    plt.gca().add_patch(circ)
                elif plot_type == 'start-finish':
                    circ = plt.Circle((x0[i],y0[i]),dia[i]/2.,facecolor='w',edgecolor='k')
                    plt.gca().add_patch(circ)
            elif rot[i] < 0.:
                if plot_type == 'start':
                    circ = plt.Circle((x0[i],y0[i]),dia[i]/2.,facecolor='silver',edgecolor='k')
                    plt.gca().add_patch(circ)
                elif plot_type == 'start-finish':
                    circ = plt.Circle((x0[i],y0[i]),dia[i]/2.,facecolor='w',edgecolor='gray')
                    plt.gca().add_patch(circ)
        # dashed lines connecting each turbine's start and final position
        if plot_type == 'start-finish':
            for i in range(np.size(x0)):
                plt.plot([x0[i], xf[i]], [y0[i], yf[i]], '--k',zorder=-1)
        # draw the final layout
        for i in range(np.size(xf)):
            if rot[i] > 0.:
                if plot_type == 'start-finish' or plot_type == 'finish':
                    circ = plt.Circle((xf[i],yf[i]),dia[i]/2.,facecolor='k',edgecolor='k')
                    plt.gca().add_patch(circ)
            elif rot[i] < 0.:
                if plot_type == 'start-finish' or plot_type == 'finish':
                    circ = plt.Circle((xf[i],yf[i]),dia[i]/2.,facecolor='silver',edgecolor='k')
                    plt.gca().add_patch(circ)
        plt.plot(obs[:,0],obs[:,1],'^',color='lime',markersize=ms,label='Observers')
        rect = plt.Rectangle((xlow,ylow), xupp-xlow,yupp-ylow, linestyle='dashed',linewidth=2,facecolor="#ffffff",fill=False,label='Boundaries')
        plt.gca().add_patch(rect)
        plt.annotate('N', xy=((min(obs[:,0])-5)*(2./3.), (max(obs[:,0])+5)*43./45.), xycoords='data', xytext=(0,-40), textcoords='offset points', size=fs, va="center", ha="center", arrowprops=dict(arrowstyle='simple', facecolor='k'), horizontalalignment='right', verticalalignment='top',color='k')
        plt.annotate('Wind', xy=(((max(obs[:,0])+5)-(min(obs[:,0])-5))/4.5,(min(obs[:,0])-5)*1./15.), xycoords='data', xytext=(-50*np.tan(45.*np.pi/180),-50), textcoords='offset points', arrowprops=dict(facecolor='skyblue',width=5,headwidth=15), horizontalalignment='right', verticalalignment='top', fontsize=fs,color='k') # wind at 225 degrees
        # plt.annotate('Wind', xy=(((max(obs[:,0])+5)-(min(obs[:,0])-5))/4.,(min(obs[:,0])-5)*4./15.), xycoords='data', xytext=(-50,0), textcoords='offset points', arrowprops=dict(facecolor='skyblue',width=5,headwidth=15), horizontalalignment='right', verticalalignment='center', fontsize=fs,color='k') # wind at 90 degrees
        plt.legend(loc="upper left", bbox_to_anchor=(1,1),fontsize=fs)
        plt.xticks(fontsize=fs)
        plt.yticks(fontsize=fs)
        plt.xlabel('X-Position (m)',fontsize=fs)
        plt.ylabel('Y-Position (m)',fontsize=fs)
        plt.xlim(min(obs[:,0])-5,max(obs[:,0])+5)
        plt.ylim(min(obs[:,0])-5,max(obs[:,0])+5)
        if saveresult == True:
            if rotdir_spec == 'cn':
                plt.savefig(basepath + path.sep + 'optimization_results/SPLrunlayout_'+str(SPLlim)+'_r'+str(nRows)+'_c'+str(nCols)+'_counterrot.png')
            elif rotdir_spec == 'co':
                plt.savefig(basepath + path.sep + 'optimization_results/SPLrunlayout_'+str(SPLlim)+'_r'+str(nRows)+'_c'+str(nCols)+'_corot.png')
        plt.show()
| |
from unittest import TestCase
import unittest
from .context import PySimpleAutomata
from PySimpleAutomata import DFA
from PySimpleAutomata import NFA
from PySimpleAutomata import AFW
from PySimpleAutomata import automata_IO
####################################################################
# DFA ##############################################################
class TestDfaDotImporter(TestCase):
    """Tests for automata_IO.dfa_dot_importer (.dot file -> DFA dict)."""

    def setUp(self):
        # allow full diffs on the large expected dictionaries
        self.maxDiff = None
        # expected DFA for the simple "vending machine" fixture
        self.dfa_test = {
            'alphabet': {'5c', '10c', 'gum'},
            'states': {'s0', 's1', 's2', 's3'},
            'initial_state': 's0',
            'accepting_states': {'s0'},
            'transitions': {('s0', '5c'): 's1',
                            ('s0', '10c'): 's2',
                            ('s1', '5c'): 's2',
                            ('s1', '10c'): 's3',
                            ('s2', '5c'): 's3',
                            ('s2', '10c'): 's3',
                            ('s3', 'gum'): 's0'}
        }
        # expected DFA whose states are pairs, as produced by an intersection
        self.dfa_test_02 = {
            'alphabet': {'5c', '10c', 'gum'},
            'states': {
                ('s3', 't2'),
                ('s3', 't3'),
                ('s0', 't3'),
                ('s2', 't3'),
                ('s2', 't0'),
                ('s1', 't2'),
                ('s0', 't0'),
                ('s1', 't4'),
                ('s0', 't1'),
                ('s0', 't5'),
                ('s2', 't1'),
                ('s2', 't5'),
                ('s3', 't4'),
                ('s3', 't0'),
                ('s0', 't2'),
                ('s2', 't2'),
                ('s1', 't0'),
                ('s1', 't3'),
                ('s1', 't5'),
                ('s3', 't1'),
                ('s0', 't4'),
                ('s2', 't4'),
                ('s3', 't5'),
                ('s1', 't1')
            },
            'initial_state': ('s0', 't0'),
            'accepting_states': {('s0', 't5'), ('s0', 't4')},
            'transitions': {
                (('s3', 't3'), 'gum'): ('s0', 't1'),
                (('s0', 't1'), '10c'): ('s2', 't2'),
                (('s3', 't2'), 'gum'): ('s0', 't4'),
                (('s0', 't1'), '5c'): ('s1', 't5'),
                (('s2', 't1'), '10c'): ('s3', 't2'),
                (('s1', 't0'), '5c'): ('s2', 't1'),
                (('s1', 't1'), '10c'): ('s3', 't2'),
                (('s2', 't0'), '5c'): ('s3', 't1'),
                (('s0', 't2'), '5c'): ('s1', 't3'),
                (('s1', 't1'), '5c'): ('s2', 't5'),
                (('s3', 't5'), 'gum'): ('s0', 't0'),
                (('s1', 't2'), '5c'): ('s2', 't3'),
                (('s2', 't2'), '5c'): ('s3', 't3'),
                (('s2', 't1'), '5c'): ('s3', 't5'),
                (('s0', 't0'), '5c'): ('s1', 't1')
            }
        }

    def test_dfa_dot_importer(self):
        """ Tests importing a dfa from a simple dot file"""
        dfa_01 = automata_IO.dfa_dot_importer(
            './tests/dot/dfa/dfa_intersection_1_test_01.dot')
        self.assertDictEqual(dfa_01, self.dfa_test)

    def test_dfa_dot_importer_from_intersection(self):
        """ Tests importing a dfa from a dot file derived from an
        intersection """
        dfa_02 = automata_IO.dfa_dot_importer(
            './tests/dot/automata_io'
            '/automata_io_dfa_importing_intersection.dot')
        self.assertDictEqual(dfa_02, self.dfa_test_02)

    def test_dfa_dot_importer_no_state_only_transitions(self):
        """ WARNING! importing a .dot where no explicit state,
        but just transitions are present.
        This will NOT FAIL, but will recognize only transitions:
        states, initial states, accepting states will remain empty.
        """
        # only checks that the import completes without raising
        automata_IO.dfa_dot_importer(
            './tests/dot/automata_io'
            '/automata_io_dfa_importing_no_state.dot')
class TestDfaToDot(TestCase):
    """Smoke tests for automata_IO.dfa_to_dot: rendering DFAs via graphviz.

    These tests only exercise the rendering code path; output files are
    written under tests/outputs and are not asserted on.
    """
    def setUp(self):
        self.maxDiff = None
        # Two simple DFAs plus their intersection (computed and imported)
        # used as rendering inputs.
        self.dfa_01 = automata_IO.dfa_dot_importer(
            './tests/dot/dfa/dfa_intersection_1_test_01.dot')
        self.dfa_02 = automata_IO.dfa_dot_importer(
            './tests/dot/dfa/dfa_intersection_2_test_01.dot')
        self.dfa_imported_intersect = automata_IO.dfa_dot_importer(
            './tests/dot/automata_io'
            '/automata_io_dfa_imported_intersection.dot')
        self.dfa_intersected = DFA.dfa_intersection(self.dfa_01,
                                                    self.dfa_02)
    def test_dfa_to_dot(self):
        """ Tests a simple dfa render through the graphviz library"""
        automata_IO.dfa_to_dot(self.dfa_01,
                               'graphviz_dfa_render_test',
                               'tests/outputs')
    def test_dfa_graphviz_intersection_render(self):
        """ Tests a rendering of a dfa resulting from an
        intersection, so consisting in more complex nodes"""
        automata_IO.dfa_to_dot(self.dfa_intersected,
                               'graphviz_dfa_intersection_render_test',
                               'tests/outputs')
class TestDfaJsonImporter(TestCase):
    """Tests for automata_IO.dfa_json_importer: reading a DFA from JSON."""
    def setUp(self):
        self.maxDiff = None
        # Expected in-memory DFA for the JSON fixture below.
        self.dfa_01 = {
            "alphabet": {
                "5c",
                "10c",
                "gum"
            },
            "states": {
                "s0",
                "s1",
                "s2",
                "s3",
                "s4"
            },
            "initial_state": "s0",
            "accepting_states": {
                "s0",
                "s2"
            },
            "transitions": {
                ("s0", "5c"): "s1",
                ("s0", "10c"): "s4",
                ("s1", "5c"): "s2",
                ("s1", "10c"): "s3",
                ("s2", "5c"): "s3",
                ("s2", "10c"): "s3",
                ("s4", "5c"): "s3",
                ("s4", "10c"): "s3",
                ("s3", "gum"): "s0"
            }
        }
    def test_dfa_json_importer(self):
        """ Tests a correct dfa import from json file """
        self.assertDictEqual(automata_IO.dfa_json_importer(
            './tests/json/dfa/dfa_json_importer_01.json'),
            self.dfa_01)
class TestDfaToJson(TestCase):
    """Tests for automata_IO.dfa_to_json: exporting a DFA to a JSON file."""
    def setUp(self):
        self.maxDiff = None
        # Round-trip fixture: imported, exported, re-imported and compared.
        self.dfa_01 = automata_IO.dfa_json_importer(
            './tests/json/dfa/dfa_export_to_json_1.json')
        # Deliberately malformed DFA: 'transitions' maps plain states to
        # symbols instead of (state, symbol) -> state.  Used to show the
        # exporter does not validate its input.
        self.dfa_02 = {
            "alphabet": {
                "5c",
                "10c",
                "gum"
            },
            "states": {
                "s0",
                "s1",
                "s2",
                "s3",
                "s4"
            },
            "initial_state": "s0",
            "accepting_states": {
                "s0",
                "s2"
            },
            "transitions": {
                "s0": 'a',
                "s1": 'a',
                "s2": 'a',
                "s3": 'a',
                "s4": 'a'
            }
        }
    def test_dfa_to_json(self):
        """ Tests a correct export to JSON file of a dfa. """
        name = 'JSON_test_dfa_1'
        automata_IO.dfa_to_json(self.dfa_01, name, 'tests/outputs')
        re_imported_dfa = automata_IO.dfa_json_importer(
            'tests/outputs/' + name + '.json')
        self.assertDictEqual(self.dfa_01, re_imported_dfa)
    def test_dfa_to_json_undetected_wrong_transitions(self):
        """ WARNING! Case where the dfa transitions are wrong but
        the export ends without problem. """
        name = 'JSON_test_dfa_2'
        automata_IO.dfa_to_json(self.dfa_02, name, 'tests/outputs')
####################################################################
# NFA ##############################################################
class TestNfaDotImporter(TestCase):
    """Tests for automata_IO.nfa_dot_importer: reading a NFA from .dot."""
    def setUp(self):
        self.maxDiff = None
        # Expected NFA for the simple fixture (set-valued transitions).
        self.nfa_test_01 = {
            'alphabet': {'10c', '5c', 'gum'},
            'states': {'s0', 's1', 's2', 's3'},
            'initial_states': {'s0', 's3'},
            'accepting_states': {'s0'},
            'transitions': {
                ('s0', '10c'): {'s2'},
                ('s0', '5c'): {'s1', 's2'},
                ('s1', '10c'): {'s3'},
                ('s1', '5c'): {'s2', 's3'},
                ('s2', '10c'): {'s3'},
                ('s2', '5c'): {'s3'},
                ('s3', 'gum'): {'s0'}
            }
        }
        # Expected NFA for a .dot produced by a NFA intersection:
        # states are (state, state) tuples recovered from node labels.
        self.nfa_test_02 = {
            'alphabet': {'5c', '10c', 'gum'},
            'states': {
                ('c3', 't1'),
                ('s0', 't0'),
                ('c3', 'c1'),
                ('c1', 'c1'),
                ('c2', 'c2'),
                ('c4', 't3'),
                ('c4', 'c3'),
                ('c2', 't1'),
                ('c4', 't2'),
                ('s0', 't3'),
                ('c1', 'c4'),
                ('c2', 'c3'),
                ('c4', 'c4'),
                ('c2', 'c4'),
                ('c1', 't1'),
                ('s1', 'c2'),
                ('c1', 'c2'),
                ('s1', 't1'),
                ('s1', 't2'),
                ('c3', 't3'),
                ('c4', 'c2'),
                ('c3', 't2'),
                ('c2', 't2'),
                ('c4', 't1'),
                ('s0', 't1'),
                ('s0', 'c3'),
                ('s0', 't2'),
                ('s1', 'c4'),
                ('c2', 't3'),
                ('c2', 't0'),
                ('c4', 't0'),
                ('s0', 'c2'),
                ('c3', 'c4'),
                ('c1', 't0'),
                ('s0', 'c4'),
                ('c1', 't3'),
                ('s0', 'c1'),
                ('c1', 'c3'),
                ('c3', 't0'),
                ('s1', 't0'),
                ('c3', 'c2'),
                ('c4', 'c1'),
                ('c2', 'c1'),
                ('c1', 't2'),
                ('s1', 'c3'),
                ('s1', 't3'),
                ('s1', 'c1'),
                ('c3', 'c3')
            },
            'initial_states': {('s0', 't0')},
            'accepting_states': {('s1', 'c4'), ('c4', 'c4'),
                                 ('c4', 't3'), ('s1', 't3')},
            'transitions': {
                (('c2', 'c2'), 'gum'): {('c4', 'c4')},
                (('c2', 't0'), '5c'): {('c3', 'c1')},
                (('s0', 't1'), 'gum'): {('s1', 't3')},
                (('c2', 'c2'), '5c'): {('c3', 'c3')},
                (('c3', 't2'), 'gum'): {('c1', 't0')},
                (('s0', 't1'), '5c'): {('c1', 't2')},
                (('c2', 't1'), 'gum'): {('c4', 't3')},
                (('c3', 't1'), 'gum'): {('c1', 't3')},
                (('s0', 'c2'), 'gum'): {('s1', 'c4')},
                (('c2', 't1'), '5c'): {('c3', 't2')},
                (('c1', 'c1'), '10c'): {('c2', 'c2')},
                (('s0', 't0'), '5c'): {('c1', 'c1')},
                (('c1', 't0'), '10c'): {('c2', 't1')},
                (('s0', 'c2'), '5c'): {('c1', 'c3')},
                (('s0', 't2'), 'gum'): {('s1', 't0')},
                (('c3', 'c3'), 'gum'): {('c1', 'c1')},
                (('c2', 'c3'), 'gum'): {('c4', 'c1')},
                (('c2', 't2'), 'gum'): {('c4', 't0')},
                (('c3', 'c2'), 'gum'): {('c1', 'c4')},
                (('s0', 'c3'), 'gum'): {('s1', 'c1')}
            }
        }
    def test_nfa_dot_importer(self):
        """ Tests importing a nfa from a simple .dot file """
        nfa_01 = automata_IO.nfa_dot_importer(
            './tests/dot/automata_io'
            '/automata_io_nfa_dot_importer_test_01.dot')
        self.assertDictEqual(nfa_01, self.nfa_test_01)
    def test_nfa_dot_importer_intersection(self):
        """ Tests importing a nfa from a dot file derived from an
        intersection """
        nfa_02 = automata_IO.nfa_dot_importer(
            './tests/dot/automata_io'
            '/automata_io_nfa_imported_intersection.dot')
        self.assertDictEqual(nfa_02, self.nfa_test_02)
    def test_nfa_dot_importer_from_simple_pydot_render(self):
        """ Tests if a dfa imported from dot file generated by
        nfa_pydot_render() is correct """
        nfa_01 = automata_IO.nfa_dot_importer(
            './tests/dot/automata_io'
            '/automata_io_nfa_importer_pydot_nfa_simple.dot')
        self.assertDictEqual(nfa_01, self.nfa_test_01)
class TestNfaToDot(TestCase):
    """Smoke tests for automata_IO.nfa_to_dot: rendering NFAs via graphviz.

    Output files are written under tests/outputs; no assertions are made
    on their content.
    """
    def setUp(self):
        self.maxDiff = None
        # Simple NFA used as a rendering input.
        self.nfa_test_01 = {
            'alphabet': {'10c', '5c', 'gum'},
            'states': {'s0', 's1', 's2', 's3'},
            'initial_states': {'s0', 's3'},
            'accepting_states': {'s0'},
            'transitions': {
                ('s0', '10c'): {'s2'},
                ('s0', '5c'): {'s1', 's2'},
                ('s1', '10c'): {'s3'},
                ('s1', '5c'): {'s2', 's3'},
                ('s2', '10c'): {'s3'},
                ('s2', '5c'): {'s3'},
                ('s3', 'gum'): {'s0'}
            }
        }
        # NFA with tuple states (as produced by an intersection), used to
        # exercise rendering of more complex node labels.
        self.nfa_test_02 = {
            'alphabet': {'5c', '10c', 'gum'},
            'states': {
                ('c3', 't1'),
                ('s0', 't0'),
                ('c3', 'c1'),
                ('c1', 'c1'),
                ('c2', 'c2'),
                ('c4', 't3'),
                ('c4', 'c3'),
                ('c2', 't1'),
                ('c4', 't2'),
                ('s0', 't3'),
                ('c1', 'c4'),
                ('c2', 'c3'),
                ('c4', 'c4'),
                ('c2', 'c4'),
                ('c1', 't1'),
                ('s1', 'c2'),
                ('c1', 'c2'),
                ('s1', 't1'),
                ('s1', 't2'),
                ('c3', 't3'),
                ('c4', 'c2'),
                ('c3', 't2'),
                ('c2', 't2'),
                ('c4', 't1'),
                ('s0', 't1'),
                ('s0', 'c3'),
                ('s0', 't2'),
                ('s1', 'c4'),
                ('c2', 't3'),
                ('c2', 't0'),
                ('c4', 't0'),
                ('s0', 'c2'),
                ('c3', 'c4'),
                ('c1', 't0'),
                ('s0', 'c4'),
                ('c1', 't3'),
                ('s0', 'c1'),
                ('c1', 'c3'),
                ('c3', 't0'),
                ('s1', 't0'),
                ('c3', 'c2'),
                ('c4', 'c1'),
                ('c2', 'c1'),
                ('c1', 't2'),
                ('s1', 'c3'),
                ('s1', 't3'),
                ('s1', 'c1'),
                ('c3', 'c3')
            },
            'initial_states': {('s0', 't0'), ('c1', 't3')},
            'accepting_states': {('s1', 'c4'), ('c4', 'c4'),
                                 ('c4', 't3'), ('s1', 't3')},
            'transitions': {
                (('c2', 'c2'), 'gum'): {('c4', 'c4')},
                (('c2', 't0'), '5c'): {('c3', 'c1')},
                (('s0', 't1'), 'gum'): {('s1', 't3')},
                (('c2', 'c2'), '5c'): {('c3', 'c3')},
                (('c3', 't2'), 'gum'): {('c1', 't0')},
                (('s0', 't1'), '5c'): {('c1', 't2')},
                (('c2', 't1'), 'gum'): {('c4', 't3')},
                (('c3', 't1'), 'gum'): {('c1', 't3')},
                (('s0', 'c2'), 'gum'): {('s1', 'c4')},
                (('c2', 't1'), '5c'): {('c3', 't2')},
                (('c1', 'c1'), '10c'): {('c2', 'c2')},
                (('s0', 't0'), '5c'): {('c1', 'c1')},
                (('c1', 't0'), '10c'): {('c2', 't1')},
                (('s0', 'c2'), '5c'): {('c1', 'c3')},
                (('s0', 't2'), 'gum'): {('s1', 't0')},
                (('c3', 'c3'), 'gum'): {('c1', 'c1')},
                (('c2', 'c3'), 'gum'): {('c4', 'c1')},
                (('c2', 't2'), 'gum'): {('c4', 't0')},
                (('c3', 'c2'), 'gum'): {('c1', 'c4')},
                (('s0', 'c3'), 'gum'): {('s1', 'c1')}
            }
        }
    def test_nfa_to_dot(self):
        """ Tests a simple nfa rendering through the graphviz
        library"""
        automata_IO.nfa_to_dot(self.nfa_test_01,
                               'graphviz_nfa_simple',
                               'tests/outputs')
    def test_nfa_graphviz_intersection_render(self):
        """ Tests rendering through graphviz library a nfa
        derived from an intersection """
        automata_IO.nfa_to_dot(self.nfa_test_02,
                               'graphviz_nfa_intersection',
                               'tests/outputs')
class TestNfaJsonImporter(TestCase):
    """Tests for automata_IO.nfa_json_importer: reading a NFA from JSON."""
    def setUp(self):
        self.maxDiff = None
        # Consistency fix: this fixture is an NFA (it has 'initial_states'
        # and set-valued transitions), so it is named nfa_01 — the previous
        # name dfa_01 was misleading and clashed with the DFA test classes.
        self.nfa_01 = {
            "alphabet": {
                "a",
                "b",
                "c"
            },
            "states": {
                "a0",
                "t0",
                "t1",
                "t2",
                "t3",
                "t4"
            },
            "initial_states": {
                "t0",
                "a0"
            },
            "accepting_states": {
                "t0",
                "t4",
                "a0"
            },
            "transitions": {
                ("t0", "b"): {"t1"},
                ("t0", "a"): {"t2"},
                ("t1", "c"): {"t3", "t2"},
                ("t1", "b"): {"t4"},
                ("t2", "b"): {"t1"},
                ("t2", "a"): {"t2", "t4"},
                ("t3", "c"): {"t0"},
                ("t3", "b"): {"t0", "t3"},
                ("t3", "a"): {"t4", "t1"},
                ("t4", "a"): {"t4"},
                ("t4", "b"): {"t0"},
                ("t4", "c"): {"t0"},
                ("a0", "a"): {"t1"}
            }
        }
    def test_nfa_json_importer(self):
        """ Tests a correct nfa import from a JSON file. """
        imported = automata_IO.nfa_json_importer(
            './tests/json/nfa/nfa_json_importer_1.json')
        self.assertDictEqual(imported,
                             self.nfa_01)
class TestNfaToJson(TestCase):
    """Tests for automata_IO.nfa_to_json: exporting a NFA to a JSON file."""
    def setUp(self):
        self.maxDiff = None
        # Round-trip fixture: exported, re-imported and compared.
        self.nfa_01 = {
            "alphabet": {
                "a",
                "b",
                "c"
            },
            "states": {
                "a0",
                "t0",
                "t1",
                "t2",
                "t3",
                "t4"
            },
            "initial_states": {
                "t0",
                "a0"
            },
            "accepting_states": {
                "t0",
                "t4",
                "a0"
            },
            "transitions": {
                ("t0", "b"): {"t1"},
                ("t0", "a"): {"t2"},
                ("t1", "c"): {"t3", "t2"},
                ("t1", "b"): {"t4"},
                ("t2", "b"): {"t1"},
                ("t2", "a"): {"t2", "t4"},
                ("t3", "c"): {"t0"},
                ("t3", "b"): {"t0", "t3"},
                ("t3", "a"): {"t4", "t1"},
                ("t4", "a"): {"t4"},
                ("t4", "b"): {"t0"},
                ("t4", "c"): {"t0"},
                ("a0", "a"): {"t1"}
            }
        }
    def test_nfa_to_json(self):
        """ Tests a correct export to JSON file of a nfa. """
        name = 'JSON_test_nfa_1'
        automata_IO.nfa_to_json(self.nfa_01, name, 'tests/outputs')
        re_imported_nfa = automata_IO.nfa_json_importer(
            'tests/outputs/' + name + '.json')
        self.assertDictEqual(self.nfa_01, re_imported_nfa)
####################################################################
# AFW ##############################################################
class TestAfwJsonImporter(TestCase):
    """Tests for automata_IO.afw_json_importer: reading an AFW from JSON."""
    def setUp(self):
        self.maxDiff = None
        # Expected AFW: transitions map (state, symbol) to a boolean
        # formula over states, stored as a string.
        self.afw_test_01 = {
            'alphabet': {'a', 'b'},
            'states': {'s', 'q0', 'q1', 'q2'},
            'initial_state': 's',
            'accepting_states': {'q0'},
            'transitions': {
                ('q0', 'b'): 'q0 or q2',
                ('q0', 'a'): 'q1',
                ('q1', 'a'): 'q0',
                ('q1', 'b'): 'q1 or q2',
                ('q2', 'a'): 'q2',
                ('s', 'a'): 's',
                ('s', 'b'): 's and q0'
            }
        }
        # NOTE(review): not referenced by any test in this class —
        # presumably kept for future empty-AFW tests; verify before removal.
        self.afw_test_empty = {
            'alphabet': set(),
            'states': set(),
            'initial_state': 'state_0',
            'accepting_states': set(),
            'transitions': {}
        }
    def test_afw_json_importer(self):
        """ Tests importing a afw from a .json file """
        afw_01 = automata_IO.afw_json_importer(
            './tests/json/automata_io'
            '/automata_io_afw_json_importer_test_01.json')
        self.assertDictEqual(afw_01, self.afw_test_01)
class TestAfwToJson(TestCase):
    """Tests for automata_IO.afw_to_json: exporting an AFW to a JSON file."""
    def setUp(self):
        self.maxDiff = None
        # Round-trip fixture: exported, re-imported and compared.
        self.afw_test_01 = {
            'alphabet': {'a', 'b'},
            'states': {'s', 'q0', 'q1', 'q2'},
            'initial_state': 's',
            'accepting_states': {'q0'},
            'transitions': {
                ('q0', 'b'): 'q0 or q2',
                ('q0', 'a'): 'q1',
                ('q1', 'a'): 'q0',
                ('q1', 'b'): 'q1 or q2',
                ('q2', 'a'): 'q2',
                ('s', 'a'): 's',
                ('s', 'b'): 's and q0'
            }
        }
        # NOTE(review): not referenced by any test in this class —
        # presumably kept for future empty-AFW tests; verify before removal.
        self.afw_test_empty = {
            'alphabet': set(),
            'states': set(),
            'initial_state': 'state_0',
            'accepting_states': set(),
            'transitions': {}
        }
    def test_afw_to_json(self):
        """ Tests a correct afw export to JSON file """
        name = 'JSON_afw_export'
        automata_IO.afw_to_json(self.afw_test_01, name,
                                'tests/outputs')
        re_imported_afw = automata_IO.afw_json_importer(
            'tests/outputs/' + name + '.json')
        self.assertDictEqual(self.afw_test_01, re_imported_afw)
| |
from __future__ import (division, absolute_import, print_function,
unicode_literals)
from future import standard_library
from future.builtins import *
from .common import UnmatchedStepError, combined_match_dict, u
from copy import copy
from io import StringIO
import re
import yaml
class Step(object):
    """Abstract base class for all step types.

    Subclasses must implement parse() (recognise the step's text at
    start_index of toparse) and __call__() (run the step).
    """
    def parse(self, step_set, toparse, start_index):
        # Abstract: must return ((args, step), next_index) or raise.
        raise NotImplementedError('Step subclasses must implement parse')
    def __call__(self, context, args):
        # Abstract: run the step against the given context and args.
        raise NotImplementedError('Step subclasses must implement __call__')
class RegexStep(Step):
    """A step recognised by a regular expression.

    In multiline mode the pattern is anchored at start_index and may span
    newlines; otherwise matching is confined to the current line.
    """

    def __init__(self, regex, multiline=False):
        self.multiline = multiline
        self.regex = re.compile(regex)

    def parse(self, step_set, toparse, start_index):
        """Dispatch to multiline or single-line matching."""
        matcher = self.parse_multiline if self.multiline else self.parse_line
        return matcher(toparse, start_index)

    def parse_multiline(self, toparse, start_index):
        """Anchor the regex at start_index; raise UnmatchedStepError on miss."""
        found = self.regex.match(toparse, start_index)
        if found is None:
            raise UnmatchedStepError(
                toparse[start_index:].split('\n', 1)[0])
        return (combined_match_dict(found), self), found.end()

    def parse_line(self, toparse, start_index):
        """Search the regex within the current line only.

        Unmatched text before/after the match is preserved under the
        'prefix_content' / 'suffix_content' keys.  Consumes through the
        end of the line (including the newline, if any).
        """
        line_end = toparse.find('\n', start_index)
        if line_end == -1:
            line_end = len(toparse)
        line = toparse[start_index:line_end]
        found = self.regex.search(line)
        if found is None:
            raise UnmatchedStepError(line)
        args = combined_match_dict(found)
        if found.start() > 0:
            args['prefix_content'] = line[:found.start()]
        if found.end() < len(line):
            args['suffix_content'] = line[found.end():]
        return (args, self), line_end + 1

    def __repr__(self):
        klass = self.__class__
        return '<{}.{} "{}">'.format(klass.__module__, klass.__name__,
                                     self.regex.pattern)
class RegexFuncStep(RegexStep):
    """A RegexStep whose action is delegated to a wrapped function."""

    def __init__(self, regex, func, multiline=False):
        super(RegexFuncStep, self).__init__(regex, multiline)
        self.func = func

    def __call__(self, context, args):
        # Running the step simply invokes the wrapped function.
        return self.func(context, args)

    @classmethod
    def make(cls, regex, multiline=False):
        """Decorator which wraps the specified regex and func in a
        RegexFuncStep instance
        """
        def make_inst(func):
            return cls(regex, func, multiline)
        return make_inst

    def __repr__(self):
        klass = self.__class__
        fn = self.func
        return '<{}.{} {}.{} "{}">'.format(
            klass.__module__, klass.__name__,
            fn.__module__, fn.__name__,
            self.regex.pattern)
class PrefixStep(RegexStep):
    """A no-op step that only consumes text matching a regex.

    Intended as the first half of a composite step, e.g. to require a
    literal prefix before a YamlFuncStep.  multiline defaults to True so
    the pattern can match in the middle of a line.  Running the step
    always yields True.
    """

    def __init__(self, regex, multiline=True):
        super(PrefixStep, self).__init__(regex, multiline)

    def __call__(self, context, args):
        # Matching is the whole job; execution is trivially "true".
        return True
class YamlStep(Step):
    """A step whose text is a YAML document parsed with PyYAML."""
    def parse(self, step_set, toparse, start_index):
        """Parse one YAML document starting at start_index.

        Returns ((value, self), end_index) where value is the loaded
        YAML data; raises UnmatchedStepError if PyYAML cannot parse.
        """
        # Feed the loader a stream positioned at start_index.
        step_io = StringIO(u(toparse))
        step_io.seek(start_index, 0)
        loader = yaml.Loader(step_io)
        try:
            val = loader.get_data()
        except yaml.error.YAMLError:
            # Report only the first unparsed line, like RegexStep does.
            line = toparse[start_index:].split('\n', 1)[0]
            raise UnmatchedStepError(line)
        # Determine how much input the document consumed: either up to
        # the first leftover token, or up to the loader's current mark.
        # (Relies on yaml.Loader internals: tokens / get_mark.)
        if loader.tokens:
            val_end = loader.tokens[0].start_mark.index
        else:
            val_end = loader.get_mark().index
        # val_end is relative to the stream we handed the loader, so the
        # caller's offset is start_index + val_end.
        return (val, self), start_index + val_end
class YamlFuncStep(YamlStep):
    """A YamlStep whose parsed document is handed to a wrapped function."""

    def __init__(self, func):
        self.func = func

    def __call__(self, context, args):
        # Delegate execution to the wrapped callable.
        return self.func(context, args)

    @classmethod
    def make(cls, func):
        """Decorator which wraps the specified func in a
        YamlFuncStep instance
        """
        return cls(func)
class CompositeStep(Step):
    """Two steps chained together: both must parse, and both are run."""

    def __init__(self, first_step, second_step):
        self.first_step = first_step
        self.second_step = second_step

    def parse(self, step_set, toparse, start_index):
        """Parse first_step then second_step; merge their match dicts."""
        (merged, _), pos = self.first_step.parse(step_set, toparse,
                                                 start_index)
        (extra, _), pos = self.second_step.parse(step_set, toparse, pos)
        merged.update(extra)
        return (merged, self), pos

    def __call__(self, context, args):
        # The first step's result is recorded on the context before the
        # second step runs, so the second step can observe it.
        context.last_return = self.first_step(context, args)
        self.second_step(context, args)
class PredicateStep(CompositeStep):
    """A composite of a predicate step and a conditional step.

    Both halves must parse or the step does not match.  When no
    conditional step is supplied up front, parsing asks the step set to
    recognise whatever follows the predicate and binds it on a copy of
    this instance.

    At run time the conditional step is executed only when the predicate
    step returns a truthy value; the context's previous last_return is
    returned either way.
    """

    def __init__(self, first_step, second_step=None):
        super(PredicateStep, self).__init__(first_step, second_step)

    def parse(self, step_set, toparse, start_index):
        if self.second_step:
            # Fully specified: behave exactly like a CompositeStep.
            return super(PredicateStep, self).parse(step_set, toparse,
                                                    start_index)
        # Parse the predicate, then let the step set recognise the
        # conditional step; bind it on a copy so this template instance
        # stays reusable.
        (merged, _), pos = self.first_step.parse(step_set, toparse,
                                                 start_index)
        (extra, follower), pos = step_set.parse_one(toparse, pos)
        merged.update(extra)
        bound = copy(self)
        bound.second_step = follower
        return (merged, bound), pos

    def __call__(self, context, args):
        # Capture last_return *before* evaluating the predicate so the
        # surrounding chain's value is preserved across this step.
        previous = context.get('last_return')
        if self.first_step(context, args):
            self.second_step(context, args)
        return previous
class LoopStep(PredicateStep):
    """Runs its body step once per item of a context collection.

    Each item is exposed to the body as context.it, and the body's
    return value is stored back as context.last_return; the final
    last_return is returned.
    """

    def __init__(self, regex, context_var, loop_step=None, multiline=True):
        super(LoopStep, self).__init__(PrefixStep(regex, multiline),
                                       loop_step)
        self.context_var = context_var

    def __call__(self, context, args):
        # Missing/absent collection means zero iterations, not an error.
        for item in context.get(self.context_var, []):
            context.it = item
            context.last_return = self.second_step(context, args)
        return context.get('last_return')
| |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import base64
import os
import binascii
from libcloud.utils.py3 import ET
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlquote
from libcloud.utils.py3 import tostring
from libcloud.utils.py3 import b
from libcloud.utils.xml import fixxpath
from libcloud.utils.files import read_in_chunks
from libcloud.common.types import LibcloudError
from libcloud.common.azure import AzureConnection
from libcloud.storage.base import Object, Container, StorageDriver
from libcloud.storage.types import ContainerIsNotEmptyError
from libcloud.storage.types import ContainerAlreadyExistsError
from libcloud.storage.types import InvalidContainerNameError
from libcloud.storage.types import ContainerDoesNotExistError
from libcloud.storage.types import ObjectDoesNotExistError
from libcloud.storage.types import ObjectHashMismatchError
# Desired number of items in each response inside a paginated request
RESPONSES_PER_REQUEST = 100
# As per the Azure documentation, if the upload file size is less than
# 64MB, we can upload it in a single request. However, in real life azure
# servers seem to disconnect randomly after around 5 MB or 200s of upload.
# So, it is better that for file sizes greater than 4MB, we upload it in
# chunks.
# Also, with large sizes, if we use a lease, the lease will timeout after
# 60 seconds, but the upload might still be in progress. This can be
# handled in code, but if we use chunked uploads, the lease renewal will
# happen automatically.
AZURE_BLOCK_MAX_SIZE = 4 * 1024 * 1024
# Azure block blobs must be maximum 4MB
# Azure page blobs must be aligned in 512 byte boundaries (4MB fits that)
AZURE_CHUNK_SIZE = 4 * 1024 * 1024
# Azure page blob must be aligned in 512 byte boundaries
AZURE_PAGE_CHUNK_SIZE = 512
# The time period (in seconds) for which a lease must be obtained.
# If set as -1, we get an infinite lease, but that is a bad idea. If
# after getting an infinite lease, there was an issue in releasing the
# lease, the object will remain 'locked' forever, unless the lease is
# released using the lease_id (which is not exposed to the user)
AZURE_LEASE_PERIOD = 60
AZURE_STORAGE_HOST_SUFFIX = 'blob.core.windows.net'
class AzureBlobLease(object):
    """
    A class to help in leasing an azure blob and renewing the lease.

    Usable as a context manager: __enter__ acquires the lease (when
    use_lease is True and the blob exists) and __exit__ releases it.
    """
    def __init__(self, driver, object_path, use_lease):
        """
        :param driver: The Azure storage driver that is being used
        :type driver: :class:`AzureStorageDriver`
        :param object_path: The path of the object we need to lease
        :type object_path: ``str``
        :param use_lease: Indicates if we must take a lease or not
        :type use_lease: ``bool``
        """
        self.object_path = object_path
        self.driver = driver
        self.use_lease = use_lease
        self.lease_id = None
        self.params = {'comp': 'lease'}

    def renew(self):
        """
        Renew the lease if it is older than a predefined time period
        """
        if self.lease_id is None:
            # No lease held (use_lease was False, or the blob was absent
            # when we tried to acquire one) - nothing to renew.
            return

        headers = {'x-ms-lease-action': 'renew',
                   'x-ms-lease-id': self.lease_id,
                   'x-ms-lease-duration': '60'}

        response = self.driver.connection.request(self.object_path,
                                                  headers=headers,
                                                  params=self.params,
                                                  method='PUT')

        if response.status != httplib.OK:
            # Bug fix: this path renews (not obtains) the lease, so the
            # error message now says so instead of 'Unable to obtain lease'.
            raise LibcloudError('Unable to renew lease', driver=self)

    def update_headers(self, headers):
        """
        Update the lease id in the headers
        """
        if self.lease_id:
            headers['x-ms-lease-id'] = self.lease_id

    def __enter__(self):
        if not self.use_lease:
            return self

        headers = {'x-ms-lease-action': 'acquire',
                   'x-ms-lease-duration': '60'}

        response = self.driver.connection.request(self.object_path,
                                                  headers=headers,
                                                  params=self.params,
                                                  method='PUT')

        if response.status == httplib.NOT_FOUND:
            # Blob does not exist yet - proceed without a lease.
            return self
        elif response.status != httplib.CREATED:
            raise LibcloudError('Unable to obtain lease', driver=self)

        self.lease_id = response.headers['x-ms-lease-id']
        return self

    def __exit__(self, type, value, traceback):
        if self.lease_id is None:
            return

        headers = {'x-ms-lease-action': 'release',
                   'x-ms-lease-id': self.lease_id}
        response = self.driver.connection.request(self.object_path,
                                                  headers=headers,
                                                  params=self.params,
                                                  method='PUT')

        if response.status != httplib.OK:
            raise LibcloudError('Unable to release lease', driver=self)
class AzureBlobsConnection(AzureConnection):
    """
    Represents a single connection to the Azure Blobs endpoint.

    Inherits all behaviour (auth headers, signing) from AzureConnection;
    exists so the driver can be given a Blobs-specific connection class.
    """
class AzureBlobsStorageDriver(StorageDriver):
name = 'Microsoft Azure (blobs)'
website = 'http://windows.azure.com/'
connectionCls = AzureBlobsConnection
hash_type = 'md5'
supports_chunked_encoding = False
ex_blob_type = 'BlockBlob'
    def __init__(self, key, secret=None, secure=True, host=None, port=None,
                 **kwargs):
        # Remember whether the caller pinned an explicit host, so that
        # _ex_connection_class_kwargs() does not override it with the
        # account-derived default.
        self._host_argument_set = bool(host)

        # B64decode() this key and keep it, so that we don't have to do
        # so for every request. Minor performance improvement
        secret = base64.b64decode(b(secret))

        super(AzureBlobsStorageDriver, self).__init__(key=key, secret=secret,
                                                      secure=secure, host=host,
                                                      port=port, **kwargs)
def _ex_connection_class_kwargs(self):
result = {}
# host argument has precedence
if not self._host_argument_set:
result['host'] = '%s.%s' % (self.key, AZURE_STORAGE_HOST_SUFFIX)
return result
def _xml_to_container(self, node):
"""
Converts a container XML node to a container instance
:param node: XML info of the container
:type node: :class:`xml.etree.ElementTree.Element`
:return: A container instance
:rtype: :class:`Container`
"""
name = node.findtext(fixxpath(xpath='Name'))
props = node.find(fixxpath(xpath='Properties'))
metadata = node.find(fixxpath(xpath='Metadata'))
extra = {
'url': node.findtext(fixxpath(xpath='Url')),
'last_modified': node.findtext(fixxpath(xpath='Last-Modified')),
'etag': props.findtext(fixxpath(xpath='Etag')),
'lease': {
'status': props.findtext(fixxpath(xpath='LeaseStatus')),
'state': props.findtext(fixxpath(xpath='LeaseState')),
'duration': props.findtext(fixxpath(xpath='LeaseDuration')),
},
'meta_data': {}
}
for meta in metadata.getchildren():
extra['meta_data'][meta.tag] = meta.text
return Container(name=name, extra=extra, driver=self)
def _response_to_container(self, container_name, response):
"""
Converts a HTTP response to a container instance
:param container_name: Name of the container
:type container_name: ``str``
:param response: HTTP Response
:type node: L{}
:return: A container instance
:rtype: :class:`Container`
"""
headers = response.headers
extra = {
'url': 'http://%s%s' % (response.connection.host,
response.connection.action),
'etag': headers['etag'],
'last_modified': headers['last-modified'],
'lease': {
'status': headers.get('x-ms-lease-status', None),
'state': headers.get('x-ms-lease-state', None),
'duration': headers.get('x-ms-lease-duration', None),
},
'meta_data': {}
}
for key, value in response.headers.items():
if key.startswith('x-ms-meta-'):
key = key.split('x-ms-meta-')[1]
extra['meta_data'][key] = value
return Container(name=container_name, extra=extra, driver=self)
def _xml_to_object(self, container, blob):
"""
Converts a BLOB XML node to an object instance
:param container: Instance of the container holding the blob
:type: :class:`Container`
:param blob: XML info of the blob
:type blob: L{}
:return: An object instance
:rtype: :class:`Object`
"""
name = blob.findtext(fixxpath(xpath='Name'))
props = blob.find(fixxpath(xpath='Properties'))
metadata = blob.find(fixxpath(xpath='Metadata'))
etag = props.findtext(fixxpath(xpath='Etag'))
size = int(props.findtext(fixxpath(xpath='Content-Length')))
extra = {
'content_type': props.findtext(fixxpath(xpath='Content-Type')),
'etag': etag,
'md5_hash': props.findtext(fixxpath(xpath='Content-MD5')),
'last_modified': props.findtext(fixxpath(xpath='Last-Modified')),
'url': blob.findtext(fixxpath(xpath='Url')),
'hash': props.findtext(fixxpath(xpath='Etag')),
'lease': {
'status': props.findtext(fixxpath(xpath='LeaseStatus')),
'state': props.findtext(fixxpath(xpath='LeaseState')),
'duration': props.findtext(fixxpath(xpath='LeaseDuration')),
},
'content_encoding': props.findtext(fixxpath(
xpath='Content-Encoding')),
'content_language': props.findtext(fixxpath(
xpath='Content-Language')),
'blob_type': props.findtext(fixxpath(xpath='BlobType'))
}
if extra['md5_hash']:
value = binascii.hexlify(base64.b64decode(b(extra['md5_hash'])))
value = value.decode('ascii')
extra['md5_hash'] = value
meta_data = {}
for meta in metadata.getchildren():
meta_data[meta.tag] = meta.text
return Object(name=name, size=size, hash=etag, meta_data=meta_data,
extra=extra, container=container, driver=self)
def _response_to_object(self, object_name, container, response):
"""
Converts a HTTP response to an object (from headers)
:param object_name: Name of the object
:type object_name: ``str``
:param container: Instance of the container holding the blob
:type: :class:`Container`
:param response: HTTP Response
:type node: L{}
:return: An object instance
:rtype: :class:`Object`
"""
headers = response.headers
size = int(headers['content-length'])
etag = headers['etag']
extra = {
'url': 'http://%s%s' % (response.connection.host,
response.connection.action),
'etag': etag,
'md5_hash': headers.get('content-md5', None),
'content_type': headers.get('content-type', None),
'content_language': headers.get('content-language', None),
'content_encoding': headers.get('content-encoding', None),
'last_modified': headers['last-modified'],
'lease': {
'status': headers.get('x-ms-lease-status', None),
'state': headers.get('x-ms-lease-state', None),
'duration': headers.get('x-ms-lease-duration', None),
},
'blob_type': headers['x-ms-blob-type']
}
if extra['md5_hash']:
value = binascii.hexlify(base64.b64decode(b(extra['md5_hash'])))
value = value.decode('ascii')
extra['md5_hash'] = value
meta_data = {}
for key, value in response.headers.items():
if key.startswith('x-ms-meta-'):
key = key.split('x-ms-meta-')[1]
meta_data[key] = value
return Object(name=object_name, size=size, hash=etag, extra=extra,
meta_data=meta_data, container=container, driver=self)
def iterate_containers(self):
"""
@inherits: :class:`StorageDriver.iterate_containers`
"""
params = {'comp': 'list',
'maxresults': RESPONSES_PER_REQUEST,
'include': 'metadata'}
while True:
response = self.connection.request('/', params)
if response.status != httplib.OK:
raise LibcloudError('Unexpected status code: %s' %
(response.status), driver=self)
body = response.parse_body()
containers = body.find(fixxpath(xpath='Containers'))
containers = containers.findall(fixxpath(xpath='Container'))
for container in containers:
yield self._xml_to_container(container)
params['marker'] = body.findtext('NextMarker')
if not params['marker']:
break
def iterate_container_objects(self, container):
"""
@inherits: :class:`StorageDriver.iterate_container_objects`
"""
params = {'restype': 'container',
'comp': 'list',
'maxresults': RESPONSES_PER_REQUEST,
'include': 'metadata'}
container_path = self._get_container_path(container)
while True:
response = self.connection.request(container_path,
params=params)
if response.status == httplib.NOT_FOUND:
raise ContainerDoesNotExistError(value=None,
driver=self,
container_name=container.name)
elif response.status != httplib.OK:
raise LibcloudError('Unexpected status code: %s' %
(response.status), driver=self)
body = response.parse_body()
blobs = body.find(fixxpath(xpath='Blobs'))
blobs = blobs.findall(fixxpath(xpath='Blob'))
for blob in blobs:
yield self._xml_to_object(container, blob)
params['marker'] = body.findtext('NextMarker')
if not params['marker']:
break
def get_container(self, container_name):
"""
@inherits: :class:`StorageDriver.get_container`
"""
params = {'restype': 'container'}
container_path = '/%s' % (container_name)
response = self.connection.request(container_path, params=params,
method='HEAD')
if response.status == httplib.NOT_FOUND:
raise ContainerDoesNotExistError('Container %s does not exist' %
(container_name), driver=self,
container_name=container_name)
elif response.status != httplib.OK:
raise LibcloudError('Unexpected status code: %s' %
(response.status), driver=self)
return self._response_to_container(container_name, response)
def get_object(self, container_name, object_name):
"""
@inherits: :class:`StorageDriver.get_object`
"""
container = self.get_container(container_name=container_name)
object_path = self._get_object_path(container, object_name)
response = self.connection.request(object_path, method='HEAD')
if response.status == httplib.OK:
obj = self._response_to_object(object_name, container, response)
return obj
raise ObjectDoesNotExistError(value=None, driver=self,
object_name=object_name)
def _get_container_path(self, container):
"""
Return a container path
:param container: Container instance
:type container: :class:`Container`
:return: A path for this container.
:rtype: ``str``
"""
return '/%s' % (container.name)
def _get_object_path(self, container, object_name):
"""
Return an object's CDN path.
:param container: Container instance
:type container: :class:`Container`
:param object_name: Object name
:type object_name: :class:`str`
:return: A path for this object.
:rtype: ``str``
"""
container_url = self._get_container_path(container)
object_name_cleaned = urlquote(object_name)
object_path = '%s/%s' % (container_url, object_name_cleaned)
return object_path
def create_container(self, container_name):
"""
@inherits: :class:`StorageDriver.create_container`
"""
params = {'restype': 'container'}
container_path = '/%s' % (container_name)
response = self.connection.request(container_path, params=params,
method='PUT')
if response.status == httplib.CREATED:
return self._response_to_container(container_name, response)
elif response.status == httplib.CONFLICT:
raise ContainerAlreadyExistsError(
value='Container with this name already exists. The name must '
'be unique among all the containers in the system',
container_name=container_name, driver=self)
elif response.status == httplib.BAD_REQUEST:
raise InvalidContainerNameError(value='Container name contains ' +
'invalid characters.',
container_name=container_name,
driver=self)
raise LibcloudError('Unexpected status code: %s' % (response.status),
driver=self)
def delete_container(self, container):
"""
@inherits: :class:`StorageDriver.delete_container`
"""
# Azure does not check if the container is empty. So, we will do
# a check to ensure that the behaviour is similar to other drivers
for obj in container.iterate_objects():
raise ContainerIsNotEmptyError(
value='Container must be empty before it can be deleted.',
container_name=container.name, driver=self)
params = {'restype': 'container'}
container_path = self._get_container_path(container)
# Note: All the objects in the container must be deleted first
response = self.connection.request(container_path, params=params,
method='DELETE')
if response.status == httplib.ACCEPTED:
return True
elif response.status == httplib.NOT_FOUND:
raise ContainerDoesNotExistError(value=None,
driver=self,
container_name=container.name)
return False
def download_object(self, obj, destination_path, overwrite_existing=False,
delete_on_failure=True):
"""
@inherits: :class:`StorageDriver.download_object`
"""
obj_path = self._get_object_path(obj.container, obj.name)
response = self.connection.request(obj_path, raw=True, data=None)
return self._get_object(obj=obj, callback=self._save_object,
response=response,
callback_kwargs={
'obj': obj,
'response': response.response,
'destination_path': destination_path,
'overwrite_existing': overwrite_existing,
'delete_on_failure': delete_on_failure},
success_status_code=httplib.OK)
def download_object_as_stream(self, obj, chunk_size=None):
"""
@inherits: :class:`StorageDriver.download_object_as_stream`
"""
obj_path = self._get_object_path(obj.container, obj.name)
response = self.connection.request(obj_path, raw=True, data=None)
return self._get_object(obj=obj, callback=read_in_chunks,
response=response,
callback_kwargs={'iterator': response.response,
'chunk_size': chunk_size},
success_status_code=httplib.OK)
    def _upload_in_chunks(self, response, data, iterator, object_path,
                          blob_type, lease, calculate_hash=True):
        """
        Uploads data from an iterator in fixed sized chunks to Azure Storage.

        :param response: Response object from the initial POST request
        :type response: :class:`RawResponse`

        :param data: Any data from the initial POST request
        :type data: ``str``

        :param iterator: The generator for fetching the upload data
        :type iterator: ``generator``

        :param object_path: The path of the object to which we are uploading
        :type object_path: ``str``

        :param blob_type: The blob type being uploaded
        :type blob_type: ``str``

        :param lease: The lease object to be used for renewal
        :type lease: :class:`AzureBlobLease`

        :keyword calculate_hash: Indicates if we must calculate the data hash
        :type calculate_hash: ``bool``

        :return: A tuple of (status, checksum, bytes transferred)
        :rtype: ``tuple``
        """
        # The initiating request must have succeeded before any chunk is sent
        if response.status != httplib.CREATED:
            raise LibcloudError('Error initializing upload. Code: %d' %
                                (response.status), driver=self)
        data_hash = None
        if calculate_hash:
            data_hash = self._get_hash_function()
        bytes_transferred = 0
        count = 1
        chunks = []
        headers = {}
        lease.update_headers(headers)
        # Block blobs upload "blocks"; page blobs write explicit byte ranges
        if blob_type == 'BlockBlob':
            params = {'comp': 'block'}
        else:
            params = {'comp': 'page'}
        # Read the input data in chunk sizes suitable for Azure
        for data in read_in_chunks(iterator, AZURE_CHUNK_SIZE):
            data = b(data)
            content_length = len(data)
            offset = bytes_transferred
            bytes_transferred += content_length
            if calculate_hash:
                data_hash.update(data)
            # Per-chunk MD5 lets the service verify each part in transit
            chunk_hash = self._get_hash_function()
            chunk_hash.update(data)
            chunk_hash = base64.b64encode(b(chunk_hash.digest()))
            headers['Content-MD5'] = chunk_hash.decode('utf-8')
            headers['Content-Length'] = content_length
            if blob_type == 'BlockBlob':
                # Block id can be any unique string that is base64 encoded
                # A 10 digit number can hold the max value of 50000 blocks
                # that are allowed for azure
                block_id = base64.b64encode(b('%10d' % (count)))
                block_id = block_id.decode('utf-8')
                params['blockid'] = block_id
                # Keep this data for a later commit
                chunks.append(block_id)
            else:
                headers['x-ms-page-write'] = 'update'
                headers['x-ms-range'] = 'bytes=%d-%d' % \
                    (offset, (bytes_transferred - 1))
            # Renew lease before updating
            lease.renew()
            resp = self.connection.request(object_path, method='PUT',
                                           data=data, headers=headers,
                                           params=params)
            if resp.status != httplib.CREATED:
                resp.parse_error()
                raise LibcloudError('Error uploading chunk %d. Code: %d' %
                                    (count, resp.status), driver=self)
            count += 1
        if calculate_hash:
            data_hash = data_hash.hexdigest()
        # Block blobs only become visible after the block list is committed
        if blob_type == 'BlockBlob':
            self._commit_blocks(object_path, chunks, lease)
        # The Azure service does not return a hash immediately for
        # chunked uploads. It takes some time for the data to get synced
        response.headers['content-md5'] = None
        return (True, data_hash, bytes_transferred)
def _commit_blocks(self, object_path, chunks, lease):
"""
Makes a final commit of the data.
:param object_path: Server side object path.
:type object_path: ``str``
:param upload_id: A list of (chunk_number, chunk_hash) tuples.
:type upload_id: ``list``
"""
root = ET.Element('BlockList')
for block_id in chunks:
part = ET.SubElement(root, 'Uncommitted')
part.text = str(block_id)
data = tostring(root)
params = {'comp': 'blocklist'}
headers = {}
lease.update_headers(headers)
lease.renew()
response = self.connection.request(object_path, data=data,
params=params, headers=headers,
method='PUT')
if response.status != httplib.CREATED:
raise LibcloudError('Error in blocklist commit', driver=self)
def _check_values(self, blob_type, object_size):
"""
Checks if extension arguments are valid
:param blob_type: The blob type that is being uploaded
:type blob_type: ``str``
:param object_size: The (max) size of the object being uploaded
:type object_size: ``int``
"""
if blob_type not in ['BlockBlob', 'PageBlob']:
raise LibcloudError('Invalid blob type', driver=self)
if blob_type == 'PageBlob':
if not object_size:
raise LibcloudError('Max blob size is mandatory for page blob',
driver=self)
if object_size % AZURE_PAGE_CHUNK_SIZE:
raise LibcloudError('Max blob size is not aligned to '
'page boundary', driver=self)
def upload_object(self, file_path, container, object_name, extra=None,
verify_hash=True, ex_blob_type=None, ex_use_lease=False):
"""
Upload an object currently located on a disk.
@inherits: :class:`StorageDriver.upload_object`
:param ex_blob_type: Storage class
:type ex_blob_type: ``str``
:param ex_use_lease: Indicates if we must take a lease before upload
:type ex_use_lease: ``bool``
"""
if ex_blob_type is None:
ex_blob_type = self.ex_blob_type
# Get the size of the file
file_size = os.stat(file_path).st_size
# The presumed size of the object
object_size = file_size
self._check_values(ex_blob_type, file_size)
# If size is greater than 64MB or type is Page, upload in chunks
if ex_blob_type == 'PageBlob' or file_size > AZURE_BLOCK_MAX_SIZE:
# For chunked upload of block blobs, the initial size must
# be 0.
if ex_blob_type == 'BlockBlob':
object_size = None
return self._put_object(container=container,
object_name=object_name,
object_size=object_size,
file_path=file_path, extra=extra,
verify_hash=verify_hash,
blob_type=ex_blob_type,
use_lease=ex_use_lease)
def upload_object_via_stream(self, iterator, container, object_name,
verify_hash=False, extra=None,
ex_use_lease=False, ex_blob_type=None,
ex_page_blob_size=None):
"""
@inherits: :class:`StorageDriver.upload_object_via_stream`
:param ex_blob_type: Storage class
:type ex_blob_type: ``str``
:param ex_page_blob_size: The maximum size to which the
page blob can grow to
:type ex_page_blob_size: ``int``
:param ex_use_lease: Indicates if we must take a lease before upload
:type ex_use_lease: ``bool``
"""
if ex_blob_type is None:
ex_blob_type = self.ex_blob_type
self._check_values(ex_blob_type, ex_page_blob_size)
return self._put_object(container=container,
object_name=object_name,
object_size=ex_page_blob_size,
extra=extra, verify_hash=verify_hash,
blob_type=ex_blob_type,
use_lease=ex_use_lease,
stream=iterator)
def delete_object(self, obj):
"""
@inherits: :class:`StorageDriver.delete_object`
"""
object_path = self._get_object_path(obj.container, obj.name)
response = self.connection.request(object_path, method='DELETE')
if response.status == httplib.ACCEPTED:
return True
elif response.status == httplib.NOT_FOUND:
raise ObjectDoesNotExistError(value=None, driver=self,
object_name=obj.name)
return False
def _update_metadata(self, headers, meta_data):
"""
Update the given metadata in the headers
:param headers: The headers dictionary to be updated
:type headers: ``dict``
:param meta_data: Metadata key value pairs
:type meta_data: ``dict``
"""
for key, value in list(meta_data.items()):
key = 'x-ms-meta-%s' % (key)
headers[key] = value
def _prepare_upload_headers(self, object_name, object_size,
extra, meta_data, blob_type):
"""
Prepare headers for uploading an object
:param object_name: The full name of the object being updated
:type object_name: ``str``
:param object_size: The size of the object. In case of PageBlobs,
this indicates the maximum size the blob can grow to
:type object_size: ``int``
:param extra: Extra control data for the upload
:type extra: ``dict``
:param meta_data: Metadata key value pairs
:type meta_data: ``dict``
:param blob_type: Page or Block blob type
:type blob_type: ``str``
"""
headers = {}
if blob_type is None:
blob_type = self.ex_blob_type
headers['x-ms-blob-type'] = blob_type
self._update_metadata(headers, meta_data)
if object_size is not None:
headers['Content-Length'] = object_size
if blob_type == 'PageBlob':
headers['Content-Length'] = 0
headers['x-ms-blob-content-length'] = object_size
return headers
    def _put_object(self, container, object_name, object_size,
                    file_path=None, extra=None,
                    verify_hash=True, blob_type=None, use_lease=False,
                    stream=None):
        """
        Control function that does the real job of uploading data to a blob.

        Uploads from either ``file_path`` or ``stream``, optionally holding
        an Azure lease for the duration, and verifies the MD5 the service
        returns (when it returns one) against the locally computed digest.
        """
        extra = extra or {}
        meta_data = extra.get('meta_data', {})
        content_type = extra.get('content_type', None)
        headers = self._prepare_upload_headers(object_name, object_size,
                                               extra, meta_data, blob_type)
        object_path = self._get_object_path(container, object_name)
        # Get a lease if required and do the operations
        with AzureBlobLease(self, object_path, use_lease) as lease:
            lease.update_headers(headers)
            result_dict = self._upload_object(object_name, content_type,
                                              object_path, headers=headers,
                                              file_path=file_path,
                                              stream=stream)
            response = result_dict['response']
            bytes_transferred = result_dict['bytes_transferred']
            data_hash = result_dict['data_hash']
            headers = response.headers
            if response.status != httplib.CREATED:
                raise LibcloudError(
                    'Unexpected status code, status_code=%s' % (response.status),
                    driver=self)
            # Azure returns the MD5 base64-encoded; convert it to hex so it
            # can be compared against our locally computed digest.
            server_hash = headers['content-md5']
            if server_hash:
                server_hash = binascii.hexlify(base64.b64decode(b(server_hash)))
                server_hash = server_hash.decode('utf-8')
            else:
                # TODO: HACK - We could poll the object for a while and get
                # the hash
                pass
            if (verify_hash and server_hash and data_hash != server_hash):
                raise ObjectHashMismatchError(
                    value='MD5 hash checksum does not match',
                    object_name=object_name, driver=self)
            return Object(name=object_name, size=bytes_transferred,
                          hash=headers['etag'], extra=None,
                          meta_data=meta_data, container=container,
                          driver=self)
def ex_set_object_metadata(self, obj, meta_data):
"""
Set metadata for an object
:param obj: The blob object
:type obj: :class:`Object`
:param meta_data: Metadata key value pairs
:type meta_data: ``dict``
"""
object_path = self._get_object_path(obj.container, obj.name)
params = {'comp': 'metadata'}
headers = {}
self._update_metadata(headers, meta_data)
response = self.connection.request(object_path, method='PUT',
params=params,
headers=headers)
if response.status != httplib.OK:
response.parse_error('Setting metadata')
| |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging as log
import math
import time
from karbor import exception
from karbor.i18n import _
from karbor.services.protection.bank_plugin import BankPlugin
from karbor.services.protection.bank_plugin import LeasePlugin
from karbor.services.protection import client_factory
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_service import loopingcall
from oslo_utils import uuidutils
from swiftclient import ClientException
swift_bank_plugin_opts = [
cfg.StrOpt('bank_swift_object_container',
default='karbor',
help='The default swift container to use.'),
]
LOG = logging.getLogger(__name__)
log.getLogger('swiftclient').setLevel(log.WARNING)
lease_opt = [cfg.IntOpt('lease_expire_window',
default=600,
help='expired_window for bank lease, in seconds'),
cfg.IntOpt('lease_renew_window',
default=120,
help='period for bank lease, in seconds, '
'between bank lease client renew the lease'),
cfg.IntOpt('lease_validity_window',
default=100,
help='validity_window for bank lease, in seconds'), ]
class SwiftConnectionFailed(exception.KarborException):
    """Raised when an underlying swiftclient call fails.

    The wrapped :class:`swiftclient.ClientException` is carried in the
    ``reason`` message parameter.
    """
    message = _("Connection to swift failed: %(reason)s")
class SwiftBankPlugin(BankPlugin, LeasePlugin):
    """Swift bank plugin.

    Persists bank objects in a swift container and marks ownership with a
    lease object (an auto-expiring swift object) that is renewed
    periodically while this plugin instance is alive.
    """

    def __init__(self, config, context=None):
        super(SwiftBankPlugin, self).__init__(config)
        self._config.register_opts(swift_bank_plugin_opts,
                                   "swift_bank_plugin")
        self._config.register_opts(lease_opt,
                                   "swift_bank_plugin")
        plugin_cfg = self._config.swift_bank_plugin
        self.bank_object_container = plugin_cfg.bank_swift_object_container
        self.lease_expire_window = plugin_cfg.lease_expire_window
        self.lease_renew_window = plugin_cfg.lease_renew_window
        self.context = context
        # TODO(luobin):
        # init lease_validity_window
        # according to lease_renew_window if not configured
        self.lease_validity_window = plugin_cfg.lease_validity_window
        # TODO(luobin): create a uuid of this bank_plugin
        self.owner_id = uuidutils.generate_uuid()
        self.lease_expire_time = 0
        self.bank_leases_container = "leases"
        self._connection = None

    def _setup_connection(self):
        # Build a swift client from the request context and plugin config.
        return client_factory.ClientFactory.create_client('swift',
                                                          self.context,
                                                          self._config)

    @property
    def connection(self):
        """Lazily created swift connection.

        The first access also creates the object/lease containers,
        acquires the lease and starts the periodic renew loop.
        """
        if not self._connection:
            _connection = self._setup_connection()
            # create container
            try:
                _connection.put_container(self.bank_object_container)
                _connection.put_container(self.bank_leases_container)
            except (ClientException, SwiftConnectionFailed) as err:
                # BUG FIX: put_container() is invoked on the raw swift
                # client here, so failures surface as swiftclient's
                # ClientException -- SwiftConnectionFailed is only raised
                # by this plugin's _-prefixed wrappers.  The original
                # clause caught SwiftConnectionFailed alone and let
                # container-creation errors escape unconverted.
                LOG.error("bank plugin create container failed.")
                raise exception.CreateContainerFailed(reason=err)
            self._connection = _connection
            # acquire lease
            try:
                self.acquire_lease()
            except exception.AcquireLeaseFailed:
                LOG.error("bank plugin acquire lease failed.")
                raise
            # start renew lease
            renew_lease_loop = loopingcall.FixedIntervalLoopingCall(
                self.renew_lease)
            renew_lease_loop.start(interval=self.lease_renew_window,
                                   initial_delay=self.lease_renew_window)
        return self._connection

    def get_owner_id(self, context=None):
        # The owner id doubles as the lease object's name and contents.
        return self.owner_id

    def update_object(self, key, value, context=None):
        """Store *value* under *key*, JSON-serializing non-string values.

        The serialization state is recorded in object metadata so that
        get_object() can transparently deserialize on the way back.
        """
        serialized = False
        try:
            if not isinstance(value, str):
                value = jsonutils.dumps(value)
                serialized = True
            self._put_object(container=self.bank_object_container,
                             obj=key,
                             contents=value,
                             headers={
                                 'x-object-meta-serialized': str(serialized)
                             })
        except SwiftConnectionFailed as err:
            LOG.error("update object failed, err: %s.", err)
            raise exception.BankUpdateObjectFailed(reason=err, key=key)

    def delete_object(self, key, context=None):
        try:
            self._delete_object(container=self.bank_object_container,
                                obj=key)
        except SwiftConnectionFailed as err:
            LOG.error("delete object failed, err: %s.", err)
            raise exception.BankDeleteObjectFailed(reason=err, key=key)

    def get_object(self, key, context=None):
        try:
            return self._get_object(container=self.bank_object_container,
                                    obj=key)
        except SwiftConnectionFailed as err:
            LOG.error("get object failed, err: %s.", err)
            raise exception.BankGetObjectFailed(reason=err, key=key)

    def list_objects(self, prefix=None, limit=None, marker=None,
                     sort_dir=None, context=None):
        """Return object names; descending order is emulated via end_marker."""
        try:
            if sort_dir == "desc":
                body = self._get_container(
                    container=self.bank_object_container,
                    prefix=prefix, end_marker=marker)
                return [obj.get("name") for obj in body]
            else:
                body = self._get_container(
                    container=self.bank_object_container,
                    prefix=prefix, limit=limit, marker=marker)
                return [obj.get("name") for obj in body]
        except SwiftConnectionFailed as err:
            LOG.error("list objects failed, err: %s.", err)
            raise exception.BankListObjectsFailed(reason=err)

    def acquire_lease(self):
        """Create the auto-expiring lease object for this owner."""
        container = self.bank_leases_container
        obj = self.owner_id
        contents = self.owner_id
        # Swift deletes the lease object automatically once the expire
        # window elapses, so a dead owner's lease self-destructs.
        headers = {'X-Delete-After': str(self.lease_expire_window)}
        try:
            self._put_object(container=container,
                             obj=obj,
                             contents=contents,
                             headers=headers)
            self.lease_expire_time = math.floor(
                time.time()) + self.lease_expire_window
        except SwiftConnectionFailed as err:
            LOG.error("acquire lease failed, err:%s.", err)
            raise exception.AcquireLeaseFailed(reason=err)

    def renew_lease(self):
        """Push the lease object's auto-delete window forward."""
        container = self.bank_leases_container
        obj = self.owner_id
        headers = {'X-Delete-After': str(self.lease_expire_window)}
        try:
            self._post_object(container=container,
                              obj=obj,
                              headers=headers)
            self.lease_expire_time = math.floor(
                time.time()) + self.lease_expire_window
        except SwiftConnectionFailed as err:
            # BUG FIX: the message used to read "acquire lease failed",
            # a copy-paste from acquire_lease().
            LOG.error("renew lease failed, err:%s.", err)

    def check_lease_validity(self):
        # Valid while at least lease_validity_window seconds remain.
        if (self.lease_expire_time - math.floor(time.time()) >=
                self.lease_validity_window):
            return True
        else:
            return False

    def _put_object(self, container, obj, contents, headers=None):
        try:
            self.connection.put_object(container=container,
                                       obj=obj,
                                       contents=contents,
                                       headers=headers)
        except ClientException as err:
            raise SwiftConnectionFailed(reason=err)

    def _get_object(self, container, obj):
        try:
            (_resp, body) = self.connection.get_object(container=container,
                                                       obj=obj)
            # BUG FIX: default to '' when the serialization marker is
            # absent instead of crashing on None.lower() for objects that
            # were not written through update_object().
            if _resp.get("x-object-meta-serialized", '').lower() == "true":
                body = jsonutils.loads(body)
            return body
        except ClientException as err:
            raise SwiftConnectionFailed(reason=err)

    def _post_object(self, container, obj, headers):
        try:
            self.connection.post_object(container=container,
                                        obj=obj,
                                        headers=headers)
        except ClientException as err:
            raise SwiftConnectionFailed(reason=err)

    def _delete_object(self, container, obj):
        try:
            self.connection.delete_object(container=container,
                                          obj=obj)
        except ClientException as err:
            raise SwiftConnectionFailed(reason=err)

    def _put_container(self, container):
        try:
            self.connection.put_container(container=container)
        except ClientException as err:
            raise SwiftConnectionFailed(reason=err)

    def _get_container(self, container, prefix=None, limit=None, marker=None,
                       end_marker=None):
        # Without an explicit limit, let swiftclient page through everything.
        full_listing = True if limit is None else False
        try:
            (_resp, body) = self.connection.get_container(
                container=container,
                prefix=prefix,
                limit=limit,
                marker=marker,
                end_marker=end_marker,
                full_listing=full_listing
            )
            return body
        except ClientException as err:
            raise SwiftConnectionFailed(reason=err)
| |
# -*- coding: utf-8 -*-
'''
FanFilm Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os,sys,re,json,urllib,urlparse,datetime
import re
import base64
try: action = dict(urlparse.parse_qsl(sys.argv[2].replace('?','')))['action']
except: action = None
from resources.lib.libraries import trakt
from resources.lib.libraries import control
from resources.lib.libraries import client
from resources.lib.libraries import cache
from resources.lib.libraries import metacache
from resources.lib.libraries import favourites
from resources.lib.libraries import workers
from resources.lib.libraries import views
from resources.lib.libraries import playcount
from resources.lib.libraries import cleangenre
class movies:
    def __init__(self):
        """Build the IMDb/Trakt/TMDb listing URLs and cached date windows."""
        self.list = []
        self.en_headers = {'Accept-Language': 'en-US'}
        # Base API / site endpoints
        self.trakt_link = 'http://api-v2launch.trakt.tv'
        self.imdb_link = 'http://www.imdb.com'
        self.fanarttv_key = control.fanarttv_key
        # Reference dates (UTC minus 5 hours) used to window the listings
        self.datetime = (datetime.datetime.utcnow() - datetime.timedelta(hours = 5))
        self.systime = (self.datetime).strftime('%Y%m%d%H%M%S%f')
        self.today_date = (self.datetime).strftime('%Y-%m-%d')
        self.month_date = (self.datetime - datetime.timedelta(days = 30)).strftime('%Y-%m-%d')
        self.month2_date = (self.datetime - datetime.timedelta(days = 60)).strftime('%Y-%m-%d')
        self.year_date = (self.datetime - datetime.timedelta(days = 365)).strftime('%Y-%m-%d')
        self.year_date10 = (self.datetime - datetime.timedelta(days = 3650)).strftime('%Y-%m-%d')
        # Per-user settings from the addon configuration
        self.trakt_user = control.setting('trakt.user').strip()
        self.imdb_user = control.setting('imdb_user').replace('ur', '')
        self.info_lang = control.info_lang or 'en'
        # Metadata / artwork endpoints
        self.imdb_info_link = 'http://www.omdbapi.com/?i=%s&plot=full&r=json'
        self.imdb_by_query = 'http://www.omdbapi.com/?t=%s&y=%s'
        self.tmdb_image = 'http://image.tmdb.org/t/p/original'
        self.tmdb_poster = 'http://image.tmdb.org/t/p/w500'
        self.persons_link = 'http://www.imdb.com/search/name?count=100&name=%s'
        self.personlist_link = 'http://www.imdb.com/search/name?count=100&gender=male,female'
        # Canned listing URLs (the %s placeholders are filled in by get())
        self.popular_link = 'http://www.imdb.com/search/title?title_type=feature,tv_movie&languages=en&num_votes=500,&production_status=released&groups=top_1000&sort=moviemeter,asc&count=20&start=1'
        self.featured_link = 'http://www.imdb.com/search/title?title_type=feature,tv_movie&languages=en&num_votes=500,&production_status=released&release_date=date[365],date[60]&sort=moviemeter,asc&count=20&start=1'
        self.boxoffice_link = 'http://www.imdb.com/search/title?title_type=feature,tv_movie&sort=boxoffice_gross_us,desc&count=20&start=1'
        self.oscars_link = 'http://www.imdb.com/search/title?title_type=feature,tv_movie&groups=oscar_best_picture_winners&sort=year,desc&count=20&start=1'
        self.trending_link = 'http://api-v2launch.trakt.tv/movies/trending?limit=20&page=1'
        self.views_link = 'http://www.imdb.com/search/title?title_type=feature,tv_movie&languages=en&num_votes=500,&production_status=released&sort=num_votes,desc&count=20&start=1'
        self.theaters_link = 'http://www.imdb.com/search/title?title_type=feature&languages=en&num_votes=200,&release_date=%s,%s&sort=release_date_us,desc&count=20&start=1' % (self.year_date, self.today_date)
        self.search_link = 'http://api-v2launch.trakt.tv/search?type=movie&query=%s&limit=20'
        self.genre_link = 'http://www.imdb.com/search/title?title_type=feature,tv_movie,documentary&languages=en&num_votes=100,&genres=%s&sort=moviemeter,asc&count=20&start=1'
        self.year_link = 'http://www.imdb.com/search/title?title_type=feature,tv_movie&languages=en&num_votes=100,&production_status=released&year=%s&sort=moviemeter,asc&count=20&start=1'
        self.person_link = 'http://www.imdb.com/search/title?title_type=feature,tv_movie&production_status=released&role=%s&sort=year,desc&count=40&start=1'
        self.certification_link = 'http://api.themoviedb.org/3/discover/movie?api_key=%s&certification=%s&certification_country=US&primary_release_date.lte=%s&page=1' % ('%s', '%s', self.today_date)
        # Scene-release index (predb) used for the "recently added" feed
        self.scn_link = 'http://predb.me'
        self.scn_page = 'http://predb.me/?search=%s+720p+tag:-foreign&cats=movies-hd&page=%s'
        self.added_link = 'http://www.imdb.com/search/title?title_type=feature,tv_movie&languages=en&num_votes=500,&production_status=released&release_date=%s,%s&sort=release_date,desc&count=20&start=1' % (self.year_date, self.today_date)
        # Trakt user list endpoints
        self.traktlists_link = 'http://api-v2launch.trakt.tv/users/me/lists'
        self.traktlikedlists_link = 'http://api-v2launch.trakt.tv/users/likes/lists?limit=1000000'
        self.traktlist_link = 'http://api-v2launch.trakt.tv/users/%s/lists/%s/items'
        self.traktcollection_link = 'http://api-v2launch.trakt.tv/users/me/collection/movies'
        self.traktwatchlist_link = 'http://api-v2launch.trakt.tv/users/me/watchlist/movies'
        self.traktfeatured_link = 'http://api-v2launch.trakt.tv/recommendations/movies?limit=40'
        self.trakthistory_link = 'http://api-v2launch.trakt.tv/users/me/history/movies?limit=40&page=1'
        # IMDb user list endpoints
        self.imdblists_link = 'http://www.imdb.com/user/ur%s/lists?tab=all&sort=modified:desc&filter=titles' % self.imdb_user
        self.imdblist_link = 'http://www.imdb.com/list/%s/?view=detail&sort=title:asc&title_type=feature,short,tv_movie,tv_special,video,documentary,game&start=1'
        self.imdbwatchlist_link = 'http://www.imdb.com/user/ur%s/watchlist' % self.imdb_user
        self.trakt_lang_link = 'http://api-v2launch.trakt.tv/movies/%s/translations/%s'
    def get(self, url, idx=True):
        """Fetch a movie listing and (optionally) render the directory.

        *url* may be either a full URL or the short name of one of the
        ``*_link`` attributes (e.g. ``'featured'``).  The host of the URL
        selects the scraper (trakt / imdb / scene index) and the cache
        lifetime.  With ``idx=True`` the result is also enriched via
        worker() and rendered through movieDirectory().
        """
        try:
            # Allow symbolic names: 'featured' -> self.featured_link
            try: url = getattr(self, url + '_link')
            except: pass
            try: u = urlparse.urlparse(url).netloc.lower()
            except: pass
            if u in self.trakt_link and '/users/' in url:
                # Personal trakt lists: serve the 72h cache only while the
                # account shows no new activity; any failed check falls
                # back to a short 2h cache via the except branch.
                try:
                    if url == self.trakthistory_link: raise Exception()
                    if not '/users/me/' in url: raise Exception()
                    if trakt.getActivity() > cache.timeout(self.trakt_list, url, self.trakt_user): raise Exception()
                    self.list = cache.get(self.trakt_list, 72, url, self.trakt_user)
                except:
                    self.list = cache.get(self.trakt_list, 2, url, self.trakt_user)
                if '/users/me/' in url:
                    # Title sort, ignoring leading articles
                    self.list = sorted(self.list, key=lambda k: re.sub('(^the |^a )', '', k['title'].lower()))
                if idx == True: self.worker()
            elif u in self.trakt_link:
                self.list = cache.get(self.trakt_list, 24, url, self.trakt_user)
                if idx == True: self.worker()
            elif u in self.imdb_link and ('/user/' in url or '/list/' in url):
                self.list = cache.get(self.imdb_list, 2, url, idx)
                if idx == True: self.worker()
            elif u in self.imdb_link:
                self.list = cache.get(self.imdb_list, 24, url)
                if idx == True: self.worker()
            elif u in self.scn_link:
                self.list = cache.get(self.scn_list, 24, url)
                if idx == True: self.worker()
            if idx == True: self.movieDirectory(self.list)
            return self.list
        except Exception as e:
            control.log("movies get e:%s" % e)
            pass
def widget(self):
setting = control.setting('movie_widget')
if setting == '2':
self.get(self.featured_link)
elif setting == '3':
self.get(self.trending_link)
else:
self.get(self.added_link)
def favourites(self):
try:
items = favourites.getFavourites('movies')
self.list = [i[1] for i in items]
for i in self.list:
if not 'name' in i: i['name'] = '%s (%s)' % (i['title'], i['year'])
try: i['title'] = i['title'].encode('utf-8')
except: pass
try: i['originaltitle'] = i['originaltitle'].encode('utf-8')
except: pass
try: i['name'] = i['name'].encode('utf-8')
except: pass
if not 'duration' in i: i['duration'] = '0'
if not 'imdb' in i: i['imdb'] = '0'
if not 'tmdb' in i: i['tmdb'] = '0'
if not 'tvdb' in i: i['tvdb'] = '0'
if not 'tvrage' in i: i['tvrage'] = '0'
if not 'poster' in i: i['poster'] = '0'
if not 'banner' in i: i['banner'] = '0'
if not 'fanart' in i: i['fanart'] = '0'
self.worker()
self.list = sorted(self.list, key=lambda k: k['title'])
self.movieDirectory(self.list)
except:
return
def search(self, query=None):
#try:
if query == None:
t = control.lang(30201).encode('utf-8')
k = control.keyboard('', t) ; k.doModal()
self.query = k.getText() if k.isConfirmed() else None
else:
self.query = query
if (self.query == None or self.query == ''): return
url = self.search_link % (urllib.quote_plus(self.query))
self.list = cache.get(self.trakt_list, 0, url, self.trakt_user)
self.worker()
self.movieDirectory(self.list)
return self.list
#except:
# return
def person(self, query=None):
try:
if query == None:
t = control.lang(30201).encode('utf-8')
k = control.keyboard('', t) ; k.doModal()
self.query = k.getText() if k.isConfirmed() else None
else:
self.query = query
if (self.query == None or self.query == ''): return
url = self.persons_link % urllib.quote_plus(self.query)
self.list = cache.get(self.imdb_person_list, 0, url)
for i in range(0, len(self.list)): self.list[i].update({'action': 'movies'})
self.addDirectory(self.list)
return self.list
except:
return
def genres(self):
genres = [
('Action', 'action'),
('Adventure', 'adventure'),
('Animation', 'animation'),
('Biography', 'biography'),
('Comedy', 'comedy'),
('Crime', 'crime'),
('Drama', 'drama'),
('Documentary','documentary'),
('Family', 'family'),
('Fantasy', 'fantasy'),
('History', 'history'),
('Horror', 'horror'),
('Music ', 'music'),
('Musical', 'musical'),
('Mystery', 'mystery'),
('Romance', 'romance'),
('Science Fiction', 'sci_fi'),
('Sport', 'sport'),
('Thriller', 'thriller'),
('War', 'war'),
('Western', 'western')
]
for i in genres: self.list.append({'name': cleangenre.lang(i[0], self.info_lang), 'url': self.genre_link % i[1], 'image': 'genres.png', 'action': 'movies'})
self.addDirectory(self.list)
return self.list
def certifications(self):
try:
url = self.certifications_link
self.list = cache.get(self.tmdb_certification_list, 24, url)
for i in range(0, len(self.list)): self.list[i].update({'image': 'movieCertificates.jpg', 'action': 'movies'})
self.addDirectory(self.list)
return self.list
except:
return
def years(self):
year = (self.datetime.strftime('%Y'))
for i in range(int(year)-0, int(year)-50, -1): self.list.append({'name': str(i), 'url': self.year_link % str(i), 'image': 'movieYears.jpg', 'action': 'movies'})
self.addDirectory(self.list)
return self.list
def persons(self):
self.list = cache.get(self.imdb_person_list, 24, self.personlist_link)
for i in range(0, len(self.list)): self.list[i].update({'action': 'movies'})
self.addDirectory(self.list)
return self.list
    def userlists(self):
        """Collect the user's trakt and IMDb lists into one directory.

        Each source is fetched best-effort inside its own try/except, so a
        missing account or a network failure just skips that source.
        """
        try:
            userlists = []
            if trakt.getTraktCredentialsInfo() == False: raise Exception()
            activity = trakt.getActivity()
        except:
            # NOTE(review): when this branch is taken, `activity` stays
            # unbound; the inner try blocks below then hit a NameError,
            # which their own excepts catch, forcing the uncached
            # (timeout 0) fallback.  Fragile but load-bearing.
            pass
        try:
            if trakt.getTraktCredentialsInfo() == False: raise Exception()
            try:
                # Serve the long-lived cache only while trakt reports no
                # newer account activity than the cached copy.
                if activity > cache.timeout(self.trakt_user_list, self.traktlists_link,
                                            self.trakt_user): raise Exception()
                userlists += cache.get(self.trakt_user_list, 720, self.traktlists_link, self.trakt_user)
            except:
                userlists += cache.get(self.trakt_user_list, 0, self.traktlists_link, self.trakt_user)
        except:
            pass
        try:
            self.list = []
            if self.imdb_user == '': raise Exception()
            userlists += cache.get(self.imdb_user_list, 0, self.imdblists_link)
        except:
            pass
        try:
            self.list = []
            if trakt.getTraktCredentialsInfo() == False: raise Exception()
            try:
                if activity > cache.timeout(self.trakt_user_list, self.traktlikedlists_link,
                                            self.trakt_user): raise Exception()
                userlists += cache.get(self.trakt_user_list, 720, self.traktlikedlists_link, self.trakt_user)
            except:
                userlists += cache.get(self.trakt_user_list, 0, self.traktlikedlists_link, self.trakt_user)
        except:
            pass
        self.list = userlists
        for i in range(0, len(self.list)): self.list[i].update({'image': 'userlists.png', 'action': 'movies'})
        self.addDirectory(self.list)
        return self.list
    def trakt_list(self, url, user):
        """Fetch one page of a Trakt movie list and append normalized movie
        dicts (title/year/imdb id/artwork/meta, utf-8 encoded byte strings)
        to self.list.  Returns None when the request/parse fails; otherwise
        returns self.list.  Per-item failures are silently skipped."""
        try:
            # Rebuild the query string with extended=full,images appended.
            q = dict(urlparse.parse_qsl(urlparse.urlsplit(url).query))
            q.update({'extended': 'full,images'})
            q = (urllib.urlencode(q)).replace('%2C', ',')
            u = url.replace('?' + urlparse.urlparse(url).query, '') + '?' + q
            result = trakt.getTrakt(u)
            result = json.loads(result)
            items = []
            # Some endpoints wrap each movie under a 'movie' key; others
            # return the movie objects directly.
            for i in result:
                try: items.append(i['movie'])
                except: pass
            if len(items) == 0:
                items = result
        except:
            return
        try:
            # Compute the next-page url; pagination deliberately stops
            # after page 4.
            q = dict(urlparse.parse_qsl(urlparse.urlsplit(url).query))
            p = str(int(q['page']) + 1)
            if p == '5': raise Exception()
            q.update({'page': p})
            q = (urllib.urlencode(q)).replace('%2C', ',')
            next = url.replace('?' + urlparse.urlparse(url).query, '') + '?' + q
            next = next.encode('utf-8')
        except:
            next = ''
        for item in items:
            try:
                title = item['title']
                title = client.replaceHTMLCodes(title)
                title = title.encode('utf-8')
                year = item['year']
                year = re.sub('[^0-9]', '', str(year))
                year = year.encode('utf-8')
                # Skip titles dated in the future.
                if int(year) > int((self.datetime).strftime('%Y')): raise Exception()
                imdb = item['ids']['imdb']
                if imdb == None or imdb == '': raise Exception()
                imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
                imdb = imdb.encode('utf-8')
                # Artwork is served from films4u, keyed by base64(imdb id).
                poster = 'http://films4u.org/poster/'+base64.b64encode(imdb)+'.png'
                poster = poster.encode('utf-8')
                banner = 'http://films4u.org/banner/'+base64.b64encode(imdb)+'.png'
                banner = banner.encode('utf-8')
                fanart = 'http://films4u.org/fanart/'+base64.b64encode(imdb)+'.png'
                fanart = fanart.encode('utf-8')
                try:
                    premiered = item['released']
                    premiered = re.compile('(\d{4}-\d{2}-\d{2})').findall(premiered)[0]
                except: premiered = '0'
                premiered = premiered.encode('utf-8')
                try:
                    genre = item['genres']
                    genre = [i.title() for i in genre]
                except: genre = '0'
                if genre == []: genre = '0'
                genre = ' / '.join(genre)
                genre = genre.encode('utf-8')
                try: duration = str(item['runtime'])
                except: duration = '0'
                if duration == None: duration = '0'
                duration = duration.encode('utf-8')
                try: rating = str(item['rating'])
                except: rating = '0'
                if rating == None or rating == '0.0': rating = '0'
                rating = rating.encode('utf-8')
                try: votes = str(item['votes'])
                except: votes = '0'
                try: votes = str(format(int(votes),',d'))
                except: pass
                if votes == None: votes = '0'
                votes = votes.encode('utf-8')
                try: mpaa = item['certification']
                except: mpaa = '0'
                if mpaa == None: mpaa = '0'
                mpaa = mpaa.encode('utf-8')
                plot = item['overview']
                if plot == None: plot = '0'
                plot = client.replaceHTMLCodes(plot)
                plot = plot.encode('utf-8')
                try: tagline = item['tagline']
                except: tagline = None
                # Fall back to the plot's first sentence as a tagline.
                if tagline == None and not plot == '0': tagline = re.compile('[.!?][\s]{1,2}(?=[A-Z])').split(plot)[0]
                elif tagline == None: tagline = '0'
                tagline = client.replaceHTMLCodes(tagline)
                try: tagline = tagline.encode('utf-8')
                except: pass
                self.list.append({'title': title, 'originaltitle': title, 'year': year, 'premiered': premiered, 'studio': '0', 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': '0', 'writer': '0', 'cast': '0', 'plot': plot, 'tagline': tagline, 'code': imdb, 'imdb': imdb, 'tmdb': '0', 'tvdb': '0', 'tvrage': '0', 'poster': poster, 'banner': banner, 'fanart': fanart, 'next': next})
            except:
                pass
        return self.list
def trakt_user_list(self, url, user):
try:
result = trakt.getTrakt(url)
items = json.loads(result)
except:
pass
for item in items:
try:
try: name = item['list']['name']
except: name = item['name']
name = client.replaceHTMLCodes(name)
name = name.encode('utf-8')
try: url = (trakt.slug(item['list']['user']['username']), item['list']['ids']['slug'])
except: url = ('me', item['ids']['slug'])
url = self.traktlist_link % url
url = url.encode('utf-8')
self.list.append({'name': name, 'url': url, 'context': url})
except:
pass
self.list = sorted(self.list, key=lambda k: re.sub('(^the |^a )', '', k['name'].lower()))
return self.list
    def imdb_list(self, url, idx=True):
        """Scrape an IMDb advanced-search / list page into movie dicts
        appended to self.list.  With idx=False every result page is fetched
        and concatenated; with idx=True only the first page is parsed and a
        'next' paging link is attached to each entry.  Returns None when
        the initial request fails.  All values are utf-8 byte strings."""
        try:
            if url == self.imdbwatchlist_link:
                # The watchlist url must first be resolved to its ls... list
                # id (cached for ~1 year) and turned into a list url.
                def imdb_watchlist_id(url):
                    return re.compile('/export[?]list_id=(ls\d*)').findall(client.request(url))[0]
                url = cache.get(imdb_watchlist_id, 8640, url)
                url = self.imdblist_link % url
            result = str(client.request(url,headers=self.en_headers))
            try:
                # idx=True: single page only; otherwise walk every page
                # advertised in the 'Page X of N' footer.
                if idx == True: raise Exception()
                pages = client.parseDOM(result, 'div', attrs = {'class': 'desc'})[0]
                pages = re.compile('Page \d+? of (\d*)').findall(pages)[0]
                for i in range(1, int(pages)):
                    u = url.replace('&start=1', '&start=%s' % str(i*100+1))
                    result += str(client.request(u, headers=self.en_headers))
            except:
                pass
            result = result.replace('\n','')
            result = result.decode('iso-8859-1').encode('utf-8')
            # Two supported layouts: the 'advanced' lister and the classic
            # list markup.
            items = client.parseDOM(result, 'div', attrs = {'class': 'lister-item mode-advanced'})
            items += client.parseDOM(result, 'div', attrs = {'class': 'list_item.+?'})
        except:
            return
        try:
            # Resolve the next-page url from either pagination style, keeping
            # the current url's base and swapping only the query string.
            next = client.parseDOM(result, 'a', ret='href', attrs = {'class': 'lister-page-next.+?'})
            if len(next) == 0:
                next = client.parseDOM(result, 'div', attrs = {'class': 'pagination'})[0]
                next = zip(client.parseDOM(next, 'a', ret='href'), client.parseDOM(next, 'a'))
                next = [i[0] for i in next if 'Next' in i[1]]
            next = url.replace(urlparse.urlparse(url).query, urlparse.urlparse(next[0]).query)
            next = client.replaceHTMLCodes(next)
            next = next.encode('utf-8')
        except:
            next = ''
        for item in items:
            try:
                try: title = client.parseDOM(item, 'a')[1]
                except: pass
                try: title = client.parseDOM(item, 'a', attrs = {'onclick': '.+?'})[-1]
                except: pass
                title = client.replaceHTMLCodes(title)
                title = title.encode('utf-8')
                year = client.parseDOM(item, 'span', attrs = {'class': 'lister-item-year.+?'})
                year += client.parseDOM(item, 'span', attrs = {'class': 'year_type'})
                year = re.findall('(\d{4})', year[0])[0]
                year = year.encode('utf-8')
                # Skip titles dated in the future.
                if int(year) > int((self.datetime).strftime('%Y')): raise Exception()
                imdb = client.parseDOM(item, 'a', ret='href')[0]
                imdb = re.findall('(tt\d*)', imdb)[0]
                imdb = imdb.encode('utf-8')
                #control.log('[imdb_list] Title: %s ID:%s' %(title,imdb))
                try: poster = client.parseDOM(item, 'img', ret='loadlate')[0]
                except: poster = '0'
                # Rewrite IMDb's image-sizing suffix to request a 500px copy.
                poster = re.sub('(?:_SX\d+?|)(?:_SY\d+?|)(?:_UX\d+?|)_CR\d+?,\d+?,\d+?,\d*','_SX500', poster)
                poster = client.replaceHTMLCodes(poster)
                poster = poster.encode('utf-8')
                try: genre = client.parseDOM(item, 'span', attrs = {'class': 'genre'})[0]
                except: genre = '0'
                genre = ' / '.join([i.strip() for i in genre.split(',')])
                if genre == '': genre = '0'
                genre = client.replaceHTMLCodes(genre)
                genre = genre.encode('utf-8')
                try: duration = re.findall('(\d+?) min(?:s|)', item)[-1]
                except: duration = '0'
                duration = duration.encode('utf-8')
                rating = '0'
                try: rating = client.parseDOM(item, 'span', attrs = {'class': 'rating-rating'})[0]
                except: pass
                try: rating = client.parseDOM(rating, 'span', attrs = {'class': 'value'})[0]
                except: rating = '0'
                try: rating = client.parseDOM(item, 'div', ret='data-value', attrs = {'class': '.*?imdb-rating'})[0]
                except: pass
                if rating == '' or rating == '-': rating = '0'
                rating = client.replaceHTMLCodes(rating)
                rating = rating.encode('utf-8')
                try: votes = client.parseDOM(item, 'div', ret='title', attrs = {'class': '.*?rating-list'})[0]
                except: votes = '0'
                try: votes = re.findall('\((.+?) vote(?:s|)\)', votes)[0]
                except: votes = '0'
                if votes == '': votes = '0'
                votes = client.replaceHTMLCodes(votes)
                votes = votes.encode('utf-8')
                try: mpaa = client.parseDOM(item, 'span', attrs = {'class': 'certificate'})[0]
                except: mpaa = '0'
                if mpaa == '' or mpaa == 'NOT_RATED': mpaa = '0'
                mpaa = mpaa.replace('_', '-')
                mpaa = client.replaceHTMLCodes(mpaa)
                mpaa = mpaa.encode('utf-8')
                try: director = re.findall('Director(?:s|):(.+?)(?:\||</div>)', item)[0]
                except: director = '0'
                director = client.parseDOM(director, 'a')
                director = ' / '.join(director)
                if director == '': director = '0'
                director = client.replaceHTMLCodes(director)
                director = director.encode('utf-8')
                try: cast = re.findall('Stars(?:s|):(.+?)(?:\||</div>)', item)[0]
                except: cast = '0'
                cast = client.replaceHTMLCodes(cast)
                cast = cast.encode('utf-8')
                cast = client.parseDOM(cast, 'a')
                if cast == []: cast = '0'
                plot = '0'
                try: plot = client.parseDOM(item, 'p', attrs = {'class': 'text-muted'})[0]
                except: pass
                try: plot = client.parseDOM(item, 'div', attrs = {'class': 'item_description'})[0]
                except: pass
                plot = plot.rsplit('<span>', 1)[0].strip()
                if plot == '': plot = '0'
                plot = client.replaceHTMLCodes(plot)
                plot = plot.encode('utf-8')
                fanart = 'http://films4u.org/fanart/'+base64.b64encode(imdb)+'.png'
                fanart = fanart.encode('utf-8')
                # Use the plot's first sentence as a tagline substitute.
                tagline = re.compile('[.!?][\s]{1,2}(?=[A-Z])').split(plot)[0]
                try: tagline = tagline.encode('utf-8')
                except: pass
                self.list.append({'title': title, 'originaltitle': title, 'year': year, 'premiered': '0', 'studio': '0', 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': '0', 'cast': cast, 'plot': plot, 'tagline': tagline, 'code': imdb, 'imdb': imdb, 'tmdb': '0', 'tvdb': '0', 'tvrage': '0', 'poster': poster, 'banner': '0', 'fanart': fanart, 'next': next})
            except:
                pass
        return self.list
def imdb_user_list(self, url):
print("Items", url)
try:
result = client.request(url, headers=self.en_headers)
result = result.decode('iso-8859-1').encode('utf-8')
items = client.parseDOM(result, 'div', attrs = {'class': 'list_name'})
#control.log("##################><><><><> trakt_list item %s" % item)
print("Items",items)
except:
pass
for item in items:
try:
name = client.parseDOM(item, 'a')[0]
name = client.replaceHTMLCodes(name)
name = name.encode('utf-8')
url = client.parseDOM(item, 'a', ret='href')[0]
url = url.split('/list/', 1)[-1].replace('/', '')
url = self.imdblist_link % url
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
self.list.append({'name': name, 'url': url, 'context': url})
except:
pass
self.list = sorted(self.list, key=lambda k: re.sub('(^the |^a )', '', k['name'].lower()))
return self.list
def imdb_person_list(self, url):
try:
result = client.request(url)
result = result.decode('iso-8859-1').encode('utf-8')
items = client.parseDOM(result, 'tr', attrs = {'class': '.+? detailed'})
except:
return
for item in items:
try:
name = client.parseDOM(item, 'a', ret='title')[0]
name = client.replaceHTMLCodes(name)
name = name.encode('utf-8')
url = client.parseDOM(item, 'a', ret='href')[0]
url = re.findall('(nm\d*)', url, re.I)[0]
url = self.person_link % url
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
image = client.parseDOM(item, 'img', ret='src')[0]
if not ('._SX' in image or '._SY' in image): raise Exception()
image = re.sub('_SX\d*|_SY\d*|_CR\d+?,\d+?,\d+?,\d*','_SX500', image)
image = client.replaceHTMLCodes(image)
image = image.encode('utf-8')
self.list.append({'name': name, 'url': url, 'image': image})
except:
pass
return self.list
    def scn_list(self, url):
        """Build a paged list of recent scene releases: scrape release
        names (cached 24h), resolve the current 30-item window against the
        imdb_by_query endpoint on worker threads, append movie dicts to
        self.list, and attach a 'next' paging link to every entry."""
        def predb_items():
            # Collect (title, year) release tuples from the last ~6 months,
            # de-duplicated, newest first, capped at 150 entries.
            try:
                years = [(self.datetime).strftime('%Y'), (self.datetime - datetime.timedelta(days = 365)).strftime('%Y')]
                months = (self.datetime - datetime.timedelta(days = 180)).strftime('%Y%m%d')
                result = ''
                for i in years:
                    result += client.request(self.scn_page % (str(i), '1'))
                    result += client.request(self.scn_page % (str(i), '2'))
                items = client.parseDOM(result, 'div', attrs = {'class': 'post'})
                # Pair each post title with its date, then strip release tags
                # (year/SxxExx/3D and everything after) from the title.
                items = [(client.parseDOM(i, 'a', attrs = {'class': 'p-title'}), re.compile('(\d{4}-\d{2}-\d{2})').findall(i)) for i in items]
                items = [(i[0][0], i[1][0]) for i in items if len(i[0]) > 0 and len(i[1]) > 0]
                items = [(re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|3D)(\.|\)|\]|\s)(.+)', '', i[0]), re.compile('[\.|\(|\[|\s](\d{4})[\.|\)|\]|\s]').findall(i[0]), re.sub('[^0-9]', '', i[1])) for i in items]
                items = [(i[0], i[1][-1], i[2]) for i in items if len(i[1]) > 0]
                items = [i for i in items if int(months) <= int(i[2])]
                items = sorted(items,key=lambda x: x[2])[::-1]
                items = [(re.sub('(\.|\(|\[|LIMITED|UNCUT)', ' ', i[0]).strip(), i[1]) for i in items]
                # Order-preserving de-duplication.
                items = [x for y,x in enumerate(items) if x not in items[:y]]
                items = items[:150]
                print items
                return items
            except:
                return
        def predb_list(i):
            # Thread worker: resolve one (title, year) tuple to full movie
            # metadata and append it to self.list; failures are dropped.
            try:
                url = self.imdb_by_query % (urllib.quote_plus(i[0]), i[1])
                item = client.request(url, headers=self.en_headers ,timeout='10')
                item = json.loads(item)
                title = item['Title']
                title = client.replaceHTMLCodes(title)
                title = title.encode('utf-8')
                year = item['Year']
                year = re.sub('[^0-9]', '', str(year))
                year = year.encode('utf-8')
                name = '%s (%s)' % (title, year)
                try: name = name.encode('utf-8')
                except: pass
                imdb = item['imdbID']
                if imdb == None or imdb == '' or imdb == 'N/A': raise Exception()
                imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
                imdb = imdb.encode('utf-8')
                #poster = 'http://films4u.org/poster/'+base64.b64encode(imdb)+'.png'
                #poster = poster.encode('utf-8')
                poster = item['Poster']
                if poster == None or poster == '' or poster == 'N/A': poster = '0'
                if not ('_SX' in poster or '_SY' in poster): poster = '0'
                # Rewrite IMDb's image-sizing suffix to request a 500px copy.
                poster = re.sub('_SX\d*|_SY\d*|_CR\d+?,\d+?,\d+?,\d*','_SX500', poster)
                poster = poster.encode('utf-8')
                fanart = 'http://films4u.org/fanart/'+base64.b64encode(imdb)+'.png'
                fanart = fanart.encode('utf-8')
                genre = item['Genre']
                if genre == None or genre == '' or genre == 'N/A': genre = '0'
                genre = genre.replace(', ', ' / ')
                genre = genre.encode('utf-8')
                duration = item['Runtime']
                if duration == None or duration == '' or duration == 'N/A': duration = '0'
                duration = re.sub('[^0-9]', '', str(duration))
                duration = duration.encode('utf-8')
                rating = item['imdbRating']
                if rating == None or rating == '' or rating == 'N/A' or rating == '0.0': rating = '0'
                rating = rating.encode('utf-8')
                votes = item['imdbVotes']
                try: votes = str(format(int(votes),',d'))
                except: pass
                if votes == None or votes == '' or votes == 'N/A': votes = '0'
                votes = votes.encode('utf-8')
                mpaa = item['Rated']
                if mpaa == None or mpaa == '' or mpaa == 'N/A': mpaa = '0'
                mpaa = mpaa.encode('utf-8')
                director = item['Director']
                if director == None or director == '' or director == 'N/A': director = '0'
                director = director.replace(', ', ' / ')
                director = re.sub(r'\(.*?\)', '', director)
                director = ' '.join(director.split())
                director = director.encode('utf-8')
                writer = item['Writer']
                if writer == None or writer == '' or writer == 'N/A': writer = '0'
                writer = writer.replace(', ', ' / ')
                writer = re.sub(r'\(.*?\)', '', writer)
                writer = ' '.join(writer.split())
                writer = writer.encode('utf-8')
                cast = item['Actors']
                if cast == None or cast == '' or cast == 'N/A': cast = '0'
                cast = [x.strip() for x in cast.split(',') if not x == '']
                try: cast = [(x.encode('utf-8'), '') for x in cast]
                except: cast = []
                if cast == []: cast = '0'
                plot = item['Plot']
                if plot == None or plot == '' or plot == 'N/A': plot = '0'
                plot = client.replaceHTMLCodes(plot)
                plot = plot.encode('utf-8')
                # Use the plot's first sentence as a tagline substitute.
                tagline = re.compile('[.!?][\s]{1,2}(?=[A-Z])').split(plot)[0]
                try: tagline = tagline.encode('utf-8')
                except: pass
                self.list.append({'title': title, 'originaltitle': title, 'year': year, 'premiered': '0', 'studio': '0', 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': writer, 'cast': cast, 'plot': plot, 'tagline': tagline, 'name': name, 'code': imdb, 'imdb': imdb, 'tmdb': '0', 'tvdb': '0', 'tvrage': '0', 'poster': poster, 'banner': '0', 'fanart': fanart})
            except:
                pass
        try:
            items = cache.get(predb_items, 24)
            # Page window is driven by the url's start= parameter.
            start = re.compile('start=(\d*)').findall(url)[-1]
            start = int(start)
            if len(items) > (start + 30): next = self.scn_link + '?start=%s' % (start + 30)
            else: next = ''
        except:
            return
        threads = []
        # Resolve the current 30-item window concurrently.
        for i in range(start - 1, start + 29):
            try: threads.append(workers.Thread(predb_list, items[i]))
            except: pass
        [i.start() for i in threads]
        [i.join() for i in threads]
        for i in range(0, len(self.list)): self.list[i].update({'next': next})
        return self.list
def worker(self):
self.meta = []
total = len(self.list)
#control.log("##################><><><><> WORKER TOTAL %s" % total)
for i in range(0, total): self.list[i].update({'metacache': False})
self.list = metacache.fetch(self.list, self.info_lang)
for r in range(0, total, 20):
threads = []
for i in range(r, r+20):
if i <= total: threads.append(workers.Thread(self.super_info, i))
[i.start() for i in threads]
[i.join() for i in threads]
if len(self.meta) > 0: metacache.insert(self.meta)
self.list = [i for i in self.list if not i['imdb'] == '0']
    def super_info(self, i):
        """Thread worker: enrich self.list[i] in place with full metadata
        from the imdb_info_link endpoint (and, for non-English locales,
        translated title/plot from Trakt), then queue the merged record on
        self.meta for metacache persistence.  Entries already served from
        the metacache, or without an IMDb id, are skipped via the except
        handler.  All stored values are utf-8 byte strings; '0' means
        'unknown' throughout."""
        try:
            #control.log('[super_info] ID:%s' % (str(i)))
            zero ='0'.encode('utf-8')
            if self.list[i]['metacache'] == True: raise Exception()
            try: imdb = self.list[i]['imdb']
            except: imdb = '0'
            if not imdb == '0': url = self.imdb_info_link % imdb
            else: raise Exception()
            item = client.request(url, timeout='10')
            item = json.loads(item)
            title = item['Title']
            title = title.encode('utf-8')
            if not title == '0':
                self.list[i].update({'title': title})
                self.list[i].update({'originaltitle': title})
            # NOTE(review): originaltitle is set even when title == '0' —
            # presumably intentional so the meta record always has one.
            originaltitle = title
            year = item['Year']
            year = year.encode('utf-8')
            if not year == '0': self.list[i].update({'year': year})
            imdb = item['imdbID']
            if imdb == None or imdb == '' or imdb == 'N/A': imdb = '0'
            imdb = imdb.encode('utf-8')
            if not imdb == '0': self.list[i].update({'imdb': imdb, 'code': imdb})
            #control.log('[super_info] Title: %s ID:%s' % (title, imdb))
            try:
                poster = item['Poster']
                if poster == '' or poster == None: poster = '0'
                #if not poster == '0': poster = '%s%s' % (self.tmdb_poster, poster)
                poster = poster.encode('utf-8')
                if not poster == '0': self.list[i].update({'poster': poster})
            except:
                poster = zero
            try:
                # Fanart is served from films4u, keyed by base64(imdb id).
                if not imdb == '0':
                    fanart = 'http://films4u.org/fanart/'+base64.b64encode(imdb)+'.png'
                    fanart= fanart.encode('utf-8')
                else:
                    fanart = zero
            except:
                fanart = zero
            # http://fanart.filmkodi.com/tt0006333.jpg
            try:
                premiered = item['Released']
                premiered = re.compile('(\d{4}-\d{2}-\d{2})').findall(premiered)[0]
            except: premiered = '0'
            if premiered == '' or premiered == None: premiered = '0'
            premiered = premiered.encode('utf-8')
            if not premiered == '0': self.list[i].update({'premiered': premiered})
            #studio = item['production_companies']
            #try: studio = [x['name'] for x in studio][0]
            #except:
            # Studio lookup is disabled; always stored as unknown.
            studio = '0'
            #if studio == '' or studio == None: studio = '0'
            studio = studio.encode('utf-8')
            #if not studio == '0': self.list[i].update({'studio': studio})
            try: genre = item['Genre']
            except: genre = '0'
            if genre == '' or genre == None or genre == []: genre = '0'
            genre = genre.encode('utf-8')
            if not genre == '0': self.list[i].update({'genre': genre})
            try: duration = str(item['Runtime'].replace(' min',''))
            except: duration = '0'
            if duration == '' or duration == None: duration = '0'
            duration = duration.encode('utf-8')
            if not duration == '0': self.list[i].update({'duration': duration})
            try: rating = str(item['imdbRating'])
            except: rating = '0'
            if rating == '' or rating == None: rating = '0'
            rating = rating.encode('utf-8')
            if not rating == '0': self.list[i].update({'rating': rating})
            try:
                votes = str(item['imdbVotes'])
                votes = str(format(int(votes),',d'))
            except:
                votes = '0'
            if votes == '' or votes == None: votes = '0'
            votes = votes.encode('utf-8')
            if not votes == '0': self.list[i].update({'votes': votes})
            try:
                # NOTE(review): 'Country' is stored in the mpaa slot here —
                # looks deliberate (certificate unavailable) but verify.
                mpaa = item['Country']
            except:
                mpaa = '0'
            if mpaa == '' or mpaa == None: mpaa = '0'
            mpaa = mpaa.encode('utf-8')
            if not mpaa == '0': self.list[i].update({'mpaa': mpaa})
            try: cast = item['Actors']
            except: cast = '0'
            if cast == None or cast == '' or cast == 'N/A': cast = '0'
            cast = [x.strip() for x in cast.split(',') if not x == '']
            try: cast = [(x.encode('utf-8'), '') for x in cast]
            except: cast = []
            if cast == []: cast = '0'
            if not cast == '0': self.list[i].update({'cast': cast})
            try: writer = item['Writer']
            except: writer = '0'
            if writer == '' or writer == None: writer= '0'
            writer = writer.encode('utf-8').replace(', ', ' / ')
            if len(writer) > 0: self.list[i].update({'writer': writer})
            plot = item['Plot']
            if plot == '' or plot == None: plot = '0'
            plot = plot.encode('utf-8')
            if not plot == '0': self.list[i].update({'plot': plot})
            director = item['Director']
            if director == '' or director == None or director == []: director = '0'
            director = director.encode('utf-8')
            if not director == '0': self.list[i].update({'director': director})
            if not self.info_lang == 'en':
                # Overlay localized title/plot from Trakt; best-effort only.
                url = self.trakt_lang_link % (imdb, self.info_lang)
                try:
                    item = trakt.getTrakt(url)
                    item = json.loads(item)[0]
                    t = item['title']
                    if not (t == None or t == ''): title = t
                    try: title = title.encode('utf-8')
                    except: pass
                    if not title == '0': self.list[i].update({'title': title})
                    t = item['overview']
                    if not (t == None or t == ''): plot = t
                    try: plot = plot.encode('utf-8')
                    except: pass
                    if not plot == '0': self.list[i].update({'plot': plot})
                except:
                    pass
            #self.meta.append({'imdb': imdb, 'tmdb': '0', 'tvdb': '0', 'lang': self.info_lang, 'item': {'code': imdb, 'imdb': imdb, 'tmdb': '0', 'poster': poster, 'fanart': fanart, 'premiered': premiered, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': writer, 'cast': cast, 'plot': plot, 'tagline': zero}})
            self.meta.append({'imdb': imdb, 'tmdb': '0', 'tvdb': '0', 'lang': self.info_lang, 'item': {'title': title, 'originaltitle': originaltitle, 'year': year, 'code': imdb, 'imdb': imdb, 'tmdb': '0', 'poster': poster, 'banner': zero, 'fanart': fanart, 'premiered': premiered, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': writer, 'cast': cast, 'plot': plot}})
            #control.log("><><><><> ITEM META IMDB %s" % imdb)
        except Exception as e:
            control.log('$Super error: %s' % e)
            pass
    def movieDirectory(self, items):
        """Render a list of movie dicts as Kodi directory items, with
        context menus (playback mode, watched state, trakt manager,
        favourites, library export, trailer), a trailing 'next page' item,
        and the movies view.  Per-item failures are silently skipped.

        NOTE(review): `action` is read below but never defined in this
        method — presumably a module-level global holding the current
        plugin action; confirm against the dispatcher.
        """
        if items == None or len(items) == 0: return
        # Folder items (resolve sources on click) only when autoplay is off
        # and host selection is manual; never while PseudoTV is running.
        isFolder = True if control.setting('autoplay') == 'false' and control.setting('host_select') == '1' else False
        isFolder = False if control.window.getProperty('PseudoTVRunning') == 'True' else isFolder
        playbackMenu = control.lang(30204).encode('utf-8') if control.setting('autoplay') == 'true' else control.lang(30203).encode('utf-8')
        traktMode = False if trakt.getTraktCredentials() == False else True
        cacheToDisc = False if not action == 'movieSearch' else True
        addonPoster, addonBanner = control.addonPoster(), control.addonBanner()
        addonFanart, settingFanart = control.addonFanart(), control.setting('fanart')
        sysaddon = sys.argv[0]
        indicators = playcount.getMovieIndicators(refresh=True) if action == 'movies' else playcount.getMovieIndicators()
        # Both branches currently yield the same label; the ternary is a
        # leftover from per-provider labels.
        watchedMenu = control.lang(30206).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(30206).encode('utf-8')
        unwatchedMenu = control.lang(30207).encode('utf-8') if trakt.getTraktIndicatorsInfo() == True else control.lang(30207).encode('utf-8')
        try:
            favitems = favourites.getFavourites('movies')
            favitems = [i[0] for i in favitems]
        except:
            # NOTE(review): on failure `favitems` stays unbound; the later
            # `imdb in favitems` then raises inside the per-item try and the
            # item is skipped — confirm this is acceptable.
            pass
        for i in items:
            try:
                label = '%s (%s)' % (i['title'], i['year'])
                imdb, title, year = i['imdb'], i['originaltitle'], i['year']
                #label = i['name']
                sysname = urllib.quote_plus('%s (%s)' % (title, year))
                systitle = urllib.quote_plus(title)
                tmdb = i['tmdb']
                poster, banner, fanart = i['poster'], i['banner'], i['fanart']
                if poster == '0': poster = addonPoster
                if banner == '0' and poster == '0': banner = addonBanner
                elif banner == '0': banner = poster
                # Strip unknown ('0') fields before handing meta to Kodi.
                meta = dict((k,v) for k, v in i.iteritems() if not v == '0')
                meta.update({'trailer': '%s?action=trailer&name=%s' % (sysaddon, sysname)})
                # Default 120 min, then convert minutes -> seconds for Kodi.
                if i['duration'] == '0': meta.update({'duration': '120'})
                try: meta.update({'duration': str(int(meta['duration']) * 60)})
                except: pass
                try: meta.update({'genre': cleangenre.lang(meta['genre'], self.info_lang)})
                except: pass
                sysmeta = urllib.quote_plus(json.dumps(meta))
                url = '%s?action=play&name=%s&title=%s&year=%s&imdb=%s&tmdb=%s&meta=%s&t=%s' % (sysaddon, sysname, systitle, year, imdb, tmdb, sysmeta, self.systime)
                sysurl = urllib.quote_plus(url)
                if isFolder == True:
                    url = '%s?action=sources&name=%s&title=%s&year=%s&imdb=%s&tmdb=%s&meta=%s' % (sysaddon, sysname, systitle, year, imdb, tmdb, sysmeta)
                cm = []
                cm.append((playbackMenu, 'RunPlugin(%s?action=alterSources&url=%s&meta=%s)' % (sysaddon, sysurl, sysmeta)))
                cm.append((control.lang(30205).encode('utf-8'), 'Action(Info)'))
                #if not action == 'movieSearch':
                #    cm.append((control.lang(30206).encode('utf-8'), 'RunPlugin(%s?action=moviePlaycount&title=%s&year=%s&imdb=%s&query=7)' % (sysaddon, systitle, year, imdb)))
                #    cm.append((control.lang(30207).encode('utf-8'), 'RunPlugin(%s?action=moviePlaycount&title=%s&year=%s&imdb=%s&query=6)' % (sysaddon, systitle, year, imdb)))
                try:
                    # Watched-state toggle: overlay 7 == watched.
                    overlay = int(playcount.getMovieOverlay(indicators, imdb))
                    #control.log('# %s' % overlay)
                    if overlay == 7:
                        cm.append((unwatchedMenu, 'RunPlugin(%s?action=moviePlaycount&imdb=%s&query=6)' % (sysaddon, imdb)))
                        meta.update({'playcount': 1, 'overlay': 7})
                    else:
                        cm.append((watchedMenu, 'RunPlugin(%s?action=moviePlaycount&imdb=%s&query=7)' % (sysaddon, imdb)))
                        meta.update({'playcount': 0, 'overlay': 6})
                except Exception as e:
                    control.log('#Overlay e %s' % e)
                    pass
                if traktMode == True:
                    cm.append((control.lang(30208).encode('utf-8'), 'RunPlugin(%s?action=traktManager&name=%s&imdb=%s&content=movie)' % (sysaddon, sysname, imdb)))
                # Favourites entry depends on where this directory came from.
                if action == 'movieFavourites':
                    cm.append((control.lang(30210).encode('utf-8'), 'RunPlugin(%s?action=deleteFavourite&meta=%s&content=movies)' % (sysaddon, sysmeta)))
                elif action == 'movieSearch':
                    cm.append((control.lang(30209).encode('utf-8'), 'RunPlugin(%s?action=addFavourite&meta=%s&query=0&content=movies)' % (sysaddon, sysmeta)))
                else:
                    if not imdb in favitems: cm.append((control.lang(30209).encode('utf-8'), 'RunPlugin(%s?action=addFavourite&meta=%s&content=movies)' % (sysaddon, sysmeta)))
                    else: cm.append((control.lang(30210).encode('utf-8'), 'RunPlugin(%s?action=deleteFavourite&meta=%s&content=movies)' % (sysaddon, sysmeta)))
                cm.append((control.lang(30211).encode('utf-8'), 'RunPlugin(%s?action=movieToLibrary&name=%s&title=%s&year=%s&imdb=%s&tmdb=%s)' % (sysaddon, sysname, systitle, year, imdb, tmdb)))
                cm.append((control.lang(30212).encode('utf-8'), 'RunPlugin(%s?action=addView&content=movies)' % sysaddon))
                #Trailer
                cm.append((control.lang(33003).encode('utf-8'),'RunPlugin(%s?action=trailer&name=%s)' % (sysaddon, sysname)))
                item = control.item(label=label, iconImage=poster, thumbnailImage=poster)
                try: item.setArt({'poster': poster, 'banner': banner})
                except: pass
                if settingFanart == 'true' and not fanart == '0':
                    item.setProperty('Fanart_Image', fanart)
                elif not addonFanart == None:
                    item.setProperty('Fanart_Image', addonFanart)
                item.setInfo(type='Video', infoLabels = meta)
                item.setProperty('Video', 'true')
                #item.setProperty('IsPlayable', 'true')
                item.addContextMenuItems(cm, replaceItems=True)
                control.addItem(handle=int(sys.argv[1]), url=url, listitem=item, isFolder=isFolder)
            except:
                pass
        try:
            # Trailing 'next page' folder item, when a paging link exists.
            url = items[0]['next']
            if url == '': raise Exception()
            url = '%s?action=movies&url=%s' % (sysaddon, urllib.quote_plus(url))
            addonNext = control.addonNext()
            item = control.item(label=control.lang(30213).encode('utf-8'), iconImage=addonNext, thumbnailImage=addonNext)
            item.addContextMenuItems([], replaceItems=False)
            if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart)
            control.addItem(handle=int(sys.argv[1]), url=url, listitem=item, isFolder=True)
        except:
            pass
        control.content(int(sys.argv[1]), 'movies')
        control.directory(int(sys.argv[1]), cacheToDisc=cacheToDisc)
        views.setView('movies', {'skin.confluence': 500})
def addDirectory(self, items):
if items == None or len(items) == 0: return
sysaddon = sys.argv[0]
addonFanart = control.addonFanart()
addonThumb = control.addonThumb()
artPath = control.artPath()
for i in items:
try:
try: name = control.lang(i['name']).encode('utf-8')
except: name = i['name']
if i['image'].startswith('http://'): thumb = i['image']
elif not artPath == None: thumb = os.path.join(artPath, i['image'])
else: thumb = addonThumb
url = '%s?action=%s' % (sysaddon, i['action'])
try: url += '&url=%s' % urllib.quote_plus(i['url'])
except: pass
cm = []
try: cm.append((control.lang(30211).encode('utf-8'), 'RunPlugin(%s?action=moviesToLibrary&url=%s)' % (sysaddon, urllib.quote_plus(i['context']))))
except: pass
item = control.item(label=name, iconImage=thumb, thumbnailImage=thumb)
item.addContextMenuItems(cm, replaceItems=False)
if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart)
control.addItem(handle=int(sys.argv[1]), url=url, listitem=item, isFolder=True)
except:
pass
control.directory(int(sys.argv[1]), cacheToDisc=True)
# ---- (concatenation artifact removed: stray "| |" separator between two unrelated source files)
from pandac.PandaModules import *
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from direct.fsm import StateData
from toontown.toon import ToonAvatarPanel
from toontown.friends import ToontownFriendSecret
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from otp.otpbase import OTPGlobals
FLPPets = 1
FLPOnline = 2
FLPAll = 3
FLPOnlinePlayers = 4
FLPPlayers = 5
FLPEnemies = 6
globalFriendsList = None
def determineFriendName(friendTuple):
    # Resolve the display name for a friend tuple of 2, 3 or 4 elements:
    # (avId, flags[, playerId[, showType]]).  Returns None if no name can
    # be resolved.
    friendName = None
    if len(friendTuple) == 2:
        avId, flags = friendTuple
        playerId, showType = None, 0
    elif len(friendTuple) == 3:
        avId, flags, playerId = friendTuple
        showType = 0
    elif len(friendTuple) == 4:
        avId, flags, playerId, showType = friendTuple
    if showType == 1 and playerId:
        # Player (account-level) friend: use the registered player name.
        info = base.cr.playerFriendsManager.playerId2Info.get(playerId)
        friendName = info.playerName
    else:
        # Avatar friend: normal friend handle first, then the player
        # friends manager as a fallback source of handles.
        handle = base.cr.identifyFriend(avId)
        if not handle and hasattr(base.cr, 'playerFriendsManager'):
            handle = base.cr.playerFriendsManager.getAvHandleFromId(avId)
        if handle:
            friendName = handle.getName()
    return friendName
def compareFriends(f1, f2):
    # cmp-style comparator ordering two friend tuples by their resolved
    # display names: 1, 0 or -1.
    name1 = determineFriendName(f1)
    name2 = determineFriendName(f2)
    return (name1 > name2) - (name1 < name2)
def showFriendsList():
    # Lazily create the singleton friends-list panel, then bring it up.
    global globalFriendsList
    if globalFriendsList is None:
        globalFriendsList = FriendsListPanel()
    globalFriendsList.enter()
    return
def hideFriendsList():
    # Dismiss the panel, if it has ever been created.
    if globalFriendsList is not None:
        globalFriendsList.exit()
    return
def showFriendsListTutorial():
    # Tutorial variant of showFriendsList: open the panel, disable the
    # secrets button for unpaid accounts, and lock the close button by
    # stashing its command so hideFriendsListTutorial can restore it.
    global globalFriendsList
    if globalFriendsList is None:
        globalFriendsList = FriendsListPanel()
    globalFriendsList.enter()
    if not base.cr.isPaid():
        globalFriendsList.secrets['state'] = DGG.DISABLED
    globalFriendsList.closeCommand = globalFriendsList.close['command']
    globalFriendsList.close['command'] = None
    return
def hideFriendsListTutorial():
    # Undo showFriendsListTutorial: restore the stashed close handler,
    # re-enable the secrets button for unpaid accounts, and dismiss the
    # panel.
    if globalFriendsList is not None:
        if hasattr(globalFriendsList, 'closeCommand'):
            globalFriendsList.close['command'] = globalFriendsList.closeCommand
        if not base.cr.isPaid():
            globalFriendsList.secrets['state'] = DGG.NORMAL
        globalFriendsList.exit()
    return
def isFriendsListShown():
    # Report the panel's entered flag; 0 when it was never created.
    if globalFriendsList is None:
        return 0
    return globalFriendsList.isEntered
def unloadFriendsList():
    # Tear down the singleton panel and drop the module-level reference.
    global globalFriendsList
    if globalFriendsList is not None:
        globalFriendsList.unload()
        globalFriendsList = None
    return
class FriendsListPanel(DirectFrame, StateData.StateData):
def __init__(self):
self.leftmostPanel = FLPPets
self.rightmostPanel = FLPPlayers
if base.cr.productName in ['DisneyOnline-UK',
'DisneyOnline-AP',
'JP',
'FR',
'BR']:
self.rightmostPanel = FLPAll
DirectFrame.__init__(self, relief=None)
self.listScrollIndex = [0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0]
self.initialiseoptions(FriendsListPanel)
StateData.StateData.__init__(self, 'friends-list-done')
self.friends = {}
self.textRolloverColor = Vec4(1, 1, 0, 1)
self.textDownColor = Vec4(0.5, 0.9, 1, 1)
self.textDisabledColor = Vec4(0.4, 0.8, 0.4, 1)
self.panelType = FLPOnline
return
def load(self):
if self.isLoaded == 1:
return None
self.isLoaded = 1
gui = loader.loadModel('phase_3.5/models/gui/friendslist_gui')
auxGui = loader.loadModel('phase_3.5/models/gui/avatar_panel_gui')
self.title = DirectLabel(parent=self, relief=None, text='', text_scale=TTLocalizer.FLPtitle, text_fg=(0, 0.1, 0.4, 1), pos=(0.007, 0.0, 0.2))
background_image = gui.find('**/FriendsBox_Open')
self['image'] = background_image
self.reparentTo(base.a2dTopRight)
self.setPos(-0.233, 0, -0.46)
self.scrollList = DirectScrolledList(parent=self, relief=None, incButton_image=(gui.find('**/FndsLst_ScrollUp'),
gui.find('**/FndsLst_ScrollDN'),
gui.find('**/FndsLst_ScrollUp_Rllvr'),
gui.find('**/FndsLst_ScrollUp')), incButton_relief=None, incButton_pos=(0.0, 0.0, -0.316), incButton_image3_color=Vec4(0.6, 0.6, 0.6, 0.6), incButton_scale=(1.0, 1.0, -1.0), decButton_image=(gui.find('**/FndsLst_ScrollUp'),
gui.find('**/FndsLst_ScrollDN'),
gui.find('**/FndsLst_ScrollUp_Rllvr'),
gui.find('**/FndsLst_ScrollUp')), decButton_relief=None, decButton_pos=(0.0, 0.0, 0.117), decButton_image3_color=Vec4(0.6, 0.6, 0.6, 0.6), itemFrame_pos=(-0.17, 0.0, 0.06), itemFrame_relief=None, numItemsVisible=8, items=[])
clipper = PlaneNode('clipper')
clipper.setPlane(Plane(Vec3(-1, 0, 0), Point3(0.2, 0, 0)))
clipNP = self.scrollList.attachNewNode(clipper)
self.scrollList.setClipPlane(clipNP)
self.close = DirectButton(parent=self, relief=None, image=(auxGui.find('**/CloseBtn_UP'), auxGui.find('**/CloseBtn_DN'), auxGui.find('**/CloseBtn_Rllvr')), pos=(0.01, 0, -0.38), command=self.__close)
self.left = DirectButton(parent=self, relief=None, image=(gui.find('**/Horiz_Arrow_UP'),
gui.find('**/Horiz_Arrow_DN'),
gui.find('**/Horiz_Arrow_Rllvr'),
gui.find('**/Horiz_Arrow_UP')), image3_color=Vec4(0.6, 0.6, 0.6, 0.6), pos=(-0.15, 0.0, -0.38), scale=(-1.0, 1.0, 1.0), command=self.__left)
self.right = DirectButton(parent=self, relief=None, image=(gui.find('**/Horiz_Arrow_UP'),
gui.find('**/Horiz_Arrow_DN'),
gui.find('**/Horiz_Arrow_Rllvr'),
gui.find('**/Horiz_Arrow_UP')), image3_color=Vec4(0.6, 0.6, 0.6, 0.6), pos=(0.17, 0, -0.38), command=self.__right)
self.newFriend = DirectButton(parent=self, relief=None, pos=(-0.14, 0.0, 0.14), image=(auxGui.find('**/Frnds_Btn_UP'), auxGui.find('**/Frnds_Btn_DN'), auxGui.find('**/Frnds_Btn_RLVR')), text=('', TTLocalizer.FriendsListPanelNewFriend, TTLocalizer.FriendsListPanelNewFriend), text_scale=TTLocalizer.FLPnewFriend, text_fg=(0, 0, 0, 1), text_bg=(1, 1, 1, 1), text_pos=(0.1, -0.085), textMayChange=0, command=self.__newFriend)
self.secrets = DirectButton(parent=self, relief=None, pos=TTLocalizer.FLPsecretsPos, image=(auxGui.find('**/ChtBx_ChtBtn_UP'), auxGui.find('**/ChtBx_ChtBtn_DN'), auxGui.find('**/ChtBx_ChtBtn_RLVR')), text=('',
TTLocalizer.FriendsListPanelSecrets,
TTLocalizer.FriendsListPanelSecrets,
''), text_scale=TTLocalizer.FLPsecrets, text_fg=(0, 0, 0, 1), text_bg=(1, 1, 1, 1), text_pos=(-0.04, -0.085), textMayChange=0, command=self.__secrets)
gui.removeNode()
auxGui.removeNode()
return
def unload(self):
if self.isLoaded == 0:
return None
self.isLoaded = 0
self.exit()
del self.title
del self.scrollList
del self.close
del self.left
del self.right
del self.friends
DirectFrame.destroy(self)
return None
def makeFriendButton(self, friendTuple, colorChoice = None, bold = 0):
playerName = None
toonName = None
if len(friendTuple) == 2:
avId, flags = friendTuple
playerId = None
showType = 0
elif len(friendTuple) == 3:
avId, flags, playerId = friendTuple
showType = 0
elif len(friendTuple) == 4:
avId, flags, playerId, showType = friendTuple
command = self.__choseFriend
playerName = None
if playerId:
playerInfo = base.cr.playerFriendsManager.playerId2Info.get(playerId, None)
if playerInfo:
playerName = playerInfo.playerName
toonName = None
hasManager = hasattr(base.cr, 'playerFriendsManager')
handle = base.cr.identifyFriend(avId)
if not handle and hasManager:
handle = base.cr.playerFriendsManager.getAvHandleFromId(avId)
if handle:
toonName = handle.getName()
if showType == 1 and playerId:
if not playerName:
return
print 'ABORTING!!!'
friendName = playerName
rolloverName = toonName
else:
if not toonName:
base.cr.fillUpFriendsMap()
return
friendName = toonName
if playerName:
rolloverName = playerName
else:
rolloverName = 'Unknown'
if playerId:
command = self.__chosePlayerFriend
thing = playerId
else:
thing = avId
fg = ToontownGlobals.ColorNoChat
if flags & ToontownGlobals.FriendChat:
fg = ToontownGlobals.ColorAvatar
if playerId:
fg = ToontownGlobals.ColorPlayer
if colorChoice:
fg = colorChoice
fontChoice = ToontownGlobals.getToonFont()
fontScale = 0.04
bg = None
if colorChoice and bold:
fontScale = 0.04
colorS = 0.7
bg = (colorChoice[0] * colorS,
colorChoice[1] * colorS,
colorChoice[2] * colorS,
colorChoice[3])
db = DirectButton(relief=None, text=friendName, text_scale=fontScale, text_align=TextNode.ALeft, text_fg=fg, text_shadow=bg, text1_bg=self.textDownColor, text2_bg=self.textRolloverColor, text3_fg=self.textDisabledColor, text_font=fontChoice, textMayChange=0, command=command, extraArgs=[thing, showType])
if playerId:
accountName = DirectLabel(parent=db, pos=Vec3(-0.02, 0, 0), text=rolloverName, text_fg=(0, 0, 0, 1), text_bg=(1, 1, 1, 1), text_pos=(0, 0), text_scale=0.045, text_align=TextNode.ARight)
accountName.reparentTo(db.stateNodePath[2])
return db
def enter(self):
if self.isEntered == 1:
return None
self.isEntered = 1
if self.isLoaded == 0:
self.load()
base.localAvatar.obscureFriendsListButton(1)
if ToonAvatarPanel.ToonAvatarPanel.currentAvatarPanel:
ToonAvatarPanel.ToonAvatarPanel.currentAvatarPanel.cleanup()
ToonAvatarPanel.ToonAvatarPanel.currentAvatarPanel = None
self.__updateScrollList()
self.__updateTitle()
self.__updateArrows()
self.show()
self.accept('friendOnline', self.__friendOnline)
self.accept('friendPlayers', self.__friendPlayers)
self.accept('friendOffline', self.__friendOffline)
self.accept('friendsListChanged', self.__friendsListChanged)
self.accept('ignoreListChanged', self.__ignoreListChanged)
self.accept('friendsMapComplete', self.__friendsListChanged)
self.accept(OTPGlobals.PlayerFriendAddEvent, self.__friendsListChanged)
self.accept(OTPGlobals.PlayerFriendUpdateEvent, self.__friendsListChanged)
return
def exit(self):
if self.isEntered == 0:
return None
self.isEntered = 0
self.listScrollIndex[self.panelType] = self.scrollList.index
self.hide()
base.cr.cleanPetsFromFriendsMap()
self.ignore('friendOnline')
self.ignore('friendOffline')
self.ignore('friendsListChanged')
self.ignore('ignoreListChanged')
self.ignore('friendsMapComplete')
self.ignore(OTPGlobals.PlayerFriendAddEvent)
self.ignore(OTPGlobals.PlayerFriendUpdateEvent)
base.localAvatar.obscureFriendsListButton(-1)
messenger.send(self.doneEvent)
return None
def __close(self):
messenger.send('wakeup')
self.exit()
def __left(self):
messenger.send('wakeup')
self.listScrollIndex[self.panelType] = self.scrollList.index
if self.panelType > self.leftmostPanel:
self.panelType -= 1
self.__updateScrollList()
self.__updateTitle()
self.__updateArrows()
def __right(self):
messenger.send('wakeup')
self.listScrollIndex[self.panelType] = self.scrollList.index
if self.panelType < self.rightmostPanel:
self.panelType += 1
self.__updateScrollList()
self.__updateTitle()
self.__updateArrows()
def __secrets(self):
messenger.send('wakeup')
ToontownFriendSecret.showFriendSecret(ToontownFriendSecret.AvatarSecret)
def __newFriend(self):
messenger.send('wakeup')
messenger.send('friendAvatar', [None, None, None])
return
def __choseFriend(self, friendId, showType = 0):
messenger.send('wakeup')
hasManager = hasattr(base.cr, 'playerFriendsManager')
handle = base.cr.identifyFriend(friendId)
if not handle and hasManager:
handle = base.cr.playerFriendsManager.getAvHandleFromId(friendId)
if handle != None:
self.notify.info("Clicked on name in friend's list. doId = %s" % handle.doId)
messenger.send('clickedNametag', [handle])
return
def __chosePlayerFriend(self, friendId, showType = 1):
messenger.send('wakeup')
hasManager = hasattr(base.cr, 'playerFriendsManager')
handle = None
playerFriendInfo = base.cr.playerFriendsManager.playerId2Info.get(friendId)
handle = base.cr.identifyFriend(playerFriendInfo.avatarId)
if not handle and hasManager:
handle = base.cr.playerFriendsManager.getAvHandleFromId(playerFriendInfo.avatarId)
if playerFriendInfo != None:
self.notify.info("Clicked on name in player friend's list. Id = %s" % friendId)
messenger.send('clickedNametagPlayer', [handle, friendId, showType])
return
def __updateScrollList(self):
newFriends = []
petFriends = []
freeChatOneRef = []
speedChatOneRef = []
freeChatDouble = []
speedChatDouble = []
offlineFriends = []
if self.panelType == FLPPlayers:
playerFriendList = base.cr.playerFriendsManager.playerFriendsList
for playerFriendId in playerFriendList:
if base.cr.playerFriendsManager.playerId2Info.has_key(playerFriendId):
playerFriendInfo = base.cr.playerFriendsManager.playerId2Info.get(playerFriendId)
if playerFriendInfo.onlineYesNo:
if playerFriendInfo.understandableYesNo:
if playerFriendInfo.avatarId:
freeChatDouble.insert(0, (playerFriendInfo.avatarId,
0,
playerFriendId,
1))
else:
freeChatOneRef.insert(0, (0,
0,
playerFriendId,
1))
elif playerFriendInfo.avatarId:
speedChatDouble.insert(0, (playerFriendInfo.avatarId,
0,
playerFriendId,
1))
else:
speedChatOneRef.insert(0, (0,
0,
playerFriendId,
1))
elif playerFriendInfo.understandableYesNo:
freeChatOneRef.insert(0, (0,
0,
playerFriendId,
1))
else:
speedChatOneRef.insert(0, (0,
0,
playerFriendId,
1))
if self.panelType == FLPOnlinePlayers:
playerFriendList = base.cr.playerFriendsManager.playerFriendsList
for playerFriendId in playerFriendList:
if base.cr.playerFriendsManager.playerId2Info.has_key(playerFriendId):
playerFriendInfo = base.cr.playerFriendsManager.playerId2Info.get(playerFriendId)
if playerFriendInfo.onlineYesNo:
if playerFriendInfo.understandableYesNo:
if playerFriendInfo.avatarId:
freeChatDouble.insert(0, (playerFriendInfo.avatarId,
0,
playerFriendId,
1))
else:
freeChatOneRef.insert(0, (0,
0,
playerFriendId,
1))
elif playerFriendInfo.avatarId:
speedChatDouble.insert(0, (playerFriendInfo.avatarId,
0,
playerFriendId,
1))
else:
speedChatOneRef.insert(0, (0,
0,
playerFriendId,
1))
if self.panelType == FLPAll:
if base.friendMode == 0:
for friendPair in base.localAvatar.friendsList:
playerId = 0
if hasattr(base.cr, 'playerFriendsManager'):
playerId = base.cr.playerFriendsManager.findPlayerIdFromAvId(friendPair[0])
if playerId:
if friendPair[1] & ToontownGlobals.FriendChat:
freeChatDouble.insert(0, (friendPair[0],
friendPair[1],
playerId,
0))
else:
speedChatDouble.insert(0, (friendPair[0],
friendPair[1],
playerId,
0))
elif base.cr.isFriendOnline(friendPair[0]):
if friendPair[1] & ToontownGlobals.FriendChat:
freeChatOneRef.insert(0, (friendPair[0],
friendPair[1],
0,
0))
else:
speedChatOneRef.insert(0, (friendPair[0],
friendPair[1],
0,
0))
elif friendPair[1] & ToontownGlobals.FriendChat:
freeChatOneRef.insert(0, (friendPair[0],
friendPair[1],
0,
0))
else:
speedChatOneRef.insert(0, (friendPair[0],
friendPair[1],
0,
0))
else:
offlineFriends.append((friendPair[0],
friendPair[1],
playerId,
0))
if hasattr(base.cr, 'playerFriendsManager'):
for avatarId in base.cr.playerFriendsManager.getAllOnlinePlayerAvatars():
playerId = base.cr.playerFriendsManager.findPlayerIdFromAvId(avatarId)
playerFriendInfo = base.cr.playerFriendsManager.playerId2Info.get(playerId)
if not base.cr.playerFriendsManager.askAvatarKnownElseWhere(avatarId):
if playerFriendInfo.understandableYesNo:
freeChatDouble.insert(0, (avatarId,
0,
playerId,
0))
else:
speedChatDouble.insert(0, (avatarId,
0,
playerId,
0))
elif base.friendMode == 1:
for friendId in base.cr.avatarFriendsManager.avatarFriendsList:
playerId = base.cr.playerFriendsManager.findPlayerIdFromAvId(friendId)
newFriends.append((friendId,
0,
playerId,
0))
if self.panelType == FLPOnline:
if base.friendMode == 0:
for friendPair in base.localAvatar.friendsList:
if hasattr(base.cr, 'playerFriendsManager') and base.cr.isFriendOnline(friendPair[0]):
playerId = base.cr.playerFriendsManager.findPlayerIdFromAvId(friendPair[0])
if playerId:
if friendPair[1] & ToontownGlobals.FriendChat:
freeChatDouble.insert(0, (friendPair[0],
friendPair[1],
playerId,
0))
else:
speedChatDouble.insert(0, (friendPair[0],
friendPair[1],
playerId,
0))
elif friendPair[1] & ToontownGlobals.FriendChat:
freeChatOneRef.insert(0, (friendPair[0],
friendPair[1],
0,
0))
else:
speedChatOneRef.insert(0, (friendPair[0],
friendPair[1],
0,
0))
elif base.cr.isFriendOnline(friendPair[0]):
offlineFriends.append((friendPair[0],
friendPair[1],
0,
0))
if hasattr(base.cr, 'playerFriendsManager'):
for avatarId in base.cr.playerFriendsManager.getAllOnlinePlayerAvatars():
playerId = base.cr.playerFriendsManager.findPlayerIdFromAvId(avatarId)
playerFriendInfo = base.cr.playerFriendsManager.playerId2Info.get(playerId)
if not base.cr.playerFriendsManager.askAvatarKnownElseWhere(avatarId):
if playerFriendInfo.understandableYesNo:
freeChatDouble.insert(0, (avatarId,
0,
playerId,
0))
else:
speedChatDouble.insert(0, (avatarId,
0,
playerId,
0))
elif base.friendMode == 1:
for friendId in base.cr.avatarFriendsManager.avatarFriendsList:
friendInfo = base.cr.avatarFriendsManager.avatarId2Info[friendId]
playerId = base.cr.playerFriendsManager.findPlayerIdFromAvId(friendPair[0])
if friendInfo.onlineYesNo:
newFriends.insert(0, (friendId,
0,
playerId,
0))
if self.panelType == FLPPets:
for objId, obj in base.cr.doId2do.items():
from toontown.pets import DistributedPet
if isinstance(obj, DistributedPet.DistributedPet):
friendPair = (objId, 0)
petFriends.append(friendPair)
if self.panelType == FLPEnemies:
for ignored in base.localAvatar.ignoreList:
newFriends.append((ignored, 0))
if self.panelType == FLPAll or self.panelType == FLPOnline:
if base.wantPets and base.localAvatar.hasPet():
petFriends.insert(0, (base.localAvatar.getPetId(), 0))
for friendPair in self.friends.keys():
friendButton = self.friends[friendPair]
self.scrollList.removeItem(friendButton, refresh=0)
friendButton.destroy()
del self.friends[friendPair]
newFriends.sort(compareFriends)
petFriends.sort(compareFriends)
freeChatOneRef.sort(compareFriends)
speedChatOneRef.sort(compareFriends)
freeChatDouble.sort(compareFriends)
speedChatDouble.sort(compareFriends)
offlineFriends.sort(compareFriends)
for friendPair in newFriends:
if not self.friends.has_key(friendPair):
friendButton = self.makeFriendButton(friendPair)
if friendButton:
self.scrollList.addItem(friendButton, refresh=0)
self.friends[friendPair] = friendButton
for friendPair in petFriends:
if not self.friends.has_key(friendPair):
friendButton = self.makeFriendButton(friendPair, ToontownGlobals.ColorNoChat, 0)
if friendButton:
self.scrollList.addItem(friendButton, refresh=0)
self.friends[friendPair] = friendButton
for friendPair in freeChatDouble:
if not self.friends.has_key(friendPair):
friendButton = self.makeFriendButton(friendPair, ToontownGlobals.ColorFreeChat, 1)
if friendButton:
self.scrollList.addItem(friendButton, refresh=0)
self.friends[friendPair] = friendButton
for friendPair in freeChatOneRef:
if not self.friends.has_key(friendPair):
friendButton = self.makeFriendButton(friendPair, ToontownGlobals.ColorFreeChat, 0)
if friendButton:
self.scrollList.addItem(friendButton, refresh=0)
self.friends[friendPair] = friendButton
for friendPair in speedChatDouble:
if not self.friends.has_key(friendPair):
friendButton = self.makeFriendButton(friendPair, ToontownGlobals.ColorSpeedChat, 1)
if friendButton:
self.scrollList.addItem(friendButton, refresh=0)
self.friends[friendPair] = friendButton
for friendPair in speedChatOneRef:
if not self.friends.has_key(friendPair):
friendButton = self.makeFriendButton(friendPair, ToontownGlobals.ColorSpeedChat, 0)
if friendButton:
self.scrollList.addItem(friendButton, refresh=0)
self.friends[friendPair] = friendButton
for friendPair in offlineFriends:
if not self.friends.has_key(friendPair):
friendButton = self.makeFriendButton(friendPair, ToontownGlobals.ColorNoChat, 0)
if friendButton:
self.scrollList.addItem(friendButton, refresh=0)
self.friends[friendPair] = friendButton
self.scrollList.index = self.listScrollIndex[self.panelType]
self.scrollList.refresh()
def __updateTitle(self):
if self.panelType == FLPOnline:
self.title['text'] = TTLocalizer.FriendsListPanelOnlineFriends
elif self.panelType == FLPAll:
self.title['text'] = TTLocalizer.FriendsListPanelAllFriends
elif self.panelType == FLPPets:
self.title['text'] = TTLocalizer.FriendsListPanelPets
elif self.panelType == FLPPlayers:
self.title['text'] = TTLocalizer.FriendsListPanelPlayers
elif self.panelType == FLPOnlinePlayers:
self.title['text'] = TTLocalizer.FriendsListPanelOnlinePlayers
else:
self.title['text'] = TTLocalizer.FriendsListPanelIgnoredFriends
self.title.resetFrameSize()
def __updateArrows(self):
if self.panelType == self.leftmostPanel:
self.left['state'] = 'inactive'
else:
self.left['state'] = 'normal'
if self.panelType == self.rightmostPanel:
self.right['state'] = 'inactive'
else:
self.right['state'] = 'normal'
def __friendOnline(self, doId, commonChatFlags, whitelistChatFlags):
if self.panelType == FLPOnline:
self.__updateScrollList()
def __friendOffline(self, doId):
if self.panelType == FLPOnline:
self.__updateScrollList()
def __friendPlayers(self, doId):
if self.panelType == FLPPlayers:
self.__updateScrollList()
def __friendsListChanged(self, arg1 = None, arg2 = None):
if self.panelType != FLPEnemies:
self.__updateScrollList()
def __ignoreListChanged(self):
if self.panelType == FLPEnemies:
self.__updateScrollList()
| |
from __future__ import print_function, absolute_import, division
from future.builtins import *
from future import standard_library
standard_library.install_aliases()
# Copyright 2017 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import string
import moldesign as mdt
from ..compute import packages
from .. import units as u
from ..utils import exports
def read_stream(filelike, format, name=None):
    """ Read a molecule from a file-like object

    Note:
        Currently only reads the first conformation in a file

    Args:
        filelike: a file-like object to read a file from
        format (str): File format: pdb, sdf, mol2, bbll, etc.
        name (str): name to assign to molecule

    Returns:
        moldesign.Molecule: parsed result
    """
    contents = str(filelike.read())  # openbabel chokes on unicode
    return read_string(contents, format, name=name)
@packages.openbabel.runsremotely
def read_string(molstring, format, name=None):
    """ Parse a molecule from a string of file contents.

    Note:
        Currently only reads the first conformation in a file

    Args:
        molstring (str): string containing file contents
        format (str): File format: pdb, sdf, mol2, bbll, etc.
        name (str): name to assign to molecule

    Returns:
        moldesign.Molecule: parsed result
    """
    import pybel as pb

    parsed = pb.readstring(format, molstring)
    return pybel_to_mol(parsed, name=name)
@packages.openbabel.runsremotely
def write_string(mol, format):
    """ Serialize the passed molecule to a string in the given file format.

    Args:
        mol (moldesign.Molecule): molecule to write
        format (str): File format: pdb, sdf, mol2, bbll, etc.

    Returns:
        str: contents of the file

    References:
        https://openbabel.org/docs/dev/FileFormats/Overview.html
    """
    pbmol = mol_to_pybel(mol)
    outstr = pbmol.write(format=format)
    if format == 'smi':  # TODO: always kekulize, never aromatic
        outstr = outstr.strip()
    return str(outstr)
@packages.openbabel.runsremotely
def guess_bond_orders(mol):
    """Use OpenBabel to guess bond orders using geometry and functional group templates.

    Args:
        mol (moldesign.Molecule): Molecule to perceive the bonds of

    Returns:
        moldesign.Molecule: New molecule with assigned bonds
    """
    # TODO: pH, formal charges
    pbmol = mol_to_pybel(mol)
    pbmol.OBMol.PerceiveBondOrders()
    return pybel_to_mol(pbmol)
@packages.openbabel.runsremotely
def add_hydrogen(mol):
    """Add hydrogens to saturate atomic valences.

    Args:
        mol (moldesign.Molecule): Molecule to saturate

    Returns:
        moldesign.Molecule: New molecule with all valences saturated
    """
    pbmol = mol_to_pybel(mol)
    pbmol.OBMol.AddHydrogens()
    saturated = pybel_to_mol(pbmol, reorder_atoms_by_residue=True)
    # Give the newly added hydrogens distinct names.
    mdt.helpers.assign_unique_hydrogen_names(saturated)
    return saturated
@exports
def mol_to_pybel(mdtmol):
    """ Translate a moldesign molecule object into a pybel molecule object.

    Note:
        The focus is on translating topology and biomolecular structure -
        we don't translate any metadata.

    Args:
        mdtmol (moldesign.Molecule): molecule to translate

    Returns:
        pybel.Molecule: translated molecule
    """
    import openbabel as ob
    import pybel as pb
    obmol = ob.OBMol()
    obmol.BeginModify()
    atommap = {}  # mdt.Atom -> OBAtom
    resmap = {}  # mdt.Residue -> OBResidue
    for atom in mdtmol.atoms:
        obatom = obmol.NewAtom()
        obatom.SetAtomicNum(atom.atnum)
        atommap[atom] = obatom
        pos = atom.position.value_in(u.angstrom)
        obatom.SetVector(*pos)
        # NOTE(review): if atom.residue is ever None, the else branch below
        # raises KeyError on resmap[None] -- presumably every mdt atom has a
        # residue; confirm.
        if atom.residue and atom.residue not in resmap:
            obres = obmol.NewResidue()
            resmap[atom.residue] = obres
            # Fall back to chain 'Z' / residue name 'UNL' when pdb metadata
            # is missing.
            obres.SetChain(str(atom.chain.pdbname)[0] if atom.chain.pdbname else 'Z')
            obres.SetName(str(atom.residue.pdbname) if atom.residue.pdbname else 'UNL')
            obres.SetNum(str(atom.residue.pdbindex) if atom.residue.pdbindex else 0)
        else:
            obres = resmap[atom.residue]
        obres.AddAtom(obatom)
        obres.SetHetAtom(obatom, not atom.residue.is_standard_residue)
        obres.SetAtomID(obatom, str(atom.name))
        obres.SetSerialNum(obatom,
                           mdt.utils.if_not_none(atom.pdbindex, atom.index+1))
    # Add each bond exactly once: only when a1's index exceeds a2's.
    for atom in mdtmol.bond_graph:
        a1 = atommap[atom]
        for nbr, order in mdtmol.bond_graph[atom].items():
            a2 = atommap[nbr]
            if a1.GetIdx() > a2.GetIdx():
                obmol.AddBond(a1.GetIdx(), a2.GetIdx(), order)
    obmol.EndModify()
    pbmol = pb.Molecule(obmol)
    # Formal charges are applied after EndModify -- presumably because
    # EndModify can recompute/reset them; confirm against Open Babel docs.
    for atom in atommap:
        idx = atommap[atom].GetIdx()
        obatom = obmol.GetAtom(idx)
        obatom.SetFormalCharge(int(atom.formal_charge.value_in(u.q_e)))
    return pbmol
@exports
def pybel_to_mol(pbmol,
                 reorder_atoms_by_residue=False,
                 primary_structure=True,
                 **kwargs):
    """ Translate a pybel molecule object into a moldesign object.

    Note:
        The focus is on translating topology and biomolecular structure -
        we don't translate any metadata.

    Args:
        pbmol (pybel.Molecule): molecule to translate
        reorder_atoms_by_residue (bool): change atom order so that all atoms in a residue are stored
            contiguously
        primary_structure (bool): translate primary structure data as well as atomic data
        **kwargs (dict): keyword arguments to moldesign.Molecule __init__ method

    Returns:
        moldesign.Molecule: translated molecule
    """
    newatom_map = {}  # OBAtom index -> mdt.Atom
    newresidues = {}  # OBResidue index -> mdt.Residue
    newchains = {}  # OB chain number -> mdt.Chain
    newatoms = mdt.AtomList([])
    # Fallback single-letter names for chains with blank/unprintable ids.
    backup_chain_names = list(string.ascii_uppercase)
    for pybatom in pbmol.atoms:
        obres = pybatom.OBAtom.GetResidue()
        name = obres.GetAtomID(pybatom.OBAtom).strip()
        if pybatom.atomicnum == 67:
            # Openbabel sometimes parses hydrogen names as holmium (Z=67);
            # treat those atoms as hydrogen.
            print(("WARNING: openbabel parsed atom serial %d (name:%s) as Holmium; "
                   "correcting to hydrogen. ") % (pybatom.OBAtom.GetIdx(), name))
            atnum = 1
        elif pybatom.atomicnum == 0:
            # Element unknown: guess from the first letter of the atom name.
            print("WARNING: openbabel failed to parse atom serial %d (name:%s); guessing %s. " % (
                pybatom.OBAtom.GetIdx(), name, name[0]))
            atnum = mdt.data.ATOMIC_NUMBERS[name[0]]
        else:
            atnum = pybatom.atomicnum
        mdtatom = mdt.Atom(atnum=atnum, name=name,
                           formal_charge=pybatom.formalcharge * u.q_e,
                           pdbname=name, pdbindex=pybatom.OBAtom.GetIdx())
        newatom_map[pybatom.OBAtom.GetIdx()] = mdtatom
        mdtatom.position = pybatom.coords * u.angstrom
        if primary_structure:
            obres = pybatom.OBAtom.GetResidue()
            resname = obres.GetName()
            residx = obres.GetIdx()
            chain_id = obres.GetChain()
            chain_id_num = obres.GetChainNum()
            if chain_id_num not in newchains:
                # create new chain
                if not mdt.utils.is_printable(chain_id.strip()) or not chain_id.strip():
                    chain_id = backup_chain_names.pop()
                    print('WARNING: assigned name %s to unnamed chain object @ %s' % (
                        chain_id, hex(chain_id_num)))
                chn = mdt.Chain(pdbname=str(chain_id))
                newchains[chain_id_num] = chn
            else:
                chn = newchains[chain_id_num]
            if residx not in newresidues:
                # Create new residue
                pdb_idx = obres.GetNum()
                res = mdt.Residue(pdbname=resname,
                                  pdbindex=pdb_idx)
                newresidues[residx] = res
                chn.add(res)
                res.chain = chn
            else:
                res = newresidues[residx]
            res.add(mdtatom)
        newatoms.append(mdtatom)
    # Translate bonds using the OBAtom index map built above.
    for ibond in range(pbmol.OBMol.NumBonds()):
        obbond = pbmol.OBMol.GetBond(ibond)
        a1 = newatom_map[obbond.GetBeginAtomIdx()]
        a2 = newatom_map[obbond.GetEndAtomIdx()]
        order = obbond.GetBondOrder()
        bond = mdt.Bond(a1, a2)
        bond.order = order
    if reorder_atoms_by_residue and primary_structure:
        # Stable re-sort so that each residue's atoms become contiguous.
        resorder = {}
        for atom in newatoms:
            resorder.setdefault(atom.residue, len(resorder))
        newatoms.sort(key=lambda a: resorder[a.residue])
    return mdt.Molecule(newatoms, **kwargs)
def from_smiles(smi, name=None):
    """ Translate a smiles string to a 3D structure.

    This method uses OpenBabel to generate a plausible 3D conformation of the 2D SMILES topology.
    We only use the first result from the conformation generator.

    Args:
        smi (str): smiles string
        name (str): name to assign to molecule (default - the smiles string)

    Returns:
        moldesign.Molecule: the translated molecule
    """
    return _string_to_3d_mol(smi, 'smi', name)
def from_inchi(inchi, name=None):
    """ Translate an InChI string to a 3D structure.

    This method uses OpenBabel to generate a plausible 3D conformation of the molecule's
    2D topology. We only use the first result from the conformation generator.

    Args:
        inchi (str): InChI string
        name (str): name to assign to molecule (default - the InChI string)

    Returns:
        moldesign.Molecule: the translated molecule
    """
    # Fix: the docstring previously documented a nonexistent ``smi`` SMILES
    # parameter, copied from from_smiles; the parameter is ``inchi``.
    return _string_to_3d_mol(inchi, 'inchi', name)
@packages.openbabel.runsremotely
def _string_to_3d_mol(s, fmt, name):
    """Parse *s* in format *fmt*, saturate with hydrogens, and embed in 3D."""
    import pybel as pb

    if name is None:
        name = s
    pbmol = pb.readstring(fmt, str(s))  # cast to str: avoid passing unicode
    pbmol.addh()
    pbmol.make3D()
    mol = pybel_to_mol(pbmol, name=name, primary_structure=False)
    mdt.helpers.atom_name_check(mol)
    return mol
| |
# Copyright 2014-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""CommandCursor class to iterate over command results."""
from collections import deque
from bson import _convert_raw_document_lists_to_streams
from bson.py3compat import integer_types
from pymongo.cursor import _SocketManager, _CURSOR_CLOSED_ERRORS
from pymongo.errors import (ConnectionFailure,
InvalidOperation,
OperationFailure)
from pymongo.message import (_CursorAddress,
_GetMore,
_RawBatchGetMore)
from pymongo.response import PinnedResponse
class CommandCursor(object):
    """A cursor / iterator over command cursors."""
    # getMore message factory -- presumably overridden by a raw-batch
    # subclass (note the _RawBatchGetMore import above); confirm.
    _getmore_class = _GetMore
    def __init__(self, collection, cursor_info, address, retrieved=0,
                 batch_size=0, max_await_time_ms=None, session=None,
                 explicit_session=False):
        """Create a new command cursor.

        The parameter 'retrieved' is unused.
        """
        self.__sock_mgr = None
        self.__collection = collection
        self.__id = cursor_info['id']
        self.__data = deque(cursor_info['firstBatch'])
        self.__postbatchresumetoken = cursor_info.get('postBatchResumeToken')
        self.__address = address
        self.__batch_size = batch_size
        self.__max_await_time_ms = max_await_time_ms
        self.__session = session
        self.__explicit_session = explicit_session
        # A cursor id of 0 means the first batch contained the whole result.
        self.__killed = (self.__id == 0)
        if self.__killed:
            # Nothing more to fetch: an implicit session can end immediately.
            self.__end_session(True)
        if "ns" in cursor_info:
            self.__ns = cursor_info["ns"]
        else:
            self.__ns = collection.full_name
        # Re-validates and re-assigns __batch_size (raises on bad input).
        self.batch_size(batch_size)
        # Validation is deliberately last: attributes are already set, so
        # __del__ -> __die still works if we raise here.
        if (not isinstance(max_await_time_ms, integer_types)
                and max_await_time_ms is not None):
            raise TypeError("max_await_time_ms must be an integer or None")
    def __del__(self):
        # Best-effort cleanup if the cursor is garbage-collected without
        # being closed explicitly.
        self.__die()
    def __die(self, synchronous=False):
        """Closes this cursor.
        """
        # Idempotent: only the first call with a live cursor id triggers a
        # server-side killCursors.
        already_killed = self.__killed
        self.__killed = True
        if self.__id and not already_killed:
            cursor_id = self.__id
            address = _CursorAddress(
                self.__address, self.__ns)
        else:
            # Skip killCursors.
            cursor_id = 0
            address = None
        # The client cleans up the server cursor, the pinned connection and
        # any implicit session in one place.
        self.__collection.database.client._cleanup_cursor(
            synchronous,
            cursor_id,
            address,
            self.__sock_mgr,
            self.__session,
            self.__explicit_session)
        if not self.__explicit_session:
            self.__session = None
        self.__sock_mgr = None
def __end_session(self, synchronous):
if self.__session and not self.__explicit_session:
self.__session._end_session(lock=synchronous)
self.__session = None
def close(self):
"""Explicitly close / kill this cursor.
"""
self.__die(True)
def batch_size(self, batch_size):
"""Limits the number of documents returned in one batch. Each batch
requires a round trip to the server. It can be adjusted to optimize
performance and limit data transfer.
.. note:: batch_size can not override MongoDB's internal limits on the
amount of data it will return to the client in a single batch (i.e
if you set batch size to 1,000,000,000, MongoDB will currently only
return 4-16MB of results per batch).
Raises :exc:`TypeError` if `batch_size` is not an integer.
Raises :exc:`ValueError` if `batch_size` is less than ``0``.
:Parameters:
- `batch_size`: The size of each batch of results requested.
"""
if not isinstance(batch_size, integer_types):
raise TypeError("batch_size must be an integer")
if batch_size < 0:
raise ValueError("batch_size must be >= 0")
self.__batch_size = batch_size == 1 and 2 or batch_size
return self
def _has_next(self):
"""Returns `True` if the cursor has documents remaining from the
previous batch."""
return len(self.__data) > 0
@property
def _post_batch_resume_token(self):
"""Retrieve the postBatchResumeToken from the response to a
changeStream aggregate or getMore."""
return self.__postbatchresumetoken
    def _maybe_pin_connection(self, sock_info):
        # Pin this cursor to sock_info when the deployment requires all
        # getMores to use the same connection -- the exact condition is
        # delegated to client._should_pin_cursor; confirm its contract.
        client = self.__collection.database.client
        if not client._should_pin_cursor(self.__session):
            return
        if not self.__sock_mgr:
            sock_info.pin_cursor()
            sock_mgr = _SocketManager(sock_info, False)
            # Ensure the connection gets returned when the entire result is
            # returned in the first batch.
            if self.__id == 0:
                sock_mgr.close()
            else:
                self.__sock_mgr = sock_mgr
    def __send_message(self, operation):
        """Send a getmore message and handle the response.
        """
        client = self.__collection.database.client
        try:
            response = client._run_operation(
                operation, self._unpack_response, address=self.__address)
        except OperationFailure as exc:
            if exc.code in _CURSOR_CLOSED_ERRORS:
                # Don't send killCursors because the cursor is already closed.
                self.__killed = True
            # Return the session and pinned connection, if necessary.
            self.close()
            raise
        except ConnectionFailure:
            # Don't send killCursors because the cursor is already closed.
            self.__killed = True
            # Return the session and pinned connection, if necessary.
            self.close()
            raise
        except Exception:
            self.close()
            raise
        if isinstance(response, PinnedResponse):
            # Keep holding the pinned connection for subsequent getMores.
            if not self.__sock_mgr:
                self.__sock_mgr = _SocketManager(response.socket_info,
                                                 response.more_to_come)
        if response.from_command:
            # OP_MSG-style reply: documents live under the 'cursor' subdoc.
            cursor = response.docs[0]['cursor']
            documents = cursor['nextBatch']
            self.__postbatchresumetoken = cursor.get('postBatchResumeToken')
            self.__id = cursor['id']
        else:
            documents = response.docs
            self.__id = response.data.cursor_id
        if self.__id == 0:
            # Server exhausted the cursor; release resources now.
            self.close()
        self.__data = deque(documents)
def _unpack_response(self, response, cursor_id, codec_options,
user_fields=None, legacy_response=False):
return response.unpack_response(cursor_id, codec_options, user_fields,
legacy_response)
def _refresh(self):
    """Refreshes the cursor with more data from the server.

    Returns the length of self.__data after refresh. Will exit early if
    self.__data is already non-empty. Raises OperationFailure when the
    cursor cannot be refreshed due to an error on the query.
    """
    if len(self.__data) or self.__killed:
        return len(self.__data)
    if self.__id:  # Get More
        # Namespace is "db.collection"; split only on the first dot so
        # collection names containing dots survive.
        dbname, collname = self.__ns.split('.', 1)
        read_pref = self.__collection._read_preference_for(self.session)
        self.__send_message(
            self._getmore_class(dbname,
                                collname,
                                self.__batch_size,
                                self.__id,
                                self.__collection.codec_options,
                                read_pref,
                                self.__session,
                                self.__collection.database.client,
                                self.__max_await_time_ms,
                                self.__sock_mgr, False))
    else:  # Cursor id is zero nothing else to return
        self.__die(True)
    return len(self.__data)
@property
def alive(self):
    """Does this cursor have the potential to return more data?

    Even if :attr:`alive` is ``True``, :meth:`next` can raise
    :exc:`StopIteration`. Best to use a for loop::

        for doc in collection.aggregate(pipeline):
            print(doc)

    .. note:: :attr:`alive` can be True while iterating a cursor from
      a failed server. In this case :attr:`alive` will return False after
      :meth:`next` fails to retrieve the next batch of results from the
      server.
    """
    # Alive while documents remain buffered locally, or while the
    # server-side cursor has not been killed.
    if self.__data:
        return True
    return not self.__killed
@property
def cursor_id(self):
    """Returns the id of the cursor.

    0 indicates the server-side cursor is exhausted or closed.
    """
    return self.__id
@property
def address(self):
    """The (host, port) of the server used, or None.

    .. versionadded:: 3.0
    """
    return self.__address
@property
def session(self):
    """The cursor's :class:`~pymongo.client_session.ClientSession`, or None.

    Only sessions supplied explicitly by the user are exposed; otherwise
    this property is None.

    .. versionadded:: 3.6
    """
    return self.__session if self.__explicit_session else None
def __iter__(self):
    # A cursor is its own iterator.
    return self
def next(self):
    """Advance the cursor, blocking until a document can be returned.

    Raises StopIteration once the cursor is exhausted.
    """
    doc = None
    while doc is None:
        if not self.alive:
            raise StopIteration
        doc = self._try_next(True)
    return doc
__next__ = next
def _try_next(self, get_more_allowed):
    """Advance the cursor blocking for at most one getMore command.

    Returns the next buffered document, or None when nothing is
    available after (at most) one refresh attempt.
    """
    if get_more_allowed and not len(self.__data) and not self.__killed:
        self._refresh()
    if not len(self.__data):
        return None
    coll = self.__collection
    return coll.database._fix_outgoing(self.__data.popleft(), coll)
def __enter__(self):
    # Support use as a context manager for deterministic cleanup.
    return self

def __exit__(self, exc_type, exc_val, exc_tb):
    # Always close on exit; any exception continues to propagate.
    self.close()
class RawBatchCommandCursor(CommandCursor):
    """Command cursor that yields raw, undecoded batches of BSON bytes."""

    # Use the raw-batch variant of getMore so batches stay undecoded.
    _getmore_class = _RawBatchGetMore

    def __init__(self, collection, cursor_info, address, retrieved=0,
                 batch_size=0, max_await_time_ms=None, session=None,
                 explicit_session=False):
        """Create a new cursor / iterator over raw batches of BSON data.

        Should not be called directly by application developers -
        see :meth:`~pymongo.collection.Collection.aggregate_raw_batches`
        instead.

        .. mongodoc:: cursors
        """
        # Raw-batch cursors must be created with an empty first batch.
        assert not cursor_info.get('firstBatch')
        super(RawBatchCommandCursor, self).__init__(
            collection, cursor_info, address, retrieved, batch_size,
            max_await_time_ms, session, explicit_session)

    def _unpack_response(self, response, cursor_id, codec_options,
                         user_fields=None, legacy_response=False):
        # Keep documents as raw bytes rather than decoding to dicts.
        raw_response = response.raw_response(
            cursor_id, user_fields=user_fields)
        if not legacy_response:
            # OP_MSG returns firstBatch/nextBatch documents as a BSON array
            # Re-assemble the array of documents into a document stream
            _convert_raw_document_lists_to_streams(raw_response[0])
        return raw_response

    def __getitem__(self, index):
        # Raw batch cursors support iteration only, not indexing.
        raise InvalidOperation("Cannot call __getitem__ on RawBatchCursor")
| |
import functools
from flask import request
from prometheus_client import Counter, Gauge, Histogram, Summary
from prometheus_flask_exporter import PrometheusMetrics
import anchore_engine.configuration.localconfig
from anchore_engine.apis.authorization import auth_function_factory
from anchore_engine.subsys import logger
from anchore_engine.version import version
# Module-wide metrics state; set up by init_flask_metrics().
enabled = False
# PrometheusMetrics instance, or a disabled_flask_metrics stand-in when off.
flask_metrics = None
# NOTE(review): not referenced in the visible code; presumably the
# exporter's request-duration metric name -- confirm before removing.
flask_metric_name = "flask_http_request_duration_seconds"
# Registry of named metric objects created by the helper functions below.
metrics = {}
# class anchore_flask_track(object):
# def __init__(self, enabled, flask_metrics):
# self.enabled = enabled
# self.flask_metrics = flask_metrics
# def __call__(self, func):
# if self.enabled and self.flask_metrics:
# import anchore_engine.subsys.metrics
# timer = time.time()
# rc = func
# anchore_engine.subsys.metrics.histogram_observe("anchore_http_request_duration_seconds", time.time() - timer, path=request.path, method=request.method, status=httpcode)
# return(rc)
# else:
# return(func)
# class anchore_flask_track(object):
# def __init__(self):
# pass
# def __call__(self, func):
# from anchore_engine.subsys.metrics import flask_metrics, enabled
# if enabled:
# flask_metrics.do_not_track()
# with flask_metrics.histogram('anchore_http_request_duration_seconds', "", labels={'path': lambda: request.path, 'method': lambda: request.method, 'status': lambda respon#se: response[1]}).time():
# rc = func
## #@flask_metrics.do_not_track()
## #rc = None
## #with flask_metrics.histogram('anchore_http_request_duration_seconds', "", labels={'path': lambda: request.path, 'method': lambda: request.method, 'status': lambda resp#o#nse: response[1]}).time():
## # rc = func
# return(rc)
# else:
# return(func)
class disabled_flask_metrics(object):
    """No-op stand-in for PrometheusMetrics, used when metrics are disabled.

    Every decorator-producing method returns a transparent pass-through
    decorator, so call sites can decorate their handlers unconditionally.
    """

    def _call_nop(self):
        # Build a decorator that preserves the wrapped callable's behavior
        # and metadata but records nothing.
        def _decorator(fn):
            @functools.wraps(fn)
            def _wrapped(*args, **kwargs):
                return fn(*args, **kwargs)

            return _wrapped

        return _decorator

    def do_not_track(self):
        return self._call_nop()

    def counter(self, *args, **kwargs):
        return self._call_nop()

    def gauge(self, *args, **kwargs):
        return self._call_nop()

    def summary(self, *args, **kwargs):
        return self._call_nop()

    def histogram(self, *args, **kwargs):
        return self._call_nop()
def metrics_auth(path):
    """
    Build a function usable in flask before_request() calls that applies
    Anchore auth, but only to requests for the given path (e.g. the
    prometheus metrics endpoint).

    :param path: URL path that should require authentication
    :return: function suitable for flask_app.before_request()
    """
    auth_fn = auth_function_factory()

    def metrics_auth_fn():
        # Requests for any other path pass through untouched.
        return auth_fn() if request.path == path else None

    return metrics_auth_fn
def init_flask_metrics(flask_app, export_defaults=True, **kwargs):
    """Initialize prometheus metrics for the given flask app.

    Reads the 'metrics' section of the local anchore config to decide
    whether metrics (and auth on the metrics route) are enabled, then
    installs either a real PrometheusMetrics exporter or a no-op stand-in
    into the module-global `flask_metrics`.

    :param flask_app: flask application to instrument
    :param export_defaults: passed through to PrometheusMetrics
    :param kwargs: extra static labels for the anchore_service_info metric
    :return: True in all cases (config errors disable metrics, never raise)
    """
    global flask_metrics, enabled
    auth_enabled = True
    try:
        localconfig = anchore_engine.configuration.localconfig.get_config()
        metrics_config = localconfig.get("metrics", {})
        # Handle typo in config. enabled == enable
        enabled = bool(metrics_config.get("enable", False))
        if not enabled:
            enabled = bool(metrics_config.get("enabled", False))
        auth_enabled = not bool(metrics_config.get("auth_disabled", False))
    except Exception as err:
        logger.warn(
            "unable to determine if metrics are enabled - exception: " + str(err)
        )
        enabled = False
    if not enabled:
        # Disabled: install a no-op object so callers can still decorate
        # handlers without conditional logic.
        flask_metrics = disabled_flask_metrics()
        return True
    if not flask_metrics:
        flask_metrics = PrometheusMetrics(
            flask_app, export_defaults=export_defaults, group_by_endpoint=True
        )
        if auth_enabled:
            # Require auth on the metrics endpoint itself.
            flask_app.before_request(metrics_auth(flask_metrics.path))
        flask_metrics.info(
            "anchore_service_info",
            "Anchore Service Static Information",
            version=version,
            **kwargs
        )
    return True
def is_enabled():
    """Return True when metrics collection is enabled for this process."""
    # Read-only access; a `global` declaration is unnecessary for reads.
    return enabled
def get_flask_metrics_obj():
    """Return the module's flask metrics object, or None when disabled."""
    return flask_metrics if enabled else None
def get_summary_obj(name, description="", **kwargs):
    """Return the Summary metric registered under `name`, creating it on first use.

    :param name: prometheus metric name
    :param description: help text, used only when the metric is first created
    :param kwargs: label names for the metric (only the keys are used)
    :return: Summary object, or None when metrics are disabled or creation failed
    """
    global metrics, enabled
    if not enabled:
        return None
    ret = None
    try:
        if name not in metrics:
            metrics[name] = Summary(name, description, list(kwargs.keys()))
        ret = metrics[name]
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception, matching the other
        # helpers in this module.
        logger.warn("could not create/get named metric (" + str(name) + ")")
    return ret
def summary_observe(name, observation, description="", **kwargs):
    """Record an observation on the named Summary metric, creating it on first use.

    :param kwargs: label name/value pairs applied to the observation
    :return: True (errors are logged, never raised)
    """
    global metrics, enabled
    if not enabled:
        return True
    try:
        metric = metrics.get(name)
        if metric is None:
            metric = Summary(name, description, list(kwargs.keys()))
            metrics[name] = metric
        target = metric.labels(**kwargs) if kwargs else metric
        target.observe(observation)
    except Exception as err:
        logger.warn("adding metric failed - exception: " + str(err))
    return True
def histogram_observe(name, observation, description="", buckets=None, **kwargs):
    """Record an observation on the named Histogram metric, creating it on first use.

    :param name: prometheus metric name
    :param observation: value to record
    :param description: help text, used only when the metric is first created
    :param buckets: optional list of upper bucket bounds; an +Inf bound is
        appended automatically when missing
    :param kwargs: label name/value pairs applied to the observation
    :return: True (errors are logged, never raised)
    """
    global metrics, enabled
    if not enabled:
        return True
    try:
        if name not in metrics:
            if buckets:
                # Copy before appending: the previous implementation appended
                # float("inf") directly to the caller's list, mutating a
                # bucket definition that might be reused across calls. Also
                # skip the append when the caller already terminated the
                # buckets with +Inf.
                buckets = list(buckets)
                if buckets[-1] != float("inf"):
                    buckets.append(float("inf"))
                metrics[name] = Histogram(
                    name, description, list(kwargs.keys()), buckets=buckets
                )
            else:
                metrics[name] = Histogram(name, description, list(kwargs.keys()))
        if kwargs:
            metrics[name].labels(**kwargs).observe(observation)
        else:
            metrics[name].observe(observation)
    except Exception as err:
        logger.warn("adding metric failed - exception: " + str(err))
    return True
def gauge_set(name, observation, description="", **kwargs):
    """Set the named Gauge metric to the given value, creating it on first use.

    :param name: prometheus metric name
    :param observation: value to set
    :param description: help text, used only when the metric is first created
    :param kwargs: label name/value pairs
    :return: True (errors are logged, never raised)
    """
    # Declare both names for consistency with summary_observe and
    # histogram_observe (previously only `metrics` was declared here).
    global metrics, enabled
    if not enabled:
        return True
    try:
        if name not in metrics:
            metrics[name] = Gauge(name, description, list(kwargs.keys()))
        if kwargs:
            metrics[name].labels(**kwargs).set(observation)
        else:
            metrics[name].set(observation)
    except Exception as err:
        logger.warn("adding metric failed - exception: " + str(err))
    return True
def counter_inc(name, step=1, description="", **kwargs):
    """Increment the named Counter metric, creating it on first use.

    :param step: amount to add (default 1)
    :param kwargs: label name/value pairs applied to the increment
    :return: True (errors are logged, never raised)
    """
    global metrics
    if not enabled:
        return True
    try:
        metric = metrics.get(name)
        if metric is None:
            metric = Counter(name, description, list(kwargs.keys()))
            metrics[name] = metric
        target = metric.labels(**kwargs) if kwargs else metric
        target.inc(step)
    except Exception as err:
        logger.warn("adding metric failed - exception: " + str(err))
    return True
| |
import configparser
import unittest
import uuid
from pathlib import Path
from TM1py import Element, Hierarchy, Dimension
from TM1py.Objects import Cube
from TM1py.Objects import Rules
from TM1py.Services import TM1Service
from .Utils import skip_if_insufficient_version
class TestCubeService(unittest.TestCase):
    """Integration tests for TM1py's cube service against a live TM1 server.

    Requires a ``config.ini`` next to this file with a ``tm1srv01``
    connection section. Fixtures (three 1000-element dimensions and one
    cube) are rebuilt before every test and torn down afterwards.
    """

    # Populated in setUp from config.ini.
    tm1: TM1Service

    # Namespace prefix so test artifacts are easy to identify and clean up.
    prefix = "TM1py_Tests_Cube_"
    cube_name = prefix + "some_name"
    dimension_names = [
        prefix + "dimension1",
        prefix + "dimension2",
        prefix + "dimension3"]

    # NOTE(review): unittest calls setUp once per test on the instance;
    # declaring it a classmethod (instead of using setUpClass) is
    # unconventional and rebuilds all fixtures per test -- confirm intended.
    @classmethod
    def setUp(cls):
        # Connection to TM1
        cls.config = configparser.ConfigParser()
        cls.config.read(Path(__file__).parent.joinpath('config.ini'))
        cls.tm1 = TM1Service(**cls.config['tm1srv01'])

        # Create the three test dimensions, 1000 numeric elements each.
        for dimension_name in cls.dimension_names:
            elements = [Element('Element {}'.format(str(j)), 'Numeric') for j in range(1, 1001)]
            hierarchy = Hierarchy(dimension_name=dimension_name,
                                  name=dimension_name,
                                  elements=elements)
            dimension = Dimension(dimension_name, [hierarchy])
            if not cls.tm1.dimensions.exists(dimension.name):
                cls.tm1.dimensions.create(dimension)

        # Build Cube
        cube = Cube(cls.cube_name, cls.dimension_names)
        if not cls.tm1.cubes.exists(cls.cube_name):
            cls.tm1.cubes.create(cube)
        # NOTE(review): the cube created just above is immediately deleted
        # and recreated below with empty rules; the first create looks
        # redundant -- confirm before simplifying.
        c = Cube(cls.cube_name, dimensions=cls.dimension_names, rules=Rules(''))
        if cls.tm1.cubes.exists(c.name):
            cls.tm1.cubes.delete(c.name)
        cls.tm1.cubes.create(c)

    def test_get_cube(self):
        # Single fetch returns a Cube with the expected dimensionality.
        c = self.tm1.cubes.get(self.cube_name)
        self.assertIsInstance(c, Cube)
        self.assertEqual(c.dimensions, self.dimension_names)

        # Control + model cubes should partition the full cube list.
        cubes = self.tm1.cubes.get_all()
        control_cubes = self.tm1.cubes.get_control_cubes()
        model_cubes = self.tm1.cubes.get_model_cubes()
        self.assertEqual(len(cubes), len(control_cubes + model_cubes))

    def test_update_cube(self):
        c = self.tm1.cubes.get(self.cube_name)
        c.rules = Rules("SKIPCHECK;\nFEEDERS;")
        self.tm1.cubes.update(c)
        # test if rule was actually updated
        c = self.tm1.cubes.get(self.cube_name)
        self.assertEqual(c.rules.text, "SKIPCHECK;\nFEEDERS;")
        self.assertTrue(c.skipcheck)

    def test_get_control_cubes(self):
        # Control cubes are identified by the leading "}" in their name.
        control_cubes = self.tm1.cubes.get_control_cubes()
        self.assertGreater(len(control_cubes), 0)
        for cube in control_cubes:
            self.assertTrue(cube.name.startswith("}"))

    def test_get_model_cubes(self):
        model_cubes = self.tm1.cubes.get_model_cubes()
        self.assertGreater(len(model_cubes), 0)
        for cube in model_cubes:
            self.assertFalse(cube.name.startswith("}"))

    def test_get_dimension_names(self):
        dimension_names = self.tm1.cubes.get_dimension_names(self.cube_name)
        self.assertEqual(dimension_names, self.dimension_names)

    def test_get_random_intersection(self):
        # With 1000^3 possible intersections, two random draws colliding is
        # astronomically unlikely, so inequality is a safe assertion.
        intersection1 = self.tm1.cubes.get_random_intersection(cube_name=self.cube_name, unique_names=False)
        intersection2 = self.tm1.cubes.get_random_intersection(cube_name=self.cube_name, unique_names=False)
        self.assertNotEqual(intersection1, intersection2)
        intersection1 = self.tm1.cubes.get_random_intersection(cube_name=self.cube_name, unique_names=True)
        intersection2 = self.tm1.cubes.get_random_intersection(cube_name=self.cube_name, unique_names=True)
        self.assertNotEqual(intersection1, intersection2)

    def test_exists(self):
        self.assertTrue(self.tm1.cubes.exists(self.cube_name))
        # NOTE(review): passes a UUID object rather than a str; relies on
        # the service stringifying its argument -- confirm intended.
        self.assertFalse(self.tm1.cubes.exists(uuid.uuid4()))

    def test_create_delete_cube(self):
        cube_name = self.prefix + "Some_Other_Name"
        # element with index 0 is Sandboxes
        dimension_names = self.tm1.dimensions.get_all_names()[1:3]
        cube = Cube(cube_name, dimension_names)

        # Creation should add exactly one cube with the chosen dimensions.
        all_cubes_before = self.tm1.cubes.get_all_names()
        self.tm1.cubes.create(cube)
        all_cubes_after = self.tm1.cubes.get_all_names()
        self.assertEqual(
            len(all_cubes_before) + 1,
            len(all_cubes_after))
        self.assertEqual(
            self.tm1.cubes.get_dimension_names(cube_name),
            dimension_names)

        # Deletion should remove exactly one cube.
        all_cubes_before = self.tm1.cubes.get_all_names()
        self.tm1.cubes.delete(cube_name)
        all_cubes_after = self.tm1.cubes.get_all_names()
        self.assertEqual(len(all_cubes_before) - 1, len(all_cubes_after))

    def test_get_all_names(self):
        # with-rules and without-rules listings must partition all cubes.
        all_cubes_before = self.tm1.cubes.get_all_names()
        cubes_with_rules = self.tm1.cubes.get_all_names_with_rules()
        cubes_without_rules = self.tm1.cubes.get_all_names_without_rules()
        self.assertEqual(len(all_cubes_before), len(cubes_with_rules) + len(cubes_without_rules))

        # A new rule-less cube lands in the without-rules bucket ...
        cube_name = self.prefix + "Some_Other_Name"
        dimension_names = self.tm1.dimensions.get_all_names()[1:3]
        cube = Cube(cube_name, dimension_names)
        self.tm1.cubes.create(cube)
        self.assertEqual(len(cubes_without_rules) + 1, len(self.tm1.cubes.get_all_names_without_rules()))
        self.assertEqual(len(cubes_with_rules), len(self.tm1.cubes.get_all_names_with_rules()))

        # ... and moves to the with-rules bucket once rules are attached.
        cube.rules = "SKIPCHECK"
        self.tm1.cubes.update(cube)
        self.assertEqual(len(cubes_with_rules) + 1, len(self.tm1.cubes.get_all_names_with_rules()))
        self.assertEqual(len(cubes_without_rules), len(self.tm1.cubes.get_all_names_without_rules()))
        self.tm1.cubes.delete(cube_name)

    @skip_if_insufficient_version(version="11.4")
    def test_get_storage_dimension_order(self):
        dimensions = self.tm1.cubes.get_storage_dimension_order(cube_name=self.cube_name)
        self.assertEqual(dimensions, self.dimension_names)

    def test_get_number_of_cubes(self):
        number_of_cubes = self.tm1.cubes.get_number_of_cubes()
        self.assertIsInstance(number_of_cubes, int)

    @skip_if_insufficient_version(version="11.4")
    def test_update_storage_dimension_order(self):
        self.tm1.cubes.update_storage_dimension_order(
            cube_name=self.cube_name,
            dimension_names=reversed(self.dimension_names))
        dimensions = self.tm1.cubes.get_storage_dimension_order(self.cube_name)
        self.assertEqual(
            list(reversed(dimensions)),
            self.dimension_names)

    @skip_if_insufficient_version(version="11.6")
    def test_load(self):
        response = self.tm1.cubes.load(cube_name=self.cube_name)
        self.assertTrue(response.ok)

    @skip_if_insufficient_version(version="11.6")
    def test_unload(self):
        response = self.tm1.cubes.unload(cube_name=self.cube_name)
        self.assertTrue(response.ok)

    def test_lock(self):
        response = self.tm1.cubes.lock(cube_name=self.cube_name)
        self.assertTrue(response.ok)

    def test_unlock(self):
        # Lock first so there is something to unlock.
        self.tm1.cubes.lock(cube_name=self.cube_name)
        response = self.tm1.cubes.unlock(cube_name=self.cube_name)
        self.assertTrue(response.ok)

    def test_check_rules_without_errors(self):
        errors = self.tm1.cubes.check_rules(cube_name=self.cube_name)
        self.assertEqual(0, len(errors))

    def test_check_rules_with_errors(self):
        # "SKIPCHECK" alone is an invalid rule and must produce one error.
        cube = self.tm1.cubes.get(cube_name=self.cube_name)
        cube.rules = "SKIPCHECK"
        self.tm1.cubes.update(cube)
        errors = self.tm1.cubes.check_rules(cube_name=self.cube_name)
        self.assertEqual(1, len(errors))

    def test_get_measure_dimension(self):
        # By convention the last dimension is the measure dimension.
        measure_dimension = self.tm1.cubes.get_measure_dimension(self.cube_name)
        self.assertEqual(self.dimension_names[-1], measure_dimension)

    # NOTE(review): classmethod tearDown mirrors the classmethod setUp
    # above; see the note there.
    @classmethod
    def tearDown(cls):
        # Remove the cube and dimensions created in setUp, then disconnect.
        cls.tm1.cubes.delete(cls.cube_name)
        for dimension in cls.dimension_names:
            cls.tm1.dimensions.delete(dimension)
        cls.tm1.logout()
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| |
# pylint: disable=wrong-import-position
import ssl
from os import path
from functools import partial
from pathlib import Path
import h11
import pytest
from anyio import create_task_group, open_file, EndOfStream
from overly import (
Server,
ssl_socket_wrapper,
default_ssl_cert,
send_200,
send_303,
send_400,
send_500,
delay,
send_request_as_json,
accept_cookies_and_respond,
send_gzip,
send_deflate,
send_chunked,
send_200_blank_headers,
finish,
HttpMethods,
)
import asks
from asks.request_object import RequestProcessor
from asks.errors import TooManyRedirects, BadStatus, RequestTimeout
# Run every coroutine test in this module under anyio.
pytestmark = pytest.mark.anyio

# Address the overly test server binds to for every test in this module.
_TEST_LOC = ("localhost", 25001)
# SSL context that trusts the test server's self-signed certificate.
_SSL_CONTEXT = ssl.create_default_context(cadata=default_ssl_cert)
@pytest.fixture
def server(request):
    """Start an overly test server configured via indirect parametrization.

    Yields the running server once it is ready to accept connections, and
    shuts its threads down on teardown.
    """
    srv = Server(_TEST_LOC, **request.param)
    srv.daemon = True
    srv.start()
    # Block until the server is actually listening.
    srv.ready_to_go.wait()
    yield srv
    srv.kill_threads = True
    srv.join()
@pytest.mark.parametrize('server', [dict(steps=[send_200, finish])], indirect=True)
async def test_http_get(server):
    # Plain HTTP GET against a 200-responding server.
    r = await asks.get(server.http_test_url)
    assert r.status_code == 200


# GET tests
@pytest.mark.parametrize('server', [
    dict(steps=[send_200, finish], socket_wrapper=ssl_socket_wrapper)
], indirect=True)
async def test_https_get(server, caplog):
    import logging
    caplog.set_level(logging.DEBUG)
    # If we use ssl_context= to trust the CA, then we can successfully do a
    # GET over https.
    r = await asks.get(server.https_test_url, ssl_context=_SSL_CONTEXT)
    assert r.status_code == 200


@pytest.mark.parametrize('server', [
    dict(steps=[send_200, finish], socket_wrapper=ssl_socket_wrapper)
], indirect=True)
async def test_https_get_checks_cert(server):
    try:
        expected_error = ssl.SSLCertVerificationError
    except AttributeError:
        # If we're running in Python <3.7, we won't have the specific error
        # that will be raised, but we can expect it to raise an SSLError
        # nonetheless
        expected_error = ssl.SSLError
    # The server's certificate isn't signed by any real CA. By default, asks
    # should notice that, and raise an error.
    with pytest.raises(expected_error):
        await asks.get(server.https_test_url)


# # async def test_bad_www_and_schema_get():
#     r = await asks.get('http://reddit.com')
#     assert r.status_code == 200
@pytest.mark.parametrize('server', [dict(steps=[send_400, finish])], indirect=True)
async def test_http_get_client_error(server):
    # 4xx responses are returned normally; raise_for_status() surfaces them.
    r = await asks.get(server.http_test_url)
    with pytest.raises(BadStatus) as excinfo:
        r.raise_for_status()
    assert excinfo.match("400 Client Error: BAD REQUEST")
    assert excinfo.value.status_code == 400


@pytest.mark.parametrize('server', [dict(steps=[send_500, finish])], indirect=True)
async def test_http_get_server_error(server):
    # Same for 5xx responses.
    r = await asks.get(server.http_test_url)
    with pytest.raises(BadStatus) as excinfo:
        r.raise_for_status()
    assert excinfo.match("500 Server Error: INTERNAL SERVER ERROR")
    assert excinfo.value.status_code == 500
# Redirect tests
@pytest.mark.parametrize('server', [
    dict(
        max_requests=4,
        steps=[
            [(HttpMethods.GET, "/redirect_1"), send_303, finish],
            [(HttpMethods.GET, "/"), send_200, finish],
            [(HttpMethods.GET, "/redirect_1"), send_303, finish],
            [(HttpMethods.GET, "/"), send_200, finish],
        ],
        ordered_steps=True,
    )
], indirect=True)
async def test_http_redirect(server):
    # A single 303 hop appears exactly once in the response history.
    r = await asks.get(server.http_test_url + "/redirect_1")
    assert len(r.history) == 1

    # make sure history doesn't persist across responses
    r.history.append("not a response obj")
    r = await asks.get(server.http_test_url + "/redirect_1")
    assert len(r.history) == 1


@pytest.mark.parametrize('server', [
    dict(
        max_requests=3,
        steps=[
            [
                (HttpMethods.GET, "/redirect_max"),
                partial(send_303, headers=[("location", "redirect_max1")]),
                finish,
            ],
            [
                (HttpMethods.GET, "/redirect_max1"),
                partial(send_303, headers=[("location", "redirect_max")]),
                finish,
            ],
        ],
    )
], indirect=True)
async def test_http_max_redirect_error(server):
    # Two endpoints redirect to each other; max_redirects=1 must abort.
    with pytest.raises(TooManyRedirects):
        await asks.get(server.http_test_url + "/redirect_max", max_redirects=1)


@pytest.mark.parametrize('server', [
    dict(
        max_requests=2,
        steps=[
            [
                (HttpMethods.GET, "/path/redirect"),
                partial(send_303, headers=[("location", "../foo/bar")]),
                finish,
            ],
            [(HttpMethods.GET, "/foo/bar"), send_200, finish],
        ],
    )
], indirect=True)
async def test_redirect_relative_url(server):
    # Relative location headers are resolved against the request path.
    r = await asks.get(server.http_test_url + "/path/redirect", max_redirects=1)
    assert len(r.history) == 1
    assert r.url == "http://{0}:{1}/foo/bar".format(*_TEST_LOC)


@pytest.mark.parametrize('server', [
    dict(
        max_requests=2,
        steps=[
            [
                (HttpMethods.GET, "/redirect_once"),
                partial(send_303, headers=[("location", "/")]),
                finish,
            ],
            [(HttpMethods.GET, "/"), send_200, finish],
        ],
    )
], indirect=True)
async def test_http_under_max_redirect(server):
    # One redirect stays within the max_redirects=2 budget.
    r = await asks.get(server.http_test_url + "/redirect_once", max_redirects=2)
    assert r.status_code == 200


@pytest.mark.parametrize('server', [
    dict(
        max_requests=1,
        steps=[
            [
                (HttpMethods.GET, "/redirect_once"),
                partial(send_303, headers=[("location", "/")]),
                finish,
            ],
        ],
    )
], indirect=True)
async def test_dont_follow_redirects(server):
    # follow_redirects=False must surface the 303 response itself.
    r = await asks.get(server.http_test_url + "/redirect_once", follow_redirects=False)
    assert r.status_code == 303
    assert r.headers["location"] == "/"


# Timeout tests
@pytest.mark.parametrize('server', [dict(steps=[delay(2), send_200, finish])], indirect=True)
async def test_http_timeout_error(server):
    # Server delays 2s; a 1s timeout must raise.
    with pytest.raises(RequestTimeout):
        await asks.get(server.http_test_url, timeout=1)


@pytest.mark.parametrize('server', [dict(steps=[send_200, finish])], indirect=True)
async def test_http_timeout(server):
    # A generous timeout does not interfere with a fast response.
    r = await asks.get(server.http_test_url, timeout=10)
    assert r.status_code == 200
# Param set test
@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_param_dict_set(server):
    # The server echoes the request back as JSON; check the query param.
    r = await asks.get(server.http_test_url, params={"cheese": "the best"})
    j = r.json()
    assert next(v == "the best" for k, v in j["params"] if k == "cheese")


# Data set test
@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_data_dict_set(server):
    # Form-encoded data is echoed back under "form".
    r = await asks.post(server.http_test_url, data={"cheese": "please bby"})
    j = r.json()
    assert next(v == "please bby" for k, v in j["form"] if k == "cheese")


# Cookie send test
@pytest.mark.parametrize('server', [
    dict(steps=[accept_cookies_and_respond, finish])
], indirect=True)
async def test_cookie_dict_send(server):
    cookies = {"Test-Cookie": "Test Cookie Value", "koooookie": "pie"}

    r = await asks.get(server.http_test_url, cookies=cookies)
    for cookie in r.cookies:
        assert cookie.name in cookies
        # Cookie values containing spaces come back quoted.
        if " " in cookie.value:
            assert cookie.value == '"' + cookies[cookie.name] + '"'
        else:
            assert cookie.value == cookies[cookie.name]


# Custom headers test
@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_header_set(server):
    r = await asks.get(
        server.http_test_url, headers={"Asks-Header": "Test Header Value"}
    )
    j = r.json()
    assert any(k == "asks-header" for k, _ in j["headers"])
    # Response header lookup is case-insensitive.
    assert "cOntenT-tYPe" in r.headers


# File send test
TEST_DIR = path.dirname(path.abspath(__file__))
TEST_FILE1 = path.join(TEST_DIR, "test_file1.txt")
TEST_FILE2 = path.join(TEST_DIR, "test_file2")


@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_file_send_single(server):
    # One file upload; the echoed JSON carries the file name and contents.
    r = await asks.post(server.http_test_url, files={"file_1": TEST_FILE1})
    j = r.json()
    assert any(file_data["name"] == "file_1" for file_data in j["files"])
    file_data = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_1"
    )
    assert file_data["file"] == "Compooper"


@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_file_send_double(server):
    # Two files in one request.
    r = await asks.post(
        server.http_test_url, files={"file_1": TEST_FILE1, "file_2": TEST_FILE2}
    )
    j = r.json()
    assert any(file_data["name"] == "file_1" for file_data in j["files"])
    assert any(file_data["name"] == "file_2" for file_data in j["files"])
    file_data_1 = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_1"
    )
    file_data_2 = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_2"
    )
    assert file_data_1["file"] == "Compooper"
    assert file_data_2["file"] == "My slug <3"


@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_file_send_file_and_form_data(server):
    # Mixing a file and plain form data in the same files= mapping.
    r = await asks.post(
        server.http_test_url,
        files={"file_1": TEST_FILE1, "data_1": "watwatwatwat=yesyesyes"},
    )
    j = r.json()
    assert any(file_data["name"] == "file_1" for file_data in j["files"])
    assert any(form_data["name"] == "data_1" for form_data in j["forms"])
    file_data_1 = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_1"
    )
    assert file_data_1["file"] == "Compooper"
    form_data_1 = next(
        form_data for form_data in j["forms"] if form_data["name"] == "data_1"
    )
    assert form_data_1["form_data"] == "watwatwatwat=yesyesyes"
# File send test new multipart API
# NOTE(review): TEST_DIR/TEST_FILE1/TEST_FILE2 are re-assigned here with the
# same values as above -- the duplication looks unintentional.
TEST_DIR = path.dirname(path.abspath(__file__))
TEST_FILE1 = path.join(TEST_DIR, "test_file1.txt")
TEST_FILE2 = path.join(TEST_DIR, "test_file2")


@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_multipart_send_single(server):
    # A pathlib.Path is accepted as a multipart value.
    r = await asks.post(server.http_test_url, multipart={"file_1": Path(TEST_FILE1)})
    j = r.json()
    assert any(file_data["name"] == "file_1" for file_data in j["files"])
    file_data = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_1"
    )
    assert file_data["file"] == "Compooper"


@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_multipart_send_single_already_open(server):
    # An already-open (sync) binary file object is accepted too.
    with open(TEST_FILE1, "rb") as f:
        r = await asks.post(server.http_test_url, multipart={"file_1": f})
    j = r.json()
    assert any(file_data["name"] == "file_1" for file_data in j["files"])
    file_data = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_1"
    )
    assert file_data["file"] == "Compooper"


@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_multipart_send_single_already_open_async(server):
    # As is an anyio async file object.
    async with await open_file(TEST_FILE1, "rb") as f:
        r = await asks.post(server.http_test_url, multipart={"file_1": f})
    j = r.json()
    assert any(file_data["name"] == "file_1" for file_data in j["files"])
    file_data = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_1"
    )
    assert file_data["file"] == "Compooper"


@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_multipart_send_raw_bytes(server):
    # Raw bytes can be sent by wrapping them in MultipartData with a name.
    r = await asks.post(
        server.http_test_url,
        multipart={
            "file_1": asks.multipart.MultipartData(
                b"Compooper", basename="in_memory.txt",
            )
        },
    )
    j = r.json()
    assert any(file_data["name"] == "file_1" for file_data in j["files"])
    file_data = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_1"
    )
    assert file_data["file"] == "Compooper"


@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_multipart_send_double(server):
    # Two files in a single multipart request.
    r = await asks.post(
        server.http_test_url,
        multipart={"file_1": Path(TEST_FILE1), "file_2": Path(TEST_FILE2)},
    )
    j = r.json()
    assert any(file_data["name"] == "file_1" for file_data in j["files"])
    assert any(file_data["name"] == "file_2" for file_data in j["files"])
    file_data_1 = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_1"
    )
    file_data_2 = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_2"
    )
    assert file_data_1["file"] == "Compooper"
    assert file_data_2["file"] == "My slug <3"


@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_multipart_send_file_and_form_data(server):
    # Mixing a file and plain form data in the same multipart= mapping.
    r = await asks.post(
        server.http_test_url,
        multipart={"file_1": Path(TEST_FILE1), "data_1": "watwatwatwat=yesyesyes"},
    )
    j = r.json()
    assert any(file_data["name"] == "file_1" for file_data in j["files"])
    assert any(form_data["name"] == "data_1" for form_data in j["forms"])
    file_data_1 = next(
        file_data for file_data in j["files"] if file_data["name"] == "file_1"
    )
    assert file_data_1["file"] == "Compooper"
    form_data_1 = next(
        form_data for form_data in j["forms"] if form_data["name"] == "data_1"
    )
    assert form_data_1["form_data"] == "watwatwatwat=yesyesyes"
# JSON send test
@pytest.mark.parametrize('server', [dict(steps=[send_request_as_json, finish])], indirect=True)
async def test_json_send(server):
    # A json= body round-trips with types intact.
    r = await asks.post(
        server.http_test_url, json={"key_1": True, "key_2": "cheesestring"}
    )
    j = r.json()
    json_1 = next(iter(j["json"]))
    assert json_1["json"]["key_1"] is True
    assert json_1["json"]["key_2"] == "cheesestring"


# Test decompression
@pytest.mark.parametrize('server', [
    dict(steps=[partial(send_gzip, data="wolowolowolo"), finish])
], indirect=True)
async def test_gzip(server):
    # gzip-encoded bodies are transparently decompressed.
    r = await asks.get(server.http_test_url)
    assert r.text == "wolowolowolo"


@pytest.mark.parametrize('server', [
    dict(steps=[partial(send_deflate, data="wolowolowolo"), finish])
], indirect=True)
async def test_deflate(server):
    # deflate-encoded bodies are transparently decompressed.
    r = await asks.get(server.http_test_url)
    assert r.text == "wolowolowolo"


# Test chunks and streaming
@pytest.mark.parametrize('server', [
    dict(steps=[partial(send_chunked, data=["ham "] * 10), finish])
], indirect=True)
async def test_chunked(server):
    # Chunked transfer encoding is reassembled into a single body.
    r = await asks.get(server.http_test_url)
    assert r.text == "ham ham ham ham ham ham ham ham ham ham "


@pytest.mark.parametrize('server', [
    dict(steps=[partial(send_chunked, data=["ham "] * 10), finish])
], indirect=True)
async def test_stream(server):
    # stream=True exposes the body as an async-iterable of chunks.
    data = b""
    r = await asks.get(server.http_test_url, stream=True)
    async for chunk in r.body:
        data += chunk
    assert data == b"ham ham ham ham ham ham ham ham ham ham "


# Test callback
@pytest.mark.parametrize('server', [
    dict(steps=[partial(send_chunked, data=["ham "] * 10), finish])
], indirect=True)
async def test_callback(server):
    # The callback= coroutine is invoked once per received chunk.
    async def callback_example(chunk):
        nonlocal callback_data
        callback_data += chunk

    callback_data = b""
    await asks.get(server.http_test_url, callback=callback_example)
    assert callback_data == b"ham ham ham ham ham ham ham ham ham ham "


# Test connection close without content-length and transfer-encoding
@pytest.mark.parametrize('server', [
    dict(
        steps=[partial(send_200_blank_headers, headers=[("connection", "close")]), finish],
    )
], indirect=True)
async def test_connection_close_no_content_len(server):
    # Bodies framed only by connection close must still be read fully.
    r = await asks.get(server.http_test_url)
    assert r.text == "200"
# Session Tests
# =============
# Test Session with two pooled connections on ten get requests.
@pytest.mark.parametrize('server', [
dict(
steps=[partial(send_200_blank_headers, headers=[("connection", "close")]), finish],
max_requests=10,
)
], indirect=True)
async def test_session_smallpool(server):
async def worker(s):
r = await s.get(path="/get")
assert r.status_code == 200
s = asks.Session(server.http_test_url, connections=2)
async with create_task_group() as g:
for _ in range(10):
await g.spawn(worker, s)
# Test stateful Session
# TODO check the "" quoting of cookies here (probably in overly)
@pytest.mark.parametrize('server', [
dict(steps=[accept_cookies_and_respond, finish])
], indirect=True)
async def test_session_stateful(server):
s = asks.Session(server.http_test_url, persist_cookies=True)
await s.get(cookies={"Test-Cookie": "Test Cookie Value"})
assert ":".join(str(x) for x in _TEST_LOC) in s._cookie_tracker.domain_dict.keys()
assert (
s._cookie_tracker.domain_dict[":".join(str(x) for x in _TEST_LOC)][0].value
== '"Test Cookie Value"'
)
# Test session instantiates outside event loop
def test_instantiate_session_outside_of_event_loop():
try:
asks.Session()
except RuntimeError:
pytest.fail("Could not instantiate Session outside of event loop")
async def test_session_unknown_kwargs():
    """Unknown keyword arguments to Session.request must raise TypeError.

    Note: pytest.raises already fails the test with "DID NOT RAISE" when no
    TypeError occurs, so the previous pytest.fail() inside the block was
    unreachable dead code and has been removed.
    """
    with pytest.raises(TypeError, match=r"request\(\) got .*"):
        session = asks.Session("https://httpbin.org/get")
        await session.request("GET", ko=7, foo=0, bar=3, shite=3)
async def test_recv_event_anyio2_end_of_stream():
    """On EndOfStream, _recv_event must feed b"" to h11 and surface the resulting event."""

    class FakeH11Connection:
        def __init__(self):
            self.data = None

        def next_event(self):
            # PAUSED is reported only after the empty end-of-stream sentinel arrived.
            if self.data == b"":
                return h11.PAUSED
            return h11.NEED_DATA

        def receive_data(self, data):
            self.data = data

    class FakeSock:
        def receive(self):
            raise EndOfStream

    request = RequestProcessor(None, "get", "toot-toot", None)
    request.sock = FakeSock()
    connection = FakeH11Connection()
    event = await request._recv_event(connection)
    assert event is h11.PAUSED
    # _recv_event must have translated EndOfStream into an empty read.
    assert connection.data == b""
| |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.Dataset.from_tensors()."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class FromTensorsTest(test_base.DatasetTestBase):
  """Tests for `tf.data.Dataset.from_tensors()` over dense, sparse, ragged and nested elements."""

  def testFromTensors(self):
    """Test a dataset that represents a single tuple of tensors."""
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0))
    dataset = dataset_ops.Dataset.from_tensors(components)
    # The dataset element must preserve each component's static shape.
    self.assertEqual(
        [c.shape for c in components],
        nest.flatten(dataset_ops.get_legacy_output_shapes(dataset)))
    self.assertDatasetProduces(dataset, expected_output=[components])

  def testFromTensorsTensorArray(self):
    """Test a dataset that represents a TensorArray."""
    components = (
        tensor_array_ops.TensorArray(dtypes.float32, element_shape=(), size=2)
        .unstack([1.0, 2.0]))
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertDatasetProduces(
        dataset, expected_output=[[1.0, 2.0]], requires_initialization=True)

  def testFromTensorsSparse(self):
    """Test a dataset that represents a single tuple of sparse tensors."""
    components = (sparse_tensor.SparseTensorValue(
        indices=np.array([[0]]),
        values=np.array([0]),
        dense_shape=np.array([1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1]]),
                      values=np.array([-1, 1]),
                      dense_shape=np.array([2, 2])))
    dataset = dataset_ops.Dataset.from_tensors(components)
    # For sparse components the legacy output shape is the dense_shape.
    self.assertEqual(
        [tensor_shape.TensorShape(c.dense_shape) for c in components],
        [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    self.assertDatasetProduces(dataset, expected_output=[components])

  def testFromTensorsMixed(self):
    """Test a dataset that represents a single tuple of dense and sparse tensors."""
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0]]),
                      values=np.array([0]),
                      dense_shape=np.array([1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1]]),
                      values=np.array([-1, 1]),
                      dense_shape=np.array([2, 2])))
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertEqual([
        tensor_shape.TensorShape(c.dense_shape)
        if sparse_tensor.is_sparse(c) else c.shape for c in components
    ], [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    self.assertDatasetProduces(dataset, expected_output=[components])

  def testFromTensorsRagged(self):
    """Test a dataset whose components are ragged tensors."""
    components = (
        ragged_factory_ops.constant_value([[[0]], [[1]], [[2]]]),
        ragged_factory_ops.constant_value([[[3]], [[4]], [[5]]]),
    )
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertDatasetProduces(dataset, expected_output=[components])

  def testFromTensorsMixedRagged(self):
    """Test a dataset mixing dense, sparse and ragged components."""
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0]]),
                      values=np.array([0]),
                      dense_shape=np.array([1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1]]),
                      values=np.array([-1, 1]),
                      dense_shape=np.array([2, 2])),
                  ragged_factory_ops.constant_value([[[0]], [[1]], [[2]]]))
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertDatasetProduces(dataset, expected_output=[components])

  # pylint: disable=g-long-lambda,unnecessary-lambda
  def testNestedStructure(self):
    """Nested element structure must survive each dataset transformation."""
    components = (np.array([1, 2, 3], dtype=np.int64),
                  (np.array([4., 5.]), np.array([6., 7.])),
                  np.array([8, 9, 10], dtype=np.int64))
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertEqual((dtypes.int64, (dtypes.float64, dtypes.float64),
                      dtypes.int64),
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual(([3], ([2], [2]), [3]),
                     dataset_ops.get_legacy_output_shapes(dataset))

    # shuffle must not change types or shapes.
    dataset = dataset.shuffle(10, 10)
    self.assertEqual((dtypes.int64, (dtypes.float64, dtypes.float64),
                      dtypes.int64),
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual(([3], ([2], [2]), [3]),
                     dataset_ops.get_legacy_output_shapes(dataset))

    # repeat must not change types or shapes.
    dataset = dataset.repeat(-1)
    self.assertEqual((dtypes.int64, (dtypes.float64, dtypes.float64),
                      dtypes.int64),
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual(([3], ([2], [2]), [3]),
                     dataset_ops.get_legacy_output_shapes(dataset))

    # filter must not change types or shapes.
    dataset = dataset.filter(lambda x, y, z: True)
    self.assertEqual((dtypes.int64, (dtypes.float64, dtypes.float64),
                      dtypes.int64),
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual(([3], ([2], [2]), [3]),
                     dataset_ops.get_legacy_output_shapes(dataset))

    # take must not change types or shapes.
    dataset = dataset.take(5)
    self.assertEqual((dtypes.int64, (dtypes.float64, dtypes.float64),
                      dtypes.int64),
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual(([3], ([2], [2]), [3]),
                     dataset_ops.get_legacy_output_shapes(dataset))

    # map may restructure the element; the new structure must be reflected.
    dataset = dataset.map(lambda x, y, z: ((x, z), (y[0], y[1])))
    self.assertEqual(((dtypes.int64, dtypes.int64),
                      (dtypes.float64, dtypes.float64)),
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual((([3], [3]), ([2], [2])),
                     dataset_ops.get_legacy_output_shapes(dataset))

    dataset = dataset.flat_map(
        lambda x, y: dataset_ops.Dataset.from_tensors(((x[0], x[1]),
                                                       (y[0], y[1])))
    )
    self.assertEqual(((dtypes.int64, dtypes.int64),
                      (dtypes.float64, dtypes.float64)),
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual((([3], [3]), ([2], [2])),
                     dataset_ops.get_legacy_output_shapes(dataset))

    # batch prepends an unknown (None) batch dimension to every component.
    dataset = dataset.batch(32)
    self.assertEqual(((dtypes.int64, dtypes.int64),
                      (dtypes.float64, dtypes.float64)),
                     dataset_ops.get_legacy_output_types(dataset))
    dataset_output_shapes = dataset_ops.get_legacy_output_shapes(dataset)
    self.assertEqual((([None, 3], [None, 3]), ([None, 2], [None, 2])),
                     nest.pack_sequence_as(dataset_output_shapes, [
                         s.as_list()
                         for s in nest.flatten(dataset_output_shapes)
                     ]))

    # Define a separate set of components with matching leading
    # dimension for the from-slices constructor.
    components_for_slices = (np.array([1, 2, 3], dtype=np.int64),
                             (np.array([4., 5., 6.]), np.array([7., 8., 9.])),
                             np.array([10, 11, 12], dtype=np.int64))
    dataset = dataset_ops.Dataset.from_tensor_slices(components_for_slices)
    self.assertEqual((dtypes.int64,
                      (dtypes.float64, dtypes.float64), dtypes.int64),
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual(([], ([], []), []),
                     dataset_ops.get_legacy_output_shapes(dataset))

  # TODO(b/117581999): more specific shapes in eager mode.
  @test_util.run_deprecated_v1
  def testSkipEagerNestedStructure(self):
    """Restructured-then-batched elements must report the expected dtypes and shapes."""
    components = (np.array([1, 2, 3], dtype=np.int64), (np.array([4., 5.]),
                                                        np.array([6., 7.])),
                  np.array([8, 9, 10], dtype=np.int64))
    dataset = dataset_ops.Dataset.from_tensors(components)
    dataset = dataset.map(lambda x, y, z: ((x, z), (y[0], y[1])))
    dataset = dataset.flat_map(
        lambda x, y: dataset_ops.Dataset.from_tensors(
            ((x[0], x[1]), (y[0], y[1])))).batch(32)
    get_next = self.getNext(dataset)
    (w, x), (y, z) = get_next()
    self.assertEqual(dtypes.int64, w.dtype)
    self.assertEqual(dtypes.int64, x.dtype)
    self.assertEqual(dtypes.float64, y.dtype)
    self.assertEqual(dtypes.float64, z.dtype)
    self.assertEqual([None, 3], w.shape.as_list())
    self.assertEqual([None, 3], x.shape.as_list())
    self.assertEqual([None, 2], y.shape.as_list())
    self.assertEqual([None, 2], z.shape.as_list())
    # A fresh iterator over the same dataset must report identical metadata.
    get_next = self.getNext(dataset)
    (w, x), (y, z) = get_next()
    self.assertEqual(dtypes.int64, w.dtype)
    self.assertEqual(dtypes.int64, x.dtype)
    self.assertEqual(dtypes.float64, y.dtype)
    self.assertEqual(dtypes.float64, z.dtype)
    self.assertEqual([None, 3], w.shape.as_list())
    self.assertEqual([None, 3], x.shape.as_list())
    self.assertEqual([None, 2], y.shape.as_list())
    self.assertEqual([None, 2], z.shape.as_list())

  def testNestedDict(self):
    """Nested dict components must keep per-key dtypes and shapes."""
    components = {"a": {"aa": 1, "ab": [2.0, 2.0]}, "b": [3, 3, 3]}
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertEqual(dtypes.int32,
                     dataset_ops.get_legacy_output_types(dataset)["a"]["aa"])
    self.assertEqual(dtypes.float32,
                     dataset_ops.get_legacy_output_types(dataset)["a"]["ab"])
    self.assertEqual(dtypes.int32,
                     dataset_ops.get_legacy_output_types(dataset)["b"])
    self.assertEqual([],
                     dataset_ops.get_legacy_output_shapes(dataset)["a"]["aa"])
    self.assertEqual([2],
                     dataset_ops.get_legacy_output_shapes(dataset)["a"]["ab"])
    self.assertEqual([3],
                     dataset_ops.get_legacy_output_shapes(dataset)["b"])

  def testNonSequenceNestedStructure(self):
    """A single-array (non-tuple) element must flow through transformations."""
    components = np.array([1, 2, 3], dtype=np.int64)
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertEqual(dtypes.int64,
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual([3], dataset_ops.get_legacy_output_shapes(dataset))

    dataset = dataset.filter(
        lambda x: math_ops.reduce_all(math_ops.equal(x, components)))
    self.assertEqual(dtypes.int64,
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual([3], dataset_ops.get_legacy_output_shapes(dataset))

    # stack doubles the leading dimension.
    dataset = dataset.map(lambda x: array_ops.stack([x, x]))
    self.assertEqual(dtypes.int64,
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual([2, 3], dataset_ops.get_legacy_output_shapes(dataset))

    # from_tensor_slices strips the leading dimension back off.
    dataset = dataset.flat_map(
        lambda x: dataset_ops.Dataset.from_tensor_slices(x))
    self.assertEqual(dtypes.int64,
                     dataset_ops.get_legacy_output_types(dataset))
    self.assertEqual([3], dataset_ops.get_legacy_output_shapes(dataset))

    get_next = self.getNext(dataset)
    self.assertEqual(dtypes.int64, get_next().dtype)
    self.assertEqual([3], get_next().shape)

  # TODO(b/121264236): needs mechanism for multiple device in eager mode.
  def testSkipEagerSplitPipelineFailsWithPlacementError(self):
    """A pipeline reading variables on two devices must fail placement."""
    with session.Session(
        target="",
        config=config_pb2.ConfigProto(device_count={"CPU": 2})) as sess:

      dataset = dataset_ops.Dataset.from_tensors(0)

      # Define a pipeline that attempts to use variables on two
      # different devices.
      #
      # Initialize the variables before creating the iterator, to avoid the
      # placement algorithm overriding the DT_RESOURCE colocation constraints.
      with ops.device("/cpu:0"):
        var_0 = resource_variable_ops.ResourceVariable(initial_value=0)
        dataset = dataset.map(lambda x: x + var_0.read_value())
        sess.run(var_0.initializer)

      with ops.device("/cpu:1"):
        var_1 = resource_variable_ops.ResourceVariable(initial_value=0)
        dataset = dataset.map(lambda x: x + var_1.read_value())
        sess.run(var_1.initializer)

      iterator = dataset_ops.make_initializable_iterator(dataset)
      sess.run(iterator.initializer)

      with self.assertRaisesRegexp(
          errors.FailedPreconditionError,
          "Error while reading resource variable Variable"):
        sess.run(iterator.get_next())
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  test.main()
| |
from pyparsing import *
from itertools import *
import os
import logging
#~ Local libraries
from syntax.syntax import Syntax
from queries.cq import Variable
from queries.ucq import UCQ
class ECQ:
#~ The string expressions used to build an ECQ can be:
#~ - "(not ECQ)"
#~ - "(exists (vars) ECQ)"
#~ - "(and ECQ ... ECQ)"
#~ - "(mko-eq var1 var2)"
#~ - "(mko UCQ)"
#~ - "(:True)"
#~ The equivalent expressed as a list (or tuple) are:
#~ - ["not", ECQ]
#~ - ["exists", [vars], ECQ]
#~ - ["and", ECQ, ... , ECQ]
#~ - ["mko-eq", var1, var2]
#~ - ["mko", UCQ]
#~ - self.__trueQuery = True
def __init__(self, queryToParse, conceptList, roleList, individualList):
    """Build an ECQ from a string or pre-parsed expression.

    queryToParse may be a string, a pyparsing.ParseResults, a tuple or a
    list; concept/role/individual names must be collections of strings.
    """
    self.__ecqs = set()
    self.__ucq = None
    self.__freeVars = set()
    self.__existentialVars = set()
    # First and second variable of an equality assertion "(mko-eq ?x ?y)", if any.
    self.__equalityVar1 = None
    self.__equalityVar2 = None
    self.__negated = False
    # True when the query is the constant boolean query "(:True)".
    self.__trueQuery = False
    # Individual used to build the ADL translation of "(:True)".
    self.__trueQueryElement = None
    # Validate the arguments, then parse the expression.
    self.__checkInput(queryToParse, conceptList, roleList, individualList)
    self.__parseQuery(queryToParse, conceptList, roleList, individualList)
    # Pick an arbitrary individual (the first one iteration yields), if any.
    self.__trueQueryElement = next(iter(individualList), None)
def __checkInput(self, queryToParse, conceptList, roleList, individualList):
    """Validate constructor arguments, raising Exception on malformed input."""
    #~ The query must be a string or an already-parsed structure.
    if not isinstance(queryToParse, (str, ParseResults, tuple, list)):
        #~ Invalid format
        raise Exception("The provided CQ is not in a valid format: " + str(type(queryToParse)) + "\n" + str(queryToParse))
    #~ conceptList, roleList, and individualList must each be a collection of strings.
    validations = (
        (conceptList, "The concept list provided is not valid. It must be a list of strings."),
        (roleList, "The role list provided is not valid. It must be a list of strings."),
        (individualList, "The individual list provided is not valid. It must be a list of strings."),
    )
    for collection, errorMessage in validations:
        validContainer = isinstance(collection, (list, tuple, set))
        if not (validContainer and all(isinstance(element, str) for element in collection)):
            raise Exception(errorMessage)
def __parseQuery(self, queryToParse, conceptList, roleList, individualList):
    """Parse the query expression and populate this ECQ's fields.

    Fix: removed three leftover debug print() calls in the mko-eq branch
    that polluted stdout on every equality assertion parsed.
    Raises Exception on any syntactically invalid expression.
    """
    syntax = Syntax()
    #~ If queryToParse is a string, we parse it by considering the parentheses that
    #~ close every expression and analyse the resulting pyparsing.ParseResults.
    #~ If queryToParse is already a pyparsing.ParseResults, then we analyse it directly.
    result = None
    if isinstance(queryToParse, str):
        parser = StringStart() + nestedExpr() + StringEnd()
        result = parser.parseString(queryToParse)
        #~ We consider only the first element of result, since, if it is a valid list,
        #~ then result is a nested list, and thus result[0] is the actual list we
        #~ are interested in.
        result = result[0]
    elif isinstance(queryToParse, (ParseResults, tuple, list)):
        result = queryToParse
    #~ The expressions used to build an ECQ can be:
    #~     - "(not ECQ)"
    #~     - "(exists (vars) ECQ)"
    #~     - "(and ECQ list)"
    #~     - "(mko-eq var1 var2)"
    #~     - "(mko UCQ)"
    #~     - ":True"
    #~ We check the first element (result[0]),
    #~ which has to be a special word identifying which type of expression is used
    if result[0] == syntax.true:
        #~ The expression must be "(:True)", thus result must contain exactly 1 element
        if len(result) != 1:
            raise Exception("The expression \"(" + syntax.true + ")\" must contain only the keyword " + syntax.true + ".")
        #~ Set the current ECQ as True
        self.__trueQuery = True
    elif result[0] == syntax.neg:
        #~ The expression must be "(not ECQ)", thus result must contain exactly 2 elements
        if len(result) != 2:
            raise Exception("The expression \"(not ECQ )\" must contain only one ECQ.")
        self.__ecqs.add(ECQ(result[1], conceptList, roleList, individualList))
        #~ Set the current ECQ as negated
        self.__negated = True
    elif result[0] == syntax.exists:
        #~ The element result[1] must contain the existential variables,
        #~ while result[2] the inner ECQ.
        #~ result must contain exactly 3 elements
        if len(result) != 3:
            raise Exception("The expression \"(exists (Vars) (ECQ) )\" must contain only a set of existential variables and one ECQ.")
        self.__ecqs.add(ECQ(result[2], conceptList, roleList, individualList))  # Add the ECQ
        #~ Check that variables are syntactically valid (i.e. "?"+ a valid string)
        #~ and add them to self.__existentialVars
        varName = (StringStart() + syntax.variable.setResultsName("varName") + StringEnd()).leaveWhitespace()
        for varString in result[1]:
            var = Variable(varName.parseString(varString)[0])
            #~ Check that var is a free variable in the inner ECQ, otherwise raise an Exception
            for ecq in self.__ecqs:
                if not (var in ecq.freeVars()):
                    raise Exception("The variable " + str(var) + " does not appear in the inner ECQ.")
            self.__existentialVars.add(var)
    elif result[0] == syntax.queryAnd:
        #~ The list must contain "and" plus at least two inner ECQs,
        #~ starting from element result[1].
        if len(result) <= 2:
            raise Exception("The expression \"(and ... )\" must contain at least 2 internal ECQs.")
        for counter in range(1, len(result)):
            self.__ecqs.add(ECQ(result[counter], conceptList, roleList, individualList))
        #~ Check that all the inner ECQs have the same free variables
        #~ for check in combinations(self.__ecqs, 2):
        #~     print(check)
        #~     if check[0].freeVars() != check[1].freeVars():
        #~         raise Exception("The ECQs have different free variables in them!\n"+str(check[0])+"\n"+str(check[1]))
    elif result[0] == syntax.mkoEq:
        #~ This represents an equality assertion. It must contain exactly 2 variables
        #~ (thus, considering 'mko-eq', the length can't be more than 3).
        if len(result) != 3:
            raise Exception("An equality assertion must contain exactly 2 variables. " + str(len(result[1:])) + " provided instead: " + str(result[1:]))
        #~ Check that variables are syntactically valid (i.e. "?"+ a valid string)
        #~ and add them to self.__existentialVars
        varName = (StringStart() + syntax.variable.setResultsName("varName") + StringEnd()).leaveWhitespace()
        self.__equalityVar1 = Variable(varName.parseString(result[1])[0])
        self.__equalityVar2 = Variable(varName.parseString(result[2])[0])
        #~ The variables have to be different from each other, we don't
        #~ accept an equality of the type (= ?x ?x).
        #~ If this is the case, we raise an Exception.
        if self.__equalityVar1 == self.__equalityVar2:
            raise Exception("The variables provided in the equality statement are the same. Please adjust/remove the atom: (= ?" + \
                str(result[1]) + " ?" + str(result[2]) + ")")
    elif result[0] == syntax.mko:
        #~ This represents a UCQ inside a minimal knowledge operator.
        #~ A UCQ must be contained between parentheses ( ), like:
        #~     (mko (...))
        #~ even if the UCQ is a single atom, so result is like:
        #~     ['mko', [...]]
        #~ thus result can't contain more than two elements.
        if len(result) > 2:
            raise Exception("A minimal knowledge operator axiom should contain only an UCQ between parenthesys \"()\".")
        self.__ucq = UCQ(result[1], conceptList, roleList, individualList)
    else:
        raise Exception("No valid word found: " + str(result[0]))
    #~ Find the freeVars of the current ECQ.
    #~ To do so, we collect all the free vars appearing in the inner ECQs,
    #~ or in the UCQ, and remove the ones that are set as existential
    #~ in the current ECQ.
    #~ If the current ECQ is an equality, we consider the variables involved
    #~ as free variables.
    for ecq in self.__ecqs:
        self.__freeVars.update(ecq.freeVars())
    if not self.__ucq is None:
        self.__freeVars.update(self.__ucq.freeVars())
    if isinstance(self.__equalityVar1, Variable):
        self.__freeVars.add(self.__equalityVar1)
    if isinstance(self.__equalityVar2, Variable):
        self.__freeVars.add(self.__equalityVar2)
    self.__freeVars = self.__freeVars.difference(self.__existentialVars)
def freeVars(self):
    """Return the set of free variables appearing in the current ECQ."""
    return self.__freeVars
def existentialVars(self):
    """Return the set of existential variables appearing in the current ECQ."""
    return self.__existentialVars
def equalityVar1(self):
    """Return the first variable of the "(mko-eq ?x ?y)" equality, or None if this ECQ is not an equality."""
    return self.__equalityVar1
def equalityVar2(self):
    """Return the second variable of the "(mko-eq ?x ?y)" equality, or None if this ECQ is not an equality."""
    return self.__equalityVar2
def terms(self):
    """Return the set of terms used in the atoms of the ECQ.

    Fix: guard the UCQ access with a None check, mirroring concepts() and
    roles(); previously a negation/equality/True query (where __ucq is
    None) raised AttributeError here.
    """
    terms = set()
    for ecq in self.__ecqs:
        terms.update(ecq.terms())
    if not self.__ucq is None:
        terms.update(self.__ucq.terms())
    return terms
def concepts(self):
    """Return the set of concept names used in the atoms of this ECQ."""
    collected = set()
    # Gather concepts recursively from the inner ECQs, then from the UCQ if present.
    for innerEcq in self.__ecqs:
        collected.update(innerEcq.concepts())
    if self.__ucq is not None:
        collected.update(self.__ucq.concepts())
    return collected
def roles(self):
    """Return the set of role names used in the atoms of this ECQ."""
    collected = set()
    # Gather roles recursively from the inner ECQs, then from the UCQ if present.
    for innerEcq in self.__ecqs:
        collected.update(innerEcq.roles())
    if self.__ucq is not None:
        collected.update(self.__ucq.roles())
    return collected
def isNegated(self):
    """Return True if this ECQ is a negation "(not ECQ)"."""
    return self.__negated
def isTrue(self):
    """Return True if this ECQ is the constant boolean query "(:True)"."""
    return self.__trueQuery
def ecqs(self):
    """Return the set of inner ECQs (empty for leaf queries)."""
    return self.__ecqs
def ucq(self):
    """Return the inner UCQ of a "(mko UCQ)" query, or None."""
    return self.__ucq
def __repr__(self):
    """Render this ECQ back into its s-expression string representation."""
    def joined(items):
        # Each item rendered with a single leading space, concatenated.
        return "".join(" " + str(item) for item in items)
    if self.__trueQuery:
        return "( True )"
    if self.__negated:
        return "(not" + joined(self.__ecqs) + " )"
    if self.__existentialVars:
        return "(exists (" + joined(self.__existentialVars) + " ) " + joined(self.__ecqs) + " )"
    if self.__ecqs:
        return "(and" + joined(self.__ecqs) + " )"
    if self.__ucq is not None:
        return "(mko " + str(self.__ucq) + " )"
    if isinstance(self.__equalityVar1, Variable) and isinstance(self.__equalityVar2, Variable):
        return "(mko-eq " + str(self.__equalityVar1) + " " + str(self.__equalityVar2) + " )"
    return ""
def toDL(self):
    """Render this ECQ in description-logic notation."""
    if self.__trueQuery:
        return "True"
    if self.__negated:
        return "not" + "".join(" " + ecq.toDL() for ecq in self.__ecqs) + " "
    if self.__existentialVars:
        varList = ",".join(str(var) for var in self.__existentialVars)
        body = "".join(" " + ecq.toDL() for ecq in self.__ecqs)
        return "exists " + varList + "." + body + " "
    if self.__ecqs:
        # Conjuncts rendered as "( A and  B )" (each conjunct keeps its leading space).
        return "(" + " and ".join(" " + ecq.toDL() for ecq in self.__ecqs) + " )"
    if self.__ucq is not None:
        return "[ " + self.__ucq.toDL() + " ]"
    if isinstance(self.__equalityVar1, Variable) and isinstance(self.__equalityVar2, Variable):
        return "[ " + str(self.__equalityVar1) + " = " + str(self.__equalityVar2) + " ]"
    return ""
def toADL(self, indentLevel = 0):
    """Return the ADL (s-expression) translation of this ECQ, newline-terminated.

    indentLevel controls the leading indentation of every emitted line.
    Fix: the mko-eq branch now emits "(= ?x ?y )" with a space after "=",
    matching the "(= {0} {1})" format used by the :True branch; the old
    "(=?x" output fused the operator and the first variable into one token.
    """
    if not isinstance(indentLevel, int) or indentLevel < 0:
        raise Exception("The parameter indentLevel must be a positive integer.")
    def __indent(indentLevel):
        # One syntax-defined indent unit per level.
        syntax = Syntax()
        return syntax.indent * indentLevel
    ecqStr = ""
    if self.__trueQuery:
        #~ In ADL we have to translate a query with a formula that is always satisfied.
        #~ To achieve this, we select a random individual, and create an equality (= ind ind)
        ecqStr = __indent(indentLevel) + "(= {0} {1})\n".format(self.__trueQueryElement, self.__trueQueryElement)
    elif self.__negated:
        ecqStr = __indent(indentLevel) + "(not\n"
        for ecq in self.__ecqs:
            ecqStr += ecq.toADL(indentLevel + 1)
        ecqStr += __indent(indentLevel) + ")\n"
    elif len(self.__existentialVars) > 0:
        ecqStr = __indent(indentLevel) + "(exists ("
        for var in self.__existentialVars:
            ecqStr += " " + str(var)
        ecqStr += " )\n"
        for ecq in self.__ecqs:
            ecqStr += ecq.toADL(indentLevel + 1)
        ecqStr += __indent(indentLevel) + ")\n"
    elif len(self.__ecqs) > 0:
        ecqStr = __indent(indentLevel) + "(and\n"
        for ecq in self.__ecqs:
            ecqStr += ecq.toADL(indentLevel + 1)
        ecqStr += __indent(indentLevel) + ")\n"
    elif not self.__ucq is None:
        ecqStr += self.__ucq.toADL(indentLevel)
    elif isinstance(self.__equalityVar1, Variable) and isinstance(self.__equalityVar2, Variable):
        ecqStr = __indent(indentLevel) + "(= " + str(self.__equalityVar1) + " " + str(self.__equalityVar2) + " )\n"
    return ecqStr
def toSQL(self, indentLevel = 0, state = 0, additionalVarsEqualities = None, substitutions = None):
    """Translate this ECQ into an SQL query string.

    The translation dispatches on the query's form: the true query, a
    negation (NOT EXISTS over _domain), an existentially quantified ECQ,
    a conjunction of ECQs, an inner UCQ, or a variable equality.

    :param indentLevel: non-negative nesting depth, used only for
        pretty-printing the generated SQL.
    :param state: non-negative state index, forwarded to inner translations.
    :param additionalVarsEqualities: dict mapping a variable's SQL name to an
        external "table.column" expression; emitted as extra WHERE equalities
        that link this (inner) query to an enclosing one.
    :param substitutions: dict of extra WHERE equalities grounding variables
        to concrete values (e.g. results of a condition-action rule).
    :returns: the SQL text as a string.
    :raises Exception: if a parameter has the wrong type or a negative value.
    """
    #~ additionalVarsEqualities represent a dictionary in which are stored additional
    #~ WHERE statements for variables appearing in the query (among the free variables).
    #~ Such statements are used to link a query which appears inside another, and
    #~ which results affect the external ones (e.g. in the case of NOT(ecq)).
    #~ Example of an entry of additionalVarsEqualities:
    #~ additionalVarsEqualities["x"] = "externalTable.column
    #~ substitutions is a dictionary containing additional WHERE statements
    #~ for variables appearing in the query.
    #~ Such statements are used to ground the values that a variable can assume,
    #~ and usually comes from the execution of a condition-action rule, which
    #~ returns some results, and the action's parameters must be grounded with them.
    #~ FIX: the defaults used to be mutable dict literals ({}); replaced with the
    #~ None-sentinel idiom. The dicts are only read here, so behaviour is unchanged.
    if additionalVarsEqualities is None:
        additionalVarsEqualities = {}
    if substitutions is None:
        substitutions = {}
    if not isinstance(indentLevel, int) or indentLevel < 0:
        raise Exception("The parameter indentLevel must be a positive integer.")
    if not isinstance(state, int) or state < 0:
        raise Exception("The parameter state must be a positive integer.")
    if not isinstance(additionalVarsEqualities, dict):
        raise Exception("The parameter additionalVarsEqualities must be a dictionary.")
    logger = logging.getLogger("sqlPlanner")
    logger.info("ecq.toSQL")
    logger.info("Query: " + str(self))
    logger.info(str(self.__negated))
    logger.info(str(additionalVarsEqualities))
    logger.info(str(substitutions))
    logger.info(" ")
    #~ Local helper: one indentation unit (from the project's Syntax config)
    #~ repeated indentLevel times.
    def __indent(indentLevel):
        syntax = Syntax()
        return syntax.indent*indentLevel
    ecqString = ""
    if self.__trueQuery:
        #~ In SQL we have to translate a query with a query that return always 1
        ecqStr = __indent(indentLevel) + "SELECT COUNT(*) > 0 AS booleanValue FROM _domain"
        return ecqStr
    elif self.__negated:
        #~ The negation of an ECQ is translated by considering all possible
        #~ combinations of individuals for the free variables of the ECQ,
        #~ and removing the combinations that appear in the inner ECQ for the
        #~ same variables.
        #~ We achieve this by using NOT EXISTS and pushing in the inner ECQ
        #~ some additional equalities (through the dictionary additionalVarsEqualities)
        #~ in order to link the external and internalj queries.
        #~ If the inner ECQ is again a negation, than we remove both of them,
        #~ as it wouldn't affect the results.
        for ecq in self.__ecqs:
            if ecq.isNegated():
                for innerEcq in ecq.ecqs():
                    return innerEcq.toSQL(indentLevel, state = state, substitutions = substitutions)
        #~ If there are no free variables, then we have a boolean query,
        #~ and this means that the internal ECQ is boolean as well.
        #~ To return the negated result, we use the formula
        #~ IF(innerECQs.booleanValue = 1, 0, 1) as booleanValue
        if len(self.__freeVars) == 0:
            ecqString = __indent(indentLevel) + "SELECT IF(innerECQ.booleanValue = 1, 0, 1) AS booleanValue\n"
            ecqString += __indent(indentLevel) + "FROM (\n"
            ecqString += "".join([ecq.toSQL(indentLevel+1, state = state, substitutions = substitutions) for ecq in self.__ecqs])
            ecqString += "\n" + __indent(indentLevel) + ") innerECQ\n"
        else:
            #~ Define, for each free variable that appears in the query,
            #~ a link to the table _domain.
            #~ As these links will also be used to create the dictionary
            #~ additionalVarsEqualities to be passed in the inner ECQ, we must
            #~ create a unique naming of the tables (if we have two nested negations
            #~ both using the same names for the tables, it could pose a problem).
            #~ We achieve this by using the nesting level.
            #~ We need this both the SELECT statement (we use aliases for the
            #~ variables, e.g. SELECT _domain.individual as x) and FROM (as we
            #~ need a different _domain table for each variable).
            counter = 0
            newAdditionalVarsEqualities = {}
            aliases = {}
            for variable in self.__freeVars:
                aliases[variable.toSQL()] = "dom{0}_{1}".format(str(counter),str(indentLevel))
                newAdditionalVarsEqualities[variable.toSQL()] = aliases[variable.toSQL()] + ".individual"
                counter += 1
            ecqString = __indent(indentLevel) + "SELECT DISTINCT " + \
                        (",\n"+__indent(indentLevel+1)).join(["{0}.individual AS {1}".format(aliases[var], var) for var in aliases.keys()]) + "\n" + \
                        __indent(indentLevel) + "FROM " + \
                        (",\n"+__indent(indentLevel+1)).join(["_domain {0}".format(aliases[var]) for var in aliases.keys()]) + "\n" + \
                        __indent(indentLevel) + "WHERE NOT EXISTS (\n" + \
                        "".join([innerECQ.toSQL(indentLevel+1, state = state, additionalVarsEqualities=newAdditionalVarsEqualities, substitutions = substitutions) for innerECQ in self.__ecqs]) + "\n" + \
                        __indent(indentLevel) + ")\n"
            logger.info(" ")
            #~ We add the statements from additionalVarsEqualities
            for var in additionalVarsEqualities.keys():
                ecqString += __indent(indentLevel) + "AND {0} = {1}\n".format(newAdditionalVarsEqualities[var], additionalVarsEqualities[var])
        return ecqString
    elif len(self.__existentialVars ) > 0:
        #~ An ECQ that uses some existential variables can be translated
        #~ in the following way:
        #~ SELECT innerECQ.freeVariables
        #~ FROM ( inner ECQ tranlsated to SQL) innerECQ
        #~ If there are no free variables, then we have a boolean query
        #~ and we simply count whether the internal ECQ returns any result.
        if len(self.__freeVars) == 0:
            ecqString = __indent(indentLevel) + "SELECT COUNT(*) > 0 AS booleanValue\n"
            ecqString += __indent(indentLevel) + "FROM (\n"
            ecqString += "".join([ecq.toSQL(indentLevel+1, state = state, substitutions = substitutions) for ecq in self.__ecqs])
            ecqString += "\n" + __indent(indentLevel) + ") innerECQs\n"
        else:
            #~ First, we generate the aliases for each variable.
            #~ We need this in case there are elements in additionalVarsEqualities
            alias = {}
            for var in self.__freeVars:
                alias[var.toSQL()] = "innerECQ." + var.toSQL()
            ecqString = __indent(indentLevel) + "SELECT DISTINCT " + ",".join(["{0} AS {1}".format(alias[var], var) for var in alias.keys()]) + "\n"
            ecqString += __indent(indentLevel) + "FROM (\n"
            ecqString += "".join([ecq.toSQL(indentLevel+1, state = state, substitutions = substitutions) for ecq in self.__ecqs])
            ecqString += "\n" + __indent(indentLevel) + ") innerECQs\n"
            #~ We add the statements from additionalVarsEqualities and substitutions
            if len(additionalVarsEqualities) > 0:
                ecqString += __indent(indentLevel) + "WHERE " + \
                             (__indent(indentLevel) + "AND ").join(["{0} = {1}\n".format(alias[var], additionalVarsEqualities[var]) for var in additionalVarsEqualities.keys()])
    elif len(self.__ecqs ) > 0:
        #~ Conjunction of ECQs is done by translating the first ECQ to SQL, and
        #~ requiring its results to exist in the result set of the other ECQs.
        #~ To achieve this, we use EXISTS
        #~ If the ECQ is boolean (thus len(self.__freeVars) == 0),
        #~ we need to create a special SQL query.
        #~ All the ECQs in self.__ecqs are boolean, and their translations
        #~ yeald SQL queries that return either 1 or 0 (to be interpreted as
        #~ boolean values). If at least one ECQ returns 1 (True), then the
        #~ whole ECQ must return 1, 0 otherwise.
        #~ To do this we encapsulate the internal ECQs in a nested query,
        #~ while the external one takes care of checking if the nested one
        #~ contains at least one 1. Example:
        #~ SELECT COUNT(*) > 0 AS booleanValue
        #~ FROM ( inner ECQs SQL query) innerECQs
        #~ WHERE innerECQs.booleanValue = 1
        if len(self.__freeVars) == 0:
            ecqString = __indent(indentLevel) + "SELECT COUNT(*) > 0 AS booleanValue\n"
            ecqString += __indent(indentLevel) + "FROM (\n"
            ecqString += str("\n\n" + __indent(indentLevel+1) + "UNION\n\n").join([ecq.toSQL(indentLevel+1, state = state, substitutions = substitutions) for ecq in self.__ecqs])
            ecqString += "\n" + __indent(indentLevel) + ") innerECQs\n"
            ecqString += "\n" + __indent(indentLevel) + "WHERE innerECQs.booleanValue = 1\n"
        else:
            #~ ecqString = str("\n\n" + __indent(indentLevel) + "INTERSECT\n\n").join([ecq.toSQL(indentLevel, state = state, additionalVarsEqualities=additionalVarsEqualities, substitutions = substitutions) for ecq in self.__ecqs])
            #~ As these links will also be used to create the dictionary
            #~ additionalVarsEqualities to be passed in the inner ECQ, we must
            #~ create a unique naming of the tables (if we have two nested negations
            #~ both using the same names for the tables, it could pose a problem).
            #~ We achieve this by using the nesting level.
            #~ We need this both the SELECT statement (we use aliases for the
            #~ variables, e.g. SELECT _domain.individual as x) and FROM (as we
            #~ need a different _domain table for each variable).
            counter = 0
            newAdditionalVarsEqualities = {}
            aliases = {}
            for variable in self.__freeVars:
                aliases[variable.toSQL()] = "dom{0}_{1}".format(str(counter),str(indentLevel))
                newAdditionalVarsEqualities[variable.toSQL()] = aliases[variable.toSQL()] + ".individual"
                counter += 1
            whereStatements = set()
            whereStatements.update(["EXISTS (\n" + \
                                    innerECQ.toSQL(indentLevel+1, state = state, additionalVarsEqualities=newAdditionalVarsEqualities, substitutions = substitutions) + \
                                    "\n" + __indent(indentLevel) + ")" for innerECQ in self.__ecqs])
            ecqString = __indent(indentLevel) + "SELECT DISTINCT " + \
                        (",\n"+__indent(indentLevel+1)).join(["{0}.individual AS {1}".format(aliases[var], var) for var in aliases.keys()]) + "\n" + \
                        __indent(indentLevel) + "FROM " + \
                        (",\n"+__indent(indentLevel+1)).join(["_domain {0}".format(aliases[var]) for var in aliases.keys()]) + "\n" + \
                        __indent(indentLevel) + "WHERE " + ("\n" + __indent(indentLevel) + "AND ").join(whereStatements)
            #~ We add the statements from additionalVarsEqualities
            #~ for var in additionalVarsEqualities.keys():
            #~     ecqString += __indent(indentLevel) + "AND {0} = {1}\n".format(newAdditionalVarsEqualities[var], additionalVarsEqualities[var])
            #~ We add the statements from additionalVarsEqualities and substitutions
            if len(additionalVarsEqualities) > 0:
                ecqString += __indent(indentLevel) + "AND " + \
                             (__indent(indentLevel) + "AND ").join(["{0} = {1}\n".format(newAdditionalVarsEqualities[var], additionalVarsEqualities[var]) for var in additionalVarsEqualities.keys()])
        return ecqString
        #~ return ecqString
        #~ ---------------------------------------------------
        #~ NOTE(review): everything from here down to the next `elif` is an
        #~ older, UNREACHABLE implementation of this branch (the `return`
        #~ above always fires first). The separator line above had lost its
        #~ `#~` comment marker, which made this module a SyntaxError; the
        #~ marker is restored and the legacy code is kept verbatim.
        #~ Create a copy of the inner ECQs list and add an alias for each of them.
        #~ To create an alias, we just add a counter at the end of the name "innerECQ".
        #~ We do not consider equalities.
        innerECQAliases = dict()
        counter = 0
        for innerECQ in self.__ecqs:
            innerECQAliases[("innerECQ"+str(counter))] = innerECQ
            counter += 1
        #~ Define, for each variable that appears in the query (both free and
        #~ existential), to which innerECQ they are linked.
        #~ We need this both for the SELECT statement (we use aliases for the
        #~ variables, e.g. SELECT innerECQ1.x as x), and for inner equalities
        #~ (e.g. we translate x = y in innerECQ1.x != innerECQ3.y)
        varsStatement = dict()
        for (variable, alias) in product(self.__freeVars, innerECQAliases.keys()):
            if variable in innerECQAliases[alias].freeVars() or \
               variable in innerECQAliases[alias].existentialVars():
                #~ The variable appears in free vars of this query atom
                varsStatement[variable.toSQL()] = alias + "." + variable.toSQL()
                #~ Pass to the next variable
                break
        for (variable, alias) in product(self.__existentialVars, innerECQAliases.keys()):
            if variable in innerECQAliases[alias].freeVars() or \
               variable in innerECQAliases[alias].existentialVars():
                #~ The variable appears in free vars of this query atom
                varsStatement[variable.toSQL()] = alias + "." + variable.toSQL()
                #~ Pass to the next variable
                break
        #~ Definition of the SELECT section of the SQL query
        if len(self.__freeVars) > 0:
            #~ The query contains free variables, thus the SELECT section
            #~ of the SQL query is composed by all the variables appearing
            #~ as aliases of the respective tables and columns
            sqlSelect = __indent(indentLevel) + "SELECT DISTINCT " + \
                        str(",\n" + __indent(indentLevel+1)).join([(varsStatement[var.toSQL()] + " AS " + var.toSQL()) for var in self.__freeVars])
        else:
            #~ The query is boolean, thus there are no free variables.
            #~ To represent this in SQL, we make the SQL query
            #~ return either 1 or 0, depending if there are results or not.
            #~ To achieve this, we use the function COUNT(*) > 0, and
            #~ give it the alias of "booleanValue"
            sqlSelect = __indent(indentLevel) + "SELECT IF(COUNT(*) > 0, 0, 1) AS booleanValue"
            #~ sqlSelect = __indent(indentLevel) + "SELECT DISTINCT COUNT(*) > 0 AS booleanValue"
        #~ The FROM section of the SQL query is composed by all the
        #~ query atoms terms and their aliases, i.e.:
        #~ FROM (ECQ1) Alias1,
        #~      (ECQ2) Alias2,
        #~      (ECQ3) Alias3,
        #~      ...
        sqlFrom = __indent(indentLevel) + "FROM " + \
                  str(",\n" + __indent(indentLevel+1)).join( \
                  [("(\n" + \
                  innerECQAliases[alias].toSQL(indentLevel+2, state = state, substitutions = substitutions) + \
                  "\n" + __indent(indentLevel+1) + ") " + str(alias)) for alias in innerECQAliases.keys()] \
                  )
        #~ The WHERE and AND section of the SQL query is done by:
        #~ - checking whether each combination of internal ecqs (we do not
        #~ consider couples of the same ecq) share one of the free variables.
        #~ In such a case we generate a properly built equality.
        whereStatements = set()
        for var in additionalVarsEqualities.keys():
            #~ The equality regards a variable (e.g., a parameter of an action)
            #~ that appears in two SQL queries (one external, and one internal)
            #~ and has to be linked in these queries through the aliases used for
            #~ that variable in the queries.
            whereStatements.add("{0} = {1}".format(varsStatement[var], str(additionalVarsEqualities[var])) )
        for (alias1, alias2) in combinations(innerECQAliases.keys(),2):
            #~ Check if the two internal ecqs share a variable.
            for var in self.__freeVars:
                if var in innerECQAliases[alias1].freeVars() and var in innerECQAliases[alias2].freeVars():
                    #~ They share a variable.
                    #~ We generate the equality:
                    #~ alias1.var = alias2.var
                    whereStatements.add("{1}.{0} = {2}.{0}".format(var.toSQL(), alias1, alias2) )
            for var in self.__existentialVars:
                if var in innerECQAliases[alias1].freeVars() and var in innerECQAliases[alias2].freeVars():
                    #~ They share a variable.
                    #~ We generate the equality:
                    #~ alias1.var = alias2.var
                    whereStatements.add("{1}.{0} = {2}.{0}".format(var.toSQL(), alias1, alias2) )
        #~ If the query is boolean, then the internal ecqs are boolean as well.
        #~ We force the internal ecqs to return results only if it is 0 (thus, they are False),
        #~ so that the query can count if any internal ecq is False, and thus return False (as
        #~ it is a conjunction)
        if len(self.__freeVars) == 0:
            for alias in innerECQAliases.keys():
                whereStatements.add("{0}.booleanValue = 0".format(alias))
        #~ Create the WHERE statement
        sqlWhere = __indent(indentLevel) + "WHERE " + str("\n" + __indent(indentLevel) + "AND ").join(whereStatements)
        return sqlSelect + "\n" + sqlFrom + "\n" + sqlWhere
    elif self.__ucq is not None:
        #~ Wrap the inner UCQ and re-export its free variables under aliases.
        ecqString = __indent(indentLevel) + "SELECT DISTINCT " + \
                    (",\n"+__indent(indentLevel+1)).join(["innerUCQ.{0} AS {1}".format(var.toSQL(),var.toSQL()) for var in self.__freeVars]) + "\n" + \
                    __indent(indentLevel) + "FROM (\n" + \
                    self.__ucq.toSQL(indentLevel = indentLevel+1, state = state, substitutions = substitutions) + \
                    "\n" + __indent(indentLevel) + ") innerUCQ"
        #~ We add the statements from additionalVarsEqualities
        #~ if len(additionalVarsEqualities) > 0:
        #~     ecqString += __indent(indentLevel) + "WHERE " + \
        #~         (__indent(indentLevel) + "AND ").join(["innerUCQ.{0} = {1}\n".format(var, additionalVarsEqualities[var]) for var in additionalVarsEqualities.keys()])
        #~ We add the statements from additionalVarsEqualities and substitutions
        if len(additionalVarsEqualities) > 0:
            ecqString += "\n" + __indent(indentLevel) + "WHERE " + \
                         (__indent(indentLevel) + "AND ").join(["innerUCQ.{0} = {1}\n".format(var, additionalVarsEqualities[var]) for var in additionalVarsEqualities.keys()])
    elif isinstance(self.__equalityVar1, Variable) and isinstance(self.__equalityVar2, Variable):
        #~ Variable equality: join _domain with itself on equal individuals.
        ecqString = __indent(indentLevel) + "SELECT DISTINCT dom1.individual AS {0},\n".format(self.__equalityVar1.toSQL()) + \
                    __indent(indentLevel+1) + "dom2.individual as {0}\n".format(self.__equalityVar2.toSQL()) + \
                    __indent(indentLevel) + "FROM _domain dom1, _domain dom2\n" + \
                    __indent(indentLevel) + "WHERE dom1.individual = dom2.individual"
        if len(additionalVarsEqualities) > 0:
            #~ We add the statements from additionalVarsEqualities
            #~ Since the inequality only contains two variables, then additionalVarsEqualities
            #~ can contain only two keys.
            if len(additionalVarsEqualities) != 2:
                raise Exception("Something is wrong. The dictionary additionalVarsEqualities does not contain 2 values, but {0}.\n".format(len(additionalVarsEqualities)) + \
                                "This is not possible since it is passed to the equality: {0}".format(self.toDL()))
            ecqString += "\n" + __indent(indentLevel) + "AND dom1.individual = {0}\n".format(additionalVarsEqualities[self.__equalityVar1.toSQL()])
            ecqString += __indent(indentLevel) + "AND dom2.individual = {0}\n".format(additionalVarsEqualities[self.__equalityVar2.toSQL()])
        #~ We add the statements from substitutions
        if len(substitutions) > 0:
            #~ We need to check if the variables of the eqaulity appears in the substitutions,
            #~ because it could be the case that they are not involved.
            if self.__equalityVar1.toSQL() in substitutions.keys():
                ecqString += "\n" + __indent(indentLevel) + "AND dom1.individual = {0}\n".format(substitutions[self.__equalityVar1.toSQL()])
            if self.__equalityVar2.toSQL() in substitutions.keys():
                ecqString += __indent(indentLevel) + "AND dom2.individual = {0}\n".format(substitutions[self.__equalityVar2.toSQL()])
    return ecqString
| |
########################################
# A brief introduction to numpy arrays #
########################################
#
# Prereqs: Basic python. "import", built-in data types (numbers, lists,
# strings), range
#
# This short tutorial is mostly about introducing numpy arrays, how they're
# different from basic python lists/tuples, and the various ways you can
# manipulate them. It's intended to be both a runnable python script, and
# a step by step tutorial.
#
# This tutorial does NOT cover
# 1) Installing numpy/dependencies. For that see
#
# http://docs.scipy.org/doc/numpy/user/install.html
#
# 2) Basic python. This includes getting, installing, running the python
# interpreter, the basic python data types (strings, numbers, sequences),
# if statements, or for loops. If you're new to python an excellent place
# to start is here:
#
# http://docs.python.org/2/tutorial/
#
# 3) Any numpy libraries in depth. It may include references to utility
# functions where necessary, but this is strictly a tutorial for
# beginners. More advanced documentation is available here:
#
# (Users guide)
# http://docs.scipy.org/doc/numpy/user/index.html
# (Reference documentation)
# http://docs.scipy.org/doc/numpy/reference/
#
#
#
#
## Let's get started!
print("Importing numpy")
import numpy as np
## This loads the numpy library and lets us refer to it by the shorthand "np",
## which is the convention used in the numpy documentation and in many
## online tutorials/examples
print("Creating arrays")
## Now lets make an array to play around with. You can make numpy arrays in
## a number of ways,
## Filled with zeros:
zeroArray = np.zeros( (2,3) ) # [[ 0. 0. 0.]
print(zeroArray) # [ 0. 0. 0.]]
## Or ones:
oneArray = np.ones( (2,3) ) # [[ 1. 1. 1.]
print(oneArray) # [ 1. 1. 1.]]
## Or filled with junk:
emptyArray = np.empty( (2,3) )
print(emptyArray)
## Note, emptyArray might look random, but it's just uninitialized which means
## you shouldn't count on it having any particular data in it, even random
## data! If you do want random data you can use random():
randomArray = np.random.random( (2,3) )
print(randomArray)
## If you're following along and trying these commands out, you should have
## noticed that making randomArray took a lot longer than emptyArray. That's
## because np.random.random(...) is actually using a random number generator
## to fill in each of the spots in the array with a randomly sampled number
## from 0 to 1.
## You can also create an array by hand:
foo = [ [1,2,3],
[4,5,6]]
myArray = np.array(foo) # [[1 2 3]
print(myArray) # [4 5 6]]
print("Reshaping arrays")
## Of course, if you're typing out a range for a larger matrix, it's easier to
## use arange(...):
rangeArray = np.arange(6,12).reshape( (2,3) ) # [[ 6 7 8]
print(rangeArray) # [ 9 10 11]]
## there's two things going on here. First, the arange(...) function returns a
## 1D array similar to what you'd get from using the built-in python function
## range(...) with the same arguments, except it returns a numpy array
## instead of a list.
## Note: like range(...), the stop value (12) is NOT included.
print(np.arange(6,12)) # [ 6 7 8 9 10 11]
## the reshape method takes the data in an existing array, and stuffs it into
## an array with the given shape and returns it.
print(rangeArray.reshape( (3,2) )) # [[ 6 7]
# [ 8 9]
# [10 11]]
#The original array doesn't change though.
print(rangeArray) # [[ 6 7 8]
# [ 9 10 11]]
## When you use reshape(...) the total number of things in the array must stay
## the same. So reshaping an array with 2 rows and 3 columns into one with
## 3 rows and 2 columns is fine, but 3x3 or 1x5 won't work
#print rangeArray.reshape( (3,3) ) #ERROR
squareArray = np.arange(1,10).reshape( (3,3) ) #this is fine, 9 elements
print("Accessing array elements")
## Accessing an array is also pretty straight forward. You access a specific
## spot in the table by referring to its row and column inside square braces
## after the array:
print(rangeArray[0,1]) #7
## Note that row and column numbers start from 0, not 1! Numpy also lets you
## refer to ranges inside an array:
print(rangeArray[0,0:2]) #[6 7]
print(squareArray[0:2,0:2]) #[[1 2] # the top left corner of squareArray
# [4 5]]
## These ranges work just like slices and python lists. n:m:t specifies a range
## that starts at n, and stops before m, in steps of size t. If any of these
## are left off, they're assumed to be the start, the end+1, and 1 respectively
print(squareArray[:,0:3:2]) #[[1 3] #skip the middle column
# [4 6]
# [7 9]]
## Also like python lists, you can assign values to specific positions, or
## ranges of values to slices
squareArray[0,:] = np.array(list(range(1,4))) #set the first row to 1,2,3
squareArray[1,1] = 0 # set the middle spot to zero
squareArray[2,:] = 1 # set the last row to ones
print(squareArray) # [[1 2 3]
# [4 0 6]
# [1 1 1]]
## Something new to numpy arrays is indexing using an array of indices:
fibIndices = np.array( [1, 1, 2, 3] )
randomRow = np.random.random( (10,1) ) # an array of 10 random numbers
print(randomRow)
print(randomRow[fibIndices]) # the first, first, second and third element of
# randomRow
## You can also use an array of true/false values to index:
boolIndices = np.array( [[ True, False, True],
[False, True, False],
[ True, False, True]] )
print(squareArray[boolIndices]) # a 1D array with the selected values
# [1 3 0 1 1]
## It gets a little more complicated with 2D (and higher) arrays. You need
## two index arrays for a 2D array:
rows = np.array( [[0,0],[2,2]] ) #get the corners of our square array
cols = np.array( [[0,2],[0,2]] )
print(squareArray[rows,cols]) #[[1 3]
# [1 1]]
boolRows = np.array( [False, True, False] ) # just the middle row
boolCols = np.array( [True, False, True] ) # Not the middle column
print(squareArray[boolRows,boolCols]) # [4 6]
print("Operations on arrays")
## One useful trick is to create a boolean matrix based on some test and use
## that as an index in order to get the elements of a matrix that pass the
## test:
sqAverage = np.average(squareArray) # average(...) returns the average of all
# the elements in the given array
betterThanAverage = squareArray > sqAverage
print(betterThanAverage) #[[False False True]
# [ True False True]
# [False False False]]
print(squareArray[betterThanAverage]) #[3 4 6]
## Indexing like this can also be used to assign values to elements of the
## array. This is particularly useful if you want to filter an array, say by
## making sure that all of its values are above/below a certain threshold:
sqStdDev = np.std(squareArray) # std(...) returns the standard deviation of
# all the elements in the given array
clampedSqArray = np.array(squareArray.copy(), dtype=float)
# make a copy of squareArray that will
# be "clamped". It will only contain
# values within one standard deviation
# of the mean. Values that are too low
# or too high will be set to the min
# and max respectively. We set
# dtype=float because sqAverage
# and sqStdDev are floating point
# numbers, and we don't want to
# truncate them down to integers.
clampedSqArray[ (squareArray-sqAverage) > sqStdDev ] = sqAverage+sqStdDev
clampedSqArray[ (squareArray-sqAverage) < -sqStdDev ] = sqAverage-sqStdDev
print(clampedSqArray) # [[ 1. 2. 3. ]
# [ 3.90272394 0.31949828 3.90272394]
# [ 1. 1. 1. ]]
## Multiplying and dividing arrays by numbers does what you'd expect. It
## multiplies/divides element-wise
print(squareArray * 2) # [[ 2 4 6]
# [ 8 0 12]
# [ 2 2 2]]
## Addition works similarly:
print(squareArray + np.ones( (3,3) )) #[[2 3 4]
# [5 1 7]
# [2 2 2]]
## Multiplying two arrays together (of the same size) is also element wise
print(squareArray * np.arange(1,10).reshape( (3,3) )) #[[ 1 4 9]
# [16 0 36]
# [ 7 8 9]]
## Unless you use the dot(...) function, which does matrix multiplication
## from linear algebra:
matA = np.array( [[1,2],[3,4]] )
matB = np.array( [[5,6],[7,8]] )
print(np.dot(matA,matB)) #[[19 22]
# [43 50]]
## And thats it! There's a lot more to the numpy library, and there are a few
## things I skipped over here, such as what happens when array dimensions
## don't line up when you're indexing or multiplying them together, so if
## you're interested, I strongly suggest you head over to the scipy wiki's
## numpy tutorial for a more in depth look at using numpy arrays:
##
## http://www.scipy.org/Tentative_NumPy_Tutorial
| |
# Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.shtiker.EffectsPage
from panda3d.core import Shader, TextNode, Vec4
from direct.gui.DirectGui import *
from direct.showbase import PythonUtil
from toontown.toonbase import TTLocalizer
from toontown.toontowngui.FrameColorPicker import FrameColorPicker
from BookElements import *
import ShtikerPage
# Enum of tab indices for the effects page; values are 0..3 in declaration order.
PageMode = PythonUtil.Enum('Bloom, Ink, Invert, Sharpen')
# RGBA colors for the four DirectButton image states (normal/click/rollover/disabled).
normalColor = (1.0, 1.0, 1.0, 1.0)
clickColor = (0.8, 0.8, 0.0, 1.0)
rolloverColor = (0.15, 0.82, 1.0, 1.0)
# NOTE: "diabled" (sic) typo is load-bearing — this name is referenced as-is below.
diabledColor = (1.0, 0.98, 0.15, 1.0)
class EffectsPage(ShtikerPage.ShtikerPage):
    """Shtiker-book page exposing post-process shader effects.

    Shows one tab button per effect (Bloom, Ink, Invert, Sharpen); each tab
    drives a ShaderTabPage subclass holding that effect's controls.
    """

    def __init__(self):
        ShtikerPage.ShtikerPage.__init__(self)
        self.loaded = False        # guards load()/unload() idempotence
        self.mode = PageMode.Bloom # index of the currently selected tab
        self.tabPages = []         # ShaderTabPage instances, parallel to self.tabs

    def load(self):
        """Build the title label, the four tab buttons and the tab sub-pages.

        No-op when already loaded. Tab art comes from the fishing-book GUI
        model; the model node is released after the buttons are built.
        """
        if self.loaded:
            return
        else:
            gui = loader.loadModel('phase_3.5/models/gui/fishingBook')
            self.title = DirectLabel(parent=self, relief=None, text=TTLocalizer.EffectsPageTitle, text_scale=0.12, pos=(0, 0, 0.62))
            # One button per PageMode value; extraArgs binds the tab's enum index.
            self.tabs = [DirectButton(parent=self, relief=None, pos=(-0.75, 0, 0.775), text=TTLocalizer.EffectsBloom, text_scale=0.08, text_align=TextNode.ALeft, image=gui.find('**/tabs/polySurface1'), image_pos=(0.55, 1, -0.91), image_hpr=(0, 0, -90), image_scale=(0.033, 0.033, 0.035), image_color=normalColor, image1_color=clickColor, image2_color=rolloverColor, image3_color=diabledColor, text_fg=Vec4(0.2, 0.1, 0, 1), command=self.setMode, extraArgs=[PageMode.Bloom]),
             DirectButton(parent=self, relief=None, pos=(-0.33, 0, 0.775), text=TTLocalizer.EffectsInk, text_scale=0.07, text_pos=(-0.05, 0), text_align=TextNode.ALeft, image=gui.find('**/tabs/polySurface2'), image_pos=(0.12, 1, -0.91), image_hpr=(0, 0, -90), image_scale=(0.033, 0.033, 0.035), image_color=normalColor, image1_color=clickColor, image2_color=rolloverColor, image3_color=diabledColor, text_fg=Vec4(0.2, 0.1, 0, 1), command=self.setMode, extraArgs=[PageMode.Ink]),
             DirectButton(parent=self, relief=None, pos=(0.09, 0, 0.775), text=TTLocalizer.EffectsInvert, text_scale=0.07, text_pos=(-0.0275, 0), text_align=TextNode.ALeft, image=gui.find('**/tabs/polySurface3'), image_pos=(-0.28, 1, -0.91), image_hpr=(0, 0, -90), image_scale=(0.033, 0.033, 0.035), image_color=normalColor, image1_color=clickColor, image2_color=rolloverColor, image3_color=diabledColor, text_fg=Vec4(0.2, 0.1, 0, 1), command=self.setMode, extraArgs=[PageMode.Invert]),
             DirectButton(parent=self, relief=None, pos=(0.51, 0, 0.775), text=TTLocalizer.EffectsSharpen, text_scale=0.08, text_align=TextNode.ALeft, image=gui.find('**/tabs/polySurface3'), image_pos=(-0.28, 1, -0.91), image_hpr=(0, 0, -90), image_scale=(0.033, 0.033, 0.035), image_color=normalColor, image1_color=clickColor, image2_color=rolloverColor, image3_color=diabledColor, text_fg=Vec4(0.2, 0.1, 0, 1), command=self.setMode, extraArgs=[PageMode.Sharpen])]
            self.tabPages = [BloomTabPage(self),
             InkTabPage(self),
             InvertTabPage(self),
             SharpenTabPage(self)]
            # All sub-pages start hidden; setMode() shows the active one.
            for page in self.tabPages:
                page.hide()
            gui.removeNode()
            self.loaded = True
            return

    def enter(self):
        """Show the page, forcing the current tab to (re)enter."""
        self.setMode(self.mode, True)
        self.show()

    def exit(self):
        """Hide the page and exit every sub-page."""
        for page in self.tabPages:
            page.exit()
        self.ignoreAll()
        self.hide()

    def unload(self):
        """Tear down GUI elements built by load(); no-op when not loaded."""
        if not self.loaded:
            return
        self.title.destroy()
        for tab in self.tabs:
            tab.destroy()
        for page in self.tabPages:
            page.unload()
        del self.tabs
        self.loaded = False
        ShtikerPage.ShtikerPage.unload(self)

    def setMode(self, mode, force = False):
        """Select tab `mode` (a PageMode index).

        Skips work when the mode is unchanged, unless `force` is set.
        Disables the active tab button, enters the matching sub-page and
        exits the others, and retitles the page after the active tab.
        """
        if self.mode == mode and not force:
            return
        messenger.send('wakeup')
        self.mode = mode
        # PageMode values double as list indices into self.tabs/self.tabPages.
        for i, tab in enumerate(self.tabs):
            tab['state'] = DGG.DISABLED if i == mode else DGG.NORMAL
        for i, page in enumerate(self.tabPages):
            page.enter() if i == mode else page.exit()
        self.title['text'] = self.tabs[mode]['text']
class ShaderTabPage(DirectFrame):
    """Base frame for one shader-effect tab.

    Provides the enable/disable label+button shared by every tab.
    Subclasses must define class attributes `shader` (settings key),
    `enabledText` and `disabledText` (label strings).
    """

    def __init__(self, parent = aspect2d):
        DirectFrame.__init__(self, parent=parent, relief=None)
        self.parent = parent
        self.loaded = False  # guards load()/unload() idempotence
        self.load()
        return

    def destroy(self):
        # Drop the back-reference before tearing down the frame.
        self.parent = None
        DirectFrame.destroy(self)
        return

    def load(self):
        """Build the shared enable toggle; returns True on first build,
        None when already loaded (subclasses test this to bail out)."""
        if self.loaded:
            return
        self.enabledLabel = Label(parent=self, row=0)
        self.enabledButton = Button(parent=self, row=0, command=self.__toggleEnable)
        self.loaded = True
        return True

    def unload(self):
        """Destroy the shared widgets; returns True when teardown ran."""
        if not self.loaded:
            return
        self.enabledLabel.destroy()
        self.enabledButton.destroy()
        del self.enabledLabel
        del self.enabledButton
        self.loaded = False
        return True

    def enter(self):
        """Show the tab and refresh the enable toggle from settings."""
        self.show()
        self.__updateEnabled()

    def exit(self):
        self.hide()

    def __updateEnabled(self):
        # Mirror the persisted enabled flag into label/button text.
        # NOTE(review): `settings` appears to be a project-global dict keyed
        # by shader name — confirm against the settings module.
        if settings[self.shader].get('enabled', False):
            self.enabledLabel['text'] = self.enabledText
            self.enabledButton['text'] = TTLocalizer.OptionsPageToggleOff
        else:
            self.enabledLabel['text'] = self.disabledText
            self.enabledButton['text'] = TTLocalizer.OptionsPageToggleOn

    def __toggleEnable(self):
        # Flip the persisted flag via the shader manager, then refresh the UI.
        base.shaderMgr.setValue(self.shader, 'enabled', not settings[self.shader].get('enabled', False))
        self.__updateEnabled()
class BloomTabPage(ShaderTabPage):
    """Tab page for the bloom shader: trigger thresholds, desaturation,
    intensity sliders."""

    shader = 'bloom'
    enabledText = TTLocalizer.EffectsBloomEnabled
    disabledText = TTLocalizer.EffectsBloomDisabled

    def load(self):
        # Base load returns falsy when already loaded — skip rebuilding.
        if not ShaderTabPage.load(self):
            return
        self.minLabel = Label(parent=self, row=1, text=TTLocalizer.EffectsBloomMinTrigger)
        self.minSlider = Slider(parent=self, row=1, value=settings[self.shader].get('minTrigger', 0.6), range=(0.0, 1.0), command=self.__editMinTrigger)
        self.maxLabel = Label(parent=self, row=2, text=TTLocalizer.EffectsBloomMaxTrigger)
        self.maxSlider = Slider(parent=self, row=2, value=settings[self.shader].get('maxTrigger', 1.0), range=(0.0, 1.0), command=self.__editMaxTrigger)
        self.desatLabel = Label(parent=self, row=3, text=TTLocalizer.EffectsBloomDesaturation)
        self.desatSlider = Slider(parent=self, row=3, value=settings[self.shader].get('desaturation', 0.6), range=(0.0, 1.0), command=self.__editDesaturation)
        self.intensityLabel = Label(parent=self, row=4, text=TTLocalizer.EffectsBloomIntensity)
        self.intensitySlider = Slider(parent=self, row=4, value=settings[self.shader].get('intensity', 1.0), range=(0.0, 1.0), command=self.__editIntensity)

    def unload(self):
        if not ShaderTabPage.unload(self):
            return
        self.minLabel.destroy()
        self.minSlider.destroy()
        self.maxLabel.destroy()
        self.maxSlider.destroy()
        self.desatLabel.destroy()
        self.desatSlider.destroy()
        self.intensityLabel.destroy()
        self.intensitySlider.destroy()
        del self.minLabel
        del self.minSlider
        del self.maxLabel
        del self.maxSlider
        del self.desatLabel
        del self.desatSlider
        del self.intensityLabel
        del self.intensitySlider

    def exit(self):
        ShaderTabPage.exit(self)
        # Apply any slider edits made while the tab was open.
        base.shaderMgr.reloadBloom()

    # Slider callbacks: push the current slider value into the shader manager.
    def __editMinTrigger(self):
        base.shaderMgr.setValue(self.shader, 'minTrigger', self.minSlider['value'])

    def __editMaxTrigger(self):
        base.shaderMgr.setValue(self.shader, 'maxTrigger', self.maxSlider['value'])

    def __editDesaturation(self):
        base.shaderMgr.setValue(self.shader, 'desaturation', self.desatSlider['value'])

    def __editIntensity(self):
        base.shaderMgr.setValue(self.shader, 'intensity', self.intensitySlider['value'])
class InkTabPage(ShaderTabPage):
    """Tab page for the ink (outline) shader: line width slider and a
    color-picker popup."""

    shader = 'ink'
    enabledText = TTLocalizer.EffectsInkEnabled
    disabledText = TTLocalizer.EffectsInkDisabled

    def load(self):
        # Base load returns falsy when already loaded — skip rebuilding.
        if not ShaderTabPage.load(self):
            return
        else:
            self.colorPicker = None  # FrameColorPicker popup, created on demand
            self.widthLabel = Label(parent=self, row=1, text=TTLocalizer.EffectsInkWidth)
            self.widthSlider = Slider(parent=self, row=1, value=settings[self.shader].get('width', 1.0), range=(0.3, 2.0), command=self.__editWidth)
            self.colorLabel = Label(parent=self, row=2, text=TTLocalizer.EffectsInkColor)
            self.colorButton = Button(parent=self, row=2, text=TTLocalizer.EffectsInkChoose, command=self.__openColor)
            return

    def unloadColorPicker(self):
        """Destroy the color-picker popup if it is open."""
        if self.colorPicker:
            self.colorPicker.destroy()
            self.colorPicker = None
        return

    def unload(self):
        if not ShaderTabPage.unload(self):
            return
        self.unloadColorPicker()
        self.widthLabel.destroy()
        self.widthSlider.destroy()
        self.colorLabel.destroy()
        self.colorButton.destroy()
        del self.widthLabel
        del self.widthSlider
        del self.colorLabel
        del self.colorButton

    def exit(self):
        ShaderTabPage.exit(self)
        self.unloadColorPicker()
        # Apply any edits made while the tab was open.
        base.shaderMgr.reloadInk()

    def __editWidth(self):
        base.shaderMgr.setValue(self.shader, 'width', self.widthSlider['value'])

    def __openColor(self):
        # Only one picker at a time; __editColor clears the reference.
        if not self.colorPicker:
            self.colorPicker = FrameColorPicker(0, 1, 0, 1, self.__editColor)

    def __editColor(self, color):
        # Picker callback: a falsy color means the user cancelled.
        if color:
            base.shaderMgr.setValue(self.shader, 'color', color)
        self.colorPicker = None
        return
class InvertTabPage(ShaderTabPage):
    """Settings tab page for the colour-invert shader (toggle only, no options)."""

    shader = 'invert'
    enabledText = TTLocalizer.EffectsInvertEnabled
    disabledText = TTLocalizer.EffectsInvertDisabled

    def exit(self):
        """Leave the page and re-apply the invert shader."""
        ShaderTabPage.exit(self)
        base.shaderMgr.reloadInvert()
class SharpenTabPage(ShaderTabPage):
    """Settings tab page for the sharpen shader (single 'amount' slider)."""

    shader = 'sharpen'
    enabledText = TTLocalizer.EffectsSharpenEnabled
    disabledText = TTLocalizer.EffectsSharpenDisabled

    def load(self):
        """Build the amount slider; no-op if the base page declines."""
        if not ShaderTabPage.load(self):
            return
        self.amountLabel = Label(parent=self, row=1, text=TTLocalizer.EffectsSharpenAmount)
        self.amountSlider = Slider(parent=self, row=1, value=settings[self.shader].get('amount', 0.0), range=(-10, 10), command=self.__editAmount)

    def unload(self):
        """Destroy the amount controls; no-op if the base page declines."""
        if not ShaderTabPage.unload(self):
            return
        self.amountLabel.destroy()
        self.amountSlider.destroy()
        del self.amountLabel
        del self.amountSlider

    def exit(self):
        """Leave the page and re-apply the sharpen shader."""
        ShaderTabPage.exit(self)
        base.shaderMgr.reloadSharpen()

    def __editAmount(self):
        # Slider callback: push the new sharpen amount to the shader manager.
        base.shaderMgr.setValue(self.shader, 'amount', self.amountSlider['value'])
| |
from __future__ import unicode_literals
import logging
from captcha.fields import CaptchaField
from django.conf import settings
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.db import models
from django.shortcuts import render
from modelcluster.fields import ParentalKey
from modelcluster.tags import ClusterTaggableManager
from taggit.models import TaggedItemBase
from wagtail.contrib.wagtailroutablepage.models import RoutablePageMixin, route
from wagtail.wagtailadmin.edit_handlers import (FieldPanel, FieldRowPanel,
InlinePanel, MultiFieldPanel,
StreamFieldPanel)
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailcore.models import Page
from wagtail.wagtailforms.edit_handlers import FormSubmissionsPanel
from wagtail.wagtailforms.forms import FormBuilder
from wagtail.wagtailforms.models import AbstractEmailForm, AbstractFormField
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtailsearch import index
from wagtail.wagtailsnippets.edit_handlers import SnippetChooserPanel
from .behaviours import WithContactFields, WithFeedImage, WithStreamField
from .snippets import WorkCategory
logger = logging.getLogger(__name__)
def _paginate(request, items):
    """Return the requested page of *items*, clamping out-of-range pages.

    Non-integer page numbers fall back to page 1; pages past the end
    fall back to the last page. Page size comes from settings.ITEMS_PER_PAGE.
    """
    page_number = request.GET.get('page', 1)
    paginator = Paginator(items, settings.ITEMS_PER_PAGE)
    try:
        return paginator.page(page_number)
    except PageNotAnInteger:
        return paginator.page(1)
    except EmptyPage:
        return paginator.page(paginator.num_pages)
class HomePage(Page, WithStreamField):
    """Site home page with a StreamField body."""

    search_fields = Page.search_fields + [
        index.SearchField('body'),
    ]

    # Page types that may be created beneath the home page.
    subpage_types = [
        'BlogIndexPage', 'FormPage', 'IndexPage', 'OrganisationIndexPage',
        'PersonIndexPage', 'RichTextPage', 'WorkIndexPage',
        'sup.PublicationIdeaPage'
    ]

# Admin panels are attached after the class definition (Wagtail convention).
HomePage.content_panels = [
    FieldPanel('title', classname='full title'),
    StreamFieldPanel('body'),
]
HomePage.promote_panels = Page.promote_panels
class IndexPage(Page, WithStreamField):
    """Generic section index page with a StreamField body."""

    search_fields = Page.search_fields + [
        index.SearchField('body'),
    ]

    # Index pages may nest further indexes and content pages.
    subpage_types = [
        'FormPage', 'IndexPage', 'RichTextPage', 'sup.PublicationIdeaPage'
    ]

IndexPage.content_panels = [
    FieldPanel('title', classname='full title'),
    StreamFieldPanel('body'),
]
IndexPage.promote_panels = Page.promote_panels
class RichTextPage(Page, WithStreamField):
    """Leaf content page — plain StreamField body, no children allowed."""

    search_fields = Page.search_fields + [
        index.SearchField('body'),
    ]

    subpage_types = []

RichTextPage.content_panels = [
    FieldPanel('title', classname='full title'),
    StreamFieldPanel('body'),
]
RichTextPage.promote_panels = Page.promote_panels
class PersonIndexPage(Page, WithStreamField):
    """Listing page that holds PersonPage children."""

    search_fields = Page.search_fields + [
        index.SearchField('body'),
    ]

    subpage_types = ['PersonPage']

PersonIndexPage.content_panels = [
    FieldPanel('title', classname='full title'),
    StreamFieldPanel('body'),
]
PersonIndexPage.promote_panels = Page.promote_panels
class PersonPage(Page, WithContactFields, WithFeedImage, WithStreamField):
    """Profile page for a single person, with contact details and feed image."""

    subtitle = models.CharField(max_length=256)
    first_name = models.CharField(max_length=256)
    last_name = models.CharField(max_length=256)
    intro = RichTextField(blank=True)

    search_fields = Page.search_fields + [
        index.SearchField('subtitle'),
        index.SearchField('first_name'),
        index.SearchField('last_name'),
        index.SearchField('intro'),
        index.SearchField('body'),
    ]

    # Leaf page: no children allowed.
    subpage_types = []

PersonPage.content_panels = [
    FieldPanel('title', classname='full title'),
    FieldPanel('subtitle', classname='full title'),
    FieldPanel('first_name'),
    FieldPanel('last_name'),
    FieldPanel('intro', classname='full'),
    StreamFieldPanel('body'),
    MultiFieldPanel(WithContactFields.panels, 'Contact information'),
]
PersonPage.promote_panels = Page.promote_panels + [
    ImageChooserPanel('feed_image'),
]
class OrganisationIndexPage(Page, WithStreamField):
    """Listing page that holds OrganisationPage children."""

    search_fields = Page.search_fields + [
        index.SearchField('body'),
    ]

    subpage_types = ['OrganisationPage']

OrganisationIndexPage.content_panels = [
    FieldPanel('title', classname='full title'),
    StreamFieldPanel('body'),
]
OrganisationIndexPage.promote_panels = Page.promote_panels
class OrganisationPage(Page, WithContactFields, WithFeedImage,
                       WithStreamField):
    """Profile page for an organisation, with contact details and feed image."""

    intro = RichTextField(blank=True)

    search_fields = Page.search_fields + [
        index.SearchField('intro'),
        index.SearchField('body'),
    ]

    # Leaf page: no children allowed.
    subpage_types = []

OrganisationPage.content_panels = [
    FieldPanel('title', classname='full title'),
    FieldPanel('intro', classname='full'),
    StreamFieldPanel('body'),
    MultiFieldPanel(WithContactFields.panels, 'Contact information'),
]
OrganisationPage.promote_panels = Page.promote_panels + [
    ImageChooserPanel('feed_image'),
]
class WorkIndexPage(RoutablePageMixin, Page, WithStreamField):
    """Routable listing of WorkPage children, with a tag-filter sub-route."""

    search_fields = Page.search_fields + [
        index.SearchField('body'),
    ]

    subpage_types = ['WorkPage']

    @property
    def works(self):
        # All live WorkPages anywhere beneath this index.
        works = WorkPage.objects.live().descendant_of(self)
        return works

    @route(r'^$')
    def all_works(self, request):
        """Default route: paginated list of every work."""
        works = self.works
        return render(request, self.get_template(request),
                      {'self': self, 'works': _paginate(request, works)})

    @route(r'^tag/(?P<tag>[\w\- ]+)/$')
    def tag(self, request, tag=None):
        """Tag route: paginated list of works filtered by tag name."""
        if not tag:
            # Invalid tag filter — fall back to the unfiltered listing.
            logger.error('Invalid tag filter')
            return self.all_works(request)
        works = self.works.filter(tags__name=tag)
        return render(
            request, self.get_template(request), {
                'self': self, 'works': _paginate(request, works),
                'filter_type': 'tag', 'filter': tag
            }
        )

WorkIndexPage.content_panels = [
    FieldPanel('title', classname='full title'),
    StreamFieldPanel('body'),
]
WorkIndexPage.promote_panels = Page.promote_panels
class WorkPageTag(TaggedItemBase):
    """Through-model linking tags to WorkPage (taggit/modelcluster plumbing)."""
    content_object = ParentalKey('WorkPage', related_name='tagged_items')
class WorkPage(Page, WithStreamField, WithFeedImage):
    """A single piece of work, taggable and categorised via a snippet."""

    subtitle = models.CharField(max_length=256)
    # Category is optional; deleting the snippet leaves the page intact.
    category = models.ForeignKey(WorkCategory, blank=True, null=True,
                                 on_delete=models.SET_NULL,)
    tags = ClusterTaggableManager(through=WorkPageTag, blank=True)

    search_fields = Page.search_fields + [
        index.SearchField('body'),
        index.SearchField('subtitle'),
        index.SearchField('category'),
        index.RelatedFields('tags', [
            index.SearchField('name'),
        ]),
    ]

    subpage_types = []

    def get_index_page(self):
        # Find the closest ancestor which is a work index.
        return WorkIndexPage.objects.ancestor_of(self).last()

WorkPage.content_panels = [
    FieldPanel('title', classname='full title'),
    FieldPanel('subtitle', classname='full title'),
    SnippetChooserPanel('category'),
    StreamFieldPanel('body'),
]
WorkPage.promote_panels = Page.promote_panels + [
    FieldPanel('tags'),
    ImageChooserPanel('feed_image'),
]
class BlogIndexPage(RoutablePageMixin, Page, WithStreamField):
    """Routable listing of BlogPost children, newest first, with tag filtering."""

    search_fields = Page.search_fields + [
        index.SearchField('body'),
    ]

    subpage_types = ['BlogPost']

    @property
    def posts(self):
        # All live posts beneath this index, newest first.
        posts = BlogPost.objects.live().descendant_of(self)
        posts = posts.order_by('-date')
        return posts

    @route(r'^$')
    def all_posts(self, request):
        """Default route: paginated list of every post."""
        posts = self.posts
        return render(request, self.get_template(request),
                      {'self': self, 'posts': _paginate(request, posts)})

    @route(r'^tag/(?P<tag>[\w\- ]+)/$')
    def tag(self, request, tag=None):
        """Tag route: paginated list of posts filtered by tag name."""
        if not tag:
            # Invalid tag filter — fall back to the unfiltered listing.
            logger.error('Invalid tag filter')
            return self.all_posts(request)
        posts = self.posts.filter(tags__name=tag)
        return render(
            request, self.get_template(request), {
                'self': self, 'posts': _paginate(request, posts),
                'filter_type': 'tag', 'filter': tag
            }
        )

BlogIndexPage.content_panels = [
    FieldPanel('title', classname='full title'),
    StreamFieldPanel('body'),
]
BlogIndexPage.promote_panels = Page.promote_panels
class BlogPostTag(TaggedItemBase):
    """Through-model linking tags to BlogPost (taggit/modelcluster plumbing)."""
    content_object = ParentalKey('BlogPost', related_name='tagged_items')
class BlogPost(Page, WithStreamField, WithFeedImage):
    """A dated, taggable blog post."""

    date = models.DateField()
    tags = ClusterTaggableManager(through=BlogPostTag, blank=True)

    search_fields = Page.search_fields + [
        index.SearchField('body'),
        index.SearchField('date'),
        index.RelatedFields('tags', [
            index.SearchField('name'),
            index.SearchField('slug'),
        ]),
    ]

    subpage_types = []

    def get_index_page(self):
        # Find closest ancestor which is a blog index
        return BlogIndexPage.objects.ancestor_of(self).last()

BlogPost.content_panels = [
    FieldPanel('title', classname='full title'),
    FieldPanel('date'),
    StreamFieldPanel('body'),
]
BlogPost.promote_panels = Page.promote_panels + [
    FieldPanel('tags'),
    ImageChooserPanel('feed_image'),
]
class CaptchaFormBuilder(FormBuilder):
    """Form builder that appends a required CAPTCHA field to every form.

    Registers a 'captcha' field type so the admin can declare captcha
    fields, and unconditionally adds a required CaptchaField to the
    generated form.
    """

    CAPTCHA_FIELD_NAME = 'captcha'

    def __init__(self, fields):
        super(CaptchaFormBuilder, self).__init__(fields)
        # Fix: the original called self.FIELD_TYPES.update(...), which
        # mutates the class-level dict shared with the base FormBuilder —
        # every builder in the process would silently gain the captcha
        # type. Shadow it with a per-instance copy instead.
        field_types = dict(self.FIELD_TYPES)
        field_types[self.CAPTCHA_FIELD_NAME] = self.create_captcha_field
        self.FIELD_TYPES = field_types

    def create_captcha_field(self, field, options):
        """Build the django-simple-captcha form field for a declared field."""
        return CaptchaField(**options)

    @property
    def formfields(self):
        """Return the configured form fields plus a required CAPTCHA field."""
        fields = super(CaptchaFormBuilder, self).formfields
        fields[self.CAPTCHA_FIELD_NAME] = CaptchaField(required=True)
        return fields
class FormField(AbstractFormField):
    """A single field definition belonging to a FormPage."""
    page = ParentalKey('FormPage',
                       on_delete=models.CASCADE, related_name='form_fields')
class FormPage(AbstractEmailForm):
    """Contact/enquiry form page whose submissions are emailed and CAPTCHA-protected."""

    intro = RichTextField(blank=True)
    followup_text = RichTextField(blank=True)
    subtitle = RichTextField(default='Get in touch')
    thank_you_text = RichTextField(blank=True)

    content_panels = AbstractEmailForm.content_panels + [
        FormSubmissionsPanel(),
        FieldPanel('subtitle', classname="full"),
        FieldPanel('intro', classname="full"),
        InlinePanel('form_fields', label="Form fields"),
        FieldPanel('followup_text', classname="full"),
        FieldPanel('thank_you_text', classname="full"),
        MultiFieldPanel([
            FieldRowPanel([
                FieldPanel('from_address', classname="col6"),
                FieldPanel('to_address', classname="col6"),
            ]),
            FieldPanel('subject'),
        ], "Email"),
    ]

    # Use the CAPTCHA-aware builder so every rendered form gets a captcha field.
    form_builder = CaptchaFormBuilder
| |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: bigip_profile_oneconnect
short_description: Manage OneConnect profiles on a BIG-IP
description:
- Manage OneConnect profiles on a BIG-IP system.
version_added: "1.0.0"
options:
name:
description:
- Specifies the name of the OneConnect profile.
type: str
required: True
parent:
description:
- Specifies the profile from which this profile inherits settings.
- When creating a new profile, if this parameter is not specified, the default
is the system-supplied C(oneconnect) profile.
type: str
source_mask:
description:
- Specifies a value the system applies to the source address to determine
its eligibility for reuse.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
- The system applies the value of this setting to the server-side source address to
determine its eligibility for reuse.
- A mask of C(0) causes the system to share reused connections across all source
addresses. A host mask of C(32) causes the system to share only those reused
connections originating from the same source address.
- When you are using a SNAT or SNAT pool, the server-side source address is
translated first and then the OneConnect mask is applied to the translated address.
type: str
description:
description:
- Description of the profile.
type: str
maximum_size:
description:
- Specifies the maximum number of connections the system holds in the
connection reuse pool.
- If the pool is already full, a server-side connection closes after the
response is completed.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: int
maximum_age:
description:
- Specifies the maximum number of seconds allowed for a connection in the connection
reuse pool.
- For any connection with an age higher than this value, the system removes that
connection from the re-use pool.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: int
maximum_reuse:
description:
- Specifies the maximum number of times that a server-side connection can be reused.
- When creating a new profile, if this parameter is not specified, the
default is provided by the parent profile.
type: int
idle_timeout_override:
description:
- Specifies the number of seconds a connection is idle before the connection
flow is eligible for deletion.
- When creating a new profile, if this parameter is not specified, the default
is provided by the parent profile.
- You may specify a number of seconds for the timeout override.
- When C(disabled), specifies there is no timeout override for the connection.
- When C(indefinite), specifies a connection may be idle with no timeout
override.
type: str
limit_type:
description:
- When C(none), simultaneous in-flight requests and responses over TCP connections
to a pool member are counted toward the limit. This is the historical behavior.
- When C(idle), idle connections will be dropped as the TCP connection limit is
reached. For short intervals, during the overlap of the idle connection being
dropped and the new connection being established, the TCP connection limit may
be exceeded.
- When C(strict), the TCP connection limit is honored with no exceptions. This means
that idle connections will prevent new TCP connections from being made until
they expire, even if they could otherwise be reused.
- C(strict) is not a recommended configuration except in very special cases with
short expiration timeouts.
- When creating a new profile, if this parameter is not specified, the default
is provided by the parent profile.
type: str
choices:
- none
- idle
- strict
share_pools:
description:
- Indicates connections may be shared not only within a virtual server, but
also among similar virtual servers.
- When C(yes), all virtual servers that use the same OneConnect and other internal
network profiles can share connections.
- When creating a new profile, if this parameter is not specified, the default
is provided by the parent profile.
type: bool
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
state:
description:
- When C(present), ensures the profile exists.
- When C(absent), ensures the profile is removed.
type: str
choices:
- present
- absent
default: present
extends_documentation_fragment: f5networks.f5_modules.f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create a OneConnect profile
bigip_profile_oneconnect:
name: foo
state: present
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
'''
RETURN = r'''
source_mask:
description: Value the system applies to the source address to determine its eligibility for reuse.
returned: changed
type: str
sample: 255.255.255.255
description:
description: Description of the profile.
returned: changed
type: str
sample: My profile
maximum_size:
description: Maximum number of connections the system holds in the connection reuse pool.
returned: changed
type: int
sample: 3000
maximum_age:
description: Maximum number of seconds allowed for a connection in the connection reuse pool.
returned: changed
type: int
sample: 2000
maximum_reuse:
description: Maximum number of times a server-side connection can be reused.
returned: changed
type: int
sample: 1000
idle_timeout_override:
description: The new idle timeout override.
returned: changed
type: str
sample: disabled
limit_type:
description: New limit type of the profile.
returned: changed
type: str
sample: idle
share_pools:
description: Share connections among similar virtual servers.
returned: changed
type: bool
sample: yes
'''
from datetime import datetime
from ansible.module_utils.basic import (
AnsibleModule, env_fallback
)
from ..module_utils.bigip import F5RestClient
from ..module_utils.common import (
F5ModuleError, AnsibleF5Parameters, transform_name, f5_argument_spec, fq_name
)
from ..module_utils.ipaddress import is_valid_ip
from ..module_utils.icontrol import tmos_version
from ..module_utils.teem import send_teem
class Parameters(AnsibleF5Parameters):
    """Shared parameter metadata: API<->module name maps and attribute lists."""

    # REST API attribute name -> module option name.
    api_map = {
        'sourceMask': 'source_mask',
        'maxSize': 'maximum_size',
        'maxReuse': 'maximum_reuse',
        'maxAge': 'maximum_age',
        'defaultsFrom': 'parent',
        'limitType': 'limit_type',
        'idleTimeoutOverride': 'idle_timeout_override',
        'sharePools': 'share_pools',
    }

    # Attributes sent to the device on create/update.
    api_attributes = [
        'sourceMask',
        'maxSize',
        'defaultsFrom',
        'description',
        'limitType',
        'idleTimeoutOverride',
        'maxAge',
        'maxReuse',
        'sharePools',
    ]

    # Values reported back to the user in the module result.
    returnables = [
        'description',
        'source_mask',
        'maximum_size',
        'maximum_age',
        'maximum_reuse',
        'limit_type',
        'idle_timeout_override',
        'share_pools',
        'parent',
    ]

    # Options compared when deciding whether an update is required.
    updatables = [
        'description',
        'source_mask',
        'maximum_size',
        'maximum_age',
        'maximum_reuse',
        'limit_type',
        'idle_timeout_override',
        'share_pools',
        'parent',
    ]
class ApiParameters(Parameters):
    """Parameters as read back from the BIG-IP REST API."""

    @property
    def source_mask(self):
        # The API reports a zero mask as the literal string 'any'.
        if self._values['source_mask'] is None:
            return None
        elif self._values['source_mask'] == 'any':
            return 0
        return self._values['source_mask']

    @property
    def idle_timeout_override(self):
        # Numeric strings become ints; 'disabled'/'indefinite' pass through unchanged.
        if self._values['idle_timeout_override'] is None:
            return None
        try:
            return int(self._values['idle_timeout_override'])
        except ValueError:
            return self._values['idle_timeout_override']
class ModuleParameters(Parameters):
    """Parameters as supplied by the user, normalized for the API."""

    @property
    def parent(self):
        # Fully-qualify the parent profile name with the partition.
        if self._values['parent'] is None:
            return None
        result = fq_name(self.partition, self._values['parent'])
        return result

    @property
    def idle_timeout_override(self):
        # Numeric strings become ints; 'disabled'/'indefinite' pass through unchanged.
        if self._values['idle_timeout_override'] is None:
            return None
        try:
            return int(self._values['idle_timeout_override'])
        except ValueError:
            return self._values['idle_timeout_override']

    @property
    def source_mask(self):
        if self._values['source_mask'] is None:
            return None
        elif self._values['source_mask'] == 'any':
            return 0
        try:
            # A bare integer means the user supplied a CIDR prefix length,
            # which the API does not accept for this attribute.
            int(self._values['source_mask'])
            raise F5ModuleError(
                "'source_mask' must not be in CIDR format."
            )
        except ValueError:
            pass
        if is_valid_ip(self._values['source_mask']):
            return self._values['source_mask']
        # NOTE(review): values that are neither numeric nor a valid IP fall
        # through and return None silently — confirm this is intended rather
        # than raising an error.
class Changes(Parameters):
    """Base class for the set of parameters that changed during a run."""

    def to_return(self):
        """Return a dict of returnable values, filtered of empty entries."""
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            # Deliberate best-effort: a property that blows up must not
            # prevent the module from reporting its result.
            pass
        return result
class UsableChanges(Changes):
    """Changes in the form sent to the API (no extra massaging needed)."""
    pass
class ReportableChanges(Changes):
    """Changes in the form reported back to the user."""

    @property
    def idle_timeout_override(self):
        # Guard against None: int(None) raises TypeError, which the
        # original ValueError-only handler did not catch.
        if self._values['idle_timeout_override'] is None:
            return None
        try:
            return int(self._values['idle_timeout_override'])
        except ValueError:
            return self._values['idle_timeout_override']

    @property
    def share_pools(self):
        # Bug fix: the original read self._values['idle_timeout_override']
        # here (copy-paste error), so share_pools was always mis-reported.
        if self._values['share_pools'] is None:
            return None
        elif self._values['share_pools'] == 'enabled':
            return 'yes'
        elif self._values['share_pools'] == 'disabled':
            return 'no'
class Difference(object):
    """Compares desired (want) against current (have) state, one attribute at a time.

    ``compare(param)`` first looks for a custom comparison property named
    *param* on this object; if none exists it falls back to a plain
    inequality check, returning the desired value when the two differ
    (or when *have* lacks the attribute) and None otherwise.
    """

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        try:
            # Custom per-attribute comparators take precedence.
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        want_value = getattr(self.want, param)
        try:
            if want_value != getattr(self.have, param):
                return want_value
        except AttributeError:
            # 'have' does not know this attribute: treat as changed.
            return want_value
class ModuleManager(object):
    """Orchestrates CRUD for the OneConnect profile via the iControl REST API."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = F5RestClient(**self.module.params)
        self.want = ModuleParameters(params=self.module.params)  # desired state
        self.have = ApiParameters()  # device state; populated in update()
        self.changes = UsableChanges()

    def _set_changed_options(self):
        # Create path: every explicitly supplied option counts as a change.
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)

    def _update_changed_options(self):
        """Diff want vs. have; record changes and return True if any differ."""
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def should_update(self):
        # Thin wrapper kept for API symmetry with other F5 modules.
        result = self._update_changed_options()
        if result:
            return True
        return False

    def exec_module(self):
        """Entry point: apply the desired state and return the result dict."""
        start = datetime.now().isoformat()
        version = tmos_version(self.client)
        changed = False
        result = dict()
        state = self.want.state
        if state == "present":
            changed = self.present()
        elif state == "absent":
            changed = self.absent()
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        send_teem(start, self.module, version)  # usage telemetry
        return result

    def _announce_deprecations(self, result):
        # Surface any deprecation warnings collected during parameter handling.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.client.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def present(self):
        """Ensure the profile exists, creating or updating as needed."""
        if self.exists():
            return self.update()
        else:
            return self.create()

    def exists(self):
        """Return True if the profile exists on the device."""
        uri = "https://{0}:{1}/mgmt/tm/ltm/profile/one-connect/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status == 404 or 'code' in response and response['code'] == 404:
            return False
        if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
            return True
        errors = [401, 403, 409, 500, 501, 502, 503, 504]
        if resp.status in errors or 'code' in response and response['code'] in errors:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        # NOTE(review): an unlisted status code falls through and returns
        # None (falsy), which is treated as "does not exist" — confirm.

    def update(self):
        """Update the existing profile if the desired state differs."""
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            # Report "would change" without touching the device.
            return True
        self.update_on_device()
        return True

    def remove(self):
        """Delete the profile; verify the deletion took effect."""
        if self.module.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the resource.")
        return True

    def create(self):
        """Create the profile from the supplied options."""
        self._set_changed_options()
        if self.module.check_mode:
            return True
        self.create_on_device()
        return True

    def create_on_device(self):
        """POST the new profile to the device; return its selfLink."""
        params = self.changes.api_params()
        params['name'] = self.want.name
        params['partition'] = self.want.partition
        uri = "https://{0}:{1}/mgmt/tm/ltm/profile/one-connect/".format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403, 404]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return response['selfLink']

    def update_on_device(self):
        """PATCH only the changed attributes onto the existing profile."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/ltm/profile/one-connect/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 404]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)

    def absent(self):
        """Ensure the profile is removed; False if it was already gone."""
        if self.exists():
            return self.remove()
        return False

    def remove_from_device(self):
        """DELETE the profile from the device."""
        uri = "https://{0}:{1}/mgmt/tm/ltm/profile/one-connect/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        response = self.client.api.delete(uri)
        if response.status == 200:
            return True
        raise F5ModuleError(response.content)

    def read_current_from_device(self):
        """GET the current profile state and wrap it in ApiParameters."""
        uri = "https://{0}:{1}/mgmt/tm/ltm/profile/one-connect/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return ApiParameters(params=response)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument spec (module options + common F5 args)."""

    def __init__(self):
        self.supports_check_mode = True
        argument_spec = dict(
            name=dict(required=True),
            description=dict(),
            parent=dict(),
            source_mask=dict(),
            maximum_size=dict(type='int'),
            maximum_reuse=dict(type='int'),
            maximum_age=dict(type='int'),
            limit_type=dict(
                choices=['none', 'idle', 'strict']
            ),
            idle_timeout_override=dict(),
            share_pools=dict(type='bool'),
            state=dict(
                default='present',
                choices=['present', 'absent']
            ),
            partition=dict(
                default='Common',
                # Allow the partition to come from the environment.
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        # Common F5 provider arguments first, then module-specific ones.
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
def main():
    """Module entry point: build the spec, run the manager, report results."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
    )
    try:
        mm = ModuleManager(module=module)
        results = mm.exec_module()
        module.exit_json(**results)
    except F5ModuleError as ex:
        # Surface any module-level failure as a clean Ansible error.
        module.fail_json(msg=str(ex))


if __name__ == '__main__':
    main()
| |
#!/usr/bin/env python
"""
Copyright (c) 2021 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import itertools
import logging
import os
import random
import cocotb_test.simulator
import pytest
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge
from cocotb.regression import TestFactory
from cocotbext.axi import AxiStreamBus, AxiStreamFrame, AxiStreamSource, AxiStreamSink
class TB(object):
    """Testbench wrapper: drives the DUT clock, AXI-Stream source/sink, and reset."""

    def __init__(self, dut):
        self.dut = dut
        self.log = logging.getLogger("cocotb.tb")
        self.log.setLevel(logging.DEBUG)
        # 10 ns period clock on dut.clk.
        cocotb.start_soon(Clock(dut.clk, 10, units="ns").start())
        self.source = AxiStreamSource(AxiStreamBus.from_prefix(dut, "s_axis"), dut.clk, dut.rst)
        self.sink = AxiStreamSink(AxiStreamBus.from_prefix(dut, "m_axis"), dut.clk, dut.rst)

    def set_idle_generator(self, generator=None):
        # Optional pause generator that inserts idle cycles on the source side.
        if generator:
            self.source.set_pause_generator(generator())

    def set_backpressure_generator(self, generator=None):
        # Optional pause generator that applies backpressure at the sink.
        if generator:
            self.sink.set_pause_generator(generator())

    async def reset(self):
        """Pulse dut.rst high for two clock cycles, aligned to rising edges."""
        self.dut.rst.setimmediatevalue(0)
        await RisingEdge(self.dut.clk)
        await RisingEdge(self.dut.clk)
        self.dut.rst.value = 1
        await RisingEdge(self.dut.clk)
        await RisingEdge(self.dut.clk)
        self.dut.rst.value = 0
        await RisingEdge(self.dut.clk)
        await RisingEdge(self.dut.clk)
async def run_test(dut, payload_lengths=None, payload_data=None, idle_inserter=None, backpressure_inserter=None):
    """Send a batch of frames with rotating tid/tdest and check them in order."""
    tb = TB(dut)

    # tid is cycled through its full range so routing/ID paths get exercised.
    id_count = 2**len(tb.source.bus.tid)
    cur_id = 1

    await tb.reset()

    tb.set_idle_generator(idle_inserter)
    tb.set_backpressure_generator(backpressure_inserter)

    test_frames = []
    for test_data in [payload_data(x) for x in payload_lengths()]:
        test_frame = AxiStreamFrame(test_data)
        test_frame.tid = cur_id
        test_frame.tdest = cur_id
        test_frames.append(test_frame)
        await tb.source.send(test_frame)
        cur_id = (cur_id + 1) % id_count

    # Frames must come out in order with payload and sideband intact.
    for test_frame in test_frames:
        rx_frame = await tb.sink.recv()
        assert rx_frame.tdata == test_frame.tdata
        assert rx_frame.tid == test_frame.tid
        assert rx_frame.tdest == test_frame.tdest
        assert not rx_frame.tuser

    assert tb.sink.empty()

    await RisingEdge(dut.clk)
    await RisingEdge(dut.clk)
async def run_test_tuser_assert(dut):
    """Check that a frame marked bad (tuser=1) propagates the tuser flag."""
    tb = TB(dut)

    await tb.reset()

    test_data = bytearray(itertools.islice(itertools.cycle(range(256)), 32))
    test_frame = AxiStreamFrame(test_data, tuser=1)
    await tb.source.send(test_frame)

    rx_frame = await tb.sink.recv()

    assert rx_frame.tdata == test_data
    assert rx_frame.tuser  # the error marker must survive the DUT

    assert tb.sink.empty()

    await RisingEdge(dut.clk)
    await RisingEdge(dut.clk)
async def run_test_init_sink_pause(dut):
    """Send a frame while the sink is paused, then unpause and verify delivery."""
    tb = TB(dut)

    await tb.reset()

    # Hold off the sink so the frame must be buffered inside the DUT.
    tb.sink.pause = True

    test_data = bytearray(itertools.islice(itertools.cycle(range(256)), 32))
    test_frame = AxiStreamFrame(test_data)
    await tb.source.send(test_frame)

    for k in range(64):
        await RisingEdge(dut.clk)

    tb.sink.pause = False

    rx_frame = await tb.sink.recv()

    assert rx_frame.tdata == test_data
    assert not rx_frame.tuser

    assert tb.sink.empty()

    await RisingEdge(dut.clk)
    await RisingEdge(dut.clk)
async def run_test_init_sink_pause_reset(dut):
    """Send a frame into a paused sink, reset mid-flight, and verify it is flushed."""
    tb = TB(dut)

    await tb.reset()

    tb.sink.pause = True

    test_data = bytearray(itertools.islice(itertools.cycle(range(256)), 32))
    test_frame = AxiStreamFrame(test_data)
    await tb.source.send(test_frame)

    for k in range(64):
        await RisingEdge(dut.clk)

    # Reset while the frame is held in the DUT: it must be discarded.
    await tb.reset()

    tb.sink.pause = False

    for k in range(64):
        await RisingEdge(dut.clk)

    assert tb.sink.empty()

    await RisingEdge(dut.clk)
    await RisingEdge(dut.clk)
async def run_test_overflow(dut):
    """Push a 2 KB frame into a paused sink to exercise DUT backpressure/overflow."""
    tb = TB(dut)

    await tb.reset()

    tb.sink.pause = True

    # Frame larger than a typical FIFO depth so the DUT must stall the source.
    test_data = bytearray(itertools.islice(itertools.cycle(range(256)), 2048))
    test_frame = AxiStreamFrame(test_data)
    await tb.source.send(test_frame)

    for k in range(2048):
        await RisingEdge(dut.clk)

    tb.sink.pause = False

    rx_frame = await tb.sink.recv()

    assert rx_frame.tdata == test_data
    assert not rx_frame.tuser

    assert tb.sink.empty()

    await RisingEdge(dut.clk)
    await RisingEdge(dut.clk)
async def run_stress_test(dut, idle_inserter=None, backpressure_inserter=None):
    """Send many random-length frames with rotating tid/tdest, optionally with
    source idle cycles and sink backpressure, and check in-order delivery."""
    tb = TB(dut)

    byte_lanes = tb.source.byte_lanes
    # Number of distinct IDs representable on the tid bus.
    id_count = 2**len(tb.source.bus.tid)

    cur_id = 1

    await tb.reset()

    tb.set_idle_generator(idle_inserter)
    tb.set_backpressure_generator(backpressure_inserter)

    test_frames = []

    for k in range(128):
        # Random payload length between one byte and 16 full words.
        length = random.randint(1, byte_lanes*16)
        test_data = bytearray(itertools.islice(itertools.cycle(range(256)), length))
        test_frame = AxiStreamFrame(test_data)
        test_frame.tid = cur_id
        test_frame.tdest = cur_id

        test_frames.append(test_frame)
        await tb.source.send(test_frame)

        # Rotate through the available ID space.
        cur_id = (cur_id + 1) % id_count

    # Frames must come out in order with data and sideband intact.
    for test_frame in test_frames:
        rx_frame = await tb.sink.recv()

        assert rx_frame.tdata == test_frame.tdata
        assert rx_frame.tid == test_frame.tid
        assert rx_frame.tdest == test_frame.tdest
        assert not rx_frame.tuser

    assert tb.sink.empty()

    await RisingEdge(dut.clk)
    await RisingEdge(dut.clk)
def cycle_pause():
    """Return an endless iterator yielding the pause pattern 1, 1, 1, 0.

    Used as an idle/backpressure generator: three paused cycles for every
    active one.
    """
    pattern = (1, 1, 1, 0)
    return itertools.cycle(pattern)
def size_list():
    """Payload lengths to sweep: every length from one byte up to four full
    data words, one large 512-byte frame, and 64 single-byte frames."""
    data_width = len(cocotb.top.m_axis_tdata)
    byte_width = data_width // 8
    sizes = list(range(1, byte_width*4+1))
    sizes.append(512)
    sizes.extend([1]*64)
    return sizes
def incrementing_payload(length):
    """Return a bytearray of *length* bytes counting 0..255 and wrapping."""
    return bytearray(i % 256 for i in range(length))
if cocotb.SIM_NAME:
    # Running inside a simulator: register the cocotb test matrix.

    # Main data-path test swept across payload sizes, idle insertion and
    # sink backpressure patterns.
    factory = TestFactory(run_test)
    factory.add_option("payload_lengths", [size_list])
    factory.add_option("payload_data", [incrementing_payload])
    factory.add_option("idle_inserter", [None, cycle_pause])
    factory.add_option("backpressure_inserter", [None, cycle_pause])
    factory.generate_tests()

    # Directed tests that take no extra options.
    for test in [
                run_test_tuser_assert,
                run_test_init_sink_pause,
                run_test_init_sink_pause_reset,
                run_test_overflow
            ]:
        factory = TestFactory(test)
        factory.generate_tests()

    # Randomized stress test, also swept over idle/backpressure patterns.
    factory = TestFactory(run_stress_test)
    factory.add_option("idle_inserter", [None, cycle_pause])
    factory.add_option("backpressure_inserter", [None, cycle_pause])
    factory.generate_tests()
# cocotb-test: pytest-driven simulator build/run support below.

tests_dir = os.path.dirname(__file__)
# RTL sources live two directory levels above this test directory.
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
@pytest.mark.parametrize("data_width", [8, 16, 32, 64])
def test_axis_srl_fifo(request, data_width):
    """pytest entry point: compile the DUT and run the cocotb tests in this
    module through cocotb-test, once per parametrized data width."""
    dut = "axis_srl_fifo"
    # The cocotb test module is this file itself.
    module = os.path.splitext(os.path.basename(__file__))[0]
    toplevel = dut

    verilog_sources = [
        os.path.join(rtl_dir, f"{dut}.v"),
    ]

    parameters = {}

    parameters['DEPTH'] = 1024
    parameters['DATA_WIDTH'] = data_width
    # tkeep is only meaningful for buses wider than one byte.
    parameters['KEEP_ENABLE'] = int(parameters['DATA_WIDTH'] > 8)
    parameters['KEEP_WIDTH'] = parameters['DATA_WIDTH'] // 8
    parameters['LAST_ENABLE'] = 1
    parameters['ID_ENABLE'] = 1
    parameters['ID_WIDTH'] = 8
    parameters['DEST_ENABLE'] = 1
    parameters['DEST_WIDTH'] = 8
    parameters['USER_ENABLE'] = 1
    parameters['USER_WIDTH'] = 1

    # Expose the HDL parameters to the testbench via the environment.
    extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}

    # One build directory per parametrized test; strip the brackets pytest
    # puts in the node name so the path stays filesystem-friendly.
    sim_build = os.path.join(tests_dir, "sim_build",
        request.node.name.replace('[', '-').replace(']', ''))

    cocotb_test.simulator.run(
        python_search=[tests_dir],
        verilog_sources=verilog_sources,
        toplevel=toplevel,
        module=module,
        parameters=parameters,
        sim_build=sim_build,
        extra_env=extra_env,
    )
| |
from rest_framework import serializers
from django.contrib.auth.models import User
from rest_framework import status
from crowdsourcing.models import Transaction, FinancialAccount, PayPalFlow, UserProfile
from crowdsourcing.serializers.dynamic import DynamicFieldsModelSerializer
from crowdsourcing.validators.utils import InequalityValidator, ConditionallyRequiredValidator
from crowdsourcing.utils import PayPalBackend, get_model_or_none
class FinancialAccountSerializer(DynamicFieldsModelSerializer):
    """Read/write serializer for FinancialAccount records."""

    class Meta:
        model = FinancialAccount
        fields = ('id', 'owner', 'type', 'is_active', 'balance')
class TransactionSerializer(DynamicFieldsModelSerializer):
    """Serializer for money transfers between financial accounts.

    ``create`` persists the transaction and applies the balance changes to
    the two accounts involved.
    """

    class Meta:
        model = Transaction
        fields = ('id', 'sender_type', 'amount', 'currency', 'state', 'method',
                  'sender', 'recipient', 'reference', 'created_timestamp', 'last_updated')
        read_only_fields = ('created_timestamp', 'last_updated')

    def create(self, *args, **kwargs):
        """Create the Transaction row and adjust account balances.

        Returns the saved Transaction instance.
        """
        transaction = Transaction.objects.create(**self.validated_data)
        # Credit the recipient account.
        transaction.recipient.balance += transaction.amount
        transaction.recipient.save()
        # External PayPal accounts carry no tracked balance to debit.
        if transaction.sender.type not in ['paypal_external', 'paypal_deposit']:
            transaction.sender.balance -= transaction.amount
            transaction.sender.save()
        # NOTE(review): the create and the two saves are not wrapped in a DB
        # transaction — a failure mid-way can leave balances inconsistent;
        # confirm whether atomicity is handled by the caller.
        return transaction
class PayPalFlowSerializer(DynamicFieldsModelSerializer):
    """Serializer driving the PayPal approval flow for a deposit.

    A flow is created when the PayPal payment is created, and executed once
    the payer has approved it; execution records a Transaction.
    """

    class Meta:
        model = PayPalFlow
        fields = ('id', 'paypal_id', 'state', 'recipient', 'redirect_url', 'payer_id')
        read_only_fields = ('state', 'recipient')

    def create(self, *args, **kwargs):
        """Create a PayPalFlow in the initial 'created' state.

        The recipient account is passed by the caller via kwargs, not by the
        client payload ('recipient' is read-only above).
        """
        flow = PayPalFlow.objects.create(
            state='created',
            recipient=kwargs['recipient'],
            paypal_id=self.validated_data['paypal_id'],
            redirect_url=self.validated_data['redirect_url']
        )
        return flow

    def execute(self, *args, **kwargs):
        """Execute an approved PayPal payment and record the transaction.

        Returns a (message_or_errors, http_status) tuple.
        """
        paypalbackend = PayPalBackend()
        payment = paypalbackend.paypalrestsdk.Payment.find(self.validated_data['paypal_id'])
        # Payment already approved on PayPal's side but not yet recorded
        # locally: just record it (idempotency guard via 'reference').
        if payment['state'] == 'approved' and not Transaction.objects.filter(reference=payment["id"]):
            return self.create_transaction(payment, self.validated_data['payer_id'])
        if payment.execute({"payer_id": self.validated_data['payer_id']}):
            return self.create_transaction(payment, self.validated_data['payer_id'])
        else:
            if Transaction.objects.filter(reference=payment["id"]):
                return payment.error['message'], status.HTTP_400_BAD_REQUEST
            else:
                # NOTE(review): execute() failed yet a transaction is still
                # created when none exists — presumably a retry path for
                # payments PayPal reports as already executed; confirm.
                return self.create_transaction(payment, self.validated_data['payer_id'])

    def create_transaction(self, payment, payer_id):
        """Mark the flow approved and record the deposit as a Transaction.

        Returns a (message_or_errors, http_status) tuple.
        """
        flow = PayPalFlow.objects.get(paypal_id=payment['id'])
        flow.state = 'approved'
        flow.payer_id = payer_id
        flow.save()
        # Deposits are booked against the system-owned paypal_deposit account.
        sender = FinancialAccount.objects.filter(is_system=True, type="paypal_deposit").first()
        transaction = {
            "amount": payment["transactions"][0]["amount"]["total"],
            "currency": payment["transactions"][0]["amount"]["currency"],
            "recipient": flow.recipient.id,
            "reference": payment["id"],
            "state": "approved",
            "method": payment["payer"]["payment_method"],
            "sender": sender.id
        }
        if not self.context['request'].user.is_anonymous():
            transaction["sender_type"] = "self"
        else:
            transaction["sender_type"] = "other"
        serializer = TransactionSerializer(data=transaction)
        if serializer.is_valid():
            serializer.create()
            return 'Payment executed successfully', status.HTTP_201_CREATED
        return serializer.errors, status.HTTP_400_BAD_REQUEST
class CreditCardSerializer(serializers.Serializer):
    """Validates credit card details forwarded to the PayPal REST API."""

    type = serializers.ChoiceField(choices=['visa', 'mastercard', 'discover', 'american_express'])
    # 13-19 digits covers all the accepted card networks.
    number = serializers.CharField(min_length=13, max_length=19)
    expire_month = serializers.IntegerField(min_value=1, max_value=12)
    expire_year = serializers.IntegerField()
    # 3 digits for most networks, 4 for American Express.
    cvv2 = serializers.RegexField(regex='^[0-9]{3,4}$', required=True)
    first_name = serializers.CharField()
    last_name = serializers.CharField()

    class Meta:
        fields = ('type', 'number', 'expire_month', 'expire_year', 'cvv2', 'first_name', 'last_name')
class PayPalPaymentSerializer(serializers.Serializer):
    """Validates and creates a PayPal deposit payment.

    The deposit can target the requesting user ('self') or another user
    ('other'), and be funded via PayPal redirect or a credit card.
    """

    amount = serializers.DecimalField(max_digits=19, decimal_places=4)
    type = serializers.ChoiceField(choices=['self', 'other'])
    username = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field='username', allow_null=True,
                                            required=False)
    method = serializers.ChoiceField(choices=['paypal', 'credit_card'])
    credit_card = CreditCardSerializer(many=False, required=False)

    class Meta:
        validators = [
            # Deposits must be strictly positive.
            InequalityValidator(
                field='amount', operator='gt', value=0
            ),
            # Card details are mandatory when paying by credit card.
            ConditionallyRequiredValidator(field='method', field2='credit_card', value='credit_card')
        ]

    def build_payment(self, *args, **kwargs):
        """Build the payment payload dict expected by the PayPal REST SDK."""
        host = ''
        if self.context['request'].is_secure():
            host = 'https://'
        else:
            host = 'http://'
        host += self.context['request'].META['HTTP_HOST']
        payment = {"intent": "sale",
                   "payer": {
                       "payment_method": self.validated_data['method']
                   },
                   "redirect_urls": {
                       "return_url": host + "/payment-success",
                       "cancel_url": host + "/payment-cancelled"
                   },
                   "transactions": [{
                       "item_list": {
                           "items": [{
                               "name": "Daemo Deposit",
                               "sku": "DMO-7CA000",
                               "price": str(self.validated_data['amount']),
                               "currency": "USD",
                               "quantity": 1}]},
                       "amount": {
                           "total": str(self.validated_data['amount']),
                           "currency": "USD"},
                       "description": "Daemo Deposit"}]
                   }
        # Credit card payments carry the card as a funding instrument and
        # need no user redirect/approval step.
        if self.validated_data['method'] == 'credit_card':
            payment['payer']['funding_instruments'] = [{
                "credit_card": self.validated_data['credit_card']
            }]
        return payment

    def create(self, *args, **kwargs):
        """Create the PayPal payment and the matching local PayPalFlow.

        Returns a (payload_or_errors, http_status) tuple.
        """
        recipient = None
        recipient_profile = None
        payment_data = self.build_payment()
        if self.validated_data['type'] == 'self':
            recipient_profile = self.context['request'].user.userprofile
        else:
            recipient_profile = get_model_or_none(UserProfile, user__username=self.validated_data['username'])
        recipient = get_model_or_none(FinancialAccount, owner=recipient_profile, type='requester')
        paypalbackend = PayPalBackend()
        payment = paypalbackend.paypalrestsdk.Payment(payment_data)
        if payment.create():
            # Find the approval redirect link returned by PayPal.
            # NOTE(review): the '#' fallback is a plain string, so
            # redirect_url['href'] below would raise for a paypal-method
            # payment without a REDIRECT link — confirm this cannot happen.
            redirect_url = next((link for link in payment['links'] if link['method'] == 'REDIRECT'), '#')
            if payment['payer']['payment_method'] == 'credit_card':
                # No user approval step for card payments.
                redirect_url = {
                    'href': '#'
                }
            flow_data = {
                "redirect_url": redirect_url['href'],
                "paypal_id": payment.id
            }
            payment_flow = PayPalFlowSerializer(data=flow_data, fields=('redirect_url', 'paypal_id',),
                                                context={'request': self.context['request']})
            if payment_flow.is_valid():
                payment_flow.create(recipient=recipient)
                if payment['payer']['payment_method'] == 'credit_card':
                    # Card payments are executed immediately; there is no
                    # real payer id from a redirect, hence the placeholder.
                    data = {
                        "paypal_id": payment['id'],
                        "payer_id": "UNKNOWN_CC"
                    }
                    execute_serializer = PayPalFlowSerializer(fields=('paypal_id', 'payer_id'), data=data,
                                                              context={"request": self.context['request']})
                    if execute_serializer.is_valid():
                        message, https_status = execute_serializer.execute()
                        return {"message": message}, https_status
                    else:
                        return execute_serializer.errors, status.HTTP_400_BAD_REQUEST
                return payment_flow.data, status.HTTP_201_CREATED
            else:
                return payment_flow.errors, status.HTTP_400_BAD_REQUEST
        else:
            return payment.error, status.HTTP_400_BAD_REQUEST
| |
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the view for GSoC invitation page.
"""
from google.appengine.ext import db
from google.appengine.api import users
from django import forms as djangoforms
from django.utils.translation import ugettext
from soc.logic import accounts
from soc.logic import cleaning
from soc.logic.helper import notifications
from soc.models.request import Request
from soc.models.user import User
from soc.views.helper import url_patterns
from soc.views.helper.access_checker import isSet
from soc.tasks import mailer
from soc.modules.gsoc.views.base import RequestHandler
from soc.modules.gsoc.models.profile import GSoCProfile
from soc.modules.gsoc.views import forms as gsoc_forms
from soc.modules.gsoc.views.helper.url_patterns import url
DEF_STATUS_FOR_USER_MSG = ugettext(
'You are now %s for this organization.')
DEF_STATUS_FOR_ADMIN_MSG = ugettext(
'This user is now %s with your organization.')
class InviteForm(gsoc_forms.GSoCModelForm):
    """Django form for the invite page.

    Accepts one or more invitees as a comma-separated list of link_ids or
    email addresses, resolving each to a User entity.
    """

    link_id = gsoc_forms.CharField(label='Link ID/Email')

    class Meta:
        model = Request
        # NOTE(review): 'gsoc_intivation' looks like a typo for
        # 'gsoc_invitation', but templates/CSS may depend on it — confirm
        # before renaming.
        css_prefix = 'gsoc_intivation'
        fields = ['message']

    def __init__(self, request_data, *args, **kwargs):
        super(InviteForm, self).__init__(*args, **kwargs)

        # store request object to cache results of queries
        self.request_data = request_data

        # reorder the fields so that link_id is the first one
        field = self.fields.pop('link_id')
        self.fields.insert(0, 'link_id', field)
        field.help_text = ugettext(
            'The link_id or email address of the invitee, '
            ' separate multiple values with a comma')

    def clean_link_id(self):
        """Accepts link_id of users which may be invited.

        Splits the field on commas and validates each entry individually,
        collecting the resolved users on request_data.invited_user.
        """
        assert isSet(self.request_data.organization)

        link_ids = self.cleaned_data.get('link_id', '').split(',')
        self.request_data.invited_user = []

        for link_id in link_ids:
            # _clean_one_link_id reads the value from cleaned_data.
            self.cleaned_data['link_id'] = link_id.strip()
            self._clean_one_link_id()

    def _clean_one_link_id(self):
        """Validate a single link_id/email entry and record the invitee.

        Raises djangoforms.ValidationError when the entry is invalid, the
        user does not exist, already has the role, was already invited, is
        a student, or has no profile for this program.
        """
        invited_user = None

        link_id_cleaner = cleaning.clean_link_id('link_id')

        try:
            link_id = link_id_cleaner(self)
        except djangoforms.ValidationError, e:
            # Only fall back to email parsing for 'invalid' errors.
            if e.code != 'invalid':
                raise

            email_cleaner = cleaning.clean_email('link_id')

            try:
                email_address = email_cleaner(self)
            except djangoforms.ValidationError, e:
                if e.code != 'invalid':
                    raise
                # Neither a valid link_id nor a valid email address.
                msg = ugettext(u'Enter a valid link_id or email address.')
                raise djangoforms.ValidationError(msg, code='invalid')

            # Resolve the email address to a User entity.
            account = users.User(email_address)
            user_account = accounts.normalizeAccount(account)
            invited_user = User.all().filter('account', user_account).get()

            if not invited_user:
                raise djangoforms.ValidationError(
                    'There is no user with that email address')

        # get the user entity that the invitation is to
        if not invited_user:
            existing_user_cleaner = cleaning.clean_existing_user('link_id')
            invited_user = existing_user_cleaner(self)

        self.request_data.invited_user.append(invited_user)

        # check if the organization has already sent an invitation to the user
        query = db.Query(Request)
        query.filter('type', 'Invitation')
        query.filter('user', invited_user)
        query.filter('role', self.request_data.kwargs['role'])
        query.filter('org', self.request_data.organization)
        if query.get():
            raise djangoforms.ValidationError(
                'An invitation to this user has already been sent.')

        # check if the user that is invited does not have the role
        key_name = '/'.join([
            self.request_data.program.key().name(),
            invited_user.link_id])
        profile = self.request_data.invite_profile = GSoCProfile.get_by_key_name(
            key_name, parent=invited_user)

        if not profile:
            msg = ('The specified user has a User account (the link_id is valid), '
                   'but they do not yet have a profile for this %s. '
                   'You cannot invite them until they create a profile.')
            raise djangoforms.ValidationError(msg % self.request_data.program.name)

        if profile.student_info:
            raise djangoforms.ValidationError('That user is a student')

        if self.request_data.kwargs['role'] == 'org_admin':
            role_for = profile.org_admin_for
        else:
            role_for = set(profile.org_admin_for + profile.mentor_for)

        if self.request_data.organization.key() in role_for:
            raise djangoforms.ValidationError('That user already has this role.')
class InvitePage(RequestHandler):
    """Encapsulate all the methods required to generate Invite page.
    """

    def templatePath(self):
        return 'v2/modules/gsoc/invite/base.html'

    def djangoURLPatterns(self):
        return [
            url(r'invite/%s$' % url_patterns.INVITE,
                self, name='gsoc_invite')
        ]

    def checkAccess(self):
        """Access checks for GSoC Invite page.

        Only org admins of a visible program may send invitations.
        """
        self.check.isProgramVisible()
        self.check.isOrgAdmin()

    def context(self):
        """Handler to for GSoC Invitation Page HTTP get request.
        """
        role = 'Org Admin' if self.data.kwargs['role'] == 'org_admin' else 'Mentor'

        invite_form = InviteForm(self.data, self.data.POST or None)

        return {
            'logout_link': self.data.redirect.logout(),
            'page_name': 'Invite a new %s' % role,
            'program': self.data.program,
            'invite_form': invite_form
        }

    def _createFromForm(self):
        """Creates a new invitation based on the data inserted in the form.

        Returns:
          a newly created Request entity or None
        """
        assert isSet(self.data.organization)

        invite_form = InviteForm(self.data, self.data.POST)

        if not invite_form.is_valid():
            return None

        assert isSet(self.data.invited_user)
        assert self.data.invited_user

        # create a new invitation entity
        invite_form.cleaned_data['org'] = self.data.organization
        invite_form.cleaned_data['role'] = self.data.kwargs['role']
        invite_form.cleaned_data['type'] = 'Invitation'

        def create_invite_txn():
            # Create the invite and spawn the notification email task
            # inside the same transaction.
            invite = invite_form.create(commit=True)
            context = notifications.inviteContext(self.data, invite)
            sub_txn = mailer.getSpawnMailTaskTxn(context, parent=invite)
            sub_txn()
            return invite

        # One invite entity (and transaction) per resolved invitee.
        for user in self.data.invited_user:
            invite_form.instance = None
            invite_form.cleaned_data['user'] = user
            db.run_in_transaction(create_invite_txn)

        return True

    def post(self):
        """Handler to for GSoC Invitation Page HTTP post request.
        """
        if self._createFromForm():
            self.redirect.invite()
            self.redirect.to('gsoc_invite')
        else:
            # Validation failed: re-render the form with errors.
            self.get()
class ShowInvite(RequestHandler):
    """Encapsulate all the methods required to generate Show Invite page.
    """

    # Action labels; the POST body carries one of these values.
    ACTIONS = {
        'accept': 'Accept',
        'reject': 'Reject',
        'resubmit': 'Resubmit',
        'withdraw': 'Withdraw',
    }

    def templatePath(self):
        return 'v2/soc/request/base.html'

    def djangoURLPatterns(self):
        return [
            url(r'invitation/%s$' % url_patterns.ID, self,
                name='gsoc_invitation')
        ]

    def checkAccess(self):
        """Load the invite and verify the caller may view/act on it."""
        self.check.isProfileActive()

        invite_id = int(self.data.kwargs['id'])
        self.data.invite = Request.get_by_id(invite_id)
        self.check.isInvitePresent(invite_id)

        self.data.organization = self.data.invite.org
        self.data.invited_user = self.data.invite.user

        if self.data.POST:
            self.data.action = self.data.POST['action']

            # Each action has its own permission check.
            if self.data.action == self.ACTIONS['accept']:
                self.check.canRespondToInvite()
            elif self.data.action == self.ACTIONS['reject']:
                self.check.canRespondToInvite()
            elif self.data.action == self.ACTIONS['resubmit']:
                self.check.canResubmitInvite()
        else:
            self.check.canViewInvite()

        self.mutator.canRespondForUser()

        # When viewing one's own invite, the profile is already loaded.
        if self.data.user.key() == self.data.invited_user.key():
            self.data.invited_profile = self.data.profile
            return

        key_name = '/'.join([
            self.data.program.key().name(),
            self.data.invited_user.link_id])
        self.data.invited_profile = GSoCProfile.get_by_key_name(
            key_name, parent=self.data.invited_user)

    def context(self):
        """Handler to for GSoC Show Invitation Page HTTP get request.
        """
        assert isSet(self.data.invite)
        assert isSet(self.data.can_respond)
        assert isSet(self.data.organization)
        assert isSet(self.data.invited_user)
        assert isSet(self.data.invited_profile)
        assert self.data.invited_profile

        # This code is dupcliated between request and invite
        status = self.data.invite.status

        # Determine which actions are available, depending on whether the
        # viewer is the invitee or an org admin, and the invite status.
        can_accept = can_reject = can_withdraw = can_resubmit = False
        if self.data.can_respond:
            # invitee speaking
            if status == 'pending':
                can_accept = True
                can_reject = True
            if status == 'rejected':
                can_accept = True
        else:
            # admin speaking
            if status == 'withdrawn':
                can_resubmit = True
            if status == 'pending':
                can_withdraw = True

        show_actions = can_accept or can_reject or can_withdraw or can_resubmit

        org_key = self.data.organization.key()

        # Status message wording depends on who is looking at the page.
        status_msg = None
        if self.data.invited_profile.key() == self.data.profile.key():
            if org_key in self.data.invited_profile.org_admin_for:
                status_msg = DEF_STATUS_FOR_USER_MSG % 'an organization administrator'
            elif org_key in self.data.invited_profile.mentor_for:
                status_msg = DEF_STATUS_FOR_USER_MSG % 'a mentor'
        else:
            if org_key in self.data.invited_profile.org_admin_for:
                status_msg = DEF_STATUS_FOR_ADMIN_MSG % 'an organization administrator'
            elif org_key in self.data.invited_profile.mentor_for:
                status_msg = DEF_STATUS_FOR_ADMIN_MSG % 'a mentor'

        return {
            'request': self.data.invite,
            'page_name': 'Invite',
            'org': self.data.organization,
            'actions': self.ACTIONS,
            'status_msg': status_msg,
            'user_name': self.data.invited_profile.name(),
            'user_link_id': self.data.invited_user.link_id,
            'user_email': accounts.denormalizeAccount(
                self.data.invited_user.account).email(),
            'show_actions': show_actions,
            'can_accept': can_accept,
            'can_reject': can_reject,
            'can_withdraw': can_withdraw,
            'can_resubmit': can_resubmit,
        }

    def post(self):
        """Handler to for GSoC Show Invitation Page HTTP post request.
        """
        assert self.data.action
        assert self.data.invite

        # Dispatch on the requested action, then return to the dashboard.
        if self.data.action == self.ACTIONS['accept']:
            self._acceptInvitation()
        elif self.data.action == self.ACTIONS['reject']:
            self._rejectInvitation()
        elif self.data.action == self.ACTIONS['resubmit']:
            self._resubmitInvitation()
        elif self.data.action == self.ACTIONS['withdraw']:
            self._withdrawInvitation()

        self.redirect.dashboard()
        self.redirect.to()

    def _acceptInvitation(self):
        """Accepts an invitation.
        """
        assert isSet(self.data.organization)

        # Invitee needs a profile before the role can be granted.
        if not self.data.profile:
            self.redirect.program()
            self.redirect.to('edit_gsoc_profile')

        invite_key = self.data.invite.key()
        profile_key = self.data.profile.key()
        organization_key = self.data.organization.key()

        def accept_invitation_txn():
            invite = db.get(invite_key)
            profile = db.get(profile_key)

            invite.status = 'accepted'

            if invite.role != 'mentor':
                profile.is_org_admin = True
                profile.org_admin_for.append(organization_key)
                # De-duplicate in case the role was already present.
                profile.org_admin_for = list(set(profile.org_admin_for))

            # Org admins are mentors as well.
            profile.is_mentor = True
            profile.mentor_for.append(organization_key)
            profile.mentor_for = list(set(profile.mentor_for))

            invite.put()
            profile.put()

        accept_invitation_txn()
        # TODO(SRabbelier): run in txn as soon as we make User Request's parent
        # db.run_in_transaction(accept_invitation_txn)

    def _rejectInvitation(self):
        """Rejects a invitation.
        """
        assert isSet(self.data.invite)
        invite_key = self.data.invite.key()

        def reject_invite_txn():
            invite = db.get(invite_key)
            invite.status = 'rejected'
            invite.put()

        db.run_in_transaction(reject_invite_txn)

    def _resubmitInvitation(self):
        """Resubmits a invitation.
        """
        assert isSet(self.data.invite)
        invite_key = self.data.invite.key()

        def resubmit_invite_txn():
            invite = db.get(invite_key)
            invite.status = 'pending'
            invite.put()

            # Notify the invitee that the invite is pending again.
            context = notifications.handledInviteContext(self.data)
            sub_txn = mailer.getSpawnMailTaskTxn(context, parent=invite)
            sub_txn()

        db.run_in_transaction(resubmit_invite_txn)

    def _withdrawInvitation(self):
        """Withdraws an invitation.
        """
        assert isSet(self.data.invite)
        invite_key = self.data.invite.key()

        def withdraw_invite_txn():
            invite = db.get(invite_key)
            invite.status = 'withdrawn'
            invite.put()

            # Notify the invitee that the invite was withdrawn.
            context = notifications.handledInviteContext(self.data)
            sub_txn = mailer.getSpawnMailTaskTxn(context, parent=invite)
            sub_txn()

        db.run_in_transaction(withdraw_invite_txn)
| |
# Copyright 2008-2011 WebDriver committers
# Copyright 2008-2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import platform
from subprocess import Popen, PIPE, STDOUT
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common import utils
import time
class FirefoxBinary(object):
    """Wrapper around the Firefox executable.

    Locates the binary across platforms, prepares an isolated process
    environment, and launches/kills the browser for a given profile.
    """

    # Preload library used on Linux to stop Firefox stealing window focus.
    NO_FOCUS_LIBRARY_NAME = "x_ignore_nofocus.so"

    def __init__(self, firefox_path=None, log_file=None):
        """
        Creates a new instance of Firefox binary.

        :Args:
         - firefox_path - Path to the Firefox executable. By default, it will be detected from the standard locations.
         - log_file - A file object to redirect the firefox process output to. It can be sys.stdout.
           Please note that with parallel run the output won't be synchronous.
           By default, it will be redirected to subprocess.PIPE.
        """
        self._start_cmd = firefox_path
        self._log_file = log_file or PIPE
        self.command_line = None
        if self._start_cmd is None:
            self._start_cmd = self._get_firefox_start_cmd()
        # Rather than modifying the environment of the calling Python process
        # copy it and modify as needed.
        self._firefox_env = os.environ.copy()
        self._firefox_env["MOZ_CRASHREPORTER_DISABLE"] = "1"
        self._firefox_env["MOZ_NO_REMOTE"] = "1"
        self._firefox_env["NO_EM_RESTART"] = "1"

    def add_command_line_options(self, *args):
        """Store extra arguments to append to the Firefox launch command."""
        self.command_line = args

    def launch_browser(self, profile):
        """Launches the browser for the given profile name.
        It is assumed the profile already exists.
        """
        self.profile = profile

        self._start_from_profile_path(self.profile.path)
        self._wait_until_connectable()

    def kill(self):
        """Kill the browser.

        This is useful when the browser is stuck.
        """
        if self.process:
            self.process.kill()
            self.process.wait()

    def _start_from_profile_path(self, path):
        """Launch Firefox against the profile directory at *path*."""
        self._firefox_env["XRE_PROFILE_PATH"] = path

        if platform.system().lower() == 'linux':
            self._modify_link_library_path()
        command = [self._start_cmd, "-silent"]
        if self.command_line is not None:
            for cli in self.command_line:
                command.append(cli)

        # First run with -silent lets Firefox pre-process the profile and
        # exit; communicate() blocks until that run completes.
        Popen(command, stdout=self._log_file, stderr=STDOUT,
              env=self._firefox_env).communicate()
        # Then relaunch in the foreground for the actual session.
        command[1] = '-foreground'
        self.process = Popen(
            command, stdout=self._log_file, stderr=STDOUT,
            env=self._firefox_env)

    def _get_firefox_output(self):
        # NOTE: communicate() waits for the process to terminate.
        return self.process.communicate()[0]

    def _wait_until_connectable(self):
        """Blocks until the extension is connectable in the firefox."""
        count = 0
        while not utils.is_connectable(self.profile.port):
            if self.process.poll() is not None:
                # Browser has exited
                raise WebDriverException("The browser appears to have exited "
                                         "before we could connect. The output was: %s" %
                                         self._get_firefox_output())
            if count == 30:
                # Give up after ~30 seconds of polling.
                self.kill()
                raise WebDriverException("Can't load the profile. Profile "
                                         "Dir: %s Firefox output: %s" % (
                                             self.profile.path, self._get_firefox_output()))
            count += 1
            time.sleep(1)
        return True

    def _find_exe_in_registry(self):
        """Return the Firefox executable path from the Windows registry,
        or "" when it cannot be found."""
        try:
            # Python 2 module name first, then the Python 3 name.
            from _winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE
        except ImportError:
            from winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE
        import shlex
        keys = (
            r"SOFTWARE\Classes\FirefoxHTML\shell\open\command",
            r"SOFTWARE\Classes\Applications\firefox.exe\shell\open\command"
        )
        command = ""
        for path in keys:
            try:
                key = OpenKey(HKEY_LOCAL_MACHINE, path)
                command = QueryValue(key, "")
                break
            except OSError:
                pass
        else:
            # Neither registry key existed.
            return ""

        if not command:
            return ""

        # The registry value is a full command line; the executable is the
        # first token.
        return shlex.split(command)[0]

    def _get_firefox_start_cmd(self):
        """Return the command to start firefox."""
        start_cmd = ""
        if platform.system() == "Darwin":
            start_cmd = ("/Applications/Firefox.app/Contents/MacOS/firefox-bin")
        elif platform.system() == "Windows":
            start_cmd = (self._find_exe_in_registry() or
                         self._default_windows_location())
        elif platform.system() == 'Java' and os._name == 'nt':
            # Jython on Windows.
            start_cmd = self._default_windows_location()
        else:
            # Linux and other unixes: search the PATH for known binary names.
            for ffname in ["firefox", "iceweasel"]:
                start_cmd = self.which(ffname)
                if start_cmd is not None:
                    break
            else:
                # couldn't find firefox on the system path
                raise RuntimeError("Could not find firefox in your system PATH." +
                                   " Please specify the firefox binary location or install firefox")
        return start_cmd

    def _default_windows_location(self):
        """Return the standard Windows install path of firefox.exe, or ""."""
        # Per-user install location first.
        ff = os.path.join(os.getenv("LOCALAPPDATA"), "Mozilla Firefox\\firefox.exe")
        if os.path.exists(ff): return ff
        program_files = [os.getenv("PROGRAMFILES", r"C:\Program Files"),
                         os.getenv("PROGRAMFILES(X86)", r"C:\Program Files (x86)")]
        for path in program_files:
            binary_path = os.path.join(path, r"Mozilla Firefox\firefox.exe")
            if os.access(binary_path, os.X_OK):
                return binary_path
        return ""

    def _modify_link_library_path(self):
        """Prepend the no-focus library directories to LD_LIBRARY_PATH and
        arrange for the library to be preloaded (Linux only)."""
        existing_ld_lib_path = os.environ.get('LD_LIBRARY_PATH', '')

        new_ld_lib_path = self._extract_and_check(
            self.profile, self.NO_FOCUS_LIBRARY_NAME, "x86", "amd64")
        # _extract_and_check always ends its result with ':', so this is a
        # well-formed path list.
        new_ld_lib_path += existing_ld_lib_path

        self._firefox_env["LD_LIBRARY_PATH"] = new_ld_lib_path
        self._firefox_env['LD_PRELOAD'] = self.NO_FOCUS_LIBRARY_NAME

    def _extract_and_check(self, profile, no_focus_so_name, x86, amd64):
        """Copy the no-focus library into per-arch dirs inside the profile.

        Returns a colon-terminated LD_LIBRARY_PATH fragment listing the dirs.
        """
        paths = [x86, amd64]
        built_path = ""
        for path in paths:
            library_path = os.path.join(profile.path, path)
            os.makedirs(library_path)
            import shutil
            shutil.copy(os.path.join(os.path.dirname(__file__), path,
                                     self.NO_FOCUS_LIBRARY_NAME),
                        library_path)
            built_path += library_path + ":"
        return built_path

    def which(self, fname):
        """Returns the fully qualified path by searching Path of the given
        name"""
        for pe in os.environ['PATH'].split(os.pathsep):
            checkname = os.path.join(pe, fname)
            if os.access(checkname, os.X_OK) and not os.path.isdir(checkname):
                return checkname
        return None
| |
#! /usr/bin/env python
""" Module for the abstract graph search/traversal function.
Clients can use this module to perform various search or traversal based graph
algorithms without having to worry about the internals.
The interface consists of one function (search), which performs highly
customizable graph traversals.
Alternatively clients can also use the GraphSearch class for more control.
Author: Larion Garaczi
Date: 2014
"""
from algoyoga_test import BaseTest
from collections import deque
# TODO:
# SearchOptions object?
# inorder, postorder, preorder
# implicit graphs!
# Update docstring for graphsearch (process_vertex_early and late)
### interface ###
def search(*args, **kwargs):
    """Convenience wrapper: build a GraphSearch and run its traversal.

    All arguments are forwarded unchanged to GraphSearch(); returns
    whatever GraphSearch.search() returns.
    """
    return GraphSearch(*args, **kwargs).search()
#################
class GraphSearchError(Exception):
    """GraphSearch exception class.

    Fix: the original assigned ``self.message`` without initialising the
    ``Exception`` base class, so ``str(err)`` was empty and ``err.args``
    was ``()``. Delegate to the base class while keeping the ``message``
    attribute for backward compatibility.
    """

    def __init__(self, message):
        # Populate args/str() on the base class as well as the legacy
        # .message attribute.
        super(GraphSearchError, self).__init__(message)
        self.message = message
class GraphSearch(object):
    """Class for graph traversals. The main public method of this class is
    search, which performs a traversal based on the way the GraphSearch object
    was initialized.
    """

    def __init__(self, graph, node=None, search_type="bfs",
            process_vertex_early=None, process_vertex_late=None, process_edge=None, new_component=None):
        """ Initialize graph search. The only mandatory argument is the graph
        itself, which is represented as a dictionary mapping nodes to the
        list of their neighbours.

        Node is the initial node to begin the traversal with (by default this
        is arbitrary).

        Search type can either be "bfs" (Breadth First Search) or "dfs" (Depth
        First Search). "bfs" is the default.

        There are 4 functional parameters (all of them are optional):

        - process_vertex_early(searchstate, vertex_id)
        - process_vertex_late(searchstate, vertex_id)
        - process_edge(searchstate, x, y)
        - new_component(searchstate, vertex_id)

        In case any of these functions returns a value other than None, the
        search terminates with that value. The new_component function gets
        called whenever the traversal of one component is finished and a new
        node outside of this component is found ready to be traversed (and also
        at the beginning of the search). The argument for new_component is the
        name of the first node of the new component. The parameter searchstate
        is a GraphSearchState object representing the current state of the
        search. """
        ### initialize search constants ###
        assert search_type in ["bfs", "dfs"]
        # initial node should be in the graph (when specified)
        assert node in graph if node else True
        self.search_type = search_type

        # Replace omitted callbacks with a no-op so the traversal can call
        # them unconditionally (and their None return never terminates it).
        def do_nothing(*args):
            """ Do absolutely nothing. """

        if process_vertex_early is None: process_vertex_early = do_nothing
        if process_vertex_late is None: process_vertex_late = do_nothing
        if process_edge is None: process_edge = do_nothing
        if new_component is None: new_component = do_nothing

        self.process_vertex_early = process_vertex_early
        self.process_vertex_late = process_vertex_late
        self.process_edge = process_edge
        self.new_component = new_component
        self.initial_node = node
        self.graph = graph

        ### initialize search state ###
        self.searchstate = self.get_search_state()

    def search(self):
        """ Traverse the graph.

        Returns the first non-None value produced by any callback, or None
        once every component has been traversed.
        """
        state = self.searchstate
        # go through the nodes in the graph
        for node in state._to_process:
            if node in state.processed:
                # already processed
                continue
            else:  # traverse component
                # call client defined function for new components
                # return if new_component wants us to.
                newcomp = self.new_component(state, node)
                if newcomp is not None:
                    return newcomp
                state.discovered.add(node)
                self.add_children(node, state)
                # early processing of root vertex
                proc_vertex = self.process_vertex_early(state, node)
                if proc_vertex is not None:
                    return proc_vertex
                while state.frontier:
                    # Get signal to process.
                    #
                    # "processed" means that the node given in value is ready
                    # for late processing (in DFS this means that the subtree
                    # under it is completely traversed).
                    #
                    # "edge" means that an edge of the graph needs to be
                    # traversed.
                    message, value = state._pop()
                    if message == "processed":
                        # late processing of vertex
                        proc_vertex = self.process_vertex_late(state, value)
                        if proc_vertex is not None:
                            return proc_vertex
                        state.processed.add(value)
                        continue
                    elif message == "edge":  # traverse an edge
                        node_from, node_to = value
                        # edge processing
                        proc_edge = self.process_edge(state, node_from, node_to)
                        if proc_edge is not None:
                            return proc_edge
                        if node_to not in state.discovered:
                            state.discovered.add(node_to)
                            state.parents[node_to] = node_from
                            self.add_children(node_to, state)
                            # early processing of vertex
                            proc_vertex = self.process_vertex_early(state, node_to)
                            if proc_vertex is not None:
                                return proc_vertex
                    else:  # unknown signal
                        # Fix: the original raised
                        # GraphSearchError("Unknown signal encountered.", current)
                        # which referenced an undefined name 'current' AND
                        # passed two arguments to a one-argument constructor,
                        # so this path always crashed with the wrong error.
                        raise GraphSearchError(
                            "Unknown signal encountered: %r" % (message,))

    def add_children(self, node, state):
        """ Add all children of node to frontier """
        # We have to push a signal into the queue so that we
        # can know when the subtree under the node will be processed.
        # Pushed before the children; the stack (DFS) vs queue (BFS)
        # behaviour of _push/_pop makes it come out at the right time.
        processed_signal = GraphSearchSignal("processed", node)
        state._push(processed_signal)
        for neighbour in self.graph[node]:
            signal = GraphSearchSignal("edge", (node, neighbour))
            state._push(signal)

    def get_search_state(self):
        """ Return a new GraphSearchState object. """
        return GraphSearchState(self.graph, self.initial_node, self.search_type)
class GraphSearchState(object):
    """ Class representing search states for GraphSearch. The main (public)
    attributes are (you will probably only need these):
    processed  The set of nodes that are already processed by the search.
               This implies that all of their children are discovered and
               in case of DFS that the whole subtree under them is
               processed.
    discovered The set of nodes that are discovered by the search
    frontier   the current frontier. This is the list of signals scheduled
               for traversal.
    parents    A dictionary containing the parents of all the vertices in
               the traversal tree
    object methods:
    _pop()       gets a single signal from the frontier
    _push(sig)   pushes a signal to the frontier
    These methods are useful to abstract away from the underlying datastructure
    used for the frontier (i. e. a FIFO queue for BFS search and a stack for DFS
    search). """
    def __init__(self, graph, initial_node, search_type):
        # BFS needs O(1) pops from the left, hence the deque; DFS uses a
        # plain list as a stack.
        if search_type == "bfs":
            self.frontier = deque()
        elif search_type == "dfs":
            self.frontier = list()
        self._search_type = search_type
        self.processed = set()
        self.discovered = set()
        # Iterating the dict directly (instead of iterkeys()) has identical
        # semantics and also works on Python 3.
        self.parents = {node: None for node in graph}
        self._to_process = list()  # TODO find a better name for this
        nodes = list(graph)
        if initial_node is not None:
            self._to_process.append(initial_node)
            # BUG FIX: the original called nodes.remove(node) with an
            # undefined name 'node' (NameError); the initial node is the
            # one that must be moved to the front of the processing list.
            nodes.remove(initial_node)
        self._to_process.extend(nodes)
    def _pop(self):
        """ Pop next signal from frontier (FIFO for BFS, LIFO for DFS). """
        if self._search_type == "bfs":
            return self.frontier.popleft()
        elif self._search_type == "dfs":
            return self.frontier.pop()
    def _push(self, node):
        """ Push a signal to the frontier. """
        # BUG FIX: the original also did self.discovered.add(node) here,
        # polluting the public *node* set with GraphSearchSignal objects
        # (callers add discovered nodes explicitly; see GraphSearch.search).
        self.frontier.append(node)
class GraphSearchSignal(object):
    """ Lightweight (message, value) pair used as a work item on the
    search frontier.  Supports tuple-style unpacking:
    ``message, value = signal``. """
    def __init__(self, message, val):
        self.message = message
        self.val = val
    def __iter__(self):
        # Enables "message, value = signal" in the search loop.
        yield self.message
        yield self.val
    def __repr__(self):
        parts = (self.message, str(self.val))
        return " ".join(parts)
    def __str__(self):
        return repr(self)
class GraphSearchTest(BaseTest):
def __init__(self):
testlist = [self.test_search]
super(GraphSearchTest,self).__init__("graph traversal", testlist)
def test_search(self):
""" test the search (bfs + dfs traversal of graphs) function """
tree = { # test case 1 - a tree
1: [2, 3],
2: [4, 5],
3: [9],
4: [6, 7],
5: [8],
6: [],
7: [],
8: [],
9: [10, 11],
10: [],
11: [],
}
cycle = { # test case 2 - a 4-cycle
1: [2],
2: [3],
3: [4],
4: [1],
}
trivial_graph = dict() # test case 3 - empty graph
# assume postorder traversal
tree_bfsorder = [1, 2, 3, 4, 5, 9, 6, 7, 8, 10, 11]
tree_dfsorder = [1, 3, 9, 11, 10, 2, 5, 8, 4, 7, 6]
tree_expected = {"bfs": tree_bfsorder, "dfs": tree_dfsorder}
cycle_expected = {"bfs": [1,2,3,4], "dfs": [1,2,3,4]}
trivial_expected = {"bfs": list(), "dfs": list()}
listing = []
testcases = [(tree, tree_expected), (cycle, cycle_expected), (trivial_graph, trivial_expected)]
def collect_nodes(s_state, node):
listing.append(node)
for mode in ["bfs", "dfs"]:
for testobject, expected in testcases:
print "testing {!s} on {!s} ".format(mode, testobject),
search(testobject, process_vertex_early = collect_nodes, search_type=mode)
assert listing == expected[mode]
print "OK"
del listing[:]
return "test pass"
if __name__ == "__main__":
    # Run the traversal test suite when executed as a script.
    GraphSearchTest().run_tests()
| |
# Standard library
import csv
import os
from shutil import copyfile

# Front Arena / project modules
import acm
import ael
import FHTI_EDD_OTC_Util
import HTI_ExcelReport2
import HTI_Util
import HTI_FeedTrade_EDD_Util
# Tooltip strings for the AEL variables dialog.
ttSaveToFile = "Check this to save the report instead of showing it."
ttCSV = "Check this to export the report in CSV format"
ttFileName = "File name and path of the report. YYYYMMDD in the file name will be replaced by the valuation date."
ttSendMail = "Send report as email attachment."
# Column indices into the rows returned by the report query; these must stay
# in sync with the SELECT lists built in ael_main / genCashEntryRpt:
# select cpty.ptyid, cpty.fullname, t.trdnbr, t.time, p.type, c.insid,
#        p.amount, add_info(t, 'External Reference')
Client_Code = 0
Client_Name = 1
Trade_Reference = 2
Trade_Date = 3
Type = 4
Currency = 5
Amount = 6
EXT_REF = 7
def getFx(dt, fm_ccy, to_ccy, currclspricemkt, histclspricemkt):
    """Return the FX rate from fm_ccy to to_ccy for date dt, or 0.0 on failure.

    Crosses both legs through USD: fm_ccy->USD times USD->to_ccy.  For today
    the 'Last' price from the current closing price market is used; for
    historical dates the 'Close' price from the historical market.

    NOTE(review): assumes used_price() returns multiplicative rates in the
    directions shown -- confirm against the price market setup.
    """
    ins_fm_ccy = ael.Instrument[fm_ccy]
    ins_to_ccy = ael.Instrument[to_ccy]
    ins_usd = ael.Instrument['USD']
    try:
        if dt == ael.date_today():
            # Intraday: 'Last' prices from the current closing price market.
            fm_usd_rate = ins_fm_ccy.used_price(ael.date_today(), ins_usd.insid, 'Last', 0, currclspricemkt)
            to_usd_rate = ins_usd.used_price(ael.date_today(), ins_to_ccy.insid, 'Last', 0, currclspricemkt)
        else:
            # Historical: 'Close' prices from the historical market.
            fm_usd_rate = ins_fm_ccy.used_price(dt, ins_usd.insid, 'Close', 0, histclspricemkt)
            to_usd_rate = ins_usd.used_price(dt, ins_to_ccy.insid, 'Close', 0, histclspricemkt)
        fx_rate = fm_usd_rate * to_usd_rate
    except Exception:
        # Price lookup can legitimately fail (missing price/instrument);
        # fall back to 0.0 so callers can detect an unavailable rate.
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        fx_rate = 0.0
    return fx_rate
def report_compare(x, y):
    """Python-2 cmp-style comparator for report rows.

    NOTE(review): currently a stub that always returns 1, so the sort in
    genCashEntryRpt imposes no meaningful ordering.  The original
    date/contract/counterparty comparison is preserved below as dead code.
    """
    return 1
# Dead code: earlier comparator implementation, kept as a module-level
# string literal (it has no runtime effect).
'''
if x[Contract_Date] == '':
    return -1
if y[Contract_Date] == '':
    return 1
if ael.date(x[Contract_Date]).to_string('%Y%m%d') > ael.date(y[Contract_Date]).to_string('%Y%m%d'):
    return 1
elif ael.date(x[Contract_Date]).to_string('%Y%m%d') < ael.date(y[Contract_Date]).to_string('%Y%m%d'):
    return -1
if x[Contract_No] > y[Contract_No]:
    return 1
elif x[Contract_No] < y[Contract_No]:
    return -1
if x[Counterparty] > y[Counterparty]:
    return 1
elif x[Counterparty] < y[Counterparty]:
    return -1
return 0
'''
def disable_variables(variables, enable = 0):
    """Set the enabled flag (index 9) of the named ael_variables rows."""
    for name in variables:
        for row in ael_variables:
            if row[0] == name:
                row[9] = enable
def cb(index, fieldValues):
    """Dialog callback: tie the file-name field to the 'saveToFile' checkbox."""
    global ael_variables
    changed = ael_variables[index][0]
    if changed == 'saveToFile':
        disable_variables(('fileName',), fieldValues[index])
    return fieldValues
def cb2(index, fieldValues):
    """Dialog callback: tie email list and subject to the 'sendEmail' checkbox."""
    global ael_variables
    changed = ael_variables[index][0]
    if changed == 'sendEmail':
        toggle = fieldValues[index]
        disable_variables(('emaillist',), toggle)
        disable_variables(('subject',), toggle)
    return fieldValues
def cb3(index, fieldValues):
    """Dialog callback: tie the file-name field to the 'saveToCSV' checkbox."""
    global ael_variables
    changed = ael_variables[index][0]
    if changed == 'saveToCSV':
        disable_variables(('fileName',), fieldValues[index])
    return fieldValues
def cb4(index, fieldValues):
    """Dialog callback: tie the network location field to 'saveToNetwork'."""
    global ael_variables
    changed = ael_variables[index][0]
    if changed == 'saveToNetwork':
        disable_variables(('networkDriveLocation',), fieldValues[index])
    return fieldValues
# AEL dialog variable definitions.  The positions used by the code in this
# file are: 0 = internal name (matched by the cb* callbacks), 7 = tooltip,
# 8 = change callback, 9 = enabled flag (toggled by disable_variables).
# Other positions follow the Front Arena ael_variables convention
# (label, type, candidate values, default, mandatory, multi-select) --
# TODO confirm against the AEL documentation.
ael_variables = [['asofdate', 'Date', 'string', [str(ael.date_today()), 'Today'], 'Today', 1, 0, 'Report Date', None, 1], \
            ['saveToFile', 'Save to file', 'int', [1, 0], 0, 0, 0, ttSaveToFile, cb, None], \
            ['fileName', 'File name', 'string', None, 'c:\\temp\\SecurityLoan_YYYYMMDD', 0, 0, ttFileName, None, 0], \
            ['sendEmail', 'Send mail', 'int', [1, 0], 0, 0, 0, ttSendMail, cb2, None], \
            ['emaillist', 'Email', 'string', None, 'louis.ck.wong@htisec.com', 0, 0, 'Email List', None, 0], \
            ['acquirers', 'Acquirer(s)', 'string', HTI_Util.getAllAcquirers(), None, 1, 1, 'Acquirer(s)', None, 1], \
            ['counterparties', 'Counterparty(s)', 'string', HTI_Util.getAllParties(), None, 0, 1, 'Counterparty(s)', None, 1], \
            ['portfolio', 'Portfolio', 'string', HTI_Util.getAllPortfolios(), None, 1, 1, 'Portfolio', None, 1], \
            ['subject', 'Email subject', 'string', None, 'FA4 (PROD) EDD Security Loan Report (TRS) asof @date', 1, 0, 'Email Subject', None, 0], \
            ['saveToCSV', 'CSV format', 'int', [1, 0], 0, 0, 0, ttCSV, None, None], \
            ['title', 'Report title', 'string', None, 'Cash Entry Report as of @date', 1, 0, 'Report Title', None, 1],
            ['period', 'Report Period', 'string', ['Inception', 'MTD'], 'Inception', 1, 0, 'Report Period', None, 1],
            ['currclspricemkt', 'Current Closing Price Market', 'string', None, '', 1, 0, 'Current Closing Price Market', None, 1],
            ['histclspricemkt', 'Historical Closing Price Market', 'string', None, '', 1, 0, 'Historical Closing Price Market', None, 1],
            ['base_ccy', 'Base Ccy', 'string', None, '', 1, 0, 'Base Ccy', None, 1], \
            ['fileperpty', 'Separte File for Counterparty', 'int', [1, 0], 0, 0, 0, 'Separte File for Counterparty', None, None], \
            ['saveToNetwork', 'Copy to Network', 'int', [1, 0], 0, 0, 0, 'Copy to Network Drive', cb4, None], \
            ['networkDriveLocation', 'Network Drive Location', 'string', None, 'C:\\temp\\PositionReport', 0, 0, 'Network Drive Location', None, 0]]
def ael_main(dict):
    """AEL entry point: read the dialog values, expand the portfolio /
    acquirer / counterparty selections into quoted SQL in-lists and generate
    the Cash Entry report -- either one combined file, or one file per
    counterparty when 'fileperpty' is ticked (optionally copied to a
    network drive).

    NOTE(review): the parameter shadows the builtin 'dict'; kept because the
    AEL framework invokes this entry point with this exact signature.
    """
    asofdate = dict['asofdate']
    if asofdate == 'Today':
        asofdate = ael.date_today()
    asofdate = ael.date(asofdate)
    # '@date' placeholders in title/subject are replaced with the report date.
    title = dict['title'].replace('@date', str(asofdate))
    period = dict['period']
    subject = dict['subject'].replace('@date', str(asofdate))
    saveToFile = dict['saveToFile']
    saveToCSV = dict['saveToCSV']
    fileName = dict['fileName']
    sendEmail = dict['sendEmail']
    emailList = dict['emaillist']
    recipients = emailList.split(',')
    # 'YYYYMMDD' in the configured file name becomes the valuation date.
    fileName = fileName.replace("YYYYMMDD", asofdate.to_string('%Y%m%d'))
    fileperpty = dict['fileperpty']
    saveToNetwork = dict['saveToNetwork']
    networkDriveLocation = dict['networkDriveLocation']
    # Portfolios
    portfolios = dict['portfolio']
    portfolioList2 = []
    pf_list = ''
    portfolioList2.extend(portfolios)
    # Expand each selected portfolio into its child portfolios (if any) and
    # accumulate a quoted, comma-separated in-list for the SQL.
    for port in portfolioList2:
        prfid = port
        pfarr = []
        pPf = ael.Portfolio[prfid]
        HTI_FeedTrade_EDD_Util.getChildPortfolio(pPf, pfarr)
        if len(pfarr) > 0:
            for pf in pfarr:
                if len(pf_list) != 0:
                    pf_list = pf_list + ','
                pf_list = pf_list + "'" + pf + "'"
        else:
            if len(pf_list) != 0:
                pf_list = pf_list + ','
            pf_list = pf_list + "'" + prfid + "'"
    # Acquirers
    acq_array_list = dict['acquirers']
    acq_list = ''
    for acq in acq_array_list:
        if acq_list == '':
            acq_list = "'" + acq + "'"
        else:
            acq_list = acq_list + ",'" + acq + "'"
    # Counterparties
    pty_array_list = dict['counterparties']
    pty_list = ''
    for pty in pty_array_list:
        if pty_list == '':
            pty_list = "'" + pty + "'"
        else:
            pty_list = pty_list + ",'" + pty + "'"
    currclspricemkt = dict['currclspricemkt']
    histclspricemkt = dict['histclspricemkt']
    base_ccy = dict['base_ccy']
    if not fileperpty:
        # Single combined report for all selected counterparties.
        genCashEntryRpt(asofdate, pf_list, acq_list, pty_list, base_ccy, currclspricemkt, histclspricemkt, title, subject, saveToFile, saveToCSV, sendEmail, emailList, fileName, fileperpty, period)
    else:
        # One report per counterparty: first query the distinct counterparty
        # ids matching the filters, then generate a report for each one.
        strSql = getReportSql()
        strSql = strSql.replace('@dt', asofdate.add_days(1).to_string('%Y-%m-%d'))
        strSql = strSql.replace('@portfolio_list', pf_list)
        strSql = strSql.replace('@accquirer_list', acq_list)
        if pty_list != '':
            counterparty_list_sql = 'and cpty.ptyid in (@counterparty_list)'
            counterparty_list_sql = counterparty_list_sql.replace("@counterparty_list", pty_list)
            strSql = strSql.replace("@counterparty_list_sql", counterparty_list_sql)
        else:
            strSql = strSql.replace("@counterparty_list_sql", ' ')
        # Dead code from an earlier MTD implementation, kept as a string
        # literal (no runtime effect).
        '''
        if period == 'MTD':
            fm_date = asofdate.first_day_of_month()
            print 'fm_date', fm_date, ael.date(fm_date).to_string('%Y-%m-%d')
            strSql = strSql.replace("@start_date", "and t.time >= '%s'" % (ael.date(fm_date).to_string('%Y-%m-%d')))
        else:
            strSql = strSql.replace("@start_date", ' ')
        '''
        strSql = strSql.replace("@start_date", ' ')
        strSql = "select distinct cpty.ptyid " + strSql
        print strSql
        rs = ael.asql(strSql)
        columns, buf = rs
        rptContent = []
        for table in buf:
            for row in table:
                ptyid = row[Client_Code]
                # '[ptyid]' in the configured file name is replaced with the
                # counterparty id for the per-party output file.
                ptyfileName = fileName.replace('[ptyid]', ptyid)
                print 'ptyfileName', ptyfileName
                dir_path = os.path.dirname(os.path.realpath(ptyfileName))
                if os.path.exists(dir_path) == False:
                    os.mkdir(dir_path)
                exact_ptyfileName = os.path.basename(ptyfileName)
                print 'exact_ptyfileName',exact_ptyfileName
                genCashEntryRpt(asofdate, pf_list, acq_list, "'"+ptyid+"'", base_ccy, currclspricemkt, histclspricemkt, title, subject, saveToFile, saveToCSV, sendEmail, emailList, ptyfileName, fileperpty, period)
                #print ptyfileName, destination_fileName
                if saveToNetwork:
                    # Copy the generated workbook to the configured share.
                    destination_fileName = networkDriveLocation + '\\' + exact_ptyfileName
                    #print 'destination_fileName', destination_fileName
                    destination_fileName = destination_fileName.replace('[ptyid]', ptyid)
                    #print 'destination_fileName', destination_fileName
                    dir_path = os.path.dirname(os.path.realpath(destination_fileName))
                    #print 'dir_path', dir_path
                    if os.path.exists(dir_path) == False:
                        #print 'dir_path exist', dir_path
                        os.mkdir(dir_path)
                    # Assumes genCashEntryRpt produced an .xlsx (saveToFile
                    # without CSV) -- TODO confirm for other option combos.
                    copyfile(ptyfileName+'.xlsx', destination_fileName+'.xlsx')
def monthCode(intMonth):
    """Map a month number (1-12) to its three-letter English abbreviation.

    Returns the empty string for any key outside 1-12.
    """
    abbreviations = dict(
        zip(range(1, 13),
            ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
             "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]))
    return abbreviations.get(intMonth, "")
def getReportSql():
    """Return the shared FROM/WHERE clause of the cash-entry query.

    Callers prepend their own SELECT list and substitute the placeholders:
      @accquirer_list / @portfolio_list -> quoted in-lists
      @counterparty_list_sql            -> optional counterparty filter clause
      @start_date                       -> optional MTD lower bound on t.time
      @dt                               -> exclusive upper bound (asofdate + 1)
    """
    strSql = """from trade t, instrument i, payment p, party cpty, party a, portfolio pf, instrument c
where t.insaddr = i.insaddr
and i.instype = 'Curr'
and t.type = 'Cash Entry'
and t.trdnbr = p.trdnbr
and t.counterparty_ptynbr = cpty.ptynbr
and t.acquirer_ptynbr = a.ptynbr
and p.curr = c.insaddr
and a.ptyid in (@accquirer_list)
@counterparty_list_sql
and t.prfnbr = pf.prfnbr
and pf.prfid in (@portfolio_list)
@start_date
and t.time < '@dt'
and t.status not in ('Void', 'Simulated')
"""
    # Debug trace of the raw template.
    print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!', strSql
    return strSql
def genCashEntryRpt(asofdate, pf_list, acq_list, pty_list, base_ccy, currclspricemkt, histclspricemkt, title, subject, saveToFile, saveToCSV, sendEmail, emailList, fileName, fileperpty, period):
report = ReportLayout(title)
font = HTI_ExcelReport2.Font()
font.bold = True
reportData = HTI_ExcelReport2.ReportData()
reportData.newSheet = True
reportData.headerText = ['Trade Date', 'Client Code', 'Client Name', 'Trade Reference', 'Type', 'Currency', 'Amount', 'External Reference']
strSql = getReportSql()
strSql = "select cpty.ptyid, cpty.fullname, t.trdnbr, t.time, p.type, c.insid, p.amount, add_info(t, 'External Reference') 'ext_ref' " + strSql
strSql = strSql.replace('@dt', asofdate.add_days(1).to_string('%Y-%m-%d'))
strSql = strSql.replace('@portfolio_list', pf_list)
strSql = strSql.replace('@accquirer_list', acq_list)
if pty_list != '':
counterparty_list_sql = 'and cpty.ptyid in (@counterparty_list)'
counterparty_list_sql = counterparty_list_sql.replace("@counterparty_list", pty_list)
strSql = strSql.replace("@counterparty_list_sql", counterparty_list_sql)
else:
strSql = strSql.replace("@counterparty_list_sql", ' ')
if period == 'MTD':
fm_date = asofdate.first_day_of_month()
strSql = strSql.replace("@start_date", "and t.time >= '%s'" % (ael.date(fm_date).to_string('%Y-%m-%d')))
else:
strSql = strSql.replace("@start_date", ' ')
print strSql
rs = ael.asql(strSql)
columns, buf = rs
rptContent = []
for table in buf:
for row in table:
client_code = row[Client_Code]
client_name = row[Client_Name]
trade_ref = row[Trade_Reference]
type = row[Type]
currency = row[Currency]
amt = row[Amount]
ext_ref = row[EXT_REF]
acm_trd = acm.FTrade[trade_ref]
if acm_trd != None:
print acm_trd
trade_date = acm.Time.DateFromTime(acm_trd.TradeTime())
rptRow = [str(trade_date), client_code, client_name, str(trade_ref), type, currency, str(amt), ext_ref]
rptContent.append(rptRow)
rptContent.sort(report_compare)
reportData.rows = rptContent
report.addReportData(reportData, {'SUM': [], 'COL_TEXT': [], 'CUSTOM_TEXT': {'COL': [], 'TEXT': []}})
if saveToFile:
if not saveToCSV:
try:
fileName = fileName + '.xlsx'
if os.path.exists(fileName):
os.remove(fileName)
except:
pass
if sendEmail and len(emailList) != 0:
report.saveNoQuit(fileName)
else:
report.save(fileName)
else:
fileName = fileName + '.csv'
if os.path.isfile(fileName):
os.remove(fileName)
csvData = []
csvData.append(reportData.headerText)
csvData = csvData + reportData.rows
print fileName
try:
outPutFile = open(fileName,'wb')
csvWriter = csv.writer(outPutFile, delimiter=',', quotechar='"')
for row in csvData:
csvWriter.writerow(row)
outPutFile.flush()
finally:
outPutFile.close()
else:
report.show()
class ReportLayout(HTI_ExcelReport2.CommonLayoutReport):
    """Excel layout for the Cash Entry report: writes a bold title row and
    formats the Amount column with thousand separators."""
    # Report title written into cell (1, 1); overwritten per instance.
    title = ''
    # 1-based Excel column index of 'Amount' (7th entry of headerText in
    # genCashEntryRpt).
    Amount = 7
    def __init__(self, title):
        self.title = title
        HTI_ExcelReport2.CommonLayoutReport.__init__(self)
    def reportHeader(self, currentRow, reportIndex, excelApp):
        # Write title
        excelApp.Cells(currentRow[self.ROW], 1).Value = self.title
        excelApp.Cells(currentRow[self.ROW], 1).Font.Bold = True
        excelApp.Cells(currentRow[self.ROW], 1).Font.Size = 12
        # Advance the row cursor past the title before the base header.
        currentRow[self.ROW] = currentRow[self.ROW] + 1
        HTI_ExcelReport2.CommonLayoutReport.reportHeader(self, currentRow, reportIndex, excelApp)
        # Thousand-separator / two-decimal format on the Amount column.
        excelApp.Columns(self.Amount).NumberFormat = "#,##0.00"
    def groupFooter(self, currentRow, reportIndex, group, excelApp):
        # No custom footer; delegate to the base layout.
        HTI_ExcelReport2.CommonLayoutReport.groupFooter(self, currentRow, reportIndex, group, excelApp)
    def reportEnd(self, excelApp):
        HTI_ExcelReport2.CommonLayoutReport.reportEnd(self, excelApp)
        #excelApp.Columns(self.Trade_Date).Select()
        #excelApp.Selection.HorizontalAlignment = HTI_ExcelReport2.ExcelConstant.xlLeft
        # Leave the cursor at the top-left cell.
        excelApp.Cells(1, 1).Select()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.