gt stringclasses 1 value | context stringlengths 2.49k 119k |
|---|---|
import cython
import rmgpy.molecule.generator as generator
import rmgpy.molecule.parser as parser
from .graph import Vertex, Edge, Graph, getVertexConnectivityValue
from .molecule import Atom, Bond, Molecule
import rmgpy.molecule.pathfinder as pathfinder
def generateResonanceIsomers(mol):
    """
    Generate and return all of the resonance isomers of this molecule.
    """
    cython.declare(isomers=list, newIsomers=list, index=cython.int, atom=Atom)
    cython.declare(isomer=Molecule, newIsomer=Molecule, isom=Molecule)

    isomers = [mol]

    # Breadth-first expansion: every isomer already in the list is itself
    # expanded, and any structure not seen before is appended for later
    # expansion, until the list stops growing.
    index = 0
    while index < len(isomers):
        isomer = isomers[index]
        newIsomers = []
        for algo in populate_resonance_generation_algorithm():
            newIsomers.extend(algo(isomer))
        for newIsomer in newIsomers:
            # Keep only structures not isomorphic to one we already have.
            if not any(isom.isIsomorphic(newIsomer) for isom in isomers):
                isomers.append(newIsomer)
        index += 1

    return isomers
def generateAdjacentResonanceIsomers(mol):
    """
    Generate all of the resonance isomers formed by one allyl radical shift.

    Each found path is applied in place to ``mol``, a deep copy is taken as
    the new isomer, and the shift is then undone, so ``mol`` is unchanged on
    return. Returns a list of Molecule objects (possibly empty).
    """
    cython.declare(isomers=list, paths=list, index=cython.int, isomer=Molecule)
    cython.declare(atom=Atom, atom1=Atom, atom2=Atom, atom3=Atom, bond12=Bond, bond23=Bond)
    cython.declare(v1=Vertex, v2=Vertex)

    isomers = []

    # Radicals
    if mol.isRadical():
        # Iterate over radicals in structure
        for atom in mol.vertices:
            paths = pathfinder.findAllDelocalizationPaths(atom)
            for atom1, atom2, atom3, bond12, bond23 in paths:
                # Adjust to (potentially) new resonance isomer: move the
                # unpaired electron from atom1 to atom3 across the allyl path.
                atom1.decrementRadical()
                atom3.incrementRadical()
                bond12.incrementOrder()
                bond23.decrementOrder()
                # Make a copy of isomer
                isomer = mol.copy(deep=True)
                # Also copy the connectivity values, since they are the same
                # for all resonance forms
                for index in range(len(mol.vertices)):
                    v1 = mol.vertices[index]
                    v2 = isomer.vertices[index]
                    v2.connectivity1 = v1.connectivity1
                    v2.connectivity2 = v1.connectivity2
                    v2.connectivity3 = v1.connectivity3
                    v2.sortingLabel = v1.sortingLabel
                # Restore current isomer (undo the in-place shift on mol)
                atom1.incrementRadical()
                atom3.decrementRadical()
                bond12.decrementOrder()
                bond23.incrementOrder()
                # Append to isomer list if unique
                # NOTE(review): despite the comment above, no uniqueness check
                # happens here -- deduplication is done by the caller
                # (generateResonanceIsomers).
                isomer.updateAtomTypes()
                isomers.append(isomer)

    return isomers
def generateLonePairRadicalResonanceIsomers(mol):
    """
    Generate all of the resonance isomers formed by lone electron pair - radical shifts.

    Each found path is applied in place to ``mol``, a deep copy is taken as
    the new isomer, and the shift is then undone, so ``mol`` is unchanged on
    return. Returns a list of Molecule objects (possibly empty).
    """
    cython.declare(isomers=list, paths=list, index=cython.int, isomer=Molecule)
    cython.declare(atom=Atom, atom1=Atom, atom2=Atom)
    cython.declare(v1=Vertex, v2=Vertex)

    isomers = []

    # Radicals
    if mol.isRadical():
        # Iterate over radicals in structure
        for atom in mol.vertices:
            paths = pathfinder.findAllDelocalizationPathsLonePairRadical(atom)
            for atom1, atom2 in paths:
                # Adjust to (potentially) new resonance isomer: trade the
                # radical on atom1 for a lone pair, and vice versa on atom2,
                # updating formal charges accordingly.
                atom1.decrementRadical()
                atom1.incrementLonePairs()
                atom1.updateCharge()
                atom2.incrementRadical()
                atom2.decrementLonePairs()
                atom2.updateCharge()
                # Make a copy of isomer
                isomer = mol.copy(deep=True)
                # Also copy the connectivity values, since they are the same
                # for all resonance forms
                for index in range(len(mol.vertices)):
                    v1 = mol.vertices[index]
                    v2 = isomer.vertices[index]
                    v2.connectivity1 = v1.connectivity1
                    v2.connectivity2 = v1.connectivity2
                    v2.connectivity3 = v1.connectivity3
                    v2.sortingLabel = v1.sortingLabel
                # Restore current isomer (undo the in-place shift on mol)
                atom1.incrementRadical()
                atom1.decrementLonePairs()
                atom1.updateCharge()
                atom2.decrementRadical()
                atom2.incrementLonePairs()
                atom2.updateCharge()
                # Append to isomer list if unique
                # NOTE(review): no uniqueness check here -- deduplication is
                # done by the caller (generateResonanceIsomers).
                isomer.updateAtomTypes()
                isomers.append(isomer)

    return isomers
def generateN5dd_N5tsResonanceIsomers(mol):
    """
    Generate all of the resonance isomers formed by shifts between N5dd and N5ts.

    Each found path is applied in place to ``mol``, a deep copy is taken as
    the new isomer, and the shift is then undone, so ``mol`` is unchanged on
    return. Returns a list of Molecule objects (possibly empty).
    """
    cython.declare(isomers=list, paths=list, index=cython.int, isomer=Molecule)
    cython.declare(atom=Atom, atom1=Atom, atom2=Atom, atom3=Atom)
    cython.declare(bond12=Bond, bond13=Bond)
    cython.declare(v1=Vertex, v2=Vertex)

    isomers = []

    # Iterate over nitrogen atoms in structure
    for atom in mol.vertices:
        paths = pathfinder.findAllDelocalizationPathsN5dd_N5ts(atom)
        # NOTE(review): the direction==1 and direction==2 branches below
        # perform byte-identical operations. Presumably the pathfinder orders
        # (atom2, atom3, bond12, bond13) such that the same adjustment is
        # correct for both directions -- confirm against
        # findAllDelocalizationPathsN5dd_N5ts before collapsing the duplication.
        for atom1, atom2, atom3, bond12, bond13, direction in paths:
            # from N5dd to N5ts
            if direction == 1:
                # Adjust to (potentially) new resonance isomer
                bond12.decrementOrder()
                bond13.incrementOrder()
                atom2.incrementLonePairs()
                atom3.decrementLonePairs()
                atom1.updateCharge()
                atom2.updateCharge()
                atom3.updateCharge()
                # Make a copy of isomer
                isomer = mol.copy(deep=True)
                # Also copy the connectivity values, since they are the same
                # for all resonance forms
                for index in range(len(mol.vertices)):
                    v1 = mol.vertices[index]
                    v2 = isomer.vertices[index]
                    v2.connectivity1 = v1.connectivity1
                    v2.connectivity2 = v1.connectivity2
                    v2.connectivity3 = v1.connectivity3
                    v2.sortingLabel = v1.sortingLabel
                # Restore current isomer (undo the in-place shift on mol)
                bond12.incrementOrder()
                bond13.decrementOrder()
                atom2.decrementLonePairs()
                atom3.incrementLonePairs()
                atom1.updateCharge()
                atom2.updateCharge()
                atom3.updateCharge()
                # Append to isomer list if unique
                # NOTE(review): no uniqueness check here -- deduplication is
                # done by the caller (generateResonanceIsomers).
                isomer.updateAtomTypes()
                isomers.append(isomer)

            # from N5ts to N5dd
            if direction == 2:
                # Adjust to (potentially) new resonance isomer
                bond12.decrementOrder()
                bond13.incrementOrder()
                atom2.incrementLonePairs()
                atom3.decrementLonePairs()
                atom1.updateCharge()
                atom2.updateCharge()
                atom3.updateCharge()
                # Make a copy of isomer
                isomer = mol.copy(deep=True)
                # Also copy the connectivity values, since they are the same
                # for all resonance forms
                for index in range(len(mol.vertices)):
                    v1 = mol.vertices[index]
                    v2 = isomer.vertices[index]
                    v2.connectivity1 = v1.connectivity1
                    v2.connectivity2 = v1.connectivity2
                    v2.connectivity3 = v1.connectivity3
                    v2.sortingLabel = v1.sortingLabel
                # Restore current isomer (undo the in-place shift on mol)
                bond12.incrementOrder()
                bond13.decrementOrder()
                atom2.decrementLonePairs()
                atom3.incrementLonePairs()
                atom1.updateCharge()
                atom2.updateCharge()
                atom3.updateCharge()
                # Append to isomer list if unique
                isomer.updateAtomTypes()
                isomers.append(isomer)

    return isomers
def generateAromaticResonanceIsomers(mol):
    """
    Generate the aromatic form of the molecule.

    Returns it as a single element of a list, or an empty list when the
    molecule is acyclic, has no six-membered rings, cannot be converted by
    RDKit, or ends up with inconsistent atom types after aromatization.
    """
    cython.declare(molecule=Molecule, rdAtomIndices=dict, aromatic=cython.bint, aromaticBonds=list)
    cython.declare(rings=list, ring0=list, i=cython.int, atom1=Atom, atom2=Atom, bond=Bond)

    from rdkit.Chem.rdchem import BondType
    AROMATIC = BondType.AROMATIC

    # Aromaticity requires a ring; bail out early for acyclic molecules.
    if not mol.isCyclic():
        return []

    molecule = mol.copy(deep=True)

    # In RMG, only 6-member rings can be considered aromatic, so ignore all other rings
    rings = [ring0 for ring0 in molecule.getSmallestSetOfSmallestRings() if len(ring0) == 6]
    if not rings:
        return []

    try:
        rdkitmol, rdAtomIndices = generator.toRDKitMol(molecule, removeHs=False, returnMapping=True)
    except ValueError:
        # RDKit could not interpret the molecule
        return []

    aromatic = False
    for ring0 in rings:
        aromaticBonds = []
        # Figure out which atoms and bonds are aromatic and reassign appropriately:
        for i, atom1 in enumerate(ring0):
            if not atom1.isCarbon():
                # all atoms in the ring must be carbon in RMG for our definition of aromatic
                break
            for atom2 in ring0[i + 1:]:
                if molecule.hasBond(atom1, atom2):
                    if rdkitmol.GetBondBetweenAtoms(rdAtomIndices[atom1], rdAtomIndices[atom2]).GetBondType() is AROMATIC:
                        aromaticBonds.append(molecule.getBond(atom1, atom2))
        else:  # didn't break, so all atoms in the ring are carbon
            if len(aromaticBonds) == 6:
                aromatic = True
                # Only change bonds if all 6 are aromatic. Otherwise don't do anything
                for bond in aromaticBonds:
                    bond.order = 'B'

    if not aromatic:
        return []
    try:
        molecule.updateAtomTypes()
    except Exception:
        # BUGFIX: was a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit. The intent is preserved: something
        # incorrect happened (e.g. 2 double bonds on a Cb atomtype), so do
        # not return the malformed isomer.
        return []
    # Nothing bad happened: return the aromatized copy.
    return [molecule]
def generateKekulizedResonanceIsomers(mol):
    """
    Generate a kekulized (single-double bond) form of the molecule.
    Returns a single Kekule form, as an element of a list of length 1.
    If there's an error (eg. in RDKit) then it just returns an empty list.
    """
    cython.declare(atom=Atom)

    # Only molecules that contain benzene-type carbons need kekulizing.
    if not any(atom.atomType.label in ('Cb', 'Cbf') for atom in mol.atoms):
        return []

    try:
        rdkitmol = generator.toRDKitMol(mol)  # This perceives aromaticity
        isomer = parser.fromRDKitMol(Molecule(), rdkitmol)  # This step Kekulizes the molecule
    except ValueError:
        return []

    isomer.updateAtomTypes()
    return [isomer]
def generate_isomorphic_isomers(mol):
    """
    Select the resonance isomer that is isomorphic to the parameter isomer, with the lowest unpaired
    electrons descriptor.

    We generate over all resonance isomers (non-isomorphic as well as isomorphic) and retain isomorphic
    isomers. Returns the list of isomorphic isomers found (``mol`` is always
    the first element).

    WIP: do not generate aromatic resonance isomers.
    """
    cython.declare(isomorphic_isomers=list,\
                   isomers=list,
                   )
    cython.declare(isomer=Molecule,\
                   newIsomer=Molecule,\
                   isom=Molecule
                   )
    cython.declare(index=int)

    isomorphic_isomers = [mol]  # resonance isomers that are isomorphic to the parameter isomer.
    isomers = [mol]

    # Iterate over resonance isomers (the list grows while we walk it).
    index = 0
    while index < len(isomers):
        isomer = isomers[index]

        newIsomers = []
        for algo in populate_resonance_generation_algorithm():
            newIsomers.extend(algo(isomer))

        for newIsomer in newIsomers:
            # Append to isomer list if unique
            for isom in isomers:
                # Deep copies are compared here -- presumably because
                # isIsomorphic can perturb state (e.g. sorting labels) on its
                # operands; TODO confirm and drop the copies if unnecessary.
                if isom.copy(deep=True).isIsomorphic(newIsomer.copy(deep=True)):
                    isomorphic_isomers.append(newIsomer)
                    break
            else:
                isomers.append(newIsomer)

        # Move to next resonance isomer
        index += 1

    return isomorphic_isomers
def populate_resonance_generation_algorithm():
    """
    Return the current set of resonance generation algorithms.

    Note: this is a tuple (not a list, as an earlier docstring claimed) of
    functions, each mapping a Molecule to a list of resonance isomers.
    """
    algorithms = (
        generateAdjacentResonanceIsomers,
        generateLonePairRadicalResonanceIsomers,
        generateN5dd_N5tsResonanceIsomers,
        generateKekulizedResonanceIsomers,
        generateAromaticResonanceIsomers,
    )
    return algorithms
| |
"""
Tests for `kolibri.utils.cli` module.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import copy
import logging
import os
from functools import wraps
import pytest
from mock import patch
import kolibri
from kolibri.utils import cli
from kolibri.utils import options
logger = logging.getLogger(__name__)
LOG_LOGGER = []
def version_file_restore(func):
    """
    Decorator that reads contents of the version file and restores it after
    calling ``func(orig_version='x.y', version_file='/path')``.
    If a version file doesn't exist, it calls ``func(... version_file=None)``
    This decorator is used for testing functions that trigger during upgrades
    without mocking more than necessary.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        version_file = cli.version_file()
        version_file_existed = os.path.isfile(version_file)
        orig_version = kolibri.__version__
        kwargs["orig_version"] = orig_version
        if version_file_existed:
            kwargs["version_file"] = version_file
        func(*args, **kwargs)
        if version_file_existed:
            # BUGFIX(idiom): use a context manager instead of
            # open(...).write(...), which relied on refcounting to close
            # the handle.
            with open(version_file, "w") as f:
                f.write(orig_version)
        # NOTE(review): if the version file did not exist beforehand but the
        # wrapped test created one, it is left behind -- confirm whether it
        # should be removed here to fully "restore" the state.
    return wrapper
def log_logger(logger_instance, LEVEL, msg, args, **kwargs):
    """
    Monkeypatching for logging.Logger._log to scoop up log messages if we wanna
    test something specific was logged.
    """
    LOG_LOGGER.append((LEVEL, msg))
    # Call the original function. Note: name mangling does not apply at module
    # level, so this looks up the literal attribute ``__log``, which
    # activate_log_logger() installs on logging.Logger via setattr.
    logger_instance.__log(LEVEL, msg, args, **kwargs)
def activate_log_logger(monkeypatch):
    """
    Activates logging everything to ``LOG_LOGGER`` with the monkeypatch pattern
    of py.test (test accepts a ``monkeypatch`` argument)
    """
    # Stash the real _log under the literal name ``__log`` (raising=False
    # because the attribute does not exist yet), then intercept _log.
    monkeypatch.setattr(logging.Logger, "__log", logging.Logger._log, raising=False)
    monkeypatch.setattr(logging.Logger, "_log", log_logger)
@pytest.fixture
def conf():
    """Yield kolibri's conf module, restoring its config after the test."""
    from kolibri.utils import conf
    # Snapshot the configuration so each test runs against pristine state.
    old_config = copy.deepcopy(conf.config)
    yield conf
    # Teardown: restore and persist the original configuration.
    conf.update(old_config)
    conf.save()
def test_bogus_plugin_autoremove(conf):
    """
    Checks that a plugin is auto-removed when it cannot be imported
    """
    plugin_name = "giraffe.horse"
    # Install a plugin whose dotted path cannot possibly be imported.
    conf.config["INSTALLED_APPS"].append(plugin_name)
    conf.save()
    conf.autoremove_unavailable_plugins()
    assert plugin_name not in conf.config["INSTALLED_APPS"]
def test_bogus_plugin_autoremove_no_path(conf):
    """
    Checks that a plugin without a dotted path is also auto-removed
    """
    plugin_name = "giraffehorse"  # no dot: not even a valid package path
    conf.config["INSTALLED_APPS"].append(plugin_name)
    conf.save()
    conf.autoremove_unavailable_plugins()
    assert plugin_name not in conf.config["INSTALLED_APPS"]
def test_bogus_plugin_disable(conf):
    """Disabling a plugin that was never installed must leave config untouched."""
    installed_apps_before = conf.config["INSTALLED_APPS"][:]
    cli.plugin("i_do_not_exist", disable=True)
    assert installed_apps_before == conf.config["INSTALLED_APPS"]
def test_plugin_cannot_be_imported_disable(conf):
    """
    A plugin may be in conf.config['INSTALLED_APPS'] but broken or uninstalled;
    disabling it should still remove it from the configuration.
    """
    plugin_name = "giraffe.horse"
    conf.config["INSTALLED_APPS"].append(plugin_name)
    conf.save()
    cli.plugin(plugin_name, disable=True)
    assert plugin_name not in conf.config["INSTALLED_APPS"]
def test_real_plugin_disable(conf):
    """A genuinely installed plugin can be disabled via the CLI."""
    installed_apps_before = conf.config["INSTALLED_APPS"][:]
    test_plugin = "kolibri.plugins.media_player"
    assert test_plugin in installed_apps_before
    # Because RIP example plugin
    cli.plugin(test_plugin, disable=True)
    assert test_plugin not in conf.config["INSTALLED_APPS"]
def test_real_plugin_disable_twice(conf):
    """Disabling an already-disabled plugin is a safe no-op."""
    installed_apps_before = conf.config["INSTALLED_APPS"][:]
    test_plugin = "kolibri.plugins.media_player"
    assert test_plugin in installed_apps_before
    # Because RIP example plugin
    cli.plugin(test_plugin, disable=True)
    assert test_plugin not in conf.config["INSTALLED_APPS"]
    # Second disable: must not raise or resurrect the plugin.
    installed_apps_before = conf.config["INSTALLED_APPS"][:]
    cli.plugin(test_plugin, disable=True)
    assert test_plugin not in conf.config["INSTALLED_APPS"]
def test_plugin_with_no_plugin_class(conf):
    """
    Expected behavior is that nothing blows up with exceptions, user just gets
    a warning and nothing is enabled or changed in the configuration.
    """
    # For fun, we pass in a system library -- importable, but not a plugin.
    installed_apps_before = conf.config["INSTALLED_APPS"][:]
    cli.plugin("os.path")
    assert installed_apps_before == conf.config["INSTALLED_APPS"]
@pytest.mark.django_db
def test_kolibri_listen_port_env(monkeypatch):
    """
    Starts and stops the server, mocking the actual server.start()
    Checks that the correct fallback port is used from the environment.
    """
    with patch("kolibri.core.content.utils.annotation.update_channel_metadata"):
        from kolibri.utils import server

        def start_mock(port, *args, **kwargs):
            # The server must be started on the port from the env var below.
            assert port == test_port

        activate_log_logger(monkeypatch)
        monkeypatch.setattr(server, "start", start_mock)

        test_port = 1234
        os.environ["KOLIBRI_HTTP_PORT"] = str(test_port)
        # force a reload of conf.OPTIONS so the environment variable will be read in
        from kolibri.utils import conf
        conf.OPTIONS.update(options.read_options_file(conf.KOLIBRI_HOME))

        server.start = start_mock
        cli.start(daemon=False)
        with pytest.raises(SystemExit) as excinfo:
            cli.stop()
        # BUGFIX: pytest's ExceptionInfo has no ``code`` attribute; the exit
        # status lives on the raised SystemExit at ``excinfo.value.code``.
        assert excinfo.value.code == 0
        # Stop the server AGAIN, asserting that we can call the stop command
        # on an already stopped server and will be gracefully informed about
        # it.
        with pytest.raises(SystemExit) as excinfo:
            cli.stop()
        assert excinfo.value.code == 0
        assert "Already stopped" in LOG_LOGGER[-1][1]

        def status_starting_up():
            raise server.NotRunning(server.STATUS_STARTING_UP)

        # Ensure that if a server is reported to be 'starting up', it doesn't
        # get killed while doing that.
        monkeypatch.setattr(server, "get_status", status_starting_up)
        with pytest.raises(SystemExit) as excinfo:
            cli.stop()
        assert excinfo.value.code == server.STATUS_STARTING_UP
        assert "Not stopped" in LOG_LOGGER[-1][1]
@pytest.mark.django_db
@version_file_restore
@patch("kolibri.utils.cli.update")
@patch("kolibri.utils.cli.plugin")
@patch("kolibri.core.deviceadmin.utils.dbbackup")
def test_first_run(dbbackup, plugin, update, version_file=None, orig_version=None):
    """
    Tests that the first_run() function performs as expected
    """
    # Remove the version file so initialize() treats this as a first run.
    if version_file:
        os.unlink(version_file)

    cli.initialize()

    update.assert_called_once()
    # No pre-existing install means there is nothing to back up.
    dbbackup.assert_not_called()

    # Check that it got called for each default plugin
    from kolibri.core.settings import DEFAULT_PLUGINS
    assert plugin.call_count == len(DEFAULT_PLUGINS)
@pytest.mark.django_db
@version_file_restore
@patch("kolibri.utils.cli.update")
def test_update(update, version_file=None, orig_version=None):
    """
    Tests that update() function performs as expected
    """
    version_file = cli.version_file()
    # Write a version that differs from the running one, so that
    # initialize() detects a version change and triggers update().
    # BUGFIX(idiom): context-managed write instead of open(...).write(...).
    with open(version_file, "w") as f:
        f.write(orig_version + "_test")
    cli.initialize()
    update.assert_called_once()
@pytest.mark.django_db
def test_should_back_up():
    """
    Tests our db backup logic: skip for dev versions, and backup on change
    """
    # Plain version bump: back up.
    assert cli.should_back_up("0.10.0", "0.10.1")
    # No change: no backup.
    assert not cli.should_back_up("0.10.0", "0.10.0")
    # Any dev version on either side: never back up.
    assert not cli.should_back_up("0.10.0-dev0", "0.10.0")
    assert not cli.should_back_up("0.10.0", "0.10.0-dev0")
    assert not cli.should_back_up("0.10.0-dev0", "0.10.0-dev0")
@pytest.mark.django_db
@version_file_restore
@patch("kolibri.utils.cli.update")
@patch("kolibri.core.deviceadmin.utils.dbbackup")
def test_update_no_version_change(
    dbbackup, update, version_file=None, orig_version=None
):
    """
    Tests that when the version doesn't change, we are not doing things we
    shouldn't
    """
    version_file = cli.version_file()
    # Write the *current* version so initialize() sees no change.
    # BUGFIX(idiom): context-managed write instead of open(...).write(...).
    with open(version_file, "w") as f:
        f.write(orig_version)
    cli.initialize()
    update.assert_not_called()
    dbbackup.assert_not_called()
def test_cli_usage():
    """Smoke-test the -h and --version flags; both must exit with status 0."""
    # Test the -h
    with pytest.raises(SystemExit) as excinfo:
        cli.main("-h")
    # BUGFIX: pytest's ExceptionInfo has no ``code`` attribute; the exit
    # status lives on the raised SystemExit at ``excinfo.value.code``.
    assert excinfo.value.code == 0
    with pytest.raises(SystemExit) as excinfo:
        cli.main("--version")
    assert excinfo.value.code == 0
def test_cli_parsing():
    """Table-driven check of cli.parse_args: (argv, expected docopt keys,
    expected pass-through args for django)."""
    test_patterns = (
        (["start"], {"start": True}, []),
        (["stop"], {"stop": True}, []),
        (["shell"], {"shell": True}, []),
        (["manage", "shell"], {"manage": True, "COMMAND": "shell"}, []),
        (["manage", "help"], {"manage": True, "COMMAND": "help"}, []),
        (["manage", "blah"], {"manage": True, "COMMAND": "blah"}, []),
        # Everything after ``--`` is forwarded to django untouched.
        (
            ["manage", "blah", "--debug", "--", "--django-arg"],
            {"manage": True, "COMMAND": "blah", "--debug": True},
            ["--django-arg"],
        ),
        # Unknown options after the command are also forwarded.
        (
            ["manage", "blah", "--django-arg"],
            {"manage": True, "COMMAND": "blah"},
            ["--django-arg"],
        ),
    )
    for p, docopt_expected, django_expected in test_patterns:
        docopt, django = cli.parse_args(p)
        # Only assert on the keys we care about; docopt returns many more.
        for k, v in docopt_expected.items():
            assert docopt[k] == v
        assert django == django_expected
| |
# Number of top relays (ranked by mean positive deviation) to keep.
TOP = 25
# import MySQLdb
import sys
import numpy
import csv
import datetime
import os

# relay_dic[relay_id][server_name][timestamp] -> tail columns of the CSV row.
relay_dic = {}
# For plotting storage: daily average deviation for the top relays.
top_relay_dic = {}
Tops = []     # NOTE(review): shadowed by a local of the same name in data_selecting()
Bottoms = []  # NOTE(review): never populated in the visible code
# path = "/Users/dzq/Desktop/CS/PURE/tor_oct.csv"
def data_selecting(path):
    """Analyze the relay/server CSV at ``path``.

    Builds relay_dic from the CSV, computes per-relay statistics over the
    positive deviations (column 5 minus column 6 of each row), keeps the TOP
    relays ranked by mean deviation, collapses their data to per-day means in
    top_relay_dic, and writes one CSV per (relay, server) under ./new/.

    NOTE(review): Python 2 only (``print`` statements, ``file()`` builtin,
    'wb' csv mode). Mutates the module-level relay_dic / top_relay_dic.
    """
    # Per-(relay, server) statistics over the positive deviations.
    RS_max = 0
    RS_min = 0
    RS_median = 0
    RS_mean = 0.0
    RS_std = 0.0
    # Per-relay aggregates of the per-server statistics.
    R_4_max = 0
    R_4_min = 0
    R_4_median = 0
    R_4_mean = 0.0
    R_4_std = 0.0
    R_4_Stat_List = []
    # NOTE(review): Python 2 ``file()``; the handle is never closed.
    original = file(path, 'r')
    reader = csv.reader(original)
    # Build relay_dic[relay][server][time] = remaining row columns.
    for row in reader:
        if row[3] not in relay_dic:
            relay_dic[row[3]] = {}
        ### server
        if row[1] not in relay_dic[row[3]]:
            relay_dic[row[3]][row[1]] = {}
        ### time for relay and server
        if row[2] not in relay_dic[row[3]][row[1]]:
            relay_dic[row[3]][row[1]][row[2]] = row[5:]
    for R_ID in relay_dic.keys():
        # print str(datetime.datetime.now())
        print "Currently at relay: "+R_ID
        num_of_servers = 0  # NOTE(review): assigned but never used
        RS_max_list = []
        RS_min_list = []
        RS_median_list = []
        RS_mean_list = []
        RS_std_list = []
        for S_Name in relay_dic[R_ID].keys():
            print ' Now: '+R_ID+' and '+S_Name
            List_of_Positive_Dev = []
            List_of_Positive_points = []
            counter = 0
            for R_and_S in relay_dic[R_ID][S_Name].keys():
                counter+=1
                # First two stored columns -- presumably "advertised" vs
                # "read" values; TODO confirm against the CSV schema.
                ad = int(relay_dic[R_ID][S_Name][R_and_S][0])
                re = int(relay_dic[R_ID][S_Name][R_and_S][1])
                Dev = ad - re
                if(Dev>0): #Dev>0
                    ### the top negative ones too
                    List_of_Positive_Dev.append(Dev)
                    # List_of_Positive_points.append([ad,re])
            print counter
            ###point_dic[R_ID] = List_of_Positive_points###Don't use it. Too big!
            ###For plotting, we want the points to be average of each day (24ish hours)
            if(len(List_of_Positive_Dev)>0):
                RS_max = numpy.amax(List_of_Positive_Dev)
                RS_min = numpy.amin(List_of_Positive_Dev)
                RS_median = numpy.median(List_of_Positive_Dev)
                RS_mean = numpy.mean(List_of_Positive_Dev)
                RS_std = numpy.std(List_of_Positive_Dev)
                RS_max_list.append(RS_max)
                RS_min_list.append(RS_min)
                RS_median_list.append(RS_median)
                RS_mean_list.append(RS_mean)
                RS_std_list.append(RS_std)
        # print len(RS_mean_list)
        # NOTE(review): numpy.amax([]) raises if no server had any positive
        # deviation for this relay -- confirm input guarantees this can't happen.
        R_4_max = numpy.amax(RS_max_list)
        R_4_min = numpy.amin(RS_min_list)
        R_4_median = numpy.median(RS_median_list)
        R_4_mean = numpy.mean(RS_mean_list)
        R_4_std = numpy.std(RS_std_list)
        Relay_Stat = [R_ID, R_4_max, R_4_min, R_4_median, R_4_mean, R_4_std]
        R_4_Stat_List.append(Relay_Stat)
    # return
    # Rank relays by mean deviation (index 4) and keep the TOP of them.
    Sorted_R_4_List = sorted(R_4_Stat_List, key=lambda item:item[4],reverse=True)
    Tops = Sorted_R_4_List[:TOP]
    # print Tops
    # def building_top_relay_dic():
    # print Tops
    for Toppie in Tops:
        R_ID = Toppie[0]
        top_relay_dic[R_ID] = {}
        for S_Name in relay_dic[R_ID].keys():
            top_relay_dic[R_ID][S_Name] = {}
            for Time in relay_dic[R_ID][S_Name].keys():
                time_split = Time.split(' ')
                # print time_split[0].split('-')[1]
                # Skip September -- TODO confirm why month '09' is excluded.
                if(time_split[0].split('-')[1]=='09'):
                    continue
                # return
                if(time_split[0] not in top_relay_dic[R_ID][S_Name].keys()):
                    top_relay_dic[R_ID][S_Name][time_split[0]] = [] #for one day
                    #counter+=1#??? counter for each day
                    ad = int(relay_dic[R_ID][S_Name][Time][0])
                    re = int(relay_dic[R_ID][S_Name][Time][1])
                    Dev = ad - re
                    if(Dev>0): #Dev>0
                        top_relay_dic[R_ID][S_Name][time_split[0]].append(Dev)
                else:
                    # NOTE(review): identical to the branch above; the whole
                    # if/else could collapse to a setdefault plus one append.
                    ad = int(relay_dic[R_ID][S_Name][Time][0])
                    re = int(relay_dic[R_ID][S_Name][Time][1])
                    Dev = ad - re
                    if(Dev>0): #Dev>0
                        top_relay_dic[R_ID][S_Name][time_split[0]].append(Dev)
            # print top_relay_dic
            # Collapse each day's deviations to their mean (0 when empty).
            for Day in top_relay_dic[R_ID][S_Name].keys():
                if(top_relay_dic[R_ID][S_Name][Day]!=[]):
                    # top_relay_dic[R_ID][S_Name][Day] = numpy.mean(top_relay_dic[R_ID][S_Name][Day])
                    top_relay_dic[R_ID][S_Name][Day] = numpy.mean(top_relay_dic[R_ID][S_Name][Day])
                else:
                    top_relay_dic[R_ID][S_Name][Day] = 0
    # Write one CSV per (top relay, server): rows of
    # (day-of-month suffix, daily mean deviation, relay id).
    i = 0
    for R_ID in top_relay_dic.keys():
        foldername = './new/Top'+str(i)#+'_'+R_ID
        i+=1
        if not os.path.exists(foldername):
            os.makedirs(foldername)
        j=0
        for S_Name in top_relay_dic[R_ID].keys():
            filename = foldername+'/'+S_Name+'.csv'
            j+=1
            day_array = sorted(top_relay_dic[R_ID][S_Name], key=lambda x: datetime.datetime.strptime(x, '%Y-%m-%d'))
            dayer = []
            for ii in range(0,len(day_array)):
                dayer.append(day_array[ii][-2:])  # last two chars = day of month
            with open(filename, 'wb') as csvfile:
                spamwriter = csv.writer(csvfile, delimiter=' ', quotechar='|', quoting=csv.QUOTE_MINIMAL)
                for k in range(0,len(day_array)):
                    spamwriter.writerow([dayer[k],top_relay_dic[R_ID][S_Name][day_array[k]],R_ID])
# def making_csv():
if __name__ == '__main__':
    # BUGFIX: the original called ``data_selecting()`` with no argument, but
    # the function requires a CSV path, so the script crashed with a
    # TypeError at startup. Take the path from the command line instead, and
    # guard with __main__ so importing this module has no side effects.
    data_selecting(sys.argv[1])
# building_top_relay_dic()
# print '\n'
# print top_relay_dic
| |
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Specific, edge-case tests for the MediaFile metadata layer.
"""
import os
import shutil
import _common
from _common import unittest
import beets.mediafile
class EdgeTest(unittest.TestCase):
    """Tests against odd real-world files that exercise MediaFile edge cases.

    All fixture files are read from the shared resource directory
    ``_common.RSRC``.
    """

    def test_emptylist(self):
        # Some files have an ID3 frame that has a list with no elements.
        # This is very hard to produce, so this is just the first 8192
        # bytes of a file found "in the wild".
        emptylist = beets.mediafile.MediaFile(
            os.path.join(_common.RSRC, 'emptylist.mp3'))
        genre = emptylist.genre
        self.assertEqual(genre, '')

    def test_release_time_with_space(self):
        # Ensures that release times delimited by spaces are ignored.
        # Amie Street produces such files.
        space_time = beets.mediafile.MediaFile(
            os.path.join(_common.RSRC, 'space_time.mp3'))
        self.assertEqual(space_time.year, 2009)
        self.assertEqual(space_time.month, 9)
        self.assertEqual(space_time.day, 4)

    def test_release_time_with_t(self):
        # Ensures that release times delimited by Ts are ignored.
        # The iTunes Store produces such files.
        t_time = beets.mediafile.MediaFile(
            os.path.join(_common.RSRC, 't_time.m4a'))
        self.assertEqual(t_time.year, 1987)
        self.assertEqual(t_time.month, 3)
        self.assertEqual(t_time.day, 31)

    def test_tempo_with_bpm(self):
        # Some files have a string like "128 BPM" in the tempo field
        # rather than just a number.
        f = beets.mediafile.MediaFile(os.path.join(_common.RSRC, 'bpm.mp3'))
        self.assertEqual(f.bpm, 128)

    def test_discc_alternate_field(self):
        # Different taggers use different vorbis comments to reflect
        # the disc and disc count fields: ensure that the alternative
        # style works.
        f = beets.mediafile.MediaFile(os.path.join(_common.RSRC, 'discc.ogg'))
        self.assertEqual(f.disc, 4)
        self.assertEqual(f.disctotal, 5)

    def test_old_ape_version_bitrate(self):
        # Old APE container versions have no usable bitrate info.
        f = beets.mediafile.MediaFile(os.path.join(_common.RSRC, 'oldape.ape'))
        self.assertEqual(f.bitrate, 0)
# Shorthand for the module-private casting helper exercised by the tests below.
_sc = beets.mediafile._safe_cast
class InvalidValueToleranceTest(unittest.TestCase):
    """_safe_cast must coerce malformed tag values instead of raising.

    Note: Python 2 module (uses the ``unicode`` builtin).
    """

    def test_safe_cast_string_to_int(self):
        # Non-numeric strings fall back to 0.
        self.assertEqual(_sc(int, 'something'), 0)

    def test_safe_cast_int_string_to_int(self):
        self.assertEqual(_sc(int, '20'), 20)

    def test_safe_cast_string_to_bool(self):
        # Arbitrary strings are falsy...
        self.assertEqual(_sc(bool, 'whatever'), False)

    def test_safe_cast_intstring_to_bool(self):
        # ...but numeric strings follow their integer truthiness.
        self.assertEqual(_sc(bool, '5'), True)

    def test_safe_cast_string_to_float(self):
        self.assertAlmostEqual(_sc(float, '1.234'), 1.234)

    def test_safe_cast_int_to_float(self):
        self.assertAlmostEqual(_sc(float, 2), 2.0)

    def test_safe_cast_string_with_cruft_to_float(self):
        # Trailing garbage after the number is stripped.
        self.assertAlmostEqual(_sc(float, '1.234stuff'), 1.234)

    def test_safe_cast_negative_string_to_float(self):
        self.assertAlmostEqual(_sc(float, '-1.234'), -1.234)

    def test_safe_cast_special_chars_to_unicode(self):
        us = _sc(unicode, 'caf\xc3\xa9')
        self.assertTrue(isinstance(us, unicode))
        self.assertTrue(us.startswith(u'caf'))
class SafetyTest(unittest.TestCase):
    """Corrupt or bogus files must raise MediaFile's own exception types."""

    def _exccheck(self, fn, exc, data=''):
        # Write a throwaway fixture with the given (usually invalid) content
        # and assert that opening it raises ``exc``.
        fn = os.path.join(_common.RSRC, fn)
        with open(fn, 'w') as f:
            f.write(data)
        try:
            self.assertRaises(exc, beets.mediafile.MediaFile, fn)
        finally:
            os.unlink(fn)  # delete the temporary file

    def test_corrupt_mp3_raises_unreadablefileerror(self):
        # Make sure we catch Mutagen reading errors appropriately.
        self._exccheck('corrupt.mp3', beets.mediafile.UnreadableFileError)

    def test_corrupt_mp4_raises_unreadablefileerror(self):
        self._exccheck('corrupt.m4a', beets.mediafile.UnreadableFileError)

    def test_corrupt_flac_raises_unreadablefileerror(self):
        self._exccheck('corrupt.flac', beets.mediafile.UnreadableFileError)

    def test_corrupt_ogg_raises_unreadablefileerror(self):
        self._exccheck('corrupt.ogg', beets.mediafile.UnreadableFileError)

    def test_invalid_ogg_header_raises_unreadablefileerror(self):
        self._exccheck('corrupt.ogg', beets.mediafile.UnreadableFileError,
                       'OggS\x01vorbis')

    def test_corrupt_monkeys_raises_unreadablefileerror(self):
        self._exccheck('corrupt.ape', beets.mediafile.UnreadableFileError)

    def test_invalid_extension_raises_filetypeerror(self):
        self._exccheck('something.unknown', beets.mediafile.FileTypeError)

    def test_magic_xml_raises_unreadablefileerror(self):
        # 'ftyp' magic inside an .xml file must not be mistaken for MP4.
        self._exccheck('nothing.xml', beets.mediafile.UnreadableFileError,
                       "ftyp")

    def test_broken_symlink(self):
        fn = os.path.join(_common.RSRC, 'brokenlink')
        os.symlink('does_not_exist', fn)
        try:
            self.assertRaises(IOError,
                              beets.mediafile.MediaFile, fn)
        finally:
            os.unlink(fn)
class SideEffectsTest(unittest.TestCase):
    """Merely opening a file must not modify it on disk."""

    def setUp(self):
        self.empty = os.path.join(_common.RSRC, 'empty.mp3')

    def test_opening_tagless_file_leaves_untouched(self):
        # Compare mtimes before and after a read-only open.
        old_mtime = os.stat(self.empty).st_mtime
        beets.mediafile.MediaFile(self.empty)
        new_mtime = os.stat(self.empty).st_mtime
        self.assertEqual(old_mtime, new_mtime)
class EncodingTest(unittest.TestCase):
    """Non-ASCII tag values must survive a save/reload round trip."""

    def setUp(self):
        # Work on a copy so the pristine fixture is preserved.
        src = os.path.join(_common.RSRC, 'full.m4a')
        self.path = os.path.join(_common.RSRC, 'test.m4a')
        shutil.copy(src, self.path)
        self.mf = beets.mediafile.MediaFile(self.path)

    def tearDown(self):
        os.remove(self.path)

    def test_unicode_label_in_m4a(self):
        self.mf.label = u'foo\xe8bar'
        self.mf.save()
        new_mf = beets.mediafile.MediaFile(self.path)
        self.assertEqual(new_mf.label, u'foo\xe8bar')
class ZeroLengthMediaFile(beets.mediafile.MediaFile):
    """MediaFile variant that reports a zero duration, for the tests below."""

    @property
    def length(self):
        return 0.0
class MissingAudioDataTest(unittest.TestCase):
    """Derived fields must degrade gracefully when audio info is missing."""

    def setUp(self):
        super(MissingAudioDataTest, self).setUp()
        path = os.path.join(_common.RSRC, 'full.mp3')
        self.mf = ZeroLengthMediaFile(path)

    def test_bitrate_with_zero_length(self):
        del self.mf.mgfile.info.bitrate  # Not available directly.
        # With zero length and no direct bitrate, the fallback is 0.
        self.assertEqual(self.mf.bitrate, 0)
class TypeTest(unittest.TestCase):
    """Field setters must coerce odd input types to the field's type."""

    def setUp(self):
        super(TypeTest, self).setUp()
        path = os.path.join(_common.RSRC, 'full.mp3')
        self.mf = beets.mediafile.MediaFile(path)

    def test_year_integer_in_string(self):
        # A numeric string is accepted and read back as an int.
        self.mf.year = '2009'
        self.assertEqual(self.mf.year, 2009)

    def test_set_replaygain_gain_to_none(self):
        # None collapses to the field's zero value.
        self.mf.rg_track_gain = None
        self.assertEqual(self.mf.rg_track_gain, 0.0)

    def test_set_replaygain_peak_to_none(self):
        self.mf.rg_track_peak = None
        self.assertEqual(self.mf.rg_track_peak, 0.0)

    def test_set_year_to_none(self):
        self.mf.year = None
        self.assertEqual(self.mf.year, 0)

    def test_set_track_to_none(self):
        self.mf.track = None
        self.assertEqual(self.mf.track, 0)
class SoundCheckTest(unittest.TestCase):
    """Tests for the iTunes SoundCheck gain/peak encode/decode helpers."""

    def test_round_trip(self):
        # Encoding then decoding must reproduce the original pair.
        data = beets.mediafile._sc_encode(1.0, 1.0)
        gain, peak = beets.mediafile._sc_decode(data)
        self.assertEqual(gain, 1.0)
        self.assertEqual(peak, 1.0)

    def test_decode_zero(self):
        data = u' 80000000 80000000 00000000 00000000 00000000 00000000 ' \
               u'00000000 00000000 00000000 00000000'
        gain, peak = beets.mediafile._sc_decode(data)
        self.assertEqual(gain, 0.0)
        self.assertEqual(peak, 0.0)

    def test_malformatted(self):
        # Garbage input decodes to zeros rather than raising.
        gain, peak = beets.mediafile._sc_decode(u'foo')
        self.assertEqual(gain, 0.0)
        self.assertEqual(peak, 0.0)
class ID3v23Test(unittest.TestCase):
    """The id3v23 save flag must emit v2.3 frames (TYER) instead of v2.4
    frames (TDRC), and be a no-op for non-MP3 formats.

    ``_make_test``/``_delete_test`` act as manual per-test setUp/tearDown
    because each test picks its own fixture extension.
    """

    def _make_test(self, ext='mp3'):
        # Copy the pristine fixture so the test can modify it freely.
        src = os.path.join(_common.RSRC, 'full.{0}'.format(ext))
        self.path = os.path.join(_common.RSRC, 'test.{0}'.format(ext))
        shutil.copy(src, self.path)
        return beets.mediafile.MediaFile(self.path)

    def _delete_test(self):
        os.remove(self.path)

    def test_v24_year_tag(self):
        mf = self._make_test()
        try:
            mf.year = 2013
            mf.save(id3v23=False)
            # ID3v2.4 stores the date in TDRC; TYER must be absent.
            frame = mf.mgfile['TDRC']
            self.assertTrue('2013' in str(frame))
            self.assertTrue('TYER' not in mf.mgfile)
        finally:
            self._delete_test()

    def test_v23_year_tag(self):
        mf = self._make_test()
        try:
            mf.year = 2013
            mf.save(id3v23=True)
            # ID3v2.3 stores the year in TYER; TDRC must be absent.
            frame = mf.mgfile['TYER']
            self.assertTrue('2013' in str(frame))
            self.assertTrue('TDRC' not in mf.mgfile)
        finally:
            self._delete_test()

    def test_v23_on_non_mp3_is_noop(self):
        mf = self._make_test('m4a')
        try:
            mf.year = 2013
            # Must not raise even though m4a has no ID3 version concept.
            mf.save(id3v23=True)
        finally:
            self._delete_test()
def suite():
    """Return all tests in this module, for the project's test runner."""
    return unittest.TestLoader().loadTestsFromName(__name__)


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| |
#
# Zoom.py -- Zoom plugin for fits viewer
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import gtk
import gobject
from ginga.gtkw import GtkHelp
from ginga.misc import Bunch
from ginga.gtkw import FitsImageCanvasGtk
from ginga.gtkw import FitsImageCanvasTypesGtk as CanvasTypes
from ginga import GingaPlugin
class Zoom(GingaPlugin.GlobalPlugin):
    def __init__(self, fv):
        """Initialize zoom-view state and register viewer callbacks.

        fv: the top-level viewer object this global plugin serves.
        """
        # superclass defines some variables for us, like logger
        super(Zoom, self).__init__(fv)

        self.zoomimage = None               # zoom canvas, created in build_gui()
        self.default_radius = 30            # default zoom-region radius
        self.default_zoom = 3               # default magnification level
        self.zoom_radius = self.default_radius
        self.zoom_amount = self.default_zoom
        self.zoom_x = 0
        self.zoom_y = 0
        self.t_abszoom = True               # absolute (vs. relative) zoom mode
        self.zoomtask = None                # pending deferred-update handle, if any
        self.fitsimage_focus = None         # image currently driving the zoom view
        # Lag before refresh -- presumably matches the "Lag Time" spinbutton
        # in build_gui(); TODO confirm the units.
        self.lagtime = 2

        fv.add_callback('add-channel', self.add_channel)
        fv.add_callback('active-image', self.focus_cb)
def build_gui(self, container):
vpaned = gtk.VPaned()
width, height = 200, 200
# Uncomment to debug; passing parent logger generates too
# much noise in the main logger
#zi = FitsImageCanvasGtk.FitsImageCanvas(logger=self.logger)
zi = FitsImageCanvasGtk.FitsImageCanvas(logger=None)
zi.enable_autozoom('off')
zi.enable_autocuts('off')
zi.enable_zoom(False)
#zi.set_scale_limits(0.001, 1000.0)
zi.zoom_to(self.default_zoom, redraw=False)
zi.add_callback('zoom-set', self.zoomset)
#zi.add_callback('motion', self.showxy)
zi.set_bg(0.4, 0.4, 0.4)
zi.show_pan_mark(True, redraw=False)
self.zoomimage = zi
iw = zi.get_widget()
iw.set_size_request(width, height)
vpaned.pack1(iw, resize=True, shrink=True)
vbox = gtk.VBox()
vbox.pack_start(gtk.Label("Zoom Radius:"), padding=2,
fill=True, expand=False)
adj = gtk.Adjustment(lower=1, upper=100)
adj.set_value(self.zoom_radius)
scale = GtkHelp.HScale(adj)
scale.set_size_request(200, -1)
scale.set_digits(0)
scale.set_draw_value(True)
scale.set_value_pos(gtk.POS_BOTTOM)
#scale.set_update_policy(gtk.UPDATE_DISCONTINUOUS)
self.w_radius = scale
scale.connect('value-changed', self.set_radius_cb)
vbox.pack_start(scale, padding=0, fill=True, expand=False)
vbox.pack_start(gtk.Label("Zoom Amount:"), padding=2,
fill=True, expand=False)
adj = gtk.Adjustment(lower=-20, upper=30)
adj.set_value(self.zoom_amount)
scale = GtkHelp.HScale(adj)
scale.set_size_request(200, -1)
scale.set_digits(0)
scale.set_draw_value(True)
scale.set_value_pos(gtk.POS_BOTTOM)
#scale.set_update_policy(gtk.UPDATE_DISCONTINUOUS)
self.w_amount = scale
scale.connect('value-changed', self.set_amount_cb)
vbox.pack_start(scale, padding=0, fill=True, expand=False)
captions = (('Zoom', 'label'),
("Relative Zoom", 'checkbutton'),
("Lag Time", 'spinbutton'),
('Defaults', 'button'),
)
w, b = GtkHelp.build_info(captions)
b.zoom.set_text(self.fv.scale2text(zi.get_scale()))
self.wzoom = b
b.relative_zoom.set_active(not self.t_abszoom)
b.relative_zoom.sconnect("toggled", self.set_absrel_cb)
b.defaults.connect("clicked", lambda w: self.set_defaults())
adj = b.lag_time.get_adjustment()
adj.configure(0, 0, 20, 1, 1, 1)
adj.set_value(self.lagtime)
b.lag_time.set_digits(0)
b.lag_time.set_wrap(True)
b.lag_time.connect('value-changed', self.setlag_cb)
vbox.pack_start(w, padding=4, fill=True, expand=False)
sw = gtk.ScrolledWindow()
sw.set_border_width(2)
sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
sw.add_with_viewport(vbox)
vpaned.pack2(sw, resize=True, shrink=True)
vpaned.show_all()
vpaned.set_position(height)
container.pack_start(vpaned, padding=0, fill=True, expand=True)
def prepare(self, fitsimage):
fitsimage.add_callback('image-set', self.new_image_cb)
#fitsimage.add_callback('focus', self.focus_cb)
# TODO: should we add our own canvas instead?
fitsimage.add_callback('motion', self.motion)
fitsimage.add_callback('cut-set', self.cutset_cb)
fitsimage.add_callback('transform', self.transform_cb)
fitsimage.add_callback('rotate', self.rotate_cb)
fitsimage.add_callback('zoom-set', self.zoomset_cb)
def add_channel(self, viewer, chinfo):
self.prepare(chinfo.fitsimage)
# CALLBACKS
def new_image_cb(self, fitsimage, image):
if fitsimage != self.fv.getfocus_fitsimage():
return True
self.fitsimage_focus = fitsimage
# Reflect transforms, colormap, etc.
fitsimage.copy_attributes(self.zoomimage,
['transforms', 'cutlevels',
'rgbmap'],
redraw=False)
## data = image.get_data()
## self.set_data(data)
def focus_cb(self, viewer, fitsimage):
self.fitsimage_focus = fitsimage
# Reflect transforms, colormap, etc.
fitsimage.copy_attributes(self.zoomimage,
['transforms', 'cutlevels',
'rgbmap', 'rotation'],
redraw=False)
# TODO: redo cutout?
# Match cut-levels to the ones in the "main" image
def cutset_cb(self, fitsimage, loval, hival):
if fitsimage != self.fitsimage_focus:
return True
self.zoomimage.cut_levels(loval, hival)
return True
def transform_cb(self, fitsimage):
if fitsimage != self.fitsimage_focus:
return True
flip_x, flip_y, swap_xy = fitsimage.get_transforms()
self.zoomimage.transform(flip_x, flip_y, swap_xy)
return True
def rotate_cb(self, fitsimage, deg):
if fitsimage != self.fitsimage_focus:
return True
self.zoomimage.rotate(deg)
return True
def _zoomset(self, fitsimage, zoomlevel):
if fitsimage != self.fitsimage_focus:
return True
if self.t_abszoom:
# Did user set to absolute zoom?
myzoomlevel = self.zoom_amount
else:
# Amount of zoom is a relative amount
myzoomlevel = self.zoomimage.get_zoom()
myzoomlevel = zoomlevel + self.zoom_amount
self.logger.debug("zoomlevel=%d myzoom=%d" % (
zoomlevel, myzoomlevel))
self.zoomimage.zoom_to(myzoomlevel, redraw=True)
text = self.fv.scale2text(self.zoomimage.get_scale())
return True
def zoomset_cb(self, fitsimage, zoomlevel, scale_x, scale_y):
"""This method is called when a main FITS widget changes zoom level.
"""
fac_x, fac_y = fitsimage.get_scale_base_xy()
fac_x_me, fac_y_me = self.zoomimage.get_scale_base_xy()
if (fac_x != fac_x_me) or (fac_y != fac_y_me):
alg = fitsimage.get_zoom_algorithm()
self.zoomimage.set_zoom_algorithm(alg)
self.zoomimage.set_scale_base_xy(fac_x, fac_y)
return self._zoomset(self.fitsimage_focus, zoomlevel)
def set_amount_cb(self, rng):
"""This method is called when 'Zoom Amount' control is adjusted.
"""
val = rng.get_value()
self.zoom_amount = val
zoomlevel = self.fitsimage_focus.get_zoom()
self._zoomset(self.fitsimage_focus, zoomlevel)
def set_absrel_cb(self, w):
self.t_abszoom = not w.get_active()
zoomlevel = self.fitsimage_focus.get_zoom()
return self._zoomset(self.fitsimage_focus, zoomlevel)
def set_defaults(self):
self.t_abszoom = True
self.wzoom.relative_zoom.set_active(not self.t_abszoom)
self.w_radius.set_value(self.default_radius)
self.w_amount.set_value(self.default_zoom)
self.zoomimage.zoom_to(self.default_zoom, redraw=False)
# LOGIC
def zoomset(self, fitsimage, zoomlevel, scalefactor):
text = self.fv.scale2text(self.zoomimage.get_scale())
self.wzoom.zoom.set_text(text)
def set_radius_cb(self, rng):
val = rng.get_value()
self.set_radius(val)
def setlag_cb(self, w):
val = w.get_value()
self.logger.debug("Setting lag time to %d" % (val))
self.lagtime = val
def set_radius(self, val):
self.logger.debug("Setting radius to %d" % val)
self.zoom_radius = val
fitsimage = self.fitsimage_focus
if fitsimage == None:
return True
image = fitsimage.get_image()
wd, ht = image.get_size()
data_x, data_y = wd // 2, ht // 2
self.showxy(fitsimage, data_x, data_y)
def showxy(self, fitsimage, data_x, data_y):
# Cut and show zoom image in zoom window
self.zoom_x, self.zoom_y = data_x, data_y
image = fitsimage.get_image()
if image == None:
# No image loaded into this channel
return True
# If this is a new source, then update our widget with the
# attributes of the source
if self.fitsimage_focus != fitsimage:
self.focus_cb(self.fv, fitsimage)
# Cut out and show the zoom detail
if self.zoomtask:
gobject.source_remove(self.zoomtask)
self.zoomtask = gobject.timeout_add(self.lagtime, self.showzoom,
image, data_x, data_y)
# x1, y1, x2, y2 = self.cutdetail_radius(image, self.zoomimage,
# data_x, data_y,
# self.zoom_radius)
return True
def motion(self, fitsimage, button, data_x, data_y):
# TODO: pass _canvas_ and cut from that
self.showxy(fitsimage, data_x, data_y)
def showzoom(self, image, data_x, data_y):
# cut out and set the zoom image
x1, y1, x2, y2 = self.cutdetail_radius(image, self.zoomimage,
data_x, data_y,
self.zoom_radius, redraw=True)
self.zoomtask = None
def cutdetail_radius(self, image, dstimage, data_x, data_y,
radius, redraw=True):
data, x1, y1, x2, y2 = image.cutout_radius(int(data_x), int(data_y),
radius)
dstimage.set_data(data, redraw=redraw)
return (x1, y1, x2, y2)
def __str__(self):
return 'zoom'
#END
| |
import time
import textmagic
from textmagic.test import ONE_TEST_NUMBER
from textmagic.test import THREE_TEST_NUMBERS
from textmagic.test import MAX_GSM0338_SMS_LENGTH
from textmagic.test import MAX_GSM0338_MULTI_SMS_LENGTH
from textmagic.test import A_GSM0338_CHARACTER
from textmagic.test import MAX_UNICODE_SMS_LENGTH
from textmagic.test import MAX_UNICODE_MULTI_SMS_LENGTH
from textmagic.test import A_UNICODE_CHARACTER
from textmagic.test import TextMagicTestsBase
from textmagic.client import TextMagicError
class SendTestsBase(TextMagicTestsBase):
    """
    Abstract base class providing the two generic "send" scenarios that
    the concrete test classes build on: a send that must succeed and a
    send that must fail with a specific error.
    """

    expected_keys = ['sent_text', 'message_id', 'parts_count']

    def succeedingSendCase(self, message, numbers, expected_parts,
                           max_length=None, send_time=None, unicode=None):
        """Send *message* to *numbers* and verify the reply in detail."""
        reply = self.client._send(message, numbers, max_length, send_time,
                                  unicode)
        recipients = numbers if isinstance(numbers, list) else [numbers]
        self.assertKeysEqualExpectedKeys(reply, self.expected_keys)
        self.assertEquals(reply['sent_text'], message)
        self.assertEquals(len(reply['message_id']), len(recipients))
        self.assertEquals(set(reply['message_id'].values()), set(recipients))
        for msg_id in reply['message_id']:
            self.assertTrue(msg_id.isdigit())
        self.assertEquals(reply['parts_count'], expected_parts)

    def failingSendCase(self, message, numbers, error_code, error_message,
                        max_length=None, send_time=None, unicode=None):
        """Send *message* and verify it fails with the given error."""
        try:
            self.client._send(message, numbers, max_length, send_time,
                              unicode)
        except TextMagicError as e:
            self.assertEquals(e.error_code, error_code)
            self.assertEquals(e.error_message, error_message)
        else:
            self.fail('An error is expected to skip this line')
class BasicSendTests(SendTestsBase):
    """
    Smoke tests for the simplest send scenarios.
    """

    def testOneShortMessageSucceeds(self):
        self.succeedingSendCase('Test Message', ONE_TEST_NUMBER, 1)

    def testThreeShortMessagesSucceed(self):
        self.succeedingSendCase('Test Message', THREE_TEST_NUMBERS, 1)

    def testOneShortUnicodeMessageSucceeds(self):
        self.succeedingSendCase(u'\u2800\u2801\u2802\u2803 \u27F0',
                                ONE_TEST_NUMBER, 1)

    def _checkSendWithoutOptionalParameters(self, message):
        """Shared body for the public-API (client.send) tests below."""
        response = self.client.send(message, ONE_TEST_NUMBER)
        self.assertKeysEqualExpectedKeys(response, self.expected_keys)
        self.assertEquals(response['sent_text'], message)
        self.assertEquals(len(response['message_id']), 1)

    def testSendCanBeCalledWithoutOptionalParametersGsm0338(self):
        self._checkSendWithoutOptionalParameters('Test Message')

    def testSendCanBeCalledWithoutOptionalParametersUnicode(self):
        self._checkSendWithoutOptionalParameters(
            u'\u2800\u2801\u2802\u2803 \u27F0')
class MultipartSendTests(SendTestsBase):
    """
    Abstract length-boundary tests around part splitting.

    Concrete subclasses supply the character set via the ``char``,
    ``max_sms_length`` and ``max_multi_sms_length`` class attributes.
    """

    def succeedingSendLengthCase(self, length, expected_parts):
        text = self.char * length
        self.succeedingSendCase(text, ONE_TEST_NUMBER, expected_parts)

    def testLongestOnePartMessageSucceeds(self):
        self.succeedingSendLengthCase(self.max_sms_length, 1)

    def testShortestTwoPartMessageSucceeds(self):
        self.succeedingSendLengthCase(self.max_sms_length + 1, 2)

    def testLongestTwoPartMessageSucceeds(self):
        self.succeedingSendLengthCase(2 * self.max_multi_sms_length, 2)

    def testShortestThreePartMessageSucceeds(self):
        self.succeedingSendLengthCase(2 * self.max_multi_sms_length + 1, 3)

    def testLongestThreePartMessageSucceeds(self):
        self.succeedingSendLengthCase(3 * self.max_multi_sms_length, 3)

    def testTooLongMessageErrorWhenMessageIsLongerThanFourParts(self):
        too_long = self.char * (3 * self.max_multi_sms_length + 1)
        self.failingSendCase(too_long, ONE_TEST_NUMBER, 7,
                             'Too long message')
class MultipartGsm0338SendTests(MultipartSendTests):
    """Length-boundary tests using the GSM 03.38 character set."""
    char = A_GSM0338_CHARACTER
    max_sms_length = MAX_GSM0338_SMS_LENGTH
    max_multi_sms_length = MAX_GSM0338_MULTI_SMS_LENGTH
class MultipartUnicodeSendTests(MultipartSendTests):
    """Length-boundary tests using the unicode character set."""
    char = A_UNICODE_CHARACTER
    max_sms_length = MAX_UNICODE_SMS_LENGTH
    max_multi_sms_length = MAX_UNICODE_MULTI_SMS_LENGTH
class MaxLengthSendTests(SendTestsBase):
    """
    Tests for the ``max_length`` parameter, which caps the number of SMS
    parts a single send may produce.
    """

    def _gsm_message(self, parts):
        """Return a GSM 03.38 message exactly *parts* multi-SMS parts long."""
        return A_GSM0338_CHARACTER * (MAX_GSM0338_MULTI_SMS_LENGTH * parts)

    def testWrongParameterValueErrorWhenMaxLengthIsFour(self):
        self.failingSendCase(
            self._gsm_message(4), ONE_TEST_NUMBER, 10,
            'Wrong parameter value 4 for parameter max_length',
            max_length=4)

    def testTooLongMessageErrorWhenMaxLengthIsOne(self):
        self.failingSendCase(
            self._gsm_message(2), ONE_TEST_NUMBER, 7,
            'Too long message',
            max_length=1)

    def testTooLongMessageErrorWhenMaxLengthIsTwo(self):
        self.failingSendCase(
            self._gsm_message(3), ONE_TEST_NUMBER, 7,
            'Too long message',
            max_length=2)

    def testOnePartMessageFailsWhenMaxLengthIsZero(self):
        self.failingSendCase(
            A_GSM0338_CHARACTER * MAX_GSM0338_SMS_LENGTH, ONE_TEST_NUMBER,
            10, 'Wrong parameter value 0 for parameter max_length',
            max_length=0)

    def testTwoPartMessageFailsWhenMaxLengthIsZero(self):
        self.failingSendCase(
            self._gsm_message(2), ONE_TEST_NUMBER,
            10, 'Wrong parameter value 0 for parameter max_length',
            max_length=0)

    def testThreePartMessageSucceedsWhenMaxLengthIsUnspecified(self):
        self.succeedingSendCase(self._gsm_message(3), ONE_TEST_NUMBER, 3)
class SendCharacterSetsTests(SendTestsBase):
    """Tests around GSM 03.38 escaped (extension-table) characters."""

    def testEscapedCharactersLengthenMessage(self):
        # An escaped character occupies two septets on the wire, so a
        # message that exactly fills one SMS spills into a second part
        # when its final character needs escaping.
        for special in '{}\~[]|':
            message = 'a' * (MAX_GSM0338_SMS_LENGTH - 1) + special
            self.assertEquals(len(message), MAX_GSM0338_SMS_LENGTH)
            self.succeedingSendCase(message, ONE_TEST_NUMBER, 2)
class SendTimeTests(SendTestsBase):
    """
    Tests for the optional ``send_time`` parameter (deferred delivery).

    ``send_time`` may be supplied either as a unix timestamp or as a
    ``time.struct_time``.
    """

    def _time_now(self):
        """Return "now" as a unix timestamp.

        Against the live service the real clock is used; otherwise a
        fixed instant keeps recorded fixtures stable.
        """
        if textmagic.test.running_live:
            return time.time()
        else:
            return 1245879223

    def _sendTimeInFutureSucceeds(self, send_time):
        """A send_time in the future must be accepted."""
        message = 'Message from the future'
        self.succeedingSendCase(
            message=message,
            numbers=ONE_TEST_NUMBER,
            expected_parts=1,
            send_time=send_time)

    def _sendTimeInPastFails(self, send_time):
        """A send_time in the past must be rejected with error code 10.

        The server echoes the offending unix timestamp in its error
        message, so struct_time input is normalized before comparing.
        """
        message = 'Message from the past'
        # Bug fix: this previously used the ``cond and a or b`` idiom,
        # which selects the wrong branch when ``a`` is falsy (e.g. a
        # timestamp of 0); an explicit conditional is always correct.
        if isinstance(send_time, time.struct_time):
            time_in_message = time.mktime(send_time)
        else:
            time_in_message = send_time
        self.failingSendCase(
            message=message,
            numbers=ONE_TEST_NUMBER,
            send_time=send_time,
            error_code=10,
            error_message='Wrong parameter value %d for parameter send_time' % time_in_message)

    def testSendTimeAsStructTimeInFutureSucceeds(self):
        self._sendTimeInFutureSucceeds(time.localtime(self._time_now()+120))

    def testSendTimeAsStructTimeInPastFails(self):
        self._sendTimeInPastFails(time.localtime(self._time_now()-300))

    def testSendTimeAsUnixTimeInFutureSucceeds(self):
        self._sendTimeInFutureSucceeds(self._time_now()+120)

    def testSendTimeAsUnixTimeInPastFails(self):
        self._sendTimeInPastFails(self._time_now()-300)
class SendErrorsTests(SendTestsBase):
    """
    Test error messages on sending.
    """

    def testEmptyMessageFails(self):
        self.failingSendCase('', ONE_TEST_NUMBER, 1,
                             'Messages text is empty')

    def testWrongPhoneNumberFormatFails(self):
        self.failingSendCase('Error testing message', ['1234'], 9,
                             'Wrong phone number format')

    def testWrongPasswordFails(self):
        # Swap in a client with a bogus password; the username stays valid.
        self.client = textmagic.test.client_class(self.client.username,
                                                  'koos')
        self.failingSendCase('Wrong password testing message',
                             ONE_TEST_NUMBER, 5,
                             'Invalid username & password combination')

    def testWrongValueForUnicodeParameterFails(self):
        self.failingSendCase('Error testing message', ONE_TEST_NUMBER, 10,
                             'Wrong parameter value 10 for parameter unicode',
                             unicode=10)

    def testUnicodeMessageThatSaysNotUnicodeReportsTooLongUnicodeMessageReturnsError(self):
        self.failingSendCase(u'\uABCD' * MAX_GSM0338_MULTI_SMS_LENGTH,
                             ONE_TEST_NUMBER, 15,
                             'Unicode symbols detected',
                             unicode=0)

    def testGsm0338MessageThatSaysUnicodeSentAsGsm0338(self):
        self.succeedingSendCase('x' * (MAX_UNICODE_SMS_LENGTH * 3),
                                ONE_TEST_NUMBER, 2,
                                unicode=1)
| |
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import weakref
import six
from murano.dsl import constants
from murano.dsl import dsl
from murano.dsl import dsl_types
from murano.dsl import exceptions
from murano.dsl import helpers
from murano.dsl import typespec
from murano.dsl import yaql_integration
class MuranoObject(dsl_types.MuranoObject):
    """Runtime representation of a MuranoPL object instance.

    An instance carries its class (``__type``), its property values, and
    one MuranoObject per parent class; ancestor instances are shared via
    ``known_classes`` so a common base class appears only once in the
    inheritance graph.  Note: the double-underscore attributes are
    deliberately name-mangled; methods of this class access them across
    instances (e.g. ``obj.__properties`` in set_property/cast), which
    works because mangling is per-class, not per-instance.
    """

    def __init__(self, murano_class, owner, object_store, executor,
                 object_id=None, name=None, known_classes=None,
                 defaults=None, this=None):
        # known_classes maps class name -> MuranoObject for this object
        # graph; it is threaded through the recursive parent construction
        # below so each ancestor class is instantiated exactly once.
        if known_classes is None:
            known_classes = {}
        self.__owner = owner.real_this if owner else None
        self.__object_id = object_id or helpers.generate_id()
        self.__type = murano_class
        self.__properties = {}
        self.__parents = {}
        self.__defaults = defaults or {}
        # When this instance is a parent-class facet, __this points at
        # the most-derived object (see real_this)
        self.__this = this
        self.__name = name
        self.__extension = None
        # Weak references avoid reference cycles with store/executor
        self.__object_store = \
            None if object_store is None else weakref.ref(object_store)
        self.__executor = weakref.ref(executor)
        # Per-class configuration from the package; fall back to {} when
        # the package returns something that is not a dict
        self.__config = murano_class.package.get_class_config(
            murano_class.name)
        if not isinstance(self.__config, dict):
            self.__config = {}
        known_classes[murano_class.name] = self
        # Build (or reuse) one facet object per parent class, all sharing
        # this object's id and most-derived "this"
        for parent_class in murano_class.parents(self.real_this.type):
            name = parent_class.name
            if name not in known_classes:
                obj = parent_class.new(
                    owner, object_store, executor, object_id=self.__object_id,
                    known_classes=known_classes, defaults=defaults,
                    this=self.real_this).object
                self.__parents[name] = known_classes[name] = obj
            else:
                self.__parents[name] = known_classes[name]
        self.__initialized = False

    @property
    def extension(self):
        # Extension object produced by the class's __init__ method, if any
        return self.__extension

    @property
    def name(self):
        # Name is always taken from the most-derived object
        return self.real_this.__name

    @extension.setter
    def extension(self, value):
        self.__extension = value

    @property
    def object_store(self):
        # Dereference the weak ref; None when no store was supplied
        return None if self.__object_store is None else self.__object_store()

    @property
    def executor(self):
        return self.__executor()

    def initialize(self, context, object_store, params):
        """Populate properties from *params* and run initializers, once.

        Property default expressions may refer to other properties, so
        assignment is retried in passes until a fixed point is reached;
        if a full pass makes no progress a circular dependency is raised.
        """
        if self.__initialized:
            return
        # Config-usage properties come from the class config, not params
        for property_name in self.__type.properties:
            spec = self.__type.properties[property_name]
            if spec.usage == typespec.PropertyUsages.Config:
                if property_name in self.__config:
                    property_value = self.__config[property_name]
                else:
                    property_value = dsl.NO_VALUE
                self.set_property(property_name, property_value)

        init = self.type.methods.get('.init')
        used_names = set()
        names = set(self.__type.properties)
        if init:
            # .init arguments are fed from the same params namespace
            names.update(six.iterkeys(init.arguments_scheme))
        last_errors = len(names)
        init_args = {}
        while True:
            errors = 0
            for property_name in names:
                # An .init argument of the same name shadows the property
                if init and property_name in init.arguments_scheme:
                    spec = init.arguments_scheme[property_name]
                    is_init_arg = True
                else:
                    spec = self.__type.properties[property_name]
                    is_init_arg = False

                if property_name in used_names:
                    continue
                if spec.usage in (typespec.PropertyUsages.Config,
                                  typespec.PropertyUsages.Static):
                    # Config handled above; Static is not instance state
                    used_names.add(property_name)
                    continue
                if spec.usage == typespec.PropertyUsages.Runtime:
                    if not spec.has_default:
                        used_names.add(property_name)
                        continue
                    property_value = dsl.NO_VALUE
                else:
                    property_value = params.get(property_name, dsl.NO_VALUE)
                try:
                    if is_init_arg:
                        init_args[property_name] = property_value
                    else:
                        self.set_property(property_name, property_value)
                    used_names.add(property_name)
                except exceptions.UninitializedPropertyAccessError:
                    # Depends on a property not assigned yet; retry in a
                    # later pass
                    errors += 1
                except exceptions.ContractViolationException:
                    if spec.usage != typespec.PropertyUsages.Runtime:
                        raise
            if not errors:
                break
            if errors >= last_errors:
                # A whole pass made no progress -> unresolvable cycle
                raise exceptions.CircularExpressionDependenciesError()
            last_errors = errors

        executor = helpers.get_executor(context)
        # Run the native __init__ extension only on the first (non-store)
        # initialization and only once per object
        if ((object_store is None or not object_store.initializing) and
                self.__extension is None):
            method = self.type.methods.get('__init__')
            if method:
                filtered_params = yaql_integration.filter_parameters(
                    method.body, **params)
                self.__extension = method.invoke(
                    executor, self, filtered_params[0],
                    filtered_params[1], context)

        # Parent facets are initialized after this class's own properties
        for parent in self.__parents.values():
            parent.initialize(context, object_store, params)

        if (object_store is None or not object_store.initializing) and init:
            context[constants.CTX_ARGUMENT_OWNER] = self.real_this
            init.invoke(executor, self.real_this, (), init_args, context)
        self.__initialized = True

    @property
    def object_id(self):
        return self.__object_id

    @property
    def type(self):
        return self.__type

    @property
    def owner(self):
        return self.__owner

    @property
    def real_this(self):
        # The most-derived object when self is a parent-class facet
        return self.__this or self

    def get_property(self, name, context=None):
        """Read property *name*, honoring the caller class's visibility."""
        start_type, derived = self.__type, False
        caller_class = None if not context else helpers.get_type(context)
        # When called from a compatible (sub)class context, resolve the
        # property starting from the caller's class
        if caller_class is not None and caller_class.is_compatible(self):
            start_type, derived = caller_class, True
        if name in start_type.properties:
            spec = start_type.properties[name]
            if spec.usage == typespec.PropertyUsages.Static:
                # Static properties live on the class, not the instance
                return spec.murano_class.get_property(name, context)
            else:
                return self.cast(start_type)._get_property_value(name)
        else:
            try:
                spec = start_type.find_single_property(name)
                if spec.usage == typespec.PropertyUsages.Static:
                    return spec.murano_class.get_property(name, context)
                else:
                    # Cross-instance mangled access: reads the declaring
                    # facet's private __properties dict directly
                    return self.cast(spec.murano_class).__properties[name]
            except exceptions.NoPropertyFound:
                if derived:
                    return self.cast(caller_class)._get_property_value(name)
                else:
                    raise exceptions.PropertyReadError(name, start_type)

    def _get_property_value(self, name):
        try:
            return self.__properties[name]
        except KeyError:
            # Declared but not yet assigned; initialize() relies on this
            # exception to drive its retry loop
            raise exceptions.UninitializedPropertyAccessError(
                name, self.__type)

    def set_property(self, name, value, context=None):
        """Validate and assign property *name* on its declaring facet(s)."""
        start_type, derived = self.__type, False
        caller_class = None if not context else helpers.get_type(context)
        if caller_class is not None and caller_class.is_compatible(self):
            start_type, derived = caller_class, True
        declared_properties = start_type.find_properties(
            lambda p: p.name == name)
        if context is None:
            context = self.executor.create_object_context(self)
        if len(declared_properties) > 0:
            # Re-resolve against the full (most-derived) type so every
            # declaration of the property is updated
            declared_properties = self.type.find_properties(
                lambda p: p.name == name)
            values_to_assign = []
            classes_for_static_properties = []
            for spec in declared_properties:
                if (caller_class is not None and not
                        helpers.are_property_modifications_allowed(context) and
                        (spec.usage not in typespec.PropertyUsages.Writable or
                            not derived)):
                    raise exceptions.NoWriteAccessError(name)
                if spec.usage == typespec.PropertyUsages.Static:
                    classes_for_static_properties.append(spec.murano_class)
                else:
                    # Default precedence: per-object defaults override the
                    # class config, which overrides the spec default
                    default = self.__config.get(name, spec.default)
                    default = self.__defaults.get(name, default)
                    default = helpers.evaluate(default, context)
                    obj = self.cast(spec.murano_class)
                    values_to_assign.append((obj, spec.validate(
                        value, self.real_this,
                        self.real_this, context, default=default)))
            # All declarations are validated before any is assigned, so a
            # contract violation leaves no partial writes
            for obj, value in values_to_assign:
                obj.__properties[name] = value
            for cls in classes_for_static_properties:
                cls.set_property(name, value, context)
        elif derived:
            # Undeclared, but the caller class may keep its own value
            obj = self.cast(caller_class)
            obj.__properties[name] = value
        else:
            raise exceptions.PropertyWriteError(name, start_type)

    def cast(self, cls):
        """Return the facet of this object whose type is exactly *cls*."""
        for p in helpers.traverse(self, lambda t: t.__parents.values()):
            if p.type is cls:
                return p
        raise TypeError('Cannot cast {0} to {1}'.format(self.type, cls))

    def __repr__(self):
        return '<{0}/{1} {2} ({3})>'.format(
            self.type.name, self.type.version, self.object_id, id(self))

    def to_dictionary(self, include_hidden=False):
        """Serialize this object, including inherited state, to a dict."""
        result = {}
        # Parents first, so this class's values win on key collisions
        for parent in self.__parents.values():
            result.update(parent.to_dictionary(include_hidden))
        result.update({'?': {
            'type': self.type.name,
            'id': self.object_id,
            'name': self.name,
            'classVersion': str(self.type.version),
            'package': self.type.package.name
        }})
        if include_hidden:
            result.update(self.__properties)
        else:
            # Runtime-usage properties are transient and not serialized
            for property_name in self.type.properties:
                if property_name in self.__properties:
                    spec = self.type.properties[property_name]
                    if spec.usage != typespec.PropertyUsages.Runtime:
                        result[property_name] = self.__properties[
                            property_name]
        return result
| |
import urllib
import httplib2
import hashlib
import re
from cgi import parse_qs
from decimal import Decimal
from datetime import datetime
from django.utils.safestring import mark_safe
from django.utils.datastructures import SortedDict
from django.shortcuts import render_to_response
from dateutil.relativedelta import relativedelta
from hiicart.gateway.base import PaymentGatewayBase, TransactionResult, SubmitResult, GatewayError, CancelResult
from hiicart.gateway.veritrans_air.settings import SETTINGS as default_settings
from hiicart.gateway.veritrans_air.forms import PaymentForm, FORM_MODEL_TRANSLATION
from hiicart.models import HiiCartError, PaymentResponse
TOKEN_ENDPOINT = "https://air.veritrans.co.jp/web/commodityRegist.action"
PAYMENT_ENDPOINT = "https://air.veritrans.co.jp/web/paymentStart.action"
class VeritransAirGateway(PaymentGatewayBase):
"""Veritrans Air processor"""
def __init__(self, cart):
super(VeritransAirGateway, self).__init__('veritrans_air', cart, default_settings)
self._require_settings(['MERCHANT_ID', 'MERCHANT_ID'])
def _get_token(self, params_dict):
#httplib2.debuglevel = 1
http = httplib2.Http()
params_dict['MERCHANT_ID'] = self.settings['MERCHANT_ID']
params_dict['SESSION_ID'] = self.settings['SESSION_ID']
params_dict["SETTLEMENT_TYPE"] = self.settings["SETTLEMENT_TYPE"]
if self.settings['LIVE']:
params_dict["DUMMY_PAYMENT_FLAG"] = "0"
else:
params_dict["DUMMY_PAYMENT_FLAG"] = "1"
params_pairs = []
for (key, val) in params_dict.iteritems():
if isinstance(val, (list, tuple)):
for v in val:
params_pairs.append((key, unicode(v).encode('utf-8')))
else:
params_pairs.append((key, unicode(val).encode('utf-8')))
encoded_params = urllib.urlencode(params_pairs)
headers = {'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8', 'Accept-Language': 'ja'}
response, content = http.request(TOKEN_ENDPOINT, 'POST', body=encoded_params, headers=headers)
response_dict = {}
for line in content.splitlines():
key, val = line.split("=")
response_dict[key]=val
if 'ERROR_MESSAGE' in response_dict:
raise GatewayError("Error getting token from Veritrans Air: %s" % response_dict['ERROR_MESSAGE'])
return response_dict
def _get_checkout_data(self):
"""Populate request params from shopping cart"""
params = SortedDict()
# Urls for returning user after leaving Veritrans
if self.settings.get('FINISH_PAYMENT_RETURN_URL'):
finish_url = self.settings['FINISH_PAYMENT_RETURN_URL']
if '?' in finish_url:
finish_url += '&cart='
else:
finish_url += '?cart='
finish_url += self.cart._cart_uuid
params['FINISH_PAYMENT_RETURN_URL'] = finish_url
if self.settings.get('FINISH_PAYMENT_ACCESS_URL'):
params['FINISH_PAYMENT_ACCESS_URL'] = self.settings['FINISH_PAYMENT_ACCESS_URL']
if self.settings.get('CARD_CAPTURE_FLAG'):
params['CARD_CAPTURE_FLAG'] = self.settings['CARD_CAPTURE_FLAG']
if self.settings.get('UNFINISH_PAYMENT_RETURN_URL'):
params['UNFINISH_PAYMENT_RETURN_URL'] = self.settings['UNFINISH_PAYMENT_RETURN_URL']
if self.settings.get('ERROR_PAYMENT_RETURN_URL'):
params['ERROR_PAYMENT_RETURN_URL'] = self.settings['ERROR_PAYMENT_RETURN_URL']
params['ORDER_ID'] = self.cart.id
params['AMOUNT'] = Decimal(self.cart.total).quantize(Decimal('1'))
params['MERCHANTHASH'] = hashlib.sha512(b"%s,%s,%s,%s,%s" % (self.settings["MERCHANT_KEY"],
self.settings["MERCHANT_ID"],
self.settings["SETTLEMENT_TYPE"],
params['ORDER_ID'],
params['AMOUNT'])
).hexdigest()
if self.cart.shipping:
params['SHIPPING_AMOUNT'] = self.cart.shipping.quantize(Decimal('1'))
params["MAILADDRESS"] = self.cart.bill_email
# phone needs to be 10 digits, no hyphens. If it fails these checks,
# they'll input it in veritrans.
if self.cart.bill_phone:
phone = self.cart.bill_phone.replace('-', '')
if len(phone) == 10 and re.match('^\d+$', phone):
params["TELEPHONE_NO"] = phone
# params['NAME1'] = self.cart.bill_first_name
# params['NAME2'] = self.cart.bill_last_name
# params['ADDRESS1'] = self.cart.bill_street1
# params['ADDRESS2'] = self.cart.bill_city
# params['ADDRESS3'] = self.cart.bill_state
# params['ZIP_CODE'] = self.cart.bill_postal_code
# params['TELEPHONE_NO'] = self.cart.bill_phone
# params['MAILADDRESS'] = self.cart.bill_email
# Add one-time line items
# params['COMMODITY_ID'] = []
# params['COMMODITY_UNIT'] = []
# params['COMMODITY_NUM'] = []
# params['COMMODITY_NAME'] = []
# for item in self.cart.one_time_lineitems:
# params['COMMODITY_NAME'].append(item.name)
# params['COMMODITY_UNIT'].append(item.unit_price.quantize(Decimal('1')))
# params['COMMODITY_NUM'].append(item.quantity)
# params['COMMODITY_ID'].append(item.sku)
return params
def confirm_payment(self, request):
"""
Records billing and shipping info for Veritrans AIR
"""
form = PaymentForm(request.POST)
if form.is_valid():
self.cart.ship_first_name = form.cleaned_data['shipping__first_name'] or self.cart.ship_first_name
self.cart.ship_last_name = form.cleaned_data['shipping__last_name'] or self.cart.ship_last_name
self.cart.ship_street1 = form.cleaned_data['shipping__street_address'] or self.cart.ship_street1
self.cart.ship_street2 = form.cleaned_data['shipping__extended_address'] or self.cart.ship_street2
self.cart.ship_city = form.cleaned_data['shipping__locality'] or self.cart.ship_city
self.cart.ship_state = form.cleaned_data['shipping__region'] or self.cart.ship_state
self.cart.ship_postal_code = form.cleaned_data['shipping__postal_code'] or self.cart.ship_postal_code
self.cart.ship_country = form.cleaned_data['shipping__country_code_alpha2'] or self.cart.ship_country
self.cart.ship_phone = form.cleaned_data['customer__phone'] or self.cart.ship_phone
self.cart.bill_first_name = form.cleaned_data['billing__first_name'] or self.cart.bill_first_name
self.cart.bill_last_name = form.cleaned_data['billing__last_name'] or self.cart.bill_last_name
self.cart.bill_street1 = form.cleaned_data['billing__street_address'] or self.cart.bill_street1
self.cart.bill_street2 = form.cleaned_data['billing__extended_address'] or self.cart.bill_street2
self.cart.bill_city = form.cleaned_data['billing__locality'] or self.cart.bill_city
self.cart.bill_state = form.cleaned_data['billing__region'] or self.cart.bill_state
self.cart.bill_postal_code = form.cleaned_data['billing__postal_code'] or self.cart.bill_postal_code
self.cart.bill_country = form.cleaned_data['billing__country_code_alpha2'] or self.cart.bill_country
self.cart.bill_phone = form.cleaned_data['customer__phone'] or self.cart.bill_phone
self.cart.save()
return TransactionResult(
transaction_id=self.cart.id,
success=True,
status='success')
else:
return TransactionResult(
transaction_id=None,
success=False,
status='failed',
errors=form._errors)
def set_merchant_encryption_key(self, key):
response, created = PaymentResponse.objects.get_or_create(cart=self.cart, defaults={
'response_code': 0,
'response_text': key
})
if not created:
response.response_code = 0
response.response_text = key
response.save()
def get_merchant_encryption_key(self):
responses = PaymentResponse.objects.filter(cart=self.cart)
if len(responses):
return responses[0].response_text
else:
return None
def submit(self, collect_address=False, cart_settings_kwargs=None):
"""
Simply returns the gateway type to let the frontend know how to proceed.
"""
return SubmitResult("direct")
def start_transaction(self, request, **kwargs):
"""
Veritrans Air doesn't need anything special to start a transaction before tokenization.
Just get the URL for the form action.
"""
data = {'submit_url': kwargs.get('submit_url')}
for f, m in FORM_MODEL_TRANSLATION.iteritems():
data[f] = getattr(self.cart, m)
return data
@property
def form(self):
"""Returns an instance of PaymentForm."""
return PaymentForm()
def redirect_view(self, cart_settings_kwargs=None):
"""Submit order details to the gateway."""
self._update_with_cart_settings(cart_settings_kwargs)
params = self._get_checkout_data()
response = self._get_token(params)
self.cart._cart_state = "SUBMITTED"
self.cart.save()
self.set_merchant_encryption_key(response.get('MERCHANT_ENCRYPTION_KEY'))
fields = {
'BROWSER_ENCRYPTION_KEY': response['BROWSER_ENCRYPTION_KEY'],
'ORDER_ID': params['ORDER_ID'],
'MERCHANT_ID': self.settings['MERCHANT_ID']
}
context = {"url": PAYMENT_ENDPOINT, "fields": fields}
response = render_to_response('gateway/veritrans_air/payment.html', context)
return response
def _is_valid(self):
"""Return True if gateway is valid."""
return True
    def refund_payment(self, payment, reason=None):
        """
        Refund the full amount of this payment
        """
        # Intentionally a no-op: this gateway implementation performs no
        # programmatic full refund.  Presumably refunds are handled out of
        # band -- confirm against the gateway's capabilities.
        pass
def refund(self, payment, amount, reason=None):
"""Refund a payment."""
return SubmitResult(None)
    def sanitize_clone(self):
        """Nothing to fix here."""
        # Hook that is presumably required by the gateway interface; this
        # gateway stores no sensitive per-cart state needing scrubbing on
        # clone, so it is a deliberate no-op.
        pass
| |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Read/Write Avro File Object Containers.
"""
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import chr
from builtins import str
from builtins import range
from builtins import object
import zlib
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from avro import schema
from avro import io
try:
import snappy
has_snappy = True
except ImportError:
has_snappy = False
#
# Constants
#
VERSION = 1
# File magic: the ASCII bytes "Obj" followed by the container format version.
MAGIC = 'Obj' + chr(VERSION)
MAGIC_SIZE = len(MAGIC)
# A sync marker is 16 random bytes written after each data block.
SYNC_SIZE = 16
SYNC_INTERVAL = 4000 * SYNC_SIZE # TODO(hammer): make configurable
# Schema of the container-file header: magic, metadata map, sync marker.
META_SCHEMA = schema.parse("""\
{"type": "record", "name": "org.apache.avro.file.Header",
 "fields" : [
   {"name": "magic", "type": {"type": "fixed", "name": "magic", "size": %d}},
   {"name": "meta", "type": {"type": "map", "values": "bytes"}},
   {"name": "sync", "type": {"type": "fixed", "name": "sync", "size": %d}}]}
""" % (MAGIC_SIZE, SYNC_SIZE))
# 'snappy' is appended below only when the optional module imported.
VALID_CODECS = ['null', 'deflate']
if has_snappy:
  VALID_CODECS.append('snappy')
VALID_ENCODINGS = ['binary'] # not used yet
# Reserved metadata keys used by the container format itself.
CODEC_KEY = "avro.codec"
SCHEMA_KEY = "avro.schema"
#
# Exceptions
#
class DataFileException(schema.AvroException):
  """
  Raised when there's a problem reading or writing file object containers.
  """
  def __init__(self, fail_msg):
    super(DataFileException, self).__init__(fail_msg)
#
# Write Path
#
class DataFileWriter(object):
  """Writes datums into an Avro object container file, block by block."""
  @staticmethod
  def generate_sync_marker():
    # 16 random bytes used to delimit blocks within the file.
    return generate_sixteen_random_bytes()
  # TODO(hammer): make 'encoder' a metadata property
  def __init__(self, writer, datum_writer, writers_schema=None, codec='null'):
    """
    If the schema is not present, presume we're appending.
    @param writer: File-like object to write into.
    """
    self._writer = writer
    self._encoder = io.BinaryEncoder(writer)
    self._datum_writer = datum_writer
    # Datums are buffered here and flushed as one block by _write_block().
    self._buffer_writer = StringIO()
    self._buffer_encoder = io.BinaryEncoder(self._buffer_writer)
    self._block_count = 0
    self._meta = {}
    self._header_written = False
    if writers_schema is not None:
      if codec not in VALID_CODECS:
        raise DataFileException("Unknown codec: %r" % codec)
      self._sync_marker = DataFileWriter.generate_sync_marker()
      self.set_meta('avro.codec', codec)
      self.set_meta('avro.schema', str(writers_schema))
      self.datum_writer.writers_schema = writers_schema
    else:
      # open writer for reading to collect metadata
      dfr = DataFileReader(writer, io.DatumReader())
      # TODO(hammer): collect arbitrary metadata
      # collect metadata
      self._sync_marker = dfr.sync_marker
      self.set_meta('avro.codec', dfr.get_meta('avro.codec'))
      # get schema used to write existing file
      schema_from_file = dfr.get_meta('avro.schema')
      self.set_meta('avro.schema', schema_from_file)
      self.datum_writer.writers_schema = schema.parse(schema_from_file)
      # seek to the end of the file and prepare for writing
      writer.seek(0, 2)
      self._header_written = True
  # read-only properties
  writer = property(lambda self: self._writer)
  encoder = property(lambda self: self._encoder)
  datum_writer = property(lambda self: self._datum_writer)
  buffer_writer = property(lambda self: self._buffer_writer)
  buffer_encoder = property(lambda self: self._buffer_encoder)
  sync_marker = property(lambda self: self._sync_marker)
  meta = property(lambda self: self._meta)
  def __enter__(self):
    return self
  def __exit__(self, type, value, traceback):
    # Perform a close if there's no exception
    if type is None:
      self.close()
  # read/write properties
  def set_block_count(self, new_val):
    self._block_count = new_val
  block_count = property(lambda self: self._block_count, set_block_count)
  # utility functions to read/write metadata entries
  def get_meta(self, key):
    return self._meta.get(key)
  def set_meta(self, key, val):
    self._meta[key] = val
  def _write_header(self):
    # Header layout (see META_SCHEMA): magic bytes, metadata map, sync marker.
    header = {'magic': MAGIC,
              'meta': self.meta,
              'sync': self.sync_marker}
    self.datum_writer.write_data(META_SCHEMA, header, self.encoder)
    self._header_written = True
  # TODO(hammer): make a schema for blocks and use datum_writer
  def _write_block(self):
    # Flush the buffered datums as one on-disk block:
    # count, compressed length, compressed data[, crc32], sync marker.
    if not self._header_written:
      self._write_header()
    if self.block_count > 0:
      # write number of items in block
      self.encoder.write_long(self.block_count)
      # write block contents
      uncompressed_data = self.buffer_writer.getvalue()
      if self.get_meta(CODEC_KEY) == 'null':
        compressed_data = uncompressed_data
        compressed_data_length = len(compressed_data)
      elif self.get_meta(CODEC_KEY) == 'deflate':
        # The first two characters and last character are zlib
        # wrappers around deflate data.
        compressed_data = zlib.compress(uncompressed_data)[2:-1]
        compressed_data_length = len(compressed_data)
      elif self.get_meta(CODEC_KEY) == 'snappy':
        compressed_data = snappy.compress(uncompressed_data)
        compressed_data_length = len(compressed_data) + 4 # crc32
      else:
        fail_msg = '"%s" codec is not supported.' % self.get_meta(CODEC_KEY)
        raise DataFileException(fail_msg)
      # Write length of block
      self.encoder.write_long(compressed_data_length)
      # Write block
      self.writer.write(compressed_data)
      # Write CRC32 checksum for Snappy
      if self.get_meta(CODEC_KEY) == 'snappy':
        self.encoder.write_crc32(uncompressed_data)
      # write sync marker
      self.writer.write(self.sync_marker)
      # reset buffer
      # NOTE(review): Python 2's StringIO.truncate(0) also resets the
      # position; io.StringIO would need an explicit seek(0) as well --
      # confirm if this module is ported to Python 3's io classes.
      self.buffer_writer.truncate(0)
      self.block_count = 0
  def append(self, datum):
    """Append a datum to the file."""
    self.datum_writer.write(datum, self.buffer_encoder)
    self.block_count += 1
    # if the data to write is larger than the sync interval, write the block
    if self.buffer_writer.tell() >= SYNC_INTERVAL:
      self._write_block()
  def sync(self):
    """
    Return the current position as a value that may be passed to
    DataFileReader.seek(long). Forces the end of the current block,
    emitting a synchronization marker.
    """
    self._write_block()
    return self.writer.tell()
  def flush(self):
    """Flush the current state of the file, including metadata."""
    self._write_block()
    self.writer.flush()
  def close(self):
    """Close the file."""
    self.flush()
    self.writer.close()
class DataFileReader(object):
  """Read files written by DataFileWriter."""
  # TODO(hammer): allow user to specify expected schema?
  # TODO(hammer): allow user to specify the encoder
  def __init__(self, reader, datum_reader):
    """
    @param reader: File-like object containing an Avro object container.
    @param datum_reader: DatumReader used to decode each datum.
    """
    self._reader = reader
    self._raw_decoder = io.BinaryDecoder(reader)
    self._datum_decoder = None # Maybe reset at every block.
    self._datum_reader = datum_reader
    # read the header: magic, meta, sync
    self._read_header()
    # ensure codec is valid
    self.codec = self.get_meta('avro.codec')
    if self.codec is None:
      # A missing codec entry means the file is uncompressed.
      self.codec = "null"
    if self.codec not in VALID_CODECS:
      raise DataFileException('Unknown codec: %s.' % self.codec)
    # get file length
    self._file_length = self.determine_file_length()
    # get ready to read
    self._block_count = 0
    self.datum_reader.writers_schema = schema.parse(self.get_meta(SCHEMA_KEY))
  def __enter__(self):
    return self
  def __exit__(self, type, value, traceback):
    # Perform a close if there's no exception
    if type is None:
      self.close()
  def __iter__(self):
    return self
  # read-only properties
  reader = property(lambda self: self._reader)
  raw_decoder = property(lambda self: self._raw_decoder)
  datum_decoder = property(lambda self: self._datum_decoder)
  datum_reader = property(lambda self: self._datum_reader)
  sync_marker = property(lambda self: self._sync_marker)
  meta = property(lambda self: self._meta)
  file_length = property(lambda self: self._file_length)
  # read/write properties
  def set_block_count(self, new_val):
    self._block_count = new_val
  block_count = property(lambda self: self._block_count, set_block_count)
  # utility functions to read/write metadata entries
  def get_meta(self, key):
    # Metadata comes from the file header read in _read_header().
    return self._meta.get(key)
  def set_meta(self, key, val):
    self._meta[key] = val
  def determine_file_length(self):
    """
    Get file length and leave file cursor where we found it.
    """
    remember_pos = self.reader.tell()
    self.reader.seek(0, 2)
    file_length = self.reader.tell()
    self.reader.seek(remember_pos)
    return file_length
  def is_EOF(self):
    # True once the cursor has reached the end of the file.
    return self.reader.tell() == self.file_length
  def _read_header(self):
    # seek to the beginning of the file to get magic block
    self.reader.seek(0, 0)
    # read header into a dict
    header = self.datum_reader.read_data(
      META_SCHEMA, META_SCHEMA, self.raw_decoder)
    # check magic number
    if header.get('magic') != MAGIC:
      fail_msg = "Not an Avro data file: %s doesn't match %s."\
                 % (header.get('magic'), MAGIC)
      raise schema.AvroException(fail_msg)
    # set metadata
    self._meta = header['meta']
    # set sync marker
    self._sync_marker = header['sync']
  def _read_block_header(self):
    # A block starts with its datum count, then its (possibly compressed)
    # payload; the decoder for datums is swapped depending on the codec.
    self.block_count = self.raw_decoder.read_long()
    if self.codec == "null":
      # Skip a long; we don't need to use the length.
      self.raw_decoder.skip_long()
      self._datum_decoder = self._raw_decoder
    elif self.codec == 'deflate':
      # Compressed data is stored as (length, data), which
      # corresponds to how the "bytes" type is encoded.
      data = self.raw_decoder.read_bytes()
      # -15 is the log of the window size; negative indicates
      # "raw" (no zlib headers) decompression. See zlib.h.
      uncompressed = zlib.decompress(data, -15)
      self._datum_decoder = io.BinaryDecoder(StringIO(uncompressed))
    elif self.codec == 'snappy':
      # Compressed data includes a 4-byte CRC32 checksum
      length = self.raw_decoder.read_long()
      data = self.raw_decoder.read(length - 4)
      uncompressed = snappy.decompress(data)
      self._datum_decoder = io.BinaryDecoder(StringIO(uncompressed))
      self.raw_decoder.check_crc32(uncompressed);
    else:
      raise DataFileException("Unknown codec: %r" % self.codec)
  def _skip_sync(self):
    """
    Read the length of the sync marker; if it matches the sync marker,
    return True. Otherwise, seek back to where we started and return False.
    """
    proposed_sync_marker = self.reader.read(SYNC_SIZE)
    if proposed_sync_marker != self.sync_marker:
      self.reader.seek(-SYNC_SIZE, 1)
      return False
    else:
      return True
  # TODO(hammer): handle block of length zero
  # TODO(hammer): clean this up with recursion
  def __next__(self):
    """Return the next datum in the file."""
    # When the current block is exhausted, skip the sync marker (if
    # present) and read the next block header before decoding a datum.
    if self.block_count == 0:
      if self.is_EOF():
        raise StopIteration
      elif self._skip_sync():
        if self.is_EOF(): raise StopIteration
        self._read_block_header()
      else:
        self._read_block_header()
    datum = self.datum_reader.read(self.datum_decoder)
    self.block_count -= 1
    return datum
  def close(self):
    """Close this reader."""
    self.reader.close()
def generate_sixteen_random_bytes():
  """Return 16 random bytes for use as a block sync marker.

  Prefers os.urandom; falls back to the random module on platforms
  where urandom is unavailable.

  Fixes: the fallback previously returned a *list* of 1-character
  strings rather than a 16-character string (which would break
  writer.write(sync_marker)), and the bare 'except:' swallowed every
  error, not just the documented NotImplementedError from os.urandom.
  """
  try:
    import os
    return os.urandom(16)
  except NotImplementedError:
    import random
    return ''.join(chr(random.randrange(256)) for _ in range(16))
| |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUSiteInfo(NURESTObject):
    """ Represents a SiteInfo in the VSD
        Notes:
            Remote Site info.
    """
    __rest_name__ = "site"
    __resource_name__ = "sites"
    ## Constants
    # Allowed values for the entity_scope attribute.
    CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
    CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
    def __init__(self, **kwargs):
        """ Initializes a SiteInfo instance
            Notes:
                You can specify all parameters while calling this methods.
                A special argument named `data` will enable you to load the
                object from a Python dictionary
            Examples:
                >>> siteinfo = NUSiteInfo(id=u'xxxx-xxx-xxx-xxx', name=u'SiteInfo')
                >>> siteinfo = NUSiteInfo(data=my_dict)
        """
        super(NUSiteInfo, self).__init__()
        # Read/Write Attributes
        # Backing fields for the generated properties below; populated by
        # _compute_args() from kwargs or the `data` dictionary.
        self._name = None
        self._last_updated_by = None
        self._last_updated_date = None
        self._address = None
        self._description = None
        self._site_identifier = None
        self._embedded_metadata = None
        self._xmpp_domain = None
        self._entity_scope = None
        self._creation_date = None
        self._owner = None
        self._external_id = None
        # Declare how each attribute maps onto the VSD REST API
        # (remote name, type, required/unique flags, allowed choices).
        self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=False)
        self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="address", remote_name="address", attribute_type=str, is_required=True, is_unique=False)
        self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="site_identifier", remote_name="siteIdentifier", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
        self.expose_attribute(local_name="xmpp_domain", remote_name="xmppDomain", attribute_type=str, is_required=True, is_unique=False)
        self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
        self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
        # Fetchers
        # Lazy fetchers for the child collections of this object.
        self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
        self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
        self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
        self._compute_args(**kwargs)
    # Properties
    # NOTE: the accessors below are generated boilerplate; each simply
    # reads/writes its backing field.
    @property
    def name(self):
        """ Get name value.
            Notes:
                name of the Remote Site.
        """
        return self._name
    @name.setter
    def name(self, value):
        """ Set name value.
            Notes:
                name of the Remote Site.
        """
        self._name = value
    @property
    def last_updated_by(self):
        """ Get last_updated_by value.
            Notes:
                ID of the user who last updated the object.
                This attribute is named `lastUpdatedBy` in VSD API.
        """
        return self._last_updated_by
    @last_updated_by.setter
    def last_updated_by(self, value):
        """ Set last_updated_by value.
            Notes:
                ID of the user who last updated the object.
                This attribute is named `lastUpdatedBy` in VSD API.
        """
        self._last_updated_by = value
    @property
    def last_updated_date(self):
        """ Get last_updated_date value.
            Notes:
                Time stamp when this object was last updated.
                This attribute is named `lastUpdatedDate` in VSD API.
        """
        return self._last_updated_date
    @last_updated_date.setter
    def last_updated_date(self, value):
        """ Set last_updated_date value.
            Notes:
                Time stamp when this object was last updated.
                This attribute is named `lastUpdatedDate` in VSD API.
        """
        self._last_updated_date = value
    @property
    def address(self):
        """ Get address value.
            Notes:
                unique fqdn/address of the remote site
        """
        return self._address
    @address.setter
    def address(self, value):
        """ Set address value.
            Notes:
                unique fqdn/address of the remote site
        """
        self._address = value
    @property
    def description(self):
        """ Get description value.
            Notes:
                Description of the Remote Site.
        """
        return self._description
    @description.setter
    def description(self, value):
        """ Set description value.
            Notes:
                Description of the Remote Site.
        """
        self._description = value
    @property
    def site_identifier(self):
        """ Get site_identifier value.
            Notes:
                unique identifier of the remote site
                This attribute is named `siteIdentifier` in VSD API.
        """
        return self._site_identifier
    @site_identifier.setter
    def site_identifier(self, value):
        """ Set site_identifier value.
            Notes:
                unique identifier of the remote site
                This attribute is named `siteIdentifier` in VSD API.
        """
        self._site_identifier = value
    @property
    def embedded_metadata(self):
        """ Get embedded_metadata value.
            Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
                This attribute is named `embeddedMetadata` in VSD API.
        """
        return self._embedded_metadata
    @embedded_metadata.setter
    def embedded_metadata(self, value):
        """ Set embedded_metadata value.
            Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum of Metadata objects is returned based on the value set in the system configuration.
                This attribute is named `embeddedMetadata` in VSD API.
        """
        self._embedded_metadata = value
    @property
    def xmpp_domain(self):
        """ Get xmpp_domain value.
            Notes:
                unique xmpp domain name of the remote site
                This attribute is named `xmppDomain` in VSD API.
        """
        return self._xmpp_domain
    @xmpp_domain.setter
    def xmpp_domain(self, value):
        """ Set xmpp_domain value.
            Notes:
                unique xmpp domain name of the remote site
                This attribute is named `xmppDomain` in VSD API.
        """
        self._xmpp_domain = value
    @property
    def entity_scope(self):
        """ Get entity_scope value.
            Notes:
                Specify if scope of entity is Data center or Enterprise level
                This attribute is named `entityScope` in VSD API.
        """
        return self._entity_scope
    @entity_scope.setter
    def entity_scope(self, value):
        """ Set entity_scope value.
            Notes:
                Specify if scope of entity is Data center or Enterprise level
                This attribute is named `entityScope` in VSD API.
        """
        self._entity_scope = value
    @property
    def creation_date(self):
        """ Get creation_date value.
            Notes:
                Time stamp when this object was created.
                This attribute is named `creationDate` in VSD API.
        """
        return self._creation_date
    @creation_date.setter
    def creation_date(self, value):
        """ Set creation_date value.
            Notes:
                Time stamp when this object was created.
                This attribute is named `creationDate` in VSD API.
        """
        self._creation_date = value
    @property
    def owner(self):
        """ Get owner value.
            Notes:
                Identifies the user that has created this object.
        """
        return self._owner
    @owner.setter
    def owner(self, value):
        """ Set owner value.
            Notes:
                Identifies the user that has created this object.
        """
        self._owner = value
    @property
    def external_id(self):
        """ Get external_id value.
            Notes:
                External object ID. Used for integration with third party systems
                This attribute is named `externalID` in VSD API.
        """
        return self._external_id
    @external_id.setter
    def external_id(self, value):
        """ Set external_id value.
            Notes:
                External object ID. Used for integration with third party systems
                This attribute is named `externalID` in VSD API.
        """
        self._external_id = value
| |
# #######
# Copyright (c) 2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import random
import string
from uuid import uuid4
from functools import wraps
from msrest.exceptions import ValidationError
from msrestazure.azure_exceptions import CloudError
from cloudify import exceptions as cfy_exc
from cloudify_common_sdk.utils import \
skip_creative_or_destructive_operation as skip
from . import utils, constants
from azure_sdk.resources.network.route import Route
from azure_sdk.resources.network.subnet import Subnet
from azure_sdk.resources.deployment import Deployment
from azure_sdk.resources.resource_group import ResourceGroup
from azure_sdk.resources.storage.file_share import FileShare
from azure_sdk.resources.storage.storage_account import StorageAccount
from azure_sdk.resources.network.network_security_rule \
import NetworkSecurityRule
from azure_sdk.resources.compute.virtual_machine_extension \
import VirtualMachineExtension
from azure_sdk.resources.network.load_balancer import \
(LoadBalancerProbe,
LoadBalancerInboundNatRule,
LoadBalancerLoadBalancingRule,
LoadBalancerBackendAddressPool
)
def sa_name_generator():
    """Generates a unique SA resource name"""
    # range(3, 24) iterates 21 times, so the name is always 21 characters
    # long -- within Azure's 3-24 character limit for storage accounts.
    alphabet = string.ascii_lowercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(3, 24))
def file_share_name_generator():
    """Generates a unique File Share resource name"""
    # Name length is drawn uniformly from 24-63 characters (inclusive).
    alphabet = string.ascii_lowercase + string.digits
    length = random.randint(24, 63)
    return ''.join(random.choice(alphabet) for _ in range(length))
def get_unique_name(resource, resource_group_name, name, **kwargs):
    """Return ``name`` if provided, otherwise generate an unused one.

    Tries up to 15 candidate names; a candidate is accepted when looking
    it up on Azure raises CloudError (meaning no such resource exists).
    Extra kwargs carry the parent-resource names some resource types
    require (vm_name, vnet_name, rtbl_name, nsg_name, lb_name, sa_name).
    Note: returns None implicitly if all 15 candidates collide.
    """
    if not name:
        for _ in range(0, 15):
            # special naming handling
            if isinstance(resource, StorageAccount):
                name = sa_name_generator()
            elif isinstance(resource, FileShare):
                name = file_share_name_generator()
            else:
                # Default: a random UUID is effectively collision-free.
                name = "{0}".format(uuid4())
            try:
                # handle speical cases
                # resource_group
                if isinstance(resource, ResourceGroup):
                    result = resource.get(name)
                # virtual_machine_extension
                elif isinstance(resource, VirtualMachineExtension):
                    vm_name = kwargs['vm_name']
                    result = resource.get(resource_group_name, vm_name, name)
                # subnet
                elif isinstance(resource, Subnet):
                    vnet_name = kwargs['vnet_name']
                    result = resource.get(resource_group_name, vnet_name, name)
                # route
                elif isinstance(resource, Route):
                    rtbl_name = kwargs['rtbl_name']
                    result = resource.get(resource_group_name, rtbl_name, name)
                # network_security_rule
                elif isinstance(resource, NetworkSecurityRule):
                    nsg_name = kwargs['nsg_name']
                    result = resource.get(resource_group_name, nsg_name, name)
                elif isinstance(resource, (LoadBalancerBackendAddressPool,
                                           LoadBalancerLoadBalancingRule,
                                           LoadBalancerInboundNatRule,
                                           LoadBalancerProbe)):
                    lb_name = kwargs['lb_name']
                    result = resource.get(resource_group_name, lb_name, name)
                elif isinstance(resource, FileShare):
                    sa_name = kwargs['sa_name']
                    result = resource.get(resource_group_name, sa_name, name)
                else:
                    result = resource.get(resource_group_name, name)
                if result:  # found a resource with same name
                    name = ""
                    continue
            except CloudError:  # this means name is not used
                return name
    else:
        return name
def with_generate_name(resource_class_name):
    """Decorator factory: ensure the node has a resource name before running.

    Resolves the azure credentials (honoring the deprecated azure_config
    property), and if the node supplies no name, generates a unique one via
    get_unique_name() -- passing whatever parent-resource identifiers the
    resource type requires -- and stores it in
    ctx.instance.runtime_properties['name'].
    """
    def wrapper_outer(func):
        @wraps(func)
        def wrapper_inner(*args, **kwargs):
            ctx = kwargs['ctx']
            try:
                # check if name is set or not and generate one if it wasn't set
                azure_config = ctx.node.properties.get('azure_config')
                if not azure_config.get("subscription_id"):
                    azure_config = ctx.node.properties.get('client_config')
                else:
                    ctx.logger.warn("azure_config is deprecated "
                                    "please use client_config, "
                                    "in later version it will be removed")
                resource = resource_class_name(azure_config, ctx.logger)
                name = utils.get_resource_name(ctx)
                # Resource groups are their own group; everything else
                # needs the containing resource group's name.
                resource_group_name = name
                if not isinstance(resource, ResourceGroup):
                    resource_group_name = utils.get_resource_group(ctx)
                if not name:
                    ctx.logger.info(
                        "Generating unique name for {0}".format(
                            resource_class_name))
                    # handle special cases
                    # virtual_machine_extension
                    if isinstance(resource, VirtualMachineExtension):
                        vm_name = \
                            ctx.node.properties.get('virtual_machine_name')
                        name = get_unique_name(
                            resource=resource,
                            resource_group_name=resource_group_name,
                            name=name,
                            vm_name=vm_name)
                    # subnet
                    elif isinstance(resource, Subnet):
                        vnet_name = utils.get_virtual_network(ctx)
                        name = get_unique_name(
                            resource=resource,
                            resource_group_name=resource_group_name,
                            name=name,
                            vnet_name=vnet_name)
                    # route
                    elif isinstance(resource, Route):
                        rtbl_name = utils.get_route_table(ctx)
                        name = get_unique_name(
                            resource=resource,
                            resource_group_name=resource_group_name,
                            name=name,
                            rtbl_name=rtbl_name)
                    # network_security_rule
                    elif isinstance(resource, NetworkSecurityRule):
                        nsg_name = utils.get_network_security_group(ctx)
                        name = get_unique_name(
                            resource=resource,
                            resource_group_name=resource_group_name,
                            name=name,
                            nsg_name=nsg_name)
                    elif isinstance(resource, Deployment):
                        name = get_unique_name(
                            resource=resource,
                            resource_group_name=resource_group_name,
                            name=name)
                    elif isinstance(resource, (LoadBalancerBackendAddressPool,
                                               LoadBalancerLoadBalancingRule,
                                               LoadBalancerInboundNatRule,
                                               LoadBalancerProbe)):
                        lb_name = utils.get_load_balancer(ctx)
                        name = get_unique_name(
                            resource=resource,
                            resource_group_name=resource_group_name,
                            name=name,
                            lb_name=lb_name)
                    elif isinstance(resource, FileShare):
                        sa_name = utils.get_storage_account(ctx)
                        name = get_unique_name(
                            resource=resource,
                            resource_group_name=resource_group_name,
                            name=name,
                            sa_name=sa_name)
                    else:
                        name = get_unique_name(
                            resource=resource,
                            resource_group_name=resource_group_name,
                            name=name)
                ctx.instance.runtime_properties['name'] = name
            except CloudError:
                raise cfy_exc.NonRecoverableError(
                    "Can't generate name for {0}".format(
                        resource_class_name))
            return func(*args, **kwargs)
        return wrapper_inner
    return wrapper_outer
def get_create_op(op_name, node_type):
    """ Determine if we are dealing with a creation operation.
    Normally we just do the logic in the last return. However, we may want
    special behavior for some types.
    :param op_name: ctx.operation.name.
    :param node_type: ctx.node.type_hierarchy
    :return: bool
    """
    op = op_name.split('.')[-1]
    # NICs, VMs and load balancers treat 'configure' as a creation
    # operation as well.
    for special_type in (constants.NIC_NODE_TYPE,
                         constants.VM_NODE_TYPE,
                         constants.LB_NODE_TYPE):
        if utils.check_types_in_hierarchy(special_type, node_type):
            if 'configure' in op:
                return True
            break  # only the first matching hierarchy is considered
    return 'create' in op
def get_delete_op(op_name, node_type=None):
    """ Determine if we are dealing with a deletion operation.
    Normally we just do the logic in the last return. However, we may want
    special behavior for some types.
    :param op_name: ctx.operation.name.
    :param node_type: ctx.node.type_hierarchy
    :return: bool
    """
    # No type-specific behavior yet; node_type is accepted for symmetry
    # with get_create_op.
    return 'delete' in op_name.split('.')[-1]
def get_special_condition(type_list, op_name):
    """Return True when the standard create/delete existence checks
    should not apply: Deployment nodes are always special, as is any
    operation other than plain create/delete."""
    final_op = op_name.split('.')[-1]
    if 'cloudify.azure.Deployment' in type_list:
        return True
    return final_op not in ('create', 'delete')
def with_azure_resource(resource_class_name):
    """Decorator factory: run the wrapped operation only when appropriate.

    Looks up the resource on Azure first, then delegates to
    skip_creative_or_destructive_operation() to decide whether the
    create/delete should actually execute (e.g. skip creating a resource
    that already exists).
    """
    def wrapper_outer(func):
        @wraps(func)
        def wrapper_inner(*args, **kwargs):
            ctx = kwargs['ctx']
            name = utils.get_resource_name(ctx)
            # check if azure_config is given and if the resource
            # is external or not
            azure_config = utils.get_client_config(ctx.node.properties)
            resource_factory = ResourceGetter(ctx, azure_config, name)
            exists = resource_factory.get_resource(resource_class_name)
            special_condition = get_special_condition(ctx.node.type_hierarchy,
                                                      ctx.operation.name)
            create_op = get_create_op(ctx.operation.name,
                                      ctx.node.type_hierarchy)
            delete_op = get_delete_op(ctx.operation.name,
                                      ctx.node.type_hierarchy)
            # There is now a good idea whether the desired resource exists.
            # Now find out if it is expected and if it does or doesn't.
            if not skip(
                    resource_class_name,
                    name,
                    _ctx_node=ctx.node,
                    exists=exists,
                    special_condition=special_condition,
                    create_operation=create_op,
                    delete_operation=delete_op):
                return func(*args, **kwargs)
        return wrapper_inner
    return wrapper_outer
class ResourceGetter(object):
    """Resolves whether a named Azure resource currently exists.

    Encapsulates the per-type lookup logic (each resource type needs
    different parent identifiers) and records common info in the node
    instance's runtime properties on first lookup.
    """
    def __init__(self, ctx, azure_config, resource_name):
        self.azure_config = azure_config
        self.ctx = ctx
        self.name = resource_name
        self.resource_group_name = None
    def get_resource(self, resource_class_name):
        """Fetch the resource from Azure; return its data, or None when the
        lookup raises CloudError (i.e. the resource does not exist)."""
        try:
            resource = resource_class_name(self.azure_config, self.ctx.logger)
            name = self.name
            if not isinstance(resource, ResourceGroup):
                resource_group_name = utils.get_resource_group(self.ctx)
                self.resource_group_name = resource_group_name
            # handle speical cases
            # resource_group
            if isinstance(resource, ResourceGroup):
                exists = resource.get(self.name)
                name = resource_group_name = \
                    self.resource_group_name = self.name
            elif isinstance(resource, Deployment):
                exists = resource.get(resource_group_name, self.name)
            # virtual_machine_extension
            elif isinstance(resource, VirtualMachineExtension):
                name = vm_name = \
                    self.ctx.node.properties.get('virtual_machine_name')
                exists = resource.get(resource_group_name, vm_name, self.name)
            # subnet
            elif isinstance(resource, Subnet):
                name = vnet_name = utils.get_virtual_network(self.ctx)
                exists = resource.get(resource_group_name, vnet_name,
                                      self.name)
            # route
            elif isinstance(resource, Route):
                name = rtbl_name = utils.get_route_table(self.ctx)
                exists = resource.get(resource_group_name, rtbl_name,
                                      self.name)
            # network_security_rule
            elif isinstance(resource, NetworkSecurityRule):
                name = nsg_name = utils.get_network_security_group(self.ctx)
                exists = resource.get(resource_group_name, nsg_name, self.name)
            # load_balancer_backend_address_pool
            elif isinstance(resource, (LoadBalancerBackendAddressPool,
                                       LoadBalancerLoadBalancingRule,
                                       LoadBalancerInboundNatRule,
                                       LoadBalancerProbe)):
                name = lb_name = utils.get_load_balancer(self.ctx)
                exists = resource.get(resource_group_name,
                                      lb_name,
                                      self.name)
            # file share
            elif isinstance(resource, FileShare):
                name = sa_name = utils.get_storage_account(self.ctx)
                exists = resource.get(resource_group_name, sa_name, self.name)
            else:
                exists = resource.get(resource_group_name, self.name)
            # Only record the lookup result once per node instance.
            if 'resource' not in self.ctx.instance.runtime_properties:
                utils.save_common_info_in_runtime_properties(
                    resource_group_name,
                    name,
                    exists
                )
        except CloudError:
            exists = None
        return exists
def get_operation_config(op, runtime_properties, properties):
    """Resolve the operation config for the operation named *op*.

    The runtime-properties override wins when present; otherwise the node
    property ``operation_config`` is consulted. ``create``/``delete``
    operations map to the keys of the same name, while ``start`` maps to
    the ``update`` config.

    :raises cfy_exc.NonRecoverableError: when *op* matches none of the
        recognized operation names.
    """
    op_config = runtime_properties.get('operation_config', {})
    # (substring-of-op, config-key) pairs, checked in order.
    dispatch = (
        ('create', 'create'),
        ('start', 'update'),
        ('delete', 'delete'),
    )
    for token, key in dispatch:
        if token in op:
            if key in op_config:
                return op_config[key]
            return properties['operation_config'][key]
    raise cfy_exc.NonRecoverableError(
        'The operation config provided is invalid.')
def get_custom_resource_config(runtime_properties, properties):
    """Return the resource config, preferring a non-empty runtime override.

    Falls back to the node property ``resource_config`` when the runtime
    properties carry no (or an empty) ``resource_config``.
    """
    return (runtime_properties.get('resource_config')
            or properties['resource_config'])
def configure_custom_resource(func):
    """Decorator for custom-resource lifecycle operations.

    Resolves the resource config, operation config, Azure client config and
    API version from the operation kwargs / runtime properties / node
    properties, then invokes *func* as
    ``func(ctx, resource, operation, client, api)``. Framework errors are
    converted to ``NonRecoverableError``.
    """
    @wraps(func)
    def wrapper(**kwargs):
        ctx = kwargs['ctx']
        # Last dotted component, e.g. "create" from
        # "cloudify.interfaces.lifecycle.create".
        op_name = ctx.operation.name.split('.')[-1]
        runprops = ctx.instance.runtime_properties
        props = ctx.node.properties
        # Precedence: explicit operation kwargs > runtime properties >
        # node properties.
        resource = kwargs.pop(
            'resource_config',
            get_custom_resource_config(runprops, props))
        operation = kwargs.pop(
            'operation_config',
            get_operation_config(op_name, runprops, props)
        )
        client = utils.get_client_config(ctx.node.properties)
        api = ctx.node.properties.get('api_version')
        try:
            return func(ctx, resource, operation, client, api)
        except (TypeError, AttributeError) as e:
            raise cfy_exc.NonRecoverableError(str(e))
        except ValidationError as e:
            raise cfy_exc.NonRecoverableError(str(e))
        except NotImplementedError as e:
            # NOTE(review): assumes the Azure SDK reports unsupported API
            # versions via NotImplementedError with a message of the form
            # "APIVersion YYYY-MM-DD is not available" -- confirm.
            bad_api_regex = re.compile(
                r'APIVersion\s([\d]{2,4}\-){2}[\d]{2}\sis\snot\savailable')
            if bad_api_regex.search(str(e)):
                raise cfy_exc.NonRecoverableError(
                    'Invalid API version for current Azure Plugin wagon.')
            raise e
    return wrapper
| |
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates C++ source files from a mojom.Module."""
import datetime
import mojom
import mojom_pack
import os
import re
import sys
from string import Template
# mojom_cpp_generator provides a way to generate c++ code from a mojom.Module.
# cpp = mojom_cpp_generator.CPPGenerator(module)
# cpp.GenerateFiles("/tmp/g")
class Lines(object):
  """Helper class to maintain a list of template-expanded lines.

  Each Add() call substitutes the given values into the template and
  appends the result; repr() yields the accumulated text, optionally
  re-indented by |indent| spaces.
  """

  def __init__(self, template, indent=None):
    self.template = template
    self.indent = indent
    self.lines = []

  def Add(self, map=None, **substitutions):
    """Append one expansion of the template.

    BUG FIX: the default for |map| was a shared mutable dict ({}); use a
    per-call empty dict instead. Keyword substitutions are merged into a
    copy so the caller's dict is never mutated.
    """
    if map is None:
      map = {}
    if len(substitutions) > 0:
      map = dict(map)
      map.update(substitutions)
    self.lines.append(self.template.substitute(map))

  def __repr__(self):
    if self.indent is not None:
      prefix = ''.ljust(self.indent, ' ')
      joined = '\n'.join(self.lines)
      # Store a real list (the original stored the result of map(), which
      # is a one-shot iterator on Python 3).
      self.lines = [prefix + line for line in joined.splitlines()]
    return '\n'.join(self.lines)
def GetStructFromMethod(interface, method):
  """Synthesize a mojom.Struct whose fields mirror |method|'s parameters.

  The struct is named <Interface>_<Method>_Params and is used to serialize
  the method's arguments as a message payload.
  """
  struct = mojom.Struct("%s_%s_Params" % (interface.name, method.name))
  for parameter in method.parameters:
    struct.AddField(parameter.name, parameter.kind, parameter.ordinal)
  return struct
def IsPointerKind(kind):
  """Return True for kinds serialized via pointer: structs, arrays, strings."""
  if isinstance(kind, (mojom.Struct, mojom.Array)):
    return True
  return kind.spec == 's'
def CamelToUnderscores(camel):
  """Convert a CamelCase identifier to lower snake_case."""
  # First break before an uppercase letter that starts a lowercase run,
  # then break between a lowercase/digit and an uppercase letter.
  with_breaks = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', camel)
  with_breaks = re.sub('([a-z0-9])([A-Z])', r'\1_\2', with_breaks)
  return with_breaks.lower()
class CPPGenerator(object):
  """Generates C++ bindings (module.h, module_internal.h, module.cc) from a
  mojom.Module by expanding string templates.

  Usage:
    cpp = CPPGenerator(module, header_dir, output_dir)
    cpp.GenerateFiles()
  """

  # One-line templates used while assembling struct/interface declarations
  # and serialization code. The $-placeholders are filled per field/param.
  struct_serialization_compute_template = \
      Template(" +\n mojo::internal::ComputeSizeOf($NAME->$FIELD())")
  struct_serialization_clone_template = Template(
      " clone->set_$FIELD(mojo::internal::Clone($NAME->$FIELD(), buf));")
  struct_serialization_encode_template = Template(
      " Encode(&$NAME->${FIELD}_, handles);")
  struct_serialization_encode_handle_template = Template(
      " EncodeHandle(&$NAME->${FIELD}_, handles);")
  struct_serialization_decode_template = Template(
      " if (!Decode(&$NAME->${FIELD}_, message))\n"
      " return false;")
  struct_serialization_decode_handle_template = Template(
      " if (!DecodeHandle(&$NAME->${FIELD}_, message.handles))\n"
      " return false;")
  param_set_template = Template(" params->set_$NAME($NAME);")
  param_struct_set_template = Template(
      " params->set_$NAME(mojo::internal::Clone($NAME, builder.buffer()));")
  param_struct_compute_template = Template(
      " payload_size += mojo::internal::ComputeSizeOf($NAME);")
  field_template = Template(" $TYPE ${FIELD}_;")
  bool_field_template = Template(" uint8_t ${FIELD}_ : 1;")
  setter_template = \
      Template(" void set_$FIELD($TYPE $FIELD) { ${FIELD}_ = $FIELD; }")
  ptr_setter_template = \
      Template(" void set_$FIELD($TYPE $FIELD) { ${FIELD}_.ptr = $FIELD; }")
  getter_template = \
      Template(" $TYPE $FIELD() const { return ${FIELD}_; }")
  ptr_getter_template = \
      Template(" $TYPE $FIELD() const { return ${FIELD}_.ptr; }")
  pad_template = Template(" uint8_t _pad${COUNT}_[$PAD];")

  # Class-level cache of file templates, keyed by template name.
  templates = {}
  # Size in bytes of the header that precedes every serialized struct.
  HEADER_SIZE = 8

  # Maps primitive mojom kinds to the C++ type used in generated code.
  kind_to_type = {
    mojom.BOOL: "bool",
    mojom.INT8: "int8_t",
    mojom.UINT8: "uint8_t",
    mojom.INT16: "int16_t",
    mojom.UINT16: "uint16_t",
    mojom.INT32: "int32_t",
    mojom.UINT32: "uint32_t",
    mojom.FLOAT: "float",
    mojom.HANDLE: "mojo::Handle",
    mojom.INT64: "int64_t",
    mojom.UINT64: "uint64_t",
    mojom.DOUBLE: "double",
  }

  @classmethod
  def GetTemplate(cls, template_name):
    """Load (and cache) a string.Template from the cpp_templates directory."""
    if template_name not in cls.templates:
      template_dir = os.path.dirname(__file__)
      filename = os.path.join(template_dir, 'cpp_templates', template_name)
      filename = filename.replace('.h', '.h-template')
      filename = filename.replace('.cc', '.cc-template')
      with open(filename, 'r') as template_file:
        cls.templates[template_name] = Template(template_file.read())
    return cls.templates[template_name]

  @classmethod
  def GetType(cls, kind):
    """Return the C++ type for |kind| as used in getters/setters."""
    if isinstance(kind, mojom.Struct):
      return "%s*" % kind.name
    if isinstance(kind, mojom.Array):
      return "mojo::Array<%s>*" % cls.GetType(kind.kind)
    if kind.spec == 's':
      return "mojo::String*"
    return cls.kind_to_type[kind]

  @classmethod
  def GetConstType(cls, kind):
    """Return the const-qualified C++ type for |kind| (for parameters)."""
    if isinstance(kind, mojom.Struct):
      return "const %s*" % kind.name
    if isinstance(kind, mojom.Array):
      return "const mojo::Array<%s>*" % cls.GetConstType(kind.kind)
    if kind.spec == 's':
      return "const mojo::String*"
    return cls.kind_to_type[kind]

  @classmethod
  def GetGetterLine(cls, field):
    """Expand the getter declaration for one struct field."""
    subs = {'FIELD': field.name, 'TYPE': cls.GetType(field.kind)}
    if IsPointerKind(field.kind):
      return cls.ptr_getter_template.substitute(subs)
    else:
      return cls.getter_template.substitute(subs)

  @classmethod
  def GetSetterLine(cls, field):
    """Expand the setter declaration for one struct field."""
    subs = {'FIELD': field.name, 'TYPE': cls.GetType(field.kind)}
    if IsPointerKind(field.kind):
      return cls.ptr_setter_template.substitute(subs)
    else:
      return cls.setter_template.substitute(subs)

  @classmethod
  def GetFieldLine(cls, field):
    """Expand the member-variable declaration for one struct field."""
    kind = field.kind
    if kind.spec == 'b':
      # Booleans are packed as single bits.
      return cls.bool_field_template.substitute(FIELD=field.name)
    itype = None
    if isinstance(kind, mojom.Struct):
      itype = "mojo::internal::StructPointer<%s>" % kind.name
    elif isinstance(kind, mojom.Array):
      itype = "mojo::internal::ArrayPointer<%s>" % cls.GetType(kind.kind)
    elif kind.spec == 's':
      itype = "mojo::internal::StringPointer"
    else:
      itype = cls.kind_to_type[kind]
    return cls.field_template.substitute(FIELD=field.name, TYPE=itype)

  @classmethod
  def GetCaseLine(cls, interface, method):
    """Expand the stub switch-case that dispatches one interface method."""
    params = map(lambda param: "params->%s()" % param.name, method.parameters)
    method_call = "%s(%s);" % (method.name, ", ".join(params))
    return cls.GetTemplate("interface_stub_case").substitute(
        CLASS=interface.name,
        METHOD=method.name,
        METHOD_CALL=method_call)

  @classmethod
  def GetSerializedFields(cls, ps):
    """Return the fields serialized via pointers (structs/arrays/strings)."""
    return [pf.field for pf in ps.packed_fields
            if IsPointerKind(pf.field.kind)]

  @classmethod
  def GetHandleFields(cls, ps):
    """Return the fields whose kind is a handle."""
    return [pf.field for pf in ps.packed_fields
            if pf.field.kind.spec == 'h']

  def GetHeaderGuard(self, name):
    """Build the include-guard macro for a generated header."""
    return "MOJO_GENERATED_BINDINGS_%s_%s_H_" % \
        (self.module.name.upper(), name.upper())

  def GetHeaderFile(self, *components):
    """Build the generated header path from CamelCase name components."""
    components = map(lambda c: CamelToUnderscores(c), components)
    component_string = '_'.join(components)
    return os.path.join(self.header_dir, "%s.h" % component_string)

  # Pass |output_dir| to emit files to disk. Omit |output_dir| to echo all
  # files to stdout.
  def __init__(self, module, header_dir, output_dir=None):
    self.module = module
    self.header_dir = header_dir
    self.output_dir = output_dir

  def WriteTemplateToFile(self, template_name, **substitutions):
    """Expand |template_name| with |substitutions| plus YEAR/NAMESPACE and
    write it to the output directory (or stdout when none was given)."""
    template = self.GetTemplate(template_name)
    filename = \
        template_name.replace("module", CamelToUnderscores(self.module.name))
    substitutions['YEAR'] = datetime.date.today().year
    substitutions['NAMESPACE'] = self.module.namespace
    if self.output_dir is None:
      out = sys.stdout
    else:
      out = open(os.path.join(self.output_dir, filename), "w+")
    try:
      out.write(template.substitute(substitutions))
    finally:
      # Never close sys.stdout.
      if self.output_dir is not None:
        out.close()

  def GetStructDeclaration(self, name, ps, template, subs=None):
    """Expand |template| with the fields/getters/setters of packed struct
    |ps|, inserting explicit padding so the C++ layout matches the packed
    mojom layout. Extra substitutions may be passed via |subs|."""
    # BUG FIX: |subs| defaulted to a shared mutable dict that was mutated
    # via subs.update() below; use a fresh dict per call.
    if subs is None:
      subs = {}
    fields = []
    setters = []
    getters = []
    pad_count = 0
    num_fields = len(ps.packed_fields)
    for i in xrange(num_fields):
      pf = ps.packed_fields[i]
      field = pf.field
      fields.append(self.GetFieldLine(field))
      if i < (num_fields - 1):
        # Pad up to the next field's offset.
        next_pf = ps.packed_fields[i + 1]
        pad = next_pf.offset - (pf.offset + pf.size)
        if pad > 0:
          fields.append(self.pad_template.substitute(COUNT=pad_count, PAD=pad))
          pad_count += 1
      setters.append(self.GetSetterLine(field))
      getters.append(self.GetGetterLine(field))
    if num_fields > 0:
      # Pad the tail of the struct to an 8-byte boundary.
      last_field = ps.packed_fields[num_fields - 1]
      offset = last_field.offset + last_field.size
      pad = mojom_pack.GetPad(offset, 8)
      if pad > 0:
        fields.append(self.pad_template.substitute(COUNT=pad_count, PAD=pad))
        pad_count += 1
      size = offset + pad
    else:
      size = 0
    subs.update(
        CLASS=name,
        SETTERS='\n'.join(setters),
        GETTERS='\n'.join(getters),
        FIELDS='\n'.join(fields),
        SIZE=size + self.HEADER_SIZE)
    return template.substitute(subs)

  def GetStructSerialization(
      self, class_name, param_name, ps, template, indent=None):
    """Expand the encode/decode serialization for packed struct |ps|."""
    encodes = Lines(self.struct_serialization_encode_template, indent)
    encode_handles = \
        Lines(self.struct_serialization_encode_handle_template, indent)
    decodes = Lines(self.struct_serialization_decode_template, indent)
    decode_handles = \
        Lines(self.struct_serialization_decode_handle_template, indent)
    fields = self.GetSerializedFields(ps)
    handle_fields = self.GetHandleFields(ps)
    for field in fields:
      substitutions = {'NAME': param_name, 'FIELD': field.name}
      encodes.Add(substitutions)
      decodes.Add(substitutions)
    for field in handle_fields:
      substitutions = {'NAME': param_name, 'FIELD': field.name}
      encode_handles.Add(substitutions)
      decode_handles.Add(substitutions)
    return template.substitute(
        CLASS="%s::%s" % (self.module.namespace, class_name),
        NAME=param_name,
        ENCODES=encodes,
        DECODES=decodes,
        ENCODE_HANDLES=encode_handles,
        DECODE_HANDLES=decode_handles)

  def GetStructClassDeclarations(self):
    """Expand the class declaration for every struct in the module."""
    struct_decls = map(
        lambda s: self.GetStructDeclaration(
            s.name,
            mojom_pack.PackedStruct(s),
            self.GetTemplate("struct_declaration"),
            {}),
        self.module.structs)
    return '\n'.join(struct_decls)

  def GetInterfaceClassDeclaration(self, interface, template, method_postfix):
    """Expand one interface class declaration; |method_postfix| is appended
    to each method (e.g. " = 0" for the abstract class)."""
    methods = []
    for method in interface.methods:
      params = []
      for param in method.parameters:
        params.append("%s %s" % (self.GetConstType(param.kind), param.name))
      methods.append(
          "  virtual void %s(%s) %s;" %
          (method.name, ", ".join(params), method_postfix))
    # BUG FIX: methods were joined with '.\n', which injected a stray '.'
    # after every declaration in the generated C++.
    return template.substitute(
        CLASS=interface.name,
        METHODS='\n'.join(methods))

  def GetInterfaceClassDeclarations(self):
    """Expand the abstract class declaration for every interface."""
    template = self.GetTemplate("interface_declaration")
    interface_decls = \
        map(lambda i:
            self.GetInterfaceClassDeclaration(i, template, " = 0"),
            self.module.interfaces)
    return '\n'.join(interface_decls)

  def GetInterfaceProxyDeclarations(self):
    """Expand the proxy class declaration for every interface."""
    template = self.GetTemplate("interface_proxy_declaration")
    interface_decls = \
        map(lambda i:
            self.GetInterfaceClassDeclaration(i, template, "MOJO_OVERRIDE"),
            self.module.interfaces)
    return '\n'.join(interface_decls)

  def GetInterfaceStubDeclarations(self):
    """Expand the stub class declaration for every interface."""
    template = self.GetTemplate("interface_stub_declaration")
    interface_decls = \
        map(lambda i: template.substitute(CLASS=i.name), self.module.interfaces)
    return '\n'.join(interface_decls)

  def GenerateModuleHeader(self):
    """Emit module.h (structs, interfaces, proxies, stubs)."""
    # NOTE(review): 'DECLARARTIONS' is misspelled but must match the
    # placeholder in module.h-template -- confirm before renaming.
    self.WriteTemplateToFile("module.h",
        HEADER_GUARD=self.GetHeaderGuard(self.module.name),
        STRUCT_CLASS_DECLARARTIONS=self.GetStructClassDeclarations(),
        INTERFACE_CLASS_DECLARATIONS=self.GetInterfaceClassDeclarations(),
        INTERFACE_PROXY_DECLARATIONS=self.GetInterfaceProxyDeclarations(),
        INTERFACE_STUB_DECLARATIONS=self.GetInterfaceStubDeclarations())

  def GetParamsDefinition(self, interface, method, next_id):
    """Expand the params struct for one method; returns (definition,
    next_message_id). An explicit ordinal overrides the running id."""
    struct = GetStructFromMethod(interface, method)
    method_name = "k%s_%s_Name" % (interface.name, method.name)
    if method.ordinal is not None:
      next_id = method.ordinal
    params_def = self.GetStructDeclaration(
        struct.name,
        mojom_pack.PackedStruct(struct),
        self.GetTemplate("params_definition"),
        {'METHOD_NAME': method_name, 'METHOD_ID': next_id})
    return params_def, next_id + 1

  def GetStructDefinitions(self):
    """Expand the out-of-line definition for every struct."""
    template = self.GetTemplate("struct_definition")
    return '\n'.join(map(
        lambda s: template.substitute(
            CLASS=s.name, NUM_FIELDS=len(s.fields)),
        self.module.structs))

  def GetInterfaceDefinition(self, interface):
    """Expand the proxy implementations and stub definition for one
    interface."""
    cases = []
    implementations = Lines(self.GetTemplate("proxy_implementation"))
    for method in interface.methods:
      cases.append(self.GetCaseLine(interface, method))
      sets = []
      computes = Lines(self.param_struct_compute_template)
      for param in method.parameters:
        if IsPointerKind(param.kind):
          # Pointer params are cloned into the message buffer and their
          # size contributes to the payload.
          sets.append(
              self.param_struct_set_template.substitute(NAME=param.name))
          computes.Add(NAME=param.name)
        else:
          sets.append(self.param_set_template.substitute(NAME=param.name))
      params_list = map(
          lambda param: "%s %s" % (self.GetConstType(param.kind), param.name),
          method.parameters)
      name = "k%s_%s_Name" % (interface.name, method.name)
      params_name = "%s_%s_Params" % (interface.name, method.name)
      implementations.Add(
          CLASS=interface.name,
          METHOD=method.name,
          NAME=name,
          PARAMS=params_name,
          PARAMS_LIST=', '.join(params_list),
          COMPUTES=computes,
          SETS='\n'.join(sets))
    stub_definition = self.GetTemplate("interface_stub_definition").substitute(
        CLASS=interface.name,
        CASES='\n'.join(cases))
    return self.GetTemplate("interface_definition").substitute(
        CLASS=interface.name + "Proxy",
        PROXY_DEFINITIONS=implementations,
        STUB_DEFINITION=stub_definition)

  def GetInterfaceDefinitions(self):
    """Expand definitions for every interface in the module."""
    return '\n'.join(
        map(lambda i: self.GetInterfaceDefinition(i), self.module.interfaces))

  def GetStructSerializationDefinition(self, struct):
    """Expand ComputeSizeOf/Clone/serialization code for one struct."""
    ps = mojom_pack.PackedStruct(struct)
    param_name = CamelToUnderscores(struct.name)
    clones = Lines(self.struct_serialization_clone_template)
    sizes = " return sizeof(*%s)" % param_name
    fields = self.GetSerializedFields(ps)
    for field in fields:
      substitutions = {'NAME': param_name, 'FIELD': field.name}
      sizes += \
          self.struct_serialization_compute_template.substitute(substitutions)
      clones.Add(substitutions)
    sizes += ";"
    serialization = self.GetStructSerialization(
        struct.name, param_name, ps, self.GetTemplate("struct_serialization"))
    return self.GetTemplate("struct_serialization_definition").substitute(
        NAME=param_name,
        CLASS="%s::%s" % (self.module.namespace, struct.name),
        SIZES=sizes,
        CLONES=clones,
        SERIALIZATION=serialization)

  def GetStructSerializationDefinitions(self):
    """Expand serialization definitions for every struct."""
    return '\n'.join(
        map(lambda i: self.GetStructSerializationDefinition(i),
            self.module.structs))

  def GetInterfaceSerializationDefinitions(self):
    """Expand params-struct serialization for every interface method."""
    serializations = []
    for interface in self.module.interfaces:
      for method in interface.methods:
        struct = GetStructFromMethod(interface, method)
        ps = mojom_pack.PackedStruct(struct)
        serializations.append(self.GetStructSerialization(
            struct.name,
            "params",
            ps,
            self.GetTemplate("params_serialization"),
            2))
    return '\n'.join(serializations)

  def GetParamsDefinitions(self):
    """Expand params structs (with message ids) for every interface method."""
    params_defs = []
    for interface in self.module.interfaces:
      next_id = 0
      for method in interface.methods:
        (params_def, next_id) = \
            self.GetParamsDefinition(interface, method, next_id)
        params_defs.append(params_def)
    return '\n'.join(params_defs)

  def GenerateModuleSource(self):
    """Emit module.cc."""
    self.WriteTemplateToFile("module.cc",
        HEADER=self.GetHeaderFile(self.module.name),
        INTERNAL_HEADER=self.GetHeaderFile(self.module.name, "internal"),
        PARAM_DEFINITIONS=self.GetParamsDefinitions(),
        STRUCT_DEFINITIONS=self.GetStructDefinitions(),
        INTERFACE_DEFINITIONS=self.GetInterfaceDefinitions(),
        STRUCT_SERIALIZATION_DEFINITIONS=
            self.GetStructSerializationDefinitions(),
        INTERFACE_SERIALIZATION_DEFINITIONS=
            self.GetInterfaceSerializationDefinitions())

  def GenerateModuleInternalHeader(self):
    """Emit module_internal.h (serialization traits)."""
    traits = map(
        lambda s: self.GetTemplate("struct_serialization_traits").substitute(
            NAME=CamelToUnderscores(s.name),
            FULL_CLASS="%s::%s" % (self.module.namespace, s.name)),
        self.module.structs)
    self.WriteTemplateToFile("module_internal.h",
        HEADER_GUARD=self.GetHeaderGuard(self.module.name + "_INTERNAL"),
        HEADER=self.GetHeaderFile(self.module.name),
        TRAITS='\n'.join(traits))

  def GenerateFiles(self):
    """Generate all three output files for the module."""
    self.GenerateModuleHeader()
    self.GenerateModuleInternalHeader()
    self.GenerateModuleSource()
| |
# Copyright (c) 2013 eBay Software Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from trove.common import cfg
from trove.common import exception
from trove.common import instance as rd_instance
from trove.guestagent import backup
from trove.guestagent import dbaas
from trove.guestagent import volume
from trove.guestagent.datastore.couchbase import service
from trove.guestagent.datastore.couchbase import system
from trove.openstack.common import log as logging
from trove.openstack.common import periodic_task
from trove.openstack.common.gettextutils import _
# Module-level logger for this guestagent manager.
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
# Datastore manager name; used in "operation not supported" errors and to
# look up datastore-specific configuration such as the mount point.
MANAGER = CONF.datastore_manager
class Manager(periodic_task.PeriodicTasks):
    """
    This is Couchbase Manager class. It is dynamically loaded
    based off of the datastore of the trove instance
    """

    def __init__(self):
        # Status tracker and app wrapper for this Couchbase instance.
        self.appStatus = service.CouchbaseAppStatus()
        self.app = service.CouchbaseApp(self.appStatus)

    @periodic_task.periodic_task(ticks_between_runs=3)
    def update_status(self, context):
        """
        Updates the couchbase trove instance. It is decorated with
        periodic task so it is automatically called every 3 ticks.
        """
        self.appStatus.update()

    def change_passwords(self, context, users):
        raise exception.DatastoreOperationNotSupported(
            operation='change_passwords', datastore=MANAGER)

    def reset_configuration(self, context, configuration):
        self.app.reset_configuration(configuration)

    def prepare(self, context, packages, databases, memory_mb, users,
                device_path=None, mount_point=None, backup_info=None,
                config_contents=None, root_password=None, overrides=None,
                cluster_config=None):
        """
        This is called when the trove instance first comes online.
        It is the first rpc message passed from the task manager.
        prepare handles all the base configuration of the Couchbase instance.
        """
        self.appStatus.begin_install()
        self.app.install_if_needed(packages)
        if device_path:
            device = volume.VolumeDevice(device_path)
            # unmount if device is already mounted
            device.unmount_device(device_path)
            device.format()
            device.mount(mount_point)
            LOG.debug('Mounted the volume (%s).' % device_path)
        self.app.start_db_with_conf_changes(config_contents)
        LOG.debug('Securing couchbase now.')
        if root_password:
            self.app.enable_root(root_password)
        self.app.initial_setup()
        if backup_info:
            LOG.debug('Now going to perform restore.')
            self._perform_restore(backup_info,
                                  context,
                                  mount_point)
        self.app.complete_install_or_restart()
        LOG.info(_('Completed setup of Couchbase database instance.'))

    def restart(self, context):
        """
        Restart this couchbase instance.
        This method is called when the guest agent
        gets a restart message from the taskmanager.
        """
        self.app.restart()

    def start_db_with_conf_changes(self, context, config_contents):
        self.app.start_db_with_conf_changes(config_contents)

    def stop_db(self, context, do_not_start_on_reboot=False):
        """
        Stop this couchbase instance.
        This method is called when the guest agent
        gets a stop message from the taskmanager.
        """
        self.app.stop_db(do_not_start_on_reboot=do_not_start_on_reboot)

    def get_filesystem_stats(self, context, fs_path):
        """Gets the filesystem stats for the path given."""
        mount_point = CONF.get(
            'mysql' if not MANAGER else MANAGER).mount_point
        return dbaas.get_filesystem_volume_stats(mount_point)

    def update_attributes(self, context, username, hostname, user_attrs):
        raise exception.DatastoreOperationNotSupported(
            operation='update_attributes', datastore=MANAGER)

    def create_database(self, context, databases):
        raise exception.DatastoreOperationNotSupported(
            operation='create_database', datastore=MANAGER)

    def create_user(self, context, users):
        raise exception.DatastoreOperationNotSupported(
            operation='create_user', datastore=MANAGER)

    def delete_database(self, context, database):
        raise exception.DatastoreOperationNotSupported(
            operation='delete_database', datastore=MANAGER)

    def delete_user(self, context, user):
        raise exception.DatastoreOperationNotSupported(
            operation='delete_user', datastore=MANAGER)

    def get_user(self, context, username, hostname):
        raise exception.DatastoreOperationNotSupported(
            operation='get_user', datastore=MANAGER)

    def grant_access(self, context, username, hostname, databases):
        raise exception.DatastoreOperationNotSupported(
            operation='grant_access', datastore=MANAGER)

    def revoke_access(self, context, username, hostname, database):
        raise exception.DatastoreOperationNotSupported(
            operation='revoke_access', datastore=MANAGER)

    def list_access(self, context, username, hostname):
        raise exception.DatastoreOperationNotSupported(
            operation='list_access', datastore=MANAGER)

    def list_databases(self, context, limit=None, marker=None,
                       include_marker=False):
        raise exception.DatastoreOperationNotSupported(
            operation='list_databases', datastore=MANAGER)

    def list_users(self, context, limit=None, marker=None,
                   include_marker=False):
        raise exception.DatastoreOperationNotSupported(
            operation='list_users', datastore=MANAGER)

    def enable_root(self, context):
        return self.app.enable_root()

    def is_root_enabled(self, context):
        return os.path.exists(system.pwd_file)

    def _perform_restore(self, backup_info, context, restore_location):
        """
        Restores all couchbase buckets and their documents from the
        backup.
        """
        LOG.info(_("Restoring database from backup %s") %
                 backup_info['id'])
        try:
            backup.restore(context, backup_info, restore_location)
        except Exception as e:
            LOG.error(_("Error performing restore from backup %s") %
                      backup_info['id'])
            LOG.error(e)
            # BUG FIX: status is tracked via self.appStatus (set in
            # __init__); self.status does not exist and raised
            # AttributeError while handling the restore failure.
            self.appStatus.set_status(rd_instance.ServiceStatuses.FAILED)
            raise
        LOG.info(_("Restored database successfully"))

    def create_backup(self, context, backup_info):
        """
        Backup all couchbase buckets and their documents.
        """
        backup.backup(context, backup_info)

    def mount_volume(self, context, device_path=None, mount_point=None):
        device = volume.VolumeDevice(device_path)
        device.mount(mount_point, write_to_fstab=False)
        LOG.debug("Mounted the device %s at the mount_point %s." %
                  (device_path, mount_point))

    def unmount_volume(self, context, device_path=None, mount_point=None):
        device = volume.VolumeDevice(device_path)
        device.unmount(mount_point)
        LOG.debug("Unmounted the device %s from the mount point %s." %
                  (device_path, mount_point))

    def resize_fs(self, context, device_path=None, mount_point=None):
        device = volume.VolumeDevice(device_path)
        device.resize_fs(mount_point)
        LOG.debug("Resized the filesystem at %s." % mount_point)

    def update_overrides(self, context, overrides, remove=False):
        LOG.debug("Updating overrides.")
        raise exception.DatastoreOperationNotSupported(
            operation='update_overrides', datastore=MANAGER)

    def apply_overrides(self, context, overrides):
        LOG.debug("Applying overrides.")
        raise exception.DatastoreOperationNotSupported(
            operation='apply_overrides', datastore=MANAGER)

    def get_replication_snapshot(self, context, snapshot_info):
        raise exception.DatastoreOperationNotSupported(
            operation='get_replication_snapshot', datastore=MANAGER)

    def attach_replication_slave(self, context, snapshot, slave_config):
        LOG.debug("Attaching replication slave.")
        raise exception.DatastoreOperationNotSupported(
            operation='attach_replication_slave', datastore=MANAGER)

    def detach_replica(self, context):
        raise exception.DatastoreOperationNotSupported(
            operation='detach_replica', datastore=MANAGER)

    def demote_replication_master(self, context):
        LOG.debug("Demoting replication slave.")
        raise exception.DatastoreOperationNotSupported(
            operation='demote_replication_master', datastore=MANAGER)
| |
"""Config flow for WattTime integration."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from aiowatttime import Client
from aiowatttime.errors import CoordinatesNotFoundError, InvalidCredentialsError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry, OptionsFlow
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_PASSWORD,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers import aiohttp_client, config_validation as cv
from .const import (
CONF_BALANCING_AUTHORITY,
CONF_BALANCING_AUTHORITY_ABBREV,
CONF_SHOW_ON_MAP,
DOMAIN,
LOGGER,
)
# Form key for choosing between home coordinates and manual coordinates.
CONF_LOCATION_TYPE = "location_type"
LOCATION_TYPE_COORDINATES = "Specify coordinates"
LOCATION_TYPE_HOME = "Use home location"

# Schema for the "coordinates" step: explicit latitude/longitude.
STEP_COORDINATES_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_LATITUDE): cv.latitude,
        vol.Required(CONF_LONGITUDE): cv.longitude,
    }
)

# Schema for the "location" step: pick home vs. manual coordinates.
STEP_LOCATION_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_LOCATION_TYPE): vol.In(
            [LOCATION_TYPE_HOME, LOCATION_TYPE_COORDINATES]
        ),
    }
)

# Schema for re-auth: only the password is re-collected.
STEP_REAUTH_CONFIRM_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PASSWORD): str,
    }
)

# Schema for the initial user step: full credentials.
STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_USERNAME): str,
        vol.Required(CONF_PASSWORD): str,
    }
)
@callback
def get_unique_id(data: dict[str, Any]) -> str:
    """Build the config-entry unique ID from a payload's coordinates."""
    latitude = data[CONF_LATITUDE]
    longitude = data[CONF_LONGITUDE]
    return f"{latitude}, {longitude}"
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for WattTime."""

    VERSION = 1

    def __init__(self) -> None:
        """Initialize."""
        # Authenticated API client, set after a successful login.
        self._client: Client | None = None
        # Data accumulated across steps (credentials, then coordinates).
        self._data: dict[str, Any] = {}

    async def _async_validate_credentials(
        self, username: str, password: str, error_step_id: str, error_schema: vol.Schema
    ) -> FlowResult:
        """Validate input credentials and proceed accordingly.

        On login failure, re-shows the form identified by *error_step_id*
        with *error_schema*. On success, either completes a re-auth (when
        coordinates are already stored) or advances to the location step.
        """
        session = aiohttp_client.async_get_clientsession(self.hass)

        try:
            self._client = await Client.async_login(username, password, session=session)
        except InvalidCredentialsError:
            return self.async_show_form(
                step_id=error_step_id,
                data_schema=error_schema,
                errors={"base": "invalid_auth"},
                description_placeholders={CONF_USERNAME: username},
            )
        except Exception as err:  # pylint: disable=broad-except
            LOGGER.exception("Unexpected exception while logging in: %s", err)
            return self.async_show_form(
                step_id=error_step_id,
                data_schema=error_schema,
                errors={"base": "unknown"},
                description_placeholders={CONF_USERNAME: username},
            )

        if CONF_LATITUDE in self._data:
            # If coordinates already exist at this stage, we're in an existing flow and
            # should reauth:
            entry_unique_id = get_unique_id(self._data)
            if existing_entry := await self.async_set_unique_id(entry_unique_id):
                self.hass.config_entries.async_update_entry(
                    existing_entry, data=self._data
                )
                # Reload the entry so the new credentials take effect.
                self.hass.async_create_task(
                    self.hass.config_entries.async_reload(existing_entry.entry_id)
                )
                return self.async_abort(reason="reauth_successful")

        # ...otherwise, we're in a new flow:
        self._data[CONF_USERNAME] = username
        self._data[CONF_PASSWORD] = password
        return await self.async_step_location()

    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
        """Define the config flow to handle options."""
        return WattTimeOptionsFlowHandler(config_entry)

    async def async_step_coordinates(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle the coordinates step.

        Validates the coordinates against the WattTime API and, on
        success, creates the config entry.
        """
        if not user_input:
            return self.async_show_form(
                step_id="coordinates", data_schema=STEP_COORDINATES_DATA_SCHEMA
            )

        if TYPE_CHECKING:
            # _async_validate_credentials always ran before this step.
            assert self._client

        unique_id = get_unique_id(user_input)
        await self.async_set_unique_id(unique_id)
        self._abort_if_unique_id_configured()

        try:
            grid_region = await self._client.emissions.async_get_grid_region(
                user_input[CONF_LATITUDE], user_input[CONF_LONGITUDE]
            )
        except CoordinatesNotFoundError:
            return self.async_show_form(
                step_id="coordinates",
                data_schema=STEP_COORDINATES_DATA_SCHEMA,
                errors={CONF_LATITUDE: "unknown_coordinates"},
            )
        except Exception as err:  # pylint: disable=broad-except
            LOGGER.exception("Unexpected exception while getting region: %s", err)
            return self.async_show_form(
                step_id="coordinates",
                data_schema=STEP_COORDINATES_DATA_SCHEMA,
                errors={"base": "unknown"},
            )

        return self.async_create_entry(
            title=unique_id,
            data={
                CONF_USERNAME: self._data[CONF_USERNAME],
                CONF_PASSWORD: self._data[CONF_PASSWORD],
                CONF_LATITUDE: user_input[CONF_LATITUDE],
                CONF_LONGITUDE: user_input[CONF_LONGITUDE],
                CONF_BALANCING_AUTHORITY: grid_region["name"],
                CONF_BALANCING_AUTHORITY_ABBREV: grid_region["abbrev"],
            },
        )

    async def async_step_location(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle the "pick a location" step."""
        if not user_input:
            return self.async_show_form(
                step_id="location", data_schema=STEP_LOCATION_DATA_SCHEMA
            )

        if user_input[CONF_LOCATION_TYPE] == LOCATION_TYPE_HOME:
            # Feed the home coordinates straight into the coordinates step.
            return await self.async_step_coordinates(
                {
                    CONF_LATITUDE: self.hass.config.latitude,
                    CONF_LONGITUDE: self.hass.config.longitude,
                }
            )
        return await self.async_step_coordinates()

    async def async_step_reauth(self, config: dict[str, Any]) -> FlowResult:
        """Handle configuration by re-auth."""
        # Keep the existing entry data; only the password is re-collected.
        self._data = {**config}
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle re-auth completion."""
        if not user_input:
            return self.async_show_form(
                step_id="reauth_confirm",
                data_schema=STEP_REAUTH_CONFIRM_DATA_SCHEMA,
                description_placeholders={CONF_USERNAME: self._data[CONF_USERNAME]},
            )

        self._data[CONF_PASSWORD] = user_input[CONF_PASSWORD]

        return await self._async_validate_credentials(
            self._data[CONF_USERNAME],
            self._data[CONF_PASSWORD],
            "reauth_confirm",
            STEP_REAUTH_CONFIRM_DATA_SCHEMA,
        )

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle the initial step."""
        if not user_input:
            return self.async_show_form(
                step_id="user", data_schema=STEP_USER_DATA_SCHEMA
            )

        return await self._async_validate_credentials(
            user_input[CONF_USERNAME],
            user_input[CONF_PASSWORD],
            "user",
            STEP_USER_DATA_SCHEMA,
        )
class WattTimeOptionsFlowHandler(config_entries.OptionsFlow):
    """Handle a WattTime options flow."""

    def __init__(self, entry: ConfigEntry) -> None:
        """Initialize."""
        self.entry = entry

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Manage the options."""
        if user_input is None:
            # Pre-select the currently stored option (defaulting to True).
            options_schema = vol.Schema(
                {
                    vol.Required(
                        CONF_SHOW_ON_MAP,
                        default=self.entry.options.get(CONF_SHOW_ON_MAP, True),
                    ): bool
                }
            )
            return self.async_show_form(step_id="init", data_schema=options_schema)
        return self.async_create_entry(title="", data=user_input)
| |
from pathlib import Path
import os
import unittest
from sox import combine
from sox.core import SoxError
def relpath(f):
    """Resolve *f* relative to the directory containing this test module."""
    here = os.path.dirname(__file__)
    return os.path.join(here, f)
# Paths to the audio fixtures used throughout these tests.
INPUT_WAV = relpath('data/input.wav')
INPUT_WAV2 = relpath('data/input2.wav')  # different sample rate than INPUT_WAV (see TestValidateSampleRates)
INPUT_WAV3 = relpath('data/input3.wav')  # different channel count than INPUT_WAV (see TestValidateNumChannels)
INPUT_AIFF = relpath('data/input.aiff')
INPUT_FILE_INVALID = relpath('data/input.xyz')  # unsupported file extension
OUTPUT_FILE = relpath('data/output.wav')
def new_combiner():
    """Return a fresh ``combine.Combiner`` so tests never share state."""
    return combine.Combiner()
class TestCombineDefault(unittest.TestCase):
    """Defaults and basic build behaviour of a fresh Combiner."""

    def setUp(self):
        self.cbn = new_combiner()

    def test_globals(self):
        self.assertEqual(['-D', '-V2'], self.cbn.globals)

    def test_output_format(self):
        self.assertEqual({}, self.cbn.output_format)

    def test_effects(self):
        self.assertEqual([], self.cbn.effects)

    def test_effects_log(self):
        self.assertEqual([], self.cbn.effects_log)

    def test_build(self):
        self.assertEqual(
            True,
            self.cbn.build([INPUT_WAV, INPUT_WAV], OUTPUT_FILE, 'concatenate'))

    def test_build_pathlib(self):
        # BUG FIX: the second element was the bare string wrapped in
        # parentheses — ``(INPUT_WAV)`` — so pathlib inputs were only half
        # tested; both inputs are now Path objects.
        self.assertEqual(
            True,
            self.cbn.build(
                [Path(INPUT_WAV), Path(INPUT_WAV)], Path(OUTPUT_FILE),
                'concatenate'))

    def test_build_with_vols(self):
        self.assertEqual(
            True,
            self.cbn.build(
                [INPUT_WAV, INPUT_WAV], OUTPUT_FILE, 'mix',
                input_volumes=[0.5, 2]))

    def test_failed_build(self):
        cbn = new_combiner()
        with self.assertRaises(SoxError):
            cbn.build(
                [INPUT_FILE_INVALID, INPUT_WAV], OUTPUT_FILE, 'concatenate'
            )

    def test_build_with_output_format(self):
        cbn = new_combiner()
        cbn.set_output_format(rate=8000)
        # BUG FIX: the original built with ``self.cbn``, so the combiner
        # carrying the output format was never actually exercised.
        self.assertEqual(
            True,
            cbn.build([INPUT_WAV, INPUT_WAV], OUTPUT_FILE, 'concatenate'))
class TestCombineTypes(unittest.TestCase):
    """Every supported combine type builds successfully."""

    def setUp(self):
        self.cbn = new_combiner()

    def _build(self, combine_type):
        # Helper: run a two-input build with the given combine type.
        return self.cbn.build([INPUT_WAV, INPUT_WAV], OUTPUT_FILE, combine_type)

    def test_concatenate(self):
        self.assertEqual(True, self._build('concatenate'))

    def test_merge(self):
        self.assertEqual(True, self._build('merge'))

    def test_mix(self):
        self.assertEqual(True, self._build('mix'))

    def test_mixpower(self):
        self.assertEqual(True, self._build('mix-power'))

    def test_multiply(self):
        self.assertEqual(True, self._build('multiply'))
class TestSetInputFormat(unittest.TestCase):
    """Validation and flag generation of ``Combiner.set_input_format``."""

    def _fmt(self, **kwargs):
        # Helper: apply kwargs on a fresh combiner, return its input_format.
        cbn = new_combiner()
        cbn.set_input_format(**kwargs)
        return cbn.input_format

    def _invalid(self, **kwargs):
        # Helper: assert the kwargs are rejected with ValueError.
        with self.assertRaises(ValueError):
            new_combiner().set_input_format(**kwargs)

    def _mix(self, files, rate, channels, input_volumes=None):
        # Helper: build a mix after configuring per-input rate/channels.
        cbn = new_combiner()
        cbn.set_input_format(rate=rate, channels=channels)
        return cbn.build(files, OUTPUT_FILE, 'mix', input_volumes=input_volumes)

    def test_none(self):
        self.assertEqual([], self._fmt())

    def test_file_type(self):
        self.assertEqual([['-t', 'wav'], ['-t', 'aiff']],
                         self._fmt(file_type=['wav', 'aiff']))

    def test_invalid_file_type(self):
        # Scalars are rejected: one entry per input file is required.
        self._invalid(file_type='wav')

    def test_invalid_file_type_val(self):
        self._invalid(file_type=['xyz', 'wav'])

    def test_rate(self):
        self.assertEqual([['-r', '2000'], ['-r', '44100'], ['-r', '22050']],
                         self._fmt(rate=[2000, 44100, 22050]))

    def test_invalid_rate(self):
        self._invalid(rate=2000)

    def test_invalid_rate_val(self):
        self._invalid(rate=[-2, 'a'])

    def test_bits(self):
        self.assertEqual([['-b', '16']], self._fmt(bits=[16]))

    def test_invalid_bits(self):
        self._invalid(bits=32)

    def test_invalid_bits_val(self):
        self._invalid(bits=[0])

    def test_channels(self):
        self.assertEqual([['-c', '1'], ['-c', '2'], ['-c', '3']],
                         self._fmt(channels=[1, 2, 3]))

    def test_invalid_channels(self):
        self._invalid(channels='x')

    def test_invalid_channels_val(self):
        self._invalid(channels=[1.5, 2, 3])

    def test_encoding(self):
        self.assertEqual([['-e', 'floating-point'], ['-e', 'oki-adpcm']],
                         self._fmt(encoding=['floating-point', 'oki-adpcm']))

    def test_invalid_encoding(self):
        self._invalid(encoding='wav')

    def test_invalid_encoding_val(self):
        self._invalid(encoding=['xyz', 'wav'])

    def test_ignore_length(self):
        # False entries contribute no flag, only an empty slot.
        self.assertEqual([['--ignore-length'], [], ['--ignore-length']],
                         self._fmt(ignore_length=[True, False, True]))

    def test_invalid_ignore_length(self):
        self._invalid(ignore_length=1)

    def test_invalid_ignore_length_val(self):
        self._invalid(ignore_length=[False, True, 3])

    def test_multiple_same_len(self):
        self.assertEqual([['-r', '44100', '-b', '32'], ['-r', '2000', '-b', '8']],
                         self._fmt(rate=[44100, 2000], bits=[32, 8]))

    def test_multiple_different_len(self):
        # Lists of different lengths are zipped loosely; extras get their
        # own trailing entries.
        self.assertEqual(
            [['-r', '44100', '-b', '32'], ['-r', '2000', '-b', '8'], ['-b', '16']],
            self._fmt(rate=[44100, 2000], bits=[32, 8, 16]))

    def test_build_same_len(self):
        self.assertEqual(
            True, self._mix([INPUT_WAV, INPUT_WAV], [44100, 44100], [1, 1]))

    def test_build_same_len_vol(self):
        self.assertEqual(
            True,
            self._mix([INPUT_WAV, INPUT_WAV], [44100, 44100], [1, 1], [1, 2]))

    def test_build_greater_len(self):
        self.assertEqual(
            True,
            self._mix([INPUT_WAV, INPUT_WAV], [44100, 44100, 44100], [1, 1]))

    def test_build_greater_len_vol(self):
        self.assertEqual(
            True,
            self._mix([INPUT_WAV, INPUT_WAV], [44100, 44100, 44100], [1, 1],
                      [1, 2]))

    def test_build_lesser_len(self):
        self.assertEqual(
            True,
            self._mix([INPUT_WAV, INPUT_WAV, INPUT_WAV], [44100, 44100], [1, 1]))

    def test_build_lesser_len_vol(self):
        self.assertEqual(
            True,
            self._mix([INPUT_WAV, INPUT_WAV, INPUT_WAV], [44100, 44100], [1, 1],
                      [1, 2]))
class TestValidateFileFormats(unittest.TestCase):
    """``_validate_file_formats`` rejects mismatched inputs."""

    def test_different_samplerates(self):
        self.assertRaises(
            IOError, combine._validate_file_formats,
            [INPUT_WAV, INPUT_WAV2], 'mix')

    def test_different_num_channels(self):
        self.assertRaises(
            IOError, combine._validate_file_formats,
            [INPUT_WAV, INPUT_WAV3], 'concatenate')
class TestValidateSampleRates(unittest.TestCase):
    """``_validate_sample_rates`` compares sample rates across inputs."""

    def test_different_samplerates(self):
        self.assertRaises(
            IOError, combine._validate_sample_rates,
            [INPUT_WAV, INPUT_WAV2], 'mix')

    def test_same_samplerates(self):
        self.assertIsNone(
            combine._validate_sample_rates([INPUT_WAV, INPUT_WAV], 'mix'))
class TestValidateNumChannels(unittest.TestCase):
    """``_validate_num_channels`` compares channel counts across inputs."""

    def test_different_numchannels(self):
        self.assertRaises(
            IOError, combine._validate_num_channels,
            [INPUT_WAV, INPUT_WAV3], 'mix')

    def test_same_numchannels(self):
        self.assertIsNone(
            combine._validate_num_channels([INPUT_WAV, INPUT_WAV], 'mix'))
class TestBuildInputFormatList(unittest.TestCase):
    """Volume and format flags are zipped against the input file list."""

    def test_none(self):
        self.assertEqual(
            [['-v', '1'], ['-v', '1']],
            combine._build_input_format_list([INPUT_WAV, INPUT_WAV], None, None))

    def test_equal_num_vol(self):
        self.assertEqual(
            [['-v', '0.5'], ['-v', '1.1']],
            combine._build_input_format_list(
                [INPUT_WAV, INPUT_WAV], [0.5, 1.1], None))

    def test_greater_num_vol(self):
        # Extra volumes beyond the number of files are dropped.
        self.assertEqual(
            [['-v', '0.5'], ['-v', '1.1']],
            combine._build_input_format_list(
                [INPUT_WAV, INPUT_WAV], [0.5, 1.1, 3], None))

    def test_lesser_num_vol(self):
        # Missing volumes default to 1.
        self.assertEqual(
            [['-v', '0.5'], ['-v', '1.1'], ['-v', '1']],
            combine._build_input_format_list(
                [INPUT_WAV, INPUT_WAV, INPUT_WAV], [0.5, 1.1], None))

    def test_equal_num_fmt(self):
        self.assertEqual(
            [['-v', '1', '-t', 'wav'], ['-v', '1', '-t', 'aiff']],
            combine._build_input_format_list(
                [INPUT_WAV, INPUT_WAV], None, [['-t', 'wav'], ['-t', 'aiff']]))

    def test_greater_num_fmt(self):
        # Extra format entries beyond the number of files are dropped.
        self.assertEqual(
            [['-v', '1', '-t', 'wav'], ['-v', '1', '-t', 'aiff']],
            combine._build_input_format_list(
                [INPUT_WAV, INPUT_WAV], None,
                [['-t', 'wav'], ['-t', 'aiff'], ['-t', 'wav']]))

    def test_lesser_num_fmt(self):
        # Missing format entries fall back to the volume flag only.
        self.assertEqual(
            [['-v', '1', '-t', 'wav'], ['-v', '1', '-t', 'aiff'], ['-v', '1']],
            combine._build_input_format_list(
                [INPUT_WAV, INPUT_WAV, INPUT_WAV], None,
                [['-t', 'wav'], ['-t', 'aiff']]))
class TestCombinePreview(unittest.TestCase):
    """``Combiner.preview`` plays without error and returns None."""

    def setUp(self):
        self.cbn = new_combiner()
        self.cbn.trim(0, 0.1)  # keep playback short

    def test_valid(self):
        self.assertIsNone(self.cbn.preview([INPUT_WAV, INPUT_WAV], 'mix'))

    def test_valid_vol(self):
        self.assertIsNone(
            self.cbn.preview([INPUT_WAV, INPUT_WAV], 'mix', [1.0, 0.5]))

    def test_valid_pathlib(self):
        self.assertIsNone(
            self.cbn.preview([Path(INPUT_WAV), Path(INPUT_WAV)], 'mix'))
class TestBuildInputArgs(unittest.TestCase):
    """``_build_input_args`` interleaves format flags with file paths."""

    def test_unequal_length(self):
        # One format entry per input file is required.
        self.assertRaises(
            ValueError, combine._build_input_args,
            [INPUT_WAV, INPUT_WAV], [['-v', '1']])

    def test_basic(self):
        self.assertEqual(
            ['-v', '1', INPUT_WAV, '-v', '1', INPUT_WAV],
            combine._build_input_args(
                [INPUT_WAV, INPUT_WAV], [['-v', '1'], ['-v', '1']]))

    def test_handles_pathlib(self):
        # Path inputs are stringified to the same argument list.
        self.assertEqual(
            ['-v', '1', INPUT_WAV, '-v', '1', INPUT_WAV],
            combine._build_input_args(
                [Path(INPUT_WAV), Path(INPUT_WAV)], [['-v', '1'], ['-v', '1']]))
class TestValidateCombineType(unittest.TestCase):
    """``_validate_combine_type`` accepts known types only."""

    def test_valid(self):
        self.assertIsNone(combine._validate_combine_type('mix'))

    def test_invalid(self):
        self.assertRaises(
            ValueError, combine._validate_combine_type, 'combine')
class TestValidateVolumes(unittest.TestCase):
    """``_validate_volumes`` checks the input_volumes argument."""

    def test_valid_none(self):
        self.assertIsNone(combine._validate_volumes(None))

    def test_valid_list(self):
        self.assertIsNone(combine._validate_volumes([1, 0.1, 3]))

    def test_invalid_type(self):
        # A bare scalar is not accepted — must be a list or None.
        self.assertRaises(TypeError, combine._validate_volumes, 1)

    def test_invalid_vol(self):
        self.assertRaises(
            ValueError, combine._validate_volumes, [1.1, 'z', -0.5, 2])
| |
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
threadedprint.py
================
:author: Ian Bicking
:date: 12 Jul 2004
Multi-threaded printing; allows the output produced via print to be
separated according to the thread.
To use this, you must install the catcher, like::
threadedprint.install()
The installation optionally takes one of three parameters:
default
The default destination for print statements (e.g., ``sys.stdout``).
factory
A function that will produce the stream for a thread, given the
thread's name.
paramwriter
Instead of writing to a file-like stream, this function will be
called like ``paramwriter(thread_name, text)`` for every write.
The thread name is the value returned by
``threading.currentThread().getName()``, a string (typically something
like Thread-N).
You can also submit file-like objects for specific threads, which will
override any of these parameters. To do this, call ``register(stream,
[threadName])``. ``threadName`` is optional, and if not provided the
stream will be registered for the current thread.
If no specific stream is registered for a thread, and no default has
been provided, then an error will occur when anything is written to
``sys.stdout`` (or printed).
Note: the stream's ``write`` method will be called in the thread the
text came from, so you should consider thread safety, especially if
multiple threads share the same writer.
Note: if you want access to the original standard out, use
``sys.__stdout__``.
You may also uninstall this, via::
threadedprint.uninstall()
TODO
----
* Something with ``sys.stderr``.
* Some default handlers. Maybe something that hooks into `logging`.
* Possibly cache the results of ``factory`` calls. This would be a
semantic change.
"""
import threading
import sys
from paste.util import filemixin
class PrintCatcher(filemixin.FileMixin):
    """File-like stdout replacement that routes writes per-thread.

    Exactly one of ``default``, ``factory``, ``paramwriter`` may be given;
    streams registered via :meth:`register` always take precedence over
    that fallback.
    """

    def __init__(self, default=None, factory=None, paramwriter=None,
                 leave_stdout=False):
        # BUG FIX: was ``len(filter(...))``, which fails on Python 3 where
        # filter() returns a lazy iterator; count non-None handlers portably.
        assert sum(x is not None
                   for x in [default, factory, paramwriter]) <= 1, (
            "You can only provide one of default, factory, or paramwriter")
        if leave_stdout:
            assert not default, (
                "You cannot pass in both default (%r) and "
                "leave_stdout=True" % default)
            # Capture the stdout that is current at install time.
            default = sys.stdout
        # Select the fallback writer used when the writing thread has no
        # explicitly registered stream.
        if default:
            self._defaultfunc = self._writedefault
        elif factory:
            self._defaultfunc = self._writefactory
        elif paramwriter:
            self._defaultfunc = self._writeparam
        else:
            self._defaultfunc = self._writeerror
        self._default = default
        self._factory = factory
        self._paramwriter = paramwriter
        self._catchers = {}  # thread name -> registered stream

    def write(self, v, currentThread=threading.currentThread):
        # currentThread is bound as a default argument so the lookup is a
        # fast local access on every write.
        name = currentThread().getName()
        catchers = self._catchers
        # BUG FIX: dict.has_key() was removed in Python 3; use ``in``.
        if name not in catchers:
            self._defaultfunc(name, v)
        else:
            catcher = catchers[name]
            catcher.write(v)

    def _writedefault(self, name, v):
        self._default.write(v)

    def _writefactory(self, name, v):
        self._factory(name).write(v)

    def _writeparam(self, name, v):
        self._paramwriter(name, v)

    def _writeerror(self, name, v):
        assert False, (
            "There is no PrintCatcher output stream for the thread %r"
            % name)

    def register(self, catcher, name=None,
                 currentThread=threading.currentThread):
        """Route writes from thread *name* (default: current) to *catcher*."""
        if name is None:
            name = currentThread().getName()
        self._catchers[name] = catcher

    def deregister(self, name=None,
                   currentThread=threading.currentThread):
        """Stop routing writes for thread *name* (default: current)."""
        if name is None:
            name = currentThread().getName()
        assert name in self._catchers, (
            "There is no PrintCatcher catcher for the thread %r" % name)
        del self._catchers[name]
# Module-level singleton state managed by install()/uninstall().
_printcatcher = None
_oldstdout = None
def install(**kw):
    """Replace sys.stdout with a PrintCatcher (kwargs forwarded to it).

    Idempotent: calling it again while installed is a no-op.
    """
    global _printcatcher, _oldstdout, register, deregister
    if not _printcatcher:
        _oldstdout = sys.stdout
        _printcatcher = sys.stdout = PrintCatcher(**kw)
        # Expose the catcher's (de)register as module-level functions.
        register = _printcatcher.register
        deregister = _printcatcher.deregister
def uninstall():
    """Restore the stdout that was active before install().

    Idempotent: a no-op if nothing is installed.
    """
    global _printcatcher, _oldstdout, register, deregister
    if _printcatcher:
        sys.stdout = _oldstdout
        _printcatcher = _oldstdout = None
        # register/deregister revert to the helpful-error placeholder.
        register = not_installed_error
        deregister = not_installed_error
def not_installed_error(*args, **kw):
    """Placeholder for register/deregister before install() is called.

    NOTE(review): implemented with ``assert``, so it becomes a silent
    no-op under ``python -O``.
    """
    assert False, (
        "threadedprint has not yet been installed (call "
        "threadedprint.install())")
register = deregister = not_installed_error
class StdinCatcher(filemixin.FileMixin):
    """File-like stdin replacement that routes reads per-thread.

    Mirror image of PrintCatcher: exactly one of ``default``, ``factory``,
    ``paramwriter`` may be given; streams registered via :meth:`register`
    take precedence.
    """

    def __init__(self, default=None, factory=None, paramwriter=None):
        # BUG FIX: was ``len(filter(...))``, which fails on Python 3 where
        # filter() returns a lazy iterator; count non-None handlers portably.
        assert sum(x is not None
                   for x in [default, factory, paramwriter]) <= 1, (
            "You can only provide one of default, factory, or paramwriter")
        if default:
            self._defaultfunc = self._readdefault
        elif factory:
            self._defaultfunc = self._readfactory
        elif paramwriter:
            self._defaultfunc = self._readparam
        else:
            self._defaultfunc = self._readerror
        self._default = default
        self._factory = factory
        self._paramwriter = paramwriter
        self._catchers = {}  # thread name -> registered stream

    def read(self, size=None, currentThread=threading.currentThread):
        name = currentThread().getName()
        catchers = self._catchers
        # BUG FIX: the read results were previously discarded (no return);
        # a stdin replacement must hand the data back to the caller.
        # (has_key() also removed in Python 3 — use ``in``.)
        if name not in catchers:
            return self._defaultfunc(name, size)
        return catchers[name].read(size)

    def _readdefault(self, name, size):
        return self._default.read(size)

    def _readfactory(self, name, size):
        return self._factory(name).read(size)

    def _readparam(self, name, size):
        # BUG FIX: referenced ``self._paramreader``, an attribute that is
        # never set; __init__ stores the callable as ``self._paramwriter``.
        return self._paramwriter(name, size)

    def _readerror(self, name, size):
        assert False, (
            "There is no StdinCatcher output stream for the thread %r"
            % name)

    def register(self, catcher, name=None,
                 currentThread=threading.currentThread):
        """Route reads from thread *name* (default: current) to *catcher*."""
        if name is None:
            # BUG FIX: currentThread is a function and must be called;
            # ``currentThread.getName()`` raised AttributeError.
            name = currentThread().getName()
        self._catchers[name] = catcher

    def deregister(self, catcher, name=None,
                   currentThread=threading.currentThread):
        """Stop routing reads for thread *name* (default: current).

        NOTE(review): *catcher* is unused (PrintCatcher.deregister has no
        such parameter) but is kept for backward compatibility.
        """
        if name is None:
            name = currentThread().getName()
        assert name in self._catchers, (
            "There is no StdinCatcher catcher for the thread %r" % name)
        del self._catchers[name]
# Module-level singleton state managed by install_stdin()/uninstall.
_stdincatcher = None
_oldstdin = None
def install_stdin(**kw):
    """Replace sys.stdin with a StdinCatcher (kwargs forwarded to it).

    Idempotent: calling it again while installed is a no-op.
    """
    global _stdincatcher, _oldstdin, register_stdin, deregister_stdin
    if not _stdincatcher:
        _oldstdin = sys.stdin
        _stdincatcher = sys.stdin = StdinCatcher(**kw)
        # Expose the catcher's (de)register as module-level functions.
        register_stdin = _stdincatcher.register
        deregister_stdin = _stdincatcher.deregister
def uninstall_stdin():
    """Restore the stdin that was active before install_stdin().

    BUG FIX: this function was previously also named ``uninstall``,
    silently shadowing the stdout ``uninstall`` defined above; it is
    renamed to match ``install_stdin``/``not_installed_error_stdin``.
    Also fixes the ``_oldstin`` typo in the ``global`` statement, which
    made ``_oldstdin = None`` bind a function local and left the real
    global pointing at a stale stream.
    """
    global _stdincatcher, _oldstdin, register_stdin, deregister_stdin
    if _stdincatcher:
        sys.stdin = _oldstdin
        _stdincatcher = _oldstdin = None
        register_stdin = deregister_stdin = not_installed_error_stdin
def not_installed_error_stdin(*args, **kw):
    """Placeholder for register_stdin/deregister_stdin before install_stdin().

    NOTE(review): implemented with ``assert``, so it becomes a silent
    no-op under ``python -O``.
    """
    assert False, (
        "threadedprint has not yet been installed for stdin (call "
        "threadedprint.install_stdin())")
| |
# coding: utf-8
"""
Jinja template
fully executable
"""
import os
import sys
import argparse
import time
from pprint import pprint
from twBase import * # NOQA
# make it work on headless server
import matplotlib as mpl
mpl.use('Agg')
# utils must be imported after matplotlib, otherwise headless mode does not work
import utils
from utils import * # NOQA
import importlib
importlib.reload(utils)  # presumably to pick up live edits to utils — TODO confirm
# Allow relative imports from directories above the cwd
sys.path.insert(1, os.path.join(sys.path[0], '..'))
np.random.seed(42)  # fixed seed for reproducible sampling
class BestModel(Callback):
    '''Loads the best parameters at end of training'''

    def __init__(self, output_dir, verbose=0):
        self.output_dir = output_dir
        self.verbose = verbose

    def on_train_end(self, logs=None):
        # BUG FIX: logs={} was a shared mutable default argument.
        # assumes a checkpoint callback wrote <name>_weights.hdf5 into
        # output_dir — TODO confirm (AbstractModel checkpoints to *.w.h5).
        weights_file = self.output_dir + self.model.name + '_weights.hdf5'
        self.model.load_weights(weights_file)
class LearningRateTracker(Callback):
    """Prints the effective learning rate at the start of every epoch."""

    def __init__(self):
        # BUG FIX: was misspelled ``init`` and therefore never invoked.
        super().__init__()

    def on_epoch_begin(self, epoch, logs=None):
        # BUG FIX: logs={} was a shared mutable default argument.
        optimizer = self.model.optimizer
        iterations = K.eval(optimizer.iterations)
        if type(optimizer) is SGD:
            # SGD applies time-based decay internally; reproduce it so the
            # printed value is the effective rate, not the base lr.
            lr = K.eval(optimizer.lr * (1. / (1. + optimizer.decay * optimizer.iterations)))
        else:
            lr = K.eval(optimizer.lr)
        print('\nLR: {:.6f}, Iter: {}, Opt: {}\n'.format(lr, iterations, type(optimizer)))
class DecayLearningRate(Callback):
    '''
    Multiplies the learning rate by ``decay`` every ``nb_epoch`` epochs.

    n_epoch = no. of epochs after decay should happen.
    decay = decay value
    '''

    def __init__(self, nb_epoch=10, decay=0.1):
        super().__init__()
        self.n_epoch = nb_epoch
        self.decay = decay

    def on_epoch_begin(self, epoch, logs=None):
        # BUG FIX: logs={} was a shared mutable default argument.
        oldLr = K.eval(self.model.optimizer.lr)
        if epoch > 1 and epoch % self.n_epoch == 0:
            K.set_value(self.model.optimizer.lr, self.decay * oldLr)
        # (the former else-branch rewrote the unchanged lr — a no-op that
        # has been dropped)
def CreateDenseBlock(output_dim=64, dropout=0, activation='relu'):
    """Factory for a Dense -> Dropout -> BatchNorm -> Activation block."""
    def DenseBlock(x):
        log.info("Creating MLP block.", neurons=output_dim, dropout=dropout, act=activation)
        h = Dense(output_dim)(x)
        h = Dropout(dropout)(h)
        h = BatchNormalization()(h)
        return Activation(activation)(h)
    return DenseBlock
def CreateConvBlock1D(nb_filter=64, filter_length=3, dropout=0, activation='relu'):
    """Factory for a ZeroPad -> Conv1D -> Dropout -> BN -> Act -> MaxPool block."""
    def ConvBlock(x):
        log.info("Creating 1d CONV block.", filter=nb_filter, filter_length=filter_length, act=activation, dropout=dropout)
        h = ZeroPadding1D(1)(x)
        h = Convolution1D(nb_filter, filter_length)(h)
        h = Dropout(dropout)(h)
        h = BatchNormalization()(h)
        h = Activation(activation)(h)
        return MaxPooling1D()(h)
    return ConvBlock
class AbstractModel(object):
    """Base class bundling model persistence, compilation, training and
    evaluation.  Subclasses implement create_model() and set self.model."""

    @classmethod  # classmethod always gets class as parameter
    def get_classname(cls):
        """Return the concrete subclass name (used as the artifact prefix)."""
        return cls.__name__

    def __init__(self, P):
        self.P = P  # hyper-parameter Struct
        self.logpath = P.result.path
        # All artifacts (.h5/.json/.png/logs) share this path prefix.
        self.path = P.result.path + self.get_classname()

    def init_model(self):
        """Build the network (subclass hook) and print its summary."""
        self.create_model()
        self.model.summary()

    def save(self):
        """Persist the full model, its weights, and the JSON architecture."""
        self.model.save(self.path + '.h5')
        self.model.save_weights(self.path + '.w.h5')
        with open(self.path + '.json', 'w') as textfile:
            print(self.model.to_json(), file=textfile)
        log.info("model saved.", path=self.path)

    def load(self):
        """Restore architecture + weights previously written by save()."""
        self.load_model(self.path + '.json')
        self.compile()
        # http://stackoverflow.com/questions/41818654/keras-batchnormalization-uninitialized-value
        keras.backend.get_session().run(tf.global_variables_initializer())
        self.load_weights(self.path + '.w.h5')
        log.info("model loaded.", path=self.path)

    def load_model(self, path=None):
        """Load the architecture from a JSON file; exits(1) when missing."""
        if not os.path.exists(path):
            log.error("-E- model not existing:", path=path)
            sys.exit(1)
        with open(path, 'r') as textfile:
            self.model = model_from_json(textfile.read())
        log.info("model definition loaded.", path=path)

    def load_weights(self, path=None):
        """Load weights from an HDF5 file; exits(1) when missing."""
        if not os.path.exists(path):
            log.error("-E- model not existing:", path=path)
            sys.exit(1)
        self.model.load_weights(path)
        log.info("weights loaded.", path=path)

    def compile(self):
        """Compile self.model with the optimizer named in the config."""
        # init optimizer from class name, e.g. ['Adam', {'lr': 0.001}]
        optimizer = getattr(keras.optimizers, self.P.model.optimizer[0])(lr=self.P.model.optimizer[1]['lr'])
        self.model.compile(
            optimizer=optimizer,
            loss=self.P.model.loss_fn,
            metrics=self.P.model.metrics)
        plot(self.model, to_file=self.path+'.png', show_shapes=False, show_layer_names=False)

    def train(self, X_train, y_train, validation_data=None):
        """Fit the model with checkpoint/early-stop/lr callbacks.

        Returns (model, history) on normal completion, but only the model
        after a KeyboardInterrupt — callers must handle both shapes.
        """
        global_start_time = time.time()
        log.info("Start training.",
                 e=self.P.training.nb_epoch,
                 bs=self.P.training.batch_size,
                 sample=self.P.data.sample,
                 shuffle=self.P.training.shuffle)
        try:
            history = self.model.fit(
                X_train, y_train,
                batch_size=self.P.training.batch_size,
                nb_epoch=self.P.training.nb_epoch,
                validation_data=validation_data,
                verbose=self.P.training.keras_verbosity,
                callbacks=[
                    ModelCheckpoint(filepath=self.path+'.w.h5', save_best_only=True),
                    EarlyStopping(monitor='val_loss', min_delta=0, patience=20, verbose=0),
                    CSVLogger(self.logpath+self.get_classname()+'.log.csv', separator=';', append=True),
                    LearningRateTracker(),
                    DecayLearningRate(nb_epoch=self.P.training.nb_epoch_lr_decay)
                ],
                shuffle=self.P.training.shuffle)
        except KeyboardInterrupt:
            print('Training duration (s) : ', time.time() - global_start_time)
            log.warn('-W- interrupted')
            # Keep a snapshot of the partially trained model.
            self.model.save(self.path+'.interrupted.h5')
            return self.model
        print('Training duration (s) : ', time.time() - global_start_time)
        return self.model, history

    def evaluate(self, X, y):
        """Return the compiled metrics evaluated on (X, y)."""
        # BUG FIX: log message typo ("Evaluationg") corrected.
        log.info("Evaluating model.", xshape=X.shape, yshape=y.shape)
        return self.model.evaluate(X, y,
                                   batch_size=self.P.training.batch_size,
                                   verbose=self.P.training.keras_verbosity)

    def predict(self, X, batch_size=64, verbose=1):
        """Return model predictions for X."""
        log.info("Predicting.", shape=X.shape)
        return self.model.predict(X,
                                  batch_size,
                                  verbose)

    def kaggle(self, X, submission_file='submission.csv'):
        """Write a Kaggle submission CSV of predictions on X.

        BUG FIX: ``submission_file`` was a free (undefined) global that
        raised NameError; it is now a parameter with a default.
        """
        preds = self.predict(X, batch_size=64, verbose=1)
        submission = pd.DataFrame(data=preds, index=X[:,0], columns=["Expected"])  # NOQA
        submission.to_csv(submission_file, index_label='Id')

    def set_lr(self, lr, decay=0):
        """Override the optimizer's learning rate (and decay) in place."""
        oldLr = K.eval(self.model.optimizer.lr)
        oldDecay = K.eval(self.model.optimizer.decay)
        K.set_value(self.model.optimizer.lr, lr)
        K.set_value(self.model.optimizer.decay, decay)
        newLr = K.eval(self.model.optimizer.lr)
        log.info('Set lr and decay', newLr=newLr, oldLr=oldLr, oldDecay=oldDecay, lr=lr, decay=decay)
class DenseModel(AbstractModel):
    """Fully-connected feed-forward network built from the config topology."""

    def __init__(self, P):
        super().__init__(P)
        self.model = None

    def create_model(self):
        cfg = self.P.model
        inputs = Input(shape=cfg.input_shape)
        hidden = BatchNormalization()(inputs)
        for layer_idx, width in enumerate(cfg.topology):
            hidden = CreateDenseBlock(
                width,
                dropout=cfg.dropout[0],
                activation=cfg.hidden_activation[layer_idx])(hidden)
        preds = Dense(cfg.output_topology, activation=cfg.output_activation)(hidden)
        self.model = Model(input=inputs, output=preds)
        self.compile()
        # http://stackoverflow.com/questions/41818654/keras-batchnormalization-uninitialized-value
        keras.backend.get_session().run(tf.global_variables_initializer())
class ConvModel(AbstractModel):
    """1-D convolutional network followed by a dense head."""

    def __init__(self, P):
        super().__init__(P)
        self.model = None

    def create_model(self):
        cfg = self.P.model
        log.info("Create Model", shape=cfg.input_shape)
        inputs = Input(shape=cfg.input_shape)
        hidden = BatchNormalization()(inputs)
        for layer_idx, width in enumerate(cfg.topology):
            log.info("Create Topology", neurons=width)
            hidden = CreateConvBlock1D(
                width,
                dropout=cfg.dropout[0],
                activation=cfg.hidden_activation[layer_idx])(hidden)
        hidden = Flatten()(hidden)
        log.info("Create Topology", dense=1024)
        hidden = CreateDenseBlock(1024)(hidden)
        preds = Dense(cfg.output_topology, activation=cfg.output_activation)(hidden)
        self.model = Model(input=inputs, output=preds)
        self.compile()
        # http://stackoverflow.com/questions/41818654/keras-batchnormalization-uninitialized-value
        keras.backend.get_session().run(tf.global_variables_initializer())
class LstmModel(AbstractModel):
    """Stacked-LSTM network with a dense head."""

    def __init__(self, P):
        super().__init__(P)
        self.model = None

    def create_model(self):
        cfg = self.P.model
        log.info("Create Model", shape=cfg.input_shape)
        inputs = Input(shape=cfg.input_shape)
        hidden = BatchNormalization()(inputs)
        for layer_idx, width in enumerate(cfg.topology):
            log.info("Create Topology", neurons=width)
            hidden = LSTM(width, return_sequences=True)(hidden)
            hidden = Dropout(cfg.dropout[0])(hidden)
            hidden = BatchNormalization()(hidden)
        hidden = Flatten()(hidden)
        for layer_idx, width in enumerate(cfg.final_dense_layer):
            hidden = CreateDenseBlock(
                width,
                dropout=cfg.dropout[0],
                activation=cfg.final_dense_layer_activation[layer_idx])(hidden)
        preds = Dense(cfg.output_topology, activation=cfg.output_activation)(hidden)
        self.model = Model(input=inputs, output=preds)
        self.compile()
        # http://stackoverflow.com/questions/41818654/keras-batchnormalization-uninitialized-value
        keras.backend.get_session().run(tf.global_variables_initializer())
def create_data(P):
    """Load train/valid arrays, optionally squeezing and subsampling."""
    t0 = time.time()
    data = _read_input(P)
    if P.data.dense:
        log.info("Squeezing to dense")
        data[0] = np.squeeze(data[0])
        data[2] = np.squeeze(data[2])
    if P.data.sample:
        log.info("Subsampling", sample=P.data.sample)
        # Keep roughly 30% of the training rows (those with mask value 0).
        mask = np.random.choice([0, 1], size=len(data[0]), p=[.3, .7])
        data[0] = data[0][mask == 0]  # NOQA
        data[1] = data[1][mask == 0]  # NOQA
    log.info("Loading completed", time=time.time() - t0)
    return data
def _read_input(P):
    """Load [X_train, y_train, X_valid, y_valid]; exits(1) on a missing file."""
    log.info("Reading data", path=P.data.path.dirPath)
    base = P.data.path.dirPath
    paths = [
        base + P.data.path.X_train,
        base + P.data.path.y_train,
        base + P.data.path.X_valid,
        base + P.data.path.y_valid,
    ]
    data = []
    for path in paths:
        if not os.path.exists(path):
            log.error("-E- data does not exist.", path=path)
            sys.exit(1)
        log.info("Loading data", path=path)
        data.append(np.load(path))
    return data
def _read_test(P):
    """Load [X_test, y_test]; exits(1) on a missing file."""
    log.info("Reading data", path=P.data.path.dirPath)
    base = P.data.path.dirPath
    paths = [
        base + P.data.path.X_test,
        base + P.data.path.y_test,
    ]
    data = []
    for path in paths:
        if not os.path.exists(path):
            log.error("-E- data does not exist.", path=path)
            sys.exit(1)
        log.info("Loading data", path=path)
        data.append(np.load(path))
    return data
def _init_model(P):
    """Instantiate the model class named in P.model.model from this module."""
    log.info("Init model", model=P.model.model)
    model_cls = getattr(sys.modules[__name__], P.model.model)
    return model_cls(P)
def eval_model(P):
    """Load a saved model and report its metrics on the test split."""
    # BUG FIX: referenced the module-global ``args`` (undefined when this
    # module is imported); use the copy carried in P, as run_model does.
    if P.args.test:
        log.info('-L- Logging: {0}'.format('->'*10))
        pprint(P.args)
        return
    modelObj = _init_model(P)
    log.info("Loading model")
    modelObj.load()
    X, y = _read_test(P)
    result = modelObj.evaluate(X, y)
    print("\n\n--------------- Accuracy on Test Data ---------------")
    # Plain loop instead of a side-effect list comprehension.
    for metric_name, value in zip(modelObj.model.metrics_names, result):
        print("{}: {}".format(metric_name, value))
def predict_model(P, data):
    """Placeholder for the ``predict_model`` subcommand (not yet implemented)."""
    pass
def run_model(P):
    """Train (or resume) the configured model and plot validation loss."""
    if P.args.test:
        log.info('-L- Logging: {0}'.format('->'*10))
        # BUG FIX: pprint'ed the undefined global ``args``.
        pprint(P.args)
        return
    # BUG FIX: the training arrays were referenced as bare globals that
    # are never defined anywhere; load them explicitly.
    X_train, y_train, X_valid, y_valid = create_data(P)
    # init model class, but no model creation yet
    modelObj = _init_model(P)
    if P.model.load:
        log.info("Loading model")
        modelObj.load()
    else:
        log.info("Creating model", model=P.model.model)
        modelObj.init_model()
    try:
        lr = P.model.optimizer[1]['lr']
        modelObj.set_lr(lr, lr / P.training.batch_size)
    except AttributeError:
        # No optimizer config: keep the compiled model's current lr.
        # (BUG FIX: ``lr`` could previously be unbound after this except,
        # making the following set_lr call raise NameError.)
        pass
    hist = modelObj.train(
        X_train,
        y_train,
        validation_data=(X_valid, y_valid)
    )
    modelObj.save()
    log.info("Training completed.", optimizer=type(modelObj.model.optimizer), epoch=P.training.nb_epoch)
    pprint(hist[1].history['val_loss'])
    plt.plot(hist[1].history['val_loss'])
    plt.savefig(modelObj.logpath + modelObj.get_classname() + '.training.png', bbox_inches='tight')
def get_parameters(args):
    """Build the run-configuration Struct, then apply CLI overrides.

    The hard-coded ``params`` dict below is the baseline; a jinja pipeline
    may inject generated parameters at the marked line, and selected
    command-line flags override both.
    """
    # params for testing of model
    params = {
        "args": {
            "test": args.test
        },
        "data": {
            "path": {
                #"dirPath": "./data/1000_1/sample/",
                "dirPath": "./data/complatt/",
                "X_train": "X_train.npy",
                "y_train": "y_train.npy",
                "X_valid": "X_valid.npy",
                "y_valid": "y_valid.npy",
                "X_test": "X_test.npy",
                "y_test": "y_test.npy",
            },
            "sample": False,
            "dense": False
        },
        "model": {
            "model": "DenseModel",
            #"model": "ConvModel",
            #"model": "LstmModel",
            "input_shape": (47,),
            #"input_shape": (300, 1),
            "load": args.load,
            "topology": [512, 256],
            #"topology": [128, 64, 64],
            "hidden_activation": ['relu', 'relu', 'relu', 'relu', 'relu', 'relu'],
            "final_dense_layer": [64],
            "final_dense_layer_activation": ['relu'],
            "output_topology": 1,
            "output_activation": 'linear',
            "dropout": [0],
            "loss_fn": 'mae',
            "metrics": ['mae'],
            "optimizer": ['Adam', {"lr": 0.001}]
        },
        "training": {
            "device": "/cpu:0",
            "batch_size": 2 ** 10,
            "tracking_epoch_index": 1,
            "nb_epoch": 5,
            "nb_epoch_lr_decay": 50,
            "shuffle": False,
            "keras_verbosity": 1
        },
        "result": {
            "path": "./result/",
            "file_path": __file__,
            "save_train_log": True,
            "save_valid_log": True,
            "save_activations": True
        }
    }
    # params generated by jinja
    ###{{params}}###
    P = Struct(params)
    ### Overwrite hard coded conf with cmdline params
    # must not have default in argsparse
    # hasattr guards: these flags exist only on some subcommands.
    if hasattr(args, 'epoch'):
        if args.epoch is not None:
            P.training.nb_epoch = args.epoch
    if hasattr(args, 'batch_size'):
        if args.batch_size is not None:
            P.training.batch_size = args.batch_size
    if args.sample:
        P.data.sample = args.sample
    if hasattr(args, 'set_lr'):
        if args.set_lr is not None:
            P.model.optimizer[1]['lr'] = args.set_lr
    # Make sure the result directory exists before anything writes to it.
    if not os.path.exists(P.result.path):
        os.makedirs(P.result.path)
    return P
def process_command_line(argv):
    """Parse *argv* and return the argparse namespace (subcommand required)."""
    top = argparse.ArgumentParser(description="Run and Train Model")
    subparsers = top.add_subparsers(title='subcommands', description='valid subcommands', help='additional help', dest='subcommands')
    # A subcommand must always be supplied.
    subparsers.required = True
    # Global / automatic options shared by all subcommands.
    #top.add_argument("-i", "--isDense", help=argparse.SUPPRESS, action="store_true")
    top.add_argument("-s", "--sample", help="use only sample subset", action="store_true")
    top.add_argument("-t", "--test", help="run only function header", action="store_true")
    # 'run_model' subcommand: train the model.
    run_p = subparsers.add_parser('run_model', aliases=['rm'])
    run_p.add_argument("--set_lr", help="set_lr", type=float)
    run_p.add_argument("--load", help="load model", action="store_true")
    run_p.add_argument("-e", "--epoch", help="number of epochs", type=int)
    run_p.add_argument("-b", "--batch_size", help="batch_size", type=int)
    run_p.set_defaults(func=run_model)
    # 'evaluate_model' subcommand: evaluate a previously trained model.
    eval_p = subparsers.add_parser('evaluate_model', aliases=['ev'])
    eval_p.add_argument("--load", help=argparse.SUPPRESS, action="store_false")
    eval_p.add_argument("-b", "--batch_size", help="batch_size", type=int, default=64)
    eval_p.set_defaults(func=eval_model)
    # 'predict_model' subcommand: run inference on an input array.
    pred_p = subparsers.add_parser('predict_model', aliases=['pr'])
    pred_p.add_argument("--load", help="load model", type=str, required=True)
    pred_p.add_argument("data", help="x-array data", type=str)
    pred_p.add_argument("-b", "--batch_size", help="batch_size", type=int, default=64)
    pred_p.set_defaults(func=predict_model)
    return top.parse_args(argv)
if __name__ == '__main__':
    # Script entry point: parse args, configure logging, build config,
    # load data, then dispatch to the chosen subcommand handler.
    twStart()  # timing wrapper start (defined elsewhere in this file — TODO confirm)
    args = process_command_line(sys.argv[1:])
    # Route all log output to stderr at DEBUG level.
    logging.basicConfig(format="", stream=sys.stderr, level=logging.DEBUG)
    logcfg(sys.stderr, logging.DEBUG, RenderEnum.console)
    log = structlog.get_logger(__name__)
    P = get_parameters(args)
    log.info("Data:", sample=P.data.sample, bs=P.training.batch_size, epoch=P.training.nb_epoch)
    # NOTE(review): the returned arrays appear unused below; presumably the
    # subcommand handlers read these module-level globals — confirm.
    X_train, y_train, X_valid, y_valid = create_data(P)
    ### run the subcommand
    args.func(P)
    twEnd()  # timing wrapper end
    sys.exit(0)
| |
from django.test import TestCase
from django.core.exceptions import PermissionDenied
from django.contrib.auth.models import Group
from hs_access_control.models import PrivilegeCodes
from hs_core import hydroshare
from hs_core.testing import MockIRODSTestCaseMixin
from hs_access_control.tests.utilities import global_reset, \
assertResourceUserState, assertUserResourceState, \
assertUserGroupState, assertGroupUserState, \
assertGroupResourceState, assertResourceGroupState
class BasicFunction(MockIRODSTestCaseMixin, TestCase):
    """Test basic access-control sharing/unsharing functions.

    Fixture: three regular users (alva, george, john), one superuser
    (admin), one resource 'bikes' owned by george, and two groups
    ('bikers', 'harpers') created by george.
    """

    def setUp(self):
        super(BasicFunction, self).setUp()
        global_reset()
        self.group, _ = Group.objects.get_or_create(name='Resource Author')
        self.alva = hydroshare.create_account(
            'alva@gmail.com',
            username='alva',
            first_name='alva',
            last_name='couch',
            superuser=False,
            groups=[]
        )
        self.george = hydroshare.create_account(
            'george@gmail.com',
            username='george',
            first_name='george',
            last_name='miller',
            superuser=False,
            groups=[]
        )
        self.john = hydroshare.create_account(
            'john@gmail.com',
            username='john',
            first_name='john',
            last_name='miller',
            superuser=False,
            groups=[]
        )
        self.admin = hydroshare.create_account(
            'admin@gmail.com',
            username='admin',
            first_name='first_name_admin',
            last_name='last_name_admin',
            superuser=True,
            groups=[]
        )
        # george creates a resource 'bikes'
        self.bikes = hydroshare.create_resource(
            resource_type='GenericResource',
            owner=self.george,
            title='Bikes',
            metadata=[],
        )
        # george creates a group 'bikers'
        self.bikers = self.george.uaccess\
            .create_group(title='Bikers', description="We are the bikers")
        # george creates a group 'harpers'
        self.harpers = self.george.uaccess\
            .create_group(title='Harpers', description="We are the harpers")

    def test_matrix_testing(self):
        """ Test that matrix testing routines function as believed """
        george = self.george
        alva = self.alva
        john = self.john
        bikes = self.bikes
        bikers = self.bikers
        harpers = self.harpers
        # Initial state: george owns everything; alva/john have nothing.
        assertResourceUserState(self, bikes, [george], [], [])
        assertUserResourceState(self, george, [bikes], [], [])
        assertUserResourceState(self, alva, [], [], [])
        assertUserResourceState(self, john, [], [], [])
        assertUserGroupState(self, george, [harpers, bikers], [], [])
        assertUserGroupState(self, alva, [], [], [])
        assertUserGroupState(self, john, [], [], [])
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.CHANGE)
        assertResourceUserState(self, bikes, [george], [alva], [])
        assertUserResourceState(self, george, [bikes], [], [])
        assertUserResourceState(self, alva, [], [bikes], [])
        assertUserResourceState(self, john, [], [], [])
        george.uaccess.share_resource_with_user(
            bikes, john, PrivilegeCodes.VIEW)
        assertResourceUserState(self, bikes, [george], [alva], [john])
        assertUserResourceState(self, george, [bikes], [], [])
        assertUserResourceState(self, alva, [], [bikes], [])
        assertUserResourceState(self, john, [], [], [bikes])
        # Setting the immutable flag demotes CHANGE holders to VIEW.
        bikes.raccess.immutable = True
        bikes.raccess.save()
        assertResourceUserState(
            self, bikes, [george], [], [alva, john])  # squashes CHANGE
        assertUserResourceState(self, george, [bikes], [], [])
        # immutable squashes CHANGE
        assertUserResourceState(self, alva, [], [], [bikes])
        assertUserResourceState(self, john, [], [], [bikes])
        assertGroupUserState(self, bikers, [george], [], [])
        assertGroupUserState(self, harpers, [george], [], [])
        assertUserGroupState(self, george, [bikers, harpers], [], [])
        assertUserGroupState(self, alva, [], [], [])
        assertUserGroupState(self, john, [], [], [])
        george.uaccess.share_group_with_user(
            bikers, alva, PrivilegeCodes.CHANGE)
        assertGroupUserState(self, bikers, [george], [alva], [])
        assertGroupUserState(self, harpers, [george], [], [])
        assertUserGroupState(self, george, [bikers, harpers], [], [])
        assertUserGroupState(self, alva, [], [bikers], [])
        assertUserGroupState(self, john, [], [], [])
        george.uaccess.share_group_with_user(bikers, john, PrivilegeCodes.VIEW)
        assertGroupUserState(self, bikers, [george], [alva], [john])
        assertGroupUserState(self, harpers, [george], [], [])
        assertUserGroupState(self, george, [bikers, harpers], [], [])
        assertUserGroupState(self, alva, [], [bikers], [])
        assertUserGroupState(self, john, [], [], [bikers])
        assertResourceGroupState(self, bikes, [], [])
        assertGroupResourceState(self, bikers, [], [])
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        # immutable squashes state
        assertResourceGroupState(self, bikes, [], [bikers])
        # immutable squashes state
        assertGroupResourceState(self, bikers, [], [bikes])
        bikes.raccess.immutable = False
        bikes.raccess.save()
        # without immutable, CHANGE returns
        assertResourceGroupState(self, bikes, [bikers], [])
        # without immutable, CHANGE returns
        assertGroupResourceState(self, bikers, [bikes], [])

    def test_share(self):
        """Sharing/unsharing by owners, non-owners and the django admin."""
        bikes = self.bikes
        harpers = self.harpers
        bikers = self.bikers
        george = self.george
        alva = self.alva
        admin = self.admin
        john = self.john
        assertResourceUserState(self, bikes, [george], [], [])
        assertUserResourceState(self, george, [bikes], [], [])
        assertUserResourceState(self, alva, [], [], [])
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.OWNER)
        assertResourceUserState(self, bikes, [george, alva], [], [])
        assertUserResourceState(self, george, [bikes], [], [])
        assertUserResourceState(self, alva, [bikes], [], [])
        # test a user can downgrade (e.g., from OWNER to CHANGE) his/her access
        # privilege
        alva.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.CHANGE)
        assertResourceUserState(self, bikes, [george], [alva], [])
        assertUserResourceState(self, george, [bikes], [], [])
        assertUserResourceState(self, alva, [], [bikes], [])
        # unshare bikes
        george.uaccess.unshare_resource_with_user(bikes, alva)
        assertResourceUserState(self, bikes, [george], [], [])
        assertUserResourceState(self, george, [bikes], [], [])
        assertUserResourceState(self, alva, [], [], [])
        assertGroupResourceState(self, bikers, [], [])
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.VIEW)
        assertGroupResourceState(self, bikers, [], [bikes])
        george.uaccess.share_resource_with_group(
            bikes, harpers, PrivilegeCodes.CHANGE)
        assertGroupResourceState(self, harpers, [bikes], [])
        george.uaccess.share_group_with_user(
            harpers, alva, PrivilegeCodes.CHANGE)
        assertUserGroupState(self, alva, [], [harpers], [])
        # isolated from group privilege CHANGE
        assertUserResourceState(self, alva, [], [], [])
        assertGroupResourceState(self, harpers, [bikes], [])
        george.uaccess.unshare_group_with_user(harpers, alva)
        # isolated from group privilege CHANGE
        assertUserResourceState(self, alva, [], [], [])
        george.uaccess.unshare_resource_with_group(bikes, harpers)
        assertGroupResourceState(self, harpers, [], [])
        # test upgrade privilege by non owners
        # let george (owner) grant change privilege to alva (non owner)
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.CHANGE)
        assertUserResourceState(self, alva, [], [bikes], [])
        # let alva (non owner) grant view privilege to john (non owner)
        alva.uaccess.share_resource_with_user(
            bikes, self.john, PrivilegeCodes.VIEW)
        assertUserResourceState(self, john, [], [], [bikes])
        assertResourceUserState(self, bikes, [george], [alva], [john])
        # let alva (non owner) grant change privilege (upgrade) to john (non
        # owner)
        alva.uaccess.share_resource_with_user(
            bikes, self.john, PrivilegeCodes.CHANGE)
        assertUserResourceState(self, john, [], [bikes], [])
        assertResourceUserState(self, bikes, [george], [alva, john], [])
        # test django admin has ownership permission over any resource when not
        # owning a resource
        self.assertFalse(admin.uaccess.owns_resource(bikes))
        self.assertEqual(
            bikes.raccess.get_effective_privilege(admin),
            PrivilegeCodes.OWNER)
        # test django admin can always view/change or delete any resource
        self.assertTrue(admin.uaccess.can_view_resource(bikes))
        self.assertTrue(admin.uaccess.can_change_resource(bikes))
        self.assertTrue(admin.uaccess.can_delete_resource(bikes))
        # test django admin can change resource flags
        self.assertTrue(admin.uaccess.can_change_resource_flags(bikes))
        # test django admin can share any resource with all possible permission
        # types
        self.assertTrue(
            admin.uaccess.can_share_resource(
                bikes, PrivilegeCodes.OWNER))
        self.assertTrue(
            admin.uaccess.can_share_resource(
                bikes, PrivilegeCodes.CHANGE))
        self.assertTrue(
            admin.uaccess.can_share_resource(
                bikes, PrivilegeCodes.VIEW))
        # test django admin can share a resource with a specific user
        admin.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.OWNER)
        assertResourceUserState(self, bikes, [george, alva], [john], [])
        admin.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.CHANGE)
        assertResourceUserState(self, bikes, [george], [john, alva], [])
        admin.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.VIEW)
        assertResourceUserState(self, bikes, [george], [john], [alva])
        # test django admin can unshare a resource with a specific user
        admin.uaccess.unshare_resource_with_user(bikes, alva)
        assertResourceUserState(self, bikes, [george], [john], [])
        # test django admin can share a group with a user
        self.assertEqual(bikers.gaccess.members.count(), 1)
        self.assertFalse(admin.uaccess.owns_group(bikers))
        admin.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.OWNER)
        self.assertEqual(alva.uaccess.owned_groups.count(), 1)
        self.assertEqual(bikers.gaccess.members.count(), 2)
        # test django admin can share resource with a group
        self.assertFalse(
            admin.uaccess.can_share_resource_with_group(
                bikes, harpers, PrivilegeCodes.OWNER))
        self.assertTrue(
            admin.uaccess.can_share_resource_with_group(
                bikes, harpers, PrivilegeCodes.CHANGE))
        admin.uaccess.share_resource_with_group(
            bikes, harpers, PrivilegeCodes.CHANGE)
        self.assertTrue(bikes in harpers.gaccess.edit_resources)
        self.assertTrue(
            admin.uaccess.can_share_resource_with_group(
                bikes, harpers, PrivilegeCodes.VIEW))
        admin.uaccess.share_resource_with_group(
            bikes, harpers, PrivilegeCodes.VIEW)
        self.assertTrue(bikes in harpers.gaccess.view_resources)
        # test django admin can unshare a user with a group
        self.assertTrue(
            admin.uaccess.can_unshare_group_with_user(
                bikers, alva))
        admin.uaccess.unshare_group_with_user(bikers, alva)
        # BUG FIX: was assertTrue(count, 1), which always passes because the
        # second argument of assertTrue is the failure *message*, not an
        # expected value. assertEqual actually checks membership shrank to 1.
        self.assertEqual(bikers.gaccess.members.count(), 1)
        self.assertEqual(alva.uaccess.owned_groups.count(), 0)

    def test_share_inactive_user(self):
        """
        Inactive grantor can't grant permission
        Inactive grantee can't be granted permission
        """
        george = self.george
        alva = self.alva
        john = self.john
        bikes = self.bikes
        self.assertEqual(
            bikes.raccess.get_effective_privilege(alva),
            PrivilegeCodes.NONE)
        # inactive users can't be granted access
        # set john to an inactive user
        john.is_active = False
        john.save()
        with self.assertRaises(PermissionDenied):
            george.uaccess.share_resource_with_user(
                bikes, john, PrivilegeCodes.CHANGE)
        john.is_active = True
        john.save()
        # inactive grantor can't grant access
        # let's first grant John access privilege
        george.uaccess.share_resource_with_user(
            bikes, john, PrivilegeCodes.CHANGE)
        self.assertEqual(
            bikes.raccess.get_effective_privilege(john),
            PrivilegeCodes.CHANGE)
        john.is_active = False
        john.save()
        with self.assertRaises(PermissionDenied):
            john.uaccess.share_resource_with_user(
                bikes, alva, PrivilegeCodes.VIEW)
| |
#!/usr/bin/env python
# Copyright (c) 2017 Vivaldi Technologies AS. All rights reserved
"""
This script uses the vivaldi section of prefs_definitions.json in order to
generate a list of preference constants for the various preference paths
declared in that file.
It then uses both the vivaldi and chromium sections to build a list of
preferences that the javascript side is allowed to read from the C++ side.
"""
import argparse
import json
import os
import re
# Boilerplate comment emitted at the top of every generated file; expects an
# 'origin_name' key (the source JSON file name) when %-formatted.
PREFS_GEN_FILE_HEAD_TEMPLATE = """\
// Copyright (c) 2017 Vivaldi Technologies AS. All rights reserved
//
// This file was automatically generated by prefs_files_generator.py from
// %(origin_name)s. Do not edit.
"""
# Template for the generated pref-names header: extern declarations for each
# pref constant plus the mergeable-lists table.
PREFS_NAMES_H_TEMPLATE = """\
%(top_comments)s
#ifndef %(header_guard)s
#define %(header_guard)s
#include "base/strings/string_piece.h"
namespace vivaldiprefs {
%(pref_names)s
extern const char kPlatformDefaultKeyName[];
extern const base::StringPiece g_mergeable_lists[%(mergeable_lists_count)d];
} // namespace vivaldiprefs
#endif // %(header_guard)s
"""
# Template for the generated pref-names .cc file: constant definitions and
# the g_mergeable_lists table contents.
PREFS_NAMES_CC_TEMPLATE = """\
%(top_comments)s
#include "%(header_name)s"
namespace vivaldiprefs {
%(pref_constants)s
const char kPlatformDefaultKeyName[] = "default_%(target_os)s";
const base::StringPiece g_mergeable_lists[] = {
%(mergeable_lists)s};
} // namespace vivaldiprefs
"""
# Template for the generated enums header (one enum class per enum pref).
PREFS_ENUMS_H_TEMPLATE = """\
%(top_comments)s
#ifndef %(header_guard)s
#define %(header_guard)s
namespace vivaldiprefs {
%(pref_enums)s
} // namespace vivaldiprefs
#endif // %(header_guard)s
"""
# Template for a single C++ enum class definition.
ENUM_DEFINITION_TEMPLATE = """\
enum class %(enum_name)s {
%(enum_values)s
};
"""
# Template for a single enumerator line inside an enum class.
ENUM_VALUE_TEMPLATE = """\
k%(name)s = %(value)s,"""
def generate_prefs_list(current_path, prefs_definitions):
    """Recursively flatten a preference-definition tree into a list of dicts.

    *prefs_definitions* is either an interior node (a dict of child
    definitions, no 'type' key) or a leaf preference (has a 'type' key).
    Each returned dict carries: name, path, comment, type, syncable,
    sync_method, and for enum prefs also enum (C++ enum name) and
    enum_values.

    Raises TypeError for an unknown sync_method, or for sync_method
    'merge' on a non-list preference.
    """
    if 'type' not in prefs_definitions:
        # Interior node: recurse into each child, extending the dotted path.
        prefs = []
        for definition in prefs_definitions:
            prefs.extend(generate_prefs_list(current_path + '.' + definition,
                                             prefs_definitions[definition]))
        return prefs
    # Leaf node: build the CamelCase constant stem from the path components
    # after the root segment, e.g. "vivaldi.foo_bar.baz" -> "FooBarBaz".
    pref_name = ''.join([part.capitalize() for part in
                         re.split('[._]+', current_path)[1:]])
    result = {}
    if prefs_definitions['type'] == 'enum':
        result["enum"] = pref_name + 'Values'
        result["enum_values"] = prefs_definitions['enum_values']
    result['name'] = 'k' + pref_name
    result['path'] = current_path
    result['comment'] = prefs_definitions.get('description', None)
    result['type'] = prefs_definitions['type']
    result['syncable'] = prefs_definitions['syncable']
    sync_method = prefs_definitions.get('sync_method', 'copy')
    allowed_sync_methods = ['copy', 'merge']
    if sync_method not in allowed_sync_methods:
        raise TypeError(
            'Unknown sync method "{0}" in {1}. The value must be one of ({2})'.
            format(sync_method, pref_name, ' '.join(allowed_sync_methods))
        )
    if sync_method == 'merge' and result['type'] != 'list':
        # BUG FIX: the original applied .format() only to the second of two
        # '+'-concatenated literals, leaving {0}/{1} unformatted in the
        # message. Adjacent string literals concatenate at parse time, so
        # .format() now formats the whole message.
        raise TypeError(
            '"{0}" as sync_method in {1} is not supported for '
            'preference type {2}.'.format(sync_method, pref_name,
                                          result['type'])
        )
    result['sync_method'] = sync_method
    return [result]
def main():
    """Main script code"""
    # Parse command-line options; the file arguments are opened by argparse
    # (FileType) so the handles below are ready for json.load / .write.
    parser = argparse.ArgumentParser(
        description='Generates relevant c++ files from vivalid_prefs.json and chrome_prefs.json')
    parser.add_argument('--prefs-definitions', type=argparse.FileType('r'))
    parser.add_argument('--prefs-names-h', type=argparse.FileType('w'))
    parser.add_argument('--prefs-names-cc', type=argparse.FileType('w'))
    parser.add_argument('--prefs-enums-h', type=argparse.FileType('w'))
    parser.add_argument('--root')
    parser.add_argument('--target-os')
    args = parser.parse_args()
    prefs_definitions = json.load(args.prefs_definitions)
    # Flatten the 'vivaldi' section of the JSON into pref descriptors and
    # sort by constant name so the generated output is deterministic.
    prefs_list = generate_prefs_list('vivaldi', prefs_definitions['vivaldi'])
    prefs_list.sort(key=lambda i: i["name"])
    if not re.match('^(android|linux|mac|win)$', args.target_os):
        raise TypeError('unsupported target-os - ' + args.target_os)
    # Names of syncable list prefs that are merged (not copied) on sync;
    # these populate the g_mergeable_lists table.
    mergeable_lists = [
        pref['name'] for pref in prefs_list
        if pref['type'] == 'list' and pref['syncable'] and
        pref['sync_method'] == 'merge'
    ]
    top_comments = PREFS_GEN_FILE_HEAD_TEMPLATE % {
        "origin_name": args.prefs_definitions.name
    }
    # Header-guard macros are derived from each header's path relative to
    # --root, uppercased with separators turned into underscores.
    relative_prefs_names_header = os.path.relpath(os.path.normpath(
        args.prefs_names_h.name), args.root)
    relative_prefs_enums_header = os.path.relpath(os.path.normpath(
        args.prefs_enums_h.name), args.root)
    prefs_names_header_guard = '_'.join(
        [part.upper() for part in re.split('[./\\\\]+', relative_prefs_names_header)]) + '_'
    prefs_enums_header_guard = '_'.join(
        [part.upper() for part in re.split('[./\\\\]+', relative_prefs_enums_header)]) + '_'
    # Emit the pref-names header: one extern declaration per pref, each
    # optionally preceded by its description comment.
    args.prefs_names_h.write(PREFS_NAMES_H_TEMPLATE % {
        "top_comments": top_comments,
        "header_guard": prefs_names_header_guard,
        "pref_names":
        '\n'.join(['%(comment)sextern const char %(name)s[];' % {
            'name': pref['name'],
            'comment':
            ('// '+ pref['comment'] + '\n') if pref['comment'] else ''
        } for pref in prefs_list]),
        'mergeable_lists_count': len(mergeable_lists),
    })
    # Emit the pref-names .cc file: constant definitions mapping each
    # constant to its dotted preference path.
    args.prefs_names_cc.write(PREFS_NAMES_CC_TEMPLATE % {
        "top_comments": top_comments,
        "header_name": relative_prefs_names_header,
        "pref_constants":
        '\n'.join(['const char %(name)s[] = "%(path)s";' % {
            'name': pref['name'],
            'path': pref['path']
        } for pref in prefs_list]),
        "target_os": args.target_os,
        'mergeable_lists': ''.join([
            ' base::StringPiece(%s),\n' % (list_name,)
            for list_name in mergeable_lists
        ]),
    })
    # Emit the enums header: an enum class per enum pref, with enumerators
    # derived from the snake_case value names (e.g. "foo_bar" -> kFooBar).
    args.prefs_enums_h.write(PREFS_ENUMS_H_TEMPLATE % {
        "top_comments": top_comments,
        "header_guard": prefs_enums_header_guard,
        "pref_enums":
        '\n'.join([ENUM_DEFINITION_TEMPLATE % {
            'enum_name': pref['enum'],
            'enum_values': '\n'.join([ENUM_VALUE_TEMPLATE % {
                'name': ''.join([part.capitalize()
                                 for part in name.split("_")]),
                'value': value
            } for name, value in pref['enum_values'].items()])
        } for pref in prefs_list if 'enum' in pref])
    })

# Script entry point.
if __name__ == "__main__":
    main()
| |
#!/usr/bin/env python
__author__ = 'Stephen P. Henrie'
import unittest
from mock import Mock, patch
from pyon.util.unit_test import PyonTestCase
from pyon.util.int_test import IonIntegrationTestCase
from nose.plugins.attrib import attr
from pyon.core.exception import BadRequest, Conflict, Inconsistent, NotFound
from pyon.public import PRED, RT, IonObject, OT
from ion.services.coi.policy_management_service import PolicyManagementService
from interface.services.coi.ipolicy_management_service import PolicyManagementServiceClient
@attr('UNIT', group='coi')
class TestPolicyManagementService(PyonTestCase):
    """Unit tests for PolicyManagementService against mocked registry clients."""

    def setUp(self):
        # Build a service instance whose resource-registry client is a mock,
        # then alias the individual registry operations for brevity.
        mock_clients = self._create_service_mock('policy_management')
        self.policy_management_service = PolicyManagementService()
        self.policy_management_service.clients = mock_clients
        # Rename to save some typing
        self.mock_create = mock_clients.resource_registry.create
        self.mock_read = mock_clients.resource_registry.read
        self.mock_update = mock_clients.resource_registry.update
        self.mock_delete = mock_clients.resource_registry.delete
        self.mock_create_association = mock_clients.resource_registry.create_association
        self.mock_delete_association = mock_clients.resource_registry.delete_association
        self.mock_find_objects = mock_clients.resource_registry.find_objects
        self.mock_find_resources = mock_clients.resource_registry.find_resources
        self.mock_find_subjects = mock_clients.resource_registry.find_subjects
        # Policy fixture
        self.policy = Mock()
        self.policy.name = "Foo"
        self.policy.description = "This is a test policy"
        self.policy.policy_type.policy_rule = '<Rule id="%s"> <description>%s</description></Rule>'
        # UserRole fixture
        self.user_role = Mock()
        self.user_role.name = 'COI Test Administrator'
        self.user_role.governance_name = 'COI_Test_Administrator'
        # Resource fixture
        self.resource = Mock()
        self.resource._id = '123'
        self.resource.name = "Foo"
        # Policy-to-Resource association fixture (subject/predicate/object)
        self.policy_to_resource_association = Mock()
        self.policy_to_resource_association._id = '555'
        self.policy_to_resource_association.s = "123"
        self.policy_to_resource_association.st = RT.Resource
        self.policy_to_resource_association.p = PRED.hasPolicy
        self.policy_to_resource_association.o = "111"
        self.policy_to_resource_association.ot = RT.Policy

    def test_create_policy(self):
        # create_policy should return the id produced by the registry.
        self.mock_create.return_value = ['111', 1]
        policy_id = self.policy_management_service.create_policy(self.policy)
        assert policy_id == '111'
        self.mock_create.assert_called_once_with(self.policy)

    def test_read_and_update_policy(self):
        # read_policy should pass through the registry object unchanged.
        self.mock_read.return_value = self.policy
        policy = self.policy_management_service.read_policy('111')
        assert policy is self.mock_read.return_value
        self.mock_read.assert_called_once_with('111', '')
        policy.name = 'Bar'
        self.mock_update.return_value = ['111', 2]
        self.policy_management_service.update_policy(policy)
        self.mock_update.assert_called_once_with(policy)

    def test_delete_policy(self):
        # delete_policy should delete the registry record for the policy id.
        self.mock_read.return_value = self.policy
        self.mock_find_subjects.return_value = ([self.resource], [self.policy_to_resource_association])
        self.policy_management_service.delete_policy('111')
        self.mock_delete.assert_called_once_with('111')

    def test_read_policy_not_found(self):
        self.mock_read.return_value = None
        # TEST: Execute the service operation call
        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.read_policy('bad')
        ex = cm.exception
        self.assertEqual(ex.message, 'Policy bad does not exist')
        self.mock_read.assert_called_once_with('bad', '')

    def test_delete_policy_not_found(self):
        self.mock_read.return_value = None
        # TEST: Execute the service operation call
        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.delete_policy('bad')
        ex = cm.exception
        self.assertEqual(ex.message, 'Policy bad does not exist')
        self.mock_read.assert_called_once_with('bad', '')

    def test_create_user_role(self):
        # create_role should return the id produced by the registry.
        self.mock_create.return_value = ['123', 1]
        role_id = self.policy_management_service.create_role(self.user_role)
        assert role_id == '123'
        self.mock_create.assert_called_once_with(self.user_role)

    def test_read_and_update_user_role(self):
        self.mock_read.return_value = self.user_role
        user_role = self.policy_management_service.read_role('123')
        assert user_role is self.mock_read.return_value
        self.mock_read.assert_called_once_with('123', '')
        user_role.name = 'New_test_Admin'
        self.mock_update.return_value = ['123', 2]
        self.policy_management_service.update_role(user_role)
        self.mock_update.assert_called_once_with(user_role)

    def test_delete_user_role(self):
        self.mock_read.return_value = self.user_role
        self.mock_find_subjects.return_value = ([], [])
        self.policy_management_service.delete_role('123')
        self.mock_delete.assert_called_once_with('123')

    def test_read_user_role_not_found(self):
        self.mock_read.return_value = None
        # TEST: Execute the service operation call
        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.read_role('bad role')
        ex = cm.exception
        self.assertEqual(ex.message, 'Role bad role does not exist')
        self.mock_read.assert_called_once_with('bad role', '')

    def test_delete_user_role_not_found(self):
        self.mock_read.return_value = None
        # TEST: Execute the service operation call
        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.delete_role('bad role')
        ex = cm.exception
        self.assertEqual(ex.message, 'Role bad role does not exist')
        self.mock_read.assert_called_once_with('bad role', '')
@attr('INT', group='coi')
class TestPolicyManagementServiceInt(IonIntegrationTestCase):
    """Integration tests for policy management against a live container."""

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')
        self.policy_management_service = PolicyManagementServiceClient(node=self.container.node)

    def test_policy_crud(self):
        # Full create/read/update/delete lifecycle of a Policy resource,
        # including argument-validation failure modes.
        res_policy_obj = IonObject(OT.ResourceAccessPolicy, policy_rule='<Rule id="%s"> <description>%s</description></Rule>')
        policy_obj = IonObject(RT.Policy, name='Test_Policy',
                               description='This is a test policy',
                               policy_type=res_policy_obj)
        # A blank name must be rejected on create.
        policy_obj.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_policy(policy_obj)
        policy_obj.name = 'Test_Policy'
        policy_id = self.policy_management_service.create_policy(policy_obj)
        self.assertNotEqual(policy_id, None)
        # read_policy without an id must be rejected.
        with self.assertRaises(BadRequest):
            self.policy_management_service.read_policy()
        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)
        # A blank name must also be rejected on update.
        policy.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.update_policy(policy)
        policy.name = 'Updated_Test_Policy'
        self.policy_management_service.update_policy(policy)
        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)
        self.assertEqual(policy.name, 'Updated_Test_Policy')
        # Missing-argument validation for the policy factory operations.
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(policy_id, policy.name)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(policy_id, policy.name, "description")
        #p_id = self.policy_management_service.create_resource_access_policy(policy_id, "Resource_access_name", "Policy Description", "Test_Rule")
        #self.assertNotEqual(p_id, None)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(service_name="service_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(service_name="service_name", policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(service_name="service_name", policy_name="policy_name", description="description")
        #p_obj = self.policy_management_service.create_service_access_policy("service_name", "policy_name", "description", "policy_rule")
        #self.assertNotEqual(p_obj, None)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(policy_name="policy_name",description="description")
        #p_id = self.policy_management_service.create_common_service_access_policy(policy_name="policy_name",description="description", policy_rule="test_rule")
        #self.assertNotEqual(p_id, None)
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(process_name="process_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(process_name="process_name", op="op")
        # Enabling twice must not fail (idempotent).
        self.policy_management_service.enable_policy(policy_id)
        self.policy_management_service.enable_policy(policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.delete_policy()
        self.policy_management_service.delete_policy(policy_id)
        # After deletion, both read and a second delete must raise NotFound.
        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.read_policy(policy_id)
        self.assertIn("does not exist", cm.exception.message)
        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.delete_policy(policy_id)
        self.assertIn("does not exist", cm.exception.message)

    def test_role_crud(self):
        # Explicit governance names may only contain alphanumerics/underscores.
        with self.assertRaises(BadRequest) as br:
            self.policy_management_service.create_role(IonObject("UserRole", {"name": "Test User Role", "governance_name": "Test User Role"}))
        self.assertTrue("can only contain alphanumeric and underscore characters" in br.exception.message)
        user_role_obj = IonObject("UserRole", {"name": "Test User Role"})
        user_role_id = self.policy_management_service.create_role(user_role_obj)
        self.assertNotEqual(user_role_id, None)
        user_role = None
        user_role = self.policy_management_service.read_role(user_role_id)
        self.assertNotEqual(user_role, None)
        # The governance name is derived from the display name on create.
        self.assertEqual(user_role.governance_name, 'Test_User_Role')
        user_role.name = 'Test User Role 2'
        self.policy_management_service.update_role(user_role)
        user_role = None
        user_role = self.policy_management_service.read_role(user_role_id)
        self.assertNotEqual(user_role, None)
        self.assertEqual(user_role.name, 'Test User Role 2')
        # Updating the display name must not change the governance name.
        self.assertEqual(user_role.governance_name, 'Test_User_Role')
        self.policy_management_service.delete_role(user_role_id)
        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.read_role(user_role_id)
        self.assertIn("does not exist", cm.exception.message)
        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.delete_role(user_role_id)
        self.assertIn("does not exist", cm.exception.message)
| |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from collections import OrderedDict
from bokeh.util.future import collections_abc # goes away with py2
Iterable = collections_abc.Iterable # NOQA
Sequence = collections_abc.Sequence # NOQA
import difflib
import itertools
import re
import textwrap
import warnings
# External imports
import numpy as np
import sys
from six import string_types, reraise
# Bokeh imports
from ..models import (
BoxSelectTool, BoxZoomTool, CategoricalAxis, MercatorAxis,
TapTool, CrosshairTool, DataRange1d, DatetimeAxis,
FactorRange, Grid, HelpTool, HoverTool, LassoSelectTool, Legend, LegendItem, LinearAxis,
LogAxis, PanTool, ZoomInTool, ZoomOutTool, PolySelectTool, ContinuousTicker,
SaveTool, Range, Range1d, UndoTool, RedoTool, ResetTool, Tool,
WheelPanTool, WheelZoomTool, ColumnarDataSource, ColumnDataSource,
LogScale, LinearScale, CategoricalScale, Circle, MultiLine,
BoxEditTool, PointDrawTool, PolyDrawTool, PolyEditTool,
)
from ..models.markers import Marker
from ..models.renderers import GlyphRenderer
from ..core.properties import ColorSpec, Datetime, value, field
from ..transform import stack
from ..util.dependencies import import_optional
from ..util.string import nice_join
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
# pandas is an optional dependency; ``pd`` is None when pandas is not installed
pd = import_optional('pandas')
# explicit public API of this module
__all__ = (
    'get_default_color',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
def get_default_color(plot=None):
    """Return the next default glyph color for ``plot``.

    Cycles through a fixed 18-color palette based on how many glyph renderers
    the plot already has; with no plot, returns the first palette color.

    Args:
        plot (Plot, optional): plot whose renderer count selects the color

    Returns:
        str: a hex color string
    """
    colors = [
        "#1f77b4",
        "#ff7f0e", "#ffbb78",
        "#2ca02c", "#98df8a",
        "#d62728", "#ff9896",
        "#9467bd", "#c5b0d5",
        "#8c564b", "#c49c94",
        "#e377c2", "#f7b6d2",
        "#7f7f7f",
        "#bcbd22", "#dbdb8d",
        "#17becf", "#9edae5"
    ]
    if plot:
        renderers = plot.renderers
        renderers = [x for x in renderers if x.__view_model__ == "GlyphRenderer"]
        num_renderers = len(renderers)
        # BUG FIX: wrap around the palette instead of raising IndexError once
        # a plot has more renderers than there are palette entries
        return colors[num_renderers % len(colors)]
    else:
        return colors[0]
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
def _stack(stackers, spec0, spec1, **kw):
    """Build one kwargs dict per stacker, with cumulative stack() expressions.

    For each name in ``stackers``, the dict maps ``spec0`` to the stack of all
    preceding names and ``spec1`` to the stack including the current name.
    List/tuple keyword values are broadcast element-wise across stackers.
    """
    if spec0 in kw:
        raise ValueError("Stack property '%s' cannot appear in keyword args" % spec0)
    if spec1 in kw:
        raise ValueError("Stack property '%s' cannot appear in keyword args" % spec1)
    # lengths will be empty if there are no sequence kwargs supplied at all
    seq_lengths = {len(v) for v in kw.values() if isinstance(v, (list, tuple))}
    if seq_lengths:
        if len(seq_lengths) > 1:
            raise ValueError("Keyword argument sequences for broadcasting must all be the same lengths. Got lengths: %r" % sorted(list(seq_lengths)))
        if seq_lengths.pop() != len(stackers):
            raise ValueError("Keyword argument sequences for broadcasting must be the same length as stackers")
    result = []
    accumulated = []
    for i, name in enumerate(stackers):
        below = list(accumulated)
        accumulated.append(name)
        entry = {'name': name}
        entry[spec0] = stack(*below)
        entry[spec1] = stack(*accumulated)
        for key, v in kw.items():
            entry[key] = v[i] if isinstance(v, (list, tuple)) else v
        result.append(entry)
    return result
def _graph(node_source, edge_source, **kwargs):
    """Normalize sources and glyph styling kwargs for a graph renderer.

    Converts ``node_source``/``edge_source`` to ColumnDataSource as needed,
    splits ``node_*``/``edge_*`` prefixed styling kwargs into per-state glyphs,
    and returns a dict of keyword args (``node_renderer``, ``edge_renderer``,
    plus any renderer-level options) for constructing a GraphRenderer.
    """
    def _without_prefix(key, prefix):
        # BUG FIX: the original used str.lstrip(prefix), which strips a
        # *character set*, not a prefix (e.g. 'node_name'.lstrip('node_') == 'ame')
        return key[len(prefix):] if key.startswith(prefix) else key

    if not isinstance(node_source, ColumnarDataSource):
        try:
            # try converting the source to ColumnDataSource
            node_source = ColumnDataSource(node_source)
        except ValueError as err:
            # BUG FIX: exceptions have no .message attribute on Python 3;
            # format the exception object itself instead
            msg = "Failed to auto-convert {curr_type} to ColumnDataSource.\n Original error: {err}".format(
                curr_type=str(type(node_source)),
                err=err
            )
            reraise(ValueError, ValueError(msg), sys.exc_info()[2])
    if not isinstance(edge_source, ColumnarDataSource):
        try:
            # try converting the source to ColumnDataSource
            edge_source = ColumnDataSource(edge_source)
        except ValueError as err:
            msg = "Failed to auto-convert {curr_type} to ColumnDataSource.\n Original error: {err}".format(
                curr_type=str(type(edge_source)),
                err=err
            )
            reraise(ValueError, ValueError(msg), sys.exc_info()[2])
    ## node stuff
    if any(x.startswith('node_selection_') for x in kwargs):
        snode_ca = _pop_colors_and_alpha(Circle, kwargs, prefix="node_selection_")
    else:
        snode_ca = None
    if any(x.startswith('node_hover_') for x in kwargs):
        hnode_ca = _pop_colors_and_alpha(Circle, kwargs, prefix="node_hover_")
    else:
        hnode_ca = None
    if any(x.startswith('node_muted_') for x in kwargs):
        mnode_ca = _pop_colors_and_alpha(Circle, kwargs, prefix="node_muted_")
    else:
        mnode_ca = None
    nsnode_ca = _pop_colors_and_alpha(Circle, kwargs, prefix="node_nonselection_")
    node_ca = _pop_colors_and_alpha(Circle, kwargs, prefix="node_")
    ## edge stuff
    if any(x.startswith('edge_selection_') for x in kwargs):
        sedge_ca = _pop_colors_and_alpha(MultiLine, kwargs, prefix="edge_selection_")
    else:
        sedge_ca = None
    if any(x.startswith('edge_hover_') for x in kwargs):
        hedge_ca = _pop_colors_and_alpha(MultiLine, kwargs, prefix="edge_hover_")
    else:
        hedge_ca = None
    if any(x.startswith('edge_muted_') for x in kwargs):
        medge_ca = _pop_colors_and_alpha(MultiLine, kwargs, prefix="edge_muted_")
    else:
        medge_ca = None
    nsedge_ca = _pop_colors_and_alpha(MultiLine, kwargs, prefix="edge_nonselection_")
    edge_ca = _pop_colors_and_alpha(MultiLine, kwargs, prefix="edge_")
    ## node glyphs
    node_kwargs = {_without_prefix(k, 'node_'): v for k, v in kwargs.items()
                   if _without_prefix(k, 'node_') in Circle.properties()}
    node_glyph = _make_glyph(Circle, node_kwargs, node_ca)
    nsnode_glyph = _make_glyph(Circle, node_kwargs, nsnode_ca)
    snode_glyph = _make_glyph(Circle, node_kwargs, snode_ca)
    hnode_glyph = _make_glyph(Circle, node_kwargs, hnode_ca)
    mnode_glyph = _make_glyph(Circle, node_kwargs, mnode_ca)
    node_renderer = GlyphRenderer(glyph=node_glyph,
                                  nonselection_glyph=nsnode_glyph,
                                  selection_glyph=snode_glyph,
                                  hover_glyph=hnode_glyph,
                                  muted_glyph=mnode_glyph,
                                  data_source=node_source)
    ## edge glyphs
    edge_kwargs = {_without_prefix(k, 'edge_'): v for k, v in kwargs.items()
                   if _without_prefix(k, 'edge_') in MultiLine.properties()}
    edge_glyph = _make_glyph(MultiLine, edge_kwargs, edge_ca)
    nsedge_glyph = _make_glyph(MultiLine, edge_kwargs, nsedge_ca)
    sedge_glyph = _make_glyph(MultiLine, edge_kwargs, sedge_ca)
    hedge_glyph = _make_glyph(MultiLine, edge_kwargs, hedge_ca)
    medge_glyph = _make_glyph(MultiLine, edge_kwargs, medge_ca)
    edge_renderer = GlyphRenderer(glyph=edge_glyph,
                                  nonselection_glyph=nsedge_glyph,
                                  selection_glyph=sedge_glyph,
                                  hover_glyph=hedge_glyph,
                                  muted_glyph=medge_glyph,
                                  data_source=edge_source)
    # renderer-level options that may be passed straight through
    _RENDERER_ARGS = ['name', 'level', 'visible', 'x_range_name', 'y_range_name',
                      'selection_policy', 'inspection_policy']
    renderer_kwargs = {attr: kwargs.pop(attr) for attr in _RENDERER_ARGS if attr in kwargs}
    renderer_kwargs["node_renderer"] = node_renderer
    renderer_kwargs["edge_renderer"] = edge_renderer
    return renderer_kwargs
# Renderer-level keyword arguments accepted by glyph methods; these are
# consumed from kwargs by _pop_renderer_args and forwarded to GlyphRenderer.
_RENDERER_ARGS = ['name', 'x_range_name', 'y_range_name',
                  'level', 'view', 'visible', 'muted']
def _pop_renderer_args(kwargs):
    """Extract renderer-level options (and the data source) from ``kwargs``.

    Pops every _RENDERER_ARGS entry present, plus 'source' (defaulting to a
    fresh ColumnDataSource), and returns them as a dict.
    """
    result = {}
    for attr in _RENDERER_ARGS:
        if attr in kwargs:
            result[attr] = kwargs.pop(attr)
    result['data_source'] = kwargs.pop('source', ColumnDataSource())
    return result
def _pop_colors_and_alpha(glyphclass, kwargs, prefix="", default_alpha=1.0):
    """
    Given a kwargs dict, a prefix, and a default value, looks for different
    color and alpha fields of the given prefix, and fills in the default value
    if it doesn't exist.
    """
    # TODO: The need to do this and the complexity of managing this kind of
    # thing throughout the codebase really suggests that we need to have
    # a real stylesheet class, where defaults and Types can declaratively
    # substitute for this kind of imperative logic.
    props = glyphclass.properties()
    result = dict()
    # the shared "<prefix>color" shorthand (or the global default) backs
    # any specific color property not given explicitly
    color = kwargs.pop(prefix + "color", get_default_color())
    for argname in ("fill_color", "line_color"):
        if argname in props:
            result[argname] = kwargs.pop(prefix + argname, color)
    # NOTE: text fill color should really always default to black, hard coding
    # this here now until the stylesheet solution exists
    if "text_color" in props:
        result["text_color"] = kwargs.pop(prefix + "text_color", "black")
    # same shorthand treatment for alpha
    alpha = kwargs.pop(prefix + "alpha", default_alpha)
    for argname in ("fill_alpha", "line_alpha", "text_alpha"):
        if argname in props:
            result[argname] = kwargs.pop(prefix + argname, alpha)
    return result
def _get_legend_item_label(kwargs):
legend = kwargs.pop('legend', None)
source = kwargs.get('source')
legend_item_label = None
if legend:
if isinstance(legend, string_types):
# Do the simple thing first
legend_item_label = value(legend)
# But if there's a source - try and do something smart
if source is not None and hasattr(source, 'column_names'):
if legend in source.column_names:
legend_item_label = field(legend)
else:
legend_item_label = legend
return legend_item_label
_GLYPH_SOURCE_MSG = """
Expected %s to reference fields in the supplied data source.
When a 'source' argument is passed to a glyph method, values that are sequences
(like lists or arrays) must come from references to data columns in the source.
For instance, as an example:
source = ColumnDataSource(data=dict(x=a_list, y=an_array))
p.circle(x='x', y='y', source=source, ...) # pass column names and a source
Alternatively, *all* data sequences may be provided as literals as long as a
source is *not* provided:
p.circle(x=a_list, y=an_array, ...) # pass actual sequences and no source
"""
def _process_sequence_literals(glyphclass, kwargs, source, is_user_source):
    """Move sequence-literal dataspec kwargs into ``source`` as columns.

    With a user-supplied source, literals are not allowed; offending kwarg
    names are collected and returned so the caller can raise a helpful error.
    Otherwise each literal is added to ``source`` and the kwarg is rewritten
    to reference the new column by name.
    """
    incompatible = []
    dataspecs = glyphclass.dataspecs_with_props()
    for var, val in kwargs.items():
        if not isinstance(val, Iterable):
            continue  # non-iterables cannot be sequence literals
        if isinstance(val, dict):
            continue  # dicts (value/field specs) pass through as-is
        if var not in dataspecs:
            continue  # non-dataspecs do their own validation (e.g. line_dash)
        if isinstance(val, string_types):
            continue  # string sequences are handled by the dataspec as-is
        if isinstance(dataspecs[var].property, ColorSpec) and isinstance(val, tuple):
            continue  # colorspecs handle color tuple sequences as-is
        if isinstance(val, np.ndarray) and val.ndim != 1:
            raise RuntimeError("Columns need to be 1D (%s is not)" % var)
        if is_user_source:
            incompatible.append(var)
        else:
            source.add(val, name=var)
            kwargs[var] = var
    return incompatible
def _make_glyph(glyphclass, kws, extra):
if extra is None:
return None
kws = kws.copy()
kws.update(extra)
return glyphclass(**kws)
def _update_legend(plot, legend_item_label, glyph_renderer):
# Get the plot's legend
legends = plot.select(type=Legend)
if not legends:
legend = Legend()
plot.add_layout(legend)
elif len(legends) == 1:
legend = legends[0]
else:
raise RuntimeError("Plot %s configured with more than one legend renderer" % plot)
# If there is an existing legend with a matching label, then put the
# renderer on that (if the source matches). Otherwise add a new one.
added = False
for item in legend.items:
if item.label == legend_item_label:
if item.label.get('value'):
item.renderers.append(glyph_renderer)
added = True
break
if item.label.get('field') and \
glyph_renderer.data_source is item.renderers[0].data_source:
item.renderers.append(glyph_renderer)
added = True
break
if not added:
new_item = LegendItem(label=legend_item_label, renderers=[glyph_renderer])
legend.items.append(new_item)
def _get_range(range_input):
    """Coerce user range input (None, Range, GroupBy, Series, sequence) to a Range."""
    if range_input is None:
        return DataRange1d()
    if pd and isinstance(range_input, pd.core.groupby.GroupBy):
        return FactorRange(factors=sorted(list(range_input.groups.keys())))
    if isinstance(range_input, Range):
        return range_input
    if pd and isinstance(range_input, pd.Series):
        range_input = range_input.values
    if isinstance(range_input, (Sequence, np.ndarray)):
        # all-string sequences become categorical factors
        if all(isinstance(x, string_types) for x in range_input):
            return FactorRange(factors=list(range_input))
        # a two-element numeric sequence is a (start, end) pair
        if len(range_input) == 2:
            try:
                return Range1d(start=range_input[0], end=range_input[1])
            except ValueError:  # @mattpap suggests ValidationError instead
                pass
    raise ValueError("Unrecognized range input: '%s'" % str(range_input))
def _get_scale(range_input, axis_type):
    """Pick the Scale matching a range/axis-type combination."""
    if isinstance(range_input, (DataRange1d, Range1d)):
        if axis_type in ["linear", "datetime", "mercator", "auto", None]:
            return LinearScale()
        if axis_type == "log":
            return LogScale()
    if isinstance(range_input, FactorRange):
        return CategoricalScale()
    raise ValueError("Unable to determine proper scale for: '%s'" % str(range_input))
def _get_axis_class(axis_type, range_input, dim):
    """Map an ``axis_type`` name to an (axis class, extra axis kwargs) pair.

    'auto' sniffs the range: FactorRange -> categorical, a Range1d whose start
    validates as a Datetime -> datetime, anything else -> linear.
    """
    if axis_type is None:
        return None, {}
    if axis_type == "linear":
        return LinearAxis, {}
    if axis_type == "log":
        return LogAxis, {}
    if axis_type == "datetime":
        return DatetimeAxis, {}
    if axis_type == "mercator":
        return MercatorAxis, {'dimension': 'lon' if dim == 0 else 'lat'}
    if axis_type == "auto":
        if isinstance(range_input, FactorRange):
            return CategoricalAxis, {}
        if isinstance(range_input, Range1d):
            try:
                # Easier way to validate type of Range1d parameters
                Datetime.validate(Datetime(), range_input.start)
                return DatetimeAxis, {}
            except ValueError:
                pass
        return LinearAxis, {}
    raise ValueError("Unrecognized axis_type: '%r'" % axis_type)
def _get_num_minor_ticks(axis_class, num_minor_ticks):
if isinstance(num_minor_ticks, int):
if num_minor_ticks <= 1:
raise ValueError("num_minor_ticks must be > 1")
return num_minor_ticks
if num_minor_ticks is None:
return 0
if num_minor_ticks == 'auto':
if axis_class is LogAxis:
return 10
return 5
# Map of tool-name strings (as accepted in a ``tools="..."`` spec) to factory
# callables. A plain-string value (e.g. "previewsave") is an alias that is
# resolved to another entry by _tool_from_string.
_known_tools = {
    "pan": lambda: PanTool(dimensions='both'),
    "xpan": lambda: PanTool(dimensions='width'),
    "ypan": lambda: PanTool(dimensions='height'),
    "xwheel_pan": lambda: WheelPanTool(dimension="width"),
    "ywheel_pan": lambda: WheelPanTool(dimension="height"),
    "wheel_zoom": lambda: WheelZoomTool(dimensions='both'),
    "xwheel_zoom": lambda: WheelZoomTool(dimensions='width'),
    "ywheel_zoom": lambda: WheelZoomTool(dimensions='height'),
    "zoom_in": lambda: ZoomInTool(dimensions='both'),
    "xzoom_in": lambda: ZoomInTool(dimensions='width'),
    "yzoom_in": lambda: ZoomInTool(dimensions='height'),
    "zoom_out": lambda: ZoomOutTool(dimensions='both'),
    "xzoom_out": lambda: ZoomOutTool(dimensions='width'),
    "yzoom_out": lambda: ZoomOutTool(dimensions='height'),
    "click": lambda: TapTool(behavior="inspect"),
    "tap": lambda: TapTool(),
    "crosshair": lambda: CrosshairTool(),
    "box_select": lambda: BoxSelectTool(),
    "xbox_select": lambda: BoxSelectTool(dimensions='width'),
    "ybox_select": lambda: BoxSelectTool(dimensions='height'),
    "poly_select": lambda: PolySelectTool(),
    "lasso_select": lambda: LassoSelectTool(),
    "box_zoom": lambda: BoxZoomTool(dimensions='both'),
    "xbox_zoom": lambda: BoxZoomTool(dimensions='width'),
    "ybox_zoom": lambda: BoxZoomTool(dimensions='height'),
    "hover": lambda: HoverTool(tooltips=[
        ("index", "$index"),
        ("data (x, y)", "($x, $y)"),
        ("screen (x, y)", "($sx, $sy)"),
    ]),
    "save": lambda: SaveTool(),
    "previewsave": "save",
    "undo": lambda: UndoTool(),
    "redo": lambda: RedoTool(),
    "reset": lambda: ResetTool(),
    "help": lambda: HelpTool(),
    "box_edit": lambda: BoxEditTool(),
    "point_draw": lambda: PointDrawTool(),
    "poly_draw": lambda: PolyDrawTool(),
    "poly_edit": lambda: PolyEditTool()
}
def _tool_from_string(name):
    """ Takes a string and returns a corresponding `Tool` instance. """
    known_tools = sorted(_known_tools.keys())
    if name not in known_tools:
        # suggest close matches when possible, otherwise list everything
        matches = difflib.get_close_matches(name.lower(), known_tools)
        text = "similar"
        if not matches:
            matches, text = known_tools, "possible"
        raise ValueError("unexpected tool name '%s', %s tools are %s" % (name, text, nice_join(matches)))
    tool_fn = _known_tools[name]
    # a string value is an alias for another entry (e.g. "previewsave" -> "save")
    if isinstance(tool_fn, string_types):
        tool_fn = _known_tools[tool_fn]
    return tool_fn()
def _process_axis_and_grid(plot, axis_type, axis_location, minor_ticks, axis_label, rng, dim):
    """Create an axis and matching grid of the requested type, and add them to ``plot``.

    Args:
        plot (Plot): the plot to receive the axis and grid
        axis_type (str or None): axis type name understood by _get_axis_class
        axis_location (str or None): plot attribute ('left', 'below', ...) to
            append the axis to; None suppresses the axis (the grid is still added)
        minor_ticks (int, str, or None): num_minor_ticks setting
        axis_label (str): label text for the axis, if any
        rng (Range): range used to sniff 'auto' axis types
        dim (int): dimension (0 for x, 1 for y)
    """
    axiscls, axiskw = _get_axis_class(axis_type, rng, dim)
    if axiscls:
        axis = axiscls(**axiskw)
        if isinstance(axis.ticker, ContinuousTicker):
            axis.ticker.num_minor_ticks = _get_num_minor_ticks(axiscls, minor_ticks)
        # (removed a redundant `axis_label = axis_label` self-assignment here)
        if axis_label:
            axis.axis_label = axis_label
        grid = Grid(dimension=dim, ticker=axis.ticker)
        plot.add_layout(grid, "center")
        if axis_location is not None:
            getattr(plot, axis_location).append(axis)
def _process_tools_arg(plot, tools, tooltips=None):
    """ Adds tools to the plot object
    Args:
        plot (Plot): instance of a plot object
        tools (seq[Tool or str]|str): list of tool types or string listing the
            tool names. Those are converted using the _tool_from_string
            function. I.e.: `wheel_zoom,box_zoom,reset`.
        tooltips (string or seq[tuple[str, str]], optional):
            tooltips to use to configure a HoverTool
    Returns:
        list of Tools objects added to plot, map of supplied string names to tools
    """
    tool_objs = []
    tool_map = {}
    if isinstance(tools, (list, tuple)):
        # Tool instances pass straight through; string entries are collected
        # into a single comma-separated spec and parsed below
        names = []
        for tool in tools:
            if isinstance(tool, Tool):
                tool_objs.append(tool)
            elif isinstance(tool, string_types):
                names.append(tool)
            else:
                raise ValueError("tool should be a string or an instance of Tool class")
        tools = ','.join(names)
    for name in re.split(r"\s*,\s*", tools.strip()):
        if name == "":
            continue  # re.split can yield empty strings; ignore them
        tool_obj = _tool_from_string(name)
        tool_objs.append(tool_obj)
        tool_map[name] = tool_obj
    # warn when the same tool type appears more than once
    counts = {}
    for tool in tool_objs:
        typename = tool.__class__.__name__
        counts[typename] = counts.get(typename, 0) + 1
    repeated_tools = sorted(t for t, c in counts.items() if c > 1)
    if repeated_tools:
        warnings.warn("%s are being repeated" % ",".join(repeated_tools))
    if tooltips is not None:
        # configure the first HoverTool present, or add one if there is none
        hover = next((t for t in tool_objs if isinstance(t, HoverTool)), None)
        if hover is not None:
            hover.tooltips = tooltips
        else:
            tool_objs.append(HoverTool(tooltips=tooltips))
    return tool_objs, tool_map
def _process_active_tools(toolbar, tool_map, active_drag, active_inspect, active_scroll, active_tap):
    """ Configures the active tools on a toolbar
    Args:
        toolbar (Toolbar): instance of a Toolbar object
        tool_map (dict[str]|Tool): tool_map from _process_tools_arg
        active_drag (str or Tool): the tool to set active for drag
        active_inspect (str or Tool): the tool to set active for inspect
        active_scroll (str or Tool): the tool to set active for scroll
        active_tap (str or Tool): the tool to set active for tap
    Returns:
        None
    Note:
        This function sets properties on Toolbar
    """
    if active_drag in ['auto', None] or isinstance(active_drag, Tool):
        toolbar.active_drag = active_drag
    elif active_drag in tool_map:
        toolbar.active_drag = tool_map[active_drag]
    else:
        raise ValueError("Got unknown %r for 'active_drag', which was not a string supplied in 'tools' argument" % active_drag)
    if active_inspect in ['auto', None] or isinstance(active_inspect, Tool) or all(isinstance(t, Tool) for t in active_inspect):
        toolbar.active_inspect = active_inspect
    elif active_inspect in tool_map:
        toolbar.active_inspect = tool_map[active_inspect]
    else:
        # BUG FIX: this message previously interpolated active_scroll
        raise ValueError("Got unknown %r for 'active_inspect', which was not a string supplied in 'tools' argument" % active_inspect)
    if active_scroll in ['auto', None] or isinstance(active_scroll, Tool):
        toolbar.active_scroll = active_scroll
    elif active_scroll in tool_map:
        toolbar.active_scroll = tool_map[active_scroll]
    else:
        raise ValueError("Got unknown %r for 'active_scroll', which was not a string supplied in 'tools' argument" % active_scroll)
    if active_tap in ['auto', None] or isinstance(active_tap, Tool):
        toolbar.active_tap = active_tap
    elif active_tap in tool_map:
        toolbar.active_tap = tool_map[active_tap]
    else:
        raise ValueError("Got unknown %r for 'active_tap', which was not a string supplied in 'tools' argument" % active_tap)
def _get_argspecs(glyphclass):
argspecs = OrderedDict()
for arg in glyphclass._args:
spec = {}
descriptor = getattr(glyphclass, arg)
# running python with -OO will discard docstrings -> __doc__ is None
if descriptor.__doc__:
spec['desc'] = "\n ".join(textwrap.dedent(descriptor.__doc__).split("\n"))
else:
spec['desc'] = ""
spec['default'] = descriptor.class_default(glyphclass)
spec['type'] = descriptor.property._sphinx_type()
argspecs[arg] = spec
return argspecs
# This template generates the following:
#
# def foo(self, x, y=10, kwargs):
# kwargs['x'] = x
# kwargs['y'] = y
# return func(self, **kwargs)
_sigfunc_template = """
def %s(self, %s, **kwargs):
%s
return func(self, **kwargs)
"""
def _get_sigfunc(func_name, func, argspecs):
    # This code is to wrap the generic func(*args, **kw) glyph method so that
    # a much better signature is available to users. E.g., for ``square`` we have:
    #
    #     Signature: p.square(x, y, size=4, angle=0.0, **kwargs)
    #
    # which provides descriptive names for positional args, as well as any defaults
    params = []
    for arg, spec in argspecs.items():
        # args with a concrete default get it spelled out in the signature
        if spec['default'] is None:
            params.append(arg)
        else:
            params.append("%s=%r" % (arg, spec['default']))
    args_text = ", ".join(params)
    kwargs_assign_text = "\n".join(" kwargs[%r] = %s" % (x, x) for x in argspecs)
    func_code = compile(_sigfunc_template % (func_name, args_text, kwargs_assign_text), "fakesource", "exec")
    namespace = {}
    eval(func_code, {"func": func}, namespace)
    return namespace[func_name]
_arg_template = """ %s (%s) : %s
(default: %r)
"""
_doc_template = """ Configure and add :class:`~bokeh.models.%s.%s` glyphs to this Figure.
Args:
%s
Keyword Args:
%s
Other Parameters:
alpha (float) : an alias to set all alpha keyword args at once
color (Color) : an alias to set all color keyword args at once
source (ColumnDataSource) : a user supplied data source
legend (str) : a legend tag for this glyph
x_range_name (str) : name an extra range to use for mapping x-coordinates
y_range_name (str) : name an extra range to use for mapping y-coordinates
level (Enum) : control the render level order for this glyph
It is also possible to set the color and alpha parameters of a "nonselection"
glyph. To do so, prefix any visual parameter with ``'nonselection_'``.
For example, pass ``nonselection_alpha`` or ``nonselection_fill_alpha``.
Returns:
GlyphRenderer
"""
def _add_sigfunc_info(func, argspecs, glyphclass, extra_docs):
    """Fill in __name__ and a generated __doc__ on a glyph-method wrapper."""
    func.__name__ = glyphclass.__name__
    # these are not really useful, and should also really be private, just skip them
    omissions = {'js_event_callbacks', 'js_property_callbacks', 'subscribed_events'}
    kwlines = []
    for kw in glyphclass.properties() - set(argspecs):
        if kw in omissions:
            continue
        descriptor = getattr(glyphclass, kw)
        typ = descriptor.property._sphinx_type()
        doc = descriptor.__doc__
        # running python with -OO discards docstrings -> __doc__ may be None
        desc = "\n ".join(textwrap.dedent(doc).split("\n")) if doc else ""
        kwlines.append(_arg_template % (kw, typ, desc, descriptor.class_default(glyphclass)))
    # extra (non-property) keyword args declared by the glyph class, if any
    extra_kws = getattr(glyphclass, '_extra_kws', {})
    for kw, (typ, desc) in extra_kws.items():
        kwlines.append(" %s (%s) : %s" % (kw, typ, desc))
    kwlines.sort()
    arglines = [_arg_template % (arg, spec['type'], spec['desc'], spec['default'])
                for arg, spec in argspecs.items()]
    mod = "markers" if issubclass(glyphclass, Marker) else "glyphs"
    func.__doc__ = _doc_template % (mod, func.__name__, "\n".join(arglines), "\n".join(kwlines))
    if extra_docs:
        func.__doc__ += extra_docs
def _glyph_function(glyphclass, extra_docs=None):
    # Builds the Figure glyph method for ``glyphclass``: the generic ``func``
    # below is wrapped with a generated descriptive signature and docstring.
    def func(self, **kwargs):
        # Convert data source, if necessary
        is_user_source = kwargs.get('source', None) is not None
        if is_user_source:
            source = kwargs['source']
            if not isinstance(source, ColumnarDataSource):
                try:
                    # try converting the source to ColumnDataSource
                    source = ColumnDataSource(source)
                except ValueError as err:
                    # NOTE(review): err.message does not exist on Python 3 -- confirm
                    msg = "Failed to auto-convert {curr_type} to ColumnDataSource.\n Original error: {err}".format(
                        curr_type=str(type(source)),
                        err=err.message
                    )
                    reraise(ValueError, ValueError(msg), sys.exc_info()[2])
                # update kwargs['source'] so downstream code uses the converted source
                kwargs['source'] = source
        # Process legend kwargs and remove legend before we get going
        legend_item_label = _get_legend_item_label(kwargs)
        # Need to check if user source is present before _pop_renderer_args
        renderer_kws = _pop_renderer_args(kwargs)
        source = renderer_kws['data_source']
        # Assign global_alpha from alpha if glyph type is an image
        if 'alpha' in kwargs and glyphclass.__name__ in ('Image', 'ImageRGBA', 'ImageURL'):
            kwargs['global_alpha'] = kwargs['alpha']
        # handle the main glyph, need to process literals
        glyph_ca = _pop_colors_and_alpha(glyphclass, kwargs)
        incompatible_literal_spec_values = []
        incompatible_literal_spec_values += _process_sequence_literals(glyphclass, kwargs, source, is_user_source)
        incompatible_literal_spec_values += _process_sequence_literals(glyphclass, glyph_ca, source, is_user_source)
        if incompatible_literal_spec_values:
            raise RuntimeError(_GLYPH_SOURCE_MSG % nice_join(incompatible_literal_spec_values, conjuction="and"))
        # handle the nonselection glyph, we always set one
        nsglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='nonselection_', default_alpha=0.1)
        # handle the selection glyph, if any properties were given
        if any(x.startswith('selection_') for x in kwargs):
            sglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='selection_')
        else:
            sglyph_ca = None
        # handle the hover glyph, if any properties were given
        if any(x.startswith('hover_') for x in kwargs):
            hglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='hover_')
        else:
            hglyph_ca = None
        # handle the mute glyph, if any properties were given
        if any(x.startswith('muted_') for x in kwargs):
            mglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='muted_')
        else:
            mglyph_ca = None
        # _make_glyph returns None for the variants whose *_ca dict is None
        glyph = _make_glyph(glyphclass, kwargs, glyph_ca)
        nsglyph = _make_glyph(glyphclass, kwargs, nsglyph_ca)
        sglyph = _make_glyph(glyphclass, kwargs, sglyph_ca)
        hglyph = _make_glyph(glyphclass, kwargs, hglyph_ca)
        mglyph = _make_glyph(glyphclass, kwargs, mglyph_ca)
        glyph_renderer = GlyphRenderer(glyph=glyph,
                                       nonselection_glyph=nsglyph,
                                       selection_glyph=sglyph,
                                       hover_glyph=hglyph,
                                       muted_glyph=mglyph,
                                       **renderer_kws)
        if legend_item_label:
            _update_legend(self, legend_item_label, glyph_renderer)
        self.renderers.append(glyph_renderer)
        return glyph_renderer
    argspecs = _get_argspecs(glyphclass)
    sigfunc = _get_sigfunc(glyphclass.__name__.lower(), func, argspecs)
    sigfunc.glyph_method = True
    _add_sigfunc_info(sigfunc, argspecs, glyphclass, extra_docs)
    return sigfunc
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| |
# -*- coding: utf-8 -*-
"""Copyright 2015 Roger R Labbe Jr.
Code supporting the book
Kalman and Bayesian Filters in Python
https://github.com/rlabbe/Kalman-and-Bayesian-Filters-in-Python
This is licensed under an MIT license. See the LICENSE.txt file
for more information.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from filterpy.monte_carlo import stratified_resample, residual_resample
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
from numpy.random import randn, random, uniform, multivariate_normal, seed
import scipy.stats
class ParticleFilter(object):
    """Minimal 2D particle filter over (x, y, heading) used by the book's demos."""

    def __init__(self, N, x_dim, y_dim):
        # columns of particles are x, y, heading
        self.particles = np.empty((N, 3))
        self.N = N
        self.x_dim = x_dim
        self.y_dim = y_dim
        # distribute particles uniformly over the area, all equally weighted
        self.weights = np.empty(N)
        self.weights.fill(1./N)
        self.particles[:, 0] = uniform(0, x_dim, size=N)
        self.particles[:, 1] = uniform(0, y_dim, size=N)
        self.particles[:, 2] = uniform(0, 2*np.pi, size=N)

    def predict(self, u, std):
        """ move according to control input u with noise std"""
        self.particles[:, 2] += u[0] + randn(self.N) * std[0]
        self.particles[:, 2] %= 2 * np.pi
        step = u[1] + randn(self.N)
        self.particles[:, 0] += np.cos(self.particles[:, 2]) * step
        self.particles[:, 1] += np.sin(self.particles[:, 2]) * step
        # NOTE(review): u is applied a second time here as a direct (x, y)
        # translation on top of the heading-based move above -- kept as-is
        self.particles[:, 0:2] += u + randn(self.N, 2) * std

    def weight(self, z, var):
        """Update particle weights from measurement z with variance var."""
        dx = self.particles[:, 0] - z[0]
        dy = self.particles[:, 1] - z[1]
        dist = np.sqrt(dx**2 + dy**2)
        # simplification assumes variance is invariant to world projection
        prob = scipy.stats.norm(0, np.sqrt(var)).pdf(dist)
        # particles far from a measurement give 0.0 probability due to
        # floating point limits; once at zero they could never recover,
        # so add a small nonzero value to all points
        prob += 1.e-12
        self.weights += prob
        self.weights /= sum(self.weights)  # normalize

    def neff(self):
        """Effective sample size of the current weight distribution."""
        return 1. / np.sum(np.square(self.weights))

    def resample(self):
        """Multinomial resampling; weights are reset to uniform afterwards."""
        drawn = np.zeros((self.N, 3))
        cumsum = np.cumsum(self.weights)
        for i in range(self.N):
            drawn[i] = self.particles[np.searchsorted(cumsum, random())]
        self.particles = drawn
        self.weights.fill(1.0 / self.N)

    def estimate(self):
        """ returns mean and variance """
        pos = self.particles[:, 0:2]
        mu = np.average(pos, weights=self.weights, axis=0)
        var = np.average((pos - mu)**2, weights=self.weights, axis=0)
        return mu, var
def plot_random_pd():
    """Draw a bumpy probability-density-looking curve in xkcd style."""
    def norm(x, x0, sigma):
        return np.exp(-0.5 * (x - x0) ** 2 / sigma ** 2)

    def sigmoid(x, x0, alpha):
        return 1. / (1. + np.exp(- (x - x0) / alpha))

    x = np.linspace(0, 1, 100)
    y2 = (0.1 * np.sin(norm(x, 0.2, 0.05)) + 0.25 * norm(x, 0.6, 0.05) +
          .5*norm(x, .5, .08) +
          np.sqrt(norm(x, 0.8, 0.06)) + 0.1 * (1 - sigmoid(x, 0.45, 0.15)))
    # hack: `with plt.xkcd()` doesn't restore rc state correctly,
    # so save and restore it manually
    saved_state = mpl.rcParams.copy()
    plt.xkcd()
    plt.axes(xticks=[], yticks=[], frameon=False)
    plt.plot(x, y2)
    plt.ylim([0, max(y2) + .1])
    mpl.rcParams.update(saved_state)
def plot_monte_carlo_ukf():
def f(x,y):
return x+y, .1*x**2 + y*y
mean = (0, 0)
p = np.array([[32, 15], [15., 40.]])
# Compute linearized mean
mean_fx = f(*mean)
#generate random points
xs, ys = multivariate_normal(mean=mean, cov=p, size=3000).T
fxs, fys = f(xs, ys)
plt.subplot(121)
plt.gca().grid(b=False)
plt.scatter(xs, ys, marker='.', alpha=.2, color='k')
plt.xlim(-25, 25)
plt.ylim(-25, 25)
plt.subplot(122)
plt.gca().grid(b=False)
plt.scatter(fxs, fys, marker='.', alpha=0.2, color='k')
plt.ylim([-10, 200])
plt.xlim([-100, 100])
plt.show()
def plot_pf(pf, xlim=100, ylim=100, weights=True):
    """Scatter-plot particle positions, optionally with per-axis weight panels."""
    if not weights:
        plt.cla()
        plt.scatter(pf.particles[:, 0], pf.particles[:, 1], marker='.', s=1, color='k')
        plt.xlim(0, xlim)
        plt.ylim(0, ylim)
        return
    # top-left panel: weight vs x position
    ax = plt.subplot(221)
    ax.cla()
    plt.xlim(0, ylim)
    #plt.ylim(0, 1)
    ax.set_yticklabels('')
    plt.scatter(pf.particles[:, 0], pf.weights, marker='.', s=1, color='k')
    ax.set_ylim(bottom=0)
    # bottom-right panel: weight vs y position
    ax = plt.subplot(224)
    ax.cla()
    ax.set_xticklabels('')
    plt.scatter(pf.weights, pf.particles[:, 1], marker='.', s=1, color='k')
    plt.ylim(0, xlim)
    ax.set_xlim(left=0)
    #plt.xlim(0, 1)
    # bottom-left panel: particle positions
    ax = plt.subplot(223)
    ax.cla()
    plt.scatter(pf.particles[:, 0], pf.particles[:, 1], marker='.', s=1, color='k')
    plt.xlim(0, xlim)
    plt.ylim(0, ylim)
def Gaussian(mu, sigma, x):
    """Probability of x for a 1-dim Gaussian with mean mu and std dev sigma.

    Accepts scalar or array x; the result is floored at 1e-29 so downstream
    weight updates never hit exactly zero.

    Args:
        mu (float): mean of the Gaussian
        sigma (float): standard deviation (the denominator uses sigma**2)
        x (float or ndarray): point(s) at which to evaluate the pdf

    Returns:
        float or ndarray: pdf value(s), floored at 1e-29
    """
    g = (np.exp(-((mu - x) ** 2) / (sigma ** 2) / 2.0) /
         np.sqrt(2.0 * np.pi * (sigma ** 2)))
    # BUG FIX: the original looped with len(g), which raises TypeError for
    # scalar x (and test_gaussian calls this with scalars); np.maximum
    # applies the same floor to scalars and arrays alike
    return np.maximum(g, 1.e-29)
def test_gaussian(N):
for i in range(N):
mean, std, x = randn(3)
std = abs(std)
d = Gaussian(mean, std, x) - scipy.stats.norm(mean, std).pdf(x)
assert abs(d) < 1.e-8, "{}, {}, {}, {}, {}, {}".format(d, mean, std, x, Gaussian(mean, std, x), scipy.stats.norm(mean, std).pdf(x))
def show_two_pf_plots():
    """ Displays results of PF after 1 and 10 iterations for the book.
    Note the book says this solves the full robot localization problem.
    It doesn't bother simulating landmarks as this is just an illustration.
    """
    seed(1234)
    N = 3000
    pf = ParticleFilter(N, 20, 20)
    z = np.array([20, 20])
    for step in range(10):
        # true position moves along the diagonal; measurement is noisy
        z[0] = step + 1 + randn()*0.3
        z[1] = step + 1 + randn()*0.3
        pf.predict((1, 1), (0.2, 0.2))
        pf.weight(z=z, var=.8)
        pf.resample()
        if step not in (0, 9):
            continue
        # only the first and last iterations are drawn
        plt.subplot(121 if step == 0 else 122)
        mu, var = pf.estimate()
        plot_pf(pf, 20, 20, weights=False)
        if step == 0:
            plt.scatter(mu[0], mu[1], color='g', s=100)
            plt.scatter(step + 1, step + 1, marker='x', color='r', s=180, lw=3)
        else:
            plt.scatter(mu[0], mu[1], color='g', s=100, label="PF")
            plt.scatter([step + 1], [step + 1], marker='x', color='r', s=180, label="True", lw=3)
            plt.legend(scatterpoints=1)
    plt.tight_layout()
def plot_cumsum(a):
fig = plt.figure()
N = len(a)
cmap = mpl.colors.ListedColormap([[0., .4, 1.],
[0., .8, 1.],
[1., .8, 0.],
[1., .4, 0.]]*(int(N/4) + 1))
cumsum = np.cumsum(np.asarray(a) / np.sum(a))
cumsum = np.insert(cumsum, 0, 0)
#fig = plt.figure(figsize=(6,3))
fig=plt.gcf()
ax = fig.add_axes([0.05, 0.475, 0.9, 0.15])
norm = mpl.colors.BoundaryNorm(cumsum, cmap.N)
bar = mpl.colorbar.ColorbarBase(ax, cmap=cmap,
norm=norm,
drawedges=False,
spacing='proportional',
orientation='horizontal')
if N > 10:
bar.set_ticks([])
def plot_stratified_resample(a):
    """Illustrate stratified resampling: one uniform draw inside each of N bins."""
    N = len(a)
    bands = [[0., .4, 1.],
             [0., .8, 1.],
             [1., .8, 0.],
             [1., .4, 0.]]
    cmap = mpl.colors.ListedColormap(bands * (int(N/4) + 1))
    cumsum = np.cumsum(np.asarray(a) / np.sum(a))
    cumsum = np.insert(cumsum, 0, 0)
    fig = plt.figure()
    ax = plt.gcf().add_axes([0.05, 0.475, 0.9, 0.15])
    norm = mpl.colors.BoundaryNorm(cumsum, cmap.N)
    bar = mpl.colorbar.ColorbarBase(ax, cmap=cmap, norm=norm, drawedges=False,
                                    spacing='proportional', orientation='horizontal')
    # bin boundaries at multiples of 1/N
    xs = np.linspace(0., 1. - 1./N, N)
    ax.vlines(xs, 0, 1, lw=2)
    # make N subdivisions, and choose a random position within each one
    b = (random(N) + range(N)) / N
    plt.scatter(b, [.5]*len(b), s=60, facecolor='k', edgecolor='k')
    bar.set_ticks([])
    plt.title('stratified resampling')
def plot_systematic_resample(a):
    """Visualize systematic resampling.

    Draws the cumulative-weight colorbar, divides [0, 1) into ``len(a)``
    equal strata, and scatters one sample per stratum, all sharing a single
    random offset.
    """
    count = len(a)
    palette = [[0., .4, 1.],
               [0., .8, 1.],
               [1., .8, 0.],
               [1., .4, 0.]]
    cmap = mpl.colors.ListedColormap(palette * (int(count / 4) + 1))
    weights = np.asarray(a) / np.sum(a)
    cumsum = np.insert(np.cumsum(weights), 0, 0)
    fig = plt.figure()
    ax = plt.gcf().add_axes([0.05, 0.475, 0.9, 0.15])
    norm = mpl.colors.BoundaryNorm(cumsum, cmap.N)
    bar = mpl.colorbar.ColorbarBase(ax, cmap=cmap,
                                    norm=norm,
                                    drawedges=False,
                                    spacing='proportional',
                                    orientation='horizontal')
    edges = np.linspace(0., 1. - 1. / count, count)
    ax.vlines(edges, 0, 1, lw=2)
    # one shared random offset applied to every subdivision
    samples = (random() + np.array(range(count))) / count
    plt.scatter(samples, [.5] * len(samples), s=60, facecolor='k', edgecolor='k')
    bar.set_ticks([])
    plt.title('systematic resampling')
def plot_multinomial_resample(a):
    """Visualize multinomial resampling.

    Draws the cumulative-weight colorbar and scatters ``len(a)`` fully
    independent uniform draws over it.
    """
    count = len(a)
    palette = [[0., .4, 1.],
               [0., .8, 1.],
               [1., .8, 0.],
               [1., .4, 0.]]
    cmap = mpl.colors.ListedColormap(palette * (int(count / 4) + 1))
    weights = np.asarray(a) / np.sum(a)
    cumsum = np.insert(np.cumsum(weights), 0, 0)
    fig = plt.figure()
    ax = plt.gcf().add_axes([0.05, 0.475, 0.9, 0.15])
    norm = mpl.colors.BoundaryNorm(cumsum, cmap.N)
    bar = mpl.colorbar.ColorbarBase(ax, cmap=cmap,
                                    norm=norm,
                                    drawedges=False,
                                    spacing='proportional',
                                    orientation='horizontal')
    # every sample is an independent uniform draw over [0, 1)
    samples = random(count)
    plt.scatter(samples, [.5] * len(samples), s=60, facecolor='k', edgecolor='k')
    bar.set_ticks([])
    plt.title('multinomial resampling')
def plot_residual_resample(a):
    """Visualize residual resampling of the weights ``a``.

    Draws the cumulative-weight colorbar and scatters one dot per sample
    chosen by ``residual_resample``, spread evenly inside the weight band
    of the particle it was drawn from.
    """
    N = len(a)
    a_norm = np.asarray(a) / np.sum(a)
    cumsum = np.cumsum(a_norm)
    cumsum = np.insert(cumsum, 0, 0)
    cmap = mpl.colors.ListedColormap([[0., .4, 1.],
                                      [0., .8, 1.],
                                      [1., .8, 0.],
                                      [1., .4, 0.]]*(int(N/4) + 1))
    fig = plt.figure()
    ax = plt.gcf().add_axes([0.05, 0.475, 0.9, 0.15])
    norm = mpl.colors.BoundaryNorm(cumsum, cmap.N)
    bar = mpl.colorbar.ColorbarBase(ax, cmap=cmap,
                                    norm=norm,
                                    drawedges=False,
                                    spacing='proportional',
                                    orientation='horizontal')
    indexes = residual_resample(a_norm)
    # minlength=N guarantees one slot per particle; plain bincount returns
    # only max(indexes)+1 slots, and the original then indexed past its end.
    bins = np.bincount(indexes, minlength=N)
    # range(1, N+1): the original stopped at N and never plotted the samples
    # drawn from the last particle.
    for i in range(1, N + 1):
        n = bins[i - 1]  # number of samples drawn from particle i-1
        if n > 0:
            # spread the n dots evenly inside (cumsum[i-1], cumsum[i])
            b = np.linspace(cumsum[i - 1], cumsum[i], n + 2)[1:-1]
            plt.scatter(b, [.5]*len(b), s=60, facecolor='k', edgecolor='k')
    bar.set_ticks([])
    plt.title('residual resampling')
if __name__ == '__main__':
    # Manual demo driver: renders the two-panel particle-filter figure.
    show_two_pf_plots()
    # Alternative one-off demos, kept for experimentation:
    #plot_residual_resample([.1, .2, .3, .4, .2, .3, .1])
    #example()
    #show_two_pf_plots()
    #a = [.1, .2, .1, .6]
    #plot_cumsum(a)
    #test_pf()
| |
import discord
from discord.ext import commands
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search, Q, DocType, Text, Float, Keyword
from elasticsearch_dsl.connections import connections
from monacle_scraper import MonacleScraper, Team
from collections import defaultdict
from requests.exceptions import HTTPError
import humanize
import datetime
import re
import uuid
import trainerdex
# Matches raw Discord user mentions so they can be stripped out of gym names.
# Covers both the plain (<@123>) and nickname (<@!123>) mention forms; the
# original non-raw pattern '\<@\d+\>' missed the nickname form and relied on
# invalid escape sequences (a DeprecationWarning on modern Python).
RE_MENTION = re.compile(r'<@!?\d+>')

# Embed accent color per team id as reported by Monocle
# (presumably 0=uncontested/grey, 1=Mystic/blue, 2=Valor/red,
# 3=Instinct/yellow — the colors match, but confirm against Team).
TEAM_COLORS = {
    0: 0xD3D3D3,
    1: 0x0000FF,
    2: 0xFF0000,
    3: 0xFFFF00
}
# Move-id -> human-readable move name. Unknown ids render as '?' via the
# defaultdict factory. Ids 200+ appear to be the "fast move" entries that
# shadow earlier names — confirm against the upstream names.py if it matters.
MOVES = defaultdict(lambda: '?', { #From https://github.com/Noctem/Monocle/blob/a2e3c61b2ddd7772ae3c62a6f252476cce0e804b/monocle/names.py#L261
    1: 'Thunder Shock',
    2: 'Quick Attack',
    3: 'Scratch',
    4: 'Ember',
    5: 'Vine Whip',
    6: 'Tackle',
    7: 'Razor Leaf',
    8: 'Take Down',
    9: 'Water Gun',
    10: 'Bite',
    11: 'Pound',
    12: 'Double Slap',
    13: 'Wrap',
    14: 'Hyper Beam',
    15: 'Lick',
    16: 'Dark Pulse',
    17: 'Smog',
    18: 'Sludge',
    19: 'Metal Claw',
    20: 'Vice Grip',
    21: 'Flame Wheel',
    22: 'Megahorn',
    23: 'Wing Attack',
    24: 'Flamethrower',
    25: 'Sucker Punch',
    26: 'Dig',
    27: 'Low Kick',
    28: 'Cross Chop',
    29: 'Psycho Cut',
    30: 'Psybeam',
    31: 'Earthquake',
    32: 'Stone Edge',
    33: 'Ice Punch',
    34: 'Heart Stamp',
    35: 'Discharge',
    36: 'Flash Cannon',
    37: 'Peck',
    38: 'Drill Peck',
    39: 'Ice Beam',
    40: 'Blizzard',
    41: 'Air Slash',
    42: 'Heat Wave',
    43: 'Twineedle',
    44: 'Poison Jab',
    45: 'Aerial Ace',
    46: 'Drill Run',
    47: 'Petal Blizzard',
    48: 'Mega Drain',
    49: 'Bug Buzz',
    50: 'Poison Fang',
    51: 'Night Slash',
    52: 'Slash',
    53: 'Bubble Beam',
    54: 'Submission',
    55: 'Karate Chop',
    56: 'Low Sweep',
    57: 'Aqua Jet',
    58: 'Aqua Tail',
    59: 'Seed Bomb',
    60: 'Psyshock',
    61: 'Rock Throw',
    62: 'Ancient Power',
    63: 'Rock Tomb',
    64: 'Rock Slide',
    65: 'Power Gem',
    66: 'Shadow Sneak',
    67: 'Shadow Punch',
    68: 'Shadow Claw',
    69: 'Ominous Wind',
    70: 'Shadow Ball',
    71: 'Bullet Punch',
    72: 'Magnet Bomb',
    73: 'Steel Wing',
    74: 'Iron Head',
    75: 'Parabolic Charge',
    76: 'Spark',
    77: 'Thunder Punch',
    78: 'Thunder',
    79: 'Thunderbolt',
    80: 'Twister',
    81: 'Dragon Breath',
    82: 'Dragon Pulse',
    83: 'Dragon Claw',
    84: 'Disarming Voice',
    85: 'Draining Kiss',
    86: 'Dazzling Gleam',
    87: 'Moonblast',
    88: 'Play Rough',
    89: 'Cross Poison',
    90: 'Sludge Bomb',
    91: 'Sludge Wave',
    92: 'Gunk Shot',
    93: 'Mud Shot',
    94: 'Bone Club',
    95: 'Bulldoze',
    96: 'Mud Bomb',
    97: 'Fury Cutter',
    98: 'Bug Bite',
    99: 'Signal Beam',
    100: 'X-Scissor',
    101: 'Flame Charge',
    102: 'Flame Burst',
    103: 'Fire Blast',
    104: 'Brine',
    105: 'Water Pulse',
    106: 'Scald',
    107: 'Hydro Pump',
    108: 'Psychic',
    109: 'Psystrike',
    110: 'Ice Shard',
    111: 'Icy Wind',
    112: 'Frost Breath',
    113: 'Absorb',
    114: 'Giga Drain',
    115: 'Fire Punch',
    116: 'Solar Beam',
    117: 'Leaf Blade',
    118: 'Power Whip',
    119: 'Splash',
    120: 'Acid',
    121: 'Air Cutter',
    122: 'Hurricane',
    123: 'Brick Break',
    124: 'Cut',
    125: 'Swift',
    126: 'Horn Attack',
    127: 'Stomp',
    128: 'Headbutt',
    129: 'Hyper Fang',
    130: 'Slam',
    131: 'Body Slam',
    132: 'Rest',
    133: 'Struggle',
    134: 'Scald',
    135: 'Hydro Pump',
    136: 'Wrap',
    137: 'Wrap',
    200: 'Fury Cutter',
    201: 'Bug Bite',
    202: 'Bite',
    203: 'Sucker Punch',
    204: 'Dragon Breath',
    205: 'Thunder Shock',
    206: 'Spark',
    207: 'Low Kick',
    208: 'Karate Chop',
    209: 'Ember',
    210: 'Wing Attack',
    211: 'Peck',
    212: 'Lick',
    213: 'Shadow Claw',
    214: 'Vine Whip',
    215: 'Razor Leaf',
    216: 'Mud Shot',
    217: 'Ice Shard',
    218: 'Frost Breath',
    219: 'Quick Attack',
    220: 'Scratch',
    221: 'Tackle',
    222: 'Pound',
    223: 'Cut',
    224: 'Poison Jab',
    225: 'Acid',
    226: 'Psycho Cut',
    227: 'Rock Throw',
    228: 'Metal Claw',
    229: 'Bullet Punch',
    230: 'Water Gun',
    231: 'Splash',
    232: 'Water Gun',
    233: 'Mud Slap',
    234: 'Zen Headbutt',
    235: 'Confusion',
    236: 'Poison Sting',
    237: 'Bubble',
    238: 'Feint Attack',
    239: 'Steel Wing',
    240: 'Fire Fang',
    241: 'Rock Smash',
    242: 'Transform',
    243: 'Counter',
    244: 'Powder Snow',
    245: 'Close Combat',
    246: 'Dynamic Punch',
    247: 'Focus Blast',
    248: 'Aurora Beam',
    249: 'Charge Beam',
    250: 'Volt Switch',
    251: 'Wild Charge',
    252: 'Zap Cannon',
    253: 'Dragon Tail',
    254: 'Avalanche',
    255: 'Air Slash',
    256: 'Brave Bird',
    257: 'Sky Attack',
    258: 'Sand Tomb',
    259: 'Rock Blast',
    260: 'Infestation',
    261: 'Struggle Bug',
    262: 'Silver Wind',
    263: 'Astonish',
    264: 'Hex',
    265: 'Night Shade',
    266: 'Iron Tail',
    267: 'Gyro Ball',
    268: 'Heavy Slam',
    269: 'Fire Spin',
    270: 'Overheat',
    271: 'Bullet Seed',
    272: 'Grass Knot',
    273: 'Energy Ball',
    274: 'Extrasensory',
    275: 'Future Sight',
    276: 'Mirror Coat',
    277: 'Outrage',
    278: 'Snarl',
    279: 'Crunch',
    280: 'Foul Play',
    281: 'Hidden Power'
})
class Gym(DocType):
    """elasticsearch-dsl document mapping for a gym marker.

    Stored in the ``marker`` index; ``find_gym`` fuzzy-matches on ``title``.
    """
    title = Text(analyzer='snowball', fields={'raw': Keyword()})  # fuzzy search + exact 'raw' subfield
    description = Text(analyzer='snowball')
    latitude = Float()
    longitude = Float()
    url = Text()  # path fragment appended to https://www.pokemongomap.info
    image = Text()  # thumbnail URL
    monacle_id = Text()  # id of the matching gym in Monocle's data, if any

    class Meta:
        index = 'marker'
def format_list(items):
    """Join ``items`` into an English list phrase.

    ``['a'] -> 'a'``; ``['a', 'b'] -> 'a and b'``;
    ``['a', 'b', 'c'] -> 'a, b and c'``.

    Returns an empty string for an empty sequence (the original raised
    IndexError). Interface is otherwise unchanged.
    """
    if not items:
        return ""
    if len(items) == 1:
        return "{0}".format(items[0])
    # All but the last item comma-separated, then 'and' before the last.
    return ", ".join(items[:-1]) + " and {0}".format(items[-1])
def get_display_name(member):
    """Resolve a Discord member to their TrainerDex username.

    Falls back to ``member.display_name`` when the TrainerDex lookup fails
    for any reason (network error, unknown user, etc.).
    """
    try:
        return trainerdex.Client().get_discord_user(member.id).owner().trainer(all_=False).username
    except Exception:
        # The original bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort fallback
        # without masking interpreter-level signals.
        return member.display_name
# Register the default elasticsearch-dsl connection used by Gym.get()/search.
connections.create_connection(hosts=['localhost'])
class Gyms:
    """Pokemon Go Gyms!

    Discord cog that looks up gyms in Elasticsearch and tracks raid
    attendance in memory (interested / going / arrived / done). All state
    is per-process and lost on restart.
    """

    def __init__(self, bot):
        self.bot = bot
        self.client = Elasticsearch()
        # NOTE(review): secret token committed in source — move to config.
        self.monacle = MonacleScraper('https://kentpogomap.uk/raw_data', 'BIDoJSaHxR0Cz3mqJvI5kShtUc0CW/HPwK/CrRtEZhU=')
        self.going_users = defaultdict(set)  # gym.id: list of users
        self.arrived_users = defaultdict(set)  # gym.id: list of users
        self.users_going = {}  # user_id: gym.id
        self.users_arrived = {}  # user_id: gym.id
        self.user_groups = defaultdict(set)  # user_id: list of users

    async def find_gym(self, gym):
        """Fuzzy-match ``gym`` against marker titles.

        Returns ``(hit, monacle_gym)``; announces failure in-channel and
        returns ``(None, None)`` when nothing matches.
        """
        s = Search(using=self.client, index="marker").query("match", title={'query': gym, 'fuzziness': 2, 'slop': 1})
        response = s.execute()
        if response.hits.total == 0:
            await self.bot.say("I couldn't find that gym")
            return None, None
        hit = response[0]
        monacle_gym = await self.get_monacle_gym(hit)
        return hit, monacle_gym

    async def get_monacle_gym(self, hit):
        # Monocle lookup currently disabled; every caller handles None, so
        # the raid-specific branches below are effectively dead for now.
        return None

    @commands.command(pass_context=True)
    async def gym(self, ctx, *, gym: str):
        """
        Lookup a gym, responds with an image, title, description and a google maps link.
        Gyms that have active raids are prioritized over gyms that do not.
        """
        hit, monacle_gym = await self.find_gym(gym)
        if not hit:
            return
        description = "{}\n[Get Directions](https://www.google.com/maps/?daddr={},{})".format(hit.description, hit.latitude, hit.longitude)
        embed = discord.Embed(title=hit.title, url='https://www.pokemongomap.info'+hit.url, description=description)
        embed.set_thumbnail(url=hit.image)
        # NOTE(review): Google Maps API key committed in source — move to config.
        if monacle_gym:
            # Static map marker colored by owning team.
            embed.set_image(url='https://maps.googleapis.com/maps/api/staticmap?center={0},{1}&zoom=15&size=250x125&maptype=roadmap&markers=color:{3}%7C{0},{1}&key={2}'.format(hit.latitude, hit.longitude, 'AIzaSyCEadifeA8X02v2OKv-orZWm8nQf1Q2EZ4', "0x{:02X}".format(TEAM_COLORS[monacle_gym.team])))
            embed.color = TEAM_COLORS[monacle_gym.team]
            if monacle_gym.slots_available > 0:
                embed.add_field(name='Slots available', value=monacle_gym.slots_available)
            embed.add_field(name='Owned by', value=monacle_gym.team_name)
            # Only show raid details while the raid is actually running.
            if monacle_gym.raid_start and monacle_gym.raid_start <= datetime.datetime.now() and monacle_gym.raid_end >= datetime.datetime.now():
                embed.add_field(name='Raid level', value=monacle_gym.raid_level)
                embed.add_field(name='Raid Pokemon', value=monacle_gym.raid_pokemon.name)
                embed.add_field(name='CP', value=monacle_gym.raid_pokemon.cp)
                embed.add_field(name='Moveset', value=MOVES[monacle_gym.raid_pokemon.move_1]+' / '+MOVES[monacle_gym.raid_pokemon.move_2])
                embed.add_field(name='Started at', value=monacle_gym.raid_start.strftime("%H:%M:%S"))
                embed.add_field(name='Ends at', value="{} ({})".format(monacle_gym.raid_end.strftime("%H:%M:%S"), humanize.naturaltime(datetime.datetime.now()-monacle_gym.raid_end)))
        else:
            embed.set_image(url='https://maps.googleapis.com/maps/api/staticmap?center={0},{1}&zoom=15&size=250x125&maptype=roadmap&markers=color:{3}%7C{0},{1}&key={2}'.format(hit.latitude, hit.longitude, 'AIzaSyCEadifeA8X02v2OKv-orZWm8nQf1Q2EZ4', 'white'))
        await self.bot.say(embed=embed)

    @commands.command(pass_context=True, no_pm=True)
    async def interested(self, ctx, *, gym: str):
        """State you're interested in going to a raid"""
        # Strip raw mentions so they don't pollute the gym-name search.
        gym = re.sub(RE_MENTION, '', gym).strip()
        hit, monacle_gym = await self.find_gym(gym)
        if not hit:
            return
        message = get_display_name(ctx.message.author)
        if monacle_gym and monacle_gym.raid_start and monacle_gym.raid_start <= datetime.datetime.now() and monacle_gym.raid_end >= datetime.datetime.now():
            message += " is interested in the {0} raid".format(monacle_gym.raid_pokemon.name)
        else:
            await self.bot.say("I can't see a raid at {}, sorry.".format(hit.title))
            # NOTE(review): missing ``await`` — this returns the coroutine
            # without running it, so the command message is never deleted.
            return self.bot.delete_message(ctx.message)
        message += ' at {}'.format(hit.title)
        message += "."
        await self.bot.say(message)
        # Mirror announcements into a #ticker channel when one exists.
        if discord.utils.get(ctx.message.server.channels, name='ticker'):
            ticker = discord.utils.get(ctx.message.server.channels, name='ticker')
            await self.bot.send_message(ticker, message)
        await self.bot.delete_message(ctx.message)

    @commands.command(pass_context=True, no_pm=True)
    async def addgoing(self, ctx, *, gym: str):
        """Used to set other trainers as going to a raid"""
        return await self._going(ctx, gym, False)

    @commands.command(pass_context=True, no_pm=True)
    async def going(self, ctx, *, gym: str):
        """Used to set yourself and possibly other trainers as going to a raid"""
        return await self._going(ctx, gym, True)

    async def _going(self, ctx, gym, add_author_to_group):
        # Shared implementation of .going / .addgoing.
        gym = re.sub(RE_MENTION, '', gym).strip()
        hit, monacle_gym = await self.find_gym(gym)
        # Author still marked at a previous raid: clear that state first.
        if ctx.message.author in dict(list(self.users_going.items()) + list(self.users_arrived.items())) and add_author_to_group:
            await self._notgoing(ctx)
            temp1 = await self.bot.say('You forgot to do `.done` at your last raid but I sorted that.')
        # "+N" suffix means N unnamed extra trainers are tagging along.
        extra_users = re.search(r'\+(\d+)', gym)
        if not hit:
            return
        message = get_display_name(ctx.message.author)
        if extra_users:
            # NOTE(review): group(0) is e.g. '+3'; int() tolerates the '+',
            # but group(1) is the intended capture. Also note ``message`` is
            # rebuilt below, which silently drops this " +N" suffix.
            extra_users = int(extra_users.group(0))
            message += " +{}".format(extra_users)
        else:
            extra_users = 0
        if add_author_to_group:
            self.going_users[hit.meta.id].add(ctx.message.author)
        group = set()
        users = []
        if add_author_to_group:
            group.add(ctx.message.author)
            users.append(ctx.message.author)
        if ctx.message.mentions:
            group.update(ctx.message.mentions)
            users = list(group)  # remove duplicates
            if ctx.message.author in ctx.message.mentions:
                users.remove(ctx.message.author)  # can't raid with yourself
        for user in users:
            self.going_users[hit.meta.id].add(user)
        message = format_list(["{0}".format(get_display_name(user)) for user in users])
        if len(users) == 1:
            message += ' is'
        else:
            message += ' are'
        message += ' going to {}'.format(hit.title)
        if monacle_gym and monacle_gym.raid_start and monacle_gym.raid_start <= datetime.datetime.now() and monacle_gym.raid_end >= datetime.datetime.now():
            message += " for a raid on {0}".format(monacle_gym.raid_pokemon.name)
        message += "."
        for user in users:
            self.users_going[user] = hit.meta.id
            self.user_groups[user].update(group)
        await self.bot.say(message)
        if discord.utils.get(ctx.message.server.channels, name='ticker'):
            ticker = discord.utils.get(ctx.message.server.channels, name='ticker')
            await self.bot.send_message(ticker, message)
        await self.bot.delete_message(ctx.message)

    @commands.command(pass_context=True, no_pm=True)
    async def notgoing(self, ctx):
        """No, not going anymore m8"""
        message = await self._notgoing(ctx)
        await self.bot.say(message)
        if discord.utils.get(ctx.message.server.channels, name='ticker'):
            ticker = discord.utils.get(ctx.message.server.channels, name='ticker')
            await self.bot.send_message(ticker, message)
        return await self.bot.delete_message(ctx.message)

    async def _notgoing(self, ctx):
        # Remove the author from whichever raid they are marked at
        # (arrived takes precedence over going) and from their group.
        gym_id = self.users_arrived.get(ctx.message.author, None)
        if not gym_id:
            gym_id = self.users_going.get(ctx.message.author, None)
        if not gym_id:
            await self.bot.say('You are not marked as going to any raids')
            # NOTE(review): returns None here; ``notgoing`` then passes None
            # to bot.say().
            return
        gym = Gym.get(id=gym_id)
        monacle_gym = await self.get_monacle_gym(gym)
        if monacle_gym and monacle_gym.raid_start and monacle_gym.raid_start <= datetime.datetime.now() and monacle_gym.raid_end >= datetime.datetime.now():
            message = "{} is not going to the {} raid at {}".format(get_display_name(ctx.message.author), monacle_gym.raid_pokemon.name, gym.title)
        else:
            message = "{} is not going to {}".format(get_display_name(ctx.message.author), gym.title)
        self.arrived_users[gym_id].discard(ctx.message.author)
        self.going_users[gym_id].discard(ctx.message.author)
        if ctx.message.author in self.users_arrived:
            del self.users_arrived[ctx.message.author]
        if ctx.message.author in self.users_going:
            del self.users_going[ctx.message.author]
        # Drop the author from every group member's view, then forget theirs.
        for user in self.user_groups[ctx.message.author]:
            if user != ctx.message.author:
                self.user_groups[user].discard(ctx.message.author)
        del self.user_groups[ctx.message.author]
        return message

    @commands.command(pass_context=True)
    async def who(self, ctx, *, gym: str):
        """
        People try to put us down
        Just because we get around
        Things they do look awful cold
        I hope I die before I get old
        """
        hit, monacle_gym = await self.find_gym(gym)
        if not hit:
            return
        # Builds e.g. "A and B are on the way and C has arrived at <gym>."
        # or "Nobody is going to <gym>" when both sets are empty.
        message = ""
        if len(self.going_users[hit.meta.id]) == 0 and len(self.arrived_users[hit.meta.id]) == 0:
            message = "Nobody is going"
        if len(self.going_users[hit.meta.id]) > 0:
            message += format_list([get_display_name(user) for user in self.going_users[hit.meta.id]])
            message += " are" if len(self.going_users[hit.meta.id]) > 1 else " is"
            message += " on the way"
        if len(self.arrived_users[hit.meta.id]) > 0 and len(self.going_users[hit.meta.id]) > 0:
            message += " and "
        if len(self.arrived_users[hit.meta.id]) > 0:
            message += format_list([get_display_name(user) for user in self.arrived_users[hit.meta.id]])
            message += " have arrived at"
        else:
            message += " to"
        if monacle_gym and monacle_gym.raid_start and monacle_gym.raid_start <= datetime.datetime.now() and monacle_gym.raid_end >= datetime.datetime.now():
            message += " the {} raid at {}.\n".format(monacle_gym.raid_pokemon.name, hit.title)
        else:
            message += " "+hit.title
        await self.bot.say(message)
        await self.bot.delete_message(ctx.message)

    @commands.command(pass_context=True)
    async def arrived(self, ctx, *members: discord.Member):
        """You know when you were at school and they would do the register and you'd get really paranoid about how you said 'here'. No worries here, only one way to say it- [p]arrived!"""
        gym_id = self.users_arrived.get(ctx.message.author, None)
        if not gym_id:
            gym_id = self.users_going.get(ctx.message.author, None)
        if not gym_id:
            await self.bot.say('You are not marked as going to any raids')
            return
        gym = Gym.get(id=gym_id)
        monacle_gym = await self.get_monacle_gym(gym)
        # The author's whole group (plus any mentioned members' groups)
        # arrives together.
        arrived = set(self.user_groups[ctx.message.author])
        for member in members:
            arrived.update(self.user_groups[member])
        message = format_list([get_display_name(user) for user in arrived])
        if len(arrived) == 1:
            message += ' has'
        else:
            message += ' have'
        message += ' arrived at {}'.format(gym.title)
        if monacle_gym and monacle_gym.raid_start and monacle_gym.raid_start <= datetime.datetime.now() and monacle_gym.raid_end >= datetime.datetime.now():
            message += " for the raid on {0}".format(monacle_gym.raid_pokemon.name)
        message += "."
        self.users_arrived[ctx.message.author] = gym_id
        for user in arrived:
            if user in self.user_groups:
                del self.user_groups[user]
            if user in self.users_going:
                del self.users_going[user]
            self.arrived_users[gym_id].add(user)
            # NOTE(review): set.remove raises KeyError when ``user`` is not
            # in the going set (e.g. already arrived); discard would be safe.
            self.going_users[gym_id].remove(user)
        await self.bot.say(message)
        if discord.utils.get(ctx.message.server.channels, name='ticker'):
            ticker = discord.utils.get(ctx.message.server.channels, name='ticker')
            await self.bot.send_message(ticker, message)
        await self.bot.delete_message(ctx.message)

    @commands.command(pass_context=True)
    async def done(self, ctx):
        """Finished already? That was quick!"""
        gym_id = self.users_arrived.get(ctx.message.author, None)
        if not gym_id:
            gym_id = self.users_going.get(ctx.message.author, None)
        if not gym_id:
            await self.bot.say('You are not marked as going to any raids')
            return
        gym = Gym.get(id=gym_id)
        monacle_gym = await self.get_monacle_gym(gym)
        if monacle_gym and monacle_gym.raid_start and monacle_gym.raid_start <= datetime.datetime.now() and monacle_gym.raid_end >= datetime.datetime.now():
            message = "{} has finished the {} raid at {}".format(get_display_name(ctx.message.author), monacle_gym.raid_pokemon.name, gym.title)
        else:
            message = "{} is finished at {}".format(get_display_name(ctx.message.author), gym.title)
        # Same cleanup as _notgoing, but with a "finished" announcement.
        self.arrived_users[gym_id].discard(ctx.message.author)
        self.going_users[gym_id].discard(ctx.message.author)
        if ctx.message.author in self.users_arrived:
            del self.users_arrived[ctx.message.author]
        if ctx.message.author in self.users_going:
            del self.users_going[ctx.message.author]
        for user in self.user_groups[ctx.message.author]:
            if user != ctx.message.author:
                self.user_groups[user].discard(ctx.message.author)
        del self.user_groups[ctx.message.author]
        await self.bot.say(message)
        if discord.utils.get(ctx.message.server.channels, name='ticker'):
            ticker = discord.utils.get(ctx.message.server.channels, name='ticker')
            await self.bot.send_message(ticker, message)
        await self.bot.delete_message(ctx.message)

    @commands.command(pass_context=True)
    async def raids(self, ctx):
        """Not a list of active raids"""
        message = ''
        gyms = set(list(self.going_users.keys())+list(self.arrived_users.keys()))
        if not gyms:
            message = 'There are no raids on at the moment'
        for gym_id in gyms:
            gym = Gym.get(id=gym_id)
            monacle_gym = await self.get_monacle_gym(gym)
            if monacle_gym and monacle_gym.raid_start and monacle_gym.raid_start <= datetime.datetime.now() and monacle_gym.raid_end >= datetime.datetime.now():
                num_users = len(self.going_users[gym_id]) + len(self.arrived_users[gym_id])
                message += str(num_users)
                if num_users == 1:
                    message += ' user is'
                else:
                    message += ' users are'
                message += ' on the way to the {} raid at {} - ends at {} ({}).\n'.format(monacle_gym.raid_pokemon.name, gym.title, monacle_gym.raid_end.strftime("%H:%M:%S"), humanize.naturaltime(datetime.datetime.now()-monacle_gym.raid_end))
        # NOTE(review): if gyms exist but none has an active raid, ``message``
        # stays '' and bot.say('') is sent — verify how the library handles it.
        await self.bot.say(message)
        await self.bot.delete_message(ctx.message)
def setup(bot):
    # discord.py extension entry point: register the cog with the bot.
    bot.add_cog(Gyms(bot))
| |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys
from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
get_executable, in_venv)
logger = logging.getLogger(__name__)
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>'''.strip()
# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
import sys, re
def _resolve(module, func):
__import__(module)
mod = sys.modules[module]
parts = func.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
try:
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
func = _resolve('%(module)s', '%(func)s')
rc = func() # None interpreted as 0
except Exception as e: # only supporting Python >= 2.6
sys.stderr.write('%%s\\n' %% e)
rc = 1
sys.exit(rc)
'''
class ScriptMaker(object):
"""
A class to copy or create scripts from source scripts or callable
specifications.
"""
script_template = SCRIPT_TEMPLATE
executable = None # for shebangs
def __init__(self, source_dir, target_dir, add_launchers=True,
dry_run=False, fileop=None):
self.source_dir = source_dir
self.target_dir = target_dir
self.add_launchers = add_launchers
self.force = False
self.clobber = False
# It only makes sense to set mode bits on POSIX.
self.set_mode = (os.name == 'posix') or (os.name == 'java' and
os._name == 'posix')
self.variants = set(('', 'X.Y'))
self._fileop = fileop or FileOperator(dry_run)
def _get_alternate_executable(self, executable, options):
if options.get('gui', False) and os.name == 'nt':
dn, fn = os.path.split(executable)
fn = fn.replace('python', 'pythonw')
executable = os.path.join(dn, fn)
return executable
if sys.platform.startswith('java'): # pragma: no cover
def _is_shell(self, executable):
"""
Determine if the specified executable is a script
(contains a #! line)
"""
try:
with open(executable) as fp:
return fp.read(2) == '#!'
except (OSError, IOError):
logger.warning('Failed to open %s', executable)
return False
def _fix_jython_executable(self, executable):
if self._is_shell(executable):
# Workaround for Jython is not needed on Linux systems.
import java
if java.lang.System.getProperty('os.name') == 'Linux':
return executable
elif executable.lower().endswith('jython.exe'):
# Use wrapper exe for Jython on Windows
return executable
return '/usr/bin/env %s' % executable
def _get_shebang(self, encoding, question_interp=b'', options=None):
enquote = True
if self.executable:
executable = self.executable
enquote = False # assume this will be taken care of
elif not sysconfig.is_python_build():
executable = get_executable()
elif in_venv():
executable = os.path.join(sysconfig.get_path('scripts'),
'python%s' % sysconfig.get_config_var('EXE'))
else:
executable = os.path.join(
sysconfig.get_config_var('BINDIR'),
'python%s%s' % (sysconfig.get_config_var('VERSION'),
sysconfig.get_config_var('EXE')))
if options:
executable = self._get_alternate_executable(executable, options)
if sys.platform.startswith('java'): # pragma: no cover
executable = self._fix_jython_executable(executable)
# Normalise case for Windows
executable = os.path.normcase(executable)
# If the user didn't specify an executable, it may be necessary to
# cater for executable paths with spaces (not uncommon on Windows)
if enquote and ' ' in executable:
executable = '"%s"' % executable
# Issue #51: don't use fsencode, since we later try to
# check that the shebang is decodable using utf-8.
executable = executable.encode('utf-8')
# in case of IronPython, play safe and enable frames support
if (sys.platform == 'cli' and '-X:Frames' not in question_interp
and '-X:FullFrames' not in question_interp):
question_interp += b' -X:Frames'
shebang = b'#!' + executable + question_interp + b'\n'
# Python parser starts to read a script using UTF-8 until
# it gets a #coding:xxx cookie. The shebang has to be the
# first line of a file, the #coding:xxx cookie cannot be
# written before. So the shebang has to be decodable from
# UTF-8.
try:
shebang.decode('utf-8')
except UnicodeDecodeError:
raise ValueError(
'The shebang (%r) is not decodable from utf-8' % shebang)
# If the script is encoded to a custom encoding (use a
# #coding:xxx cookie), the shebang has to be decodable from
# the script encoding too.
if encoding != 'utf-8':
try:
shebang.decode(encoding)
except UnicodeDecodeError:
raise ValueError(
'The shebang (%r) is not decodable '
'from the script encoding (%r)' % (shebang, encoding))
return shebang
def _get_script_text(self, entry):
return self.script_template % dict(module=entry.prefix,
func=entry.suffix)
manifest = _DEFAULT_MANIFEST
def get_manifest(self, exename):
base = os.path.basename(exename)
return self.manifest % base
def _write_script(self, names, shebang, script_bytes, filenames, ext):
use_launcher = self.add_launchers and os.name == 'nt'
linesep = os.linesep.encode('utf-8')
if not use_launcher:
script_bytes = shebang + linesep + script_bytes
else:
if ext == 'py':
launcher = self._get_launcher('t')
else:
launcher = self._get_launcher('w')
stream = BytesIO()
with ZipFile(stream, 'w') as zf:
zf.writestr('__main__.py', script_bytes)
zip_data = stream.getvalue()
script_bytes = launcher + shebang + linesep + zip_data
for name in names:
outname = os.path.join(self.target_dir, name)
if use_launcher:
n, e = os.path.splitext(outname)
if e.startswith('.py'):
outname = n
outname = '%s.exe' % outname
try:
self._fileop.write_binary_file(outname, script_bytes)
except Exception:
# Failed writing an executable - it might be in use.
logger.warning('Failed to write executable - trying to '
'use .deleteme logic')
dfname = '%s.deleteme' % outname
if os.path.exists(dfname):
os.remove(dfname) # Not allowed to fail here
os.rename(outname, dfname) # nor here
self._fileop.write_binary_file(outname, script_bytes)
logger.debug('Able to replace executable using '
'.deleteme logic')
try:
os.remove(dfname)
except Exception:
pass # still in use - ignore error
else:
if os.name == 'nt' and not outname.endswith('.' + ext):
outname = '%s.%s' % (outname, ext)
if os.path.exists(outname) and not self.clobber:
logger.warning('Skipping existing file %s', outname)
continue
self._fileop.write_binary_file(outname, script_bytes)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
def _make_script(self, entry, filenames, options=None):
question_interp = b''
if options:
args = options.get('interpreter_args', [])
if args:
args = ' %s' % ' '.join(args)
question_interp = args.encode('utf-8')
shebang = self._get_shebang('utf-8', question_interp, options=options)
script = self._get_script_text(entry).encode('utf-8')
name = entry.name
scriptnames = set()
if '' in self.variants:
scriptnames.add(name)
if 'X' in self.variants:
scriptnames.add('%s%s' % (name, sys.version[0]))
if 'X.Y' in self.variants:
scriptnames.add('%s-%s' % (name, sys.version[:3]))
if options and options.get('gui', False):
ext = 'pyw'
else:
ext = 'py'
self._write_script(scriptnames, shebang, script, filenames, ext)
def _copy_script(self, script, filenames):
adjust = False
script = os.path.join(self.source_dir, convert_path(script))
outname = os.path.join(self.target_dir, os.path.basename(script))
if not self.force and not self._fileop.newer(script, outname):
logger.debug('not copying %s (up-to-date)', script)
return
# Always open the file, but ignore failures in dry-run mode --
# that way, we'll get accurate feedback if we can read the
# script.
try:
f = open(script, 'rb')
except IOError:
if not self.dry_run:
raise
f = None
else:
encoding, lines = detect_encoding(f.readline)
f.seek(0)
first_line = f.readline()
if not first_line:
logger.warning('%s: %s is an empty file (skipping)',
self.get_command_name(), script)
return
match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
if match:
adjust = True
question_interp = match.group(1) or b''
if not adjust:
if f:
f.close()
self._fileop.copy_file(script, outname)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
else:
logger.info('copying and adjusting %s -> %s', script,
self.target_dir)
if not self._fileop.dry_run:
shebang = self._get_shebang(encoding, question_interp)
if b'pythonw' in first_line:
ext = 'pyw'
else:
ext = 'py'
n = os.path.basename(outname)
self._write_script([n], shebang, f.read(), filenames, ext)
if f:
f.close()
@property
def dry_run(self):
return self._fileop.dry_run
@dry_run.setter
def dry_run(self, value):
self._fileop.dry_run = value
if os.name == 'nt':
# Executable launcher support.
# Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
def _get_launcher(self, kind):
if struct.calcsize('P') == 8: # 64-bit
bits = '64'
else:
bits = '32'
name = '%s%s.exe' % (kind, bits)
# Issue 31: don't hardcode an absolute package name, but
# determine it relative to the current package
distlib_package = __name__.rsplit('.', 1)[0]
result = finder(distlib_package).find(name).bytes
return result
# Public API follows
def make(self, specification, options=None):
"""
Make a script.
:param specification: The specification, which is either a valid export
entry specification (to make a script from a
callable) or a filename (to make a script by
copying from a source location).
:param options: A dictionary of options controlling script generation.
:return: A list of all absolute pathnames written to.
"""
filenames = []
entry = get_export_entry(specification)
if entry is None:
self._copy_script(specification, filenames)
else:
self._make_script(entry, filenames, options=options)
return filenames
def make_multiple(self, specifications, options=None):
    """
    Make scripts from a list of specifications.

    :param specifications: A list of specifications.
    :param options: A dictionary of options controlling script generation.
    :return: A list of all absolute pathnames written to.
    """
    written = []
    for spec in specifications:
        written.extend(self.make(spec, options))
    return written
| |
"""
Contains a CLICommand that can issue PUT requests.
Uses the following from :py:class:`swiftly.cli.context.CLIContext`:
=============== ====================================================
cdn True if the CDN Management URL should be used
instead of the Storage URL.
client_manager For connecting to Swift.
concurrency The number of concurrent actions that can be
performed.
different Set to True to check if the local file is different
than an existing object before uploading.
empty Set to True if you wish to send an empty body with
the PUT rather than reading from the io_manager's
stdin.
headers A dict of headers to send.
input\_ A string representing where input should be obtained
from. If None, the io_manager's stdin will be used.
If a directory path is specified, a set of PUTs will
be generated for each item in the directory
structure. If a file path is specified, that single
file will be used as input.
io_manager For directing output and obtaining input if needed.
newer Set to True to check if the local file is newer than
an existing object before uploading.
query A dict of query parameters to send.
seek Where to seek to in the input\_ before uploading;
usually just used by recursive calls with segmented
objects.
segment_size The max size of a file before switching to a
segmented object and the max size of each object
segment.
static_segments Set to True to use static large object support
instead of dynamic large object support.
=============== ====================================================
"""
"""
Copyright 2011-2013 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import os
from swiftly.cli.command import CLICommand, ReturnCode
from swiftly.concurrency import Concurrency
from swiftly.dencrypt import AES256CBC, aes_encrypt
from swiftly.filelikeiter import FileLikeIter
def cli_put_directory_structure(context, path):
    """
    Performs PUTs rooted at the path using a directory structure
    pointed to by context.input\_.

    Empty directories become 'text/directory' marker objects; every file
    becomes an object named after its path relative to context.input\_.
    Uploads run concurrently via the context's Concurrency pool.

    See :py:mod:`swiftly.cli.put` for context usage information.

    See :py:class:`CLIPut` for more information.
    """
    if not context.input_:
        raise ReturnCode(
            'called cli_put_directory_structure without context.input_ set')
    if not os.path.isdir(context.input_):
        raise ReturnCode(
            '%r is not a directory' % context.input_)
    if not path:
        raise ReturnCode(
            'uploading a directory structure requires at least a container '
            'name')
    # Ensure the target container exists before spawning object PUTs.
    new_context = context.copy()
    new_context.input_ = None
    container = path.split('/', 1)[0]
    cli_put_container(new_context, container)
    # ilen is the length of the root prefix to strip from walked paths
    # (including the trailing separator).
    ilen = len(context.input_)
    if not context.input_.endswith(os.sep):
        ilen += 1
    conc = Concurrency(context.concurrency)
    for (dirpath, dirnames, filenames) in os.walk(context.input_):
        if not dirnames and not filenames:
            # Empty directory: PUT a zero-byte text/directory marker object.
            new_context = context.copy()
            new_context.headers = dict(context.headers)
            new_context.headers['content-type'] = 'text/directory'
            # NOTE(review): the mtime recorded here is taken from the root
            # context.input_, not from dirpath — confirm this is intended.
            new_context.headers['x-object-meta-mtime'] = \
                '%f' % os.path.getmtime(context.input_)
            new_context.input_ = None
            new_context.empty = True
            new_path = path
            if path[-1] != '/':
                new_path += '/'
            new_path += dirpath[ilen:]
            # Drain any finished workers first, re-raising the first failure.
            for (exc_type, exc_value, exc_tb, result) in \
                    conc.get_results().itervalues():
                if exc_value:
                    conc.join()
                    raise exc_value
            conc.spawn(new_path, cli_put_object, new_context, new_path)
        else:
            # Non-empty directory: PUT each contained file as an object.
            for fname in filenames:
                new_context = context.copy()
                new_context.input_ = os.path.join(dirpath, fname)
                new_path = path
                if path[-1] != '/':
                    new_path += '/'
                if dirpath[ilen:]:
                    new_path += dirpath[ilen:] + '/'
                new_path += fname
                for (exc_type, exc_value, exc_tb, result) in \
                        conc.get_results().itervalues():
                    if exc_value:
                        conc.join()
                        raise exc_value
                conc.spawn(new_path, cli_put_object, new_context, new_path)
    # Wait for all remaining uploads and surface the first failure, if any.
    conc.join()
    for (exc_type, exc_value, exc_tb, result) in \
            conc.get_results().itervalues():
        if exc_value:
            raise exc_value
def cli_put_account(context):
    """
    Performs a PUT on the account.

    See :py:mod:`swiftly.cli.put` for context usage information.

    See :py:class:`CLIPut` for more information.
    """
    # Pick the request body: none by default, stdin for '-', else a file.
    if not context.input_:
        request_body = None
    elif context.input_ == '-':
        request_body = context.io_manager.get_stdin()
    else:
        request_body = open(context.input_, 'rb')
    with context.client_manager.with_client() as client:
        status, reason, headers, contents = client.put_account(
            headers=context.headers, query=context.query, cdn=context.cdn,
            body=request_body)
        # Fully consume any streamed response body.
        if hasattr(contents, 'read'):
            contents.read()
    if status // 100 != 2:
        raise ReturnCode('putting account: %s %s' % (status, reason))
def cli_put_container(context, path):
    """
    Performs a PUT on the container.

    See :py:mod:`swiftly.cli.put` for context usage information.

    See :py:class:`CLIPut` for more information.
    """
    path = path.rstrip('/')
    if '/' in path:
        raise ReturnCode('called cli_put_container with object %r' % path)
    # Pick the request body: none by default, stdin for '-', else a file.
    if not context.input_:
        request_body = None
    elif context.input_ == '-':
        request_body = context.io_manager.get_stdin()
    else:
        request_body = open(context.input_, 'rb')
    with context.client_manager.with_client() as client:
        status, reason, headers, contents = client.put_container(
            path, headers=context.headers, query=context.query,
            cdn=context.cdn, body=request_body)
        # Fully consume any streamed response body.
        if hasattr(contents, 'read'):
            contents.read()
    if status // 100 != 2:
        raise ReturnCode(
            'putting container %r: %s %s' % (path, status, reason))
def cli_put_object(context, path):
    """
    Performs a PUT on the object.

    Handles empty objects, stdin uploads, conditional uploads (--newer /
    --different), client-side encryption, and transparent segmentation of
    files larger than context.segment_size (dynamic or static large
    objects).

    See :py:mod:`swiftly.cli.put` for context usage information.

    See :py:class:`CLIPut` for more information.
    """
    if context.different and context.encrypt:
        raise ReturnCode(
            'context.different will not work properly with context.encrypt '
            'since encryption may change the object size')
    put_headers = dict(context.headers)
    if context.empty:
        # Explicit zero-byte PUT; no input is read at all.
        body = ''
        put_headers['content-length'] = '0'
    elif not context.input_ or context.input_ == '-':
        body = context.io_manager.get_stdin()
    elif context.seek is not None:
        # Recursive segment upload: read one slice of the source file.
        if context.encrypt:
            raise ReturnCode(
                'putting object %r: Cannot use encryption and context.seek' %
                path)
        body = open(context.input_, 'rb')
        body.seek(context.seek)
    else:
        l_mtime = os.path.getmtime(context.input_)
        l_size = os.path.getsize(context.input_)
        put_headers['content-length'] = str(l_size)
        if context.newer or context.different:
            # HEAD the existing object (if any) so the upload can be
            # skipped when it is not needed.
            r_mtime = None
            r_size = None
            with context.client_manager.with_client() as client:
                status, reason, headers, contents = client.head_object(
                    *path.split('/', 1), headers=context.headers,
                    query=context.query, cdn=context.cdn)
                if hasattr(contents, 'read'):
                    contents.read()
            if status // 100 == 2:
                r_mtime = headers.get('x-object-meta-mtime')
                if r_mtime:
                    try:
                        r_mtime = float(r_mtime)
                    except ValueError:
                        r_mtime = None
                r_size = headers.get('content-length')
                if r_size:
                    try:
                        r_size = int(r_size)
                    except ValueError:
                        r_size = None
            elif status != 404:
                raise ReturnCode(
                    'could not head %r for conditional check; skipping put: '
                    '%s %s' % (path, status, reason))
            # BUGFIX: this previously read "context.newer and r_mtime is
            # not None or l_mtime <= r_mtime". Because "and" binds tighter
            # than "or", any existing remote mtime skipped the upload
            # unconditionally, and a missing one compared l_mtime against
            # None. With --newer we skip only when the local file is not
            # newer than the remote copy.
            if context.newer and r_mtime is not None and l_mtime <= r_mtime:
                return
            if context.different and r_mtime is not None and \
                    l_mtime == r_mtime and r_size is not None and \
                    l_size == r_size:
                return
        put_headers['x-object-meta-mtime'] = '%f' % l_mtime
        size = os.path.getsize(context.input_)
        if size > context.segment_size:
            # Too large for a single object: upload numbered segments into
            # a "<container>_segments" container, then PUT a manifest.
            if context.encrypt:
                raise ReturnCode(
                    'putting object %r: Cannot use encryption for objects '
                    'greater than the segment size' % path)
            new_context = context.copy()
            new_context.input_ = None
            new_context.headers = None
            new_context.query = None
            container = path.split('/', 1)[0] + '_segments'
            cli_put_container(new_context, container)
            prefix = container + '/' + path.split('/', 1)[1]
            prefix = '%s/%s/%s/' % (prefix, l_mtime, size)
            conc = Concurrency(context.concurrency)
            start = 0
            segment = 0
            path2info = {}
            while start < size:
                new_context = context.copy()
                new_context.headers = dict(context.headers)
                new_context.headers['content-length'] = str(min(
                    size - start, context.segment_size))
                new_context.seek = start
                new_path = '%s%08d' % (prefix, segment)
                # Drain finished workers, re-raising the first failure and
                # recording each segment's (size, etag).
                for (ident, (exc_type, exc_value, exc_tb, result)) in \
                        conc.get_results().iteritems():
                    if exc_value:
                        conc.join()
                        raise exc_value
                    path2info[ident] = result
                conc.spawn(
                    new_path, cli_put_object, new_context, new_path)
                segment += 1
                start += context.segment_size
            conc.join()
            for (ident, (exc_type, exc_value, exc_tb, result)) in \
                    conc.get_results().iteritems():
                if exc_value:
                    raise exc_value
                path2info[ident] = result
            if context.static_segments:
                # Static large object: the manifest body is a JSON list of
                # the segments, PUT with ?multipart-manifest=put.
                body = json.dumps([
                    {'path': '/' + p, 'size_bytes': s, 'etag': e}
                    for p, (s, e) in sorted(path2info.iteritems())])
                put_headers['content-length'] = str(len(body))
                context.query['multipart-manifest'] = 'put'
            else:
                # Dynamic large object: empty manifest body plus the
                # x-object-manifest header naming the segment prefix.
                body = ''
                put_headers['content-length'] = '0'
                put_headers['x-object-manifest'] = prefix
        else:
            body = open(context.input_, 'rb')
    with context.client_manager.with_client() as client:
        if context.encrypt:
            content_length = put_headers.get('content-length')
            if content_length:
                content_length = int(content_length)
            if hasattr(body, 'read'):
                body = FileLikeIter(aes_encrypt(
                    context.encrypt, body, preamble=AES256CBC,
                    chunk_size=getattr(client, 'chunk_size', 65536),
                    content_length=content_length))
            else:
                body = FileLikeIter(aes_encrypt(
                    context.encrypt, FileLikeIter([body]), preamble=AES256CBC,
                    chunk_size=getattr(client, 'chunk_size', 65536),
                    content_length=content_length))
            # Ciphertext length differs from plaintext length, so drop the
            # declared length and let the transfer be chunked.
            if 'content-length' in put_headers:
                del put_headers['content-length']
        container, obj = path.split('/', 1)
        status, reason, headers, contents = client.put_object(
            container, obj, body, headers=put_headers, query=context.query,
            cdn=context.cdn)
        if hasattr(contents, 'read'):
            contents = contents.read()
    if status // 100 != 2:
        raise ReturnCode(
            'putting object %r: %s %s %r' % (path, status, reason, contents))
    if context.seek is not None:
        # Segment uploads report (content_length, etag) back to the parent
        # call so it can build a static manifest; HEAD the object when the
        # PUT response did not supply both values.
        content_length = put_headers.get('content-length')
        etag = headers.get('etag')
        if content_length and etag:
            content_length = int(content_length)
        else:
            with context.client_manager.with_client() as client:
                container, obj = path.split('/', 1)
                status, reason, headers, contents = client.head_object(
                    container, obj, cdn=context.cdn)
                if hasattr(contents, 'read'):
                    contents = contents.read()
            if status // 100 != 2:
                raise ReturnCode(
                    'heading object %r: %s %s %r' %
                    (path, status, reason, contents))
            content_length = headers.get('content-length')
            etag = headers.get('etag')
            if content_length:
                content_length = int(content_length)
        return content_length, etag
def cli_put(context, path):
    """
    Performs a PUT on the item (account, container, or object).

    See :py:mod:`swiftly.cli.put` for context usage information.

    See :py:class:`CLIPut` for more information.
    """
    path = path.lstrip('/') if path else ''
    # A directory input means a recursive structure upload, whatever the path.
    if context.input_ and os.path.isdir(context.input_):
        return cli_put_directory_structure(context, path)
    # Otherwise dispatch on the path shape: empty -> account,
    # single component -> container, more -> object.
    if not path:
        return cli_put_account(context)
    if '/' in path.rstrip('/'):
        return cli_put_object(context, path)
    return cli_put_container(context, path)
class CLIPut(CLICommand):
    """
    A CLICommand that can issue PUT requests.

    See the output of ``swiftly help put`` for more information.
    """

    def __init__(self, cli):
        # The usage text below is runtime help output shown by
        # "swiftly help put"; its wording is preserved verbatim.
        super(CLIPut, self).__init__(
            cli, 'put', max_args=1, usage="""
Usage: %prog [main_options] put [options] [path]
For help on [main_options] run %prog with no args.
Performs a PUT request on the <path> given. If the <path> is an object, the
contents for the object are read from standard input.
Special Note About Segmented Objects:
For object uploads exceeding the -s [size] (default: 5G) the object will be
uploaded in segments. At this time, auto-segmenting only works for objects
uploaded from source files -- objects sourced from standard input cannot exceed
the maximum object size for the cluster.
A segmented object is one that has its contents in several other objects. On
download, these other objects are concatenated into a single object stream.
Segmented objects can be useful to greatly exceed the maximum single object
size, speed up uploading large objects with concurrent segment uploading, and
provide the option to replace, insert, and delete segments within a whole
object without having to alter or reupload any of the other segments.
The main object of a segmented object is called the "manifest object". This
object just has an X-Object-Manifest header that points to another path where
the segments for the object contents are stored. For Swiftly, this header value
is auto-generated as the same name as the manifest object, but with "_segments"
added to the container name. This keeps the segments out of the main container
listing, which is often useful.
By default, Swift's dynamic large object support is used since it was
implemented first. However, if you prefix the [size] with an 's', as in '-s
s1048576' Swiftly will use static large object support. These static large
objects are very similar as described above, except the manifest contains a
static list of the object segments. For more information on the tradeoffs, see
http://greg.brim.net/post/2013/05/16/1834.html""".strip())
        # Each add_option below maps one CLI flag onto the context fields
        # consumed by the cli_put_* functions in this module.
        self.option_parser.add_option(
            '-h', '-H', '--header', dest='header', action='append',
            metavar='HEADER:VALUE',
            help='Add a header to the request. This can be used multiple '
                 'times for multiple headers. Examples: '
                 '-hx-object-meta-color:blue -h "Content-Type: text/html"')
        self.option_parser.add_option(
            '-q', '--query', dest='query', action='append',
            metavar='NAME[=VALUE]',
            help='Add a query parameter to the request. This can be used '
                 'multiple times for multiple query parameters. Example: '
                 '-qmultipart-manifest=get')
        self.option_parser.add_option(
            '-i', '--input', dest='input_', metavar='PATH',
            help='Indicates where to read the contents from; default is '
                 'standard input. If the PATH is a directory, all files in '
                 'the directory will be uploaded as similarly named objects '
                 'and empty directories will create text/directory marker '
                 'objects. Use a dash (as in "-i -") to specify standard '
                 'input for account and container PUTs, as those do not '
                 'normally take input. This is useful with '
                 '-qextract-archive=<format> bulk upload requests. For '
                 'example: tar zc . | swiftly put -qextract-archive=tar.gz -i '
                 '- container')
        self.option_parser.add_option(
            '-n', '--newer', dest='newer', action='store_true',
            help='For PUTs with an --input option, first performs a HEAD on '
                 'the object and compares the X-Object-Meta-Mtime header with '
                 'the modified time of the PATH obtained from the --input '
                 'option and then PUTs the object only if the local time is '
                 'newer. When the --input PATH is a directory, this offers an '
                 'easy way to upload only the newer files since the last '
                 'upload (at the expense of HEAD requests). NOTE THAT THIS '
                 'WILL NOT UPLOAD CHANGED FILES THAT DO NOT HAVE A NEWER '
                 'LOCAL MODIFIED TIME! NEWER does not mean DIFFERENT.')
        self.option_parser.add_option(
            '-d', '--different', dest='different', action='store_true',
            help='For PUTs with an --input option, first performs a HEAD on '
                 'the object and compares the X-Object-Meta-Mtime header with '
                 'the modified time of the PATH obtained from the --input '
                 'option and then PUTs the object only if the local time is '
                 'different. It will also check the local and remote sizes '
                 'and PUT if they differ. ETag/MD5sum checking are not done '
                 '(an option may be provided in the future) since this is '
                 'usually much more disk intensive. When the --input PATH is '
                 'a directory, this offers an easy way to upload only the '
                 'differing files since the last upload (at the expense of '
                 'HEAD requests). NOTE THAT THIS CAN UPLOAD OLDER FILES OVER '
                 'NEWER ONES! DIFFERENT does not mean NEWER.')
        self.option_parser.add_option(
            '-e', '--empty', dest='empty', action='store_true',
            help='Indicates a zero-byte object should be PUT.')
        self.option_parser.add_option(
            '-s', '--segment-size', dest='segment_size', metavar='BYTES',
            help='Indicates the maximum size of an object before uploading it '
                 'as a segmented object. See full help text for more '
                 'information.')
        self.option_parser.add_option(
            '--encrypt', dest='encrypt', metavar='KEY',
            help='Will encrypt the uploaded object data with KEY. This '
                 'currently uses AES 256 in CBC mode but other algorithms may '
                 'be offered in the future. You may specify a single dash "-" '
                 'as the KEY and instead the KEY will be loaded from the '
                 'SWIFTLY_CRYPT_KEY environment variable.')

    def __call__(self, args):
        # Translate parsed options onto the context, then dispatch to
        # cli_put for the account/container/object decision.
        options, args, context = self.parse_args_and_create_context(args)
        context.headers = self.options_list_to_lowered_dict(options.header)
        context.query = self.options_list_to_lowered_dict(options.query)
        context.input_ = options.input_
        context.segment_size = options.segment_size
        context.static_segments = False
        # A leading 's' on the segment size selects static large objects.
        if context.segment_size and context.segment_size[0].lower() == 's':
            context.static_segments = True
            context.segment_size = context.segment_size[1:]
        # Default segment size is 5 GiB.
        context.segment_size = int(
            context.segment_size or 5 * 1024 * 1024 * 1024)
        if context.segment_size < 1:
            raise ReturnCode('invalid segment size %s' % options.segment_size)
        context.empty = options.empty
        context.newer = options.newer
        context.different = options.different
        context.encrypt = options.encrypt
        # A key of "-" means: load the real key from the environment.
        if context.encrypt == '-':
            context.encrypt = os.environ.get('SWIFTLY_CRYPT_KEY')
            if not context.encrypt:
                raise ReturnCode(
                    'A single dash "-" was given as the encryption key, but '
                    'no key was found in the SWIFTLY_CRYPT_KEY environment '
                    'variable.')
        if context.encrypt and context.different:
            raise ReturnCode(
                '--different will not work properly with --encrypt since '
                'encryption may change the object size')
        path = args.pop(0).lstrip('/') if args else None
        return cli_put(context, path)
| |
#!/usr/bin/python3
# Halide tutorial lesson 12.
# This lesson demonstrates how to use Halide to run code on a GPU.
# This lesson can be built by invoking the command:
# make tutorial_lesson_12_using_the_gpu
# in a shell with the current directory at the top of the halide source tree.
# Otherwise, see the platform-specific compiler invocations below.
# On linux, you can compile and run it like so:
# g++ lesson_12*.cpp -g -std=c++11 -I ../include -L ../bin -lHalide `libpng-config --cflags --ldflags` -lpthread -ldl -o lesson_12
# LD_LIBRARY_PATH=../bin ./lesson_12
# On os x:
# g++ lesson_12*.cpp -g -std=c++11 -I ../include -L ../bin -lHalide `libpng-config --cflags --ldflags` -o lesson_12
# DYLD_LIBRARY_PATH=../bin ./lesson_12
#include "Halide.h"
#include <stdio.h>
#using namespace Halide
import halide as hl
# Include some support code for loading pngs.
#include "image_io.h"
from scipy.misc import imread
import os.path
# Include a clock to do performance testing.
#include "clock.h"
from datetime import datetime
# Define some Vars to use.
# These are symbolic dimension names for the pipeline: x/y are spatial,
# c is the channel, i indexes the LUT; ii/xi/yi serve as inner indices
# for split/tile scheduling below.
x, y, c, i, ii, xi, yi = hl.Var("x"), hl.Var("y"), hl.Var("c"), hl.Var("i"), hl.Var("ii"), hl.Var("xi"), hl.Var("yi")
# We're going to want to schedule a pipeline in several ways, so we
# define the pipeline in a class so that we can recreate it several
# times with different schedules.
# We're going to want to schedule a pipeline in several ways, so we
# define the pipeline in a class so that we can recreate it several
# times with different schedules.
class MyPipeline:
    """A two-stage sharpen + gamma-LUT pipeline, schedulable for CPU or GPU."""

    def __init__(self, input):
        # The algorithm definition; scheduling is done separately in the
        # schedule_for_cpu/schedule_for_gpu methods below.
        assert type(input) == hl.Buffer_uint8
        self.lut = hl.Func("lut")
        self.padded = hl.Func("padded")
        self.padded16 = hl.Func("padded16")
        self.sharpen = hl.Func("sharpen")
        self.curved = hl.Func("curved")
        self.input = input
        # For this lesson, we'll use a two-stage pipeline that sharpens
        # and then applies a look-up-table (LUT).
        # First we'll define the LUT. It will be a gamma curve.
        self.lut[i] = hl.cast(hl.UInt(8), hl.clamp(pow(i / 255.0, 1.2) * 255.0, 0, 255))
        # Augment the input with a boundary condition.
        self.padded[x, y, c] = input[hl.clamp(x, 0, input.width() - 1),
                                     hl.clamp(y, 0, input.height() - 1), c]
        # Cast it to 16-bit to do the math.
        self.padded16[x, y, c] = hl.cast(hl.UInt(16), self.padded[x, y, c])
        # Next we sharpen it with a five-tap filter.
        self.sharpen[x, y, c] = (self.padded16[x, y, c] * 2 -
                                 (self.padded16[x - 1, y, c] +
                                  self.padded16[x, y - 1, c] +
                                  self.padded16[x + 1, y, c] +
                                  self.padded16[x, y + 1, c]) / 4)
        # Then apply the LUT.
        self.curved[x, y, c] = self.lut[self.sharpen[x, y, c]]

    # Now we define methods that give our pipeline several different
    # schedules.
    def schedule_for_cpu(self):
        """Schedule the pipeline for the CPU and JIT-compile it."""
        # Compute the look-up-table ahead of time.
        self.lut.compute_root()
        # Compute color channels innermost. Promise that there will
        # be three of them and unroll across them.
        self.curved.reorder(c, x, y) \
                   .bound(c, 0, 3) \
                   .unroll(c)
        # Look-up-tables don't vectorize well, so just parallelize
        # curved in slices of 16 scanlines.
        yo, yi = hl.Var("yo"), hl.Var("yi")
        self.curved.split(y, yo, yi, 16) \
                   .parallel(yo)
        # Compute sharpen as needed per scanline of curved, reusing
        # previous values computed within the same strip of 16
        # scanlines.
        self.sharpen.store_at(self.curved, yo) \
                    .compute_at(self.curved, yi)
        # Vectorize the sharpen. It's 16-bit so we'll vectorize it 8-wide.
        self.sharpen.vectorize(x, 8)
        # Compute the padded input at the same granularity as the
        # sharpen. We'll leave the hl.cast to 16-bit inlined into
        # sharpen.
        self.padded.store_at(self.curved, yo) \
                   .compute_at(self.curved, yi)
        # Also vectorize the padding. It's 8-bit, so we'll vectorize
        # 16-wide.
        self.padded.vectorize(x, 16)
        # JIT-compile the pipeline for the CPU.
        self.curved.compile_jit()
        return

    # Now a schedule that uses CUDA or OpenCL.
    def schedule_for_gpu(self):
        """Schedule the pipeline for the GPU (OpenCL/CUDA) and JIT-compile it."""
        # We make the decision about whether to use the GPU for each
        # hl.Func independently. If you have one hl.Func computed on the
        # CPU, and the next computed on the GPU, Halide will do the
        # copy-to-gpu under the hood. For this pipeline, there's no
        # reason to use the CPU for any of the stages. Halide will
        # copy the input image to the GPU the first time we run the
        # pipeline, and leave it there to reuse on subsequent runs.
        # As before, we'll compute the LUT once at the start of the
        # pipeline.
        self.lut.compute_root()
        # Let's compute the look-up-table using the GPU in 16-wide
        # one-dimensional thread blocks. First we split the index
        # into blocks of size 16:
        block, thread = hl.Var("block"), hl.Var("thread")
        self.lut.split(i, block, thread, 16)
        # Then we tell cuda that our Vars 'block' and 'thread'
        # correspond to CUDA's notions of blocks and threads, or
        # OpenCL's notions of thread groups and threads.
        self.lut.gpu_blocks(block) \
                .gpu_threads(thread)
        # This is a very common scheduling pattern on the GPU, so
        # there's a shorthand for it:
        # lut.gpu_tile(i, ii, 16)
        # hl.Func::gpu_tile method is similar to hl.Func::tile, except that
        # it also specifies that the tile coordinates correspond to
        # GPU blocks, and the coordinates within each tile correspond
        # to GPU threads.
        # Compute color channels innermost. Promise that there will
        # be three of them and unroll across them.
        self.curved.reorder(c, x, y) \
                   .bound(c, 0, 3) \
                   .unroll(c)
        # Compute curved in 2D 8x8 tiles using the GPU.
        self.curved.gpu_tile(x, y, xi, yi, 8, 8)
        # This is equivalent to:
        # curved.tile(x, y, xo, yo, xi, yi, 8, 8)
        #       .gpu_blocks(xo, yo)
        #       .gpu_threads(xi, yi)
        # We'll leave sharpen as inlined into curved.
        # Compute the padded input as needed per GPU block, storing the
        # intermediate result in shared memory. hl.Var::gpu_blocks, and
        # hl.Var::gpu_threads exist to help you schedule producers within
        # GPU threads and blocks.
        self.padded.compute_at(self.curved, x)
        # Use the GPU threads for the x and y coordinates of the
        # padded input.
        self.padded.gpu_threads(x, y)
        # JIT-compile the pipeline for the GPU. CUDA or OpenCL are
        # not enabled by default. We have to construct a hl.Target
        # object, enable one of them, and then pass that target
        # object to compile_jit. Otherwise your CPU will very slowly
        # pretend it's a GPU, and use one thread per output pixel.
        # Start with a target suitable for the machine you're running
        # this on.
        target = hl.get_host_target()
        # Then enable OpenCL or CUDA.
        #use_opencl = False
        use_opencl = True
        if use_opencl:
            # We'll enable OpenCL here, because it tends to give better
            # performance than CUDA, even with NVidia's drivers, because
            # NVidia's open source LLVM backend doesn't seem to do all
            # the same optimizations their proprietary compiler does.
            target.set_feature(hl.TargetFeature.OpenCL)
            print("(Using OpenCL)")
        else:
            # Uncomment the next line and comment out the line above to
            # try CUDA instead.
            target.set_feature(hl.TargetFeature.CUDA)
            print("(Using CUDA)")
        # If you want to see all of the OpenCL or CUDA API calls done
        # by the pipeline, you can also enable the Debug
        # flag. This is helpful for figuring out which stages are
        # slow, or when CPU -> GPU copies happen. It hurts
        # performance though, so we'll leave it commented out.
        # target.set_feature(hl.TargetFeature.Debug)
        self.curved.compile_jit(target)

    def test_performance(self):
        """Time 100 pipeline runs, best of 3, and print the result in ms."""
        # Test the performance of the scheduled MyPipeline.
        output = hl.Buffer(hl.UInt(8),
                           self.input.width(),
                           self.input.height(),
                           self.input.channels())
        # Run the filter once to initialize any GPU runtime state.
        self.curved.realize(output)
        # Now take the best of 3 runs for timing.
        best_time = float("inf")
        for i in range(3):
            t1 = datetime.now()
            # Run the filter 100 times.
            for j in range(100):
                self.curved.realize(output)
            # Force any GPU code to finish by copying the buffer back to the CPU.
            output.copy_to_host()
            t2 = datetime.now()
            elapsed = (t2 - t1).total_seconds()
            if elapsed < best_time:
                best_time = elapsed
        # end of "best of three times"
        print("%1.4f milliseconds" % (best_time * 1000))

    def test_correctness(self, reference_output):
        """Compare this pipeline's output to reference_output pixel by pixel."""
        assert type(reference_output) == hl.Buffer_uint8
        output = self.curved.realize(self.input.width(),
                                     self.input.height(),
                                     self.input.channels())
        assert type(output) == hl.Buffer_uint8
        # Check against the reference output.
        for c in range(self.input.channels()):
            for y in range(self.input.height()):
                for x in range(self.input.width()):
                    if output(x, y, c) != reference_output(x, y, c):
                        print(
                            "Mismatch between output (%d) and "
                            "reference output (%d) at %d, %d, %d" % (
                                output(x, y, c),
                                reference_output(x, y, c),
                                x, y, c))
                        # Report only the first mismatch, then stop.
                        return
        print("CPU and GPU outputs are consistent.")
def main():
    """Run the lesson: schedule on CPU, then on GPU if OpenCL is present."""
    # Load an input image.
    image_path = os.path.join(os.path.dirname(__file__),
                              "../../tutorial/images/rgb.png")
    image_data = imread(image_path)
    input_buf = hl.Buffer(image_data)
    # Allocated an image that will store the correct output
    reference_output = hl.Buffer(hl.UInt(8), input_buf.width(),
                                 input_buf.height(), input_buf.channels())
    print("Testing performance on CPU:")
    cpu_pipeline = MyPipeline(input_buf)
    cpu_pipeline.schedule_for_cpu()
    cpu_pipeline.test_performance()
    cpu_pipeline.curved.realize(reference_output)
    if not have_opencl():
        print("Not testing performance on GPU, "
              "because I can't find the opencl library")
        return 0
    print("Testing performance on GPU:")
    gpu_pipeline = MyPipeline(input_buf)
    gpu_pipeline.schedule_for_gpu()
    gpu_pipeline.test_performance()
    gpu_pipeline.test_correctness(reference_output)
    return 0
def have_opencl():
    """
    A helper function to check if OpenCL seems to exist on this machine.

    Tries to load the platform's OpenCL runtime library via ctypes.

    :return: bool -- True if the library loaded, False if loading failed
        with an OSError.
    :raises Exception: if the current platform is not recognized.
    """
    import ctypes
    import platform

    # Hoist the platform lookup; it is used both for dispatch and in the
    # unknown-platform error message.
    system = platform.system()
    try:
        # `is not None` replaces the original `!= None` (PEP 8: identity
        # comparison with None). LoadLibrary either returns a library
        # handle or raises OSError, which we treat as "not available".
        if system == "Windows":
            ret = ctypes.windll.LoadLibrary("OpenCL.dll") is not None
        elif system == "Darwin":  # apple
            ret = ctypes.cdll.LoadLibrary(
                "/System/Library/Frameworks/OpenCL.framework/"
                "Versions/Current/OpenCL") is not None
        elif system == "Linux":
            ret = ctypes.cdll.LoadLibrary("libOpenCL.so") is not None
        else:
            raise Exception("Cannot check for opencl presence "
                            "on unknown system '%s'" % system)
    except OSError:
        ret = False
    return ret
if __name__ == "__main__":
main()
| |
import math as python_lib_Math
import math as Math
import inspect as python_lib_Inspect
class Enum:
    """Haxe-generated base class for enum values (tag, index, params)."""

    # Runtime metadata consumed by the generated reflection helpers.
    _hx_class_name = "Enum"
    _hx_fields = ["tag", "index", "params"]
    _hx_methods = ["__str__"]

    def __init__(self, tag, index, params):
        # The generator pre-initialised each field to None before the real
        # assignment; a single direct assignment is equivalent.
        self.tag = tag
        self.index = index
        self.params = params

    def __str__(self):
        # A parameterless constructor prints as its bare tag; otherwise
        # render as "Tag(p1,p2,...)" via the Boot string helpers.
        if self.params is None:
            return self.tag
        joined = ",".join(
            [python_Boot.toString1(x1, '') for x1 in self.params])
        return (((HxOverrides.stringOrNull(self.tag) + "(")
                 + HxOverrides.stringOrNull(joined)) + ")")
class Main:
    """Haxe-generated entry-point class exercising the Std helpers."""

    _hx_class_name = "Main"
    _hx_statics = ["main"]

    @staticmethod
    def main():
        # Convert an int to a string and a string to an int via Std;
        # the results are intentionally discarded in this generated demo.
        ten_text = "10"
        eight_value = 8
        eight_as_string = Std.string(eight_value)
        ten_as_int = Std.parseInt(ten_text)
class Std:
    """Haxe-generated port of the Haxe Std class (string/number conversions)."""

    _hx_class_name = "Std"
    _hx_statics = ["string", "parseInt", "shortenPossibleNumber", "parseFloat"]

    @staticmethod
    def string(s):
        # Delegate to the Boot helper so all values (None, bools, enums,
        # anon objects, ...) stringify with Haxe semantics.
        return python_Boot.toString1(s, "")

    @staticmethod
    def parseInt(x):
        # Haxe Std.parseInt: returns None for unparsable input rather than
        # raising. Fallback chain: plain int() -> hex "0x" prefix ->
        # float-then-truncate -> longest leading numeric prefix.
        if (x is None):
            return None
        try:
            return int(x)
        except Exception as _hx_e:
            # _HxException wraps the original value in .val; unwrap it.
            _hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
            e = _hx_e1
            try:
                # Try hexadecimal: a "0x"/"0X" prefix selects base 16.
                prefix = None
                _this = HxString.substr(x, 0, 2)
                prefix = _this.lower()
                if (prefix == "0x"):
                    return int(x, 16)
                raise _HxException("fail")
            except Exception as _hx_e:
                _hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
                e1 = _hx_e1
                # Fall back to parsing as a float and truncating to int.
                r = None
                x1 = Std.parseFloat(x)
                try:
                    r = int(x1)
                except Exception as _hx_e:
                    # int(NaN) raises; treat as "no result".
                    _hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
                    e2 = _hx_e1
                    r = None
                if (r is None):
                    # Last resort: retry on the leading numeric prefix only
                    # (e.g. "12abc" -> "12"); give up if nothing was trimmed.
                    r1 = Std.shortenPossibleNumber(x)
                    if (r1 != x):
                        return Std.parseInt(r1)
                    else:
                        return None
                return r

    @staticmethod
    def shortenPossibleNumber(x):
        # Return the longest leading run of characters from x that are
        # digits ('0'-'9', codes 48-57) or a dot ('.', code 46).
        r = ""
        _g1 = 0
        _g = len(x)
        while (_g1 < _g):
            i = _g1
            _g1 = (_g1 + 1)
            c = None
            # Haxe-style bounds-checked indexing: out of range yields "".
            if ((i < 0) or ((i >= len(x)))):
                c = ""
            else:
                c = x[i]
            _g2 = HxString.charCodeAt(c, 0)
            if (_g2 is not None):
                if (((((((((((_g2 == 46) or ((_g2 == 57))) or ((_g2 == 56))) or ((_g2 == 55))) or ((_g2 == 54))) or ((_g2 == 53))) or ((_g2 == 52))) or ((_g2 == 51))) or ((_g2 == 50))) or ((_g2 == 49))) or ((_g2 == 48))):
                    # Null-coalescing concatenation as generated by Haxe.
                    r = (("null" if r is None else r) + ("null" if c is None else c))
                else:
                    break
            else:
                break
        return r

    @staticmethod
    def parseFloat(x):
        # Haxe Std.parseFloat: returns NaN for unparsable input rather than
        # raising; retries on the leading numeric prefix when possible.
        try:
            return float(x)
        except Exception as _hx_e:
            _hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
            e = _hx_e1
            if (x is not None):
                r1 = Std.shortenPossibleNumber(x)
                if (r1 != x):
                    return Std.parseFloat(r1)
            return Math.NaN
class python_Boot:
    """Haxe->Python runtime bootstrap: reflection helpers and the
    toString implementation used by Haxe-generated code.

    Auto-generated by the Haxe compiler; edit with care.
    """
    _hx_class_name = "python.Boot"
    _hx_statics = ["keywords", "toString1", "fields", "simpleField", "getInstanceFields", "getSuperClass", "getClassFields", "prefixLength", "unhandleKeywords"]
    @staticmethod
    def toString1(o,s):
        # Render *o* with Haxe toString semantics.  *s* accumulates one
        # "\t" per nesting level and caps recursion depth at 5.
        if (o is None):
            return "null"
        if isinstance(o,str):
            return o
        if (s is None):
            s = ""
        if (len(s) >= 5):
            # Depth limit reached: avoid unbounded/cyclic recursion.
            return "<...>"
        if isinstance(o,bool):
            if o:
                return "true"
            else:
                return "false"
        if isinstance(o,int):
            return str(o)
        if isinstance(o,float):
            try:
                # Whole-valued floats print like ints (Haxe convention).
                if (o == int(o)):
                    def _hx_local_1():
                        def _hx_local_0():
                            v = o
                            return Math.floor((v + 0.5))
                        return str(_hx_local_0())
                    return _hx_local_1()
                else:
                    return str(o)
            except Exception as _hx_e:
                _hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
                e = _hx_e1
                # int(o) raises for NaN/inf: fall back to Python's repr.
                return str(o)
        if isinstance(o,list):
            # Arrays render as "[a,b,...]", recursing one level deeper.
            o1 = o
            l = len(o1)
            st = "["
            s = (("null" if s is None else s) + "\t")
            _g = 0
            while (_g < l):
                i = _g
                _g = (_g + 1)
                prefix = ""
                if (i > 0):
                    prefix = ","
                st = (("null" if st is None else st) + HxOverrides.stringOrNull(((("null" if prefix is None else prefix) + HxOverrides.stringOrNull(python_Boot.toString1((o1[i] if i >= 0 and i < len(o1) else None),s))))))
            st = (("null" if st is None else st) + "]")
            return st
        try:
            # Objects with a Haxe toString() method use it directly.
            if hasattr(o,"toString"):
                return o.toString()
        except Exception as _hx_e:
            _hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
            pass
        if (python_lib_Inspect.isfunction(o) or python_lib_Inspect.ismethod(o)):
            return "<function>"
        if hasattr(o,"__class__"):
            if isinstance(o,_hx_AnonObject):
                # Anonymous object: "{ field : value, ... }".
                toStr = None
                try:
                    fields = python_Boot.fields(o)
                    fieldsStr = None
                    _g1 = []
                    _g11 = 0
                    while (_g11 < len(fields)):
                        f = (fields[_g11] if _g11 >= 0 and _g11 < len(fields) else None)
                        _g11 = (_g11 + 1)
                        x = ((("" + ("null" if f is None else f)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f),(("null" if s is None else s) + "\t"))))
                        _g1.append(x)
                    fieldsStr = _g1
                    toStr = (("{ " + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr]))) + " }")
                except Exception as _hx_e:
                    _hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
                    e2 = _hx_e1
                    # A field's rendering blew up: degrade gracefully.
                    return "{ ... }"
                if (toStr is None):
                    return "{ ... }"
                else:
                    return toStr
            if isinstance(o,Enum):
                # Haxe enum value: "Tag(p1,p2)" or bare "Tag".
                o2 = o
                l1 = len(o2.params)
                hasParams = (l1 > 0)
                if hasParams:
                    paramsStr = ""
                    _g2 = 0
                    while (_g2 < l1):
                        i1 = _g2
                        _g2 = (_g2 + 1)
                        prefix1 = ""
                        if (i1 > 0):
                            prefix1 = ","
                        paramsStr = (("null" if paramsStr is None else paramsStr) + HxOverrides.stringOrNull(((("null" if prefix1 is None else prefix1) + HxOverrides.stringOrNull(python_Boot.toString1((o2.params[i1] if i1 >= 0 and i1 < len(o2.params) else None),s))))))
                    return (((HxOverrides.stringOrNull(o2.tag) + "(") + ("null" if paramsStr is None else paramsStr)) + ")")
                else:
                    return o2.tag
            if hasattr(o,"_hx_class_name"):
                if (o.__class__.__name__ != "type"):
                    # Instance of a Haxe class: "Name( field : value, ... )".
                    fields1 = python_Boot.getInstanceFields(o)
                    fieldsStr1 = None
                    _g3 = []
                    _g12 = 0
                    while (_g12 < len(fields1)):
                        f1 = (fields1[_g12] if _g12 >= 0 and _g12 < len(fields1) else None)
                        _g12 = (_g12 + 1)
                        x1 = ((("" + ("null" if f1 is None else f1)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f1),(("null" if s is None else s) + "\t"))))
                        _g3.append(x1)
                    fieldsStr1 = _g3
                    toStr1 = (((HxOverrides.stringOrNull(o._hx_class_name) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr1]))) + " )")
                    return toStr1
                else:
                    # The class object itself: "#Name( static : value, ... )".
                    fields2 = python_Boot.getClassFields(o)
                    fieldsStr2 = None
                    _g4 = []
                    _g13 = 0
                    while (_g13 < len(fields2)):
                        f2 = (fields2[_g13] if _g13 >= 0 and _g13 < len(fields2) else None)
                        _g13 = (_g13 + 1)
                        x2 = ((("" + ("null" if f2 is None else f2)) + " : ") + HxOverrides.stringOrNull(python_Boot.toString1(python_Boot.simpleField(o,f2),(("null" if s is None else s) + "\t"))))
                        _g4.append(x2)
                    fieldsStr2 = _g4
                    toStr2 = (((("#" + HxOverrides.stringOrNull(o._hx_class_name)) + "( ") + HxOverrides.stringOrNull(", ".join([x1 for x1 in fieldsStr2]))) + " )")
                    return toStr2
            if (o == str):
                return "#String"
            if (o == list):
                return "#Array"
            if callable(o):
                return "function"
            try:
                if hasattr(o,"__repr__"):
                    return o.__repr__()
            except Exception as _hx_e:
                _hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
                pass
            if hasattr(o,"__str__"):
                return o.__str__([])
            if hasattr(o,"__name__"):
                return o.__name__
            return "???"
        else:
            return str(o)
    @staticmethod
    def fields(o):
        # Reflective field-name list for *o* (Haxe Reflect.fields).
        a = []
        if (o is not None):
            if hasattr(o,"_hx_fields"):
                fields = o._hx_fields
                return list(fields)
            if isinstance(o,_hx_AnonObject):
                d = o.__dict__
                keys = d.keys()
                # Undo the "_hx_" keyword-mangling on anon-object fields.
                handler = python_Boot.unhandleKeywords
                for k in keys:
                    a.append(handler(k))
            elif hasattr(o,"__dict__"):
                a1 = []
                d1 = o.__dict__
                keys1 = d1.keys()
                for k in keys1:
                    a.append(k)
        return a
    @staticmethod
    def simpleField(o,field):
        # Attribute access that re-applies Haxe's keyword/dunder mangling
        # before looking the name up on *o*; returns None when absent.
        if (field is None):
            return None
        field1 = None
        if field in python_Boot.keywords:
            field1 = ("_hx_" + field)
        elif ((((len(field) > 2) and ((ord(field[0]) == 95))) and ((ord(field[1]) == 95))) and ((ord(field[(len(field) - 1)]) != 95))):
            # Leading-double-underscore (non-trailing) names were prefixed too.
            field1 = ("_hx_" + field)
        else:
            field1 = field
        if hasattr(o,field1):
            return getattr(o,field1)
        else:
            return None
    @staticmethod
    def getInstanceFields(c):
        # All instance fields + methods of Haxe class *c*, merged with its
        # superclass chain (duplicates from subclasses are appended once).
        f = None
        if hasattr(c,"_hx_fields"):
            f = c._hx_fields
        else:
            f = []
        if hasattr(c,"_hx_methods"):
            a = c._hx_methods
            f = (f + a)
        sc = python_Boot.getSuperClass(c)
        if (sc is None):
            return f
        else:
            scArr = python_Boot.getInstanceFields(sc)
            scMap = set(scArr)
            res = []
            _g = 0
            while (_g < len(f)):
                f1 = (f[_g] if _g >= 0 and _g < len(f) else None)
                _g = (_g + 1)
                if (not f1 in scMap):
                    scArr.append(f1)
            return scArr
    @staticmethod
    def getSuperClass(c):
        # The Haxe superclass of *c*, or None.
        if (c is None):
            return None
        try:
            if hasattr(c,"_hx_super"):
                return c._hx_super
            return None
        except Exception as _hx_e:
            _hx_e1 = _hx_e.val if isinstance(_hx_e, _HxException) else _hx_e
            pass
        return None
    @staticmethod
    def getClassFields(c):
        # Static field names declared on Haxe class *c*.
        if hasattr(c,"_hx_statics"):
            x = c._hx_statics
            return list(x)
        else:
            return []
    @staticmethod
    def unhandleKeywords(name):
        # Strip the "_hx_" prefix that was added to Python-keyword names.
        if (HxString.substr(name,0,python_Boot.prefixLength) == "_hx_"):
            real = HxString.substr(name,python_Boot.prefixLength,None)
            if real in python_Boot.keywords:
                return real
        return name
class _hx_AnonObject:
    """Haxe anonymous-object container; fields are plain instance attributes."""
    _hx_class_name = "_hx_AnonObject"
class python_internal_ArrayImpl:
    """Haxe array helpers: bounds-safe indexed access."""
    _hx_class_name = "python.internal.ArrayImpl"
    _hx_statics = ["_get"]
    @staticmethod
    def _get(x, idx):
        """Return ``x[idx]``, or None when *idx* is out of bounds
        (Haxe array-read semantics: no IndexError)."""
        if 0 <= idx < len(x):
            return x[idx]
        return None
class _HxException(Exception):
_hx_class_name = "_HxException"
_hx_fields = ["val"]
_hx_methods = []
_hx_statics = []
_hx_super = Exception
def __init__(self,val):
self.val = None
message = str(val)
super().__init__(message)
self.val = val
class HxOverrides:
    """Haxe semantic overrides for Python operators."""
    _hx_class_name = "HxOverrides"
    _hx_statics = ["eq", "stringOrNull"]
    @staticmethod
    def eq(a, b):
        """Haxe '==': arrays compare by identity, everything else by value."""
        if isinstance(a, list) or isinstance(b, list):
            return a is b
        return a == b
    @staticmethod
    def stringOrNull(s):
        """Render None as the literal string "null" (Haxe string coercion)."""
        return "null" if s is None else s
class HxString:
    """Haxe String API helpers."""
    _hx_class_name = "HxString"
    _hx_statics = ["charCodeAt", "substr"]
    @staticmethod
    def charCodeAt(s, index):
        """Code point at *index*, or None when *s* is None/empty or the
        index is out of range (Haxe charCodeAt semantics)."""
        if s is None or index < 0 or index >= len(s):
            return None
        return ord(s[index])
    @staticmethod
    def substr(s, startIndex, _hx_len=None):
        """Haxe substr: a None length means "to the end of the string";
        a zero length yields the empty string."""
        if _hx_len is None:
            return s[startIndex:]
        if _hx_len == 0:
            return ""
        return s[startIndex:(startIndex + _hx_len)]
# Runtime-constant initialisation for the Haxe standard-library shims.
Math.NEGATIVE_INFINITY = float("-inf")
Math.POSITIVE_INFINITY = float("inf")
Math.NaN = float("nan")
Math.PI = python_lib_Math.pi
# Python keywords that Haxe field names must not shadow; such fields are
# stored with an "_hx_" prefix (see python_Boot.simpleField / unhandleKeywords).
python_Boot.keywords = set(["and", "del", "from", "not", "with", "as", "elif", "global", "or", "yield", "assert", "else", "if", "pass", "None", "break", "except", "import", "raise", "True", "class", "exec", "in", "return", "False", "continue", "finally", "is", "try", "def", "for", "lambda", "while"])
python_Boot.prefixLength = len("_hx_")
# Program entry point emitted by the Haxe compiler.
Main.main()
| |
# coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class NewApplication(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'name': 'str',
        'description': 'str',
        'timezone': 'str',
        'currency': 'str',
        'case_sensitivity': 'str',
        'attributes': 'object',
        'limits': 'list[LimitConfig]',
        'campaign_priority': 'str',
        'exclusive_campaigns_strategy': 'str',
        'default_discount_scope': 'str',
        'enable_cascading_discounts': 'bool',
        'enable_flattened_cart_items': 'bool',
        'attributes_settings': 'AttributesSettings',
        'sandbox': 'bool',
        'key': 'str'
    }

    attribute_map = {
        'name': 'name',
        'description': 'description',
        'timezone': 'timezone',
        'currency': 'currency',
        'case_sensitivity': 'caseSensitivity',
        'attributes': 'attributes',
        'limits': 'limits',
        'campaign_priority': 'campaignPriority',
        'exclusive_campaigns_strategy': 'exclusiveCampaignsStrategy',
        'default_discount_scope': 'defaultDiscountScope',
        'enable_cascading_discounts': 'enableCascadingDiscounts',
        'enable_flattened_cart_items': 'enableFlattenedCartItems',
        'attributes_settings': 'attributesSettings',
        'sandbox': 'sandbox',
        'key': 'key'
    }

    def __init__(self, name=None, description=None, timezone=None, currency=None, case_sensitivity=None, attributes=None, limits=None, campaign_priority=None, exclusive_campaigns_strategy=None, default_discount_scope=None, enable_cascading_discounts=None, enable_flattened_cart_items=None, attributes_settings=None, sandbox=None, key=None, local_vars_configuration=None):  # noqa: E501
        """NewApplication - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._name = None
        self._description = None
        self._timezone = None
        self._currency = None
        self._case_sensitivity = None
        self._attributes = None
        self._limits = None
        self._campaign_priority = None
        self._exclusive_campaigns_strategy = None
        self._default_discount_scope = None
        self._enable_cascading_discounts = None
        self._enable_flattened_cart_items = None
        self._attributes_settings = None
        self._sandbox = None
        self._key = None
        self.discriminator = None

        # name, timezone and currency are always assigned (required fields:
        # their setters reject None); the remaining fields are optional and
        # only assigned when a value was supplied.
        self.name = name
        if description is not None:
            self.description = description
        self.timezone = timezone
        self.currency = currency
        if case_sensitivity is not None:
            self.case_sensitivity = case_sensitivity
        if attributes is not None:
            self.attributes = attributes
        if limits is not None:
            self.limits = limits
        if campaign_priority is not None:
            self.campaign_priority = campaign_priority
        if exclusive_campaigns_strategy is not None:
            self.exclusive_campaigns_strategy = exclusive_campaigns_strategy
        if default_discount_scope is not None:
            self.default_discount_scope = default_discount_scope
        if enable_cascading_discounts is not None:
            self.enable_cascading_discounts = enable_cascading_discounts
        if enable_flattened_cart_items is not None:
            self.enable_flattened_cart_items = enable_flattened_cart_items
        if attributes_settings is not None:
            self.attributes_settings = attributes_settings
        if sandbox is not None:
            self.sandbox = sandbox
        if key is not None:
            self.key = key

    @property
    def name(self):
        """Gets the name of this NewApplication.  # noqa: E501

        The name of this application.  # noqa: E501

        :return: The name of this NewApplication.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this NewApplication.

        The name of this application.  # noqa: E501

        :param name: The name of this NewApplication.  # noqa: E501
        :type: str
        """
        # Required field: must be a non-empty string when validation is on.
        if self.local_vars_configuration.client_side_validation and name is None:  # noqa: E501
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501
        if (self.local_vars_configuration.client_side_validation and
                name is not None and len(name) < 1):
            raise ValueError("Invalid value for `name`, length must be greater than or equal to `1`")  # noqa: E501

        self._name = name

    @property
    def description(self):
        """Gets the description of this NewApplication.  # noqa: E501

        A longer description of the application.  # noqa: E501

        :return: The description of this NewApplication.  # noqa: E501
        :rtype: str
        """
        return self._description

    @description.setter
    def description(self, description):
        """Sets the description of this NewApplication.

        A longer description of the application.  # noqa: E501

        :param description: The description of this NewApplication.  # noqa: E501
        :type: str
        """
        self._description = description

    @property
    def timezone(self):
        """Gets the timezone of this NewApplication.  # noqa: E501

        A string containing an IANA timezone descriptor.  # noqa: E501

        :return: The timezone of this NewApplication.  # noqa: E501
        :rtype: str
        """
        return self._timezone

    @timezone.setter
    def timezone(self, timezone):
        """Sets the timezone of this NewApplication.

        A string containing an IANA timezone descriptor.  # noqa: E501

        :param timezone: The timezone of this NewApplication.  # noqa: E501
        :type: str
        """
        # Required field: must be a non-empty string when validation is on.
        if self.local_vars_configuration.client_side_validation and timezone is None:  # noqa: E501
            raise ValueError("Invalid value for `timezone`, must not be `None`")  # noqa: E501
        if (self.local_vars_configuration.client_side_validation and
                timezone is not None and len(timezone) < 1):
            raise ValueError("Invalid value for `timezone`, length must be greater than or equal to `1`")  # noqa: E501

        self._timezone = timezone

    @property
    def currency(self):
        """Gets the currency of this NewApplication.  # noqa: E501

        A string describing a default currency for new customer sessions.  # noqa: E501

        :return: The currency of this NewApplication.  # noqa: E501
        :rtype: str
        """
        return self._currency

    @currency.setter
    def currency(self, currency):
        """Sets the currency of this NewApplication.

        A string describing a default currency for new customer sessions.  # noqa: E501

        :param currency: The currency of this NewApplication.  # noqa: E501
        :type: str
        """
        # Required field: must be a non-empty string when validation is on.
        if self.local_vars_configuration.client_side_validation and currency is None:  # noqa: E501
            raise ValueError("Invalid value for `currency`, must not be `None`")  # noqa: E501
        if (self.local_vars_configuration.client_side_validation and
                currency is not None and len(currency) < 1):
            raise ValueError("Invalid value for `currency`, length must be greater than or equal to `1`")  # noqa: E501

        self._currency = currency

    @property
    def case_sensitivity(self):
        """Gets the case_sensitivity of this NewApplication.  # noqa: E501

        A string indicating how should campaigns in this application deal with case sensitivity on coupon codes.  # noqa: E501

        :return: The case_sensitivity of this NewApplication.  # noqa: E501
        :rtype: str
        """
        return self._case_sensitivity

    @case_sensitivity.setter
    def case_sensitivity(self, case_sensitivity):
        """Sets the case_sensitivity of this NewApplication.

        A string indicating how should campaigns in this application deal with case sensitivity on coupon codes.  # noqa: E501

        :param case_sensitivity: The case_sensitivity of this NewApplication.  # noqa: E501
        :type: str
        """
        # Enum-valued field: constrained to the values below.
        allowed_values = ["sensitive", "insensitive-uppercase", "insensitive-lowercase"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and case_sensitivity not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `case_sensitivity` ({0}), must be one of {1}"  # noqa: E501
                .format(case_sensitivity, allowed_values)
            )

        self._case_sensitivity = case_sensitivity

    @property
    def attributes(self):
        """Gets the attributes of this NewApplication.  # noqa: E501

        Arbitrary properties associated with this campaign  # noqa: E501

        :return: The attributes of this NewApplication.  # noqa: E501
        :rtype: object
        """
        return self._attributes

    @attributes.setter
    def attributes(self, attributes):
        """Sets the attributes of this NewApplication.

        Arbitrary properties associated with this campaign  # noqa: E501

        :param attributes: The attributes of this NewApplication.  # noqa: E501
        :type: object
        """
        self._attributes = attributes

    @property
    def limits(self):
        """Gets the limits of this NewApplication.  # noqa: E501

        Default limits for campaigns created in this application  # noqa: E501

        :return: The limits of this NewApplication.  # noqa: E501
        :rtype: list[LimitConfig]
        """
        return self._limits

    @limits.setter
    def limits(self, limits):
        """Sets the limits of this NewApplication.

        Default limits for campaigns created in this application  # noqa: E501

        :param limits: The limits of this NewApplication.  # noqa: E501
        :type: list[LimitConfig]
        """
        self._limits = limits

    @property
    def campaign_priority(self):
        """Gets the campaign_priority of this NewApplication.  # noqa: E501

        Default priority for campaigns created in this application, can be one of (universal, stackable, exclusive). If no value is provided, this is set to \"universal\"  # noqa: E501

        :return: The campaign_priority of this NewApplication.  # noqa: E501
        :rtype: str
        """
        return self._campaign_priority

    @campaign_priority.setter
    def campaign_priority(self, campaign_priority):
        """Sets the campaign_priority of this NewApplication.

        Default priority for campaigns created in this application, can be one of (universal, stackable, exclusive). If no value is provided, this is set to \"universal\"  # noqa: E501

        :param campaign_priority: The campaign_priority of this NewApplication.  # noqa: E501
        :type: str
        """
        allowed_values = ["universal", "stackable", "exclusive"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and campaign_priority not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `campaign_priority` ({0}), must be one of {1}"  # noqa: E501
                .format(campaign_priority, allowed_values)
            )

        self._campaign_priority = campaign_priority

    @property
    def exclusive_campaigns_strategy(self):
        """Gets the exclusive_campaigns_strategy of this NewApplication.  # noqa: E501

        The strategy used when choosing exclusive campaigns for evaluation, can be one of (listOrder, lowestDiscount, highestDiscount). If no value is provided, this is set to \"listOrder\"  # noqa: E501

        :return: The exclusive_campaigns_strategy of this NewApplication.  # noqa: E501
        :rtype: str
        """
        return self._exclusive_campaigns_strategy

    @exclusive_campaigns_strategy.setter
    def exclusive_campaigns_strategy(self, exclusive_campaigns_strategy):
        """Sets the exclusive_campaigns_strategy of this NewApplication.

        The strategy used when choosing exclusive campaigns for evaluation, can be one of (listOrder, lowestDiscount, highestDiscount). If no value is provided, this is set to \"listOrder\"  # noqa: E501

        :param exclusive_campaigns_strategy: The exclusive_campaigns_strategy of this NewApplication.  # noqa: E501
        :type: str
        """
        allowed_values = ["listOrder", "lowestDiscount", "highestDiscount"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and exclusive_campaigns_strategy not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `exclusive_campaigns_strategy` ({0}), must be one of {1}"  # noqa: E501
                .format(exclusive_campaigns_strategy, allowed_values)
            )

        self._exclusive_campaigns_strategy = exclusive_campaigns_strategy

    @property
    def default_discount_scope(self):
        """Gets the default_discount_scope of this NewApplication.  # noqa: E501

        The default scope to apply \"setDiscount\" effects on if no scope was provided with the effect.  # noqa: E501

        :return: The default_discount_scope of this NewApplication.  # noqa: E501
        :rtype: str
        """
        return self._default_discount_scope

    @default_discount_scope.setter
    def default_discount_scope(self, default_discount_scope):
        """Sets the default_discount_scope of this NewApplication.

        The default scope to apply \"setDiscount\" effects on if no scope was provided with the effect.  # noqa: E501

        :param default_discount_scope: The default_discount_scope of this NewApplication.  # noqa: E501
        :type: str
        """
        allowed_values = ["sessionTotal", "cartItems", "additionalCosts"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and default_discount_scope not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `default_discount_scope` ({0}), must be one of {1}"  # noqa: E501
                .format(default_discount_scope, allowed_values)
            )

        self._default_discount_scope = default_discount_scope

    @property
    def enable_cascading_discounts(self):
        """Gets the enable_cascading_discounts of this NewApplication.  # noqa: E501

        Flag indicating if discounts should cascade for this application  # noqa: E501

        :return: The enable_cascading_discounts of this NewApplication.  # noqa: E501
        :rtype: bool
        """
        return self._enable_cascading_discounts

    @enable_cascading_discounts.setter
    def enable_cascading_discounts(self, enable_cascading_discounts):
        """Sets the enable_cascading_discounts of this NewApplication.

        Flag indicating if discounts should cascade for this application  # noqa: E501

        :param enable_cascading_discounts: The enable_cascading_discounts of this NewApplication.  # noqa: E501
        :type: bool
        """
        self._enable_cascading_discounts = enable_cascading_discounts

    @property
    def enable_flattened_cart_items(self):
        """Gets the enable_flattened_cart_items of this NewApplication.  # noqa: E501

        Flag indicating if cart items of quantity larger than one should be separated into different items of quantity one  # noqa: E501

        :return: The enable_flattened_cart_items of this NewApplication.  # noqa: E501
        :rtype: bool
        """
        return self._enable_flattened_cart_items

    @enable_flattened_cart_items.setter
    def enable_flattened_cart_items(self, enable_flattened_cart_items):
        """Sets the enable_flattened_cart_items of this NewApplication.

        Flag indicating if cart items of quantity larger than one should be separated into different items of quantity one  # noqa: E501

        :param enable_flattened_cart_items: The enable_flattened_cart_items of this NewApplication.  # noqa: E501
        :type: bool
        """
        self._enable_flattened_cart_items = enable_flattened_cart_items

    @property
    def attributes_settings(self):
        """Gets the attributes_settings of this NewApplication.  # noqa: E501


        :return: The attributes_settings of this NewApplication.  # noqa: E501
        :rtype: AttributesSettings
        """
        return self._attributes_settings

    @attributes_settings.setter
    def attributes_settings(self, attributes_settings):
        """Sets the attributes_settings of this NewApplication.


        :param attributes_settings: The attributes_settings of this NewApplication.  # noqa: E501
        :type: AttributesSettings
        """
        self._attributes_settings = attributes_settings

    @property
    def sandbox(self):
        """Gets the sandbox of this NewApplication.  # noqa: E501

        Flag indicating if this is a live or sandbox application  # noqa: E501

        :return: The sandbox of this NewApplication.  # noqa: E501
        :rtype: bool
        """
        return self._sandbox

    @sandbox.setter
    def sandbox(self, sandbox):
        """Sets the sandbox of this NewApplication.

        Flag indicating if this is a live or sandbox application  # noqa: E501

        :param sandbox: The sandbox of this NewApplication.  # noqa: E501
        :type: bool
        """
        self._sandbox = sandbox

    @property
    def key(self):
        """Gets the key of this NewApplication.  # noqa: E501

        Hex key for HMAC-signing API calls as coming from this application (16 hex digits)  # noqa: E501

        :return: The key of this NewApplication.  # noqa: E501
        :rtype: str
        """
        return self._key

    @key.setter
    def key(self, key):
        """Sets the key of this NewApplication.

        Hex key for HMAC-signing API calls as coming from this application (16 hex digits)  # noqa: E501

        :param key: The key of this NewApplication.  # noqa: E501
        :type: str
        """
        # Must be exactly 16 hexadecimal digits when validation is enabled.
        if (self.local_vars_configuration.client_side_validation and
                key is not None and not re.search(r'^[a-fA-F0-9]{16}$', key)):  # noqa: E501
            raise ValueError(r"Invalid value for `key`, must be a follow pattern or equal to `/^[a-fA-F0-9]{16}$/`")  # noqa: E501

        self._key = key

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialise nested models (anything with to_dict),
        # lists of models, and dicts of models.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, NewApplication):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, NewApplication):
            return True

        return self.to_dict() != other.to_dict()
| |
#! /usr/bin/env python
import sys
from scapy.all import * # noqa
import json
from daemon import Daemon
from datapoller import DataPoller
from filters import get_scapy_filter_from_querybuilder_rules
from utils import get_byte_size
from messages import (
Beacon, BEACON_TYPES, message_test_data, decode, MESSAGE_FORMATS)
import api
from lpexceptions import MalformedBeacon
from message_responses.responses import get_response_by_name
class BeaconFilterList(list):
    """List of beacon-filter dicts with add/remove callbacks.

    ``on_add`` / ``on_remove`` are invoked by :meth:`beacon_update` when the
    authoritative filter list fetched from the controller diverges from this
    one.  In this file the callbacks themselves mutate this list (see
    ``LP._new_beacon_filter_callback`` / ``_remove_beacon_filter_callback``),
    so removal iteration must happen on a snapshot.
    """

    def __init__(self, *args, **kwargs):
        super(BeaconFilterList, self).__init__(*args)
        self.on_add = kwargs.pop('on_add', None)
        self.on_remove = kwargs.pop('on_remove', None)

    def beacon_update(self, list2):
        """Reconcile this list against *list2*, firing callbacks for diffs.

        :param list2: the authoritative list of beacon filters
        """
        if self == list2:
            # Beacon Filter queue is up-to-date
            return
        # Membership is checked against the live list so that a duplicate
        # entry in list2 is not added twice (on_add appends to self).
        for item in list2:
            if item not in self:
                # Execute callback method for New or Updated Beacon Filter
                self.on_add(item)
        # BUG FIX: iterate a snapshot here.  on_remove removes the entry
        # from this very list, and mutating a list while iterating it
        # silently skips the element after each removal.
        for item in list(self):
            if item not in list2:
                # Execute callback method for Removal of a Beacon Filter
                self.on_remove(item)
class ImplantTaskQueue(dict):
    """Maps implant UUID -> ordered list of pending task dicts (FIFO)."""

    def __init__(self, *args, **kwargs):
        super(ImplantTaskQueue, self).__init__(*args, **kwargs)

    def __delitem__(self, key):
        # Remove task from controller
        super(ImplantTaskQueue, self).__delitem__(key)

    def add_task(self, implant_uuid, task):
        """Queue *task* for *implant_uuid*, ignoring exact duplicates."""
        # Direct membership test instead of `in self.keys()` (same result,
        # no intermediate key list under Python 2).
        if implant_uuid not in self:
            self[implant_uuid] = list()
        if task in self[implant_uuid]:
            # Already queued; nothing to do.
            return
        # Parenthesized single-argument print behaves identically under
        # Python 2 (print statement) and Python 3 (print function).
        print("Add task: %s ==> %s" % (implant_uuid, task))
        self[implant_uuid].append(task)

    def remove_task(self, implant_uuid, task):
        """Drop *task* from the implant's queue and notify the controller.

        Raises KeyError if *implant_uuid* has no queue (callers guard via
        get_next_task()).
        """
        if task in self[implant_uuid]:
            self[implant_uuid].remove(task)
        if len(self[implant_uuid]) < 1:
            # Last task delivered: drop the whole queue entry.
            del self[implant_uuid]
        # Tell the controller the task has been handed to the implant.
        api.remove_task(implant_uuid, task['id'])

    def get_next_task(self, implant_uuid):
        """Return the oldest pending task for *implant_uuid*, or None."""
        if implant_uuid in self:
            return self[implant_uuid][0]
        return None
class LP(Daemon):
"""Listening Post (LP) service for receiving and processing Command &
Control Beacons from implants.
This LP allows the user to register custom handlers for different packet
types.
For example, the user could have a DNS beacon handler that processes
only DNS packets.
Attributes:
verbose: Enables verbose logging
handlers: Stores registered handlers
"""
def __init__(self, *args, **kwargs):
"""Constructor
"""
super(LP, self).__init__(*args, **kwargs)
self.verbose = False
self.filters = {}
self.beacon_filters = BeaconFilterList(
on_add=self._new_beacon_filter_callback,
on_remove=self._remove_beacon_filter_callback)
self.task_queue = ImplantTaskQueue()
def _pkt_handler(self, pkt):
"""Process a packet
:type pkt: scapy.Packet
:param pkt: Packet to be processed
"""
for lambda_filter, beacon_filter in self.filters.iteritems():
packet = filter(lambda_filter, pkt)
if packet:
# Packet matches a registered filter
self._log("Packet matches filter: %s" % lambda_filter)
for beacon_data in beacon_filter:
data_map_list = json.loads(
beacon_data['beacon_data_mapping'])
try:
beacon = self.extract_beacon_from_packet(
packet[0], data_map_list)
except:
self._log("Error trying to extract beacon")
return
self._log("Received beacon: %s" % beacon)
# Process any queued tasking for this implant
task = self.task_queue.get_next_task(beacon.uuid)
if task:
self.task_queue.remove_task(beacon.uuid, task)
self.send_implant_task(
pkt,
beacon_data['response_data_type'],
json.loads(beacon_data['response_data_mapping']),
task)
self.send_beacon_to_controller(beacon)
def send_implant_task(
self, pkt, response_data_type, response_data_mapping, task):
"""Send tasking to an Implant by responding to a Beacon"""
self._log("Sending task to implant: %s" % task)
response_factory = get_response_by_name(response_data_type)()
response = response_factory.create_response(pkt)
response_factory.add_response_data(
response, task, response_data_mapping, format=MESSAGE_FORMATS['xor'])
# Send the response packet
send(response)
def send_beacon_to_controller(self, beacon):
api.send_beacon(beacon)
def _parse_data_map(self, data_map_list):
data_mapping_dict = {}
# Figure out which beacon fields are packed into
# the same packet field
for packet_field, beacon_field in data_map_list:
if packet_field in data_mapping_dict:
data_mapping_dict[packet_field].append(beacon_field)
else:
data_mapping_dict[packet_field] = [beacon_field]
return data_mapping_dict
def extract_beacon_from_packet(self, packet, data_map_list):
"""
packet: scapy Packet object
list_of_beacon_data_mappings: list of data mappings
"""
beacon = Beacon()
beacon.external_ip_address = packet['IP'].src
mapped_data = self._parse_data_map(data_map_list).iteritems()
for packet_field, beacon_fields in mapped_data:
field_protocol, field_subfield = packet_field.split(".")
# Use scapy to extract the data from the packet field
layer = packet.getlayer(eval(field_protocol))
packet_field_value = layer.getfieldval(field_subfield)
"""
packet_field_value = eval(
"packet['%s'].%s" % (field_protocol, field_subfield))
"""
if len(beacon_fields) > 1:
# More than one beacon field within same packet field
offset = 0
for beacon_field in beacon_fields:
format = MESSAGE_FORMATS['plain']
data_size = get_byte_size(
message_test_data[beacon_field])
if 'format' in beacon.keys():
format = beacon.format
if beacon_field == 'data':
try:
data_size = beacon.data_length
except AttributeError:
# Normal if Beacon doesn't contain data
data_size = 0
if beacon_field == 'data_length' and not (
(beacon.type & 0xf) == BEACON_TYPES['data']):
beacon['%s' % beacon_field] = 0
continue
# decode all data except type
if beacon_field != type:
value = decode(packet_field_value[offset:offset+data_size], format)
else:
value = packet_field_value[offset:offset+data_size]
try:
#beacon['%s' % beacon_field] = packet_field_value[
# offset:offset+data_size]
beacon['%s' % beacon_field] = value
self._log("beacon[%s] => %s" % (beacon_field, packet_field_value[offset:offset+data_size].encode('hex')))
except MalformedBeacon:
self._log("Malformed Beacon:")
break
offset += data_size
else:
beacon['%s' % beacon_field] = packet_field_value
return beacon
def _new_beacon_filter_callback(self, beacon_filter):
"""Callback method called when a new filter is added to the queue"""
self._log("Adding Beacon Filter")
scapy_filter = eval(get_scapy_filter_from_querybuilder_rules(
beacon_filter['beacon_filter']))
self.register_filter(scapy_filter, beacon_filter)
self.beacon_filters.append(beacon_filter)
def _remove_beacon_filter_callback(self, beacon_filter):
"""Callback method called when a filter is removed from the queue"""
self._log("Remove Beacon from the list")
scapy_filter = eval(get_scapy_filter_from_querybuilder_rules(
beacon_filter['beacon_filter']))
self.unregister_filter(scapy_filter, beacon_filter)
self.beacon_filters.remove(beacon_filter)
def register_filter(self, scapy_filter, beacon_filter):
"""Add a new packet filter
"""
try:
if beacon_filter not in self.filters[scapy_filter]:
self.filters[scapy_filter].append(beacon_filter)
except KeyError:
self.filters[scapy_filter] = [beacon_filter]
self._log("Registered new filter: %s\n%s" % (
scapy_filter, beacon_filter))
def unregister_filter(self, scapy_filter, beacon_filter):
"""Remove a packet handler from the list of handlers"""
if len(self.filters[scapy_filter]) == 1:
del self.filters[scapy_filter]
else:
self.filters[scapy_filter].remove(beacon_filter)
    def _log(self, msg, msg_type="Info"):
        """Private logger for messages.

        Writes to stderr only when ``self.verbose`` is set; ``msg_type``
        is currently unused (kept for the disabled api.send_log call).
        """
        if self.verbose:
            sys.stderr.write("%s\n" % str(msg))
        # TODO: uncomment
        #api.send_log(msg, msg_type)
    def _start_sniff(self):
        """Start listening for incoming packets.

        Blocks in scapy's sniff() loop; every packet is passed to
        self._pkt_handler and not stored (store=0).
        """
        self._log("Starting the packet sniffer")
        sniff(prn=self._pkt_handler, store=0)
    def start_data_poller(self):
        """Create and start the DataPoller worker.

        The poller shares this instance's beacon filter list, logger and
        task queue; the handle is kept on ``self.dp`` so stop() can halt it.
        """
        self._log("Starting the data poller")
        self.dp = DataPoller(
            beacon_filters=self.beacon_filters,
            logger=self._log, task_queue=self.task_queue)
        self.dp.start()
    def stop(self):
        """Stop the data poller, then the daemon itself (via the base class).

        NOTE(review): assumes start_data_poller() ran first -- ``self.dp``
        is otherwise unset and this raises AttributeError; confirm callers.
        """
        self.dp.stop()
        super(LP, self).stop()
    def run(self):
        """Run forever: launch the poller thread, then block in the sniffer."""
        self.start_data_poller()
        self._start_sniff()
if __name__ == "__main__":
    import argparse
    # Command-line entry point: run in the foreground by default, or
    # control the daemonized instance with -d start/stop/restart.
    parser = argparse.ArgumentParser(prog='Lp')
    parser.add_argument("-v", "--verbose", help="increase output verbosity",
                        action="store_true", default=False)
    parser.add_argument("-d", "--daemon", help="run in background (daemonize)",
                        choices=['start', 'stop', 'restart'],
                        default=False)
    args = parser.parse_args()
    lp = LP('/var/run/lp.pid')
    lp.verbose = args.verbose
    # TCP Handler (example hook, currently disabled)
    def _handler_tcp(pkt):
        print "Called tcp handler"
    _filter_tcp = lambda p: TCP in p
    #lp.register_handler(_filter_tcp, _handler_tcp)
    # UDP Handler (example hook, currently disabled)
    def _handler_udp(pkt):
        print "Called udp handler"
    _filter_udp = lambda p: UDP in p
    #lp.register_handler(_filter_udp, _handler_udp)
    if args.daemon == 'start':
        print "Starting"
        lp.start()
    elif args.daemon == 'stop':
        print "Stopping"
        lp.stop()
    elif args.daemon == 'restart':
        print "Restarting"
        lp.restart()
    else:
        # No daemon flag: run in the foreground.
        lp.run()
| |
from __future__ import unicode_literals
from future.utils import native_str
from django.contrib import admin
from django.contrib.auth import logout
from django.contrib.messages import error
from django.contrib.redirects.models import Redirect
from django.core.exceptions import MiddlewareNotUsed
from django.core.urlresolvers import reverse, resolve
from django.http import (HttpResponse, HttpResponseRedirect,
HttpResponsePermanentRedirect, HttpResponseGone)
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.template import Template, RequestContext
from django.utils.cache import get_max_age
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from zhiliao.conf import settings
from .models import SitePermission
from .management.commands.createdb import (DEFAULT_USERNAME,
DEFAULT_PASSWORD)
from zhiliao.utils.cache import (cache_key_prefix, nevercache_token,
cache_get, cache_set, cache_installed)
from zhiliao.utils.device import templates_for_device
from zhiliao.utils.sites import current_site_id, templates_for_host
from zhiliao.utils.urls import next_url
# Old middleware names mapped to their replacements. Module-level shim
# classes for the old names are generated below, so legacy
# MIDDLEWARE_CLASSES entries keep importing (with a deprecation warning).
_deprecated = {
    "AdminLoginInterfaceSelector": "AdminLoginInterfaceSelectorMiddleware",
    "DeviceAwareUpdateCacheMiddleware": "UpdateCacheMiddleware",
    "DeviceAwareFetchFromCacheMiddleware": "FetchFromCacheMiddleware",
}
class _Deprecated(object):
def __init__(self, *args, **kwargs):
from warnings import warn
msg = "zhiliao.core.middleware.%s is deprecated." % self.old
if self.new:
msg += (" Please change the MIDDLEWARE_CLASSES setting to use "
"zhiliao.core.middleware.%s" % self.new)
warn(msg)
# Generate a module-level alias class for each deprecated name; every
# alias warns on instantiation via _Deprecated.__init__. native_str
# keeps the class name a native str on Python 2.
for old, new in _deprecated.items():
    globals()[old] = type(native_str(old),
                          (_Deprecated,),
                          {"old": old, "new": new})
class AdminLoginInterfaceSelectorMiddleware(object):
    """
    Checks for a POST from the admin login view and if authentication is
    successful and the "site" interface is selected, redirect to the site.
    """
    def process_view(self, request, view_func, view_args, view_kwargs):
        """Intercept admin login POSTs carrying the interface selector.

        Returns the login view's response when authentication failed,
        a redirect (to the admin or the site) when it succeeded, or
        ``None`` to let normal view processing continue.
        """
        login_type = request.POST.get("mezzanine_login_interface")
        if login_type and not request.user.is_authenticated():
            response = view_func(request, *view_args, **view_kwargs)
            if request.user.is_authenticated():
                if login_type == "admin":
                    # Renamed from ``next`` to avoid shadowing the builtin.
                    redirect_to = next_url(request) or request.get_full_path()
                    username = request.user.get_username()
                    if (username == DEFAULT_USERNAME and
                            request.user.check_password(DEFAULT_PASSWORD)):
                        # Nag users still running with the createdb defaults.
                        error(request, mark_safe(_(
                            "Your account is using the default password, "
                            "please <a href='%s'>change it</a> immediately.")
                            % reverse("user_change_password",
                                      args=(request.user.id,))))
                else:
                    redirect_to = next_url(request) or "/"
                return HttpResponseRedirect(redirect_to)
            else:
                # Authentication failed: show the login view's own response.
                return response
        return None
class SitePermissionMiddleware(object):
    """
    Marks the current user with a ``has_site_permission`` which is
    used in place of ``user.is_staff`` to achieve per-site staff
    access.
    """
    def process_view(self, request, view_func, view_args, view_kwargs):
        """Annotate ``request.user`` with per-site staff permission.

        Staff members without a SitePermission row for the current site
        who hit the admin are logged out and shown the admin login view.
        """
        user = request.user
        granted = False
        if user.is_superuser:
            # Superusers always have site access.
            granted = True
        elif user.is_staff:
            # Staff need an explicit SitePermission row for this site.
            try:
                SitePermission.objects.get(user=user, sites=current_site_id())
            except SitePermission.DoesNotExist:
                if request.path.startswith(reverse("admin:index")):
                    # No permission for this site: back to the admin login.
                    logout(request)
                    return admin.site.login(
                        request, extra_context={"no_site_permission": True})
            else:
                granted = True
        user.has_site_permission = granted
class TemplateForDeviceMiddleware(object):
    """
    Inserts device-specific templates to the template list.
    """
    def process_template_response(self, request, response):
        """Swap ``template_name`` for the device-aware template list."""
        if hasattr(response, "template_name"):
            names = response.template_name
            # Leave already-instantiated Template objects untouched.
            if not isinstance(names, Template):
                response.template_name = templates_for_device(request, names)
        return response
class TemplateForHostMiddleware(object):
    """
    Inserts host-specific templates to the template list.
    """
    def process_template_response(self, request, response):
        """Swap ``template_name`` for the host-aware template list."""
        if hasattr(response, "template_name"):
            names = response.template_name
            # Leave already-instantiated Template objects untouched.
            if not isinstance(names, Template):
                response.template_name = templates_for_host(names)
        return response
class UpdateCacheMiddleware(object):
    """
    Response phase for Mezzanine's cache middleware. Handles caching
    the response, and then performing the second phase of rendering,
    for content enclosed by the ``nevercache`` tag.
    """
    def process_response(self, request, response):
        """Cache the response when eligible, then render nevercache parts."""
        # Caching is only applicable for text-based, non-streaming
        # responses. We also skip it for non-200 statuses during
        # development, so that stack traces are correctly rendered.
        is_text = response.get("content-type", "").startswith("text")
        valid_status = response.status_code == 200
        streaming = getattr(response, "streaming", False)
        if not is_text or streaming or (settings.DEBUG and not valid_status):
            return response
        # Cache the response if all the required conditions are met.
        # Response must be marked for updating by the
        # ``FetchFromCacheMiddleware`` having a cache get miss, the
        # user must not be authenticated, the HTTP status must be OK
        # and the response mustn't include an expiry age, indicating it
        # shouldn't be cached.
        marked_for_update = getattr(request, "_update_cache", False)
        anon = hasattr(request, "user") and not request.user.is_authenticated()
        timeout = get_max_age(response)
        if timeout is None:
            timeout = settings.CACHE_MIDDLEWARE_SECONDS
        if anon and valid_status and marked_for_update and timeout:
            cache_key = cache_key_prefix(request) + request.get_full_path()
            _cache_set = lambda r: cache_set(cache_key, r.content, timeout)
            if callable(getattr(response, "render", None)):
                # TemplateResponse: only cache once rendering is complete.
                response.add_post_render_callback(_cache_set)
            else:
                _cache_set(response)
        # Second phase rendering for non-cached template code and
        # content. Split on the delimiter the ``nevercache`` tag
        # wrapped its contents in, and render only the content
        # enclosed by it, to avoid possible template code injection.
        token = nevercache_token()
        try:
            # response.content is bytes on Python 3, so the split token
            # must be bytes too; on Python 2 encode() is a no-op path.
            token = token.encode('utf-8')
        except AttributeError:
            pass
        parts = response.content.split(token)
        # Restore csrf token from cookie - check the response
        # first as it may be being set for the first time.
        csrf_token = None
        try:
            csrf_token = response.cookies[settings.CSRF_COOKIE_NAME].value
        except KeyError:
            try:
                csrf_token = request.COOKIES[settings.CSRF_COOKIE_NAME]
            except KeyError:
                pass
        if csrf_token:
            request.META["CSRF_COOKIE"] = csrf_token
        context = RequestContext(request)
        # Odd-indexed parts are the spans the nevercache tag delimited;
        # render only those now, against the live request context.
        for i, part in enumerate(parts):
            if i % 2:
                part = Template(part).render(context).encode("utf-8")
            parts[i] = part
        response.content = b"".join(parts)
        response["Content-Length"] = len(response.content)
        if hasattr(request, '_messages'):
            # Required to clear out user messages.
            request._messages.update(response)
        return response
class FetchFromCacheMiddleware(object):
    """
    Request phase for Mezzanine cache middleware. Return a response
    from cache if found, otherwise mark the request for updating
    the cache in ``UpdateCacheMiddleware``.
    """
    def process_request(self, request):
        """Serve from cache for anonymous GETs; flag misses for caching."""
        if (cache_installed() and request.method == "GET" and
                not request.user.is_authenticated()):
            cache_key = cache_key_prefix(request) + request.get_full_path()
            response = cache_get(cache_key)
            # We need to force a csrf token here, as new sessions
            # won't receive one on their first request, with cache
            # middleware running.
            csrf_mw_name = "django.middleware.csrf.CsrfViewMiddleware"
            if csrf_mw_name in settings.MIDDLEWARE_CLASSES:
                csrf_mw = CsrfViewMiddleware()
                csrf_mw.process_view(request, lambda x: None, None, None)
                get_token(request)
            if response is None:
                # Cache miss: tell UpdateCacheMiddleware to store the
                # rendered result.
                request._update_cache = True
            else:
                return HttpResponse(response)
class SSLRedirectMiddleware(object):
    """
    Handles redirections required for SSL when ``SSL_ENABLED`` is ``True``.
    If ``SSL_FORCE_HOST`` is ``True``, and is not the current host,
    redirect to it.
    Also ensure URLs defined by ``SSL_FORCE_URL_PREFIXES`` are redirect
    to HTTPS, and redirect all other URLs to HTTP if on HTTPS.
    """
    def process_request(self, request):
        """Return a redirect response when host/scheme rules demand one."""
        redirect = None
        forced_host = settings.SSL_FORCE_HOST
        if forced_host and request.get_host().split(":")[0] != forced_host:
            # Wrong host entirely: permanent redirect to the forced one.
            redirect = HttpResponsePermanentRedirect(
                "http://%s%s" % (forced_host, request.get_full_path()))
        elif settings.SSL_ENABLED and not settings.DEV_SERVER:
            target = "%s%s" % (request.get_host(), request.get_full_path())
            if request.path.startswith(settings.SSL_FORCE_URL_PREFIXES):
                # Forced-SSL prefix over plain HTTP: bounce to HTTPS.
                if not request.is_secure():
                    redirect = HttpResponseRedirect("https://%s" % target)
            elif request.is_secure() and settings.SSL_FORCED_PREFIXES_ONLY:
                # HTTPS outside the forced prefixes: bounce back to HTTP.
                redirect = HttpResponseRedirect("http://%s" % target)
        if redirect and request.method == "POST":
            if resolve(request.get_full_path()).url_name == "fb_do_upload":
                # The handler for the flash file uploader in filebrowser
                # doesn't have access to the http headers Django will use
                # to determine whether the request is secure or not, so
                # in this case we don't attempt a redirect - note that
                # when /admin is restricted to SSL using Mezzanine's SSL
                # setup, the flash uploader will post over SSL, so
                # someone would need to explictly go out of their way to
                # trigger this.
                return
            # Tell the client they need to re-POST.
            redirect.status_code = 307
        return redirect
class RedirectFallbackMiddleware(object):
    """
    Port of Django's ``RedirectFallbackMiddleware`` that uses
    Mezzanine's approach for determining the current site.
    """
    def __init__(self):
        # Drop out of the middleware chain entirely when the redirects
        # app isn't installed.
        if "django.contrib.redirects" not in settings.INSTALLED_APPS:
            raise MiddlewareNotUsed
    def process_response(self, request, response):
        """Replace a 404 with a stored redirect (or 410) when one exists."""
        if response.status_code != 404:
            return response
        try:
            redirect = Redirect.objects.get(
                site_id=current_site_id(),
                old_path=request.get_full_path())
        except Redirect.DoesNotExist:
            # No redirect configured for this path: keep the 404.
            return response
        if redirect.new_path:
            return HttpResponsePermanentRedirect(redirect.new_path)
        # An empty new_path means the resource is gone for good.
        return HttpResponseGone()
| |
'''
Plots and calculations directly for the paper
Authors: Chang and MJ
'''
import os
import corner
import pickle
import numpy as np
import util as ut
import data as Data
from prior import PriorRange
from hod_sim import ABC_HODsim
import matplotlib.pyplot as plt
from matplotlib import lines as mlines
from matplotlib import gridspec
from scipy.stats import norm
from matplotlib.colors import colorConverter
from ChangTools.plotting import prettyplot
from ChangTools.plotting import prettycolors
def TrueObservables(Mr=21, b_normal=0.25):
    ''' Plot xi and gmf for the data.

    Two-panel figure: the two-point correlation function xi(r) on the
    left and the group multiplicity function zeta(N) on the right, each
    drawn as a band of +/- 1 sigma from the diagonal of the mock
    covariance. Saves paper.data_observables.pdf and returns None.
    '''
    # xi data
    xi_data = Data.data_xi(Mr=Mr, b_normal=b_normal)
    cov_data = Data.data_cov(Mr=Mr, b_normal=b_normal, inference='mcmc')
    # NOTE(review): assumes rows/cols 1:16 of the covariance are the xi
    # bins and 16: are the gmf bins -- confirm against Data.data_cov.
    data_xi_cov = cov_data[1:16, 1:16]
    xi_r_bin = Data.data_xi_bin(Mr=Mr)
    # gmf data
    data_gmf = Data.data_gmf(Mr=Mr, b_normal=b_normal)
    cov_data = Data.data_cov(Mr=Mr, b_normal=b_normal, inference='mcmc')
    data_gmf_cov = cov_data[16:, 16:]
    r_binedge = Data.data_gmf_bins()
    # bin centers from the bin edges
    gmf_r_bin = 0.5 * (r_binedge[:-1] + r_binedge[1:])
    prettyplot()
    pretty_colors = prettycolors()
    fig = plt.figure(figsize=(12,6))
    sub_xi = fig.add_subplot(121)
    sub_gmf = fig.add_subplot(122)
    #sub_xi.errorbar(xi_r_bin, xi_data, yerr = np.sqrt(np.diag(data_xi_cov)), fmt="o", color='k',
    #        markersize=0, lw=0, capsize=3, elinewidth=1.5)
    #sub_xi.scatter(xi_r_bin, xi_data, c='k', s=10, lw=0)
    # 1-sigma band from the diagonal of the covariance
    sub_xi.fill_between(xi_r_bin,
            xi_data-np.sqrt(np.diag(data_xi_cov)), xi_data+np.sqrt(np.diag(data_xi_cov)),
            color=pretty_colors[1])
    sub_xi.set_yscale('log')
    sub_xi.set_xscale('log')
    sub_xi.set_xlim(0.1, 20)
    sub_xi.set_xlabel(r'$\mathtt{r}\; [\mathtt{Mpc}/h]$', fontsize=25)
    sub_xi.set_ylabel(r'$\xi(r)$', fontsize=25)
    #sub_gmf.errorbar(gmf_r_bin, data_gmf, yerr=np.sqrt(np.diag(data_gmf_cov)),
    #        fmt="o", color='k',
    #        markersize=0, lw=0, capsize=4, elinewidth=2)
    #sub_gmf.scatter(gmf_r_bin, data_gmf, s=15, lw=0, c='k', label='Mock Observation')
    sub_gmf.fill_between(gmf_r_bin,
            data_gmf-np.sqrt(np.diag(data_gmf_cov)), data_gmf+np.sqrt(np.diag(data_gmf_cov)),
            color=pretty_colors[1])
    sub_gmf.set_xlim(1, 20)
    sub_gmf.set_xlabel(r'$\mathtt{N}$ (Group Richness)', fontsize=25)
    # gmf panel shares the figure; put its y axis on the right
    sub_gmf.yaxis.tick_right()
    sub_gmf.yaxis.set_ticks_position('both')
    sub_gmf.yaxis.set_label_position('right')
    sub_gmf.set_ylim([10**-7, 2.0*10**-4])
    sub_gmf.set_yscale('log')
    sub_gmf.set_ylabel(r'$\zeta(\mathtt{N})$', fontsize=25)
    fig.subplots_adjust(hspace=0.05)
    fig_name = ''.join([ut.fig_dir(),
        'paper.data_observables',
        '.pdf'])
    fig.savefig(fig_name, bbox_inches='tight', dpi=150)
    plt.close()
    return None
## ABC figures
''' Figures for the ABC Results section
'''
def PoolEvolution(obvs):
    ''' Demonstrative plot for the evolution of the pool. Illustrate the pool
    evolution for log M_min versus log M_1, which has the starkest evolution
    from its prior.

    Parameters
    ----------
    obvs : str
        Observable set the ABC run was conditioned on; either
        'nbargmf' or 'nbarxi'.

    Raises
    ------
    ValueError
        If ``obvs`` is not one of the two supported observable sets.
    '''
    if obvs == 'nbargmf':
        result_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/run1/',])
        theta_file = lambda tt: ''.join([result_dir, 'nbar_gmf_theta_t', str(tt), '.ABCnbargmf.dat'])
        t_list = [0, 1, 2, 3, 5, 8]
    elif obvs == 'nbarxi':
        result_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/'])
        theta_file = lambda tt: ''.join([result_dir, 'nbar_xi_theta_t', str(tt), '.abc.dat'])
        t_list = [0, 1, 2, 3, 7, 9]
    else:
        # Previously a bare ``raise ValueError``; include the bad value.
        raise ValueError("obvs must be 'nbargmf' or 'nbarxi', got %r" % (obvs,))
    prior_min, prior_max = PriorRange('first_try')
    # Prior range for (log M_min, log M_1): chain columns 2 and 4.
    prior_range = np.zeros((2,2))
    prior_range[:,0] = np.array([prior_min[2], prior_min[4]])
    prior_range[:,1] = np.array([prior_max[2], prior_max[4]])
    # true HOD parameter
    true_dict = Data.data_hod_param(Mr=21)
    true_pair = [true_dict['logMmin'], true_dict['logM1']]
    prettyplot()
    pretty_colors = prettycolors()
    fig = plt.figure(figsize=(12,8))
    all_fig = fig.add_subplot(111)
    for i_t, t in enumerate(t_list):
        # ``//`` makes the integer division explicit (Python 3 safe; same
        # result as ``/`` under Python 2 for these ints).
        sub = fig.add_subplot(2, len(t_list)//2, i_t+1)
        theta_Mmin, theta_M1 = np.loadtxt(theta_file(t), unpack=True, usecols=[2, 4])
        corner.hist2d(theta_Mmin, theta_M1, bins=20, range=prior_range,
                levels=[0.68, 0.95], color='c', fill_contours=True, smooth=1.0)
        t_label = r"$\mathtt{t = "+str(t)+"}$"
        sub.text(13.0, 15.0, t_label, fontsize=25)
        if i_t == len(t_list) - 1:
            true_label = r'$``\mathtt{true}"$'
        else:
            true_label = None
        plt.scatter(np.repeat(true_pair[0],2), np.repeat(true_pair[1],2),
                s=75, marker='*', c='k', lw=0, label=true_label)
        if i_t == len(t_list) - 1:
            plt.legend(loc='lower left', scatterpoints=1, markerscale=2.5,
                    handletextpad=-0.25, scatteryoffsets=[0.5])
        # Show tick labels only on the outer edges of the 2-row grid.
        if i_t == 0:
            sub.set_xticklabels([])
            sub.set_yticklabels([13., 13.5, 14., 14.5, 15., 15.5])
        elif (i_t > 0) and (i_t < len(t_list)//2):
            sub.set_xticklabels([])
            sub.set_yticklabels([])
        elif i_t == len(t_list)//2:
            sub.set_yticklabels([13., 13.5, 14., 14.5, 15.])
        elif i_t > len(t_list)//2:
            sub.set_yticklabels([])
    # Invisible outer axes used only to carry the shared axis labels.
    all_fig.set_xticklabels([])
    all_fig.set_yticklabels([])
    all_fig.set_ylabel(
            r'$\mathtt{log}\;\mathcal{M}_\mathtt{1}$',
            fontsize=30, labelpad=50)
    all_fig.set_xlabel(
            r'$\mathtt{log}\;\mathcal{M}_\mathtt{min}$',
            fontsize=30, labelpad=25)
    fig.subplots_adjust(hspace=0.0)
    fig_name = ''.join([ut.fig_dir(),
        'paper_ABC_poolevolution',
        '.', obvs,
        '.pdf'])
    fig.savefig(fig_name, bbox_inches='tight', dpi=150)
    plt.close()
def PosteriorObservable(Mr=21, b_normal=0.25, clobber=False):
    ''' Plot 1\sigma and 2\sigma model predictions from ABC-PMC posterior likelihood.

    For both observable sets ('nbargmf' and 'nbarxi') forward-model the
    observable for every theta in the final ABC pool (cached in a pickle
    unless ``clobber``), then plot the 68%/95% envelopes against the mock
    data with a ratio panel underneath. Saves paper.ABCposterior.pdf.
    '''
    prettyplot()
    pretty_colors=prettycolors()
    fig = plt.figure(1, figsize=(16,12))
    gs = gridspec.GridSpec(2, 2, height_ratios=[2.5, 1], width_ratios=[1,1])
    for obvs in ['nbargmf', 'nbarxi']:
        if obvs == 'nbargmf':
            result_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/run1/',])
            theta_file = lambda tt: ''.join([result_dir, 'nbar_gmf_theta_t', str(tt), '.ABCnbargmf.dat'])
            tf = 8
            obvs_list = ['gmf']
        elif obvs == 'nbarxi':
            result_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/'])
            theta_file = lambda tt: ''.join([result_dir, 'nbar_xi_theta_t', str(tt), '.abc.dat'])
            tf = 9
            obvs_list = ['xi']
        else:
            raise ValueError
        theta = np.loadtxt(theta_file(tf)) # import thetas
        #theta = theta[:10]
        obvs_file = ''.join(theta_file(tf).rsplit('.dat')[:-1] + ['.', obvs_list[0], '.p'])
        print obvs_file
        HODsimulator = ABC_HODsim(Mr=Mr, b_normal=b_normal)
        if not os.path.isfile(obvs_file) or clobber:
            # Forward-model the observable for each pool member and
            # cache the result, since this is the expensive step.
            model_obv = []
            for i in xrange(len(theta)):
                print i
                obv_i = HODsimulator(
                        theta[i],
                        prior_range=None,
                        observables=obvs_list)
                model_obv.append(obv_i[0])
            model_obv = np.array(model_obv)
            pickle.dump(model_obv, open(obvs_file, 'wb'))
        else:
            model_obv = pickle.load(open(obvs_file, 'rb'))
        if 'xi' in obvs:
            r_bin = Data.data_xi_bin(Mr=Mr)
        elif 'gmf' in obvs:
            r_binedge = Data.data_gmf_bins()
            r_bin = 0.5 * (r_binedge[:-1] + r_binedge[1:])
        # 2.5/16/50/84/97.5 percentiles: a-e bound the 95% and 68% bands.
        a, b, c, d, e = np.percentile(model_obv, [2.5, 16, 50, 84, 97.5], axis=0)
        # plotting
        if obvs == 'nbarxi':
            ax = plt.subplot(gs[0])
        elif obvs == 'nbargmf':
            ax = plt.subplot(gs[1])
        if 'xi' in obvs: # 2PCF
            xi_data = Data.data_xi(Mr=Mr, b_normal=b_normal)
            cov_data = Data.data_cov(Mr=Mr, b_normal=b_normal, inference='mcmc')
            data_xi_cov = cov_data[1:16, 1:16]
            # 95% (lighter) and 68% (darker) posterior predictive bands
            ax.fill_between(r_bin, a, e, color=pretty_colors[3], alpha=0.3, edgecolor="none")
            ax.fill_between(r_bin, b, d, color=pretty_colors[3], alpha=0.5, edgecolor="none")
            ax.errorbar(r_bin, xi_data, yerr = np.sqrt(np.diag(data_xi_cov)), fmt="o", color='k',
                    markersize=0, lw=0, capsize=3, elinewidth=1.5)
            ax.scatter(r_bin, xi_data, c='k', s=10, lw=0)
            ax.set_ylabel(r'$\xi_\mathtt{gg}(\mathtt{r})$', fontsize=27)
            ax.set_yscale('log')
            ax.set_xscale('log')
            ax.set_xticklabels([])
            ax.set_xlim([0.1, 20.])
            ax.set_ylim([0.09, 1000.])
            # Ratio panel: model over data.
            ax = plt.subplot(gs[2])
            ax.fill_between(r_bin, a/xi_data, e/xi_data, color=pretty_colors[3], alpha=0.3, edgecolor="none")
            ax.fill_between(r_bin, b/xi_data, d/xi_data, color=pretty_colors[3], alpha=0.5, edgecolor="none")
            ax.errorbar(r_bin, np.repeat(1., len(xi_data)), yerr=np.sqrt(np.diag(data_xi_cov))/xi_data,
                    fmt="o", color='k', markersize=0, lw=0, capsize=3, elinewidth=1.5)
            ax.plot(np.arange(0.1, 20., 0.1), np.repeat(1., len(np.arange(0.1, 20, 0.1))), c='k', ls='--', lw=2)
            ax.set_xlim([0.1, 20.])
            ax.set_xscale('log')
            ax.set_ylim([0.5, 1.5])
            ax.set_xlabel(r'$\mathtt{r}\;[\mathtt{Mpc}/h]$', fontsize=25)
            ax.set_ylabel(r'$\xi_\mathtt{gg}/\xi_\mathtt{gg}^\mathtt{obvs}$', fontsize=25)
        elif 'gmf' in obvs: # GMF
            data_gmf = Data.data_gmf(Mr=Mr, b_normal=b_normal)
            cov_data = Data.data_cov(Mr=Mr, b_normal=b_normal, inference='mcmc')
            data_gmf_cov = cov_data[16:, 16:]
            ax.fill_between(r_bin, a, e, color=pretty_colors[3], alpha=0.3, edgecolor="none")
            ax.fill_between(r_bin, b, d, color=pretty_colors[3], alpha=0.5, edgecolor="none", label='ABC Posterior')
            ax.errorbar(r_bin, data_gmf, yerr=np.sqrt(np.diag(data_gmf_cov)), fmt="o", color='k',
                    markersize=0, lw=0, capsize=4, elinewidth=2)
            ax.scatter(r_bin, data_gmf, s=15, lw=0, c='k', label='Mock Observation')
            ax.legend(loc='upper right', scatterpoints=1, prop={'size': 25}, borderpad=1.0)
            # Right-hand panel: y axis goes on the right.
            ax.yaxis.tick_right()
            ax.yaxis.set_ticks_position('both')
            ax.yaxis.set_label_position('right')
            ax.set_ylabel(r'$\zeta$ $[(\mathrm{h}/\mathtt{Mpc})^{3}]$', fontsize=25)
            ax.set_yscale('log')
            ax.set_xlim([1., 20.])
            ax.set_xticklabels([])
            ax.set_ylim([10.**-7.2, 2*10**-4.])
            # Ratio panel: model over data.
            ax = plt.subplot(gs[3])
            ax.fill_between(r_bin, a/data_gmf, e/data_gmf, color=pretty_colors[3], alpha=0.3, edgecolor="none")
            ax.fill_between(r_bin, b/data_gmf, d/data_gmf, color=pretty_colors[3], alpha=0.5, edgecolor="none")
            ax.errorbar(r_bin, np.repeat(1., len(data_gmf)), yerr=np.sqrt(np.diag(data_gmf_cov))/data_gmf,
                    fmt="o", color='k', markersize=0, lw=0, capsize=3, elinewidth=1.5)
            ax.plot(np.arange(1., 20., 1), np.repeat(1., len(np.arange(1., 20, 1))), c='k', ls='--', lw=1.75)
            ax.yaxis.tick_right()
            ax.yaxis.set_label_position('right')
            ax.set_ylim([-0.1, 2.1])
            ax.set_ylabel(r'$\zeta/\zeta^\mathtt{obvs}$', fontsize=25)
            ax.set_xlim([1., 20.])
            ax.set_xlabel(r'$\mathtt{N}$ [Group Richness]', fontsize=25)
    fig.subplots_adjust(wspace=0.05, hspace=0.0)
    fig_name = ''.join([ut.fig_dir(),
        'paper',
        '.ABCposterior',
        '.pdf'])
    fig.savefig(fig_name, bbox_inches='tight')
    plt.close()
    return None
def ABC_Coner(obvs, weighted=False):
    ''' Pretty corner plot of the final ABC-PMC pool.

    NOTE(review): the function name is a typo for "Corner"; kept as-is
    for backward compatibility with existing callers.

    Parameters
    ----------
    obvs : str
        Either 'nbargmf' or 'nbarxi'.
    weighted : bool
        If True, weight the pool samples by their importance weights.

    Raises
    ------
    ValueError
        If ``obvs`` is not a supported observable set.
    '''
    if obvs == 'nbargmf':
        result_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/run1/',])
        theta_file = lambda tt: ''.join([result_dir, 'nbar_gmf_theta_t', str(tt), '.ABCnbargmf.dat'])
        w_file = lambda tt: ''.join([result_dir, 'nbar_gmf_w_t', str(tt), '.ABCnbargmf.dat'])
        tf = 8
    elif obvs == 'nbarxi':
        result_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/'])
        theta_file = lambda tt: ''.join([result_dir, 'nbar_xi_theta_t', str(tt), '.abc.dat'])
        w_file = lambda tt: ''.join([result_dir, 'nbar_xi_w_t', str(tt), '.abc.dat'])
        tf = 9
    else:
        # Previously a bare ``raise ValueError``; include the bad value.
        raise ValueError("obvs must be 'nbargmf' or 'nbarxi', got %r" % (obvs,))
    theta = np.loadtxt(theta_file(tf))
    if weighted:
        weights = np.loadtxt(w_file(tf))
    else:
        weights = None
    # "True" HOD parameters of the mock observations.
    true_dict = Data.data_hod_param(Mr=21)
    true_theta = [
            true_dict['logM0'],
            np.log(true_dict['sigma_logM']),
            true_dict['logMmin'],
            true_dict['alpha'],
            true_dict['logM1']
            ]
    prior_min, prior_max = PriorRange('first_try')
    prior_range = np.zeros((len(prior_min),2))
    prior_range[:,0] = np.array(prior_min)
    prior_range[:,1] = np.array(prior_max)
    fig = corner.corner(
            theta,
            weights=weights,
            truths=true_theta,
            truth_color='k',
            labels=[
                r'$\log\;\mathcal{M}_{0}}$',
                r'$\log\;\sigma_\mathtt{log\;M}}$',
                r'$\log\;\mathcal{M}_\mathtt{min}}$',
                r'$\alpha$',
                r'$\log\;\mathcal{M}_{1}}$'
                ],
            label_kwargs={'fontsize': 25},
            range=prior_range,
            quantiles=[0.16,0.5,0.84],
            show_titles=True,
            title_args={"fontsize": 12},
            plot_datapoints=True,
            fill_contours=True,
            levels=[0.68, 0.95],
            color='#ee6a50',
            bins=20,
            smooth=1.0)
    fig_name = ''.join([ut.fig_dir(),
        'paper.ABCcorner',
        '.', obvs,
        '.pdf'])
    fig.savefig(fig_name, bbox_inches='tight', dpi=150)
    plt.close()
    return None
def ABC_Convergence(weighted=False):
    ''' Plot the error bars on the parameters as a function of time step.

    For both observable sets, compute the 68%/95% intervals of each chain
    parameter at every iteration (optionally importance-weighted) and plot
    how they shrink toward the true values. Saves paper.ABCconvergence.pdf.
    '''
    prettyplot()
    pretty_colors = prettycolors()
    fig = plt.figure(figsize=(20, 10))
    for i_obv, obvs in enumerate(['nbargmf', 'nbarxi']):
        if obvs == 'nbargmf':
            result_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/run1/',])
            theta_file = lambda tt: ''.join([result_dir, 'nbar_gmf_theta_t', str(tt), '.ABCnbargmf.dat'])
            w_file = lambda tt: ''.join([result_dir, 'nbar_gmf_w_t', str(tt), '.ABCnbargmf.dat'])
            tf = 8
        elif obvs == 'nbarxi':
            result_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/'])
            theta_file = lambda tt: ''.join([result_dir, 'nbar_xi_theta_t', str(tt), '.abc.dat'])
            w_file = lambda tt: ''.join([result_dir, 'nbar_xi_w_t', str(tt), '.abc.dat'])
            tf = 9
        else:
            raise ValueError
        t_list = range(tf+1)
        columns = [
                r"$\mathtt{log}\;\mathcal{M}_0$",
                r"$\sigma_{\mathtt{log}\;\mathcal{M}}$",
                r"$\mathtt{log}\;\mathcal{M}_\mathtt{min}$",
                r"$\alpha$",
                r"$\mathtt{log}\;\mathcal{M}_1$"
                ]
        true_dict = Data.data_hod_param(Mr=21)
        true_theta = [
                true_dict['logM0'],
                np.log(true_dict['sigma_logM']),
                true_dict['logMmin'],
                true_dict['alpha'],
                true_dict['logM1']
                ]
        prior_min, prior_max = PriorRange('first_try')
        # Per-iteration percentile bounds for each of the 5 parameters:
        # a/e are the 95% bounds, b/d the 68% bounds.
        a_theta = np.zeros((len(t_list), 5))
        b_theta = np.zeros((len(t_list), 5))
        d_theta = np.zeros((len(t_list), 5))
        e_theta = np.zeros((len(t_list), 5))
        for i_t, tt in enumerate(t_list):
            theta_i = np.loadtxt(theta_file(tt), unpack=True)
            w_i = np.loadtxt(w_file(tt))
            for i_par in range(len(theta_i)):
                if not weighted:
                    a, b, d, e = np.percentile(theta_i[i_par], [2.5, 16, 84, 97.5], axis=0)
                else:
                    # Importance-weighted quantiles.
                    a = ut.quantile_1D(theta_i[i_par], w_i, 0.025)
                    b = ut.quantile_1D(theta_i[i_par], w_i, 0.16)
                    d = ut.quantile_1D(theta_i[i_par], w_i, 0.84)
                    e = ut.quantile_1D(theta_i[i_par], w_i, 0.975)
                a_theta[i_t, i_par] = a
                b_theta[i_t, i_par] = b
                d_theta[i_t, i_par] = d
                e_theta[i_t, i_par] = e
        # Only plot log Mmin, alpha and log M1 (columns 2, 3, 4).
        keep_index = [2,3,4]
        for ii, i in enumerate(keep_index):
            if i == keep_index[-1]:
                true_label = r'$``\mathtt{true}"$'
                abc_1sig_label = r'ABC Pool'
            else:
                true_label = None
                abc_1sig_label = None
            sub = fig.add_subplot(2, len(keep_index), i_obv * len(keep_index) + ii+1)
            sub.fill_between(t_list, a_theta[:, i], e_theta[:,i],
                    color=pretty_colors[3], alpha=0.3, edgecolor="none")
            sub.fill_between(t_list, b_theta[:, i], d_theta[:,i],
                    color=pretty_colors[3], alpha=0.5, edgecolor="none",
                    label=abc_1sig_label)
            sub.plot(t_list, np.repeat(true_theta[i], len(t_list)), c='k', ls='--', lw=2,
                    label=true_label)
            if ii == 0:
                # Annotate which observable set this row corresponds to.
                if obvs == 'nbargmf':
                    sub.text(4.85, 13.4, r"$\bar{\mathtt{n}}$ and $\zeta(\mathtt{N})$", fontsize=25)
                elif obvs == 'nbarxi':
                    sub.text(4.85, 13.4, r"$\bar{\mathtt{n}}$ and $\xi(\mathtt{r})$", fontsize=25)
            sub.set_ylabel(columns[i], fontsize=25)
            sub.set_ylim([prior_min[i], prior_max[i]])
            sub.set_xlim([-1, 10])
            if i_obv == 1:
                sub.legend(loc='upper right', borderpad=1.)
                sub.set_xlabel('iterations', fontsize=25)
                if i == 4:
                    sub.set_yticklabels([13.0, 13.5, 14.0, 14.5, 15.])
            else:
                sub.set_xticklabels([])
    fig.subplots_adjust(wspace=0.3, hspace=0.0)
    if weighted:
        weight_str = '.weighted'
    else:
        weight_str = ''
    fig_name = ''.join([ut.fig_dir(),
        'paper',
        '.ABCconvergence',
        weight_str,
        '.pdf'])
    fig.savefig(fig_name, bbox_inches='tight', dpi=150)
    plt.close()
    return None
def ABCvsMCMC_histogram(obvs, nwalkers=100, nburns=9000):
    ''' Plots that compare the ABC posteriors to the MCMC posteriors.

    Overlaid marginal histograms (top row) and 1/2-sigma box plots
    (bottom row) for log Mmin, alpha and log M1. ``nburns * nwalkers``
    leading MCMC samples are discarded as burn-in.
    '''
    if obvs == 'nbargmf':
        abc_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/run1/',])
        abc_theta_file = lambda tt: ''.join([abc_dir, 'nbar_gmf_theta_t', str(tt), '.ABCnbargmf.dat'])
        tf = 8
        mcmc_dir = ''.join([ut.dat_dir(), 'paper/'])
        mcmc_filename = ''.join([mcmc_dir, 'nbar_gmf.mcmc.mcmc_chain.p'])
    elif obvs == 'nbarxi':
        abc_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/',])
        abc_theta_file = lambda tt: ''.join([abc_dir, 'nbar_xi_theta_t', str(tt), '.abc.dat'])
        tf = 9
        mcmc_dir = ''.join([ut.dat_dir(), 'paper/'])
        mcmc_filename = ''.join([mcmc_dir, 'nbar_xi.mcmc.mcmc_chain.p'])
    else:
        raise ValueError
    prior_min, prior_max = PriorRange('first_try')
    prior_range = np.zeros((len(prior_min),2))
    prior_range[:,0] = prior_min
    prior_range[:,1] = prior_max
    # true HOD parameter
    true_dict = Data.data_hod_param(Mr=21)
    truths = [
            true_dict['logM0'],                 # log M0
            np.log(true_dict['sigma_logM']),    # log(sigma)
            true_dict['logMmin'],               # log Mmin
            true_dict['alpha'],                 # alpha
            true_dict['logM1']                  # log M1
            ]
    # Discard the burn-in portion of the MCMC chain.
    mcmc_sample = pickle.load(open(mcmc_filename, 'rb'))[nburns*nwalkers:,:]
    abc_sample = np.loadtxt(abc_theta_file(tf))
    # Convert the 1/2-sigma Gaussian levels into sorted-sample indices
    # for the box/whisker ends.
    normie = norm()
    sig1lo = normie.cdf(-1)
    sig1hi = normie.cdf(1)
    sig2lo = normie.cdf(-2)
    sig2hi = normie.cdf(2)
    nsamples_mcmc = len(mcmc_sample[:, 2])
    nsamples_abc = len(abc_sample[:, 2])
    sig1lo_mcmc = int(sig1lo * nsamples_mcmc)
    sig2lo_mcmc = int(sig2lo * nsamples_mcmc)
    sig1hi_mcmc = int(sig1hi * nsamples_mcmc)
    sig2hi_mcmc = int(sig2hi * nsamples_mcmc)
    sig1lo_abc = int(sig1lo * nsamples_abc)
    sig2lo_abc = int(sig2lo * nsamples_abc)
    sig1hi_abc = int(sig1hi * nsamples_abc)
    sig2hi_abc = int(sig2hi * nsamples_abc)
    par_labels = [
            r'$\mathtt{log}\;\mathcal{M}_{0}$',
            r'$\mathtt{log}\;\sigma_\mathtt{log\;M}$',
            r'$\mathtt{log}\;\mathcal{M}_\mathtt{min}$',
            r'$\alpha$',
            r'$\mathtt{log}\;\mathcal{M}_{1}$'
            ]
    prettyplot()
    pretty_colors = prettycolors()
    fig = plt.figure(1, figsize=(20,8))
    gs = gridspec.GridSpec(2, 3, height_ratios=[2.75, 1])
    # first panel
    for i in [0, 1, 2]:
        # Map panel index to chain column and choose the x range
        # (log Mmin and log M1 get hand-tuned zoom ranges).
        if i == 0:
            i_col = 2
            prior_range[i_col,0] = 12.5
            prior_range[i_col,1] = 13.
            plot_range = prior_range[i_col, :]
        elif i == 1:
            i_col = 3
            plot_range = prior_range[i_col, :]
        elif i == 2:
            i_col = 4
            prior_range[i_col,0] = 13.5
            prior_range[i_col,1] = 14.25
            plot_range = np.array([13.5, 14.5])
        ax = plt.subplot(gs[i])
        # MCMC marginal histogram.
        q = ax.hist(mcmc_sample[:,i_col], bins=20,
                range=[prior_range[i_col,0], prior_range[i_col,1]],
                normed=True,
                alpha=0.75,
                color=pretty_colors[1],
                linewidth=2,
                histtype='stepfilled',
                edgecolor=None
                )
        # ABC marginal histogram, overlaid.
        qq = ax.hist(abc_sample[:,i_col], bins=20,
                range=[prior_range[i_col,0], prior_range[i_col,1]],
                normed=True,
                alpha=0.75,
                color=pretty_colors[3],
                linewidth=2,
                histtype='stepfilled',
                edgecolor=None
                )
        ax.axvline(truths[i_col], color='k', ls='--', linewidth=3)
        ax.set_xticklabels([])
        ax.set_xlim([plot_range[0] , plot_range[1]])
        if i == 2:
            # Legend proxies (the hists themselves aren't line artists).
            thick_line2 = mlines.Line2D([], [], ls='-', c=pretty_colors[1], linewidth=4,
                    label='$\mathcal{L}^\mathtt{Gauss}$ \nMCMC')
            thick_line1 = mlines.Line2D([], [], ls='-', c=pretty_colors[3], linewidth=4,
                    label='ABC-PMC')
            ax.legend(loc='upper right', handles=[thick_line2, thick_line1],
                    frameon=False, fontsize=25, handletextpad=-0.0)
        # general box properties
        boxprops = {'color': 'k'}
        medianprops = {'alpha': 0.}
        bplots1 = []
        ax = plt.subplot(gs[i+3])
        # stats dict for each box: 68% box, 95% whiskers, no outliers.
        bplots1.append({'med': np.median(mcmc_sample[:, i_col]),
            'q1': np.sort(mcmc_sample[:, i_col])[sig1lo_mcmc],
            'q3': np.sort(mcmc_sample[:, i_col])[sig1hi_mcmc],
            'whislo': np.sort(mcmc_sample[:, i_col])[sig2lo_mcmc],
            'whishi': np.sort(mcmc_sample[:, i_col])[sig2hi_mcmc],
            'fliers': []})
        bplots1.append({'med': np.median(abc_sample[:, i_col]),
            'q1': np.sort(abc_sample[:, i_col])[sig1lo_abc],
            'q3': np.sort(abc_sample[:, i_col])[sig1hi_abc],
            'whislo': np.sort(abc_sample[:, i_col])[sig2lo_abc],
            'whishi': np.sort(abc_sample[:, i_col])[sig2hi_abc],
            'fliers': []})
        whiskprop = dict(linestyle='-', linewidth=2, color='k')
        boxprops = dict(linestyle='-', linewidth=2, color='k')
        bxp1 = ax.bxp(bplots1, positions=[1,2], vert=False, patch_artist=True,
                showfliers=False, boxprops=boxprops, medianprops=medianprops, whiskerprops=whiskprop)
        for ibox, box in enumerate(bxp1['boxes']):
            if ibox == 0:
                box.set(facecolor=pretty_colors[1], alpha=0.75)
            elif ibox == 1:
                box.set(facecolor=pretty_colors[3], alpha=0.75)
        ax.axvline(truths[i_col], color='k', ls='--', linewidth=3)
        ax.set_xlim([plot_range[0] , plot_range[1]])
        ax.set_xlabel(par_labels[i_col], fontsize=25, labelpad=15)
        if i == 0:
            ax.set_yticks([1,2])
            ax.set_yticklabels([r"$\mathtt{MCMC}$", r"$\mathtt{ABC}$"])
        else:
            ax.set_yticks([])
    fig.subplots_adjust(wspace=0.2, hspace=0.0)
    fig_name = ''.join([ut.fig_dir(),
        'paper.ABCvsMCMC',
        '.', obvs,
        '.pdf'])
    print fig_name
    fig.savefig(fig_name, bbox_inches='tight', dpi=150)
    return None
    #plt.show()
def ABCvsMCMC_contour(obvs, nwalkers=100, nburns=9000, sigma=False):
    ''' Plots that compare the ABC posteriors to the MCMC posteriors

    Three 2D contour panels (one per HOD parameter pair) overlaying the
    MCMC posterior (blue) and the ABC-PMC posterior (orange), with the
    true parameter values marked by a star. The figure is written to
    paper.ABCvsMCMC.contour.<obvs>[.true1sigma].pdf and nothing is returned.

    Parameters
    ----------
    obvs : str
        Observable combination: 'nbargmf' or 'nbarxi'. Anything else
        raises ValueError.
    nwalkers : int
        Number of MCMC walkers; together with nburns determines how many
        leading rows of the flattened chain are discarded as burn-in.
    nburns : int
        Burn-in steps per walker to discard.
    sigma : bool
        If True, draw contours at 1-exp(-0.5) and 1-exp(-0.125) enclosed
        mass (presumably the 1-sigma and 0.5-sigma levels of a 2D
        Gaussian -- TODO confirm intent); otherwise at 68% / 95%.
    '''
    # Resolve the ABC particle files and the pickled MCMC chain for the
    # requested observable combination. tf is the final ABC iteration used.
    if obvs == 'nbargmf':
        abc_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/run1/',])
        abc_theta_file = lambda tt: ''.join([abc_dir, 'nbar_gmf_theta_t', str(tt), '.ABCnbargmf.dat'])
        tf = 8
        mcmc_dir = ''.join([ut.dat_dir(), 'paper/'])
        mcmc_filename = ''.join([mcmc_dir, 'nbar_gmf.mcmc.mcmc_chain.p'])
    elif obvs == 'nbarxi':
        abc_dir = ''.join([ut.dat_dir(), 'paper/ABC', obvs, '/',])
        abc_theta_file = lambda tt: ''.join([abc_dir, 'nbar_xi_theta_t', str(tt), '.abc.dat'])
        tf = 9
        mcmc_dir = ''.join([ut.dat_dir(), 'paper/'])
        mcmc_filename = ''.join([mcmc_dir, 'nbar_xi.mcmc.mcmc_chain.p'])
    else:
        raise ValueError

    # Prior bounds, stored as one (n_param, 2) array of [min, max] rows.
    prior_min, prior_max = PriorRange('first_try')
    prior_range = np.zeros((len(prior_min),2))
    prior_range[:,0] = prior_min
    prior_range[:,1] = prior_max

    # true HOD parameter
    true_dict = Data.data_hod_param(Mr=21)
    truths = [
            true_dict['logM0'],                 # log M0
            np.log(true_dict['sigma_logM']),    # log(sigma)
            true_dict['logMmin'],               # log Mmin
            true_dict['alpha'],                 # alpha
            true_dict['logM1']                  # log M1
            ]

    # The chain is stored flattened, so the first nburns*nwalkers rows are
    # burn-in and are dropped.
    # NOTE(review): the file handle passed to pickle.load is never closed;
    # consider a `with open(...)` block.
    mcmc_sample = pickle.load(open(mcmc_filename, 'rb'))[nburns*nwalkers:,:]
    abc_sample = np.loadtxt(abc_theta_file(tf))

    par_labels = [
            r'$\mathtt{log}\;\mathcal{M}_{0}$',
            r'$\mathtt{log}\;\sigma_\mathtt{log\;M}$',
            r'$\mathtt{log}\;\mathcal{M}_\mathtt{min}$',
            r'$\alpha$',
            r'$\mathtt{log}\;\mathcal{M}_{1}$'
            ]

    prettyplot()
    pretty_colors = prettycolors()
    fig = plt.figure(1, figsize=(20,6))
    gs = gridspec.GridSpec(1, 3)

    # first panel
    for i in [0, 1, 2]:
        # Pick the parameter-column pair plotted in this panel and its
        # hard-coded (or prior-derived) axis ranges.
        plot_range = np.zeros((2,2))
        if i == 0:
            col_pair = [2, 3]       # log Mmin vs alpha
            plot_range[0, 0] = 12.5
            plot_range[0, 1] = 13.0
            plot_range[1, 0] = prior_range[3, 0]
            plot_range[1, 1] = prior_range[3, 1]
        elif i == 2:
            col_pair = [4, 2]       # log M1 vs log Mmin
            plot_range[0, 0] = 13.6
            plot_range[0, 1] = 14.2
            plot_range[1, 0] = 12.5
            plot_range[1, 1] = 13.0
        elif i == 1:
            col_pair = [3, 4]       # alpha vs log M1
            plot_range[0, 0] = prior_range[3, 0]
            plot_range[0, 1] = prior_range[3, 1]
            plot_range[1, 0] = 13.6
            plot_range[1, 1] = 14.2

        # Labels are only attached on the last panel.
        if i == 2:
            mcmc_label = r'$\mathcal{L}^\mathtt{Gauss}$ MCMC'
            abc_label = 'ABC-PMC'
        else:
            mcmc_label = None
            abc_label = None

        mcmc_par1 = mcmc_sample[:,col_pair[0]]
        mcmc_par2 = mcmc_sample[:,col_pair[1]]
        abc_par1 = abc_sample[:, col_pair[0]]
        abc_par2 = abc_sample[:, col_pair[1]]

        ax = plt.subplot(gs[i])

        # Contour levels: enclosed-mass fractions passed to corner.hist2d.
        if sigma:
            lvls = [1-np.exp(-0.5), 1-np.exp(-0.125)]
        else:
            lvls = [0.68, 0.95]

        # MCMC posterior (blue) under the ABC posterior (orange).
        corner.hist2d(mcmc_par1, mcmc_par2, bins=20, range=plot_range, ax = ax, plot_datapoints=False,
                levels=lvls, color='#1F77B4', fill_contours=True, smooth=1.0, label=mcmc_label)
        corner.hist2d(abc_par1, abc_par2, bins=20, range=plot_range, ax = ax,
                levels=lvls, color='#FF7F0E', fill_contours=True, smooth=1.0, label=abc_label)

        # Star at the true parameter values (repeated twice so scatter
        # receives arrays).
        ax.scatter(np.repeat(truths[col_pair[0]],2), np.repeat(truths[col_pair[1]],2),
                s=100, marker='*', c='k', lw=0, label=None)
        #ax.axvline(truths[i_col], color='k', ls='--', linewidth=3)
        #ax.set_xticklabels([])
        ax.set_xlim([plot_range[0, 0] , plot_range[0, 1]])
        ax.set_ylim([plot_range[1, 0] , plot_range[1, 1]])

        # Legend proxies: thick line handles stand in for the filled
        # contours (one legend entry per panel, split across panels 1 and 2).
        if i == 2:
            thick_line1 = mlines.Line2D([], [], ls='-', c='#FF7F0E', linewidth=12, alpha=0.5,
                    label='ABC-PMC')
            ax.legend(loc='upper right', handles=[thick_line1],
                    frameon=False, fontsize=25, handletextpad=0.1, scatteryoffsets=[0.5])
        elif i == 1:
            thick_line2 = mlines.Line2D([], [], ls='-', c='#1F77B4', linewidth=12, alpha=0.5,
                    label='$\mathcal{L}^\mathtt{Gauss}$ \nMCMC')
            ax.legend(loc='upper right', handles=[thick_line2],
                    frameon=False, fontsize=25, handletextpad=0.1, scatteryoffsets=[0.5])

        ax.set_xlabel(par_labels[col_pair[0]], fontsize=25, labelpad=15)
        ax.set_ylabel(par_labels[col_pair[1]], fontsize=25)

    # Suffix the output name when the sigma-style contour levels were used.
    if sigma:
        sigma_str = '.true1sigma'
    else:
        sigma_str = ''

    fig.subplots_adjust(wspace=0.3)
    fig_name = ''.join([ut.fig_dir(),
        'paper.ABCvsMCMC.contour',
        '.', obvs,
        sigma_str,
        '.pdf'])
    fig.savefig(fig_name, bbox_inches='tight', dpi=150)
    return None
if __name__=="__main__":
    # Regenerate the ABC-vs-MCMC contour comparison figures for both
    # observable combinations; the commented-out calls below are other
    # paper figures that can be re-enabled as needed.
    #TrueObservables()
    ABCvsMCMC_contour('nbargmf', nwalkers=100, nburns=9000, sigma=True)
    ABCvsMCMC_contour('nbarxi', nwalkers=100, nburns=9000, sigma=True)
    #ABC_Coner('nbargmf')
    #ABC_Coner('nbarxi')
    #ABC_Convergence(weighted=True)
    #ABC_Convergence(weighted=False)
    #PosteriorObservable(Mr=21, b_normal=0.25)
    #PoolEvolution('nbargmf')
    #PoolEvolution('nbarxi')
    #PosteriorObservable(Mr=21, b_normal=0.25)
    #PoolEvolution('nbargmf')
    #PoolEvolution('nbarxi')
| |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Full text indexing and search, implemented in pure python.
Defines a SearchableModel subclass of db.Model that supports full text
indexing and search, based on the datastore's existing indexes.
Don't expect too much. First, there's no ranking, which is a killer drawback.
There's also no exact phrase match, substring match, boolean operators,
stemming, or other common full text search features. Finally, support for stop
words (common words that are not indexed) is currently limited to English.
To be indexed, entities must be created and saved as SearchableModel
instances, e.g.:
class Article(search.SearchableModel):
text = db.TextProperty()
...
article = Article(text=...)
article.save()
To search the full text index, use the SearchableModel.all() method to get an
instance of SearchableModel.Query, which subclasses db.Query. Use its search()
method to provide a search query, in addition to any other filters or sort
orders, e.g.:
query = article.all().search('a search query').filter(...).order(...)
for result in query:
...
The full text index is stored in a property named __searchable_text_index.
In general, if you just want to provide full text search, you *don't* need to
add any extra indexes to your index.yaml. However, if you want to use search()
in a query *in addition to* an ancestor, filter, or sort order, you'll need to
create an index in index.yaml with the __searchable_text_index property. For
example:
- kind: Article
properties:
- name: __searchable_text_index
- name: date
direction: desc
...
Note that using SearchableModel will noticeably increase the latency of save()
operations, since it writes an index row for each indexable word. This also
means that the latency of save() will increase roughly with the size of the
properties in a given entity. Caveat hacker!
"""
import re
import string
import sys
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.ext import db
from google.appengine.datastore import datastore_pb
class SearchableEntity(datastore.Entity):
  """A subclass of datastore.Entity that supports full text indexing.

  Automatically indexes all string and Text properties, using the datastore's
  built-in per-property indices. To search, use the SearchableQuery class and
  its Search() method.
  """

  # Reserved property name under which the keyword index list is stored.
  _FULL_TEXT_INDEX_PROPERTY = '__searchable_text_index'

  # Words shorter than this are not indexed.
  _FULL_TEXT_MIN_LENGTH = 3

  # English stop words; these are never indexed.
  _FULL_TEXT_STOP_WORDS = frozenset([
   'a', 'about', 'according', 'accordingly', 'affected', 'affecting', 'after',
   'again', 'against', 'all', 'almost', 'already', 'also', 'although',
   'always', 'am', 'among', 'an', 'and', 'any', 'anyone', 'apparently', 'are',
   'arise', 'as', 'aside', 'at', 'away', 'be', 'became', 'because', 'become',
   'becomes', 'been', 'before', 'being', 'between', 'both', 'briefly', 'but',
   'by', 'came', 'can', 'cannot', 'certain', 'certainly', 'could', 'did', 'do',
   'does', 'done', 'during', 'each', 'either', 'else', 'etc', 'ever', 'every',
   'following', 'for', 'found', 'from', 'further', 'gave', 'gets', 'give',
   'given', 'giving', 'gone', 'got', 'had', 'hardly', 'has', 'have', 'having',
   'here', 'how', 'however', 'i', 'if', 'in', 'into', 'is', 'it', 'itself',
   'just', 'keep', 'kept', 'knowledge', 'largely', 'like', 'made', 'mainly',
   'make', 'many', 'might', 'more', 'most', 'mostly', 'much', 'must', 'nearly',
   'necessarily', 'neither', 'next', 'no', 'none', 'nor', 'normally', 'not',
   'noted', 'now', 'obtain', 'obtained', 'of', 'often', 'on', 'only', 'or',
   'other', 'our', 'out', 'owing', 'particularly', 'past', 'perhaps', 'please',
   'poorly', 'possible', 'possibly', 'potentially', 'predominantly', 'present',
   'previously', 'primarily', 'probably', 'prompt', 'promptly', 'put',
   'quickly', 'quite', 'rather', 'readily', 'really', 'recently', 'regarding',
   'regardless', 'relatively', 'respectively', 'resulted', 'resulting',
   'results', 'said', 'same', 'seem', 'seen', 'several', 'shall', 'should',
   'show', 'showed', 'shown', 'shows', 'significantly', 'similar', 'similarly',
   'since', 'slightly', 'so', 'some', 'sometime', 'somewhat', 'soon',
   'specifically', 'state', 'states', 'strongly', 'substantially',
   'successfully', 'such', 'sufficiently', 'than', 'that', 'the', 'their',
   'theirs', 'them', 'then', 'there', 'therefore', 'these', 'they', 'this',
   'those', 'though', 'through', 'throughout', 'to', 'too', 'toward', 'under',
   'unless', 'until', 'up', 'upon', 'use', 'used', 'usefully', 'usefulness',
   'using', 'usually', 'various', 'very', 'was', 'we', 'were', 'what', 'when',
   'where', 'whether', 'which', 'while', 'who', 'whose', 'why', 'widely',
   'will', 'with', 'within', 'without', 'would', 'yet', 'you'])

  # Default tokenizer: any punctuation character delimits a word.
  _word_delimiter_regex = re.compile('[' + re.escape(string.punctuation) + ']')

  def __init__(self, kind_or_entity, word_delimiter_regex=None, *args,
               **kwargs):
    """Constructor. May be called as a copy constructor.

    If kind_or_entity is a datastore.Entity, copies it into this Entity.
    datastore.Get() and Query() returns instances of datastore.Entity, so this
    is useful for converting them back to SearchableEntity so that they'll be
    indexed when they're stored back in the datastore.

    Otherwise, passes through the positional and keyword args to the
    datastore.Entity constructor.

    Args:
      kind_or_entity: string or datastore.Entity
      word_delimiter_regex: a regex matching characters that delimit words
    """
    self._word_delimiter_regex = word_delimiter_regex
    if isinstance(kind_or_entity, datastore.Entity):
      # Copy constructor: steal the key and properties of the source entity.
      self._Entity__key = kind_or_entity._Entity__key
      self.update(kind_or_entity)
    else:
      super(SearchableEntity, self).__init__(kind_or_entity, *args, **kwargs)

  def _ToPb(self):
    """Rebuilds the full text index, then delegates to the superclass.

    Returns:
      entity_pb.Entity
    """
    if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
      del self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY]

    index = set()
    for (name, values) in self.items():
      if not isinstance(values, list):
        values = [values]
      # BUGFIX: guard against empty list properties; the previous code
      # indexed values[0] unconditionally, raising IndexError for [].
      # Only string-ish values are indexed; Blobs are binary and skipped.
      if (values and isinstance(values[0], basestring) and
          not isinstance(values[0], datastore_types.Blob)):
        for value in values:
          index.update(SearchableEntity._FullTextIndex(
              value, self._word_delimiter_regex))

    index_list = list(index)
    if index_list:
      self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY] = index_list

    return super(SearchableEntity, self)._ToPb()

  @classmethod
  def _FullTextIndex(cls, text, word_delimiter_regex=None):
    """Returns a set of keywords appropriate for full text indexing.

    Lowercases the text, splits it on word_delimiter_regex (default: any
    punctuation), and drops stop words and words shorter than
    _FULL_TEXT_MIN_LENGTH. See SearchableQuery.Search() for details.

    Args:
      text: string
      word_delimiter_regex: optional compiled regex overriding the default

    Returns:
      set of strings
    """
    if word_delimiter_regex is None:
      word_delimiter_regex = cls._word_delimiter_regex

    if text:
      datastore_types.ValidateString(text, 'text', max_len=sys.maxint)
      text = word_delimiter_regex.sub(' ', text)
      words = text.lower().split()

      words = set(unicode(w) for w in words)

      words -= cls._FULL_TEXT_STOP_WORDS
      for word in list(words):
        if len(word) < cls._FULL_TEXT_MIN_LENGTH:
          words.remove(word)
    else:
      words = set()

    return words
class SearchableQuery(datastore.Query):
  """A subclass of datastore.Query that supports full text search.

  Only searches over entities that were created and stored using the
  SearchableEntity or SearchableModel classes.
  """

  def Search(self, search_query, word_delimiter_regex=None):
    """Adds a full text search clause to this query; filters may be combined.

    Keywords that would never have been indexed -- stop words and words
    below the minimum length -- are silently dropped from the query.

    Args:
      search_query: string
      word_delimiter_regex: optional regex used to split the query into words

    Returns:
      SearchableQuery; this query, for chaining
    """
    datastore_types.ValidateString(search_query, 'search query')
    self._search_query = search_query
    self._word_delimiter_regex = word_delimiter_regex
    return self

  def _ToPb(self, limit=None, offset=None):
    """Adds an equality filter per search keyword, then delegates up.

    Raises datastore_errors.BadFilterError if a filter already exists on
    the reserved index property.

    Args:
      # an upper bound on the number of results returned by the query.
      limit: int
      # number of results that match the query to skip. limit is applied
      # after the offset is fulfilled.
      offset: int

    Returns:
      datastore_pb.Query
    """
    if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
      raise datastore_errors.BadFilterError(
          '%s is a reserved name.' % SearchableEntity._FULL_TEXT_INDEX_PROPERTY)

    pb = super(SearchableQuery, self)._ToPb(limit=limit, offset=offset)

    if hasattr(self, '_search_query'):
      keywords = SearchableEntity._FullTextIndex(
          self._search_query, self._word_delimiter_regex)
      for keyword in keywords:
        # One EQUAL filter per keyword on the hidden index property, so
        # only entities containing every keyword match.
        eq_filter = pb.add_filter()
        eq_filter.set_op(datastore_pb.Query_Filter.EQUAL)
        keyword_prop = eq_filter.add_property()
        keyword_prop.set_name(SearchableEntity._FULL_TEXT_INDEX_PROPERTY)
        keyword_prop.mutable_value().set_stringvalue(
            unicode(keyword).encode('utf-8'))

    return pb
class SearchableModel(db.Model):
  """A subclass of db.Model that supports full text search and indexing.

  Automatically indexes all string-based properties. To search, use the all()
  method to get a SearchableModel.Query, then use its search() method.
  """

  class Query(db.Query):
    """A subclass of db.Query that supports full text search."""
    # The pending full text query string; None until search() is called.
    _search_query = None

    def search(self, search_query):
      """Adds a full text search to this query.

      Args:
        search_query, a string containing the full text search query.

      Returns:
        self
      """
      self._search_query = search_query
      return self

    def _get_query(self):
      """Wraps db.Query._get_query() and injects SearchableQuery."""
      query = db.Query._get_query(self, _query_class=SearchableQuery)
      if self._search_query:
        # Forward the stored query string to the low-level SearchableQuery.
        query.Search(self._search_query)
      return query

  def _populate_internal_entity(self):
    """Wraps db.Model._populate_internal_entity() and injects
    SearchableEntity."""
    return db.Model._populate_internal_entity(self,
                                              _entity_class=SearchableEntity)

  @classmethod
  def from_entity(cls, entity):
    """Wraps db.Model.from_entity() and injects SearchableEntity."""
    # Wrap plain entities so the index is rebuilt on the next save.
    if not isinstance(entity, SearchableEntity):
      entity = SearchableEntity(entity)
    return super(SearchableModel, cls).from_entity(entity)

  @classmethod
  def all(cls):
    """Returns a SearchableModel.Query for this kind."""
    return SearchableModel.Query(cls)
| |
"""
This is the Django template system.
How it works:
The Lexer.tokenize() function converts a template string (i.e., a string containing
markup with custom template tags) to tokens, which can be either plain text
(TOKEN_TEXT), variables (TOKEN_VAR) or block statements (TOKEN_BLOCK).
The Parser() class takes a list of tokens in its constructor, and its parse()
method returns a compiled template -- which is, under the hood, a list of
Node objects.
Each Node is responsible for creating some sort of output -- e.g. simple text
(TextNode), variable values in a given context (VariableNode), results of basic
logic (IfNode), results of looping (ForNode), or anything else. The core Node
types are TextNode, VariableNode, IfNode and ForNode, but plugin modules can
define their own custom node types.
Each Node has a render() method, which takes a Context and returns a string of
the rendered node. For example, the render() method of a Variable Node returns
the variable's value as a string. The render() method of a ForNode returns the
rendered output of whatever was inside the loop, recursively.
The Template class is a convenient wrapper that takes care of template
compilation and rendering.
Usage:
The only thing you should ever use directly in this file is the Template class.
Create a compiled template object with a template_string, then call render()
with a context. In the compilation stage, the TemplateSyntaxError exception
will be raised if the template doesn't have proper syntax.
Sample code:
>>> from django import template
>>> s = u'<html>{% if test %}<h1>{{ varvalue }}</h1>{% endif %}</html>'
>>> t = template.Template(s)
(t is now a compiled template, and its render() method can be called multiple
times with multiple contexts)
>>> c = template.Context({'test':True, 'varvalue': 'Hello'})
>>> t.render(c)
u'<html><h1>Hello</h1></html>'
>>> c = template.Context({'test':False, 'varvalue': 'Hello'})
>>> t.render(c)
u'<html></html>'
"""
from __future__ import unicode_literals
import re
import warnings
from functools import partial
from importlib import import_module
from inspect import getargspec, getcallargs
from django.apps import apps
from django.template.context import ( # NOQA: imported for backwards compatibility
BaseContext, Context, ContextPopException, RequestContext,
)
from django.utils import lru_cache, six
from django.utils.deprecation import (
RemovedInDjango20Warning, RemovedInDjango21Warning,
)
from django.utils.encoding import (
force_str, force_text, python_2_unicode_compatible,
)
from django.utils.formats import localize
from django.utils.html import conditional_escape
from django.utils.itercompat import is_iterable
from django.utils.module_loading import module_has_submodule
from django.utils.safestring import (
EscapeData, SafeData, mark_for_escaping, mark_safe,
)
from django.utils.text import (
get_text_list, smart_split, unescape_string_literal,
)
from django.utils.timezone import template_localtime
from django.utils.translation import pgettext_lazy, ugettext_lazy
# Token types emitted by the Lexer.
TOKEN_TEXT = 0
TOKEN_VAR = 1
TOKEN_BLOCK = 2
TOKEN_COMMENT = 3
# Human-readable token-type names; used by Token.__str__ for debug output.
TOKEN_MAPPING = {
    TOKEN_TEXT: 'Text',
    TOKEN_VAR: 'Var',
    TOKEN_BLOCK: 'Block',
    TOKEN_COMMENT: 'Comment',
}

# template syntax constants
FILTER_SEPARATOR = '|'
FILTER_ARGUMENT_SEPARATOR = ':'
VARIABLE_ATTRIBUTE_SEPARATOR = '.'
BLOCK_TAG_START = '{%'
BLOCK_TAG_END = '%}'
VARIABLE_TAG_START = '{{'
VARIABLE_TAG_END = '}}'
COMMENT_TAG_START = '{#'
COMMENT_TAG_END = '#}'
TRANSLATOR_COMMENT_MARK = 'Translators'
SINGLE_BRACE_START = '{'
SINGLE_BRACE_END = '}'

ALLOWED_VARIABLE_CHARS = ('abcdefghijklmnopqrstuvwxyz'
                          'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.')

# what to report as the origin for templates that come from non-loader sources
# (e.g. strings)
UNKNOWN_SOURCE = '<unknown source>'

# match a variable or block tag and capture the entire tag, including start/end
# delimiters
tag_re = (re.compile('(%s.*?%s|%s.*?%s|%s.*?%s)' %
          (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
           re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END),
           re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END))))

# global dictionary of libraries that have been loaded using get_library
libraries = {}

# global list of libraries to load by default for a new parser
builtins = []
class TemplateSyntaxError(Exception):
    """Raised when a template cannot be compiled due to invalid syntax."""
    pass
class TemplateDoesNotExist(Exception):
    """Raised when a requested template cannot be found."""
    pass
class TemplateEncodingError(Exception):
    """Raised when a template source is not unicode or valid UTF-8."""
    pass
@python_2_unicode_compatible
class VariableDoesNotExist(Exception):
    """Raised when a template variable cannot be resolved in a context."""

    def __init__(self, msg, params=()):
        self.msg = msg
        self.params = params

    def __str__(self):
        # Decode each parameter defensively before interpolation so a bad
        # byte sequence cannot raise while formatting the error message.
        safe_params = tuple(force_text(p, errors='replace') for p in self.params)
        return self.msg % safe_params
class InvalidTemplateLibrary(Exception):
    """Raised when a template tag library cannot be loaded or is malformed."""
    pass
class Origin(object):
    """Records where a template's source came from.

    Subclasses must implement reload() to re-fetch the raw source.
    """

    def __init__(self, name):
        self.name = name

    def reload(self):
        # Abstract hook: concrete origins return the template source string.
        raise NotImplementedError('subclasses of Origin must provide a reload() method')

    def __str__(self):
        return self.name
class StringOrigin(Origin):
    """Origin for templates compiled directly from a string (no loader)."""

    def __init__(self, source):
        # There is no file/loader name for a raw string template.
        super(StringOrigin, self).__init__(UNKNOWN_SOURCE)
        self.source = source

    def reload(self):
        # The original string is kept around, so reloading is trivial.
        return self.source
class Template(object):
    """A compiled template: wraps a NodeList and renders it against contexts."""

    def __init__(self, template_string, origin=None, name=None, engine=None):
        try:
            template_string = force_text(template_string)
        except UnicodeDecodeError:
            raise TemplateEncodingError("Templates can only be constructed "
                                        "from unicode or UTF-8 strings.")
        # If Template is instantiated directly rather than from an Engine and
        # exactly one Django template engine is configured, use that engine.
        # This is required to preserve backwards-compatibility for direct use
        # e.g. Template('...').render(Context({...}))
        if engine is None:
            from .engine import Engine
            engine = Engine.get_default()
        if engine.debug and origin is None:
            # Keep the raw source around (as the origin) for debug purposes.
            origin = StringOrigin(template_string)
        self.nodelist = engine.compile_string(template_string, origin)
        self.name = name
        self.origin = origin
        self.engine = engine

    def __iter__(self):
        # Walk every node depth-first, delegating to each node's iterator.
        for node in self.nodelist:
            for subnode in node:
                yield subnode

    def _render(self, context):
        # Raw render without render_context bookkeeping; see render().
        return self.nodelist.render(context)

    def render(self, context):
        "Display stage -- can be called many times"
        # Push/pop render_context so per-render state does not leak between
        # repeated renders of the same (or nested) templates.
        context.render_context.push()
        try:
            if context.template is None:
                # First template rendered with this context: bind self to
                # the context for the duration of the render.
                with context.bind_template(self):
                    return self._render(context)
            else:
                return self._render(context)
        finally:
            context.render_context.pop()
class Token(object):
    """One lexed chunk of template source: text, variable, block or comment."""

    def __init__(self, token_type, contents):
        # token_type must be TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK or
        # TOKEN_COMMENT.
        self.token_type = token_type
        self.contents = contents
        self.lineno = None

    def __str__(self):
        preview = self.contents[:20].replace('\n', '')
        return '<%s token: "%s...">' % (TOKEN_MAPPING[self.token_type], preview)

    def split_contents(self):
        """Split contents shlex-style, keeping _("...") groups as one bit."""
        pieces = []
        bit_iter = iter(smart_split(self.contents))
        for bit in bit_iter:
            if bit.startswith(('_("', "_('")):
                # A translation-marked literal may span several smart_split
                # bits; keep consuming until the closing quote + ')' appears.
                sentinel = bit[2] + ')'
                gathered = [bit]
                while not bit.endswith(sentinel):
                    bit = next(bit_iter)
                    gathered.append(bit)
                bit = ' '.join(gathered)
            pieces.append(bit)
        return pieces
class Lexer(object):
    """Tokenizes a template string into a flat list of Token objects."""

    def __init__(self, template_string, origin):
        self.template_string = template_string
        self.origin = origin
        # Running line number, advanced as tokens are created.
        self.lineno = 1
        # While inside {% verbatim %}, holds the expected closing tag name
        # (e.g. 'endverbatim'); otherwise False.
        self.verbatim = False

    def tokenize(self):
        """
        Return a list of tokens from a given template_string.
        """
        in_tag = False
        result = []
        # tag_re captures the tag delimiters, so split() alternates between
        # literal text and tag strings -- hence the in_tag toggle.
        for bit in tag_re.split(self.template_string):
            if bit:
                result.append(self.create_token(bit, in_tag))
            in_tag = not in_tag
        return result

    def create_token(self, token_string, in_tag):
        """
        Convert the given token string into a new Token object and return it.
        If in_tag is True, we are processing something that matched a tag,
        otherwise it should be treated as a literal string.
        """
        if in_tag and token_string.startswith(BLOCK_TAG_START):
            # The [2:-2] ranges below strip off *_TAG_START and *_TAG_END.
            # We could do len(BLOCK_TAG_START) to be more "correct", but we've
            # hard-coded the 2s here for performance. And it's not like
            # the TAG_START values are going to change anytime, anyway.
            block_content = token_string[2:-2].strip()
            if self.verbatim and block_content == self.verbatim:
                # Matching end tag leaves verbatim mode.
                self.verbatim = False
        if in_tag and not self.verbatim:
            if token_string.startswith(VARIABLE_TAG_START):
                token = Token(TOKEN_VAR, token_string[2:-2].strip())
            elif token_string.startswith(BLOCK_TAG_START):
                if block_content[:9] in ('verbatim', 'verbatim '):
                    # Enter verbatim mode until the matching end tag.
                    self.verbatim = 'end%s' % block_content
                token = Token(TOKEN_BLOCK, block_content)
            elif token_string.startswith(COMMENT_TAG_START):
                content = ''
                # NOTE(review): str.find() returns -1 (truthy) when the mark
                # is absent, and the mark can never be at index 0 here (the
                # string starts with '{#'), so this condition is effectively
                # always true and the comment content is always kept.
                # Presumably the intent was `find(...) != -1`; confirm against
                # the translation tooling before changing behavior.
                if token_string.find(TRANSLATOR_COMMENT_MARK):
                    content = token_string[2:-2].strip()
                token = Token(TOKEN_COMMENT, content)
        else:
            # Inside verbatim mode (or plain text), everything is literal.
            token = Token(TOKEN_TEXT, token_string)
        token.lineno = self.lineno
        self.lineno += token_string.count('\n')
        return token
class Parser(object):
    """Consumes a token list and compiles it into a NodeList.

    The error/empty/invalid methods are hooks that subclasses (e.g. the
    debug parser) override to attach extra information to exceptions.
    """

    def __init__(self, tokens):
        self.tokens = tokens
        self.tags = {}
        self.filters = {}
        # Preload tags/filters from every default (builtin) library.
        for lib in builtins:
            self.add_library(lib)

    def parse(self, parse_until=None):
        # Parses until one of the tag names in parse_until is seen; the
        # terminating token is pushed back so the caller can inspect it.
        if parse_until is None:
            parse_until = []
        nodelist = self.create_nodelist()
        while self.tokens:
            token = self.next_token()
            # Use the raw values here for TOKEN_* for a tiny performance boost.
            if token.token_type == 0:  # TOKEN_TEXT
                self.extend_nodelist(nodelist, TextNode(token.contents), token)
            elif token.token_type == 1:  # TOKEN_VAR
                if not token.contents:
                    self.empty_variable(token)
                try:
                    filter_expression = self.compile_filter(token.contents)
                except TemplateSyntaxError as e:
                    # Hook returns falsy to let the error propagate.
                    if not self.compile_filter_error(token, e):
                        raise
                var_node = self.create_variable_node(filter_expression)
                self.extend_nodelist(nodelist, var_node, token)
            elif token.token_type == 2:  # TOKEN_BLOCK
                try:
                    command = token.contents.split()[0]
                except IndexError:
                    self.empty_block_tag(token)
                if command in parse_until:
                    # put token back on token list so calling
                    # code knows why it terminated
                    self.prepend_token(token)
                    return nodelist
                # execute callback function for this tag and append
                # resulting node
                self.enter_command(command, token)
                try:
                    compile_func = self.tags[command]
                except KeyError:
                    self.invalid_block_tag(token, command, parse_until)
                try:
                    compiled_result = compile_func(self, token)
                except TemplateSyntaxError as e:
                    if not self.compile_function_error(token, e):
                        raise
                self.extend_nodelist(nodelist, compiled_result, token)
                self.exit_command()
        if parse_until:
            # Ran out of tokens without seeing the expected closing tag.
            self.unclosed_block_tag(parse_until)
        return nodelist

    def skip_past(self, endtag):
        # Discard tokens up to and including the named block tag.
        while self.tokens:
            token = self.next_token()
            if token.token_type == TOKEN_BLOCK and token.contents == endtag:
                return
        self.unclosed_block_tag([endtag])

    def create_variable_node(self, filter_expression):
        return VariableNode(filter_expression)

    def create_nodelist(self):
        return NodeList()

    def extend_nodelist(self, nodelist, node, token):
        # Nodes flagged must_be_first may only appear before any non-text
        # node (e.g. {% extends %}).
        if node.must_be_first and nodelist:
            try:
                if nodelist.contains_nontext:
                    raise AttributeError
            except AttributeError:
                raise TemplateSyntaxError("%r must be the first tag "
                                          "in the template." % node)
        if isinstance(nodelist, NodeList) and not isinstance(node, TextNode):
            nodelist.contains_nontext = True
        nodelist.append(node)

    def enter_command(self, command, token):
        # Hook for subclasses; no-op here.
        pass

    def exit_command(self):
        # Hook for subclasses; no-op here.
        pass

    def error(self, token, msg):
        # Returns (does not raise) the exception so callers control raising.
        return TemplateSyntaxError(msg)

    def empty_variable(self, token):
        raise self.error(token, "Empty variable tag")

    def empty_block_tag(self, token):
        raise self.error(token, "Empty block tag")

    def invalid_block_tag(self, token, command, parse_until=None):
        if parse_until:
            raise self.error(token, "Invalid block tag: '%s', expected %s" %
                (command, get_text_list(["'%s'" % p for p in parse_until])))
        raise self.error(token, "Invalid block tag: '%s'" % command)

    def unclosed_block_tag(self, parse_until):
        raise self.error(None, "Unclosed tags: %s " % ', '.join(parse_until))

    def compile_filter_error(self, token, e):
        # Hook: return truthy to suppress the TemplateSyntaxError.
        pass

    def compile_function_error(self, token, e):
        # Hook: return truthy to suppress the TemplateSyntaxError.
        pass

    def next_token(self):
        return self.tokens.pop(0)

    def prepend_token(self, token):
        self.tokens.insert(0, token)

    def delete_first_token(self):
        del self.tokens[0]

    def add_library(self, lib):
        # Merge a Library's registered tags and filters into this parser.
        self.tags.update(lib.tags)
        self.filters.update(lib.filters)

    def compile_filter(self, token):
        """
        Convenient wrapper for FilterExpression
        """
        return FilterExpression(token, self)

    def find_filter(self, filter_name):
        if filter_name in self.filters:
            return self.filters[filter_name]
        else:
            raise TemplateSyntaxError("Invalid filter: '%s'" % filter_name)
# This only matches constant *strings* (things in quotes or marked for
# translation). Numbers are treated as variables for implementation reasons
# (so that they retain their type when passed to filters).
constant_string = r"""
(?:%(i18n_open)s%(strdq)s%(i18n_close)s|
%(i18n_open)s%(strsq)s%(i18n_close)s|
%(strdq)s|
%(strsq)s)
""" % {
'strdq': r'"[^"\\]*(?:\\.[^"\\]*)*"', # double-quoted string
'strsq': r"'[^'\\]*(?:\\.[^'\\]*)*'", # single-quoted string
'i18n_open': re.escape("_("),
'i18n_close': re.escape(")"),
}
constant_string = constant_string.replace("\n", "")
filter_raw_string = r"""
^(?P<constant>%(constant)s)|
^(?P<var>[%(var_chars)s]+|%(num)s)|
(?:\s*%(filter_sep)s\s*
(?P<filter_name>\w+)
(?:%(arg_sep)s
(?:
(?P<constant_arg>%(constant)s)|
(?P<var_arg>[%(var_chars)s]+|%(num)s)
)
)?
)""" % {
'constant': constant_string,
'num': r'[-+\.]?\d[\d\.e]*',
'var_chars': "\w\.",
'filter_sep': re.escape(FILTER_SEPARATOR),
'arg_sep': re.escape(FILTER_ARGUMENT_SEPARATOR),
}
filter_re = re.compile(filter_raw_string, re.UNICODE | re.VERBOSE)
class FilterExpression(object):
    """
    Parses a variable token and its optional filters (all as a single string),
    and return a list of tuples of the filter name and arguments.
    Sample::

        >>> token = 'variable|default:"Default value"|date:"Y-m-d"'
        >>> p = Parser('')
        >>> fe = FilterExpression(token, p)
        >>> len(fe.filters)
        2
        >>> fe.var
        <Variable: 'variable'>
    """
    def __init__(self, token, parser):
        self.token = token
        matches = filter_re.finditer(token)
        var_obj = None
        filters = []
        # upto tracks how far into the token string the matches have
        # consumed; any gap means unparseable characters.
        upto = 0
        for match in matches:
            start = match.start()
            if upto != start:
                raise TemplateSyntaxError("Could not parse some characters: "
                                          "%s|%s|%s" %
                                          (token[:upto], token[upto:start],
                                           token[start:]))
            if var_obj is None:
                # First match: the leading variable or constant.
                var, constant = match.group("var", "constant")
                if constant:
                    try:
                        # Constants resolve immediately (empty context).
                        var_obj = Variable(constant).resolve({})
                    except VariableDoesNotExist:
                        var_obj = None
                elif var is None:
                    raise TemplateSyntaxError("Could not find variable at "
                                              "start of %s." % token)
                else:
                    var_obj = Variable(var)
            else:
                # Subsequent matches: one "|filter:arg" segment each.
                filter_name = match.group("filter_name")
                args = []
                constant_arg, var_arg = match.group("constant_arg", "var_arg")
                # Each arg is (needs_resolving, value): constants are
                # pre-resolved now, variables are resolved at render time.
                if constant_arg:
                    args.append((False, Variable(constant_arg).resolve({})))
                elif var_arg:
                    args.append((True, Variable(var_arg)))
                filter_func = parser.find_filter(filter_name)
                self.args_check(filter_name, filter_func, args)
                filters.append((filter_func, args))
            upto = match.end()
        if upto != len(token):
            raise TemplateSyntaxError("Could not parse the remainder: '%s' "
                                      "from '%s'" % (token[upto:], token))

        self.filters = filters
        self.var = var_obj

    def resolve(self, context, ignore_failures=False):
        # Resolve the base value, then thread it through each filter in turn.
        if isinstance(self.var, Variable):
            try:
                obj = self.var.resolve(context)
            except VariableDoesNotExist:
                if ignore_failures:
                    obj = None
                else:
                    # Unresolvable variable: substitute the engine's
                    # string_if_invalid (optionally interpolating the name).
                    string_if_invalid = context.template.engine.string_if_invalid
                    if string_if_invalid:
                        if '%s' in string_if_invalid:
                            return string_if_invalid % self.var
                        else:
                            return string_if_invalid
                    else:
                        obj = string_if_invalid
        else:
            # The base was a constant, already resolved in __init__.
            obj = self.var
        for func, args in self.filters:
            arg_vals = []
            for lookup, arg in args:
                if not lookup:
                    # Pre-resolved constant arg; mark safe so it is not
                    # escaped again.
                    arg_vals.append(mark_safe(arg))
                else:
                    arg_vals.append(arg.resolve(context))
            if getattr(func, 'expects_localtime', False):
                obj = template_localtime(obj, context.use_tz)
            if getattr(func, 'needs_autoescape', False):
                new_obj = func(obj, autoescape=context.autoescape, *arg_vals)
            else:
                new_obj = func(obj, *arg_vals)
            # Propagate safe/escape markers through "safe" filters.
            if getattr(func, 'is_safe', False) and isinstance(obj, SafeData):
                obj = mark_safe(new_obj)
            elif isinstance(obj, EscapeData):
                obj = mark_for_escaping(new_obj)
            else:
                obj = new_obj
        return obj

    def args_check(name, func, provided):
        # Validates at compile time that the filter accepts the number of
        # arguments the template supplies.
        provided = list(provided)
        # First argument, filter input, is implied.
        plen = len(provided) + 1
        # Check to see if a decorator is providing the real function.
        func = getattr(func, '_decorated_function', func)

        args, varargs, varkw, defaults = getargspec(func)
        alen = len(args)
        dlen = len(defaults or [])
        # Not enough OR Too many
        if plen < (alen - dlen) or plen > alen:
            raise TemplateSyntaxError("%s requires %d arguments, %d provided" %
                                      (name, alen - dlen, plen))

        return True
    args_check = staticmethod(args_check)

    def __str__(self):
        return self.token
def resolve_variable(path, context):
    """Resolve *path* (which may contain attribute syntax) against *context*.

    Deprecated alias for ``Variable(path).resolve(context)``.
    """
    warnings.warn("resolve_variable() is deprecated. Use django.template."
                  "Variable(path).resolve(context) instead",
                  RemovedInDjango20Warning, stacklevel=2)
    variable = Variable(path)
    return variable.resolve(context)
class Variable(object):
    """
    A template variable, resolvable against a given context. The variable may
    be a hard-coded string (if it begins and ends with single or double quote
    marks)::
        >>> c = {'article': {'section':u'News'}}
        >>> Variable('article.section').resolve(c)
        u'News'
        >>> Variable('article').resolve(c)
        {'section': u'News'}
        >>> class AClass: pass
        >>> c = AClass()
        >>> c.article = AClass()
        >>> c.article.section = u'News'
    (The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.')
    """
    def __init__(self, var):
        # var: the raw source string, e.g. "article.section", '"literal"', "2.5".
        self.var = var
        # Parsed literal value (number or quoted string), when var is a literal.
        self.literal = None
        # Tuple of dotted lookup components, when var is a real variable.
        self.lookups = None
        # True when the resolved value must be translated at render time.
        self.translate = False
        self.message_context = None
        if not isinstance(var, six.string_types):
            raise TypeError(
                "Variable must be a string or number, got %s" % type(var))
        try:
            # First try to treat this variable as a number.
            #
            # Note that this could cause an OverflowError here that we're not
            # catching. Since this should only happen at compile time, that's
            # probably OK.
            self.literal = float(var)
            # So it's a float... is it an int? If the original value contained a
            # dot or an "e" then it was a float, not an int.
            if '.' not in var and 'e' not in var.lower():
                self.literal = int(self.literal)
            # "2." is invalid
            if var.endswith('.'):
                raise ValueError
        except ValueError:
            # A ValueError means that the variable isn't a number.
            if var.startswith('_(') and var.endswith(')'):
                # The result of the lookup should be translated at rendering
                # time.
                self.translate = True
                var = var[2:-1]
            # If it's wrapped with quotes (single or double), then
            # we're also dealing with a literal.
            try:
                self.literal = mark_safe(unescape_string_literal(var))
            except ValueError:
                # Otherwise we'll set self.lookups so that resolve() knows we're
                # dealing with a bonafide variable
                if var.find(VARIABLE_ATTRIBUTE_SEPARATOR + '_') > -1 or var[0] == '_':
                    raise TemplateSyntaxError("Variables and attributes may "
                                              "not begin with underscores: '%s'" %
                                              var)
                self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR))
    def resolve(self, context):
        """Resolve this variable against a given context."""
        if self.lookups is not None:
            # We're dealing with a variable that needs to be resolved
            value = self._resolve_lookup(context)
        else:
            # We're dealing with a literal, so it's already been "resolved"
            value = self.literal
        if self.translate:
            # Translation is deferred to render time so the active language
            # applies.
            if self.message_context:
                return pgettext_lazy(self.message_context, value)
            else:
                return ugettext_lazy(value)
        return value
    def __repr__(self):
        return "<%s: %r>" % (self.__class__.__name__, self.var)
    def __str__(self):
        return self.var
    def _resolve_lookup(self, context):
        """
        Performs resolution of a real variable (i.e. not a literal) against the
        given context.
        As indicated by the method's name, this method is an implementation
        detail and shouldn't be called by external code. Use Variable.resolve()
        instead.

        For each dotted component, tries dictionary lookup, then attribute
        lookup, then list-index lookup, in that order.
        """
        current = context
        try:  # catch-all for silent variable failures
            for bit in self.lookups:
                try:  # dictionary lookup
                    current = current[bit]
                # ValueError/IndexError are for numpy.array lookup on
                # numpy < 1.9 and 1.9+ respectively
                except (TypeError, AttributeError, KeyError, ValueError, IndexError):
                    try:  # attribute lookup
                        # Don't return class attributes if the class is the context:
                        if isinstance(current, BaseContext) and getattr(type(current), bit):
                            raise AttributeError
                        current = getattr(current, bit)
                    except (TypeError, AttributeError) as e:
                        # Reraise an AttributeError raised by a @property
                        if (isinstance(e, AttributeError) and
                                not isinstance(current, BaseContext) and bit in dir(current)):
                            raise
                        try:  # list-index lookup
                            current = current[int(bit)]
                        except (IndexError,  # list index out of range
                                ValueError,  # invalid literal for int()
                                KeyError,  # current is a dict without `int(bit)` key
                                TypeError):  # unsubscriptable object
                            raise VariableDoesNotExist("Failed lookup for key "
                                                       "[%s] in %r",
                                                       (bit, current))  # missing attribute
                if callable(current):
                    if getattr(current, 'do_not_call_in_templates', False):
                        pass
                    elif getattr(current, 'alters_data', False):
                        # Refuse to call data-altering callables from templates.
                        current = context.template.engine.string_if_invalid
                    else:
                        try:  # method call (assuming no args required)
                            current = current()
                        except TypeError:
                            try:
                                getcallargs(current)
                            except TypeError:  # arguments *were* required
                                current = context.template.engine.string_if_invalid  # invalid method call
                            else:
                                raise
        except Exception as e:
            if getattr(e, 'silent_variable_failure', False):
                current = context.template.engine.string_if_invalid
            else:
                raise
        return current
class Node(object):
    """Base class for all template nodes."""

    # Nodes that must appear first in the template set this to True (text
    # nodes may still precede them).
    must_be_first = False
    # Attribute names that may hold child node lists, walked by
    # get_nodes_by_type().
    child_nodelists = ('nodelist',)

    def render(self, context):
        """
        Return the node rendered as a string.
        """
        pass

    def __iter__(self):
        # Iterating a node yields just the node itself.
        yield self

    def get_nodes_by_type(self, nodetype):
        """
        Return a list of all nodes (within this node and its nodelist)
        of the given type
        """
        matches = [self] if isinstance(self, nodetype) else []
        for attr in self.child_nodelists:
            child_list = getattr(self, attr, None)
            if child_list:
                matches.extend(child_list.get_nodes_by_type(nodetype))
        return matches
class NodeList(list):
    """A list of template nodes that renders as the concatenation of its items."""

    # Set to True the first time a non-TextNode is inserted by
    # extend_nodelist().
    contains_nontext = False

    def render(self, context):
        """Render each item in order and return the joined result, marked safe."""
        rendered = []
        for item in self:
            piece = self.render_node(item, context) if isinstance(item, Node) else item
            rendered.append(force_text(piece))
        return mark_safe(''.join(rendered))

    def get_nodes_by_type(self, nodetype):
        "Return a list of all nodes of the given type"
        return [found
                for item in self
                for found in item.get_nodes_by_type(nodetype)]

    def render_node(self, node, context):
        # Indirection kept as an override point for per-node rendering.
        return node.render(context)
class TextNode(Node):
    # Leaf node holding a literal chunk of template text.
    def __init__(self, s):
        self.s = s
    def __repr__(self):
        return force_str("<Text Node: '%s'>" % self.s[:25], 'ascii',
                         errors='replace')
    def render(self, context):
        # Literal text renders as itself; the context is ignored.
        return self.s
def render_value_in_context(value, context):
    """
    Converts any value to a string to become part of a rendered template. This
    means escaping, if required, and conversion to a unicode object. If value
    is a string, it is expected to have already been translated.
    """
    value = template_localtime(value, use_tz=context.use_tz)
    value = localize(value, use_l10n=context.use_l10n)
    value = force_text(value)
    # Escape unless the value is already marked safe; values explicitly
    # marked for escaping are escaped even with autoescape off.
    if ((context.autoescape and not isinstance(value, SafeData)) or
            isinstance(value, EscapeData)):
        return conditional_escape(value)
    else:
        return value
class VariableNode(Node):
    # Node that renders a resolved filter expression (a template variable).
    def __init__(self, filter_expression):
        self.filter_expression = filter_expression
    def __repr__(self):
        return "<Variable Node: %s>" % self.filter_expression
    def render(self, context):
        try:
            output = self.filter_expression.resolve(context)
        except UnicodeDecodeError:
            # Unicode conversion can fail sometimes for reasons out of our
            # control (e.g. exception rendering). In that case, we fail
            # quietly.
            return ''
        return render_value_in_context(output, context)
# Regex for token keyword arguments
kwarg_re = re.compile(r"(?:(\w+)=)?(.+)")
def token_kwargs(bits, parser, support_legacy=False):
    """
    A utility method for parsing token keyword arguments.

    :param bits: A list containing remainder of the token (split by spaces)
        that is to be checked for arguments. Valid arguments will be removed
        from this list.
    :param support_legacy: If set to ``True``, the legacy format ``1 as foo``
        will be accepted. Otherwise, only the standard ``foo=1`` format is
        allowed.
    :returns: A dictionary of the arguments retrieved from the ``bits`` token
        list.

    There is no requirement for all remaining token ``bits`` to be keyword
    arguments, so the dictionary will be returned as soon as an invalid
    argument format is reached.
    """
    if not bits:
        return {}
    first = kwarg_re.match(bits[0])
    kwarg_format = first and first.group(1)
    if not kwarg_format and (
            not support_legacy or len(bits) < 3 or bits[1] != 'as'):
        return {}
    kwargs = {}
    while bits:
        if kwarg_format:
            match = kwarg_re.match(bits[0])
            if not match or not match.group(1):
                break
            key, value = match.groups()
            del bits[:1]
        else:
            if len(bits) < 3 or bits[1] != 'as':
                break
            key, value = bits[2], bits[0]
            del bits[:3]
        kwargs[key] = parser.compile_filter(value)
        if not kwarg_format:
            # Legacy form chains arguments with 'and'.
            if not bits or bits[0] != 'and':
                break
            del bits[:1]
    return kwargs
def parse_bits(parser, bits, params, varargs, varkw, defaults,
               takes_context, name):
    """
    Parses bits for template tag helpers simple_tag and inclusion_tag, in
    particular by detecting syntax errors and by extracting positional and
    keyword arguments.

    Returns an (args, kwargs) pair of compiled filter expressions. Raises
    TemplateSyntaxError for unexpected, duplicate, missing or misordered
    arguments.
    """
    if takes_context:
        if params[0] == 'context':
            params = params[1:]
        else:
            raise TemplateSyntaxError(
                "'%s' is decorated with takes_context=True so it must "
                "have a first argument of 'context'" % name)
    args = []
    kwargs = {}
    unhandled_params = list(params)
    for bit in bits:
        # First we try to extract a potential kwarg from the bit
        kwarg = token_kwargs([bit], parser)
        if kwarg:
            # The kwarg was successfully extracted
            param, value = list(six.iteritems(kwarg))[0]
            if param not in params and varkw is None:
                # An unexpected keyword argument was supplied
                raise TemplateSyntaxError(
                    "'%s' received unexpected keyword argument '%s'" %
                    (name, param))
            elif param in kwargs:
                # The keyword argument has already been supplied once
                raise TemplateSyntaxError(
                    "'%s' received multiple values for keyword argument '%s'" %
                    (name, param))
            else:
                # All good, record the keyword argument
                kwargs[str(param)] = value
                if param in unhandled_params:
                    # If using the keyword syntax for a positional arg, then
                    # consume it.
                    unhandled_params.remove(param)
        else:
            if kwargs:
                raise TemplateSyntaxError(
                    "'%s' received some positional argument(s) after some "
                    "keyword argument(s)" % name)
            else:
                # Record the positional argument
                args.append(parser.compile_filter(bit))
                try:
                    # Consume from the list of expected positional arguments
                    unhandled_params.pop(0)
                except IndexError:
                    if varargs is None:
                        raise TemplateSyntaxError(
                            "'%s' received too many positional arguments" %
                            name)
    if defaults is not None:
        # Consider the last n params handled, where n is the
        # number of defaults.
        unhandled_params = unhandled_params[:-len(defaults)]
    if unhandled_params:
        # Some positional arguments were not supplied
        raise TemplateSyntaxError(
            "'%s' did not receive value(s) for the argument(s): %s" %
            (name, ", ".join("'%s'" % p for p in unhandled_params)))
    return args, kwargs
def generic_tag_compiler(parser, token, params, varargs, varkw, defaults,
                         name, takes_context, node_class):
    """
    Returns a template.Node subclass.
    """
    # Drop the tag name itself; the remaining bits are its arguments.
    bits = token.split_contents()[1:]
    args, kwargs = parse_bits(parser, bits, params, varargs, varkw,
                              defaults, takes_context, name)
    return node_class(takes_context, args, kwargs)
class TagHelperNode(Node):
    """
    Base class for tag helper nodes such as SimpleNode and InclusionNode.

    Stores the positional and keyword argument expressions to be resolved
    and passed to the decorated function.
    """
    def __init__(self, takes_context, args, kwargs):
        self.takes_context = takes_context
        self.args = args
        self.kwargs = kwargs

    def get_resolved_arguments(self, context):
        # Resolve every argument expression against the context; prepend the
        # context itself when the tag was registered with takes_context=True.
        resolved_args = [context] if self.takes_context else []
        resolved_args.extend(expr.resolve(context) for expr in self.args)
        resolved_kwargs = {key: expr.resolve(context)
                           for key, expr in self.kwargs.items()}
        return resolved_args, resolved_kwargs
class Library(object):
    """
    Registry of template tags and filters.

    ``tag`` and ``filter`` (and the helpers ``simple_tag``,
    ``assignment_tag`` and ``inclusion_tag``) can each be used as a plain
    call, as a bare decorator, or as a decorator factory taking a name.
    """
    def __init__(self):
        # Maps filter/tag name -> registered callable.
        self.filters = {}
        self.tags = {}
    def tag(self, name=None, compile_function=None):
        if name is None and compile_function is None:
            # @register.tag()
            return self.tag_function
        elif name is not None and compile_function is None:
            if callable(name):
                # @register.tag
                return self.tag_function(name)
            else:
                # @register.tag('somename') or @register.tag(name='somename')
                def dec(func):
                    return self.tag(name, func)
                return dec
        elif name is not None and compile_function is not None:
            # register.tag('somename', somefunc)
            self.tags[name] = compile_function
            return compile_function
        else:
            raise InvalidTemplateLibrary("Unsupported arguments to "
                                         "Library.tag: (%r, %r)", (name, compile_function))
    def tag_function(self, func):
        # Register under the wrapped function's name if it was decorated.
        self.tags[getattr(func, "_decorated_function", func).__name__] = func
        return func
    def filter(self, name=None, filter_func=None, **flags):
        if name is None and filter_func is None:
            # @register.filter()
            def dec(func):
                return self.filter_function(func, **flags)
            return dec
        elif name is not None and filter_func is None:
            if callable(name):
                # @register.filter
                return self.filter_function(name, **flags)
            else:
                # @register.filter('somename') or @register.filter(name='somename')
                def dec(func):
                    return self.filter(name, func, **flags)
                return dec
        elif name is not None and filter_func is not None:
            # register.filter('somename', somefunc)
            self.filters[name] = filter_func
            for attr in ('expects_localtime', 'is_safe', 'needs_autoescape'):
                if attr in flags:
                    value = flags[attr]
                    # set the flag on the filter for FilterExpression.resolve
                    setattr(filter_func, attr, value)
                    # set the flag on the innermost decorated function
                    # for decorators that need it e.g. stringfilter
                    if hasattr(filter_func, "_decorated_function"):
                        setattr(filter_func._decorated_function, attr, value)
            filter_func._filter_name = name
            return filter_func
        else:
            raise InvalidTemplateLibrary("Unsupported arguments to "
                                         "Library.filter: (%r, %r)", (name, filter_func))
    def filter_function(self, func, **flags):
        name = getattr(func, "_decorated_function", func).__name__
        return self.filter(name, func, **flags)
    def simple_tag(self, func=None, takes_context=None, name=None):
        # Wrap ``func`` so it can be used as a template tag; an optional
        # trailing "as varname" stores the result in the context instead of
        # rendering it.
        def dec(func):
            params, varargs, varkw, defaults = getargspec(func)
            class SimpleNode(TagHelperNode):
                def __init__(self, takes_context, args, kwargs, target_var):
                    super(SimpleNode, self).__init__(takes_context, args, kwargs)
                    self.target_var = target_var
                def render(self, context):
                    resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
                    output = func(*resolved_args, **resolved_kwargs)
                    if self.target_var is not None:
                        # "as varname" form: store, render nothing.
                        context[self.target_var] = output
                        return ''
                    return output
            function_name = (name or
                             getattr(func, '_decorated_function', func).__name__)
            def compile_func(parser, token):
                bits = token.split_contents()[1:]
                target_var = None
                if len(bits) >= 2 and bits[-2] == 'as':
                    target_var = bits[-1]
                    bits = bits[:-2]
                args, kwargs = parse_bits(parser, bits, params,
                                          varargs, varkw, defaults, takes_context, function_name)
                return SimpleNode(takes_context, args, kwargs, target_var)
            compile_func.__doc__ = func.__doc__
            self.tag(function_name, compile_func)
            return func
        if func is None:
            # @register.simple_tag(...)
            return dec
        elif callable(func):
            # @register.simple_tag
            return dec(func)
        else:
            raise TemplateSyntaxError("Invalid arguments provided to simple_tag")
    def assignment_tag(self, func=None, takes_context=None, name=None):
        # Deprecated alias: simple_tag now supports the "as varname" form.
        warnings.warn(
            "assignment_tag() is deprecated. Use simple_tag() instead",
            RemovedInDjango21Warning,
            stacklevel=2,
        )
        return self.simple_tag(func, takes_context, name)
    def inclusion_tag(self, file_name, takes_context=False, name=None):
        # Wrap ``func`` so its returned dict renders the given template.
        def dec(func):
            params, varargs, varkw, defaults = getargspec(func)
            class InclusionNode(TagHelperNode):
                def render(self, context):
                    resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
                    _dict = func(*resolved_args, **resolved_kwargs)
                    # Lazily load and cache the template's nodelist on first render.
                    if not getattr(self, 'nodelist', False):
                        if isinstance(file_name, Template):
                            t = file_name
                        elif isinstance(getattr(file_name, 'template', None), Template):
                            t = file_name.template
                        elif not isinstance(file_name, six.string_types) and is_iterable(file_name):
                            t = context.template.engine.select_template(file_name)
                        else:
                            t = context.template.engine.get_template(file_name)
                        self.nodelist = t.nodelist
                    new_context = context.new(_dict)
                    # Copy across the CSRF token, if present, because
                    # inclusion tags are often used for forms, and we need
                    # instructions for using CSRF protection to be as simple
                    # as possible.
                    csrf_token = context.get('csrf_token', None)
                    if csrf_token is not None:
                        new_context['csrf_token'] = csrf_token
                    return self.nodelist.render(new_context)
            function_name = (name or
                             getattr(func, '_decorated_function', func).__name__)
            compile_func = partial(generic_tag_compiler,
                                   params=params, varargs=varargs, varkw=varkw,
                                   defaults=defaults, name=function_name,
                                   takes_context=takes_context, node_class=InclusionNode)
            compile_func.__doc__ = func.__doc__
            self.tag(function_name, compile_func)
            return func
        return dec
def is_library_missing(name):
    """Check if library that failed to load cannot be found under any
    templatetags directory or does exist but fails to import.
    Non-existing condition is checked recursively for each subpackage in cases
    like <appdir>/templatetags/subpackage/package/module.py.
    """
    # Don't bother to check if '.' is in name since any name will be prefixed
    # with some template root.
    path, module = name.rsplit('.', 1)
    try:
        package = import_module(path)
        # Parent imported fine: the library is "missing" only if the leaf
        # submodule does not exist (an existing-but-broken module returns False).
        return not module_has_submodule(package, module)
    except ImportError:
        # Parent package itself failed to import; recurse one level up.
        return is_library_missing(path)
def import_library(taglib_module):
    """
    Load a template tag library module.
    Verifies that the library contains a 'register' attribute, and
    returns that attribute as the representation of the library.
    Returns None when the module simply does not exist; raises
    InvalidTemplateLibrary for broken or malformed libraries.
    """
    try:
        mod = import_module(taglib_module)
    except ImportError as e:
        # If the ImportError is because the taglib submodule does not exist,
        # that's not an error that should be raised. If the submodule exists
        # and raised an ImportError on the attempt to load it, that we want
        # to raise.
        if is_library_missing(taglib_module):
            return None
        else:
            raise InvalidTemplateLibrary("ImportError raised loading %s: %s" %
                                         (taglib_module, e))
    try:
        return mod.register
    except AttributeError:
        raise InvalidTemplateLibrary("Template library %s does not have "
                                     "a variable named 'register'" %
                                     taglib_module)
@lru_cache.lru_cache()
def get_templatetags_modules():
    """
    Return the list of all available template tag modules.
    Caches the result for faster access.
    """
    # django's own tags are always a candidate, plus one candidate per
    # installed application.
    templatetags_modules_candidates = ['django.templatetags']
    templatetags_modules_candidates.extend(
        '%s.templatetags' % app_config.name
        for app_config in apps.get_app_configs())
    templatetags_modules = []
    for templatetag_module in templatetags_modules_candidates:
        try:
            import_module(templatetag_module)
        except ImportError:
            # Apps without a templatetags package are simply skipped.
            continue
        else:
            templatetags_modules.append(templatetag_module)
    return templatetags_modules
def get_library(library_name):
    """
    Load the template library module with the given name.
    If library is not already loaded loop over all templatetags modules
    to locate it.
    {% load somelib %} and {% load someotherlib %} loops twice.
    Subsequent loads eg. {% load somelib %} in the same process will grab
    the cached module from libraries.
    """
    lib = libraries.get(library_name, None)
    if not lib:
        templatetags_modules = get_templatetags_modules()
        tried_modules = []
        for module in templatetags_modules:
            taglib_module = '%s.%s' % (module, library_name)
            tried_modules.append(taglib_module)
            lib = import_library(taglib_module)
            if lib:
                # Cache the loaded library for subsequent {% load %} calls.
                libraries[library_name] = lib
                break
        if not lib:
            raise InvalidTemplateLibrary("Template library %s not found, "
                                         "tried %s" %
                                         (library_name,
                                          ','.join(tried_modules)))
    return lib
def add_to_builtins(module):
    # Import the tag library and register it as a builtin, so its tags and
    # filters are available in every template without {% load %}.
    builtins.append(import_library(module))
# Default libraries available to every template.
add_to_builtins('django.template.defaulttags')
add_to_builtins('django.template.defaultfilters')
add_to_builtins('django.template.loader_tags')
| |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import six
import testtools
from tempest.api.compute import base
from tempest.common import image as common_image
from tempest.common.utils import data_utils
from tempest.common import waiters
from tempest import config
from tempest.lib import exceptions
from tempest import test
CONF = config.CONF
class ListImageFiltersTestJSON(base.BaseV2ComputeTest):
    """Tests listing compute images with various filters.

    resource_setup() creates three glance-backed images and, when
    snapshotting is enabled, two servers plus three server snapshots; the
    test methods assert which of those appear for each list filter.
    """

    @classmethod
    def skip_checks(cls):
        super(ListImageFiltersTestJSON, cls).skip_checks()
        if not CONF.service_available.glance:
            skip_msg = ("%s skipped as glance is not available" % cls.__name__)
            raise cls.skipException(skip_msg)

    @classmethod
    def setup_clients(cls):
        super(ListImageFiltersTestJSON, cls).setup_clients()
        cls.client = cls.compute_images_client
        # Check if glance v1 is available to determine which client to use. We
        # prefer glance v1 for the compute API tests since the compute image
        # API proxy was written for glance v1.
        if CONF.image_feature_enabled.api_v1:
            cls.glance_client = cls.os.image_client
        elif CONF.image_feature_enabled.api_v2:
            cls.glance_client = cls.os.image_client_v2
        else:
            raise exceptions.InvalidConfiguration(
                'Either api_v1 or api_v2 must be True in '
                '[image-feature-enabled].')

    @classmethod
    def resource_setup(cls):
        super(ListImageFiltersTestJSON, cls).resource_setup()

        def _create_image():
            # Create a private image through glance (v1 or v2, whichever
            # setup_clients() selected) and upload dummy data so it goes ACTIVE.
            params = {
                'name': data_utils.rand_name(cls.__name__ + '-image'),
                'container_format': 'bare',
                'disk_format': 'raw'
            }
            if CONF.image_feature_enabled.api_v1:
                params.update({'is_public': False})
                params = {'headers':
                          common_image.image_meta_to_headers(**params)}
            else:
                params.update({'visibility': 'private'})
            body = cls.glance_client.create_image(**params)
            body = body['image'] if 'image' in body else body
            image_id = body['id']
            cls.images.append(image_id)
            # Wait 1 second between creation and upload to ensure a delta
            # between created_at and updated_at.
            time.sleep(1)
            image_file = six.BytesIO((b'*' * 1024))
            if CONF.image_feature_enabled.api_v1:
                cls.glance_client.update_image(image_id, data=image_file)
            else:
                cls.glance_client.store_image_file(image_id, data=image_file)
            waiters.wait_for_image_status(cls.client, image_id, 'ACTIVE')
            body = cls.client.show_image(image_id)['image']
            return body

        # Create non-snapshot images via glance
        cls.image1 = _create_image()
        cls.image1_id = cls.image1['id']
        cls.image2 = _create_image()
        cls.image2_id = cls.image2['id']
        cls.image3 = _create_image()
        cls.image3_id = cls.image3['id']
        if not CONF.compute_feature_enabled.snapshot:
            return
        # Create instances and snapshots via nova
        cls.server1 = cls.create_test_server()
        cls.server2 = cls.create_test_server(wait_until='ACTIVE')
        # NOTE(sdague) this is faster than doing the sync wait_util on both
        waiters.wait_for_server_status(cls.servers_client,
                                       cls.server1['id'], 'ACTIVE')
        # Create images to be used in the filter tests
        cls.snapshot1 = cls.create_image_from_server(
            cls.server1['id'], wait_until='ACTIVE')
        cls.snapshot1_id = cls.snapshot1['id']
        # Servers have a hidden property for when they are being imaged
        # Performing back-to-back create image calls on a single
        # server will sometimes cause failures
        cls.snapshot3 = cls.create_image_from_server(
            cls.server2['id'], wait_until='ACTIVE')
        cls.snapshot3_id = cls.snapshot3['id']
        # Wait for the server to be active after the image upload
        cls.snapshot2 = cls.create_image_from_server(
            cls.server1['id'], wait_until='ACTIVE')
        cls.snapshot2_id = cls.snapshot2['id']

    @test.idempotent_id('a3f5b513-aeb3-42a9-b18e-f091ef73254d')
    def test_list_images_filter_by_status(self):
        # The list of images should contain only images with the
        # provided status
        params = {'status': 'ACTIVE'}
        images = self.client.list_images(**params)['images']
        self.assertTrue(any([i for i in images if i['id'] == self.image1_id]))
        self.assertTrue(any([i for i in images if i['id'] == self.image2_id]))
        self.assertTrue(any([i for i in images if i['id'] == self.image3_id]))

    @test.idempotent_id('33163b73-79f5-4d07-a7ea-9213bcc468ff')
    def test_list_images_filter_by_name(self):
        # List of all images should contain the expected images filtered
        # by name
        params = {'name': self.image1['name']}
        images = self.client.list_images(**params)['images']
        self.assertTrue(any([i for i in images if i['id'] == self.image1_id]))
        self.assertFalse(any([i for i in images if i['id'] == self.image2_id]))
        self.assertFalse(any([i for i in images if i['id'] == self.image3_id]))

    @test.idempotent_id('9f238683-c763-45aa-b848-232ec3ce3105')
    @testtools.skipUnless(CONF.compute_feature_enabled.snapshot,
                          'Snapshotting is not available.')
    def test_list_images_filter_by_server_id(self):
        # The images should contain images filtered by server id
        params = {'server': self.server1['id']}
        images = self.client.list_images(**params)['images']
        # BUG FIX: the failure message previously reported self.image1_id
        # although the assertion checks for self.snapshot1_id.
        self.assertTrue(any([i for i in images
                             if i['id'] == self.snapshot1_id]),
                        "Failed to find image %s in images. Got images %s" %
                        (self.snapshot1_id, images))
        self.assertTrue(any([i for i in images
                             if i['id'] == self.snapshot2_id]))
        self.assertFalse(any([i for i in images
                              if i['id'] == self.snapshot3_id]))

    @test.idempotent_id('05a377b8-28cf-4734-a1e6-2ab5c38bf606')
    @testtools.skipUnless(CONF.compute_feature_enabled.snapshot,
                          'Snapshotting is not available.')
    def test_list_images_filter_by_server_ref(self):
        # The list of servers should be filtered by server ref
        server_links = self.server2['links']
        # Try all server link types
        for link in server_links:
            params = {'server': link['href']}
            images = self.client.list_images(**params)['images']
            self.assertFalse(any([i for i in images
                                  if i['id'] == self.snapshot1_id]))
            self.assertFalse(any([i for i in images
                                  if i['id'] == self.snapshot2_id]))
            self.assertTrue(any([i for i in images
                                 if i['id'] == self.snapshot3_id]))

    @test.idempotent_id('e3356918-4d3e-4756-81d5-abc4524ba29f')
    @testtools.skipUnless(CONF.compute_feature_enabled.snapshot,
                          'Snapshotting is not available.')
    def test_list_images_filter_by_type(self):
        # The list of servers should be filtered by image type
        params = {'type': 'snapshot'}
        images = self.client.list_images(**params)['images']
        self.assertTrue(any([i for i in images
                             if i['id'] == self.snapshot1_id]))
        self.assertTrue(any([i for i in images
                             if i['id'] == self.snapshot2_id]))
        self.assertTrue(any([i for i in images
                             if i['id'] == self.snapshot3_id]))
        self.assertFalse(any([i for i in images
                              if i['id'] == self.image_ref]))

    @test.idempotent_id('3a484ca9-67ba-451e-b494-7fcf28d32d62')
    def test_list_images_limit_results(self):
        # Verify only the expected number of results are returned
        params = {'limit': '1'}
        images = self.client.list_images(**params)['images']
        self.assertEqual(1, len([x for x in images if 'id' in x]))

    @test.idempotent_id('18bac3ae-da27-436c-92a9-b22474d13aab')
    def test_list_images_filter_by_changes_since(self):
        # Verify only updated images are returned in the detailed list
        # Becoming ACTIVE will modify the updated time
        # Filter by the image's created time
        params = {'changes-since': self.image3['created']}
        images = self.client.list_images(**params)['images']
        found = any([i for i in images if i['id'] == self.image3_id])
        self.assertTrue(found)

    @test.idempotent_id('9b0ea018-6185-4f71-948a-a123a107988e')
    def test_list_images_with_detail_filter_by_status(self):
        # Detailed list of all images should only contain images
        # with the provided status
        params = {'status': 'ACTIVE'}
        images = self.client.list_images(detail=True, **params)['images']
        self.assertTrue(any([i for i in images if i['id'] == self.image1_id]))
        self.assertTrue(any([i for i in images if i['id'] == self.image2_id]))
        self.assertTrue(any([i for i in images if i['id'] == self.image3_id]))

    @test.idempotent_id('644ea267-9bd9-4f3b-af9f-dffa02396a17')
    def test_list_images_with_detail_filter_by_name(self):
        # Detailed list of all images should contain the expected
        # images filtered by name
        params = {'name': self.image1['name']}
        images = self.client.list_images(detail=True, **params)['images']
        self.assertTrue(any([i for i in images if i['id'] == self.image1_id]))
        self.assertFalse(any([i for i in images if i['id'] == self.image2_id]))
        self.assertFalse(any([i for i in images if i['id'] == self.image3_id]))

    @test.idempotent_id('ba2fa9a9-b672-47cc-b354-3b4c0600e2cb')
    def test_list_images_with_detail_limit_results(self):
        # Verify only the expected number of results (with full details)
        # are returned
        params = {'limit': '1'}
        images = self.client.list_images(detail=True, **params)['images']
        self.assertEqual(1, len(images))

    @test.idempotent_id('8c78f822-203b-4bf6-8bba-56ebd551cf84')
    @testtools.skipUnless(CONF.compute_feature_enabled.snapshot,
                          'Snapshotting is not available.')
    def test_list_images_with_detail_filter_by_server_ref(self):
        # Detailed list of servers should be filtered by server ref
        server_links = self.server2['links']
        # Try all server link types
        for link in server_links:
            params = {'server': link['href']}
            images = self.client.list_images(detail=True, **params)['images']
            self.assertFalse(any([i for i in images
                                  if i['id'] == self.snapshot1_id]))
            self.assertFalse(any([i for i in images
                                  if i['id'] == self.snapshot2_id]))
            self.assertTrue(any([i for i in images
                                 if i['id'] == self.snapshot3_id]))

    @test.idempotent_id('888c0cc0-7223-43c5-9db0-b125fd0a393b')
    @testtools.skipUnless(CONF.compute_feature_enabled.snapshot,
                          'Snapshotting is not available.')
    def test_list_images_with_detail_filter_by_type(self):
        # The detailed list of servers should be filtered by image type
        params = {'type': 'snapshot'}
        images = self.client.list_images(detail=True, **params)['images']
        self.client.show_image(self.image_ref)
        self.assertTrue(any([i for i in images
                             if i['id'] == self.snapshot1_id]))
        self.assertTrue(any([i for i in images
                             if i['id'] == self.snapshot2_id]))
        self.assertTrue(any([i for i in images
                             if i['id'] == self.snapshot3_id]))
        self.assertFalse(any([i for i in images
                              if i['id'] == self.image_ref]))

    @test.idempotent_id('7d439e18-ac2e-4827-b049-7e18004712c4')
    def test_list_images_with_detail_filter_by_changes_since(self):
        # Verify an update image is returned
        # Becoming ACTIVE will modify the updated time
        # Filter by the image's created time
        params = {'changes-since': self.image1['created']}
        images = self.client.list_images(detail=True, **params)['images']
        self.assertTrue(any([i for i in images if i['id'] == self.image1_id]))
| |
"""Contains miscellaneous helpers"""
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from future.moves.urllib import request
from future.utils import PY2
from past.builtins import basestring
import logging
import ast
import copy
import hashlib
import locale
import operator
import os
import re
import sys
from collections import MutableMapping
from datetime import timedelta, datetime
from pprint import pformat
import flexget
import queue
import requests
from html.entities import name2codepoint
log = logging.getLogger('utils')
def str_to_boolean(string):
    """Return True when *string* (case-insensitive) is an affirmative value."""
    return string.lower() in {'true', '1', 't', 'y', 'yes'}
def str_to_int(string):
    """
    Convert a comma-grouped numeric string such as ``'1,234'`` to an int.

    Returns None when the input cannot be parsed as an integer. This now
    includes non-string input (e.g. None), which previously escaped the
    handler and raised AttributeError/TypeError instead of returning None.
    """
    try:
        return int(string.replace(',', ''))
    except (ValueError, AttributeError, TypeError):
        return None
if PY2:
    def native_str_to_text(string, **kwargs):
        # Python 2: the native str type is bytes; decode it to text
        # (ASCII by default, overridable via the ``encoding`` kwarg).
        if 'encoding' not in kwargs:
            kwargs['encoding'] = 'ascii'
        return string.decode(**kwargs)
else:
    def native_str_to_text(string, **kwargs):
        # Python 3: the native str type is already text; return unchanged.
        return string
def convert_bytes(bytes):
    """Returns given bytes as prettified string."""
    amount = float(bytes)
    # Largest-first thresholds; format strings unchanged from the original.
    for threshold, fmt in ((1099511627776, '%.2fT'),
                           (1073741824, '%.2fG'),
                           (1048576, '%.2fM'),
                           (1024, '%.2fK')):
        if amount >= threshold:
            return fmt % (amount / threshold)
    return '%.2fb' % amount
class MergeException(Exception):
    """Signals that two values could not be merged."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return '%r' % (self.value,)
def strip_html(text):
    """Tries to strip all HTML tags from *text*. If unsuccessful returns original text."""
    try:
        # Import inside the try block: if BeautifulSoup is not installed we
        # fall back to returning the text unchanged, matching the documented
        # best-effort contract (previously a missing bs4 raised ImportError).
        from bs4 import BeautifulSoup
        text = ' '.join(BeautifulSoup(text).find_all(text=True))
        return ' '.join(text.split())
    except Exception:
        return text
# This pattern matches a character entity reference (a decimal numeric
# references, a hexadecimal numeric reference, or a named reference).
charrefpat = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?')
def _htmldecode(text):
"""Decode HTML entities in the given text."""
# From screpe.py - licensed under apache 2.0 .. should not be a problem for a MIT afaik
if isinstance(text, str):
uchr = chr
else:
def uchr(value):
value > 127 and chr(value) or chr(value)
def entitydecode(match, uchr=uchr):
entity = match.group(1)
if entity.startswith('#x'):
return uchr(int(entity[2:], 16))
elif entity.startswith('#'):
return uchr(int(entity[1:]))
elif entity in name2codepoint:
return uchr(name2codepoint[entity])
else:
return match.group(0)
return charrefpat.sub(entitydecode, text)
def decode_html(value):
    """Return *value* with its HTML character entities decoded.

    :param string value: String to be html-decoded
    :returns: Html decoded string
    """
    return _htmldecode(value)
def encode_html(unicode_data, encoding='ascii'):
    """
    Encode unicode_data for use as XML or HTML, with characters outside
    of the encoding converted to XML numeric character references.
    """
    try:
        return unicode_data.encode(encoding, 'xmlcharrefreplace')
    except ValueError:
        # Raised only on ancient interpreters that lack the
        # 'xmlcharrefreplace' error handler; emulate it by hand.
        return _xmlcharref_encode(unicode_data, encoding)
def _xmlcharref_encode(unicode_data, encoding):
"""Emulate Python 2.3's 'xmlcharrefreplace' encoding error handler."""
chars = []
# Phase through the unicode_data string one character at a time in
# order to catch unencodable characters:
for char in unicode_data:
try:
chars.append(char.encode(encoding, 'strict'))
except UnicodeError:
chars.append('&#%i;' % ord(char))
return ''.join(chars)
def merge_dict_from_to(d1, d2):
    """Merges dictionary d1 into dictionary d2. d1 will remain in original form."""
    scalar_types = (basestring, bool, int, float, type(None))
    for key, value in list(d1.items()):
        if key not in d2:
            # New key: deep-copy so d2 never aliases d1's containers.
            d2[key] = copy.deepcopy(value)
            continue
        existing = d2[key]
        if isinstance(value, type(existing)):
            if isinstance(value, dict):
                merge_dict_from_to(value, existing)
            elif isinstance(value, list):
                existing.extend(copy.deepcopy(value))
            elif isinstance(value, scalar_types):
                # Existing scalar value in d2 wins; nothing to do.
                pass
            else:
                raise Exception('Unknown type: %s value: %s in dictionary' % (type(value), repr(value)))
        elif isinstance(value, scalar_types) and isinstance(existing, scalar_types):
            # Allow overriding of non-container types with other non-container types
            pass
        else:
            raise MergeException('Merging key %s failed, conflicting datatypes %r vs. %r.' % (
                key, type(value).__name__, type(existing).__name__))
class SmartRedirectHandler(request.HTTPRedirectHandler):
    """Redirect handler that records the 3xx status code on the returned response."""

    def _tag_status(self, result, code):
        # Remember which redirect code produced this response.
        result.status = code
        return result

    def http_error_301(self, req, fp, code, msg, headers):
        parent_result = request.HTTPRedirectHandler.http_error_301(self, req, fp, code, msg, headers)
        return self._tag_status(parent_result, code)

    def http_error_302(self, req, fp, code, msg, headers):
        parent_result = request.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
        return self._tag_status(parent_result, code)
class ReList(list):
    """
    A list that stores regexps.

    You can add compiled or uncompiled regexps to the list.
    It will always return the compiled version.
    It will compile the text regexps on demand when first accessed.
    """

    # Set the default flags
    flags = re.IGNORECASE | re.UNICODE

    def __init__(self, *args, **kwargs):
        """Optional :flags: keyword argument with regexp flags to compile with"""
        if 'flags' in kwargs:
            self.flags = kwargs['flags']
            del kwargs['flags']
        list.__init__(self, *args, **kwargs)

    def __getitem__(self, k):
        # Compile lazily on first access and cache the compiled pattern back
        # into the list so later accesses are free.
        item = list.__getitem__(self, k)
        if isinstance(item, basestring):
            # BUG FIX: compile with self.flags (which honours the ``flags``
            # constructor kwarg) instead of always using the class defaults,
            # which silently discarded any caller-supplied flags.
            item = re.compile(item, self.flags)
            self[k] = item
        return item

    def __iter__(self):
        for i in range(len(self)):
            yield self[i]
# Determine the encoding for io
io_encoding = None
# Prefer whatever stdout reports (may be None when stdout is not a tty).
if hasattr(sys.stdout, 'encoding'):
    io_encoding = sys.stdout.encoding
if not io_encoding:
    try:
        io_encoding = locale.getpreferredencoding()
    except Exception:
        pass
if not io_encoding:
    # Default to utf8 if nothing can be determined
    io_encoding = 'utf8'
else:
    # Normalize the encoding
    io_encoding = io_encoding.lower()
    if io_encoding == 'cp65001':
        # cp65001 is the Windows codepage alias for UTF-8.
        io_encoding = 'utf8'
    elif io_encoding in ['us-ascii', '646', 'ansi_x3.4-1968']:
        # Common platform-specific aliases for plain ASCII.
        io_encoding = 'ascii'
def parse_timedelta(value):
    """Parse a string like '5 days' into a timedelta object. Also allows timedeltas to pass through.

    :param value: a ``timedelta``, an ``'<amount> <unit>'`` string, or a falsy value (meaning zero)
    :returns: the corresponding ``timedelta``
    :raises ValueError: if *value* cannot be interpreted as a time span
    """
    if isinstance(value, timedelta):
        # Allow timedelta objects to pass through
        return value
    if not value:
        # If no time is given, default to 0
        return timedelta()
    try:
        # split() (not split(' ')) tolerates repeated/odd whitespace;
        # unpacking fails with ValueError when there aren't exactly two parts.
        amount, unit = value.lower().split()
    except ValueError:
        raise ValueError('Invalid time format \'%s\'' % value)
    # Make sure unit name is plural, matching timedelta's keyword arguments.
    if not unit.endswith('s'):
        unit += 's'
    try:
        return timedelta(**{unit: float(amount)})
    except TypeError:
        # Unknown unit keyword (e.g. 'parsecs').
        raise ValueError('Invalid time format \'%s\'' % value)
def timedelta_total_seconds(td):
    """replaces python 2.7+ timedelta.total_seconds()"""
    # TODO: Remove this when we no longer support python 2.6
    native = getattr(td, 'total_seconds', None)
    if native is not None:
        return native()
    # Manual computation for interpreters lacking total_seconds().
    return td.days * 24 * 3600 + td.seconds + td.microseconds / 1000000
def multiply_timedelta(interval, number):
    """`timedelta`s can not normally be multiplied by floating points. This does that."""
    scaled_seconds = timedelta_total_seconds(interval) * number
    return timedelta(seconds=scaled_seconds)
if os.name == 'posix':
    def pid_exists(pid):
        """Check whether pid exists in the current process table."""
        import errno
        if pid < 0:
            return False
        try:
            # Signal 0 performs permission/existence checks only; it does not
            # actually deliver a signal.
            os.kill(pid, 0)
        except OSError as e:
            # EPERM means the process exists but belongs to another user.
            return e.errno == errno.EPERM
        else:
            return True
else:
    def pid_exists(pid):
        """Check whether pid exists (Windows variant, via the Win32 API)."""
        import ctypes
        import ctypes.wintypes
        kernel32 = ctypes.windll.kernel32
        PROCESS_QUERY_INFORMATION = 0x0400
        STILL_ACTIVE = 259
        handle = kernel32.OpenProcess(PROCESS_QUERY_INFORMATION, 0, pid)
        if handle == 0:
            return False
        # If the process exited recently, a pid may still exist for the handle.
        # So, check if we can get the exit code.
        exit_code = ctypes.wintypes.DWORD()
        is_running = kernel32.GetExitCodeProcess(handle, ctypes.byref(exit_code)) == 0
        kernel32.CloseHandle(handle)
        # See if we couldn't get the exit code or the exit code indicates that the
        # process is still running.
        return is_running or exit_code.value == STILL_ACTIVE
# AST binary-operator node type -> arithmetic function. The supported set is
# deliberately tiny so expression evaluation stays safe.
_binOps = {
    ast.Add: operator.add,
    ast.Sub: operator.sub,
    ast.Mult: operator.mul,
    ast.Div: operator.truediv,
    ast.Mod: operator.mod
}
def arithmeticEval(s):
    """
    A safe eval supporting basic arithmetic operations.

    Supports ``+ - * / %`` on numeric literals, plus unary ``+``/``-``
    (a generalization: the original rejected expressions like ``'-1 + 2'``).

    :param s: expression to evaluate
    :return: value
    :raises Exception: on any construct outside basic arithmetic
    """
    # Local dispatch tables keep this function self-contained.
    bin_ops = {
        ast.Add: operator.add,
        ast.Sub: operator.sub,
        ast.Mult: operator.mul,
        ast.Div: operator.truediv,
        ast.Mod: operator.mod,
    }
    unary_ops = {
        ast.UAdd: operator.pos,
        ast.USub: operator.neg,
    }
    node = ast.parse(s, mode='eval')

    def _eval(node):
        if isinstance(node, ast.Expression):
            return _eval(node.body)
        elif isinstance(node, ast.Str):
            return node.s
        elif isinstance(node, ast.Num):
            return node.n
        elif isinstance(node, ast.BinOp):
            return bin_ops[type(node.op)](_eval(node.left), _eval(node.right))
        elif isinstance(node, ast.UnaryOp) and type(node.op) in unary_ops:
            return unary_ops[type(node.op)](_eval(node.operand))
        else:
            raise Exception('Unsupported type {}'.format(node))

    return _eval(node.body)
class TimedDict(MutableMapping):
    """Acts like a normal dict, but keys will only remain in the dictionary for a specified time span."""
    def __init__(self, cache_time='5 minutes'):
        # cache_time may be a timedelta or a string parse_timedelta understands.
        self.cache_time = parse_timedelta(cache_time)
        # Maps key -> (insertion datetime, value).
        self._store = dict()
        self._last_prune = datetime.now()
    def _prune(self):
        """Prune all expired keys."""
        for key, (add_time, _) in list(self._store.items()):
            if add_time < datetime.now() - self.cache_time:
                del self._store[key]
        self._last_prune = datetime.now()
    def __getitem__(self, key):
        add_time, value = self._store[key]
        # Prune data and raise KeyError if expired
        if add_time < datetime.now() - self.cache_time:
            del self._store[key]
            raise KeyError(key, 'cache time expired')
        return value
    def __setitem__(self, key, value):
        # Make sure we clear periodically, even if old keys aren't accessed again
        if self._last_prune < datetime.now() - (2 * self.cache_time):
            self._prune()
        self._store[key] = (datetime.now(), value)
    def __delitem__(self, key):
        del self._store[key]
    def __iter__(self):
        # Uses our getitem to skip expired items
        return (key for key in list(self._store.keys()) if key in self)
    def __len__(self):
        # Counts only non-expired entries (delegates to __iter__).
        return len(list(self.__iter__()))
    def __repr__(self):
        # Render as key -> value, hiding the internal timestamps.
        return '%s(%r)' % (
            self.__class__.__name__, dict(list(zip(self._store, (v[1] for v in list(self._store.values()))))))
class BufferQueue(queue.Queue):
    """Used in place of a file-like object to capture text and access it safely from another thread."""

    # Allow access to the Empty error from here
    Empty = queue.Empty

    def write(self, line):
        # File-object protocol: each write becomes one queue item.
        self.put(line)
def singleton(cls):
    """Decorator ensuring at most one instance of *cls* is ever constructed."""
    instances = {}

    def getinstance(*args, **kwargs):
        # Build lazily on first call; every later call returns the same object.
        try:
            return instances[cls]
        except KeyError:
            instances[cls] = cls(*args, **kwargs)
            return instances[cls]
    return getinstance
def split_title_year(title):
    """Splits title containing a year into a title, year pair."""
    if not title:
        return
    # Cheap pre-check: without four consecutive digits there is no year.
    if not re.search(r'\d{4}', title):
        return title, None
    match = re.search(r'(.*?)\(?(\d{4})?\)?$', title)
    base_title = match.group(1).strip()
    year_text = match.group(2)
    year = int(year_text) if year_text else None
    return base_title, year
def get_latest_flexget_version_number():
    """
    Return latest Flexget version from http://download.flexget.com/latestversion
    """
    try:
        response = requests.get('http://download.flexget.com/latestversion')
    except requests.RequestException:
        # Network trouble: callers get None instead of an exception.
        return
    return response.text.strip()
def get_current_flexget_version():
    # Version string of the running FlexGet installation.
    return flexget.__version__
def parse_filesize(text_size, si=True):
    """
    Parses a data size and returns its value in mebibytes

    :param string text_size: string containing the data size to parse i.e. "5 GB"
    :param bool si: If True, possibly ambiguous units like KB, MB, GB will be assumed to be base 10 units,
    rather than the default base 2. i.e. if si then 50 GB = 47684 else 50GB = 51200
    :returns: an float with the data size in mebibytes
    :raises ValueError: if *text_size* does not look like a data size
    """
    prefix_order = {'': 0, 'k': 1, 'm': 2, 'g': 3, 't': 4, 'p': 5}
    # FIX: raw string — the original non-raw pattern relied on invalid string
    # escapes (\d, \s, ...), which emit DeprecationWarnings and will break on
    # future interpreters. The regex itself is unchanged.
    parsed_size = re.match(r'(\d+(?:[.,\s]\d+)*)(?:\s*)((?:[ptgmk]i?)?b)', text_size.strip().lower(), flags=re.UNICODE)
    if not parsed_size:
        raise ValueError('%s does not look like a file size' % text_size)
    amount = parsed_size.group(1)
    unit = parsed_size.group(2)
    if not unit.endswith('b'):
        raise ValueError('%s does not look like a file size' % text_size)
    unit = unit.rstrip('b')
    if unit.endswith('i'):
        # 'KiB', 'MiB', ... are explicitly binary units regardless of *si*.
        si = False
        unit = unit.rstrip('i')
    if unit not in prefix_order:
        raise ValueError('%s does not look like a file size' % text_size)
    order = prefix_order[unit]
    amount = float(amount.replace(',', '').replace(' ', ''))
    base = 1000 if si else 1024
    # Scale the amount to bytes, then express the result in mebibytes.
    return (amount * (base ** order)) / 1024 ** 2
def get_config_hash(config):
    """
    :param dict config: Configuration
    :return: MD5 hash for *config*
    """
    # Containers are pretty-printed first (pformat sorts dict keys), so
    # logically equal configs hash identically.
    if isinstance(config, (dict, list)):
        serialized = pformat(config)
    else:
        serialized = str(config)
    return hashlib.md5(serialized.encode('utf-8')).hexdigest()
def parse_episode_identifier(ep_id):
    """
    Parses series episode identifier, raises ValueError if it fails

    :param ep_id: Value to parse
    :return: Return identifier type: `sequence`, `ep` or `date`
    :raises ValueError: If ep_id does not match any valid types
    """
    if isinstance(ep_id, int):
        if ep_id <= 0:
            raise ValueError('sequence type episode must be higher than 0')
        return 'sequence'
    if re.match(r'(?i)^S\d{1,4}E\d{1,3}$', ep_id):
        return 'ep'
    if re.match(r'\d{4}-\d{2}-\d{2}', ep_id):
        return 'date'
    # Last resort: a sequence identifier passed as a string.
    try:
        numeric = int(ep_id)
    except ValueError:
        raise ValueError('`%s` is not a valid episode identifier.' % ep_id)
    if numeric <= 0:
        raise ValueError('sequence type episode must be higher than 0')
    return 'sequence'
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
# JSON request/response payloads are untyped in this generated client.
JSONType = Any
# Signature of the optional ``cls`` response-transform callback accepted by
# every operation: (pipeline response, deserialized body, response headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
# Validation is performed by the operations layer, not the serializer.
_SERIALIZER.client_side_validation = False
def build_get_request(
    resource_group_name: str,
    managed_instance_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a managed instance's current SQL Agent configuration."""
    api_version = "2020-11-01-preview"
    accept = "application/json"

    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/sqlAgent/current')
    url = _format_url_section(
        url,
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        managedInstanceName=_SERIALIZER.url("managed_instance_name", managed_instance_name, 'str'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
    )

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_create_or_update_request(
    resource_group_name: str,
    managed_instance_name: str,
    subscription_id: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that replaces a managed instance's SQL Agent configuration."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    api_version = "2020-11-01-preview"
    accept = "application/json"

    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/sqlAgent/current')
    url = _format_url_section(
        url,
        resourceGroupName=_SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        managedInstanceName=_SERIALIZER.url("managed_instance_name", managed_instance_name, 'str'),
        subscriptionId=_SERIALIZER.url("subscription_id", subscription_id, 'str'),
    )

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="PUT",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        json=json,
        content=content,
        **kwargs
    )
class SqlAgentOperations(object):
    """SqlAgentOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.sql.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def get(
        self,
        resource_group_name: str,
        managed_instance_name: str,
        **kwargs: Any
    ) -> "_models.SqlAgentConfiguration":
        """Gets current instance sql agent configuration.
        :param resource_group_name: The name of the resource group that contains the resource. You can
        obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param managed_instance_name: The name of the managed instance.
        :type managed_instance_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SqlAgentConfiguration, or the result of cls(response)
        :rtype: ~azure.mgmt.sql.models.SqlAgentConfiguration
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SqlAgentConfiguration"]
        # Map well-known HTTP failures to typed azure-core exceptions; callers
        # may extend/override via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_request(
            resource_group_name=resource_group_name,
            managed_instance_name=managed_instance_name,
            subscription_id=self._config.subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('SqlAgentConfiguration', pipeline_response)
        if cls:
            # Hand the raw pipeline response to the caller-supplied transform.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/sqlAgent/current'}  # type: ignore
    @distributed_trace
    def create_or_update(
        self,
        resource_group_name: str,
        managed_instance_name: str,
        parameters: "_models.SqlAgentConfiguration",
        **kwargs: Any
    ) -> "_models.SqlAgentConfiguration":
        """Puts new sql agent configuration to instance.
        :param resource_group_name: The name of the resource group that contains the resource. You can
        obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param managed_instance_name: The name of the managed instance.
        :type managed_instance_name: str
        :param parameters:
        :type parameters: ~azure.mgmt.sql.models.SqlAgentConfiguration
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SqlAgentConfiguration, or the result of cls(response)
        :rtype: ~azure.mgmt.sql.models.SqlAgentConfiguration
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SqlAgentConfiguration"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        # Serialize the model to its JSON wire format before building the request.
        _json = self._serialize.body(parameters, 'SqlAgentConfiguration')
        request = build_create_or_update_request(
            resource_group_name=resource_group_name,
            managed_instance_name=managed_instance_name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            json=_json,
            template_url=self.create_or_update.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('SqlAgentConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/sqlAgent/current'}  # type: ignore
| |
#!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
# Copyright (c) 2010-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Bitcoin P2P network half-a-node.
This python code was modified from ArtForz' public domain half-a-node, as
found in the mini-node branch of http://github.com/jgarzik/pynode.
P2PConnection: A low-level connection object to a node's P2P interface
P2PInterface: A high-level interface object for communicating to a node over P2P"""
import asyncore
from collections import defaultdict
from io import BytesIO
import logging
import socket
import struct
import sys
import threading
from test_framework.messages import *
from test_framework.util import wait_until
logger = logging.getLogger("TestFramework.mininode")
# Wire command name (null-padded on the wire) -> message class used to
# deserialize that command's payload.
MESSAGEMAP = {
    b"addr": msg_addr,
    b"block": msg_block,
    b"blocktxn": msg_blocktxn,
    b"cmpctblock": msg_cmpctblock,
    b"feefilter": msg_feefilter,
    b"getaddr": msg_getaddr,
    b"getblocks": msg_getblocks,
    b"getblocktxn": msg_getblocktxn,
    b"getdata": msg_getdata,
    b"getheaders": msg_getheaders,
    b"headers": msg_headers,
    b"inv": msg_inv,
    b"mempool": msg_mempool,
    b"ping": msg_ping,
    b"pong": msg_pong,
    b"reject": msg_reject,
    b"sendcmpct": msg_sendcmpct,
    b"sendheaders": msg_sendheaders,
    b"tx": msg_tx,
    b"verack": msg_verack,
    b"version": msg_version,
}
# Per-network 4-byte message-start magic. NOTE(review): these values differ
# from upstream Bitcoin Core's magics — they appear to be this project's own.
MAGIC_BYTES = {
    "mainnet": b"\x32\x5e\x6f\x86",  # mainnet
    "testnet": b"\x1b\xba\x63\xc5",  # testnet
    "regtest": b"\xcd\xf3\xe0\xee",  # regtest
}
class P2PConnection(asyncore.dispatcher):
    """A low-level connection object to a node's P2P interface.

    This class is responsible for:

    - opening and closing the TCP connection to the node
    - reading bytes from and writing bytes to the socket
    - deserializing and serializing the P2P message header
    - logging messages as they are sent and received

    This class contains no logic for handing the P2P message payloads. It must be
    sub-classed and the on_message() callback overridden."""

    def __init__(self):
        # All P2PConnections must be created before starting the NetworkThread.
        # assert that the network thread is not running.
        assert not network_thread_running()

        super().__init__(map=mininode_socket_map)

    def peer_connect(self, dstaddr, dstport, net="regtest"):
        """Open a TCP connection to dstaddr:dstport using *net*'s magic bytes."""
        self.dstaddr = dstaddr
        self.dstport = dstport
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        self.sendbuf = b""
        self.recvbuf = b""
        self.state = "connecting"
        self.network = net
        self.disconnect = False

        logger.info('Connecting to Bitcoin Node: %s:%d' % (self.dstaddr, self.dstport))

        try:
            self.connect((dstaddr, dstport))
        except:
            self.handle_close()

    def peer_disconnect(self):
        # Connection could have already been closed by other end.
        if self.state == "connected":
            self.disconnect_node()

    # Connection and disconnection methods

    def handle_connect(self):
        """asyncore callback when a connection is opened."""
        if self.state != "connected":
            logger.debug("Connected & Listening: %s:%d" % (self.dstaddr, self.dstport))
            self.state = "connected"
            self.on_open()

    def handle_close(self):
        """asyncore callback when a connection is closed."""
        logger.debug("Closing connection to: %s:%d" % (self.dstaddr, self.dstport))
        self.state = "closed"
        self.recvbuf = b""
        self.sendbuf = b""
        try:
            self.close()
        except:
            pass
        self.on_close()

    def disconnect_node(self):
        """Disconnect the p2p connection.

        Called by the test logic thread. Causes the p2p connection
        to be disconnected on the next iteration of the asyncore loop."""
        self.disconnect = True

    # Socket read methods

    def handle_read(self):
        """asyncore callback when data is read from the socket."""
        t = self.recv(8192)
        if len(t) > 0:
            self.recvbuf += t
            self._on_data()

    def _on_data(self):
        """Try to read P2P messages from the recv buffer.

        This method reads data from the buffer in a loop. It deserializes,
        parses and verifies the P2P header, then passes the P2P payload to
        the on_message callback for processing."""
        try:
            while True:
                # Header layout: 4B magic, 12B command, 4B length, 4B checksum.
                if len(self.recvbuf) < 4:
                    return
                if self.recvbuf[:4] != MAGIC_BYTES[self.network]:
                    raise ValueError("got garbage %s" % repr(self.recvbuf))
                if len(self.recvbuf) < 4 + 12 + 4 + 4:
                    return
                command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
                msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
                checksum = self.recvbuf[4+12+4:4+12+4+4]
                if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
                    return
                msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
                # Checksum is the first 4 bytes of double-SHA256 of the payload.
                th = sha256(msg)
                h = sha256(th)
                if checksum != h[:4]:
                    raise ValueError("got bad checksum " + repr(self.recvbuf))
                self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
                if command not in MESSAGEMAP:
                    raise ValueError("Received unknown command from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, command, repr(msg)))
                f = BytesIO(msg)
                t = MESSAGEMAP[command]()
                t.deserialize(f)
                self._log_message("receive", t)
                self.on_message(t)
        except Exception as e:
            # BUG FIX: the original called
            #   logger.exception('Error reading message:', repr(e))
            # passing a %-style argument to a format string with no
            # placeholder, which makes the logging module itself error out
            # ("not all arguments converted"). Supply the placeholder.
            logger.exception('Error reading message: %s', repr(e))
            raise

    def on_message(self, message):
        """Callback for processing a P2P payload. Must be overridden by derived class."""
        raise NotImplementedError

    # Socket write methods

    def writable(self):
        """asyncore method to determine whether the handle_write() callback should be called on the next loop."""
        with mininode_lock:
            pre_connection = self.state == "connecting"
            length = len(self.sendbuf)
        return (length > 0 or pre_connection)

    def handle_write(self):
        """asyncore callback when data should be written to the socket."""
        with mininode_lock:
            # asyncore does not expose socket connection, only the first read/write
            # event, thus we must check connection manually here to know when we
            # actually connect
            if self.state == "connecting":
                self.handle_connect()
            if not self.writable():
                return

            try:
                sent = self.send(self.sendbuf)
            except:
                self.handle_close()
                return
            self.sendbuf = self.sendbuf[sent:]

    def send_message(self, message, pushbuf=False):
        """Send a P2P message over the socket.

        This method takes a P2P payload, builds the P2P header and adds
        the message to the send buffer to be sent over the socket."""
        if self.state != "connected" and not pushbuf:
            raise IOError('Not connected, no pushbuf')
        self._log_message("send", message)
        command = message.command
        data = message.serialize()
        # Build the 24-byte header: magic + padded command + length + checksum.
        tmsg = MAGIC_BYTES[self.network]
        tmsg += command
        tmsg += b"\x00" * (12 - len(command))
        tmsg += struct.pack("<I", len(data))
        th = sha256(data)
        h = sha256(th)
        tmsg += h[:4]
        tmsg += data
        with mininode_lock:
            if (len(self.sendbuf) == 0 and not pushbuf):
                try:
                    # Opportunistic direct send; buffer whatever didn't fit.
                    sent = self.send(tmsg)
                    self.sendbuf = tmsg[sent:]
                except BlockingIOError:
                    self.sendbuf = tmsg
            else:
                self.sendbuf += tmsg

    # Class utility methods

    def _log_message(self, direction, msg):
        """Logs a message being sent or received over the connection."""
        if direction == "send":
            log_message = "Send message to "
        elif direction == "receive":
            log_message = "Received message from "
        log_message += "%s:%d: %s" % (self.dstaddr, self.dstport, repr(msg)[:500])
        if len(log_message) > 500:
            log_message += "... (msg truncated)"
        logger.debug(log_message)
class P2PInterface(P2PConnection):
    """A high-level P2P interface class for communicating with a Bitcoin node.

    This class provides high-level callbacks for processing P2P message
    payloads, as well as convenience methods for interacting with the
    node over P2P.

    Individual testcases should subclass this and override the on_* methods
    if they want to alter message handling behaviour."""
    def __init__(self):
        super().__init__()
        # Track number of messages of each type received and the most recent
        # message of each type
        self.message_count = defaultdict(int)
        self.last_message = {}
        # A count of the number of ping messages we've sent to the node
        self.ping_counter = 1
        # The network services received from the peer
        self.nServices = 0
    def peer_connect(self, *args, services=NODE_NETWORK|NODE_WITNESS, send_version=True, **kwargs):
        """Connect and (by default) immediately start the version handshake."""
        super().peer_connect(*args, **kwargs)
        if send_version:
            # Send a version msg
            vt = msg_version()
            vt.nServices = services
            vt.addrTo.ip = self.dstaddr
            vt.addrTo.port = self.dstport
            vt.addrFrom.ip = "0.0.0.0"
            vt.addrFrom.port = 0
            self.send_message(vt, True)
    # Message receiving methods
    def on_message(self, message):
        """Receive message and dispatch message to appropriate callback.

        We keep a count of how many of each message type has been received
        and the most recent message of each type."""
        with mininode_lock:
            try:
                command = message.command.decode('ascii')
                self.message_count[command] += 1
                self.last_message[command] = message
                # Dynamic dispatch: 'ping' -> self.on_ping(message), etc.
                getattr(self, 'on_' + command)(message)
            except:
                print("ERROR delivering %s (%s)" % (repr(message), sys.exc_info()[0]))
                raise
    # Callback methods. Can be overridden by subclasses in individual test
    # cases to provide custom message handling behaviour.
    def on_open(self):
        pass
    def on_close(self):
        pass
    # Default no-op handlers, one per protocol message type.
    def on_addr(self, message): pass
    def on_block(self, message): pass
    def on_blocktxn(self, message): pass
    def on_cmpctblock(self, message): pass
    def on_feefilter(self, message): pass
    def on_getaddr(self, message): pass
    def on_getblocks(self, message): pass
    def on_getblocktxn(self, message): pass
    def on_getdata(self, message): pass
    def on_getheaders(self, message): pass
    def on_headers(self, message): pass
    def on_mempool(self, message): pass
    def on_pong(self, message): pass
    def on_reject(self, message): pass
    def on_sendcmpct(self, message): pass
    def on_sendheaders(self, message): pass
    def on_tx(self, message): pass
    def on_inv(self, message):
        # Request every advertised object (type 0 entries are errors).
        want = msg_getdata()
        for i in message.inv:
            if i.type != 0:
                want.inv.append(i)
        if len(want.inv):
            self.send_message(want)
    def on_ping(self, message):
        # Echo the nonce back so the node sees us as responsive.
        self.send_message(msg_pong(message.nonce))
    def on_verack(self, message):
        self.verack_received = True
    def on_version(self, message):
        assert message.nVersion >= MIN_VERSION_SUPPORTED, "Version {} received. Test framework only supports versions greater than {}".format(message.nVersion, MIN_VERSION_SUPPORTED)
        self.send_message(msg_verack())
        self.nServices = message.nServices
    # Connection helper methods
    def wait_for_disconnect(self, timeout=60):
        test_function = lambda: self.state != "connected"
        wait_until(test_function, timeout=timeout, lock=mininode_lock)
    # Message receiving helper methods
    def wait_for_block(self, blockhash, timeout=60):
        test_function = lambda: self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash
        wait_until(test_function, timeout=timeout, lock=mininode_lock)
    def wait_for_getdata(self, timeout=60):
        test_function = lambda: self.last_message.get("getdata")
        wait_until(test_function, timeout=timeout, lock=mininode_lock)
    def wait_for_getheaders(self, timeout=60):
        test_function = lambda: self.last_message.get("getheaders")
        wait_until(test_function, timeout=timeout, lock=mininode_lock)
    def wait_for_inv(self, expected_inv, timeout=60):
        """Waits for an INV message and checks that the first inv object in the message was as expected."""
        if len(expected_inv) > 1:
            raise NotImplementedError("wait_for_inv() will only verify the first inv object")
        test_function = lambda: self.last_message.get("inv") and \
            self.last_message["inv"].inv[0].type == expected_inv[0].type and \
            self.last_message["inv"].inv[0].hash == expected_inv[0].hash
        wait_until(test_function, timeout=timeout, lock=mininode_lock)
    def wait_for_verack(self, timeout=60):
        test_function = lambda: self.message_count["verack"]
        wait_until(test_function, timeout=timeout, lock=mininode_lock)
    # Message sending helper functions
    def send_and_ping(self, message):
        self.send_message(message)
        self.sync_with_ping()
    # Sync up with the node
    def sync_with_ping(self, timeout=60):
        # A pong with our unique nonce proves the node has processed
        # everything we sent before the ping.
        self.send_message(msg_ping(nonce=self.ping_counter))
        test_function = lambda: self.last_message.get("pong") and self.last_message["pong"].nonce == self.ping_counter
        wait_until(test_function, timeout=timeout, lock=mininode_lock)
        self.ping_counter += 1
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
# Keyed by file descriptor (see NetworkThread.run below), values are the
# connection objects registered with asyncore.
mininode_socket_map = dict()
# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# P2PConnection acquires this lock whenever delivering a message to a P2PInterface,
# and whenever adding anything to the send buffer (in send_message()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the P2PInterface or P2PConnection.
mininode_lock = threading.RLock()
class NetworkThread(threading.Thread):
    """Drives the asyncore event loop until no sockets remain in the map."""

    def __init__(self):
        super().__init__(name="NetworkThread")

    def run(self):
        while mininode_socket_map:
            # Disconnects are handled here, outside the asyncore loop, to
            # work around asyncore's behavior when using select.
            to_close = [obj for obj in mininode_socket_map.values()
                        if obj.disconnect]
            for connection in to_close:
                connection.handle_close()
            asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1)
        logger.debug("Network thread closing")
def network_thread_start():
    """Start the network thread. Only one may run at a time."""
    assert not network_thread_running()
    thread = NetworkThread()
    thread.start()
def network_thread_running():
    """Return whether a thread named "NetworkThread" is currently alive."""
    return any(thread.name == "NetworkThread"
               for thread in threading.enumerate())
def network_thread_join(timeout=10):
    """Wait timeout seconds for the network thread to terminate.
    Throw if the network thread doesn't terminate in timeout seconds."""
    network_threads = [t for t in threading.enumerate()
                       if t.name == "NetworkThread"]
    # At most one network thread may exist at any time.
    assert len(network_threads) <= 1
    for t in network_threads:
        t.join(timeout)
        assert not t.is_alive()
| |
import unittest
import numpy
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainer import gradient_check
from chainer import testing
from chainer.testing import array
from chainer.testing import attr
from chainer.testing import backend
from chainer.testing import condition
from chainer.testing import parameterize
from chainer.utils import conv
def _pair(x):
if hasattr(x, '__getitem__'):
return x
return x, x
@parameterize(*(testing.product([
    testing.product({
        'c_contiguous': [True],
        'test_outsize': [True, False],
        'nobias': [True],
        'stride': [1, 2],
        'dilate': [1],
        'x_dtype': [numpy.float32],
        'W_dtype': [numpy.float32],
        'groups': [1, 2],
    })
    + testing.product({
        'c_contiguous': [False],
        'test_outsize': [True],
        'nobias': [False],
        'stride': [1, 2],
        'dilate': [1, 2],
        'x_dtype': [numpy.float16, numpy.float32, numpy.float64],
        'W_dtype': [numpy.float16, numpy.float32, numpy.float64],
        'groups': [1, 2],
    }),
])))
@backend.inject_backend_tests(
    ['test_forward', 'test_backward', 'test_double_backward'],
    # CPU tests
    testing.product({
        'use_cuda': [False],
        'use_ideep': ['never', 'always'],
    })
    # GPU tests
    + testing.product([
        [{'use_cuda': True}],
        # Without cuDNN
        testing.product({
            'use_cudnn': ['never'],
        })
        # With cuDNN
        + testing.product({
            'use_cudnn': ['always'],
            'cudnn_deterministic': [True, False],
            'autotune': [True, False],
        })])
    # ChainerX tests
    + testing.product({
        'use_chainerx': [True],
        'chainerx_device': ['native:0', 'cuda:0'],
    }))
class TestDeconvolution2DFunction(unittest.TestCase):
    """Tests F.deconvolution_2d forward/backward/double-backward against a
    CPU reference, parameterized over dtypes, stride, dilation, groups,
    contiguity and the injected backends listed above."""

    def setUp(self):
        # Channel counts scale with the number of groups so each group gets
        # a fixed number of channels.
        in_channels_a_group = 3
        out_channels_a_group = 2
        self.in_channels = in_channels_a_group * self.groups
        self.out_channels = out_channels_a_group * self.groups
        self.ksize = 3
        self.pad = 1
        kh, kw = _pair(self.ksize)
        sh, sw = _pair(self.stride)
        ph, pw = _pair(self.pad)
        # Deconvolution weight layout: (in_channels, out_channels_per_group,
        # kh, kw); scale chosen relative to fan-in.
        W = numpy.random.normal(
            0, numpy.sqrt(1. / (kh * kw * in_channels_a_group)),
            (self.in_channels, out_channels_a_group, kh, kw)
        ).astype(self.W_dtype)
        b = None if self.nobias else numpy.random.uniform(
            -1, 1, self.out_channels).astype(self.x_dtype)
        N = 2
        inh, inw = 4, 3
        # Output spatial size implied by input size, kernel, stride, pad
        # and dilation.
        outh = conv.get_deconv_outsize(inh, kh, sh, ph, d=self.dilate)
        outw = conv.get_deconv_outsize(inw, kw, sw, pw, d=self.dilate)
        self.outsize = (outh, outw) if self.test_outsize else None
        x = numpy.random.uniform(
            -1, 1, (N, self.in_channels, inh, inw)).astype(self.x_dtype)
        gy = numpy.random.uniform(
            -1, 1, (N, self.out_channels, outh, outw)).astype(self.x_dtype)
        ggx = numpy.random.uniform(-1, 1, x.shape).astype(
            self.x_dtype)
        ggW = numpy.random.uniform(-1, 1, W.shape).astype(
            self.W_dtype)
        ggb = None if self.nobias else numpy.random.uniform(
            -1, 1, b.shape).astype(self.x_dtype)
        self.inputs = [x, W, b]
        self.grad_outputs = [gy]
        self.grad_grad_inputs = [ggx, ggW, ggb]
        # Tolerances are loosened when either input or weight is half
        # precision.
        self.test_forward_options = {}
        self.check_backward_options = {'dtype': numpy.float64}
        self.check_double_backward_options = {'dtype': numpy.float64}
        if self.x_dtype == numpy.float16:
            self.test_forward_options.update(atol=5e-3, rtol=5e-2)
            self.check_backward_options.update(atol=5e-4, rtol=5e-3)
            self.check_double_backward_options.update(atol=5e-3, rtol=5e-2)
        elif self.W_dtype == numpy.float16:
            self.check_backward_options.update(atol=5e-4, rtol=5e-3)
            self.check_double_backward_options.update(atol=5e-3, rtol=5e-2)

    def forward_cpu(self, inputs):
        """Reference forward pass on the CPU with iDeep disabled."""
        x, W, b = inputs
        x_cpu = chainer.Variable(x)
        W_cpu = chainer.Variable(W)
        b_cpu = None if b is None else chainer.Variable(b)
        with chainer.using_config('use_ideep', 'never'):
            y_cpu = F.deconvolution_2d(
                x_cpu, W_cpu, b_cpu, stride=self.stride, pad=self.pad,
                outsize=self.outsize, dilate=self.dilate, groups=self.groups)
        return y_cpu,

    def check_forward(self, inputs, backend_config):
        """Compare the backend-under-test forward output with the CPU
        reference within self.test_forward_options tolerances."""
        y_expected, = self.forward_cpu(inputs)
        x, W, b = backend_config.get_array(inputs)
        x = chainer.Variable(x)
        W = chainer.Variable(W)
        b = None if b is None else chainer.Variable(b)
        with backend_config:
            y_actual = F.deconvolution_2d(
                x, W, b, stride=self.stride, pad=self.pad,
                outsize=self.outsize, dilate=self.dilate, groups=self.groups)
        assert y_expected.data.dtype == self.x_dtype
        assert y_actual.data.dtype == self.x_dtype
        testing.assert_allclose(
            y_expected.data, y_actual.data, **self.test_forward_options)

    # NOTE(review): @attr.gpu looks out of place here -- the injected
    # backend configurations above include CPU-only cases, which this
    # marker would skip on machines without a GPU. Confirm whether it
    # should be removed.
    @attr.gpu
    def test_forward(self, backend_config):
        self.check_forward(self.inputs, backend_config)

    def check_backward(self, inputs, grad_outputs, backend_config):
        """Numerically check first-order gradients on the given backend."""
        inputs = backend_config.get_array(inputs)
        grad_outputs = backend_config.get_array(grad_outputs)
        if not self.c_contiguous:
            inputs = array._as_noncontiguous_array(inputs)
            grad_outputs = array._as_noncontiguous_array(grad_outputs)
        x_data, W_data, b_data = inputs
        y_grad, = grad_outputs
        # Bias is passed positionally only when present.
        args = (x_data, W_data)
        if b_data is not None:
            args = args + (b_data,)

        def f(*args):
            return F.deconvolution_2d(
                *args, stride=self.stride, pad=self.pad, outsize=self.outsize,
                dilate=self.dilate, groups=self.groups)

        with backend_config:
            gradient_check.check_backward(
                f, args, y_grad, **self.check_backward_options)

    @condition.retry(10)
    def test_backward(self, backend_config):
        self.check_backward(self.inputs, self.grad_outputs, backend_config)

    def check_double_backward(
            self, inputs, grad_outputs, grad_grad_inputs, backend_config):
        """Numerically check second-order gradients on the given backend."""
        inputs = backend_config.get_array(inputs)
        grad_outputs = backend_config.get_array(grad_outputs)
        grad_grad_inputs = backend_config.get_array(grad_grad_inputs)
        if not self.c_contiguous:
            inputs = array._as_noncontiguous_array(inputs)
            grad_outputs = array._as_noncontiguous_array(grad_outputs)
            grad_grad_inputs = array._as_noncontiguous_array(grad_grad_inputs)
        x_data, W_data, b_data = inputs
        y_grad, = grad_outputs
        x_grad_grad, W_grad_grad, b_grad_grad = grad_grad_inputs
        args = (x_data, W_data)
        grad_grads = (x_grad_grad, W_grad_grad)
        if b_data is not None:
            args = args + (b_data,)
            grad_grads = grad_grads + (b_grad_grad,)

        def f(*args):
            return F.deconvolution_2d(
                *args, stride=self.stride, pad=self.pad, outsize=self.outsize,
                dilate=self.dilate, groups=self.groups)

        with backend_config:
            gradient_check.check_double_backward(
                f, args, y_grad, grad_grads,
                **self.check_double_backward_options)

    @condition.retry(10)
    def test_double_backward(self, backend_config):
        self.check_double_backward(self.inputs, self.grad_outputs,
                                   self.grad_grad_inputs, backend_config)
@testing.parameterize(*testing.product({
    'use_cudnn': ['always', 'auto', 'never'],
    'cudnn_deterministic': [True, False],
    'dtype': [numpy.float16, numpy.float32, numpy.float64],
    'groups': [1, 2],
}))
@attr.cudnn
class TestDeconvolution2DCudnnCall(unittest.TestCase):
    """Checks that F.deconvolution_2d calls into cuDNN exactly when the
    `use_cudnn` configuration says it should, by patching the cupy.cudnn
    entry points and asserting on their `called` flag."""

    def setUp(self):
        in_channels_a_group = 3
        out_channels_a_group = 2
        self.in_channels = in_channels_a_group * self.groups
        self.out_channels = out_channels_a_group * self.groups
        kh, kw = _pair(3)
        sh, sw = _pair(1)
        ph, pw = _pair(1)
        self.W = cuda.cupy.random.normal(
            0, numpy.sqrt(1. / (kh * kw * in_channels_a_group)),
            (self.in_channels, out_channels_a_group, kh, kw)
        ).astype(self.dtype)
        N = 2
        inh, inw = 4, 3
        outh = conv.get_deconv_outsize(inh, kh, sh, ph)
        outw = conv.get_deconv_outsize(inw, kw, sw, pw)
        self.x = cuda.cupy.random.uniform(
            -1, 1, (N, self.in_channels, inh, inw)).astype(self.dtype)
        self.gy = cuda.cupy.random.uniform(
            -1, 1, (N, self.out_channels, outh, outw)).astype(self.dtype)
        # Expected dispatch decision under the parameterized use_cudnn
        # setting; grouped convolution needs cuDNN >= 7.
        with chainer.using_config('use_cudnn', self.use_cudnn):
            self.should_call_cudnn = chainer.should_use_cudnn('>=auto')
            if self.groups > 1 and cuda.cuda.cudnn.getVersion() < 7000:
                self.should_call_cudnn = False

    def forward(self):
        x = chainer.Variable(self.x)
        W = chainer.Variable(self.W)
        return F.deconvolution_2d(x, W, None, stride=1, pad=1,
                                  groups=self.groups)

    def test_call_cudnn_forward(self):
        # Deconvolution forward maps onto cuDNN's backward-data kernel.
        name = 'cupy.cudnn.convolution_backward_data'
        with chainer.using_config('use_cudnn', self.use_cudnn):
            with chainer.using_config('cudnn_deterministic',
                                      self.cudnn_deterministic):
                with testing.patch(name) as func:
                    self.forward()
                self.assertEqual(func.called, self.should_call_cudnn)

    def test_call_cudnn_backward(self):
        # The gradient pass uses cuDNN's forward and backward-filter kernels.
        data_func_name = 'cupy.cudnn.convolution_forward'
        filter_func_name = 'cupy.cudnn.convolution_backward_filter'
        with chainer.using_config('use_cudnn', self.use_cudnn):
            with chainer.using_config('cudnn_deterministic',
                                      self.cudnn_deterministic):
                y = self.forward()
                y.grad = self.gy
                with testing.patch(data_func_name) as data_func, \
                        testing.patch(filter_func_name) as filter_func:
                    y.backward()
                self.assertEqual(
                    data_func.called, self.should_call_cudnn)
                self.assertEqual(
                    filter_func.called, self.should_call_cudnn)
@testing.parameterize(*testing.product({
    'c_contiguous': [True, False],
    'cudnn_deterministic': [True, False],
    'nobias': [True, False],
    'groups': [1, 2],
}))
@attr.gpu
@attr.cudnn
class TestDeconvolution2DFunctionCudnnDeterministic(unittest.TestCase):
    """Runs the same forward+backward computation twice under
    cudnn_deterministic=True and asserts bit-identical results."""

    def setUp(self):
        self.stride = 2
        self.pad = 1
        batch_sz = 2
        in_channels_a_group = 64
        out_channels_a_group = 64
        in_channels = in_channels_a_group * self.groups
        out_channels = out_channels_a_group * self.groups
        kh, kw = (3, 3)
        in_h, in_w = (32, 128)
        out_h, out_w = (63, 255)
        # should be same types for cudnn test
        x_dtype = numpy.float32
        W_dtype = numpy.float32
        self.W = numpy.random.normal(
            0, numpy.sqrt(1. / (kh * kw * in_channels_a_group)),
            (out_channels, in_channels_a_group, kh, kw)).astype(W_dtype)
        self.b = numpy.random.uniform(-1, 1, out_channels).astype(x_dtype)
        self.x = numpy.random.uniform(
            -1, 1, (batch_sz, in_channels, in_h, in_w)).astype(x_dtype)
        self.gy = numpy.random.uniform(
            -1, 1, (batch_sz, out_channels, out_h, out_w)).astype(x_dtype)

    def test_cudnn_deterministic(self):
        # Two identical runs must produce identical outputs and gradients.
        x1, W1, b1, y1 = self._run()
        x2, W2, b2, y2 = self._run()
        cuda.cupy.testing.assert_array_equal(x1.grad, x2.grad)
        cuda.cupy.testing.assert_array_equal(y1.data, y2.data)
        cuda.cupy.testing.assert_array_equal(W1.grad, W2.grad)

    def _contiguous(self, *inputs):
        """Pass inputs through unchanged or make them non-contiguous,
        depending on the parameterized c_contiguous flag."""
        if self.c_contiguous:
            return inputs
        else:
            return array._as_noncontiguous_array(inputs)

    def _run(self):
        with chainer.using_config('cudnn_deterministic', True):
            # verify data continuity and move to gpu
            x_data, W_data, b_data, gy_data = tuple(
                cuda.to_gpu(data) for data in self._contiguous(
                    self.x, self.W, self.b, self.gy))
            x, W, b, y = self._run_forward(x_data, W_data, b_data)
            y.grad = gy_data
            y.backward()
            return x, W, b, y

    def _run_forward(self, x_data, W_data, b_data):
        x = chainer.Variable(x_data)
        W = chainer.Variable(W_data)
        b = None if self.nobias else chainer.Variable(b_data)
        with chainer.using_config('use_cudnn', 'always'):
            y = F.deconvolution_2d(x, W, b, stride=self.stride, pad=self.pad,
                                   groups=self.groups)
        return x, W, b, y
class TestDeconvolution2DInvalidDilation(unittest.TestCase):
    """dilate=0 is invalid; every backend must raise ValueError for it."""

    n_batches = 2
    in_channels = 3
    out_channels = 2
    dilate = 0
    x_shape = (n_batches, in_channels, 10, 10)
    w_shape = (in_channels, out_channels, 3, 3)

    def check_invalid_dilation(self, x_data, w_data):
        x = chainer.Variable(x_data)
        w = chainer.Variable(w_data)
        F.deconvolution_2d(x, w, dilate=self.dilate)

    def test_invalid_dilation_cpu(self):
        x = numpy.ones(self.x_shape, numpy.float32)
        w = numpy.ones(self.w_shape, numpy.float32)
        with self.assertRaises(ValueError):
            with chainer.using_config('use_ideep', 'never'):
                self.check_invalid_dilation(x, w)

    @attr.ideep
    def test_invalid_dilation_cpu_ideep(self):
        x = numpy.ones(self.x_shape, numpy.float32)
        w = numpy.ones(self.w_shape, numpy.float32)
        with self.assertRaises(ValueError):
            with chainer.using_config('use_ideep', 'always'):
                self.check_invalid_dilation(x, w)

    @attr.gpu
    def test_invalid_dilation_gpu(self):
        x = cuda.cupy.ones(self.x_shape, numpy.float32)
        w = cuda.cupy.ones(self.w_shape, numpy.float32)
        with self.assertRaises(ValueError):
            with chainer.using_config('use_cudnn', 'never'):
                self.check_invalid_dilation(x, w)

    @attr.cudnn
    def test_invalid_dilation_gpu_cudnn(self):
        x = cuda.cupy.ones(self.x_shape, numpy.float32)
        w = cuda.cupy.ones(self.w_shape, numpy.float32)
        with self.assertRaises(ValueError):
            # NOTE(review): 'cudnn' is not one of the usual use_cudnn
            # values ('always'/'auto'/'never') seen elsewhere in this file
            # -- possibly a typo for 'always'; confirm against the chainer
            # configuration docs.
            with chainer.using_config('use_cudnn', 'cudnn'):
                self.check_invalid_dilation(x, w)
testing.run_module(__name__, __file__)
| |
#!/usr/bin/python
import re
class PyBoolReException(Exception):
    """Raised for malformed boolean match expressions."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return str(self.value)
class PyBoolRe:
    """ A class to perform boolean word matches in
    a string or paragraph. This class allows you to
    perform complex matches in a string or group of
    words by creating simple boolean expressions,
    grouped by parantheses to create complex match
    expressions.

    Author: Anand B Pillai, http://tinyurl.com/yq3y
    Copyright: None
    LICENSE: GPL
    Version: 0.2

    Usage:

    1. Create regular expressions using the boolean
    keywords '|' and '&', standing for 'OR' and
    'AND' respectively.
    2. Use parantheses to group the boolean expressions
    to create complex match expressions.
    3. Caveats:
         1. Fails for expressions with redundant parens such
         as ((A | B)) etc.

    Example:

    p = PyBoolRe('Guido & Python')
    s = 'Guido created Python'
    mobject = p.match(s)
    # Work with 'mobject' like you normally work with
    # regular expression match objects
    """

    def __init__(self, boolstr):
        # Require whitespace before words?
        self.__needspace = True
        # whitespace re (raw string so '\s' is not an invalid escape)
        self._wspre = re.compile(r'^\s*$')
        # create regexp string
        self.__rexplist = []
        oparct = boolstr.count('(')
        clparct = boolstr.count(')')
        if oparct != clparct:
            # BUGFIX: the Python 2-only `raise E, msg` statement was a
            # syntax error under Python 3; the call form works in both.
            raise PyBoolReException('Mismatched parantheses!')
        self.__parse(boolstr)
        # if NOT is one of the members, reverse
        # the list
        # print self.__rexplist
        if '!' in self.__rexplist:
            self.__rexplist.reverse()
        s = self.__makerexp(self.__rexplist)
        # print s
        self.__rexp = re.compile(s)

    def match(self, data):
        """ Match the boolean expression, behaviour
        is same as the 'match' method of re """
        return self.__rexp.match(data)

    def search(self, data):
        """ Search the boolean expression, behaviour
        is same as the 'search' method of re """
        return self.__rexp.search(data)

    def __parse(self, s):
        """ Parse the boolean regular expression string
        and create the regexp list """
        # The string is a nested parantheses with
        # any character in between the parens.
        scopy = s[:]
        oparmatch, clparmatch = False, False
        # Peel off the rightmost parenthesised group and recurse on the
        # remainder. (An unused local list was removed here.)
        index = scopy.rfind('(')
        if index != -1:
            oparmatch = True
            index2 = scopy.find(')', index)
            if index2 != -1:
                clparmatch = True
                newstr = scopy[index + 1:index2]
                # if the string is only of whitespace chars, skip it
                if not self._wspre.match(newstr):
                    self.__rexplist.append(newstr)
                replacestr = '(' + newstr + ')'
                scopy = scopy.replace(replacestr, '')
                self.__parse(scopy)
        if not clparmatch and not oparmatch:
            if scopy: self.__rexplist.append(scopy)

    def is_inbetween(self, l, elem):
        """ Find out if an element is in between
        in a list """
        # NOTE(review): list.index raises ValueError rather than returning
        # -1, so the first branch below is unreachable; preserved as-is to
        # keep behaviour identical.
        index = l.index(elem)
        if index == -1:
            return False
        if index > 2:
            if index in range(1, len(l) - 1):
                return True
            else:
                return False
        else:
            return True

    def __makenotexpr(self, s):
        """ Make a NOT expression: '!word' becomes the negative
        lookahead '(?!word)'. """
        if s.find('!') == 0:
            return ''.join(('(?!', s[1:], ')'))
        else:
            return s

    def __makerexp(self, rexplist):
        """ Make the regular expression string for
        the boolean match from the nested list """
        is_list = True
        if type(rexplist) is str:
            is_list = False
            elem = rexplist
        elif type(rexplist) is list:
            elem = rexplist[0]
            if type(elem) is list:
                elem = elem[0]
        # 'end of recursion' flag: nothing left after this element.
        eor = False
        if not is_list or len(rexplist) == 1:
            eor = True
        word_str = '.*'
        s = ''
        # Implementing NOT
        if elem == '!':
            return ''.join(('(?!', self.__makerexp(rexplist[1:]), ')'))
        # Implementing OR
        elif elem.find(' | ') != -1:
            listofors = elem.split(' | ')
            for o in listofors:
                index = listofors.index(o)
                in_bet = self.is_inbetween(listofors, o)
                if o:
                    o = self.__makenotexpr(o)
                    if in_bet:
                        s = ''.join((s, '|', word_str, o, '.*'))
                    else:
                        s = ''.join((s, word_str, o, '.*'))
        # Implementing AND
        elif elem.find(' & ') != -1:
            listofands = elem.split(' & ')
            for a in listofands:
                index = listofands.index(a)
                in_bet = self.is_inbetween(listofands, a)
                if a:
                    a = self.__makenotexpr(a)
                    s = ''.join((s, word_str, a, '.*'))
        else:
            if elem:
                elem = self.__makenotexpr(elem)
                s = ''.join((elem, '.*'))
        if eor:
            return s
        else:
            return ''.join((s, self.__makerexp(rexplist[1:])))
if __name__=="__main__":
p = PyBoolRe('(!Guido)')
s1 = 'Guido invented Python and Larry invented Perl'
s2 = 'Larry invented Perl, not Python'
if p.match(s1):
print 'Match found for first string'
else:
print 'No match found for first string'
if p.match(s2):
print 'Match found for second string'
else:
print 'No match found for second string'
| |
#! /usr/bin/python3
import os
import sys
import json
import getpass
from modularitea.atom import Atom
from modularitea.progress_adapter import FetchProgressAdapter, InstallProgressAdapter
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Vte, GLib
import platform
import apt, apt_pkg
# PREFIX = "/opt/"
PREFIX = "/home/mnirfan/Projects/"
sys.path.append(PREFIX+"modularitea/")
user = 'mnirfan'
home = "/home/"+user+"/"
USER_MODULE_DIR = '/home/' + user + '/.modulaitea/modules/'
print("USER_MODULE_DIR", USER_MODULE_DIR)
# SYS_MODULE_DIR = '/opt/modularitea/modules/'
# USER_MODULE_DIR = PREFIX + 'modularitea/modules/'
SYS_MODULE_DIR = '/usr/share/modularitea/modules/'
print("SYS_MODULE_DIR", SYS_MODULE_DIR)
ARCH = 32
if platform.architecture()[0] == '64bit':
ARCH = 64
class Module:
    """A modularitea module: a named bundle of Atom packages installed
    either through APT (optionally after adding PPAs) or by downloading
    and extracting HTTP archives.

    Progress is reported through the Gtk widgets handed to the
    constructor (progress bar, action label, Vte terminal, expander).
    """

    module = None
    progressbar = None

    def __init__(self, module_name, progressbar, action_label, terminal, expander):
        self.terminal = terminal
        self.progressbar = progressbar
        self.action_label = action_label
        self.expander = expander
        # BUGFIX: these used to be mutable class attributes, so every
        # Module instance shared (and kept appending to) the same lists.
        self.apt_atoms = []
        self.ppas = []
        self.http_atoms = []
        if os.path.exists(USER_MODULE_DIR + module_name):
            with open(USER_MODULE_DIR + module_name + '/package.json') as data:
                self.module = json.load(data)
            print("module", module_name, "found in user dir")
        elif os.path.exists(SYS_MODULE_DIR + module_name):
            with open(SYS_MODULE_DIR + module_name + '/package.json') as data:
                self.module = json.load(data)
            print("module", module_name, "found in sys dir")
        else:
            print('Modul ' + module_name + " doesn't exist")
            # BUGFIX: continuing with self.module = None used to crash a few
            # lines below with a TypeError; fail fast instead.
            raise FileNotFoundError(module_name)
        # Sort each declared atom into the APT or HTTP-archive bucket.
        for atom in self.module['package']['atoms']:
            atom_temp = Atom(atom)
            print("found ", atom_temp.get_name())
            print(atom_temp.object['package']['preferred_source'])
            if atom_temp.object['package']['preferred_source'] == 'ubuntu_apt':
                self.apt_atoms.append(atom_temp)
                if "ppa" in atom_temp.object['package']['source']['ubuntu_apt']:
                    self.ppas.append(atom_temp.object['package']['source']['ubuntu_apt']['ppa'])
            elif atom_temp.object['package']['preferred_source'] == 'http_archive':
                self.http_atoms.append(atom_temp)
            else:
                raise AttributeError
        print('APT :', self.apt_atoms)
        print('Download :', self.http_atoms)
        print('PPA :', self.ppas)
        # BUGFIX: the old two-step mkdir crashed with FileExistsError when
        # ~/.modularitea existed but the download subdirectory did not.
        os.makedirs(home + ".modularitea/download", exist_ok=True)
        self.downloaded = 0
        self.download_needed = self.get_download_size()

    def add_ppas(self):
        """Register each PPA via apt-add-repository in the Vte terminal,
        then refresh the APT cache."""
        for ppa in self.ppas:
            self.action_label.set_label("Menambahkan " + ppa)
            p = self.terminal.spawn_sync(
                Vte.PtyFlags.DEFAULT,
                "~",
                ['/usr/bin/apt-add-repository', '-y', ppa],
                [],
                GLib.SpawnFlags.DO_NOT_REAP_CHILD,
                None,
                None
            )
            if p[0] == True:
                print(ppa, "added")
            else:
                print(ppa, "failed")
                print(p[1])
        c = apt.Cache()
        self.action_label.set_label("updating software list")
        c.update()
        self.action_label.set_label("")

    def download_apt(self, parent):
        """Fetch (but do not install) the APT archives for this module."""
        fprogress = FetchProgressAdapter(
            self.progressbar,
            self.action_label,
            parent
        )
        c = apt.Cache()
        for package in self.apt_atoms:
            c[package.get_apt_package_name()].mark_install()
        c.fetch_archives(fprogress)

    def install_apt(self, parent):
        """Install the module's APT packages, reporting fetch and install
        progress through the Gtk widgets."""
        iprogress = InstallProgressAdapter(
            self.progressbar,
            self.terminal,
            self.action_label,
            self.expander
        )
        fprogress = FetchProgressAdapter(
            self.progressbar,
            self.action_label,
            parent
        )
        c = apt.Cache()
        self.action_label.set_label("updating software list")
        for package in self.apt_atoms:
            c[package.get_apt_package_name()].mark_install()
        c.commit(fetch_progress=fprogress, install_progress=iprogress)

    def download_archive(self):
        """Download each HTTP-archive atom into ~/.modularitea/download,
        resuming partial downloads where possible."""
        from resumable import urlretrieve
        for archive in self.http_atoms:
            print(archive.get_url(ARCH))
            urlretrieve(
                archive.get_url(ARCH),
                self._archive_path(archive),
                self._report_hook
            )

    def _archive_path(self, atom):
        """Local path an atom's archive is downloaded to (URL basename)."""
        return home + ".modularitea/download/" + atom.get_url(ARCH).split('/')[-1]

    def _report_hook(self, bytes_so_far, chunk_size, total_size):
        # NOTE(review): assumes the hook receives (chunk_count, chunk_size,
        # total_size), hence the multiplication -- confirm against the
        # `resumable` module's callback contract.
        downloaded = bytes_so_far * chunk_size
        self.progressbar.set_fraction(downloaded / total_size)
        self.progressbar.set_text(
            apt_pkg.size_to_str(self.downloaded + downloaded) + "B of " +
            apt_pkg.size_to_str(self.download_needed) + "B"
        )

    def install_archives(self):
        """Extract every downloaded archive into its install directory."""
        import subprocess
        for atom in self.http_atoms:
            # BUGFIX: this previously pointed at "...downloadNAME" -- it was
            # missing the '/' separator and used the atom name instead of
            # the URL basename that download_archive() actually saved.
            p = subprocess.Popen(
                ["/usr/bin/file-roller",
                 "-e",
                 atom.get_archive_install_dir(),
                 self._archive_path(atom)],
            )
            p.communicate()

    def get_download_size(self):
        """Total bytes to download: Content-Length of every HTTP archive
        plus APT's required download size for the marked packages."""
        from urllib import request
        total_size = 0
        for package in self.http_atoms:
            r = request.urlopen(package.get_url(ARCH))
            total_size += int(r.info()['Content-Length'])
            print(package.get_name(), int(r.info()['Content-Length']))
        # Marks installs on a throwaway cache purely to query the size.
        c = apt.Cache()
        for package in self.apt_atoms:
            c[package.get_apt_package_name()].mark_install()
        total_size += c.required_download
        return total_size
if __name__ == "__main__":
from gi.repository import Gio, GObject, GLib
def update_cache(job, cancellable, data):
import apt
cache = apt.Cache()
cache["glade"].mark_install()
cache.commit(data[0], data[1])
def add_ppa(job, cancellable, data):
module = Module("coba", data[0], data[1], data[2], data[3])
module.install_apt(None)
def download_archive(job, cancellable, data):
module = Module("coba", data[0], data[1], data[2], data[3])
module.download_archive()
GObject.threads_init()
window = Gtk.Window(title="Test adapter")
box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
window.add(box)
label = Gtk.Label(label="Empty label")
box.pack_start(label, 1, 1, 10)
progressbar = Gtk.ProgressBar()
progressbar.set_show_text(True)
box.pack_start(progressbar, 1, 1, 10)
vte = Vte.Terminal()
vte.set_size_request(800, 250)
expander = Gtk.Expander(label="tampilkan")
expander.add(vte)
box.pack_start(expander, 1, 1, 10)
window.connect("delete-event", Gtk.main_quit)
window.show_all()
# fprogress = FetchProgressAdapter(progressbar, label, window)
# iprogress = InstallProgressAdapter(progressbar, vte, label, expander)
Gio.io_scheduler_push_job(download_archive,
(progressbar, label, vte, expander),
GLib.PRIORITY_DEFAULT,
None)
Gtk.main()
| |
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# This file is copied and adapted from
# https://github.com/ludwig-ai/ludwig/blob/master/tests/integration_tests/utils.py
import multiprocessing
import os
import random
import shutil
import sys
import traceback
import unittest
import uuid
from distutils.util import strtobool
import cloudpickle
import numpy as np
import pandas as pd
from ludwig.api import LudwigModel
from ludwig.backend import LocalBackend
from ludwig.constants import VECTOR, COLUMN, NAME, PROC_COLUMN
from ludwig.data.dataset_synthesizer import DATETIME_FORMATS
from ludwig.data.dataset_synthesizer import build_synthetic_dataset
from ludwig.experiment import experiment_cli
from ludwig.features.feature_utils import compute_feature_hash
from ludwig.utils.data_utils import read_csv, replace_file_extension
# Non-pretrained sequence/text encoder names exercised by integration tests.
ENCODERS = [
    "embed",
    "rnn",
    "parallel_cnn",
    "cnnrnn",
    "stacked_parallel_cnn",
    "stacked_cnn",
    "transformer",
]
# Small HuggingFace subset for quick test runs.
HF_ENCODERS_SHORT = ["distilbert"]
# Full set of HuggingFace text encoders covered by the slow tests.
HF_ENCODERS = [
    "bert",
    "gpt",
    "gpt2",
    # "transformer_xl",
    "xlnet",
    "xlm",
    "roberta",
    "distilbert",
    "ctrl",
    "camembert",
    "albert",
    "t5",
    "xlmroberta",
    "longformer",
    "flaubert",
    "electra",
    "mt5",
]
class LocalTestBackend(LocalBackend):
    """Local Ludwig backend with multiprocessing disabled for tests."""
    @property
    def supports_multiprocessing(self):
        # Force single-process execution so tests stay simple/deterministic.
        return False
def parse_flag_from_env(key, default=False):
    """Read boolean flag *key* from the environment.

    Returns *default* when the variable is unset; otherwise interprets the
    value with distutils.util.strtobool (yes/no, true/false, 1/0, ...).
    """
    value = os.environ.get(key)
    if value is None:
        # KEY isn't set, fall back to `default`.
        return default
    # KEY is set, convert it to True or False.
    try:
        return strtobool(value)
    except ValueError:
        # More values are supported, but let's keep the message simple.
        raise ValueError("If set, {} must be yes or no.".format(key))
_run_slow_tests = parse_flag_from_env("RUN_SLOW", default=False)
def slow(test_case):
    """
    Decorator marking a test as slow.

    Slow tests are skipped by default. Set the RUN_SLOW environment variable
    to a truth value to run them.
    """
    if _run_slow_tests:
        return test_case
    return unittest.skip("Skipping: this test is too slow")(test_case)
def generate_data(
    input_features,
    output_features,
    filename="test_csv.csv",
    num_examples=25,
):
    """
    Helper method to generate synthetic data based on input, output feature
    specs

    :param num_examples: number of examples to generate
    :param input_features: schema
    :param output_features: schema
    :param filename: path to the file where data is stored
    :return: the filename the CSV was written to
    """
    features = input_features + output_features
    df = build_synthetic_dataset(num_examples, features)
    # BUGFIX: the generator yields the header row first, so num_examples + 1
    # rows must be drawn to get num_examples data rows; the previous
    # range(num_examples) silently produced one example too few.
    data = [next(df) for _ in range(num_examples + 1)]
    dataframe = pd.DataFrame(data[1:], columns=data[0])
    dataframe.to_csv(filename, index=False)
    return filename
def random_string(length=5):
    """Return *length* uppercase hex characters drawn from a random UUID."""
    return uuid.uuid4().hex.upper()[:length]
def numerical_feature(normalization=None, **kwargs):
    """Synthesize a numerical feature config; **kwargs override defaults."""
    feature = dict(
        name="num_" + random_string(),
        type="numerical",
        preprocessing={"normalization": normalization},
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def category_feature(**kwargs):
    """Synthesize a category feature config; **kwargs override defaults."""
    feature = dict(
        type="category",
        name="category_" + random_string(),
        vocab_size=10,
        embedding_size=5,
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def text_feature(**kwargs):
    """Synthesize a text feature config; **kwargs override defaults."""
    feature = dict(
        name="text_" + random_string(),
        type="text",
        reduce_input=None,
        vocab_size=5,
        min_len=7,
        max_len=7,
        embedding_size=8,
        state_size=8,
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def set_feature(**kwargs):
    """Synthesize a set feature config; **kwargs override defaults."""
    feature = dict(
        type="set",
        name="set_" + random_string(),
        vocab_size=10,
        max_len=5,
        embedding_size=5,
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def sequence_feature(**kwargs):
    """Synthesize a sequence feature config; **kwargs override defaults."""
    feature = dict(
        type="sequence",
        name="sequence_" + random_string(),
        vocab_size=10,
        max_len=7,
        encoder="embed",
        embedding_size=8,
        fc_size=8,
        state_size=8,
        num_filters=8,
        hidden_size=8,
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def image_feature(folder, **kwargs):
    """Synthesize an image feature config rooted at *folder*."""
    feature = dict(
        type="image",
        name="image_" + random_string(),
        encoder="resnet",
        preprocessing={
            "in_memory": True,
            "height": 12,
            "width": 12,
            "num_channels": 3,
        },
        resnet_size=8,
        destination_folder=folder,
        fc_size=8,
        num_filters=8,
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def audio_feature(folder, **kwargs):
    """Synthesize an audio feature config rooted at *folder*."""
    feature = dict(
        name="audio_" + random_string(),
        type="audio",
        preprocessing={
            "audio_feature": {
                "type": "fbank",
                "window_length_in_s": 0.04,
                "window_shift_in_s": 0.02,
                "num_filter_bands": 80,
            },
            "audio_file_length_limit_in_s": 3.0,
        },
        encoder="stacked_cnn",
        should_embed=False,
        conv_layers=[
            {
                "filter_size": 400,
                "pool_size": 16,
                "num_filters": 32,
                "regularize": "false",
            },
            {
                "filter_size": 40,
                "pool_size": 10,
                "num_filters": 64,
                "regularize": "false",
            },
        ],
        fc_size=256,
        destination_folder=folder,
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def timeseries_feature(**kwargs):
    """Synthesize a timeseries feature config; **kwargs override defaults."""
    feature = dict(
        name="timeseries_" + random_string(),
        type="timeseries",
        max_len=7,
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def binary_feature(**kwargs):
    """Synthesize a binary feature config; **kwargs override defaults."""
    feature = dict(name="binary_" + random_string(), type="binary")
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def bag_feature(**kwargs):
    """Synthesize a bag feature config; **kwargs override defaults."""
    feature = dict(
        name="bag_" + random_string(),
        type="bag",
        max_len=5,
        vocab_size=10,
        embedding_size=5,
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def date_feature(**kwargs):
    """Synthesize a date feature config with a randomly chosen datetime
    format; **kwargs override defaults."""
    feature = dict(
        name="date_" + random_string(),
        type="date",
        preprocessing={
            "datetime_format": random.choice(list(DATETIME_FORMATS.keys()))
        },
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def h3_feature(**kwargs):
    """Synthesize an h3 feature config; **kwargs override defaults."""
    feature = dict(name="h3_" + random_string(), type="h3")
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def vector_feature(**kwargs):
    """Synthesize a vector feature config; **kwargs override defaults."""
    feature = dict(
        type=VECTOR,
        vector_size=5,
        name="vector_" + random_string(),
    )
    feature.update(kwargs)
    feature[COLUMN] = feature[NAME]
    feature[PROC_COLUMN] = compute_feature_hash(feature)
    return feature
def run_experiment(
    input_features,
    output_features,
    skip_save_processed_input=True,
    config=None,
    backend=None,
    **kwargs,
):
    """Run one experiment and delete everything it writes to disk.

    If both ``input_features`` and ``output_features`` are given, a minimal
    config is synthesized from them; otherwise the caller's ``config`` (or a
    ``config_file`` passed through ``kwargs``) is used as-is.

    :param input_features: list of input feature dictionaries
    :param output_features: list of output feature dictionaries
    **kwargs you may also pass extra parameters to the experiment as keyword
           arguments (they override the default skip flags below)
    :return: None
    """
    if input_features is not None and output_features is not None:
        # This branch lets callers invoke with config_file instead of config.
        config = {
            "input_features": input_features,
            "output_features": output_features,
            "combiner": {"type": "concat", "fc_size": 14},
            "training": {"epochs": 2},
        }
    cli_kwargs = dict(
        config=config,
        backend=backend or LocalTestBackend(),
        skip_save_training_description=True,
        skip_save_training_statistics=True,
        skip_save_processed_input=skip_save_processed_input,
        skip_save_progress=True,
        skip_save_unprocessed_output=True,
        skip_save_model=True,
        skip_save_predictions=True,
        skip_save_eval_stats=True,
        skip_collect_predictions=True,
        skip_collect_overall_stats=True,
        skip_save_log=True,
    )
    cli_kwargs.update(kwargs)
    exp_dir_name = experiment_cli(**cli_kwargs)[4]
    # Clean up the experiment output directory.
    shutil.rmtree(exp_dir_name, ignore_errors=True)
def generate_output_features_with_dependencies(main_feature, dependencies):
    """Generate three output feature specs and wire dependencies onto one.

    Support for the `test_experiment_multiple_seq_seq` unit test.

    :param main_feature: feature identifier receiving the dependencies;
        valid values 'feat1', 'feat2', 'feat3'
    :param dependencies: list of feature identifiers the main feature should
        depend on; must not include ``main_feature`` itself
    Example:
        generate_output_features_with_dependencies('feat2', ['feat1', 'feat3'])
    """
    features = [
        category_feature(vocab_size=2, reduce_input="sum"),
        sequence_feature(vocab_size=10, max_len=5),
        numerical_feature(),
    ]
    # Map each stable identifier to (position in `features`, generated name).
    lookup = {
        "feat1": (0, features[0]["name"]),
        "feat2": (1, features[1]["name"]),
        "feat3": (2, features[2]["name"]),
    }
    # Resolve identifiers to the Ludwig-generated feature names and attach
    # them to the chosen main feature.
    main_index = lookup[main_feature][0]
    features[main_index]["dependencies"] = [lookup[d][1] for d in dependencies]
    return features
def _subproc_wrapper(fn, queue, *args, **kwargs):
    """Deserialize ``fn``, run it, and push its result (or exception) to ``queue``."""
    target = cloudpickle.loads(fn)
    try:
        outcome = target(*args, **kwargs)
    except Exception as exc:
        # Print the traceback here; the parent only receives the exception object.
        traceback.print_exc(file=sys.stderr)
        outcome = exc
    queue.put(outcome)
def spawn(fn):
    """Decorator: run ``fn`` in a fresh 'spawn' subprocess and return its result.

    Raises RuntimeError if the subprocess raised; the child's traceback is
    printed to stderr by ``_subproc_wrapper``.
    """
    def wrapped_fn(*args, **kwargs):
        ctx = multiprocessing.get_context("spawn")
        result_queue = ctx.Queue()
        proc = ctx.Process(
            target=_subproc_wrapper,
            args=(cloudpickle.dumps(fn), result_queue, *args),
            kwargs=kwargs,
        )
        proc.start()
        proc.join()
        outcome = result_queue.get()
        if isinstance(outcome, Exception):
            raise RuntimeError(
                f"Spawned subprocess raised {type(outcome).__name__}, "
                f"check log output above for stack trace."
            )
        return outcome
    return wrapped_fn
def run_api_experiment(input_features, output_features, data_csv):
    """
    Helper method to avoid code repetition in running an experiment.

    Trains once from the csv path (verifying save/load round-trips the
    weights), then once from a DataFrame; deletes all output directories.

    :param input_features: input schema
    :param output_features: output schema
    :param data_csv: path to data
    :return: None
    """
    config = {
        "input_features": input_features,
        "output_features": output_features,
        "combiner": {"type": "concat", "fc_size": 14},
        "training": {"epochs": 2},
    }
    model = LudwigModel(config)
    output_dir = None
    try:
        # Training with csv
        _, _, output_dir = model.train(
            dataset=data_csv,
            skip_save_processed_input=True,
            skip_save_progress=True,
            skip_save_unprocessed_output=True,
        )
        model.predict(dataset=data_csv)
        model_dir = os.path.join(output_dir, "model")
        loaded_model = LudwigModel.load(model_dir)
        # Necessary before call to get_weights() to materialize the weights
        loaded_model.predict(dataset=data_csv)
        model_weights = model.model.get_weights()
        loaded_weights = loaded_model.model.get_weights()
        for model_weight, loaded_weight in zip(model_weights, loaded_weights):
            assert np.allclose(model_weight, loaded_weight)
    finally:
        # Remove results/intermediate data saved to disk.
        # BUG FIX: if model.train() raised before assigning output_dir,
        # shutil.rmtree(None) raises TypeError (ignore_errors only suppresses
        # OSError), masking the original exception — so guard against None.
        if output_dir is not None:
            shutil.rmtree(output_dir, ignore_errors=True)
    # Reset so a failure in the second train doesn't re-delete the stale path.
    output_dir = None
    try:
        # Training with dataframe
        data_df = read_csv(data_csv)
        _, _, output_dir = model.train(
            dataset=data_df,
            skip_save_processed_input=True,
            skip_save_progress=True,
            skip_save_unprocessed_output=True,
        )
        model.predict(dataset=data_df)
    finally:
        if output_dir is not None:
            shutil.rmtree(output_dir, ignore_errors=True)
def create_data_set_to_use(data_format, raw_data):
    """Convert a generated csv dataset to the requested format.

    Handles all data formats except hdf5. Assumes ``raw_data`` is a csv
    dataset generated by the tests.integration_tests.utils.generate_data()
    function.

    :param data_format: target format name ('csv', 'df', 'dict', 'excel', ...)
    :param raw_data: path to the source csv file
    :return: the converted dataset — a path for file formats, a DataFrame
        for 'df', or a dict for 'dict'
    :raises ValueError: if ``data_format`` is not recognized
    """
    # support for writing to a fwf dataset based on this stackoverflow posting:
    # https://stackoverflow.com/questions/16490261/python-pandas-write-dataframe-to-fixed-width-file-to-fwf
    from tabulate import tabulate

    def to_fwf(df, fname):
        content = tabulate(df.values.tolist(), list(df.columns), tablefmt="plain")
        # FIX: use a context manager so the file handle is always closed
        # (the original left the handle to be closed by the GC).
        with open(fname, "w") as f:
            f.write(content)

    pd.DataFrame.to_fwf = to_fwf
    dataset_to_use = None
    if data_format == "csv":
        dataset_to_use = raw_data
    elif data_format in {"df", "dict"}:
        dataset_to_use = pd.read_csv(raw_data)
        if data_format == "dict":
            dataset_to_use = dataset_to_use.to_dict(orient="list")
    elif data_format == "excel":
        dataset_to_use = replace_file_extension(raw_data, "xlsx")
        pd.read_csv(raw_data).to_excel(dataset_to_use, index=False)
    elif data_format == "excel_xls":
        dataset_to_use = replace_file_extension(raw_data, "xls")
        pd.read_csv(raw_data).to_excel(dataset_to_use, index=False)
    elif data_format == "feather":
        dataset_to_use = replace_file_extension(raw_data, "feather")
        pd.read_csv(raw_data).to_feather(dataset_to_use)
    elif data_format == "fwf":
        dataset_to_use = replace_file_extension(raw_data, "fwf")
        pd.read_csv(raw_data).to_fwf(dataset_to_use)
    elif data_format == "html":
        dataset_to_use = replace_file_extension(raw_data, "html")
        pd.read_csv(raw_data).to_html(dataset_to_use, index=False)
    elif data_format == "json":
        dataset_to_use = replace_file_extension(raw_data, "json")
        pd.read_csv(raw_data).to_json(dataset_to_use, orient="records")
    elif data_format == "jsonl":
        dataset_to_use = replace_file_extension(raw_data, "jsonl")
        pd.read_csv(raw_data).to_json(dataset_to_use, orient="records", lines=True)
    elif data_format == "parquet":
        dataset_to_use = replace_file_extension(raw_data, "parquet")
        pd.read_csv(raw_data).to_parquet(dataset_to_use, index=False)
    elif data_format == "pickle":
        dataset_to_use = replace_file_extension(raw_data, "pickle")
        pd.read_csv(raw_data).to_pickle(dataset_to_use)
    elif data_format == "stata":
        dataset_to_use = replace_file_extension(raw_data, "stata")
        pd.read_csv(raw_data).to_stata(dataset_to_use)
    elif data_format == "tsv":
        dataset_to_use = replace_file_extension(raw_data, "tsv")
        pd.read_csv(raw_data).to_csv(dataset_to_use, sep="\t", index=False)
    else:
        # BUG FIX: the original constructed this ValueError but never raised
        # it, silently returning None for unrecognized formats.
        raise ValueError("'{}' is an unrecognized data format".format(data_format))
    return dataset_to_use
def train_with_backend(
    backend,
    config,
    dataset=None,
    training_set=None,
    validation_set=None,
    test_set=None,
    predict=True,
    evaluate=True,
):
    """Train a model on ``backend``, optionally predict/evaluate on it,
    return the trained weights, and remove everything written to disk."""
    model = LudwigModel(config, backend=backend)
    output_dir = None
    try:
        _, _, output_dir = model.train(
            dataset=dataset,
            training_set=training_set,
            validation_set=validation_set,
            test_set=test_set,
            skip_save_processed_input=True,
            skip_save_progress=True,
            skip_save_unprocessed_output=True,
        )
        # Fall back to the training set when no combined dataset was given.
        eval_dataset = training_set if dataset is None else dataset
        if predict:
            preds, _ = model.predict(dataset=eval_dataset)
            assert backend.df_engine.compute(preds) is not None
        if evaluate:
            _, eval_preds, _ = model.evaluate(dataset=eval_dataset)
            assert backend.df_engine.compute(eval_preds) is not None
        return model.model.get_weights()
    finally:
        # Remove results/intermediate data saved to disk
        shutil.rmtree(output_dir, ignore_errors=True)
| |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
import os
import time
import webob.dec
import webob.exc
from nova.api.openstack import common
from nova.api.openstack import wsgi
from nova import auth
from nova import context
from nova import exception
from nova import flags
from nova import log as logging
from nova import utils
from nova import wsgi as base_wsgi
# Module-level logger and flag registry.
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
# Pull in the flag defined in nova.api.auth so FLAGS.use_forwarded_for is
# resolvable in the middleware below.
flags.DECLARE('use_forwarded_for', 'nova.api.auth')
class NoAuthMiddleware(base_wsgi.Middleware):
    """Return a fake token if one isn't specified."""
    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        # No token supplied: fabricate one from the optional auth headers,
        # defaulting both user and project to 'admin', and return it in a
        # 204 response along with a management URL.
        if 'X-Auth-Token' not in req.headers:
            user_id = req.headers.get('X-Auth-User', 'admin')
            project_id = req.headers.get('X-Auth-Project-Id', 'admin')
            os_url = os.path.join(req.url, project_id)
            res = webob.Response()
            # NOTE(vish): This is expecting and returning Auth(1.1), whereas
            # keystone uses 2.0 auth. We should probably allow
            # 2.0 auth here as well.
            res.headers['X-Auth-Token'] = '%s:%s' % (user_id, project_id)
            res.headers['X-Server-Management-Url'] = os_url
            res.content_type = 'text/plain'
            res.status = '204'
            return res
        # Token present: the fake token format is 'user:project'; if no
        # project part is present, reuse the user id as the project id.
        token = req.headers['X-Auth-Token']
        user_id, _sep, project_id = token.partition(':')
        project_id = project_id or user_id
        remote_address = getattr(req, 'remote_address', '127.0.0.1')
        if FLAGS.use_forwarded_for:
            remote_address = req.headers.get('X-Forwarded-For', remote_address)
        # Every request is treated as admin in no-auth mode.
        ctx = context.RequestContext(user_id,
                                     project_id,
                                     is_admin=True,
                                     remote_address=remote_address)
        req.environ['nova.context'] = ctx
        return self.application
class AuthMiddleware(base_wsgi.Middleware):
    """Authorize the openstack API request or return an HTTP Forbidden."""
    def __init__(self, application, db_driver=None):
        # db_driver defaults to the FLAGS.db_driver setting; the driver is
        # used for token storage, the AuthManager for users/projects.
        if not db_driver:
            db_driver = FLAGS.db_driver
        self.db = utils.import_object(db_driver)
        self.auth = auth.manager.AuthManager()
        super(AuthMiddleware, self).__init__(application)
    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        # Requests without a token are treated as authentication attempts.
        if not self.has_authentication(req):
            return self.authenticate(req)
        user_id = self.get_user_by_authentication(req)
        if not user_id:
            token = req.headers["X-Auth-Token"]
            msg = _("%(user_id)s could not be found with token '%(token)s'")
            LOG.warn(msg % locals())
            return wsgi.Fault(webob.exc.HTTPUnauthorized())
        # Get all valid projects for the user
        projects = self.auth.get_projects(user_id)
        if not projects:
            return wsgi.Fault(webob.exc.HTTPUnauthorized())
        project_id = ""
        path_parts = req.path.split('/')
        # TODO(wwolf): this v1.1 check will be temporary as
        # keystone should be taking this over at some point
        if len(path_parts) > 1 and path_parts[1] in ('v1.1', 'v2'):
            # v1.1/v2 URLs embed the project id in the path.
            project_id = path_parts[2]
            # Check that the project for project_id exists, and that user
            # is authorized to use it
            try:
                self.auth.get_project(project_id)
            except exception.ProjectNotFound:
                return wsgi.Fault(webob.exc.HTTPUnauthorized())
            if project_id not in [p.id for p in projects]:
                return wsgi.Fault(webob.exc.HTTPUnauthorized())
        else:
            # As a fallback, set project_id from the headers, which is the v1.0
            # behavior. As a last resort, be forgiving to the user and set
            # project_id based on a valid project of theirs.
            try:
                project_id = req.headers["X-Auth-Project-Id"]
            except KeyError:
                project_id = projects[0].id
        is_admin = self.auth.is_admin(user_id)
        remote_address = getattr(req, 'remote_address', '127.0.0.1')
        if FLAGS.use_forwarded_for:
            remote_address = req.headers.get('X-Forwarded-For', remote_address)
        ctx = context.RequestContext(user_id,
                                     project_id,
                                     is_admin=is_admin,
                                     remote_address=remote_address)
        req.environ['nova.context'] = ctx
        # Non-admin users must be members of the project they act in.
        if not is_admin and not self.auth.is_project_member(user_id,
                                                            project_id):
            msg = _("%(user_id)s must be an admin or a "
                    "member of %(project_id)s")
            LOG.warn(msg % locals())
            return wsgi.Fault(webob.exc.HTTPUnauthorized())
        return self.application
    def has_authentication(self, req):
        # A request counts as authenticated iff it carries an X-Auth-Token.
        return 'X-Auth-Token' in req.headers
    def get_user_by_authentication(self, req):
        # Resolve the token to a user id; None if expired or unknown.
        return self.authorize_token(req.headers["X-Auth-Token"])
    def authenticate(self, req):
        # Issue a new token for X-Auth-User / X-Auth-Key credentials.
        # Unless the request is explicitly made against /<version>/ don't
        # honor it
        path_info = req.path_info
        if len(path_info) > 1:
            msg = _("Authentication requests must be made against a version "
                    "root (e.g. /v2).")
            LOG.warn(msg)
            return wsgi.Fault(webob.exc.HTTPUnauthorized(explanation=msg))
        def _get_auth_header(key):
            """Ensures that the KeyError returned is meaningful."""
            try:
                return req.headers[key]
            except KeyError as ex:
                raise KeyError(key)
        try:
            username = _get_auth_header('X-Auth-User')
            key = _get_auth_header('X-Auth-Key')
        except KeyError as ex:
            msg = _("Could not find %s in request.") % ex
            LOG.warn(msg)
            return wsgi.Fault(webob.exc.HTTPUnauthorized(explanation=msg))
        # NOTE(review): _authorize_user can implicitly return None when the
        # key is invalid (see below), which would make this 2-tuple unpack
        # raise TypeError — confirm whether that path is reachable.
        token, user = self._authorize_user(username, key, req)
        if user and token:
            res = webob.Response()
            res.headers['X-Auth-Token'] = token['token_hash']
            res.headers['X-Server-Management-Url'] = \
                token['server_management_url']
            if token['storage_url']:
                res.headers['X-Storage-Url'] = token['storage_url']
            if token['cdn_management_url']:
                res.headers['X-CDN-Management-Url'] = \
                    token['cdn_management_url']
            res.content_type = 'text/plain'
            res.status = '204'
            LOG.debug(_("Successfully authenticated '%s'") % username)
            return res
        else:
            return wsgi.Fault(webob.exc.HTTPUnauthorized())
    def authorize_token(self, token_hash):
        """ retrieves user information from the datastore given a token
        If the token has expired, returns None
        If the token is not found, returns None
        Otherwise returns the authorized user's id
        This method will also remove the token if the timestamp is older than
        2 days ago.
        """
        ctxt = context.get_admin_context()
        try:
            token = self.db.auth_token_get(ctxt, token_hash)
        except exception.NotFound:
            return None
        if token:
            delta = utils.utcnow() - token['created_at']
            if delta.days >= 2:
                # Stale token: purge it and treat the request as anonymous.
                self.db.auth_token_destroy(ctxt, token['token_hash'])
            else:
                return token['user_id']
        return None
    def _authorize_user(self, username, key, req):
        """Generates a new token and assigns it to a user.
        username - string
        key - string API key
        req - wsgi.Request object
        Returns (token, user) on success, (None, None) on a key/username
        mismatch.
        """
        ctxt = context.get_admin_context()
        project_id = req.headers.get('X-Auth-Project-Id')
        if project_id is None:
            # If the project_id is not provided in the headers, be forgiving to
            # the user and set project_id based on a valid project of theirs.
            # NOTE(review): unlike the lookup below, this call is NOT wrapped
            # in try/except, so an unknown key raises exception.NotFound here
            # instead of returning (None, None) — confirm this is intended.
            user = self.auth.get_user_from_access_key(key)
            projects = self.auth.get_projects(user.id)
            if not projects:
                raise webob.exc.HTTPUnauthorized()
            project_id = projects[0].id
        try:
            user = self.auth.get_user_from_access_key(key)
        except exception.NotFound:
            LOG.warn(_("User not found with provided API key."))
            user = None
        if user and user.name == username:
            # Token hash mixes the credentials with the current time so each
            # successful authentication yields a unique token.
            token_hash = hashlib.sha1('%s%s%f' % (username, key,
                                                  time.time())).hexdigest()
            token_dict = {}
            token_dict['token_hash'] = token_hash
            token_dict['cdn_management_url'] = ''
            os_url = req.url.strip('/')
            os_url += '/' + project_id
            token_dict['server_management_url'] = os_url
            token_dict['storage_url'] = ''
            token_dict['user_id'] = user.id
            token = self.db.auth_token_create(ctxt, token_dict)
            return token, user
        elif user and user.name != username:
            msg = _("Provided API key is valid, but not for user "
                    "'%(username)s'") % locals()
            LOG.warn(msg)
            return None, None
        # NOTE(review): when user is None (invalid key), control falls off
        # the end and the method implicitly returns None, not (None, None).
| |
import logging
import time
from parsl.providers.kubernetes.template import template_string
logger = logging.getLogger(__name__)
from parsl.providers.error import *
from parsl.providers.provider_base import ExecutionProvider
from parsl.utils import RepresentationMixin
try:
from kubernetes import client, config
_kubernetes_enabled = True
except (ImportError, NameError, FileNotFoundError):
_kubernetes_enabled = False
class KubernetesProvider(ExecutionProvider, RepresentationMixin):
    """ Kubernetes execution provider
    Parameters
    ----------
    namespace : str
        Kubernetes namespace to create deployments.
    image : str
        Docker image to use in the deployment.
    channel : Channel
        Channel for accessing this provider. Possible channels include
        :class:`~parsl.channels.LocalChannel` (the default),
        :class:`~parsl.channels.SSHChannel`, or
        :class:`~parsl.channels.SSHInteractiveLoginChannel`.
    nodes_per_block : int
        Nodes to provision per block.
    init_blocks : int
        Number of blocks to provision at the start of the run. Default is 1.
    min_blocks : int
        Minimum number of blocks to maintain.
    max_blocks : int
        Maximum number of blocks to maintain.
    parallelism : float
        Ratio of provisioned task slots to active tasks. A parallelism value of 1 represents aggressive
        scaling where as many resources as possible are used; parallelism close to 0 represents
        the opposite situation in which as few resources as possible (i.e., min_blocks) are used.
    worker_init : str
        Command to be run first for the workers, such as `python start.py`.
    secret : str
        Docker secret to use to pull images
    user_id : str
        Unix user id to run the container as.
    group_id : str
        Unix group id to run the container as.
    run_as_non_root : bool
        Run as non-root (True) or run as root (False).
    persistent_volumes: list[(str, str)]
        List of tuples describing persistent volumes to be mounted in the pod.
        The tuples consist of (PVC Name, Mount Directory).
    """
    def __init__(self,
                 image,
                 namespace='default',
                 channel=None,
                 nodes_per_block=1,
                 init_blocks=4,
                 min_blocks=0,
                 max_blocks=10,
                 parallelism=1,
                 worker_init="",
                 user_id=None,
                 group_id=None,
                 run_as_non_root=False,
                 secret=None,
                 persistent_volumes=[]):
        # NOTE(review): persistent_volumes=[] is a mutable default argument,
        # shared across instances that omit it — confirm no caller mutates it.
        if not _kubernetes_enabled:
            raise OptionalModuleMissing(['kubernetes'],
                                        "Kubernetes provider requires kubernetes module and config.")
        # Load cluster credentials from the local kube config.
        config.load_kube_config()
        self.namespace = namespace
        self.image = image
        self.channel = channel
        self.nodes_per_block = nodes_per_block
        self.init_blocks = init_blocks
        self.min_blocks = min_blocks
        self.max_blocks = max_blocks
        self.parallelism = parallelism
        self.worker_init = worker_init
        self.secret = secret
        self.user_id = user_id
        self.group_id = group_id
        self.run_as_non_root = run_as_non_root
        self.persistent_volumes = persistent_volumes
        self.kube_client = client.ExtensionsV1beta1Api()
        # Dictionary that keeps track of jobs, keyed on job_id
        self.resources = {}
    def submit(self, cmd_string, blocksize, tasks_per_node, job_name="parsl.auto"):
        """ Submit a job
        Args:
             - cmd_string  :(String) - Name of the container to initiate
             - blocksize   :(float) - Number of replicas
             - tasks_per_node (int) : command invocations to be launched per node
        Kwargs:
             - job_name (String): Name for job, must be unique
        Returns:
             - None: At capacity, cannot provision more
             - job_id: (string) Identifier for the job
        """
        # NOTE(review): blocksize and tasks_per_node are not used below; the
        # replica count comes from self.init_blocks — confirm intended.
        # A deployment is only created on the first submit (while
        # self.resources is empty); later submits return the existing name.
        if not self.resources:
            job_name = "{0}-{1}".format(job_name, time.time()).split(".")[0]
            self.deployment_name = '{}-{}-deployment'.format(job_name,
                                                             str(time.time()).split('.')[0])
            formatted_cmd = template_string.format(command=cmd_string,
                                                   worker_init=self.worker_init)
            print("Creating replicas :", self.init_blocks)
            self.deployment_obj = self._create_deployment_object(job_name,
                                                                 self.image,
                                                                 self.deployment_name,
                                                                 cmd_string=formatted_cmd,
                                                                 replicas=self.init_blocks,
                                                                 volumes=self.persistent_volumes)
            logger.debug("Deployment name :{}".format(self.deployment_name))
            self._create_deployment(self.deployment_obj)
            self.resources[self.deployment_name] = {'status': 'RUNNING',
                                                    'pods': self.init_blocks}
        return self.deployment_name
    def status(self, job_ids):
        """ Get the status of a list of jobs identified by the job identifiers
        returned from the submit request.
        Args:
             - job_ids (list) : A list of job identifiers
        Returns:
             - A list of status from ['PENDING', 'RUNNING', 'CANCELLED', 'COMPLETED',
               'FAILED', 'TIMEOUT'] corresponding to each job_id in the job_ids list.
        Raises:
             - ExecutionProviderExceptions or its subclasses
        """
        self._status()
        # This is a hack
        return ['RUNNING' for jid in job_ids]
    def cancel(self, job_ids):
        """ Cancels the jobs specified by a list of job ids
        Args:
        job_ids : [<job_id> ...]
        Returns :
        [True/False...] : If the cancel operation fails the entire list will be False.
        """
        for job in job_ids:
            logger.debug("Terminating job/proc_id: {0}".format(job))
            # Here we are assuming that for local, the job_ids are the process id's
            self._delete_deployment(job)
            self.resources[job]['status'] = 'CANCELLED'
        rets = [True for i in job_ids]
        return rets
    def _status(self):
        """ Internal: Do not call. Returns the status list for a list of job_ids
        Args:
              self
        Returns:
              [status...] : Status list of all jobs
        """
        # NOTE(review): this currently returns the known deployment names,
        # not their statuses (see TODO below).
        jobs_ids = list(self.resources.keys())
        # TODO: fix this
        return jobs_ids
        # do something to get the deployment's status
    def _create_deployment_object(self, job_name, job_image,
                                  deployment_name, port=80,
                                  replicas=1,
                                  cmd_string=None,
                                  engine_json_file='~/.ipython/profile_default/security/ipcontroller-engine.json',
                                  engine_dir='.',
                                  volumes=[]):
        """ Create a kubernetes deployment for the job.
        Args:
              - job_name (string) : Name of the job and deployment
              - job_image (string) : Docker image to launch
        KWargs:
             - port (integer) : Container port
             - replicas : Number of replica containers to maintain
        Returns:
              - True: The deployment object to launch
        """
        # sorry, quick hack that doesn't pass this stuff through to test it works.
        # TODO it also doesn't only add what is set :(
        security_context = None
        if self.user_id and self.group_id:
            security_context = client.V1SecurityContext(run_as_group=self.group_id,
                                                        run_as_user=self.user_id,
                                                        run_as_non_root=self.run_as_non_root)
        # Create the enviornment variables and command to initiate IPP
        environment_vars = client.V1EnvVar(name="TEST", value="SOME DATA")
        launch_args = ["-c", "{0}; /app/deploy.sh;".format(cmd_string)]
        print(launch_args)
        volume_mounts = []
        # Create mount paths for the volumes
        for volume in volumes:
            volume_mounts.append(client.V1VolumeMount(mount_path=volume[1],
                                                      name=volume[0]))
        # Configureate Pod template container
        container = None
        if security_context:
            container = client.V1Container(
                name=job_name,
                image=job_image,
                ports=[client.V1ContainerPort(container_port=port)],
                volume_mounts=volume_mounts,
                command=['/bin/bash'],
                args=launch_args,
                env=[environment_vars],
                security_context=security_context)
        else:
            container = client.V1Container(
                name=job_name,
                image=job_image,
                ports=[client.V1ContainerPort(container_port=port)],
                volume_mounts=volume_mounts,
                command=['/bin/bash'],
                args=launch_args,
                env=[environment_vars])
        # Create a secret to enable pulling images from secure repositories
        secret = None
        if self.secret:
            secret = client.V1LocalObjectReference(name=self.secret)
        # Create list of volumes from (pvc, mount) tuples
        volume_defs = []
        for volume in volumes:
            volume_defs.append(client.V1Volume(name=volume[0],
                                               persistent_volume_claim=client.V1PersistentVolumeClaimVolumeSource(
                                                   claim_name=volume[0])))
        # Create and configurate a spec section
        template = client.V1PodTemplateSpec(
            metadata=client.V1ObjectMeta(labels={"app": job_name}),
            spec=client.V1PodSpec(containers=[container],
                                  image_pull_secrets=[secret],
                                  volumes=volume_defs
                                  ))
        # Create the specification of deployment
        spec = client.ExtensionsV1beta1DeploymentSpec(replicas=replicas,
                                                      template=template)
        # Instantiate the deployment object
        deployment = client.ExtensionsV1beta1Deployment(
            api_version="extensions/v1beta1",
            kind="Deployment",
            metadata=client.V1ObjectMeta(name=deployment_name),
            spec=spec)
        return deployment
    def _create_deployment(self, deployment):
        """ Create the kubernetes deployment """
        api_response = self.kube_client.create_namespaced_deployment(
            body=deployment,
            namespace=self.namespace)
        logger.debug("Deployment created. status='{0}'".format(str(api_response.status)))
    def _delete_deployment(self, deployment_name):
        """ Delete deployment """
        # Foreground propagation deletes dependent pods before the deployment.
        api_response = self.kube_client.delete_namespaced_deployment(
            name=deployment_name,
            namespace=self.namespace,
            body=client.V1DeleteOptions(
                propagation_policy='Foreground',
                grace_period_seconds=5))
        logger.debug("Deployment deleted. status='{0}'".format(
            str(api_response.status)))
    @property
    def scaling_enabled(self):
        # This provider does not support dynamic scaling.
        return False
    @property
    def channels_required(self):
        # No channel is needed; the kubernetes client talks to the API server.
        return False
    @property
    def label(self):
        return "kubernetes"
| |
# mininode.py - Adn P2P network half-a-node
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# This python code was modified from ArtForz' public domain half-a-node, as
# found in the mini-node branch of http://github.com/jgarzik/pynode.
#
# NodeConn: an object which manages p2p connectivity to a adn node
# NodeConnCB: a base class that describes the interface for receiving
# callbacks with network messages from a NodeConn
# CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
# data structures that should map to corresponding structures in
# adn/primitives
# msg_block, msg_tx, msg_headers, etc.:
# data structures that represent network messages
# ser_*, deser_*: functions that handle serialization/deserialization
import struct
import socket
import asyncore
import time
import sys
import random
from binascii import hexlify, unhexlify
from io import BytesIO
from codecs import encode
import hashlib
from threading import RLock
from threading import Thread
import logging
import copy
import adn_hash
# Protocol version constants advertised by this test node.
BIP0031_VERSION = 60000
MY_VERSION = 70103 # past bip-31 for ping/pong
MY_SUBVERSION = b"/python-mininode-tester:0.0.2/"
# Protocol limits: max entries in an inv message, max serialized block size.
MAX_INV_SZ = 50000
MAX_BLOCK_SIZE = 1000000
COIN = 100000000L # 1 btc in satoshis
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
mininode_socket_map = dict()
# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# NodeConn acquires this lock whenever delivering a message to to a NodeConnCB,
# and whenever adding anything to the send buffer (in send_message()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the NodeConnCB or NodeConn.
mininode_lock = RLock()
# Serialization/deserialization tools
def sha256(s):
    """Single SHA-256 digest of s (raw bytes)."""
    digester = hashlib.new('sha256')
    digester.update(s)
    return digester.digest()
def hash256(s):
    """Double SHA-256 (hash of the hash) of s."""
    inner = sha256(s)
    return sha256(inner)
def adnhash(s):
    # Proof-of-work hash; delegates to the external adn_hash extension module.
    return adn_hash.getPoWHash(s)
def deser_string(f):
    """Read a CompactSize-length-prefixed byte string from file object f."""
    # A first byte of 253/254/255 flags a 2-, 4- or 8-byte little-endian
    # length that follows; anything smaller IS the length.
    size = struct.unpack("<B", f.read(1))[0]
    if size == 253:
        size = struct.unpack("<H", f.read(2))[0]
    elif size == 254:
        size = struct.unpack("<I", f.read(4))[0]
    elif size == 255:
        size = struct.unpack("<Q", f.read(8))[0]
    return f.read(size)
def ser_string(s):
    """Serialize s as a CompactSize length prefix followed by the raw bytes."""
    size = len(s)
    if size < 253:
        return struct.pack("B", size) + s
    if size < 0x10000:
        return struct.pack("<BH", 253, size) + s
    if size < 0x100000000:
        return struct.pack("<BI", 254, size) + s
    return struct.pack("<BQ", 255, size) + s
def deser_uint256(f):
    """Read a 256-bit little-endian integer as eight 32-bit words."""
    value = 0
    for word_index in range(8):
        word = struct.unpack("<I", f.read(4))[0]
        value |= word << (word_index * 32)
    return value
def ser_uint256(u):
    """Serialize integer u as 32 little-endian bytes (eight 32-bit words)."""
    out = b""
    for _ in range(8):
        out += struct.pack("<I", u & 0xFFFFFFFF)
        u >>= 32
    return out
def uint256_from_str(s):
    """Interpret the first 32 bytes of s as a little-endian 256-bit integer."""
    words = struct.unpack("<IIIIIIII", s[:32])
    value = 0
    for position, word in enumerate(words):
        value += word << (position * 32)
    return value
def uint256_from_compact(c):
    """Expand a compact-format ("nBits") value into a full 256-bit integer."""
    # High byte is a base-256 exponent; low three bytes are the mantissa.
    exponent = (c >> 24) & 0xFF
    mantissa = c & 0xFFFFFF
    return mantissa << (8 * (exponent - 3))
def deser_vector(f, c):
    """Read a CompactSize count, then that many c() objects via deserialize()."""
    count = struct.unpack("<B", f.read(1))[0]
    if count == 253:
        count = struct.unpack("<H", f.read(2))[0]
    elif count == 254:
        count = struct.unpack("<I", f.read(4))[0]
    elif count == 255:
        count = struct.unpack("<Q", f.read(8))[0]
    items = []
    for _ in range(count):
        obj = c()
        obj.deserialize(f)
        items.append(obj)
    return items
def ser_vector(l):
    """Serialize list l as a CompactSize count plus each element's serialize()."""
    count = len(l)
    if count < 253:
        out = struct.pack("B", count)
    elif count < 0x10000:
        out = struct.pack("<BH", 253, count)
    elif count < 0x100000000:
        out = struct.pack("<BI", 254, count)
    else:
        out = struct.pack("<BQ", 255, count)
    for item in l:
        out += item.serialize()
    return out
def deser_uint256_vector(f):
    """Read a CompactSize count followed by that many 256-bit integers."""
    count = struct.unpack("<B", f.read(1))[0]
    if count == 253:
        count = struct.unpack("<H", f.read(2))[0]
    elif count == 254:
        count = struct.unpack("<I", f.read(4))[0]
    elif count == 255:
        count = struct.unpack("<Q", f.read(8))[0]
    return [deser_uint256(f) for _ in range(count)]
def ser_uint256_vector(l):
    """Serialize a list of 256-bit integers with a CompactSize count prefix."""
    count = len(l)
    if count < 253:
        out = struct.pack("B", count)
    elif count < 0x10000:
        out = struct.pack("<BH", 253, count)
    elif count < 0x100000000:
        out = struct.pack("<BI", 254, count)
    else:
        out = struct.pack("<BQ", 255, count)
    for value in l:
        out += ser_uint256(value)
    return out
def deser_string_vector(f):
    """Read a CompactSize count followed by that many length-prefixed strings."""
    count = struct.unpack("<B", f.read(1))[0]
    if count == 253:
        count = struct.unpack("<H", f.read(2))[0]
    elif count == 254:
        count = struct.unpack("<I", f.read(4))[0]
    elif count == 255:
        count = struct.unpack("<Q", f.read(8))[0]
    return [deser_string(f) for _ in range(count)]
def ser_string_vector(l):
    """Serialize a list of byte strings with a CompactSize count prefix."""
    count = len(l)
    if count < 253:
        out = struct.pack("B", count)
    elif count < 0x10000:
        out = struct.pack("<BH", 253, count)
    elif count < 0x100000000:
        out = struct.pack("<BI", 254, count)
    else:
        out = struct.pack("<BQ", 255, count)
    for item in l:
        out += ser_string(item)
    return out
def deser_int_vector(f):
    """Read a CompactSize count followed by that many signed 32-bit ints."""
    count = struct.unpack("<B", f.read(1))[0]
    if count == 253:
        count = struct.unpack("<H", f.read(2))[0]
    elif count == 254:
        count = struct.unpack("<I", f.read(4))[0]
    elif count == 255:
        count = struct.unpack("<Q", f.read(8))[0]
    return [struct.unpack("<i", f.read(4))[0] for _ in range(count)]
def ser_int_vector(l):
    """Serialize a list of signed 32-bit ints with a CompactSize count prefix."""
    count = len(l)
    if count < 253:
        out = struct.pack("B", count)
    elif count < 0x10000:
        out = struct.pack("<BH", 253, count)
    elif count < 0x100000000:
        out = struct.pack("<BI", 254, count)
    else:
        out = struct.pack("<BQ", 255, count)
    for value in l:
        out += struct.pack("<i", value)
    return out
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
    """Populate obj by deserializing a hex string (eg from RPC); returns obj."""
    raw = unhexlify(hex_string.encode('ascii'))
    obj.deserialize(BytesIO(raw))
    return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
    """Hex-encode obj.serialize() (eg for submission via RPC)."""
    raw = obj.serialize()
    return hexlify(raw).decode('ascii')
# Objects that map to adnd objects, which can be serialized/deserialized
class CAddress(object):
    """Network address record as carried inside version/addr messages."""

    def __init__(self):
        self.nServices = 1
        # 10 zero bytes + 2 0xff bytes: the IPv4-mapped IPv6 prefix.
        self.pchReserved = b"\x00" * 10 + b"\xff" * 2
        self.ip = "0.0.0.0"
        self.port = 0

    def deserialize(self, f):
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.pchReserved = f.read(12)
        self.ip = socket.inet_ntoa(f.read(4))
        # Port is serialized big-endian, unlike the other fields.
        self.port = struct.unpack(">H", f.read(2))[0]

    def serialize(self):
        return (struct.pack("<Q", self.nServices) +
                self.pchReserved +
                socket.inet_aton(self.ip) +
                struct.pack(">H", self.port))

    def __repr__(self):
        return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
                                                         self.ip, self.port)
class CInv(object):
    """Inventory vector: a (type, hash) pair used in inv/getdata messages."""
    typemap = {
        0: "Error",
        1: "TX",
        2: "Block"}

    def __init__(self, t=0, h=0):
        self.type = t
        self.hash = h

    def deserialize(self, f):
        self.type = struct.unpack("<i", f.read(4))[0]
        self.hash = deser_uint256(f)

    def serialize(self):
        return struct.pack("<i", self.type) + ser_uint256(self.hash)

    def __repr__(self):
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap[self.type], self.hash)
class CBlockLocator(object):
    """Block locator: a protocol version plus a list of known block hashes."""

    def __init__(self):
        self.nVersion = MY_VERSION
        self.vHave = []

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vHave = deser_uint256_vector(f)

    def serialize(self):
        return struct.pack("<i", self.nVersion) + ser_uint256_vector(self.vHave)

    def __repr__(self):
        return "CBlockLocator(nVersion=%i vHave=%s)" \
            % (self.nVersion, repr(self.vHave))
class COutPoint(object):
    """Reference to one output (hash, index) of a previous transaction."""

    def __init__(self, hash=0, n=0):
        self.hash = hash
        self.n = n

    def deserialize(self, f):
        self.hash = deser_uint256(f)
        self.n = struct.unpack("<I", f.read(4))[0]

    def serialize(self):
        return ser_uint256(self.hash) + struct.pack("<I", self.n)

    def __repr__(self):
        return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn(object):
    """A transaction input: previous-output reference, scriptSig, sequence."""
    def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
        # Default to a null outpoint when none is supplied.
        if outpoint is None:
            self.prevout = COutPoint()
        else:
            self.prevout = outpoint
        self.scriptSig = scriptSig
        self.nSequence = nSequence
    def deserialize(self, f):
        # Wire order: fixed-size COutPoint, length-prefixed script,
        # then a 4-byte little-endian sequence number.
        self.prevout = COutPoint()
        self.prevout.deserialize(f)
        self.scriptSig = deser_string(f)
        self.nSequence = struct.unpack("<I", f.read(4))[0]
    def serialize(self):
        r = b""
        r += self.prevout.serialize()
        r += ser_string(self.scriptSig)
        r += struct.pack("<I", self.nSequence)
        return r
    def __repr__(self):
        return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
            % (repr(self.prevout), hexlify(self.scriptSig),
               self.nSequence)
class CTxOut(object):
    """A transaction output: amount in base units plus locking script."""
    def __init__(self, nValue=0, scriptPubKey=b""):
        self.nValue = nValue
        self.scriptPubKey = scriptPubKey
    def deserialize(self, f):
        (self.nValue,) = struct.unpack("<q", f.read(8))
        self.scriptPubKey = deser_string(f)
    def serialize(self):
        return struct.pack("<q", self.nValue) + ser_string(self.scriptPubKey)
    def __repr__(self):
        # Displays the amount as whole-coins.fraction using COIN units.
        return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
            % (self.nValue // COIN, self.nValue % COIN,
               hexlify(self.scriptPubKey))
class CTransaction(object):
    """A transaction: version, inputs, outputs, lock time.

    `sha256` and `hash` cache the txid (as an int and as a byte-reversed
    hex string, respectively); they are cleared whenever the transaction
    content is (re)loaded.
    """
    def __init__(self, tx=None):
        if tx is None:
            self.nVersion = 1
            self.vin = []
            self.vout = []
            self.nLockTime = 0
            self.sha256 = None
            self.hash = None
        else:
            # Copy constructor: deep-copy the input/output lists so the
            # copy can be mutated independently of `tx`.
            self.nVersion = tx.nVersion
            self.vin = copy.deepcopy(tx.vin)
            self.vout = copy.deepcopy(tx.vout)
            self.nLockTime = tx.nLockTime
            self.sha256 = None
            self.hash = None
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vin = deser_vector(f, CTxIn)
        self.vout = deser_vector(f, CTxOut)
        self.nLockTime = struct.unpack("<I", f.read(4))[0]
        # Invalidate the cached txid; it no longer matches the new content.
        self.sha256 = None
        self.hash = None
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_vector(self.vin)
        r += ser_vector(self.vout)
        r += struct.pack("<I", self.nLockTime)
        return r
    def rehash(self):
        # Force recomputation even if a cached value exists.
        self.sha256 = None
        self.calc_sha256()
    def calc_sha256(self):
        # Double-SHA256 of the serialized tx; `hash` is the byte-reversed
        # hex form (conventional txid display order).
        if self.sha256 is None:
            self.sha256 = uint256_from_str(hash256(self.serialize()))
        self.hash = encode(hash256(self.serialize())[::-1], 'hex_codec').decode('ascii')
    def is_valid(self):
        self.calc_sha256()
        # Only checks output value ranges; does not validate scripts.
        for tout in self.vout:
            if tout.nValue < 0 or tout.nValue > 100000000 * COIN:
                return False
        return True
    def __repr__(self):
        return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" \
            % (self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)
class CBlockHeader(object):
    """An 80-byte block header plus cached hash fields.

    `sha256` and `hash` cache the header hash (int / byte-reversed hex);
    they are cleared whenever the header fields are reloaded.
    """
    def __init__(self, header=None):
        if header is None:
            self.set_null()
        else:
            # Copy constructor; recomputes the cached hash if needed.
            self.nVersion = header.nVersion
            self.hashPrevBlock = header.hashPrevBlock
            self.hashMerkleRoot = header.hashMerkleRoot
            self.nTime = header.nTime
            self.nBits = header.nBits
            self.nNonce = header.nNonce
            self.sha256 = header.sha256
            self.hash = header.hash
            self.calc_sha256()
    def set_null(self):
        self.nVersion = 1
        self.hashPrevBlock = 0
        self.hashMerkleRoot = 0
        self.nTime = 0
        self.nBits = 0
        self.nNonce = 0
        self.sha256 = None
        self.hash = None
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        self.nTime = struct.unpack("<I", f.read(4))[0]
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        # Invalidate the cached hash after reloading the fields.
        self.sha256 = None
        self.hash = None
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256(self.hashPrevBlock)
        r += ser_uint256(self.hashMerkleRoot)
        r += struct.pack("<I", self.nTime)
        r += struct.pack("<I", self.nBits)
        r += struct.pack("<I", self.nNonce)
        return r
    def calc_sha256(self):
        if self.sha256 is None:
            # Serialize the 80-byte header inline (same layout as serialize()).
            r = b""
            r += struct.pack("<i", self.nVersion)
            r += ser_uint256(self.hashPrevBlock)
            r += ser_uint256(self.hashMerkleRoot)
            r += struct.pack("<I", self.nTime)
            r += struct.pack("<I", self.nBits)
            r += struct.pack("<I", self.nNonce)
            # NOTE: header hashing uses adnhash() (coin-specific), unlike
            # transaction hashing above which uses hash256().
            self.sha256 = uint256_from_str(adnhash(r))
            self.hash = encode(adnhash(r)[::-1], 'hex_codec').decode('ascii')
    def rehash(self):
        # Force recomputation and return the fresh integer hash.
        self.sha256 = None
        self.calc_sha256()
        return self.sha256
    def __repr__(self):
        return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce)
class CBlock(CBlockHeader):
    """A full block: header fields (inherited) plus the transaction vector."""
    def __init__(self, header=None):
        super(CBlock, self).__init__(header)
        self.vtx = []
    def deserialize(self, f):
        super(CBlock, self).deserialize(f)
        self.vtx = deser_vector(f, CTransaction)
    def serialize(self):
        r = b""
        r += super(CBlock, self).serialize()
        r += ser_vector(self.vtx)
        return r
    def calc_merkle_root(self):
        """Compute the merkle root of the block's transactions.

        NOTE(review): assumes vtx is non-empty — hashes[0] raises
        IndexError on an empty block.
        """
        hashes = []
        for tx in self.vtx:
            tx.calc_sha256()
            hashes.append(ser_uint256(tx.sha256))
        while len(hashes) > 1:
            newhashes = []
            for i in xrange(0, len(hashes), 2):
                # On an odd count, the last hash is paired with itself.
                i2 = min(i+1, len(hashes)-1)
                newhashes.append(hash256(hashes[i] + hashes[i2]))
            hashes = newhashes
        return uint256_from_str(hashes[0])
    def is_valid(self):
        # Proof-of-work, per-tx value ranges, and merkle-root consistency.
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        if self.sha256 > target:
            return False
        for tx in self.vtx:
            if not tx.is_valid():
                return False
        if self.calc_merkle_root() != self.hashMerkleRoot:
            return False
        return True
    def solve(self):
        # Grind the nonce until the header hash meets the compact target.
        self.rehash()
        target = uint256_from_compact(self.nBits)
        while self.sha256 > target:
            self.nNonce += 1
            self.rehash()
    def __repr__(self):
        return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
class CUnsignedAlert(object):
    """The unsigned payload of an 'alert' message.

    Fields are (de)serialized in exactly the order they are declared here;
    deserialize() and serialize() must stay mirror images of each other.
    """
    def __init__(self):
        self.nVersion = 1
        self.nRelayUntil = 0
        self.nExpiration = 0
        self.nID = 0
        self.nCancel = 0
        self.setCancel = []
        self.nMinVer = 0
        self.nMaxVer = 0
        self.setSubVer = []
        self.nPriority = 0
        self.strComment = b""
        self.strStatusBar = b""
        self.strReserved = b""
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
        self.nExpiration = struct.unpack("<q", f.read(8))[0]
        self.nID = struct.unpack("<i", f.read(4))[0]
        self.nCancel = struct.unpack("<i", f.read(4))[0]
        self.setCancel = deser_int_vector(f)
        self.nMinVer = struct.unpack("<i", f.read(4))[0]
        self.nMaxVer = struct.unpack("<i", f.read(4))[0]
        self.setSubVer = deser_string_vector(f)
        self.nPriority = struct.unpack("<i", f.read(4))[0]
        self.strComment = deser_string(f)
        self.strStatusBar = deser_string(f)
        self.strReserved = deser_string(f)
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<q", self.nRelayUntil)
        r += struct.pack("<q", self.nExpiration)
        r += struct.pack("<i", self.nID)
        r += struct.pack("<i", self.nCancel)
        r += ser_int_vector(self.setCancel)
        r += struct.pack("<i", self.nMinVer)
        r += struct.pack("<i", self.nMaxVer)
        r += ser_string_vector(self.setSubVer)
        r += struct.pack("<i", self.nPriority)
        r += ser_string(self.strComment)
        r += ser_string(self.strStatusBar)
        r += ser_string(self.strReserved)
        return r
    def __repr__(self):
        return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \
            % (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID,
               self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority,
               self.strComment, self.strStatusBar, self.strReserved)
class CAlert(object):
    """Signed alert envelope: opaque message bytes plus signature bytes."""
    def __init__(self):
        self.vchMsg = b""
        self.vchSig = b""
    def deserialize(self, f):
        self.vchMsg = deser_string(f)
        self.vchSig = deser_string(f)
    def serialize(self):
        return ser_string(self.vchMsg) + ser_string(self.vchSig)
    def __repr__(self):
        return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \
            % (len(self.vchMsg), len(self.vchSig))
# Objects that correspond to messages on the wire
class msg_version(object):
    """'version' handshake message.

    Which fields exist on the wire depends on the peer's protocol version,
    hence the nested version checks in deserialize().
    """
    command = b"version"
    def __init__(self):
        self.nVersion = MY_VERSION
        self.nServices = 1
        self.nTime = int(time.time())
        self.addrTo = CAddress()
        self.addrFrom = CAddress()
        # Random nonce lets a node detect connections to itself.
        self.nNonce = random.getrandbits(64)
        self.strSubVer = MY_SUBVERSION
        self.nStartingHeight = -1
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        # Quirk inherited from the reference client: treat 10300 as 300.
        if self.nVersion == 10300:
            self.nVersion = 300
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        self.addrTo = CAddress()
        self.addrTo.deserialize(f)
        if self.nVersion >= 106:
            # addrFrom/nonce/subver only exist from protocol 106 on.
            self.addrFrom = CAddress()
            self.addrFrom.deserialize(f)
            self.nNonce = struct.unpack("<Q", f.read(8))[0]
            self.strSubVer = deser_string(f)
            if self.nVersion >= 209:
                self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
            else:
                self.nStartingHeight = None
        else:
            self.addrFrom = None
            self.nNonce = None
            self.strSubVer = None
            self.nStartingHeight = None
    def serialize(self):
        # Always serializes the full modern layout (all fields present).
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<Q", self.nServices)
        r += struct.pack("<q", self.nTime)
        r += self.addrTo.serialize()
        r += self.addrFrom.serialize()
        r += struct.pack("<Q", self.nNonce)
        r += ser_string(self.strSubVer)
        r += struct.pack("<i", self.nStartingHeight)
        return r
    def __repr__(self):
        return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i)' \
            % (self.nVersion, self.nServices, time.ctime(self.nTime),
               repr(self.addrTo), repr(self.addrFrom), self.nNonce,
               self.strSubVer, self.nStartingHeight)
class msg_verack(object):
    """Empty 'verack' handshake acknowledgement message."""
    command = b"verack"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload on the wire.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_verack()"
class msg_addr(object):
    """'addr' message: gossips a list of known peer addresses."""
    command = b"addr"
    def __init__(self):
        self.addrs = []
    def deserialize(self, f):
        self.addrs = deser_vector(f, CAddress)
    def serialize(self):
        return ser_vector(self.addrs)
    def __repr__(self):
        return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_alert(object):
    """'alert' network message wrapping a signed CAlert payload."""
    command = b"alert"
    def __init__(self):
        self.alert = CAlert()
    def deserialize(self, f):
        self.alert = CAlert()
        self.alert.deserialize(f)
    def serialize(self):
        r = b""
        r += self.alert.serialize()
        return r
    def __repr__(self):
        return "msg_alert(alert=%s)" % (repr(self.alert), )
class msg_inv(object):
    """'inv' message: advertise a list of CInv inventory entries."""
    command = b"inv"
    def __init__(self, inv=None):
        # A fresh list per instance when no inventory is supplied.
        self.inv = [] if inv is None else inv
    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)
    def serialize(self):
        return ser_vector(self.inv)
    def __repr__(self):
        return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata(object):
    """'getdata' message: request the objects listed in `inv` from a peer."""
    command = b"getdata"
    def __init__(self, inv=None):
        # FIX: was `inv if inv != None else []` — comparison to None should
        # be an identity test (`is not None`), not equality (PEP 8); `!=`
        # invokes list equality machinery unnecessarily.
        self.inv = inv if inv is not None else []
    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)
    def serialize(self):
        return ser_vector(self.inv)
    def __repr__(self):
        return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks(object):
    """'getblocks' request: block locator plus a stop hash (0 = no limit)."""
    command = b"getblocks"
    def __init__(self):
        self.locator = CBlockLocator()
        # FIX: was `0L` — the Python 2 long literal is a syntax error on
        # Python 3; plain 0 is equivalent on Python 2.
        self.hashstop = 0
    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)
    def serialize(self):
        r = b""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r
    def __repr__(self):
        return "msg_getblocks(locator=%s hashstop=%064x)" \
            % (repr(self.locator), self.hashstop)
class msg_tx(object):
    """'tx' message carrying a single transaction."""
    command = b"tx"
    def __init__(self, tx=None):
        # FIX: the default was `tx=CTransaction()`, which is evaluated once
        # at class-definition time, so every `msg_tx()` shared (and mutated)
        # the same CTransaction instance — the classic mutable-default bug.
        # Each instance now gets its own fresh transaction.
        self.tx = CTransaction() if tx is None else tx
    def deserialize(self, f):
        self.tx.deserialize(f)
    def serialize(self):
        return self.tx.serialize()
    def __repr__(self):
        return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_block(object):
    """'block' message carrying one full block."""
    command = b"block"
    def __init__(self, block=None):
        # A fresh empty block per instance when none is supplied.
        self.block = CBlock() if block is None else block
    def deserialize(self, f):
        self.block.deserialize(f)
    def serialize(self):
        return self.block.serialize()
    def __repr__(self):
        return "msg_block(block=%s)" % (repr(self.block))
class msg_getaddr(object):
    """Empty 'getaddr' message: ask a peer for known addresses."""
    command = b"getaddr"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload on the wire.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_getaddr()"
class msg_ping_prebip31(object):
    """Nonce-less 'ping' used by peers older than BIP31."""
    command = b"ping"
    def __init__(self):
        pass
    def deserialize(self, f):
        # Pre-BIP31 pings have no payload.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_ping() (pre-bip31)"
class msg_ping(object):
    """BIP31-style 'ping' carrying a 64-bit nonce the peer echoes in 'pong'."""
    command = b"ping"
    def __init__(self, nonce=0):
        # FIX: default was `0L` — the Python 2 long literal is a syntax
        # error on Python 3; plain 0 is equivalent on Python 2.
        self.nonce = nonce
    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]
    def serialize(self):
        r = b""
        r += struct.pack("<Q", self.nonce)
        return r
    def __repr__(self):
        return "msg_ping(nonce=%08x)" % self.nonce
class msg_pong(object):
    """'pong' reply echoing the nonce of a received 'ping'."""
    command = b"pong"
    def __init__(self, nonce=0):
        self.nonce = nonce
    def deserialize(self, f):
        (self.nonce,) = struct.unpack("<Q", f.read(8))
    def serialize(self):
        return struct.pack("<Q", self.nonce)
    def __repr__(self):
        return "msg_pong(nonce=%08x)" % self.nonce
class msg_mempool(object):
    """Empty 'mempool' message: ask a peer for its mempool contents."""
    command = b"mempool"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload on the wire.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_mempool()"
class msg_sendheaders(object):
    """Empty 'sendheaders' message: request headers-first announcements."""
    command = b"sendheaders"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload on the wire.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_sendheaders()"
# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders(object):
    """'getheaders' request: block locator plus a stop hash (0 = no limit)."""
    command = b"getheaders"
    def __init__(self):
        self.locator = CBlockLocator()
        # FIX: was `0L` — the Python 2 long literal is a syntax error on
        # Python 3; plain 0 is equivalent on Python 2.
        self.hashstop = 0
    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)
    def serialize(self):
        r = b""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r
    def __repr__(self):
        return "msg_getheaders(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)
# headers message has
# <count> <vector of block headers>
class msg_headers(object):
    """'headers' message: a vector of block headers."""
    command = b"headers"
    def __init__(self):
        self.headers = []
    def deserialize(self, f):
        # FIX: start from an empty list so deserializing into a reused
        # msg_headers *replaces* its contents instead of appending to
        # whatever a previous deserialize() left behind.
        self.headers = []
        # comment in adnd indicates these should be deserialized as blocks
        blocks = deser_vector(f, CBlock)
        for x in blocks:
            self.headers.append(CBlockHeader(x))
    def serialize(self):
        # Headers go out as blocks with empty tx vectors.
        blocks = [CBlock(x) for x in self.headers]
        return ser_vector(blocks)
    def __repr__(self):
        return "msg_headers(headers=%s)" % repr(self.headers)
class msg_reject(object):
    """'reject' message: why a previous message was rejected.

    For rejected 'block'/'tx' messages (when the code is not
    REJECT_MALFORMED) the payload additionally carries the offending hash.
    """
    command = b"reject"
    REJECT_MALFORMED = 1
    def __init__(self):
        self.message = b""
        self.code = 0
        self.reason = b""
        # FIX: was `0L` — the Python 2 long literal is a syntax error on
        # Python 3; plain 0 is equivalent on Python 2.
        self.data = 0
    def deserialize(self, f):
        self.message = deser_string(f)
        self.code = struct.unpack("<B", f.read(1))[0]
        self.reason = deser_string(f)
        if (self.code != self.REJECT_MALFORMED and
                (self.message == b"block" or self.message == b"tx")):
            self.data = deser_uint256(f)
    def serialize(self):
        r = ser_string(self.message)
        r += struct.pack("<B", self.code)
        r += ser_string(self.reason)
        if (self.code != self.REJECT_MALFORMED and
                (self.message == b"block" or self.message == b"tx")):
            r += ser_uint256(self.data)
        return r
    def __repr__(self):
        return "msg_reject: %s %d %s [%064x]" \
            % (self.message, self.code, self.reason, self.data)
# Helper function
def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
    """Poll predicate() (under mininode_lock) roughly every 50ms.

    Returns True as soon as predicate() is truthy; returns False once
    `attempts` polls have been made or `timeout` seconds have elapsed.
    """
    attempt = 0
    # FIX: measure real wall-clock time instead of accumulating nominal
    # 0.05s increments — time spent waiting for mininode_lock or inside
    # predicate() previously did not count toward the timeout.
    start = time.time()
    while attempt < attempts and time.time() - start < timeout:
        with mininode_lock:
            if predicate():
                return True
        attempt += 1
        time.sleep(0.05)
    return False
# This is what a callback should look like for NodeConn
# Reimplement the on_* functions to provide handling for events
class NodeConnCB(object):
    """Default event-callback object for a NodeConn.

    Subclass and override the on_* handlers to react to specific p2p
    messages; the base implementations perform the version/verack
    handshake, answer pings, and request advertised inventory.
    """
    def __init__(self):
        self.verack_received = False
        # deliver_sleep_time is helpful for debugging race conditions in p2p
        # tests; it causes message delivery to sleep for the specified time
        # before acquiring the global lock and delivering the next message.
        self.deliver_sleep_time = None
    def set_deliver_sleep_time(self, value):
        with mininode_lock:
            self.deliver_sleep_time = value
    def get_deliver_sleep_time(self):
        with mininode_lock:
            return self.deliver_sleep_time
    # Spin until verack message is received from the node.
    # Tests may want to use this as a signal that the test can begin.
    # This can be called from the testing thread, so it needs to acquire the
    # global lock.
    def wait_for_verack(self):
        while True:
            with mininode_lock:
                if self.verack_received:
                    return
            time.sleep(0.05)
    def deliver(self, conn, message):
        # Dispatch to on_<command>; exceptions are printed rather than
        # raised so one bad handler does not kill the network thread.
        deliver_sleep = self.get_deliver_sleep_time()
        if deliver_sleep is not None:
            time.sleep(deliver_sleep)
        with mininode_lock:
            try:
                getattr(self, 'on_' + message.command)(conn, message)
            except:
                print "ERROR delivering %s (%s)" % (repr(message),
                        sys.exc_info()[0])
    def on_version(self, conn, message):
        # Handshake: ack modern (>=209) peers, record negotiated version.
        if message.nVersion >= 209:
            conn.send_message(msg_verack())
        conn.ver_send = min(MY_VERSION, message.nVersion)
        if message.nVersion < 209:
            conn.ver_recv = conn.ver_send
    def on_verack(self, conn, message):
        conn.ver_recv = conn.ver_send
        self.verack_received = True
    def on_inv(self, conn, message):
        # Request every advertised object of a known (non-error) type.
        want = msg_getdata()
        for i in message.inv:
            if i.type != 0:
                want.inv.append(i)
        if len(want.inv):
            conn.send_message(want)
    def on_addr(self, conn, message): pass
    def on_alert(self, conn, message): pass
    def on_getdata(self, conn, message): pass
    def on_getblocks(self, conn, message): pass
    def on_tx(self, conn, message): pass
    def on_block(self, conn, message): pass
    def on_getaddr(self, conn, message): pass
    def on_headers(self, conn, message): pass
    def on_getheaders(self, conn, message): pass
    def on_ping(self, conn, message):
        # Only post-BIP31 pings carry a nonce and expect a pong.
        if conn.ver_send > BIP0031_VERSION:
            conn.send_message(msg_pong(message.nonce))
    def on_reject(self, conn, message): pass
    def on_close(self, conn): pass
    def on_mempool(self, conn): pass
    def on_pong(self, conn, message): pass
# More useful callbacks and functions for NodeConnCB's which have a single NodeConn
class SingleNodeConnCB(NodeConnCB):
    """Convenience callback for tests that talk to exactly one NodeConn."""
    def __init__(self):
        NodeConnCB.__init__(self)
        self.connection = None
        self.ping_counter = 1
        self.last_pong = msg_pong()
    def add_connection(self, conn):
        self.connection = conn
    # Wrapper for the NodeConn's send_message function
    def send_message(self, message):
        self.connection.send_message(message)
    def on_pong(self, conn, message):
        self.last_pong = message
    # Sync up with the node
    def sync_with_ping(self, timeout=30):
        """Send a ping and wait up to `timeout` seconds for the matching pong."""
        def received_pong():
            return (self.last_pong.nonce == self.ping_counter)
        self.send_message(msg_ping(nonce=self.ping_counter))
        # FIX: `wait_until(received_pong, timeout)` passed `timeout`
        # positionally into the `attempts` parameter, so the intended
        # 30-second time limit became "30 polls" (~1.5s) with no real
        # timeout applied. Pass it by keyword.
        success = wait_until(received_pong, timeout=timeout)
        self.ping_counter += 1
        return success
# The actual NodeConn class
# This class provides an interface for a p2p connection to a specified node
class NodeConn(asyncore.dispatcher):
    """Asyncore-based p2p connection to a node.

    Handles the wire framing (magic, command, length, checksum), message
    (de)serialization via `messagemap`, and forwards every decoded message
    to the callback object `cb`.
    """
    # Maps wire command bytes to the class used to deserialize that message.
    messagemap = {
        b"version": msg_version,
        b"verack": msg_verack,
        b"addr": msg_addr,
        b"alert": msg_alert,
        b"inv": msg_inv,
        b"getdata": msg_getdata,
        b"getblocks": msg_getblocks,
        b"tx": msg_tx,
        b"block": msg_block,
        b"getaddr": msg_getaddr,
        b"ping": msg_ping,
        b"pong": msg_pong,
        b"headers": msg_headers,
        b"getheaders": msg_getheaders,
        b"reject": msg_reject,
        b"mempool": msg_mempool,
    }
    # 4-byte network magic that prefixes every message on the wire.
    MAGIC_BYTES = {
        "mainnet": b"\xbf\x0c\x6b\xbd",   # mainnet
        "testnet3": b"\xce\xe2\xca\xff",  # testnet3
        "regtest": b"\xfc\xc1\xb7\xdc"    # regtest
    }
    def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=1):
        asyncore.dispatcher.__init__(self, map=mininode_socket_map)
        self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport))
        self.dstaddr = dstaddr
        self.dstport = dstport
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sendbuf = b""
        self.recvbuf = b""
        # Assume modern protocol (with checksums) until version negotiation.
        self.ver_send = 209
        self.ver_recv = 209
        self.last_sent = 0
        self.state = "connecting"
        self.network = net
        self.cb = callback
        self.disconnect = False
        # stuff version msg into sendbuf
        vt = msg_version()
        vt.nServices = services
        vt.addrTo.ip = self.dstaddr
        vt.addrTo.port = self.dstport
        vt.addrFrom.ip = "0.0.0.0"
        vt.addrFrom.port = 0
        # pushbuf=True: queue the version message even though we are not
        # yet in the "connected" state.
        self.send_message(vt, True)
        print 'MiniNode: Connecting to Adn Node IP # ' + dstaddr + ':' \
            + str(dstport)
        try:
            self.connect((dstaddr, dstport))
        except:
            self.handle_close()
        self.rpc = rpc
    def show_debug_msg(self, msg):
        self.log.debug(msg)
    def handle_connect(self):
        self.show_debug_msg("MiniNode: Connected & Listening: \n")
        self.state = "connected"
    def handle_close(self):
        self.show_debug_msg("MiniNode: Closing Connection to %s:%d... "
                            % (self.dstaddr, self.dstport))
        self.state = "closed"
        self.recvbuf = b""
        self.sendbuf = b""
        try:
            self.close()
        except:
            pass
        # Give the callback a chance to react to the disconnect.
        self.cb.on_close(self)
    def handle_read(self):
        try:
            t = self.recv(8192)
            if len(t) > 0:
                self.recvbuf += t
                self.got_data()
        except:
            pass
    def readable(self):
        return True
    def writable(self):
        # asyncore only polls for write-readiness while data is pending.
        with mininode_lock:
            length = len(self.sendbuf)
        return (length > 0)
    def handle_write(self):
        with mininode_lock:
            try:
                sent = self.send(self.sendbuf)
            except:
                self.handle_close()
                return
            self.sendbuf = self.sendbuf[sent:]
    def got_data(self):
        # Parse as many complete framed messages as recvbuf contains.
        # Frame layout: 4B magic | 12B padded command | 4B length
        # | (4B checksum, only for ver_recv >= 209) | payload.
        try:
            while True:
                if len(self.recvbuf) < 4:
                    return
                if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
                    raise ValueError("got garbage %s" % repr(self.recvbuf))
                if self.ver_recv < 209:
                    # Old protocol: no checksum field.
                    if len(self.recvbuf) < 4 + 12 + 4:
                        return
                    command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
                    msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
                    checksum = None
                    if len(self.recvbuf) < 4 + 12 + 4 + msglen:
                        return
                    msg = self.recvbuf[4+12+4:4+12+4+msglen]
                    self.recvbuf = self.recvbuf[4+12+4+msglen:]
                else:
                    if len(self.recvbuf) < 4 + 12 + 4 + 4:
                        return
                    command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
                    msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
                    checksum = self.recvbuf[4+12+4:4+12+4+4]
                    if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
                        return
                    msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
                    # Checksum = first 4 bytes of double-SHA256 of payload.
                    th = sha256(msg)
                    h = sha256(th)
                    if checksum != h[:4]:
                        raise ValueError("got bad checksum " + repr(self.recvbuf))
                    self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
                if command in self.messagemap:
                    f = BytesIO(msg)
                    t = self.messagemap[command]()
                    t.deserialize(f)
                    self.got_message(t)
                else:
                    self.show_debug_msg("Unknown command: '" + command + "' " +
                                        repr(msg))
        except Exception as e:
            print 'got_data:', repr(e)
    def send_message(self, message, pushbuf=False):
        # pushbuf=True queues data before the socket is connected (used to
        # pre-load the version message in __init__).
        if self.state != "connected" and not pushbuf:
            return
        self.show_debug_msg("Send %s" % repr(message))
        command = message.command
        data = message.serialize()
        tmsg = self.MAGIC_BYTES[self.network]
        tmsg += command
        tmsg += b"\x00" * (12 - len(command))
        tmsg += struct.pack("<I", len(data))
        if self.ver_send >= 209:
            # Modern protocol appends a 4-byte double-SHA256 checksum.
            th = sha256(data)
            h = sha256(th)
            tmsg += h[:4]
        tmsg += data
        with mininode_lock:
            self.sendbuf += tmsg
            self.last_sent = time.time()
    def got_message(self, message):
        if message.command == b"version":
            # Pre-BIP31 peers use the nonce-less ping variant.
            if message.nVersion <= BIP0031_VERSION:
                self.messagemap[b'ping'] = msg_ping_prebip31
        # Keep the connection alive if nothing was sent for 30 minutes.
        if self.last_sent + 30 * 60 < time.time():
            self.send_message(self.messagemap[b'ping']())
        self.show_debug_msg("Recv %s" % repr(message))
        self.cb.deliver(self, message)
    def disconnect_node(self):
        # Flag only; the NetworkThread performs the actual close.
        self.disconnect = True
class NetworkThread(Thread):
    """Background thread driving the shared asyncore event loop."""
    def run(self):
        # Runs until every NodeConn has been closed and removed from the map.
        while mininode_socket_map:
            # We check for whether to disconnect outside of the asyncore
            # loop to workaround the behavior of asyncore when using
            # select
            disconnected = []
            for fd, obj in mininode_socket_map.items():
                if obj.disconnect:
                    disconnected.append(obj)
            [ obj.handle_close() for obj in disconnected ]
            asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1)
# An exception we can raise if we detect a potential disconnect
# (p2p or rpc) before the test is complete
class EarlyDisconnectError(Exception):
    """Raised when a p2p or rpc connection drops before the test finishes."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
| |
from webhelpers2.html import tags, HTML
from blazeform import element
from blazeform.form import FormBase
from blazeform.util import StringIndentHelper, NotGiven, HtmlAttributeHolder
class FormRenderer(object):
    """Renders a form element tree to HTML.

    Walks the form's renderable elements, opening a wrapping <div> section
    for each HeaderElement and delegating each child to the renderer class
    chosen by the form's _renderer hook.
    """
    def __init__(self, element):
        # `element` here is the form object being rendered (shadows the
        # blazeform.element module inside this method only).
        self.element = element
        self.output = StringIndentHelper()
        # True while a header <div> section is open and must be closed.
        self.header_section_open = False
        self.settings = {}
    def begin(self):
        # Open the <form> tag; `action` is passed positionally to tags.form.
        attr = self.element.get_attrs()
        action = attr.pop('action', '')
        self.output.inc(tags.form(action, **attr))
    def render(self, **kwargs):
        self.settings.update(kwargs)
        self.begin()
        # on_first/on_alt track row striping state across children.
        on_first = True
        on_alt = False
        self.req_note_written = False
        for child in self.rendering_els():
            if isinstance(child, element.HeaderElement):
                # Close the previous header section (if any), open a new one.
                if self.header_section_open:
                    self.output.dec('</div>')
                on_first = True
                hstr = '<div id="%s-section" class="header-section">' % child.getidattr()
                self.output.inc(hstr)
                self.header_section_open = True
                if self.required_note_level == 'section':
                    # Each section gets its own required-field note.
                    self.req_note_written = False
            rcls = self.element._renderer(child)
            r = rcls(child, self.output, on_first, on_alt, 'row', self.settings)
            if (r.uses_first and on_first) or isinstance(child, element.HeaderElement):
                self.render_required_note(isinstance(child, element.HeaderElement))
            r.render()
            if r.uses_alt:
                on_alt = not on_alt
            if r.uses_first:
                on_first = False
        self.end()
        return self.output.get()
    @property
    def required_note_level(self):
        # Returns 'form', 'section', or None based on render(**kwargs).
        try:
            if self.settings['req_note_level'] == 'form':
                return 'form'
            if self.settings['req_note_level'] == 'section':
                return 'section'
        except KeyError as e:
            # Only swallow the KeyError for our own missing setting; a
            # KeyError raised deeper in the lookup chain propagates.
            if 'req_note_level' not in str(e):
                raise
        return None
    def render_required_note(self, above_header):
        # Emit the "* = required field" note once per form (or per section).
        if self.required_note_level and not self.req_note_written:
            req_note = self.settings.get(
                'req_note',
                '<div class="required_note%(above_header)s"><span class="star">*</span> '
                '= required field</div>'
            )
            if above_header:
                above_header_class = '_above_header'
            else:
                above_header_class = ''
            self.output(req_note % {'above_header': above_header_class})
            self.req_note_written = True
    def rendering_els(self):
        # Hook point: subclasses filter which elements get rendered.
        for el in self.element.renderable_els:
            yield el
    def end(self):
        if self.header_section_open:
            self.output.dec('</div>')
        self.output.dec('</form>')
class StaticFormRenderer(FormRenderer):
    """Renders a form read-only: interactive inputs are skipped and the
    <form> tag is replaced by a plain <div class="static-form">."""
    # Element types that have no meaning in a static (read-only) rendering.
    no_render = (
        element.ButtonElement,
        element.FileElement,
        element.HiddenElement,
        element.ImageElement,
        element.ResetElement,
        element.SubmitElement,
        element.CancelElement,
        element.PasswordElement,
        element.ConfirmElement
    )
    def begin(self):
        attrs = HtmlAttributeHolder(**self.element.attributes)
        attrs.add_attr('class', 'static-form')
        # Submission-related attributes make no sense on a static <div>.
        for attr in ('enctype', 'method', 'action'):
            try:
                attrs.del_attr(attr)
            except KeyError:
                pass
        self.output.inc(HTML.div(None, _closed=False, **attrs.attributes))
    def rendering_els(self):
        for el in self.element.renderable_els:
            if not isinstance(el, self.no_render):
                yield el
    def end(self):
        if self.header_section_open:
            self.output.dec('</div>')
        # Closes the <div> opened in begin() (instead of a </form>).
        self.output.dec('</div>')
class Renderer(object):
    """Base renderer: emits an element's HTML plus row/alt/first CSS hooks."""
    def __init__(self, element, output, is_first, is_alt, wrap_type, settings):
        self.element = element
        self.output = output
        self.wrap_type = wrap_type
        # Subclasses that take part in first/alt row striping flip these.
        self.uses_alt = False
        self.uses_first = False
        self.is_first = is_first
        self.is_alt = is_alt
        self.settings = settings
    def first_class(self):
        # CSS class fragment marking the first row in a section.
        return ' first' if self.is_first else ''
    def alt_class(self):
        # Alternating even/odd striping class fragment.
        return ' even' if self.is_alt else ' odd'
    def begin(self):
        pass
    def render(self):
        self.begin()
        self.output(self.element.render())
        self.end()
    def end(self):
        pass
    def setting(self, key):
        # Element-level settings override form-level ones; default is ''.
        return self.element.settings.get(key, self.settings.get(key, ''))
class HeaderRenderer(Renderer):
    """Renderer for header elements; emits markup only when a value is set."""
    def render(self):
        self.begin()
        # Headers without a default value produce no output of their own,
        # but begin()/end() still run so wrapping stays balanced.
        if self.element.defaultval is not NotGiven:
            self.output(self.element.render())
        self.end()
class FieldRenderer(Renderer):
    """Renders one form field as a row: label, widget wrapper, notes, errors.

    NOTE(review): instances are single-use. label_class() below replaces
    itself with a string attribute on first call, so calling begin() twice
    on the same instance would fail — confirm renderers are never reused.
    """
    def __init__(self, element, output, is_first, is_alt, wrap_type, settings):
        Renderer.__init__(self, element, output, is_first, is_alt, wrap_type, settings)
        # Field rows participate in first/alt striping.
        self.uses_first = True
        self.uses_alt = True
    def begin(self):
        self.begin_row()
        self.label_class()
        if not self.element.label_after:
            self.label()
        self.field_wrapper()
        self.required()
    def begin_row(self):
        self.output.inc(
            '<div id="%s-%s" class="%s%s%s">' %
            (self.element.getidattr(), self.wrap_type, self.wrap_type,
             self.alt_class(), self.first_class())
        )
    def label_class(self):
        # Computes the label-related CSS fragment. NOTE(review): the result
        # is assigned to self.label_class, shadowing this method on the
        # instance from here on (see class docstring).
        classes = []
        if not self.element.label.value:
            classes.append('no-label')
        if self.element.label_after:
            classes.append('label-after')
        if not classes:
            self.label_class = ''
        else:
            self.label_class = ' %s' % ' '.join(classes)
    def label(self):
        # NOTE(review): mutates the element's label in place — rendering the
        # same element twice would append a second ':'.
        if self.element.label.value:
            if not self.element.label_after:
                self.element.label.value += ':'
            self.output(self.element.label())
    def field_wrapper(self):
        self.output.inc('<div id="%s-fw" class="field-wrapper%s">' %
                        (self.element.getidattr(), self.label_class))
    def required(self):
        # Required star only shows on editable (non-static) forms.
        if self.element.required and not self.element.form._static:
            self.output('<span class="required-star">*</span>')
    def notes(self):
        # One note -> <p>; several -> <ul>.
        if len(self.element.notes) == 1:
            self.output('<p class="note">%s%s</p>' % (
                self.setting('note_prefix'),
                self.element.notes[0]
            ))
        elif len(self.element.notes) > 1:
            self.output.inc('<ul class="notes">')
            for msg in self.element.notes:
                self.output('<li>%s%s</li>' % (
                    self.setting('note_prefix'),
                    msg
                ))
            self.output.dec('</ul>')
    def errors(self):
        # Same single/plural structure as notes(), with error styling.
        if len(self.element.errors) == 1:
            self.output('<p class="error">%s%s</p>' % (
                self.setting('error_prefix'),
                self.element.errors[0]
            ))
        elif len(self.element.errors) > 1:
            self.output.inc('<ul class="errors">')
            for msg in self.element.errors:
                self.output('<li>%s%s</li>' % (
                    self.setting('error_prefix'),
                    msg
                ))
            self.output.dec('</ul>')
    def end(self):
        self.notes()
        self.errors()
        # close field wrapper
        self.output.dec('</div>')
        if self.element.label_after:
            self.label()
        # close row
        self.output.dec('</div>')
class InputRenderer(FieldRenderer):
    """FieldRenderer variant that also adds the input's etype as a CSS class."""
    def begin_row(self):
        self.output.inc(
            '<div id="%s-%s" class="%s %s%s%s">' %
            (self.element.getidattr(), self.wrap_type, self.element.etype,
             self.wrap_type, self.alt_class(), self.first_class())
        )
class StaticRenderer(FieldRenderer):
    """FieldRenderer for read-only elements: no required star, no errors."""
    def required(self):
        pass
    def errors(self):
        pass
class GroupRenderer(StaticRenderer):
    """Renders a group element: a wrapping row div containing its children,
    each rendered with the 'grpel' wrap type."""
    def begin_row(self):
        # The group's own element carries the row id/class attributes.
        self.element.set_attr('id', '%s-%s' % (self.element.getidattr(), self.wrap_type))
        class_str = '%s%s%s' % (self.wrap_type, self.alt_class(), self.first_class())
        self.element.add_attr('class', class_str)
        # HTML.tag should not close the div
        attrs = self.element.get_attrs()
        attrs['_closed'] = False
        self.output.inc(HTML.tag('div', **attrs))
    def field_wrapper(self):
        self.output.inc('<div id="%s-fw" class="group-wrapper%s">' %
                        (self.element.getidattr(), self.label_class))
    def render(self):
        self.begin()
        self.render_children()
        self.end()
    def render_children(self):
        # Children get their own first/alt striping, independent of the
        # group's position in the parent form.
        on_first = True
        on_alt = False
        for child in self.element.renderable_els:
            rcls = self.element.form._renderer(child)
            r = rcls(child, self.output, on_first, on_alt, 'grpel', self.settings)
            r.render()
            if r.uses_alt:
                on_alt = not on_alt
            if r.uses_first:
                on_first = False
def get_renderer(el):
    """Map a form element (or a whole form) to the renderer that draws it.

    Forms get a renderer *instance*; every other element type gets the
    renderer *class* (instantiated later by the caller with render state).
    Returns None for element types with no matching renderer.
    """
    passthrough_types = (
        element.HiddenElement,
    )
    field_types = (
        element.SelectElement,
        element.TextAreaElement,
    )
    static_types = (
        element.FixedElement,
        element.StaticElement,
        element.RadioElement,
        element.MultiCheckboxElement,
    )
    # Order matters: specific checks precede broader base-class checks.
    if isinstance(el, FormBase):
        return StaticFormRenderer(el) if el._static else FormRenderer(el)
    if isinstance(el, element.GroupElement):
        return GroupRenderer
    if isinstance(el, element.HeaderElement):
        return HeaderRenderer
    if isinstance(el, passthrough_types):
        return Renderer
    if isinstance(el, element.InputElementBase):
        return InputRenderer
    if isinstance(el, field_types):
        return FieldRenderer
    if isinstance(el, static_types):
        return StaticRenderer
| |
import collections
import operator
import pytest
from pandas.compat import PY2, PY36
import pandas as pd
from pandas.tests.extension import base
import pandas.util.testing as tm
from .array import JSONArray, JSONDtype, make_data
# Skip the whole module on Python 2: the JSON extension array is backed by
# collections.UserDict, which only exists on Python 3.
pytestmark = pytest.mark.skipif(PY2, reason="Py2 doesn't have a UserDict")
@pytest.fixture
def dtype():
    """The JSONDtype instance under test."""
    return JSONDtype()
@pytest.fixture
def data():
    """Length-100 JSONArray for semantics test."""
    data = make_data()
    # Why the while loop? NumPy is unable to construct an ndarray from
    # equal-length ndarrays. Many of our operations involve coercing the
    # EA to an ndarray of objects. To avoid random test failures, we ensure
    # that our data is coercable to an ndarray. Several tests deal with only
    # the first two elements, so that's what we'll check.
    while len(data[0]) == len(data[1]):
        data = make_data()
    return JSONArray(data)
@pytest.fixture
def data_missing():
    """Length 2 array with [NA, Valid]; the empty dict is the NA value."""
    return JSONArray([{}, {'a': 10}])
@pytest.fixture
def data_for_sorting():
    # Three distinct values given out of order; the base fixture contract
    # is [B, C, A] with expected sort order A < B < C -- presumably ordered
    # via _values_for_factorize; confirm against JSONArray implementation.
    return JSONArray([{'b': 1}, {'c': 4}, {'a': 2, 'c': 3}])
@pytest.fixture
def data_missing_for_sorting():
    # Base fixture contract: [B, NA, A]; ``{}`` plays the NA role.
    return JSONArray([{'b': 1}, {}, {'a': 4}])
@pytest.fixture
def na_value(dtype):
    """Scalar missing value for the dtype (``JSONDtype.na_value``)."""
    return dtype.na_value
@pytest.fixture
def na_cmp():
    # NA values ({}) compare equal with plain ==, so operator.eq suffices.
    return operator.eq
@pytest.fixture
def data_for_grouping():
    # Layout follows the base extension-test contract
    # [B, B, NA, NA, A, A, B, C], with ``{}`` as the NA value.
    return JSONArray([
        {'b': 1}, {'b': 1},
        {}, {},
        {'a': 0, 'c': 2}, {'a': 0, 'c': 2},
        {'b': 1},
        {'c': 2},
    ])
class BaseJSON(object):
    """Mixin replacing pandas' assertion helpers for JSON data.

    NumPy doesn't handle an array of equal-length UserDicts.
    The default assert_series_equal eventually does a
    Series.values, which raises. We work around it by
    converting the UserDicts to dicts.
    """
    def assert_series_equal(self, left, right, **kwargs):
        """Like tm.assert_series_equal, but json series are rebuilt on
        plain dicts first so ``Series.values`` does not raise."""
        if left.dtype.name == 'json':
            assert left.dtype == right.dtype
            left = pd.Series(JSONArray(left.values.astype(object)),
                             index=left.index, name=left.name)
            right = pd.Series(JSONArray(right.values.astype(object)),
                              index=right.index, name=right.name)
        tm.assert_series_equal(left, right, **kwargs)
    def assert_frame_equal(self, left, right, *args, **kwargs):
        """Compare json columns via assert_series_equal above, the
        remaining columns via tm.assert_frame_equal."""
        # Columns must match before column-by-column comparison is valid.
        tm.assert_index_equal(
            left.columns, right.columns,
            exact=kwargs.get('check_column_type', 'equiv'),
            check_names=kwargs.get('check_names', True),
            check_exact=kwargs.get('check_exact', False),
            check_categorical=kwargs.get('check_categorical', True),
            obj='{obj}.columns'.format(obj=kwargs.get('obj', 'DataFrame')))
        # BUG FIX: previously `(left.dtypes == 'json').index`, which is the
        # index of the boolean mask -- i.e. ALL columns, discarding the mask.
        # Select only the columns that actually hold json data.
        jsons = left.dtypes[left.dtypes == 'json'].index
        for col in jsons:
            self.assert_series_equal(left[col], right[col],
                                     *args, **kwargs)
        left = left.drop(columns=jsons)
        right = right.drop(columns=jsons)
        tm.assert_frame_equal(left, right, *args, **kwargs)
class TestDtype(BaseJSON, base.BaseDtypeTests):
    """Run the base dtype tests unchanged against JSONDtype."""
    pass
class TestInterface(BaseJSON, base.BaseInterfaceTests):
    """Base interface tests plus a sanity check of our custom asserts."""
    def test_custom_asserts(self):
        # This would always trigger the KeyError from trying to put
        # an array of equal-length UserDicts inside an ndarray.
        data = JSONArray([collections.UserDict({'a': 1}),
                          collections.UserDict({'b': 2}),
                          collections.UserDict({'c': 3})])
        a = pd.Series(data)
        self.assert_series_equal(a, a)
        self.assert_frame_equal(a.to_frame(), a.to_frame())
        b = pd.Series(data.take([0, 0, 1]))
        # Unequal data must still raise through the custom asserts.
        with pytest.raises(AssertionError):
            self.assert_series_equal(a, b)
        with pytest.raises(AssertionError):
            self.assert_frame_equal(a.to_frame(), b.to_frame())
class TestConstructors(BaseJSON, base.BaseConstructorsTests):
    """Base constructor tests; from-dtype construction is unimplemented."""
    @pytest.mark.skip(reason="not implemented constructor from dtype")
    def test_from_dtype(self, data):
        # construct from our dtype & string dtype
        pass
class TestReshaping(BaseJSON, base.BaseReshapingTests):
    """Base reshaping tests; stack/unstack differ on the NA definition."""
    @pytest.mark.skip(reason="Different definitions of NA")
    def test_stack(self):
        """
        The test does .astype(object).stack(). If we happen to have
        any missing values in `data`, then we'll end up with different
        rows since we consider `{}` NA, but `.astype(object)` doesn't.
        """
    @pytest.mark.xfail(reason="dict for NA")
    def test_unstack(self, data, index):
        # The base test has NaN for the expected NA value.
        # this matches otherwise
        # NOTE(review): bare super() is Python-3-only; fine here since the
        # module-level pytestmark skips the whole file on Python 2.
        return super().test_unstack(data, index)
class TestGetitem(BaseJSON, base.BaseGetitemTests):
    """Run the base indexing/getitem tests unchanged."""
    pass
class TestMissing(BaseJSON, base.BaseMissingTests):
    """Missing-value tests; filling with a dict-as-scalar is unsupported."""
    @pytest.mark.skip(reason="Setting a dict as a scalar")
    def test_fillna_series(self):
        """We treat dictionaries as a mapping in fillna, not a scalar."""
    @pytest.mark.skip(reason="Setting a dict as a scalar")
    def test_fillna_frame(self):
        """We treat dictionaries as a mapping in fillna, not a scalar."""
# Reusable markers: JSON values (dicts) are unhashable, and dict ordering
# is only guaranteed from Python 3.6 on.
unhashable = pytest.mark.skip(reason="Unhashable")
unstable = pytest.mark.skipif(not PY36,  # 3.6 or higher
                              reason="Dictionary order unstable")
class TestReduce(base.BaseNoReduceTests):
    """Reductions are not supported for JSONArray; expect them to raise."""
    pass
class TestMethods(BaseJSON, base.BaseMethodsTests):
    """Base method tests; hashing- and ordering-dependent tests are
    skipped or gated on Python 3.6 dict ordering (see markers above)."""
    @unhashable
    def test_value_counts(self, all_data, dropna):
        pass
    @unhashable
    def test_sort_values_frame(self):
        # TODO (EA.factorize): see if _values_for_factorize allows this.
        pass
    @unstable
    def test_argsort(self, data_for_sorting):
        super(TestMethods, self).test_argsort(data_for_sorting)
    @unstable
    def test_argsort_missing(self, data_missing_for_sorting):
        super(TestMethods, self).test_argsort_missing(
            data_missing_for_sorting)
    @unstable
    @pytest.mark.parametrize('ascending', [True, False])
    def test_sort_values(self, data_for_sorting, ascending):
        super(TestMethods, self).test_sort_values(
            data_for_sorting, ascending)
    @unstable
    @pytest.mark.parametrize('ascending', [True, False])
    def test_sort_values_missing(self, data_missing_for_sorting, ascending):
        super(TestMethods, self).test_sort_values_missing(
            data_missing_for_sorting, ascending)
    @pytest.mark.skip(reason="combine for JSONArray not supported")
    def test_combine_le(self, data_repeated):
        pass
    @pytest.mark.skip(reason="combine for JSONArray not supported")
    def test_combine_add(self, data_repeated):
        pass
    @pytest.mark.skip(reason="combine for JSONArray not supported")
    def test_combine_first(self, data):
        pass
    @unhashable
    def test_hash_pandas_object_works(self, data, kind):
        super().test_hash_pandas_object_works(data, kind)
    @pytest.mark.skip(reason="broadcasting error")
    def test_where_series(self, data, na_value):
        # Fails with
        # *** ValueError: operands could not be broadcast together
        # with shapes (4,) (4,) (0,)
        super().test_where_series(data, na_value)
    @pytest.mark.skip(reason="Can't compare dicts.")
    def test_searchsorted(self, data_for_sorting):
        super(TestMethods, self).test_searchsorted(data_for_sorting)
class TestCasting(BaseJSON, base.BaseCastingTests):
    """Base casting tests; astype(str) currently fails inside NumPy."""
    @pytest.mark.skip(reason="failing on np.array(self, dtype=str)")
    def test_astype_str(self):
        """This currently fails in NumPy on np.array(self, dtype=str) with
        *** ValueError: setting an array element with a sequence
        """
# We intentionally don't run base.BaseSetitemTests because pandas'
# internals has trouble setting sequences of values into scalar positions.
class TestGroupby(BaseJSON, base.BaseGroupbyTests):
    """Base groupby tests; unhashable dict values break several paths."""
    @unhashable
    def test_groupby_extension_transform(self):
        """
        This currently fails in Series.name.setter, since the
        name must be hashable, but the value is a dictionary.
        I think this is what we want, i.e. `.name` should be the original
        values, and not the values for factorization.
        """
    @unhashable
    def test_groupby_extension_apply(self):
        """
        This fails in Index._do_unique_check with
        > hash(val)
        E TypeError: unhashable type: 'UserDict' with
        I suspect that once we support Index[ExtensionArray],
        we'll be able to dispatch unique.
        """
    @unstable
    @pytest.mark.parametrize('as_index', [True, False])
    def test_groupby_extension_agg(self, as_index, data_for_grouping):
        super(TestGroupby, self).test_groupby_extension_agg(
            as_index, data_for_grouping
        )
class TestArithmeticOps(BaseJSON, base.BaseArithmeticOpsTests):
    """Arithmetic is unsupported on JSONArray; overrides relax/trap errors."""
    def test_error(self, data, all_arithmetic_operators):
        # Overridden as a no-op -- presumably the base error-check does not
        # apply to JSONArray; confirm against BaseArithmeticOpsTests.
        pass
    def test_add_series_with_extension_array(self, data):
        ser = pd.Series(data)
        with pytest.raises(TypeError, match="unsupported"):
            ser + data
    def _check_divmod_op(self, s, op, other, exc=NotImplementedError):
        # divmod on dicts raises TypeError rather than NotImplementedError.
        return super(TestArithmeticOps, self)._check_divmod_op(
            s, op, other, exc=TypeError
        )
class TestComparisonOps(BaseJSON, base.BaseComparisonOpsTests):
    """Run the base comparison-operator tests unchanged."""
    pass
class TestPrinting(BaseJSON, base.BasePrintingTests):
    """Run the base repr/printing tests unchanged."""
    pass
| |
"""Bioconda-Utils sphinx extension
This module builds the documentation for our recipes
To build the documentation locally, use e.g::
make -C docs/ BIOCONDA_FILTER_RECIPES=10 SPHINXOPTS="-E" html
.. rubric:: Environment Variables
.. envvar:: BIOCONDA_FILTER_RECIPES
Use this environment variable to reduce the number of recipes for
which documentation pages are built. If set to an integer
(including 0), the first *n* recipes are included. Otherwise, the
contents are considered a regular expression recipes must match to
be included.
"""
import os
import os.path as op
import re
import inspect
from typing import Any, Dict, List, Tuple, Optional
from jinja2.sandbox import SandboxedEnvironment
from sphinx import addnodes
from docutils import nodes
from docutils.parsers import rst
from docutils.statemachine import StringList
from sphinx.domains import Domain, ObjType, Index
from sphinx.directives import ObjectDescription
from sphinx.environment import BuildEnvironment
from sphinx.roles import XRefRole
from sphinx.util import logging as sphinx_logging
from sphinx.util import status_iterator
from sphinx.util.docfields import Field, GroupedField
from sphinx.util.nodes import make_refnode
from sphinx.util.parallel import ParallelTasks, parallel_available, make_chunks
from sphinx.util.rst import escape as rst_escape
from sphinx.util.osutil import ensuredir
from sphinx.util.docutils import SphinxDirective
from sphinx.jinja2glue import BuiltinTemplateLoader
from conda.exports import VersionOrder
from bioconda_utils.utils import RepoData, load_config
from bioconda_utils.recipe import Recipe, RecipeError
from bioconda_utils.githandler import BiocondaRepo
from bioconda_utils.lint import get_checks
# Acquire a logger
try:
logger = sphinx_logging.getLogger(__name__) # pylint: disable=invalid-name
except AttributeError: # not running within sphinx
import logging
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
def as_extlink_filter(text):
    """Jinja2 filter converting a list of identifiers to extlink format

    Args:
        text: list of strings of the form ``prefix:value``

    >>> as_extlink_filter(["biotools:abyss", "doi:123"])
    ["biotools: :biotools:`abyss`", "doi: :doi:`123`"]
    """
    def _fmt(ident):
        assert isinstance(ident, str), "identifier has to be a string"
        parts = ident.split(":", 1)
        assert len(parts) == 2, "identifier needs at least one colon"
        return "{0}: :{0}:`{1}`".format(*parts)
    assert isinstance(text, list), "identifiers have to be given as list"
    return [_fmt(ident) for ident in text]
def underline_filter(text):
    """Jinja2 filter adding =-underline to row of text

    >>> underline_filter("headline")
    "headline\\n========"
    """
    underline = "=" * len(text)
    return "\n".join((text, underline))
def rst_escape_filter(text):
    r"""Jinja2 filter escaping RST symbols in text

    Falsy input (None, empty string) is passed through unchanged.

    >>> rst_escape_filter("running `cmd.sh`")
    "running \`cmd.sh\`"
    """
    if text:
        return rst_escape(text)
    return text
def prefixes_filter(text, split):
    """Jinja2 filter yielding successive prefixes of *text*

    Splits ``text`` on ``split`` and yields, for each component, a dict
    with the component (``part``) and the joined prefix up to and
    including it (``path``).

    >>> list(prefixes_filter("a/b/c", "/"))
    [{'path': 'a', 'part': 'a'},
     {'path': 'a/b', 'part': 'b'},
     {'path': 'a/b/c', 'part': 'c'}]
    """
    parts = text.split(split)
    for depth, part in enumerate(parts, start=1):
        yield {'path': split.join(parts[:depth]), 'part': part}
def rst_link_filter(text, url):
    """Jinja2 filter creating RST link

    Returns *text* unchanged when *url* is empty or None.

    >>> rst_link_filter("bla", "https://somewhere")
    "`bla <https://somewhere>`_"
    """
    if not url:
        return text
    return f"`{text} <{url}>`_"
class Renderer:
    """Jinja2 template renderer

    - Loads and caches templates from paths configured in conf.py
    - Makes additional jinja filters available:
      - underline -- turn text into a RSt level 1 headline
      - escape -- escape RST special characters
      - as_extlink -- convert (list of) identifiers to extlink references
    """
    def __init__(self, app, extra_context):
        """Args:
            app: Sphinx application (supplies builder/template paths)
            extra_context: dict merged into every render context
        """
        template_loader = BuiltinTemplateLoader()
        template_loader.init(app.builder)
        template_env = SandboxedEnvironment(loader=template_loader)
        template_env.filters['rst_escape'] = rst_escape_filter
        template_env.filters['underline'] = underline_filter
        template_env.filters['as_extlink'] = as_extlink_filter
        template_env.filters['prefixes'] = prefixes_filter
        template_env.filters['rst_link'] = rst_link_filter
        self.env = template_env
        # template name -> compiled template; filled lazily by render()
        self.templates: Dict[str, Any] = {}
        self.extra_context = extra_context
    def render(self, template_name, context):
        """Render a template file to string
        Args:
            template_name: Name of template file
            context: dictionary to pass to jinja
        """
        try:
            template = self.templates[template_name]
        except KeyError:
            # load and cache on first use
            template = self.env.get_template(template_name)
            self.templates[template_name] = template
        return template.render(**context)
    def render_to_file(self, file_name, template_name, context):
        """Render a template file to a file
        Ensures that target directories exist and only writes
        the file if the content has changed.
        Args:
            file_name: Target file name
            template_name: Name of template file
            context: dictionary to pass to jinja (merged over extra_context)
        Returns:
            True if a file was written, False if it was unchanged
        """
        content = self.render(template_name, {**self.extra_context, **context})
        # skip if exists and unchanged:
        if os.path.exists(file_name):
            with open(file_name, encoding="utf-8") as filedes:
                if filedes.read() == content:
                    return False  # unchanged
        ensuredir(op.dirname(file_name))
        with open(file_name, "w", encoding="utf-8") as filedes:
            filedes.write(content)
        return True
class RequirementsField(GroupedField):
    """Field Type for ``.. conda:package::`` for specifying dependencies
    This does two things different than ``GroupedField``:
    - No ``--`` inserted between argument and value
    - Entry added to domain data ``backrefs`` so that we can
      use the requirements to collect required-by data later.
    """
    def make_field(self, types, domain, items, env=None, inliner=None, location=None):
        """Build the field node; also records a backref per dependency."""
        fieldname = nodes.field_name('', self.label)
        listnode = self.list_type()
        for fieldarg, content in items:
            par = nodes.paragraph()
            # cross-reference to the dependency's own package entry
            par.extend(self.make_xrefs(self.rolename, domain, fieldarg,
                                       addnodes.literal_strong, env=env,
                                       inliner=inliner, location=location))
            if content and content[0].astext():
                # optional version restriction after the package name
                par += nodes.Text(' ')
                par += content
            listnode += nodes.list_item('', par)
            # record: package currently being documented depends on fieldarg
            source = env.ref_context['conda:package']
            backrefs = env.domains['conda'].data['backrefs'].setdefault(fieldarg, set())
            backrefs.add((env.docname, source))
        fieldbody = nodes.field_body('', listnode)
        fieldbody.set_class('field-list-wrapped')
        return nodes.field('', fieldname, fieldbody)
class RequiredByField(Field):
    """Field Type for directive ``.. conda:package::`` for showing required-by
    This just creates the field name and field body with a ``pending_xref`` in the
    body that will later be filled with the reverse dependencies by
    `resolve_required_by_xrefs`
    """
    def make_field(self, types, domain, items, env=None, inliner=None, location=None):
        """Build the field with a deliberately unresolved placeholder xref."""
        fieldname = nodes.field_name('', self.label)
        # This xref is never resolvable by normal means; it is picked up
        # via the 'missing-reference' event by resolve_required_by_xrefs().
        backref = addnodes.pending_xref(
            '',
            refdomain="conda",
            reftype='requiredby', refexplicit=False,
            reftarget=env.ref_context['conda:package'],
            refdoc=env.docname
        )
        backref += nodes.inline('', '')
        fieldbody = nodes.field_body('', backref)
        return nodes.field('', fieldname, fieldbody)
def resolve_required_by_xrefs(app, env, node, contnode):
    """Now that all recipes and packages have been parsed, we are called here
    for each ``pending_xref`` node that sphinx has not been able to resolve.
    We handle specifically the ``requiredby`` reftype created by the
    `RequiredByField` fieldtype allowed in ``conda:package::``
    directives, where we replace the ``pending_ref`` node with a bullet
    list of reference nodes pointing to the package pages that
    "depended" on the package.
    """
    if node['reftype'] == 'requiredby' and node['refdomain'] == 'conda':
        target = node['reftarget']
        docname = node['refdoc']
        backrefs = env.domains['conda'].data['backrefs'].get(target, set())
        listnode = nodes.bullet_list()
        for back_docname, back_target in backrefs:
            par = nodes.paragraph()
            name_node = addnodes.literal_strong(back_target, back_target,
                                                classes=['xref', 'backref'])
            refnode = make_refnode(app.builder, docname,
                                   back_docname, back_target, name_node)
            refnode.set_class('conda-package')
            par += refnode
            listnode += nodes.list_item('', par)
        return listnode
    # implicit None: leave other unresolved xrefs to the default handling
class CondaObjectDescription(ObjectDescription):
    """Base class for ``ObjectDescription`` types in the `CondaDomain`"""
    typename = "[UNKNOWN]"
    option_spec = {
        'arch': rst.directives.unchanged,
        'badges': rst.directives.unchanged,
        'replaces_section_title': rst.directives.flag,
        'noindex': rst.directives.flag,
    }
    def handle_signature(self, sig: str, signode: addnodes.desc) -> str:
        """Transform signature into RST nodes"""
        signode += addnodes.desc_annotation(self.typename, self.typename + " ")
        signode += addnodes.desc_name(sig, sig)
        if 'badges' in self.options:
            # badges option content is parsed as nested RST
            badges = addnodes.desc_annotation()
            badges['classes'] += ['badges']
            content = StringList([self.options['badges']])
            self.state.nested_parse(content, 0, badges)
            signode += badges
        if 'replaces_section_title' in self.options:
            # Replace the enclosing section title with this signature.
            section = self.state.parent
            if isinstance(section, nodes.section):
                title = section[-1]
                if isinstance(title, nodes.title):
                    section.remove(title)
                else:
                    signode += self.state.document.reporter.warning(
                        "%s:%s:: must follow section directly to replace section title"
                        % (self.domain, self.objtype), line = self.lineno
                    )
            else:
                signode += self.state.document.reporter.warning(
                    "%s:%s:: must be in section to replace section title"
                    % (self.domain, self.objtype), line = self.lineno
                )
        return sig
    def add_target_and_index(self, name: str, sig: str,
                             signodes: addnodes.desc) -> None:
        """Add to index and to domain data"""
        target_name = "-".join((self.objtype, name))
        if target_name not in self.state.document.ids:
            signodes['names'].append(target_name)
            signodes['ids'].append(target_name)
            signodes['first'] = (not self.names)
            self.state.document.note_explicit_target(signodes)
            # register in the domain's object inventory (warn on duplicates)
            objects = self.env.domaindata[self.domain]['objects']
            key = (self.objtype, name)
            if key in objects:
                if hasattr(self.env, 'warn'):
                    self.env.warn(
                        self.env.docname,
                        "Duplicate entry {} {} at {} (other in {})".format(
                            self.objtype, name, self.lineno,
                            self.env.doc2path(objects[key][0])))
            objects[key] = (self.env.docname, target_name)
        index_text = self.get_index_text(name)
        if index_text:
            self.indexnode['entries'].append(('single', index_text, target_name, '', None))
    def get_index_text(self, name: str) -> str:
        """This yields the text with which the object is entered into the index."""
        return "{} ({})".format(name, self.objtype)
    def before_content(self):
        """We register ourselves in the ``ref_context`` so that a later
        call to ``:depends:`packagename``` knows within which package
        the dependency was added"""
        self.env.ref_context['conda:'+self.typename] = self.names[-1]
class CondaRecipe(CondaObjectDescription):
    """Directive ``.. conda:recipe::`` describing a Recipe
    """
    typename = "recipe"
class CondaPackage(CondaObjectDescription):
    """Directive ``.. conda:package::`` describing a Package
    This directive takes two specialized field types, ``requirements``
    and ``depends``:
    .. code:: rst
        .. conda:package:: mypkg1
           :depends mypkg2: 2.0
           :depends mypkg3:
           :requirements:
    ``:depends pkgname: [version]``
       Adds a dependency to the package.
    ``:requirements:``
       Lists packages which referenced this package via ``:depends pkgname:``
    """
    typename = "package"
    # field parsers for the directive body (see RequirementsField /
    # RequiredByField above)
    doc_field_types = [
        RequiredByField('requirements', names=('requirements',),
                        label=u'Required\u00a0By', has_arg=False),
        RequirementsField('depends', names=('depends', 'dependencies', 'deps'),
                          label="Depends", rolename='depends'),
    ]
class PackageIndex(Index):
    """Index of Packages"""
    name = "package_index"
    localname = "Package Index"
    shortname = "Packages"
    def generate(self, docnames: Optional[List[str]] = None):
        """build index

        Groups all domain objects by the first letter of their name;
        when *docnames* is given, only objects from those docs appear.
        """
        content = {}
        objects = sorted(self.domain.data['objects'].items())
        for (typ, name), (docname, labelid) in objects:
            if docnames and docname not in docnames:
                continue
            entries = content.setdefault(name[0].lower(), [])
            subtype = 0  # 1 has subentries, 2 is subentry
            entries.append((
                # TODO: Add meaningful info for extra/qualifier/description
                # fields, e.g., latest package version.
                # name, subtype, docname, labelid, 'extra', 'qualifier', 'description',
                name, subtype, docname, labelid, '', '', '',
            ))
        collapse = True
        return sorted(content.items()), collapse
class CondaDomain(Domain):
    """Domain for Conda Packages"""
    name = "conda"
    label = "Conda"
    object_types = {
        # ObjType(name, *roles, **attrs)
        'recipe': ObjType('recipe', 'recipe'),
        'package': ObjType('package', 'package', 'depends'),
    }
    directives = {
        'recipe': CondaRecipe,
        'package': CondaPackage,
    }
    roles = {
        'recipe': XRefRole(),
        'package': XRefRole(),
    }
    initial_data = {
        'objects': {}, #: (type, name) -> docname, labelid
        'backrefs': {} #: package_name -> docname, package_name
    }
    indices = [
        PackageIndex
    ]
    def clear_doc(self, docname: str):
        """Remove traces of a document in the domain-specific inventories."""
        if 'objects' not in self.data:
            return
        to_remove = [
            key for (key, (stored_docname, _)) in self.data['objects'].items()
            if docname == stored_docname
        ]
        for key in to_remove:
            del self.data['objects'][key]
    def resolve_any_xref(self, env: BuildEnvironment, fromdocname: str,
                         builder, target, node, contnode):
        """Resolve references from "any" role."""
        # Only packages are resolvable via "any"; recipes need their role.
        res = self.resolve_xref(env, fromdocname, builder, 'package', target, node, contnode)
        if res:
            return [('conda:package', res)]
        else:
            return []
    def resolve_xref(self, env: BuildEnvironment, fromdocname: str,
                     builder, role, target, node, contnode):
        """Resolve the ``pending_xref`` **node** with the given **role** and **target**."""
        for objtype in self.objtypes_for_role(role) or []:
            if (objtype, target) in self.data['objects']:
                node = make_refnode(
                    builder, fromdocname,
                    self.data['objects'][objtype, target][0],
                    self.data['objects'][objtype, target][1],
                    contnode, target + ' ' + objtype)
                node.set_class('conda-package')
                return node
            if objtype == "package":
                # Not documented here; try to link into other channels
                # configured via ``bioconda_other_channels``.
                for channel, urlformat in env.app.config.bioconda_other_channels.items():
                    if RepoData().get_package_data(channels=channel, name=target):
                        uri = urlformat.format(target)
                        node = nodes.reference('', '', internal=False,
                                               refuri=uri, classes=[channel])
                        node += contnode
                        return node
        return None  # triggers missing-reference
    def get_objects(self):
        """Yields "object description" 6-tuples
        ``name``: fully qualified name
        ``dispname``: name to display when searching/linking
        ``type``: object type, a key in ``self.object_types``
        ``docname``: the document where it is to be found
        ``anchor``: the anchor name for the object
        ``priority``: search priority
        - 1: default priority (placed before full-text matches)
        - 0: object is important (placed before default-priority objects)
        - 2: object is unimportant (placed after full-text matches)
        - -1: object should not show up in search at all
        """
        for (typ, name), (docname, ref) in self.data['objects'].items():
            dispname = "{} '{}'".format(typ, name)
            yield name, dispname, typ, docname, ref, 1
    def merge_domaindata(self, docnames: List[str], otherdata: Dict) -> None:
        """Merge in data regarding *docnames* from a different domaindata
        inventory (coming from a subprocess in parallel builds).
        """
        for (typ, name), (docname, ref) in otherdata['objects'].items():
            if docname in docnames:
                self.data['objects'][typ, name] = (docname, ref)
        # broken? (NOTE: references undefined `docname` outside the loop)
        #for key, data in otherdata['backrefs'].items():
        #    if docname in docnames:
        #        xdata = self.data['backrefs'].setdefault(key, set())
        #        xdata |= data
class AutoRecipesDirective(rst.Directive):
    """FIXME: This does not yet do ANYTHING!
    In theory, a directive like this should act as a hook for a repo
    to generate stubs for, similar to other autoXYZ directives.
    """
    required_arguments = 0
    # BUG FIX: was `optional_argument` (typo); docutils reads the
    # attribute `optional_arguments`.
    optional_arguments = 0
    option_spec = {
        'repo': rst.directives.unchanged,
        'folder': rst.directives.unchanged,
        'config': rst.directives.unchanged,
    }
    has_content = False
    def run(self):
        """Placeholder implementation: emits a single empty paragraph."""
        #self.env: BuildEnvironment = self.state.document.settings.env
        return [nodes.paragraph('')]
def generate_readme(recipe_basedir, output_dir, folder, repodata, renderer):
    """Generates README.rst for the recipe in folder
    Args:
        recipe_basedir: Base directory containing all recipe folders
        output_dir: Directory under which <folder>/README.rst is written
        folder: Toplevel folder name in recipes directory
        repodata: RepoData object
        renderer: Renderer object
    Returns:
        Single-element list with the path of the written README.rst,
        or an empty list if no meta.yaml was found or parsing failed.
    """
    output_file = op.join(output_dir, folder, 'README.rst')
    # Select meta yaml
    meta_fname = op.join(recipe_basedir, folder, 'meta.yaml')
    if not op.exists(meta_fname):
        # fall back to the first subfolder containing a meta.yaml
        for item in os.listdir(op.join(recipe_basedir, folder)):
            dname = op.join(recipe_basedir, folder, item)
            if op.isdir(dname):
                fname = op.join(dname, 'meta.yaml')
                if op.exists(fname):
                    meta_fname = fname
                    break
        else:
            # for/else: no subfolder had a meta.yaml
            logger.error("No 'meta.yaml' found in %s", folder)
            return []
    # NOTE(review): meta_relpath is currently unused
    meta_relpath = meta_fname[len(recipe_basedir)+1:]
    # Read the meta.yaml file(s)
    try:
        recipe = Recipe.from_file(recipe_basedir, meta_fname)
    except RecipeError as e:
        logger.error("Unable to process %s: %s", meta_fname, e)
        return []
    # Format the README
    packages = []
    for package in sorted(list(set(recipe.package_names))):
        versions_in_channel = set(repodata.get_package_data(['version', 'build_number'],
                                                            channels='bioconda', name=package))
        # newest (version, build_number) first
        sorted_versions = sorted(versions_in_channel,
                                 key=lambda x: (VersionOrder(x[0]), x[1]),
                                 reverse=True)
        if sorted_versions:
            # dependencies of the most recent build, split into
            # (name, version-restriction) pairs
            depends = [
                depstring.split(' ', 1) if ' ' in depstring else (depstring, '')
                for depstring in
                repodata.get_package_data('depends', name=package,
                                          version=sorted_versions[0][0],
                                          build_number=sorted_versions[0][1],
                                          )[0]
            ]
        else:
            depends = []
        packages.append({
            'name': package,
            'versions': ['-'.join(str(w) for w in v) for v in sorted_versions],
            'depends' : depends,
        })
    template_options = {
        'name': recipe.name,
        'about': recipe.get('about', None),
        'extra': recipe.get('extra', None),
        'recipe': recipe,
        'packages': packages,
    }
    renderer.render_to_file(output_file, 'readme.rst_t', template_options)
    return [output_file]
def generate_recipes(app):
    """Generates recipe RST files
    - Checks out repository
    - Prepares `RepoData`
    - Selects recipes (if `BIOCONDA_FILTER_RECIPES` in environment)
    - Dispatches calls to `generate_readme` for each recipe
    - Removes old RST files
    Args:
        app: Sphinx application; uses config values ``bioconda_repo_url``,
             ``bioconda_recipes_path`` and ``bioconda_config_file``.
    """
    source_dir = app.env.srcdir
    doctree_dir = app.env.doctreedir  # .../build/doctrees
    repo_dir = op.join(op.dirname(app.env.srcdir), "_bioconda_recipes")
    recipe_basedir = op.join(repo_dir, app.config.bioconda_recipes_path)
    repodata_cache_file = op.join(doctree_dir, 'RepoDataCache.pkl')
    repo_config_file = os.path.join(repo_dir, app.config.bioconda_config_file)
    output_dir = op.join(source_dir, 'recipes')
    # Initialize Repo and point globals at the right place
    repo = BiocondaRepo(folder=repo_dir, home=app.config.bioconda_repo_url)
    repo.checkout_master()
    load_config(repo_config_file)
    logger.info("Preloading RepoData")
    repodata = RepoData()
    repodata.set_cache(repodata_cache_file)
    repodata.df  # pylint: disable=pointless-statement
    logger.info("Preloading RepoData (done)")
    # Collect recipe names; BIOCONDA_FILTER_RECIPES is either an integer
    # (take the first n) or a regex the recipe names must match.
    recipe_dirs = os.listdir(recipe_basedir)
    if 'BIOCONDA_FILTER_RECIPES' in os.environ:
        limiter = os.environ['BIOCONDA_FILTER_RECIPES']
        try:
            recipe_dirs = recipe_dirs[:int(limiter)]
        except ValueError:
            match = re.compile(limiter)
            recipe_dirs = [recipe for recipe in recipe_dirs
                           if match.search(recipe)]
    # Set up renderer preparing recipe readme.rst files
    # BUG FIX: was `.rstrip(".git")`, which strips any trailing '.', 'g',
    # 'i' or 't' characters (a character set, not a suffix) and would
    # mangle repo URLs ending in those letters. Remove the literal suffix.
    base_url = app.config.bioconda_repo_url
    if base_url.endswith(".git"):
        base_url = base_url[:-len(".git")]
    recipe_base_url = "{base}/tree/master/{recipes}/".format(
        base=base_url,
        recipes=app.config.bioconda_recipes_path
    )
    renderer = Renderer(app, {'gh_recipes': recipe_base_url})
    recipes: List[str] = []
    # Use parallel workers only when it pays off.
    if parallel_available and len(recipe_dirs) > 5:
        nproc = app.parallel
    else:
        nproc = 1
    if nproc == 1:
        for folder in status_iterator(
                recipe_dirs,
                'Generating package READMEs...',
                "purple", len(recipe_dirs), app.verbosity):
            if not op.isdir(op.join(recipe_basedir, folder)):
                logger.error("Item '%s' in recipes folder is not a folder",
                             folder)
                continue
            recipes.extend(generate_readme(recipe_basedir, output_dir, folder, repodata, renderer))
    else:
        tasks = ParallelTasks(nproc)
        chunks = make_chunks(recipe_dirs, nproc)
        def process_chunk(chunk):
            # runs in a worker process; returns README paths for its chunk
            _recipes: List[Dict[str, Any]] = []
            for folder in chunk:
                if not op.isdir(op.join(recipe_basedir, folder)):
                    logger.error("Item '%s' in recipes folder is not a folder",
                                 folder)
                    continue
                _recipes.extend(generate_readme(recipe_basedir, output_dir, folder, repodata, renderer))
            return _recipes
        def merge_chunk(_chunk, res):
            # runs in the main process; collects worker results
            recipes.extend(res)
        for chunk in status_iterator(
                chunks,
                'Generating package READMEs with {} threads...'.format(nproc),
                "purple", len(chunks), app.verbosity):
            tasks.add_task(process_chunk, chunk, merge_chunk)
        logger.info("waiting for workers...")
        tasks.join()
    # Remove stale READMEs and prune now-empty directories (bottom-up).
    files_wanted = set(recipes)
    for root, dirs, files in os.walk(output_dir, topdown=False):
        for fname in files:
            path = op.join(root, fname)
            if path not in files_wanted:
                os.unlink(path)
        for dname in dirs:
            try:
                os.rmdir(op.join(root, dname))
            except OSError:
                pass
def add_ribbon(app, pagename, templatename, context, doctree):
    """Adds "Edit me on GitHub" Ribbon to pages
    This hooks into ``html-page-context`` event and adds the parameters
    ``git_ribbon_url`` and ``git_ribbon_message`` to the context from
    which the HTML templates (``layout.html``) are expanded.
    It understands three types of pages:
    - ``_autosummary`` and ``_modules`` prefixed pages are assumed to
      be code and link to the ``bioconda-utils`` repo
    - ``recipes/*/README`` pages are assumed to be recipes and link
      to the ``meta.yaml``
    - all others are assumed to be RST docs and link to the ``docs/source/``
      folder in ``bioconda-utils``
    TODO:
      Fix hardcoding of values, should be a mapping that comes from
      ``conf.py``.
    """
    if templatename != 'page.html':
        return
    if pagename.startswith('_autosummary') or pagename.startswith('_modules'):
        # module page: "prefix/pkg.mod" -> "pkg/mod.py"
        _, _, path = pagename.partition('/')
        path = path.replace('.', '/') + '.py'
        repo = 'bioconda-utils'
    elif pagename.startswith('recipes/') and pagename.endswith('/README'):
        # recipe page: keep the folder, point at its meta.yaml
        repo = 'bioconda-recipes'
        path = pagename[:-len('README')] + 'meta.yaml'
    else:
        repo = 'bioconda-utils'
        path = 'docs/source/' + os.path.relpath(doctree.get('source'), app.builder.srcdir)
    context['git_ribbon_url'] = (f'https://github.com/bioconda/{repo}/'
                                 f'edit/master/{path}')
    context['git_ribbon_message'] = "Edit me on GitHub"
class LintDescriptionDirective(SphinxDirective):
    """Directive ``lint-check`` rendering the docstring of a lint check.

    The single argument is a check name from
    `bioconda_utils.lint.get_checks`. Each check may be documented at
    most once; `finalize` logs checks that were never documented.
    """
    required_arguments = 1
    # BUG FIX: was `optional_argument` (typo); docutils reads the
    # attribute `optional_arguments`.
    optional_arguments = 0
    has_content = True
    add_index = True
    def run(self):
        # Build the name -> check map lazily, once per environment.
        if not hasattr(self.env, 'bioconda_lint_checks'):
            self.env.bioconda_lint_checks = {str(check): check for check in get_checks()}
        # gather data
        check_name = self.arguments[0]
        if check_name not in self.env.bioconda_lint_checks:
            # BUG FIX: `self.error()` only *returns* a DirectiveError; it
            # must be raised, otherwise execution fell through to a
            # KeyError below. Missing names mean either an unknown check
            # or one already documented (checks are popped on use).
            raise self.error(
                "Unknown or already documented lint check: " + check_name)
        check = self.env.bioconda_lint_checks.pop(check_name)
        # Locate the check's source so reported line numbers are useful.
        _, lineno = inspect.getsourcelines(check)
        lineno += 1
        fname = inspect.getfile(check)
        doclines = inspect.getdoc(check).splitlines()
        docline_src = [(fname, i)
                       for i in range(lineno, lineno+len(doclines))]
        lines = StringList(doclines, items=docline_src)
        # create a new section with title
        section = nodes.section(ids=[nodes.make_id(check_name)])
        title_text = f'":py:class:`{check_name}`"'
        title_nodes, messages = self.state.inline_text(title_text, self.lineno)
        title = nodes.title(check_name, '', *title_nodes)
        section += title
        # First docstring line becomes the admonition title, the rest its
        # body (parsed as nested RST).
        admonition = nodes.admonition()
        title_text = doclines[0].rstrip('.')
        title_nodes, messages = self.state.inline_text(title_text, lineno)
        title = nodes.title(title_text, '', *title_nodes)
        admonition += title
        admonition += messages
        self.state.nested_parse(lines[1:], 0, admonition)
        section += admonition
        # add remaining content of directive
        par = nodes.paragraph()
        self.state.nested_parse(self.content, self.content_offset, par)
        section += par
        return [section]
    @classmethod
    def finalize(cls, app, env):
        """Log an error for every lint check that was never documented."""
        # TODO: Check why 'bioconda_lint_checks' is not added to env in
        # parallel runs (i.e., raises AttributeError without getattr).
        for check in getattr(env, 'bioconda_lint_checks', {}):
            logger.error("Undocumented lint checks: %s", check)
def setup(app):
    """Set up sphinx extension

    Registers the conda domain, the directives, the event hooks and
    the ``bioconda_*`` configuration values. Returns the extension
    metadata dict (version, parallel-safety flags).
    """
    app.add_domain(CondaDomain)
    app.add_directive('autorecipes', AutoRecipesDirective)
    app.add_directive('lint-check', LintDescriptionDirective)
    # event hooks: recipe generation, lint-check bookkeeping,
    # required-by resolution and the GitHub edit ribbon
    app.connect('builder-inited', generate_recipes)
    app.connect('env-updated', LintDescriptionDirective.finalize)
    app.connect('missing-reference', resolve_required_by_xrefs)
    app.connect('html-page-context', add_ribbon)
    # config values; 'env' => changing them invalidates the build env
    app.add_config_value('bioconda_repo_url', '', 'env')
    app.add_config_value('bioconda_recipes_path', 'recipes', 'env')
    app.add_config_value('bioconda_config_file', 'config.yml', 'env')
    app.add_config_value('bioconda_other_channels', {}, 'env')
    return {
        'version': "0.0.1",
        'parallel_read_safe': True,
        'parallel_write_safe': True
    }
| |
import json
import logging
import os
import shutil
import sys
import time
import warnings
# Dropping a table inexplicably produces a warning despite
# the "IF EXISTS" clause. Squelch these warnings.
warnings.simplefilter("ignore")
import MySQLdb
import utils
# Maps well-known test tablet UIDs to their cell; UIDs not listed here
# default to 'nj' (see Tablet.__init__).
tablet_cell_map = {
    62344: 'nj',
    62044: 'nj',
    41983: 'nj',
    31981: 'ny',
}
class Tablet(object):
  """Test-harness wrapper around one vttablet process and the MySQL
  instance backing it (Python 2 vitess integration tests).

  Instances allocate their own uid and ports, and drive the process
  lifecycle via the ``mysqlctl``/``vttablet``/``vtctl`` binaries found
  under ``utils.vtroot``.
  """
  default_uid = 62344   # base uid; instances get default_uid + seq
  seq = 0               # class-wide counter used to derive unique uids
  tablets_running = 0   # running vttablet count, checked at test teardown

  # Per-role MySQL connection settings serialized into db-configs.json.
  default_db_config = {
    "app": {
      "uname": "vt_app",
      "charset": "utf8"
    },
    "dba": {
      "uname": "vt_dba",
      "charset": "utf8"
    },
    "repl": {
      "uname": "vt_repl",
      "charset": "utf8"
    }
  }

  def __init__(self, tablet_uid=None, port=None, mysql_port=None, cell=None):
    """Allocate uid, ports and cell for a new (not yet started) tablet."""
    self.tablet_uid = tablet_uid or (Tablet.default_uid + Tablet.seq)
    self.port = port or (utils.reserve_ports(1))
    self.mysql_port = mysql_port or (utils.reserve_ports(1))
    Tablet.seq += 1
    if cell:
      self.cell = cell
    else:
      # NOTE(review): the lookup uses the constructor argument, so an
      # auto-derived uid (tablet_uid=None) always falls back to 'nj'.
      self.cell = tablet_cell_map.get(tablet_uid, 'nj')
    self.proc = None
    # filled in during init_tablet
    self.keyspace = None
    self.shard = None
    # utility variables
    self.tablet_alias = 'test_%s-%010d' % (self.cell, self.tablet_uid)
    self.zk_tablet_path = '/zk/test_%s/vt/tablets/%010d' % (self.cell, self.tablet_uid)
    self.zk_pid = self.zk_tablet_path + '/pid'

  def mysqlctl(self, cmd, quiet=False, extra_my_cnf=None):
    """Run a mysqlctl subcommand in the background and return the process.

    `quiet` is currently accepted but unused.  `extra_my_cnf`, when set,
    is exported via the EXTRA_MY_CNF environment variable.
    """
    utils.prog_compile(['mysqlctl'])
    env = None
    if extra_my_cnf:
      env = os.environ.copy()
      env['EXTRA_MY_CNF'] = extra_my_cnf
    return utils.run_bg(os.path.join(utils.vtroot, 'bin', 'mysqlctl') +
                        ' -log_dir %s -tablet-uid %u %s' %
                        (utils.tmp_root, self.tablet_uid, cmd),
                        env=env)

  def init_mysql(self, extra_my_cnf=None):
    """Initialize the MySQL data directory for this tablet."""
    return self.mysqlctl('-port %u -mysql-port %u init' % (self.port, self.mysql_port), quiet=True, extra_my_cnf=extra_my_cnf)

  def start_mysql(self):
    """Start the MySQL server for this tablet."""
    return self.mysqlctl('-port %u -mysql-port %u start' % (self.port, self.mysql_port), quiet=True)

  def shutdown_mysql(self):
    """Gracefully stop the MySQL server (data is kept)."""
    return self.mysqlctl('-port %u -mysql-port %u shutdown' % (self.port, self.mysql_port), quiet=True)

  def teardown_mysql(self):
    """Force-teardown MySQL, discarding its state."""
    return self.mysqlctl('teardown -force', quiet=True)

  def remove_tree(self):
    """Delete this tablet's on-disk data directory, ignoring errors."""
    path = '%s/vt_%010d' % (utils.vtdataroot, self.tablet_uid)
    try:
      shutil.rmtree(path)
    except OSError as e:
      if utils.options.verbose == 2:
        print >> sys.stderr, e, path

  def mysql_connection_parameters(self, dbname, user='vt_dba'):
    """Return kwargs for MySQLdb.Connect against this tablet's socket."""
    return dict(user=user,
                unix_socket='%s/vt_%010d/mysql.sock' % (utils.vtdataroot, self.tablet_uid),
                db=dbname)

  def connect(self, dbname='', user='vt_dba'):
    """Open a direct MySQL connection; returns (connection, cursor)."""
    conn = MySQLdb.Connect(
      **self.mysql_connection_parameters(dbname, user))
    return conn, conn.cursor()

  # Query the MySQL instance directly
  def mquery(self, dbname, query, write=False, user='vt_dba'):
    """Execute one query or a list of queries; returns the last result set.

    When `write` is True the statements run inside a single transaction.
    NOTE(review): only fetchall() is inside try/finally, so the connection
    leaks if execute() raises -- consider widening the try block.
    """
    conn, cursor = self.connect(dbname, user=user)
    if write:
      conn.begin()
    if isinstance(query, basestring):
      query = [query]
    for q in query:
      # logging.debug("mysql(%s,%s): %s", self.tablet_uid, dbname, q)
      cursor.execute(q)
    if write:
      conn.commit()
    try:
      return cursor.fetchall()
    finally:
      conn.close()

  # path is either:
  # - keyspace/shard for vttablet and vttablet-streaming
  # - zk path for vtdb, vtdb-streaming
  def vquery(self, query, path='', user=None, password=None, driver=None,
             verbose=False, raise_on_error=True):
    """Query through the vitess client layer (vtclient2) instead of MySQL."""
    return utils.vtclient2(self.port, path, query, user=user,
                           password=password, driver=driver,
                           verbose=verbose, raise_on_error=raise_on_error)

  def assert_table_count(self, dbname, table, n, where=''):
    """Raise TestError unless `table` holds exactly `n` rows (optionally filtered)."""
    result = self.mquery(dbname, 'select count(*) from ' + table + ' ' + where)
    if result[0][0] != n:
      raise utils.TestError("expected %u rows in %s" % (n, table), result)

  def reset_replication(self):
    """Wipe all master/slave replication state on this MySQL instance."""
    self.mquery('', [
        'RESET MASTER',
        'STOP SLAVE',
        'RESET SLAVE',
        'CHANGE MASTER TO MASTER_HOST = ""',
    ])

  def populate(self, dbname, create_sql, insert_sqls=[]):
    """(Re)create `dbname`, run the DDL, then the inserts transactionally.

    NOTE(review): `insert_sqls=[]` is a mutable default; harmless here
    because it is never mutated, but worth normalizing to None.
    """
    self.create_db(dbname)
    if isinstance(create_sql, basestring):
      create_sql= [create_sql]
    for q in create_sql:
      self.mquery(dbname, q)
    for q in insert_sqls:
      self.mquery(dbname, q, write=True)

  def has_db(self, name):
    """Return True if a database named `name` exists."""
    rows = self.mquery('', 'show databases')
    for row in rows:
      dbname = row[0]
      if dbname == name:
        return True
    return False

  def drop_db(self, name):
    """Drop `name`, polling until MySQL actually reports it gone."""
    self.mquery('', 'drop database if exists %s' % name)
    while self.has_db(name):
      logging.debug("%s sleeping while waiting for database drop: %s",
                    self.tablet_alias, name)
      time.sleep(0.3)
      self.mquery('', 'drop database if exists %s' % name)

  def create_db(self, name):
    """Create `name` from scratch (drops any existing database first)."""
    self.drop_db(name)
    self.mquery('', 'create database %s' % name)

  def clean_dbs(self):
    """Drop every user database, keeping MySQL/vitess system schemas."""
    logging.debug("mysql(%s): removing all databases", self.tablet_uid)
    rows = self.mquery('', 'show databases')
    for row in rows:
      dbname = row[0]
      if dbname in ['information_schema', '_vt', 'mysql']:
        continue
      self.drop_db(dbname)

  def wait_check_db_var(self, name, value):
    """Like check_db_var, but retry up to 3 times (1s apart) before failing."""
    for _ in range(3):
      try:
        return self.check_db_var(name, value)
      except utils.TestError as e:
        print >> sys.stderr, 'WARNING: ', e
        time.sleep(1.0)
    # Python 2: `e` from the except clause is still in scope here.
    raise e

  def check_db_var(self, name, value):
    """Raise TestError unless the MySQL variable `name` equals `value`."""
    row = self.get_db_var(name)
    if row != (name, value):
      raise utils.TestError('variable not set correctly', name, row)

  def get_db_var(self, name):
    """Return the (name, value) row for a MySQL server variable."""
    conn, cursor = self.connect()
    try:
      cursor.execute("show variables like '%s'" % name)
      return cursor.fetchone()
    finally:
      conn.close()

  def update_addrs(self, addr=None, secure_addr=None, mysql_addr=None, mysql_ip_addr=None):
    """Run UpdateTabletAddrs with whichever addresses were provided.

    Returns (None, None) without calling vtctl when no address was given.
    """
    args = ['UpdateTabletAddrs']
    if addr:
      args.extend(['-addr', addr])
    if secure_addr:
      args.extend(['-secure-addr', secure_addr])
    if mysql_addr:
      args.extend(['-mysql-addr', mysql_addr])
    if mysql_ip_addr:
      args.extend(['-mysql-ip-addr', mysql_ip_addr])
    args.append(self.tablet_alias)
    # args always holds the command name plus the alias; anything less
    # than 3 entries means no address flag was added above.
    if len(args) < 3:
      return None, None
    return utils.run_vtctl(args)

  def scrap(self, force=False, skip_rebuild=False):
    """Scrap this tablet via vtctl."""
    args = ['ScrapTablet']
    if force:
      args.append("-force")
    if skip_rebuild:
      args.append("-skip-rebuild")
    args.append(self.tablet_alias)
    utils.run_vtctl(args, auto_log=True)

  def init_tablet(self, tablet_type, keyspace=None, shard=None, force=True, start=False, dbname=None, parent=True, **kwargs):
    """Register this tablet in the topology and optionally start vttablet.

    Serving types (master/replica/rdonly/batch) are expected to come up
    SERVING; everything else NOT_SERVING.
    """
    self.keyspace = keyspace
    self.shard = shard
    if dbname is None:
      self.dbname = "vt_" + (self.keyspace or "database")
    else:
      self.dbname = dbname
    args = ['InitTablet']
    if force:
      args.append('-force')
    if parent:
      args.append('-parent')
    if dbname:
      args.append('-db-name-override='+dbname)
    args.extend([self.tablet_alias,
                 'localhost',
                 str(self.mysql_port),
                 str(self.port)])
    if keyspace:
      args.append(keyspace)
    else:
      args.append('')
    if shard:
      args.append(shard)
    else:
      args.append('')
    args.append(tablet_type)
    utils.run_vtctl(args)
    if start:
      if tablet_type == 'master' or tablet_type == 'replica' or tablet_type == 'rdonly' or tablet_type == 'batch':
        expected_state = "SERVING"
      else:
        expected_state = "NOT_SERVING"
      self.start_vttablet(wait_for_state=expected_state, **kwargs)

  @property
  def tablet_dir(self):
    """On-disk data directory for this tablet."""
    return "%s/vt_%010d" % (utils.vtdataroot, self.tablet_uid)

  def flush(self):
    """Ask the running vttablet to flush its logs."""
    utils.run(['curl', '-s', '-N', 'http://localhost:%s/debug/flushlogs' % (self.port)], stderr=utils.devnull, stdout=utils.devnull)

  def start_vttablet(self, port=None, auth=False, memcache=False, wait_for_state="SERVING", customrules=None, schema_override=None, cert=None, key=None, ca_cert=None, repl_extra_flags={}):
    """
    Starts a vttablet process, and returns it.
    The process is also saved in self.proc, so it's easy to kill as well.
    """
    utils.prog_compile(['vtaction',
                        'vttablet',
                        ])
    args = [os.path.join(utils.vtroot, 'bin', 'vttablet'),
            '-port', '%s' % (port or self.port),
            '-tablet-path', self.tablet_alias,
            '-log_dir', self.tablet_dir,
            '-db-configs-file', self._write_db_configs_file(repl_extra_flags)]
    if memcache:
      # Run a memcached row cache over a unix socket in the tablet dir.
      memcache = os.path.join(self.tablet_dir, "memcache.sock")
      config = os.path.join(self.tablet_dir, "config.json")
      with open(config, 'w') as f:
        json.dump({"RowCache": ["memcached", "-s", memcache]}, f)
      args.extend(["-queryserver-config-file", config])
    if auth:
      args.extend(['-auth-credentials', os.path.join(utils.vttop, 'test', 'test_data', 'authcredentials_test.json')])
    if customrules:
      args.extend(['-customrules', customrules])
    if schema_override:
      args.extend(['-schema-override', schema_override])
    if cert:
      self.secure_port = utils.reserve_ports(1)
      args.extend(['-secure-port', '%s' % self.secure_port,
                   '-cert', cert,
                   '-key', key])
      if ca_cert:
        args.extend(['-ca-cert', ca_cert])
    stderr_fd = open(os.path.join(self.tablet_dir, "vttablet.stderr"), "w")
    # increment count only the first time
    if not self.proc:
      Tablet.tablets_running += 1
    self.proc = utils.run_bg(args, stderr=stderr_fd)
    stderr_fd.close()
    # wait for zookeeper PID just to be sure we have it
    utils.run(utils.vtroot+'/bin/zk wait -e ' + self.zk_pid, stdout=utils.devnull)
    # wait for query service to be in the right state
    self.wait_for_vttablet_state(wait_for_state, port=port)
    return self.proc

  def wait_for_vttablet_state(self, expected, timeout=5.0, port=None):
    """Poll /debug/vars until the query service reaches `expected` state.

    Raises TestError if the state is not reached within `timeout` seconds.
    """
    while True:
      v = utils.get_vars(port or self.port)
      if v == None:
        logging.debug("  vttablet %s not answering at /debug/vars, waiting...", self.tablet_alias)
      else:
        if 'Voltron' not in v:
          logging.debug("  vttablet %s not exporting Voltron, waiting...", self.tablet_alias)
        else:
          s = v['Voltron']['States']['Current']
          if s != expected:
            logging.debug("  vttablet %s in state %s != %s", self.tablet_alias, s, expected)
          else:
            break
      logging.debug("sleeping a bit while we wait")
      time.sleep(0.1)
      timeout -= 0.1
      if timeout <= 0:
        raise utils.TestError("timeout waiting for state %s" % expected)

  def _write_db_configs_file(self, repl_extra_flags={}):
    """Write db-configs.json into the tablet dir and return its path."""
    config = dict(self.default_db_config)
    if self.keyspace:
      config['app']['dbname'] = self.dbname
      config['dba']['dbname'] = self.dbname
      config['repl']['dbname'] = self.dbname
    config['repl'].update(repl_extra_flags)
    path = os.path.join(self.tablet_dir, 'db-configs.json')
    with open(path, 'w') as fi:
      json.dump(config, fi)
    return path

  def kill_vttablet(self):
    """Terminate the vttablet process (if running) and wait for exit."""
    logging.debug("killing vttablet: %s", self.tablet_alias)
    if self.proc is not None:
      Tablet.tablets_running -= 1
      self.proc.terminate()
      self.proc.wait()
      self.proc = None

  @classmethod
  def check_vttablet_count(klass):
    """Fail the test run if any vttablet was started but never killed."""
    if Tablet.tablets_running > 0:
      raise utils.TestError("This test is not killing all its vttablets")
| |
'''
Created on 12 Aug 2017
@author: Mathias Bucher
'''
from Tkinter import Frame
from PIL import Image, ImageTk
import io
from VStyles import rootColor, getLargeTextBlue, getLabelBlue, getImageLabel, getLargeText, getSmallText, getLabel, getFrame, getMenu
class VEntry(Frame):
    '''
    Shows an entry and allows changes on it.

    Renders the entry's name, description, tags, images and files in a
    fixed grid (rows 0..4) and wires right-click context menus for the
    tag/image/file sections.  All user actions are dispatched through
    the callback dict passed in as ``actions``.
    '''
    # layout constants (characters / pixels)
    width10=50
    width15=32
    imageSize=100,100
    # placeholder texts shown when a section is empty
    tagPrompt = "right click here and add tags"
    tagEnterPrompt = "enter tag"
    imagePrompt = "right click here and add images"
    filePrompt = "right click here and add files"

    def __init__(self, parent, log, actions):
        '''
        Constructor.

        :param parent: Tk parent widget
        :param log: application logger with ``add(level, file, msg)``
        :param actions: dict of callbacks keyed by action name (may be None)
        '''
        Frame.__init__(self, parent)
        self.configure(bg=rootColor)
        self.log = log
        self.actions = actions
        self.log.add(self.log.Info, __file__, "init" )

    def drawEntry(self, entry):
        '''Draws an entry. If the entry is None, it does nothing'''
        if entry != None:
            # row 0: entry name, row 1: description (both editable)
            self.nameText = getLargeText(self, entry.name)
            self.nameText.grid(row=0, column=0)
            self.nameText.bind( "<Return>", self.returnPressedInTextFields)
            self.description = getSmallText(self, entry.description)
            self.description.grid(row=1, column=0)
            self.description.bind( "<Return>", self.returnPressedInTextFields)
            # row 2: tags with a new/delete context menu
            self.tagRightClickMenu = getMenu(self)
            self.tagRightClickMenu.add_command(label="new",
                                               command=self.newTagClicked)
            self.tagRightClickMenu.add_command(label="delete",
                                               command=self.deleteTagClicked)
            self.tags = getFrame(self)
            # if there are no tags, place label which prompts user to enter some
            if entry.tags.__len__() == 0:
                prompt = getLabelBlue(self.tags, text=self.tagPrompt)
                prompt.grid(row=2, column=0)
                prompt.bind("<Button-3>", self.showTagRightClickMenu)
            else:
                for i, key in enumerate(entry.tags):
                    keyLabel = getLabelBlue(self.tags, text=key)
                    keyLabel.grid(row=2, column=i)
                    keyLabel.bind("<Button-3>", self.showTagRightClickMenu)
            self.tags.grid()
            # row 3: image thumbnails with a new/delete context menu
            self.imageRightClickMenu = getMenu(self)
            self.imageRightClickMenu.add_command(label="new",
                                                 command=self.newImageClicked)
            self.imageRightClickMenu.add_command(label="delete",
                                                 command=self.deleteImageClicked)
            self.images = getFrame(self)
            # if there are no images, place label which prompts user to enter some
            if entry.images.__len__() == 0:
                prompt = getLabelBlue(self.images, text=self.imagePrompt)
                prompt.grid(row=3, column=0)
                prompt.bind("<Button-3>", self.showImageRightClickMenu)
            else:
                for i, (key, _content) in enumerate(entry.images.iteritems()):
                    # decode raw bytes into a thumbnail-sized PhotoImage
                    iobytes = io.BytesIO(_content)
                    img = Image.open(iobytes)
                    img.thumbnail(self.imageSize, Image.ANTIALIAS )
                    photoimg = ImageTk.PhotoImage(img)
                    imgLabel = getImageLabel(self.images, image=photoimg)
                    imgLabel["text"] = key
                    # keep a reference so Tk does not garbage-collect the image
                    imgLabel.image = photoimg
                    imgLabel.grid(row=3, column=i)
                    imgLabel.bind("<Button-1>", self.fileLeftClicked)
                    imgLabel.bind("<Button-3>", self.showImageRightClickMenu)
            self.images.grid()
            # row 4: attached files with a new/delete context menu
            self.fileRightClickMenu = getMenu(self)
            self.fileRightClickMenu.add_command(label="new",
                                                command=self.newFileClicked)
            self.fileRightClickMenu.add_command(label="delete",
                                                command=self.deleteFileClicked)
            self.files = getFrame(self)
            # if there are no files, place label which prompts user to enter some
            if entry.files.__len__() == 0:
                prompt = getLabelBlue(self.files, text=self.filePrompt)
                prompt.grid(row=4, column=0)
                prompt.bind("<Button-3>", self.showFilesRightClickMenu)
            else:
                for i, (key, _content) in enumerate(entry.files.iteritems()):
                    lbl = getLabelBlue(self.files, key)
                    lbl.grid(row=4, column=i)
                    lbl.bind("<Button-1>", self.fileLeftClicked)
                    lbl.bind("<Button-3>", self.showFilesRightClickMenu)
            self.files.grid()
            self.log.add(self.log.Info, __file__, "entry " + entry.name + " drawn" )

    def returnPressedInTextFields(self, _event):
        '''Is called when user hits Return key while writing in name field'''
        name = self.nameText.get("1.0", 'end-1c')
        description = self.description.get("1.0", 'end-1c')
        self.log.add(self.log.Info, __file__, "name change: " + name)
        if self.actions != None:
            if "entryChangeAction" in self.actions:
                self.actions["entryChangeAction"](name, description)

    def getName(self):
        '''Returns the name of the displayed entry'''
        return self.nameText.get("1.0", 'end-1c')

    def showTagRightClickMenu(self, event):
        '''This menu appears if user right clicks on a tag'''
        try:
            # remember which tag widget was clicked for delete handling
            self.clickedTag = event.widget
            self.tagRightClickMenu.tk_popup(event.x_root, event.y_root+20, 0)
        finally:
            self.tagRightClickMenu.grab_release()

    def newTagClicked(self):
        '''Is called when user right clicks on a tag and selects new'''
        self.log.add(self.log.Info, __file__, "new tag clicked" )
        # remove tag enter prompt
        if self.tags.winfo_children()[0]["text"] == self.tagPrompt:
            self.tags.winfo_children()[0].destroy()
        # add text widget for entering new tag
        self.newTagText = getLargeTextBlue(self.tags, self.tagEnterPrompt)
        self.newTagText.grid(row=3, column=0)
        self.newTagText.bind( "<Return>", self.returnPressedAtNewTag)
        self.tags.grid()

    def deleteTagClicked(self):
        '''Is called when user right clicks on a tag and selects delete'''
        self.log.add(self.log.Info, __file__, "delete tag clicked" )
        # only call delete action if there is a tag
        if (self.tags.winfo_children().__len__() == 1 and
            self.tags.winfo_children()[0]["text"] == self.tagPrompt):
            return
        else:
            tagToDelete = self.clickedTag["text"]
            if self.actions != None:
                if "deleteTagAction" in self.actions:
                    self.actions["deleteTagAction"](tagToDelete)

    def returnPressedAtNewTag(self, _event):
        '''Is called when user hits Return key while adding a new tag'''
        newTag = self.newTagText.get("1.0", 'end-1c')
        self.log.add(self.log.Info, __file__, "new tag: " + newTag)
        if self.actions != None:
            if "addTagAction" in self.actions:
                self.actions["addTagAction"](newTag)

    def showImageRightClickMenu(self, event):
        '''Tries to show the right click menu'''
        try:
            self.clickedImage = event.widget
            self.imageRightClickMenu.tk_popup(event.x_root, event.y_root+20, 0)
        finally:
            self.imageRightClickMenu.grab_release()

    def deleteImageClicked(self):
        '''Is called when user right clicks on an image and selects delete'''
        self.log.add(self.log.Info, __file__, "delete image clicked" )
        # only call delete action if there is an image
        if (self.images.winfo_children().__len__() == 1 and
            self.images.winfo_children()[0]["text"] == self.imagePrompt):
            return
        else:
            imageToDelete = self.clickedImage["text"]
            if self.actions != None:
                if "deleteImageAction" in self.actions:
                    self.actions["deleteImageAction"](imageToDelete)

    def newImageClicked(self):
        '''Is called when user right clicks on an image and selects new'''
        self.log.add(self.log.Info, __file__, "new image clicked" )
        # remove enter image prompt
        if self.images.winfo_children()[0]["text"] == self.imagePrompt:
            self.images.winfo_children()[0].destroy()
        if self.actions != None:
            if "newImageAction" in self.actions:
                self.actions["newImageAction"]()

    def showFilesRightClickMenu(self, event):
        '''Tries to show the right click menu'''
        try:
            self.clickedFile = event.widget
            self.fileRightClickMenu.tk_popup(event.x_root, event.y_root+20, 0)
        finally:
            self.fileRightClickMenu.grab_release()

    def deleteFileClicked(self):
        '''Is called when user right clicks on a file and selects delete'''
        self.log.add(self.log.Info, __file__, "delete file clicked" )
        # only call delete action if there is a file
        if (self.files.winfo_children().__len__() == 1 and
            self.files.winfo_children()[0]["text"] == self.filePrompt):
            return
        else:
            fileToDelete = self.clickedFile["text"]
            if self.actions != None:
                if "deleteFileAction" in self.actions:
                    self.actions["deleteFileAction"](fileToDelete)

    def newFileClicked(self):
        '''Is called when user right clicks on a file and selects new'''
        self.log.add(self.log.Info, __file__, "new file clicked" )
        # remove enter file prompt
        if self.files.winfo_children()[0]["text"] == self.filePrompt:
            self.files.winfo_children()[0].destroy()
        if self.actions != None:
            if "newFileAction" in self.actions:
                self.actions["newFileAction"]()

    def fileLeftClicked(self, event):
        '''Is called when user left clicks a file'''
        self.log.add(self.log.Info, __file__, "file left clicked" )
        if self.actions != None:
            if "openFileAction" in self.actions:
                self.actions["openFileAction"](event.widget["text"])
| |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# dust - Waqas Bhatti (wbhatti@astro.princeton.edu) - Dec 2017
# License: MIT. See the LICENSE file for more details.
'''
This gets extinction tables from the 2MASS DUST service at:
http://irsa.ipac.caltech.edu/applications/DUST/
If you use this, please cite the SF11 and SFD98 papers and acknowledge the use
of 2MASS/IPAC services.
- http://www.adsabs.harvard.edu/abs/1998ApJ...500..525S
- http://www.adsabs.harvard.edu/abs/2011ApJ...737..103S
Also see:
http://irsa.ipac.caltech.edu/applications/DUST/docs/background.html
'''
#############
## LOGGING ##
#############
import logging
from astrobase import log_sub, log_fmt, log_date_fmt
# Flip to True for verbose module-level logging.
DEBUG = False
if DEBUG:
    level = logging.DEBUG
else:
    level = logging.INFO
LOGGER = logging.getLogger(__name__)
logging.basicConfig(
    level=level,
    style=log_sub,
    format=log_fmt,
    datefmt=log_date_fmt,
)
# Short aliases used throughout this module.
LOGDEBUG = LOGGER.debug
LOGINFO = LOGGER.info
LOGWARNING = LOGGER.warning
LOGERROR = LOGGER.error
LOGEXCEPTION = LOGGER.exception
#############
## IMPORTS ##
#############
import os
import os.path
import hashlib
import re
import random
import time
import numpy as np
# to do the queries
import requests
import requests.exceptions
# to read IPAC tables
from astropy.table import Table
##############################
## 2MASS DUST FORM SETTINGS ##
##############################
# 2MASS DUST service endpoint and default form parameters.
DUST_URL = 'https://irsa.ipac.caltech.edu/cgi-bin/DUST/nph-dust'
DUST_PARAMS = {'locstr':'',
               'regSize':'5.0'}
# Matches the extinction-table URL embedded in the service's response.
# FIX: the previous pattern r'http[s|]://' used a character class that
# required a literal 's' or '|' immediately after 'http', so plain
# 'http://...' URLs could never match; 'https?://' makes the 's' optional.
DUST_REGEX = re.compile(r'https?://\S*extinction\.tbl')
################################
## 2MASS DUST QUERY FUNCTIONS ##
################################
def extinction_query(lon, lat,
                     coordtype='equatorial',
                     sizedeg=5.0,
                     forcefetch=False,
                     cachedir='~/.astrobase/dust-cache',
                     verbose=True,
                     timeout=10.0,
                     jitter=5.0):
    '''This queries the 2MASS DUST service to find the extinction parameters
    for the given `lon`, `lat`.

    Parameters
    ----------

    lon,lat: float
        These are decimal right ascension and declination if `coordtype =
        'equatorial'`. These are are decimal Galactic longitude and latitude if
        `coordtype = 'galactic'`.

    coordtype : {'equatorial','galactic'}
        Sets the type of coordinates passed in as `lon`, `lat`.

    sizedeg : float
        This is the width of the image returned by the DUST service. This can
        usually be left as-is if you're interested in the extinction only.

    forcefetch : bool
        If this is True, the query will be retried even if cached results for
        it exist.

    cachedir : str
        This points to the directory where results will be downloaded.

    verbose : bool
        If True, will indicate progress and warn of any issues.

    timeout : float
        This sets the amount of time in seconds to wait for the service to
        respond to our request.

    jitter : float
        This is used to control the scale of the random wait in seconds before
        starting the query. Useful in parallelized situations.

    Returns
    -------

    dict
        A dict of the following form is returned::

            {'Amag':{dict of extinction A_v values for several mag systems},
             'table': array containing the full extinction table,
             'tablefile': the path to the full extinction table file on disk,
             'provenance': 'cached' or 'new download',
             'request': string repr of the request made to 2MASS DUST}

    '''

    dustparams = DUST_PARAMS.copy()

    # convert the lon, lat to the required format
    # and generate the param dict
    if coordtype == 'equatorial':
        locstr = '%.3f %.3f Equ J2000' % (lon, lat)
    elif coordtype == 'galactic':
        locstr = '%.3f %.3f gal' % (lon, lat)
    else:
        LOGERROR('unknown coordinate type: %s' % coordtype)
        return None

    dustparams['locstr'] = locstr
    dustparams['regSize'] = '%.3f' % sizedeg

    # see if the cachedir exists
    if '~' in cachedir:
        cachedir = os.path.expanduser(cachedir)
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)

    # generate the cachekey and cache filename
    cachekey = '%s - %.1f' % (locstr, sizedeg)
    cachekey = hashlib.sha256(cachekey.encode()).hexdigest()
    cachefname = os.path.join(cachedir, '%s.txt' % cachekey)
    provenance = 'cache'

    # if this does not exist in cache or if we're forcefetching, do the query
    if forcefetch or (not os.path.exists(cachefname)):

        # FIX: random.randint(1, jitter) required an integer upper bound and
        # raised for jitter < 1; uniform(0, jitter) matches the documented
        # "scale of the random wait" semantics and accepts any float >= 0.
        time.sleep(random.uniform(0, jitter))
        provenance = 'new download'

        try:

            if verbose:
                LOGINFO('submitting 2MASS DUST request for '
                        'lon = %.3f, lat = %.3f, type = %s, size = %.1f' %
                        (lon, lat, coordtype, sizedeg))

            req = requests.get(DUST_URL, dustparams, timeout=timeout)
            req.raise_for_status()
            resp = req.text

            # see if we got an extinction table URL in the response
            tableurl = DUST_REGEX.search(resp)

            # if we did, download it to the cache directory
            if tableurl:

                tableurl = tableurl.group(0)
                req2 = requests.get(tableurl, timeout=timeout)

                # write the table to the cache directory
                with open(cachefname,'wb') as outfd:
                    outfd.write(req2.content)

                tablefname = cachefname

            else:
                LOGERROR('could not get extinction parameters for '
                         '%s (%.3f, %.3f) with size = %.1f' % (coordtype,
                                                               lon,lat,sizedeg))
                LOGERROR('error from DUST service follows:\n%s' % resp)
                return None

        except requests.exceptions.Timeout:
            LOGERROR('DUST request timed out for '
                     '%s (%.3f, %.3f) with size = %.1f' % (coordtype,
                                                           lon,lat,sizedeg))
            return None

        except Exception:
            LOGEXCEPTION('DUST request failed for '
                         '%s (%.3f, %.3f) with size = %.1f' % (coordtype,
                                                               lon,lat,sizedeg))
            return None

    # if this result is available in the cache, get it from there
    else:

        if verbose:
            LOGINFO('getting cached 2MASS DUST result for '
                    'lon = %.3f, lat = %.3f, coordtype = %s, size = %.1f' %
                    (lon, lat, coordtype, sizedeg))

        tablefname = cachefname

    #
    # now we should have the extinction table in some form
    #

    # read and parse the extinction table using astropy.Table
    extinction_table = Table.read(tablefname, format='ascii.ipac')

    # get the columns we need
    filters = np.array(extinction_table['Filter_name'])
    a_sf11_byfilter = np.array(extinction_table['A_SandF'])
    a_sfd98_byfilter = np.array(extinction_table['A_SFD'])

    # generate the output dict
    extdict = {'Amag':{x:{'sf11':y, 'sfd98':z} for
                       x,y,z in zip(filters,a_sf11_byfilter,a_sfd98_byfilter)},
               'table':np.array(extinction_table),
               'tablefile':os.path.abspath(cachefname),
               'provenance':provenance,
               'request':'%s (%.3f, %.3f) with size = %.1f' % (coordtype,
                                                               lon,lat,
                                                               sizedeg)}

    return extdict
| |
import unittest
import numpy as np
from pysal.lib import cg, examples
from .. import util
from .. import network
# geopandas is an optional dependency: when it is missing, every test
# class below is skipped via the GEOPANDAS_EXTINCT flag.
try:
    import geopandas
    GEOPANDAS_EXTINCT = False
except ImportError:
    GEOPANDAS_EXTINCT = True
@unittest.skipIf(GEOPANDAS_EXTINCT, 'Missing Geopandas')
class TestNetwork(unittest.TestCase):
    """Structural tests for Network built from the sample streets shapefile."""

    def setUp(self):
        path_to_shp = examples.get_path('streets.shp')
        gdf = geopandas.read_file(path_to_shp)
        self.ntw = network.Network(in_data=gdf)

    def tearDown(self):
        pass

    def test_extract_network(self):
        # known edge/node counts and total length for streets.shp
        self.assertEqual(len(self.ntw.edges), 303)
        self.assertEqual(len(self.ntw.nodes), 230)
        edgelengths = self.ntw.edge_lengths.values()
        self.assertAlmostEqual(sum(edgelengths), 104414.0920159, places=5)
        self.assertIn(0, self.ntw.adjacencylist[1])
        self.assertIn(0, self.ntw.adjacencylist[2])
        self.assertNotIn(0, self.ntw.adjacencylist[3])

    def test_contiguity_weights(self):
        w = self.ntw.contiguityweights(graph=False)
        self.assertEqual(w.n, 303)
        self.assertEqual(w.histogram,
                         [(2, 35), (3, 89), (4, 105), (5, 61), (6, 13)])

    def test_contiguity_weights_graph(self):
        w = self.ntw.contiguityweights(graph=True)
        self.assertEqual(w.n, 179)
        self.assertEqual(w.histogram,
                         [(2, 2), (3, 2), (4, 45), (5, 82), (6, 48)])

    def test_distance_band_weights(self):
        # I do not trust this result, test should be manually checked.
        w = self.ntw.distancebandweights(threshold=500)
        self.assertEqual(w.n, 230)
        self.assertEqual(w.histogram,
                         [(1, 22), (2, 58), (3, 63), (4, 40),
                          (5, 36), (6, 3), (7, 5), (8, 3)])

    def test_edge_segmentation(self):
        # segmenting at 200 map units increases the edge count
        n200 = self.ntw.segment_edges(200.0)
        self.assertEqual(len(n200.edges), 688)
        n200 = None

    def test_enum_links_node(self):
        coincident = self.ntw.enum_links_node(24)
        self.assertIn((24, 48), coincident)
@unittest.skipIf(GEOPANDAS_EXTINCT, 'Missing Geopandas')
class TestNetworkPointPattern(unittest.TestCase):
    """Tests for point patterns (schools, crimes) snapped to the network."""

    def setUp(self):
        path_to_shp = examples.get_path('streets.shp')
        gdf = geopandas.read_file(path_to_shp)
        self.ntw = network.Network(in_data=gdf)
        # snap both observation sets and expose them as attributes
        for obs in ['schools', 'crimes']:
            path_to_shp = examples.get_path('{}.shp'.format(obs))
            in_data = geopandas.read_file(path_to_shp)
            self.ntw.snapobservations(in_data, obs, attribute=True)
            setattr(self, obs, self.ntw.pointpatterns[obs])

    def tearDown(self):
        pass

    def test_add_point_pattern(self):
        self.assertEqual(self.crimes.npoints, 287)
        self.assertIn('properties', self.crimes.points[0])
        self.assertIn([1, 1], self.crimes.points[0]['properties'])

    def test_count_per_edge(self):
        counts = self.ntw.count_per_edge(
            self.ntw.pointpatterns['crimes'].obs_to_edge, graph=False)
        meancounts = sum(counts.values()) / float(len(counts.keys()))
        self.assertAlmostEqual(meancounts, 2.682242, places=5)

    def test_count_per_graph_edge(self):
        counts = self.ntw.count_per_edge(
            self.ntw.pointpatterns['crimes'].obs_to_edge, graph=True)
        meancounts = sum(counts.values()) / float(len(counts.keys()))
        self.assertAlmostEqual(meancounts, 3.29885, places=5)

    def test_simulate_normal_observations(self):
        npoints = self.ntw.pointpatterns['crimes'].npoints
        sim = self.ntw.simulate_observations(npoints)
        self.assertEqual(npoints, sim.npoints)

    def test_simulate_poisson_observations(self):
        npoints = self.ntw.pointpatterns['crimes'].npoints
        sim = self.ntw.simulate_observations(npoints, distribution='poisson')
        self.assertEqual(npoints, sim.npoints)

    def test_all_neighbor_distances(self):
        matrix1, tree = self.ntw.allneighbordistances('schools', gen_tree=True)
        known_mtx_val = 17682.436988
        known_tree_val = (173, 64)
        self.assertAlmostEqual(np.nansum(matrix1[0]), known_mtx_val, places=4)
        self.assertEqual(tree[(6, 7)], known_tree_val)
        # every node is at distance 0 from itself and predecessor chains
        # terminate at the source node
        for k, (distances, predlist) in self.ntw.alldistances.items():
            self.assertEqual(distances[k], 0)
            for p, plists in predlist.items():
                self.assertEqual(plists[-1], k)
            self.assertEqual(self.ntw.node_list, list(predlist.keys()))
        matrix2 = self.ntw.allneighbordistances('schools', fill_diagonal=0.)
        observed = matrix2.diagonal()
        known = np.zeros(matrix2.shape[0])
        # NOTE(review): this compares `observed.all()` to `known.all()`,
        # i.e. two booleans (both False for all-zero arrays), so the
        # assertion is vacuous; np.testing.assert_array_equal(observed,
        # known) would actually check the diagonal.
        self.assertEqual(observed.all(), known.all())
        matrix3 = self.ntw.allneighbordistances('schools', snap_dist=True)
        known_mtx_val = 3218.2597894
        observed_mtx_val = matrix3
        self.assertAlmostEqual(observed_mtx_val[0, 1], known_mtx_val, places=4)

    def test_nearest_neighbor_distances(self):
        # general test
        with self.assertRaises(KeyError):
            self.ntw.nearestneighbordistances('i_should_not_exist')
        nnd1 = self.ntw.nearestneighbordistances('schools')
        nnd2 = self.ntw.nearestneighbordistances('schools',
                                                 destpattern='schools')
        nndv1 = np.array(list(nnd1.values()))[:,1].astype(float)
        nndv2 = np.array(list(nnd2.values()))[:,1].astype(float)
        np.testing.assert_array_almost_equal_nulp(nndv1, nndv2)
        # nearest neighbor keeping zero test
        known_zero = ([19], 0.0)[0]
        nn_c = self.ntw.nearestneighbordistances('crimes',
                                                 keep_zero_dist=True)
        self.assertEqual(nn_c[18][0], known_zero)
        # nearest neighbor omitting zero test
        known_nonzero = ([11], 165.33982412719126)[1]
        nn_c = self.ntw.nearestneighbordistances('crimes',
                                                 keep_zero_dist=False)
        self.assertAlmostEqual(nn_c[18][1], known_nonzero, places=4)
        # nearest neighbor with snap distance
        known_neigh = ([3], 402.5219673922477)[1]
        nn_c = self.ntw.nearestneighbordistances('crimes',
                                                 keep_zero_dist=True,
                                                 snap_dist=True)
        self.assertAlmostEqual(nn_c[0][1], known_neigh, places=4)
@unittest.skipIf(GEOPANDAS_EXTINCT, 'Missing Geopandas')
class TestNetworkAnalysis(unittest.TestCase):
    """Smoke tests for the NetworkF/G/K summary statistics."""

    def setUp(self):
        streets = geopandas.read_file(examples.get_path('streets.shp'))
        self.ntw = network.Network(in_data=streets)
        pt_str = 'crimes'
        observations = geopandas.read_file(
            examples.get_path('{}.shp'.format(pt_str)))
        self.ntw.snapobservations(observations, pt_str, attribute=True)
        npts = self.ntw.pointpatterns['crimes'].npoints
        self.ntw.simulate_observations(npts)

    def tearDown(self):
        pass

    def _check_envelope(self, statistic):
        """Run `statistic` on the crimes pattern; check envelope length."""
        obtained = statistic(self.ntw.pointpatterns['crimes'],
                             permutations=5, nsteps=20)
        self.assertEqual(obtained.lowerenvelope.shape[0], 20)

    def test_network_f(self):
        self._check_envelope(self.ntw.NetworkF)

    def test_network_g(self):
        self._check_envelope(self.ntw.NetworkG)

    def test_network_k(self):
        self._check_envelope(self.ntw.NetworkK)
@unittest.skipIf(GEOPANDAS_EXTINCT, 'Missing Geopandas')
class TestNetworkUtils(unittest.TestCase):
    """Unit tests for the low-level helper functions in ``util``."""

    def setUp(self):
        path_to_shp = examples.get_path('streets.shp')
        gdf = geopandas.read_file(path_to_shp)
        self.ntw = network.Network(in_data=gdf)

    def tearDown(self):
        pass

    def test_compute_length(self):
        # Euclidean length of the unit diagonal
        self.point1, self.point2 = (0,0), (1,1)
        self.length = util.compute_length( self.point1, self.point2)
        self.assertAlmostEqual(self.length, 1.4142135623730951, places=4)

    def test_get_neighbor_distances(self):
        self.known_neighs = {1: 102.62353453439829, 2: 660.000001049743}
        self.neighs = util.get_neighbor_distances(self.ntw, 0,
                                                  self.ntw.edge_lengths)
        self.assertAlmostEqual(self.neighs[1], 102.62353453439829, places=4)
        self.assertAlmostEqual(self.neighs[2], 660.000001049743, places=4)

    def test_generate_tree(self):
        self.known_path = [23, 22, 20, 19, 170, 2, 0]
        self.distance, self.pred = util.dijkstra(self.ntw,
                                                 self.ntw.edge_lengths, 0)
        self.tree = util.generatetree(self.pred)
        self.assertEqual(self.tree[3], self.known_path)

    def test_dijkstra(self):
        self.distance, self.pred = util.dijkstra(self.ntw,
                                                 self.ntw.edge_lengths, 0)
        self.assertAlmostEqual(self.distance[196], 5505.668247, places=4)
        self.assertEqual(self.pred[196], 133)

    def test_dijkstra_mp(self):
        self.distance, self.pred = util.dijkstra_mp((self.ntw,
                                                     self.ntw.edge_lengths, 0))
        self.assertAlmostEqual(self.distance[196], 5505.668247, places=4)
        self.assertEqual(self.pred[196], 133)

    def test_square_distance_point_segment(self):
        self.point, self.segment = (1,1), ((0,0), (2,0))
        self.sqrd_nearp = util.squared_distance_point_segment(self.point,
                                                              self.segment)
        self.assertEqual(self.sqrd_nearp[0], 1.0)
        # FIX: `assertEqual(a.all(), b.all())` compared two plain booleans
        # (both False here) and passed vacuously; compare element-wise.
        np.testing.assert_array_almost_equal(self.sqrd_nearp[1],
                                             np.array([1., 0.]))

    def test_snap_points_on_segments(self):
        self.points = {0: cg.shapes.Point((1,1))}
        self.segments = [cg.shapes.Chain([cg.shapes.Point((0,0)),
                                          cg.shapes.Point((2,0))])]
        self.snapped = util.snap_points_on_segments(self.points, self.segments)
        self.known_coords = [xy._Point__loc for xy in self.snapped[0][0]]
        self.assertEqual(self.known_coords, [(0.0, 0.0), (2.0, 0.0)])
        # FIX: same vacuous `.all()` comparison as above -- check the
        # snapped coordinate array element-wise.
        np.testing.assert_array_almost_equal(self.snapped[0][1],
                                             np.array([1., 0.]))
# Allow running this test module directly (e.g. `python test_network.py`).
if __name__ == '__main__':
    unittest.main()
| |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.sessions
from frappe.utils import cstr
import mimetypes, json
from werkzeug.wrappers import Response
from werkzeug.routing import Map, Rule, NotFound
from frappe.website.context import get_context
from frappe.website.utils import get_home_page, can_cache, delete_page_cache
from frappe.website.router import clear_sitemap
from frappe.translate import guess_language
class PageNotFoundError(Exception): pass
def render(path, http_status_code=None):
    """render html page"""
    # Entry point for website page rendering: resolve the route, render it,
    # and translate the known failure modes into the proper HTTP responses.
    # NOTE: Python 2 exception syntax (`except X, e`) — matches this codebase.
    path = resolve_path(path.strip("/ "))
    try:
        data = render_page_by_language(path)
    except frappe.DoesNotExistError, e:
        # Unknown route: try interpreting it as "<doctype>/<name>" (print
        # view) or "<doctype>" (list view) before falling back to a 404.
        doctype, name = get_doctype_from_path(path)
        if doctype and name:
            path = "print"
            frappe.local.form_dict.doctype = doctype
            frappe.local.form_dict.name = name
        elif doctype:
            path = "list"
            frappe.local.form_dict.doctype = doctype
        else:
            path = "404"
            http_status_code = e.http_status_code
        try:
            data = render_page(path)
        except frappe.PermissionError, e:
            data, http_status_code = render_403(e, path)
    except frappe.PermissionError, e:
        # Permission denied on the first render attempt (outer try).
        data, http_status_code = render_403(e, path)
    except frappe.Redirect, e:
        # 301 with an empty body; the raiser stored the target in
        # frappe.flags.redirect_location.
        return build_response(path, "", 301, {
            "Location": frappe.flags.redirect_location,
            "Cache-Control": "no-store, no-cache, must-revalidate"
        })
    except Exception:
        # Any other failure renders the generic error page as a 500.
        path = "error"
        data = render_page(path)
        http_status_code = 500
    data = add_csrf_token(data)
    return build_response(path, data, http_status_code or 200)
def build_response(path, data, http_status_code, headers=None):
    """Assemble the werkzeug Response: body, status code, diagnostic and
    caller-supplied headers."""
    resp = Response()
    resp.data = set_content_type(resp, data, path)
    resp.status_code = http_status_code
    resp.headers[b"X-Page-Name"] = path.encode("utf-8")
    resp.headers[b"X-From-Cache"] = frappe.local.response.from_cache or False
    for key, val in (headers or {}).iteritems():
        resp.headers[bytes(key)] = val.encode("utf-8")
    return resp
def render_page_by_language(path):
    """Serve a translated variant of *path* when the visitor's language is in
    the site's translated languages; otherwise render the plain path."""
    translated_languages = frappe.get_hooks("translated_languages_for_website")
    user_lang = guess_language(translated_languages)
    if not (translated_languages and user_lang in translated_languages):
        return render_page(path)
    try:
        if path and path != "index":
            lang_path = '{0}/{1}'.format(user_lang, path)
        else:
            lang_path = user_lang  # home page of that language
        return render_page(lang_path)
    except frappe.DoesNotExistError:
        # No translated version exists — fall back to the untranslated page.
        return render_page(path)
def render_page(path):
    """Return the page HTML, serving from the per-language page cache when
    allowed, otherwise building it fresh."""
    if can_cache():
        cached = frappe.cache().hget("website_page", path)
        if cached and frappe.local.lang in cached:
            frappe.local.response.from_cache = True
            return cached[frappe.local.lang]
    return build(path)
def build(path):
    """Build the page for *path*, routing misses to the `website_catch_all`
    hook when one is configured."""
    if not frappe.db:
        frappe.connect()
    try:
        return build_page(path)
    except frappe.DoesNotExistError:
        catch_all = frappe.get_hooks().website_catch_all
        if not catch_all:
            raise
        # Render the configured catch-all route instead of 404-ing.
        return build_page(catch_all[0])
def build_page(path):
    """Render the template for *path* and, when permitted, store the HTML in
    the per-language website page cache."""
    if not getattr(frappe.local, "path", None):
        frappe.local.path = path
    context = get_context(path)
    html = frappe.get_template(context.template).render(context)
    if can_cache(context.no_cache):
        cached = frappe.cache().hget("website_page", path) or {}
        cached[frappe.local.lang] = html
        frappe.cache().hset("website_page", path, cached)
    return html
def resolve_path(path):
    """Normalise an incoming request path: default to the home page, strip a
    `.html` suffix, and apply the site's route-rewrite rules."""
    path = path or "index"
    if path.endswith('.html'):
        path = path[:-5]
    if path == "index":
        path = get_home_page()
    frappe.local.path = path
    return path if path == "index" else resolve_from_map(path)
def resolve_from_map(path):
    """Rewrite *path* through the `website_route_rules` hook using werkzeug's
    URL routing; unmatched paths are returned unchanged."""
    rules = [Rule(r["from_route"], endpoint=r["to_route"], defaults=r.get("defaults"))
        for r in frappe.get_hooks("website_route_rules")]
    urls = Map(rules).bind_to_environ(frappe.local.request.environ)
    try:
        endpoint, args = urls.match("/" + path)
    except NotFound:
        return path
    if args:
        # Rule captured path arguments — treat like a query string and
        # disable caching for this render.
        frappe.local.no_cache = 1
        frappe.local.form_dict.update(args)
    return endpoint
def set_content_type(response, data, path):
    """Set mimetype/charset on *response* based on the data type and the
    requested path; return the (possibly serialised) body."""
    if isinstance(data, dict):
        # Dict payloads are served as JSON.
        response.mimetype = 'application/json'
        response.charset = 'utf-8'
        return json.dumps(data)
    # Default to HTML, then let the path's extension override it.
    response.mimetype = 'text/html'
    response.charset = 'utf-8'
    if "." in path:
        content_type, encoding = mimetypes.guess_type(path)
        if content_type:
            response.mimetype = content_type
        if encoding:
            response.charset = encoding
    return data
def clear_cache(path=None):
    """Drop cached website artefacts for *path*; a falsy path means a full
    site-wide flush (sitemap, guest cache, page/home-page keys)."""
    frappe.cache().delete_value("website_generator_routes")
    delete_page_cache(path)
    if not path:
        clear_sitemap()
        frappe.clear_cache("Guest")
        for key in ("_website_pages", "home_page"):
            frappe.cache().delete_value(key)
    # Let apps hook their own cache invalidation.
    for method in frappe.get_hooks("website_clear_cache"):
        frappe.get_attr(method)(path)
def render_403(e, pathname):
    """Render the generic "message" page for a PermissionError, offering a
    login link that redirects back to the current page."""
    # NOTE(review): the *pathname* argument is unused; the login link uses
    # frappe.local.path instead — confirm this is intended.
    frappe.local.message_title = _("Not Permitted")
    frappe.local.message = """<p><strong>{error}</strong></p>
<p>
<a href="/login?redirect-to=/{pathname}" class="btn btn-primary">{login}</a>
</p>""".format(error=cstr(e.message), login=_("Login"), pathname=frappe.local.path)
    return render_page("message"), e.http_status_code
def get_doctype_from_path(path):
    """Map a URL path to a (doctype, docname) pair, trying the raw first
    segment and then its scrubbed form; (None, None) when nothing matches."""
    doctypes = frappe.db.sql_list("select name from tabDocType")
    parts = path.split("/")
    name = parts[1] if len(parts) > 1 else None
    # First segment as-is, then "some_doctype" -> "Some Doctype".
    for candidate in (parts[0], parts[0].replace("_", " ").title()):
        if candidate in doctypes:
            return candidate, name
    return None, None
def add_csrf_token(data):
    """Inject the session's CSRF token where the page left the
    `<!-- csrf_token -->` placeholder."""
    token_tag = '<script>frappe.csrf_token = "{0}";</script>'.format(
        frappe.local.session.data.csrf_token)
    return data.replace("<!-- csrf_token -->", token_tag)
| |
from __future__ import absolute_import
import re
from django.db import connection
from django.db.utils import DatabaseError
from django.contrib.gis import gdal
from django.contrib.gis.geos import (fromstr, GEOSGeometry,
Point, LineString, LinearRing, Polygon, GeometryCollection)
from django.contrib.gis.tests.utils import (
no_mysql, no_oracle, no_spatialite,
mysql, oracle, postgis, spatialite)
from django.test import TestCase
from django.utils import six, unittest
from .models import Country, City, PennsylvaniaCity, State, Track
from .test_feeds import GeoFeedTest
from .test_regress import GeoRegressionTests
from .test_sitemaps import GeoSitemapTest
if not spatialite:
from .models import Feature, MinusOneSRID
class GeoModelTest(TestCase):
    """Model-level tests for geographic fields: fixture loading, the lazy
    GeometryProxy, automatic SRID transforms, NULL geometries and raw SQL."""

    def test_fixtures(self):
        "Testing geographic model initialization from fixtures."
        # Ensuring that data was loaded from initial data fixtures.
        self.assertEqual(2, Country.objects.count())
        self.assertEqual(8, City.objects.count())
        self.assertEqual(2, State.objects.count())

    def test_proxy(self):
        "Testing Lazy-Geometry support (using the GeometryProxy)."
        ## Testing on a Point
        pnt = Point(0, 0)
        nullcity = City(name='NullCity', point=pnt)
        nullcity.save()

        # Making sure TypeError is thrown when trying to set with an
        # incompatible type.
        for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
            try:
                nullcity.point = bad
            except TypeError:
                pass
            else:
                self.fail('Should throw a TypeError')

        # Now setting with a compatible GEOS Geometry, saving, and ensuring
        # the save took, notice no SRID is explicitly set.
        new = Point(5, 23)
        nullcity.point = new

        # Ensuring that the SRID is automatically set to that of the
        # field after assignment, but before saving.
        self.assertEqual(4326, nullcity.point.srid)
        nullcity.save()

        # Ensuring the point was saved correctly after saving
        self.assertEqual(new, City.objects.get(name='NullCity').point)

        # Setting the X and Y of the Point
        nullcity.point.x = 23
        nullcity.point.y = 5
        # Checking assignments pre & post-save.
        self.assertNotEqual(Point(23, 5), City.objects.get(name='NullCity').point)
        nullcity.save()
        self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point)
        nullcity.delete()

        ## Testing on a Polygon
        shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
        inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))

        # Creating a State object using a built Polygon
        ply = Polygon(shell, inner)
        nullstate = State(name='NullState', poly=ply)
        self.assertEqual(4326, nullstate.poly.srid)  # SRID auto-set from None
        nullstate.save()

        ns = State.objects.get(name='NullState')
        self.assertEqual(ply, ns.poly)

        # Testing the `ogr` and `srs` lazy-geometry properties.
        if gdal.HAS_GDAL:
            self.assertEqual(True, isinstance(ns.poly.ogr, gdal.OGRGeometry))
            self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
            self.assertEqual(True, isinstance(ns.poly.srs, gdal.SpatialReference))
            self.assertEqual('WGS 84', ns.poly.srs.name)

        # Changing the interior ring on the poly attribute.
        new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
        ns.poly[1] = new_inner
        ply[1] = new_inner
        self.assertEqual(4326, ns.poly.srid)
        ns.save()
        self.assertEqual(ply, State.objects.get(name='NullState').poly)
        ns.delete()

    @no_mysql
    def test_lookup_insert_transform(self):
        "Testing automatic transform for lookups and inserts."
        # San Antonio in 'WGS84' (SRID 4326)
        sa_4326 = 'POINT (-98.493183 29.424170)'
        wgs_pnt = fromstr(sa_4326, srid=4326)  # Our reference point in WGS84

        # Oracle doesn't have SRID 3084, using 41157.
        if oracle:
            # San Antonio in 'Texas 4205, Southern Zone (1983, meters)' (SRID 41157)
            # Used the following Oracle SQL to get this value:
            # SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT (-98.493183 29.424170)', 4326), 41157)) FROM DUAL;
            nad_wkt = 'POINT (300662.034646583 5416427.45974934)'
            nad_srid = 41157
        else:
            # San Antonio in 'NAD83(HARN) / Texas Centric Lambert Conformal' (SRID 3084)
            nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)'  # Used ogr.py in gdal 1.4.1 for this transform
            nad_srid = 3084

        # Constructing & querying with a point from a different SRID. Oracle
        # `SDO_OVERLAPBDYINTERSECT` operates differently from
        # `ST_Intersects`, so contains is used instead.
        nad_pnt = fromstr(nad_wkt, srid=nad_srid)
        if oracle:
            tx = Country.objects.get(mpoly__contains=nad_pnt)
        else:
            tx = Country.objects.get(mpoly__intersects=nad_pnt)
        self.assertEqual('Texas', tx.name)

        # Creating San Antonio.  Remember the Alamo.
        sa = City.objects.create(name='San Antonio', point=nad_pnt)

        # Now verifying that San Antonio was transformed correctly
        sa = City.objects.get(name='San Antonio')
        self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
        self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)

        # If the GeometryField SRID is -1, then we shouldn't perform any
        # transformation if the SRID of the input geometry is different.
        # SpatiaLite does not support missing SRID values.
        if not spatialite:
            m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
            m1.save()
            self.assertEqual(-1, m1.geom.srid)

    def test_createnull(self):
        "Testing creating a model instance and the geometry being None"
        c = City()
        self.assertEqual(c.point, None)

    @no_spatialite  # SpatiaLite does not support abstract geometry columns
    def test_geometryfield(self):
        "Testing the general GeometryField."
        Feature(name='Point', geom=Point(1, 1)).save()
        Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save()
        Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save()
        Feature(name='GeometryCollection',
                geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)),
                                        Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save()

        f_1 = Feature.objects.get(name='Point')
        self.assertEqual(True, isinstance(f_1.geom, Point))
        self.assertEqual((1.0, 1.0), f_1.geom.tuple)
        f_2 = Feature.objects.get(name='LineString')
        self.assertEqual(True, isinstance(f_2.geom, LineString))
        self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)

        f_3 = Feature.objects.get(name='Polygon')
        self.assertEqual(True, isinstance(f_3.geom, Polygon))
        f_4 = Feature.objects.get(name='GeometryCollection')
        self.assertEqual(True, isinstance(f_4.geom, GeometryCollection))
        self.assertEqual(f_3.geom, f_4.geom[2])

    @no_mysql
    def test_inherited_geofields(self):
        "Test GeoQuerySet methods on inherited Geometry fields."
        # Creating a Pennsylvanian city.
        mansfield = PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')

        # All transformation SQL will need to be performed on the
        # _parent_ table.
        qs = PennsylvaniaCity.objects.transform(32128)

        self.assertEqual(1, qs.count())
        for pc in qs: self.assertEqual(32128, pc.point.srid)

    def test_raw_sql_query(self):
        "Testing raw SQL query."
        cities1 = City.objects.all()
        # Only PostGIS would support a 'select *' query because of its recognized
        # HEXEWKB format for geometry fields
        as_text = 'ST_AsText' if postgis else 'asText'
        cities2 = City.objects.raw('select id, name, %s(point) from geoapp_city' % as_text)
        self.assertEqual(len(cities1), len(list(cities2)))
        self.assertTrue(isinstance(cities2[0].point, Point))
class GeoLookupTest(TestCase):
    """Spatial lookup tests (`disjoint`, `contains`, `left`/`right`,
    `same_as`/`equals`, `isnull`, `relate`) across supported backends."""

    @no_mysql
    def test_disjoint_lookup(self):
        "Testing the `disjoint` lookup type."
        ptown = City.objects.get(name='Pueblo')
        qs1 = City.objects.filter(point__disjoint=ptown.point)
        self.assertEqual(7, qs1.count())

        qs2 = State.objects.filter(poly__disjoint=ptown.point)
        self.assertEqual(1, qs2.count())
        self.assertEqual('Kansas', qs2[0].name)

    def test_contains_contained_lookups(self):
        "Testing the 'contained', 'contains', and 'bbcontains' lookup types."
        # Getting Texas, yes we were a country -- once ;)
        texas = Country.objects.get(name='Texas')

        # Seeing what cities are in Texas, should get Houston and Dallas,
        # and Oklahoma City because 'contained' only checks on the
        # _bounding box_ of the Geometries.
        if not oracle:
            qs = City.objects.filter(point__contained=texas.mpoly)
            self.assertEqual(3, qs.count())
            cities = ['Houston', 'Dallas', 'Oklahoma City']
            for c in qs: self.assertEqual(True, c.name in cities)

        # Pulling out some cities.
        houston = City.objects.get(name='Houston')
        wellington = City.objects.get(name='Wellington')
        pueblo = City.objects.get(name='Pueblo')
        okcity = City.objects.get(name='Oklahoma City')
        lawrence = City.objects.get(name='Lawrence')

        # Now testing contains on the countries using the points for
        # Houston and Wellington.
        tx = Country.objects.get(mpoly__contains=houston.point)  # Query w/GEOSGeometry
        nz = Country.objects.get(mpoly__contains=wellington.point.hex)  # Query w/EWKBHEX
        self.assertEqual('Texas', tx.name)
        self.assertEqual('New Zealand', nz.name)

        # Spatialite 2.3 thinks that Lawrence is in Puerto Rico (a NULL geometry).
        if not spatialite:
            ks = State.objects.get(poly__contains=lawrence.point)
            self.assertEqual('Kansas', ks.name)

        # Pueblo and Oklahoma City (even though OK City is within the bounding box of Texas)
        # are not contained in Texas or New Zealand.
        self.assertEqual(0, len(Country.objects.filter(mpoly__contains=pueblo.point)))  # Query w/GEOSGeometry object
        self.assertEqual((mysql and 1) or 0,
                         len(Country.objects.filter(mpoly__contains=okcity.point.wkt)))  # Qeury w/WKT

        # OK City is contained w/in bounding box of Texas.
        if not oracle:
            qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
            self.assertEqual(1, len(qs))
            self.assertEqual('Texas', qs[0].name)

    # Only PostGIS has `left` and `right` lookup types.
    @no_mysql
    @no_oracle
    @no_spatialite
    def test_left_right_lookups(self):
        "Testing the 'left' and 'right' lookup types."
        # Left: A << B => true if xmax(A) < xmin(B)
        # Right: A >> B => true if xmin(A) > xmax(B)
        # See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.

        # Getting the borders for Colorado & Kansas
        co_border = State.objects.get(name='Colorado').poly
        ks_border = State.objects.get(name='Kansas').poly

        # Note: Wellington has an 'X' value of 174, so it will not be considered
        # to the left of CO.

        # These cities should be strictly to the right of the CO border.
        cities = ['Houston', 'Dallas', 'Oklahoma City',
                  'Lawrence', 'Chicago', 'Wellington']
        qs = City.objects.filter(point__right=co_border)
        self.assertEqual(6, len(qs))
        for c in qs: self.assertEqual(True, c.name in cities)

        # These cities should be strictly to the right of the KS border.
        cities = ['Chicago', 'Wellington']
        qs = City.objects.filter(point__right=ks_border)
        self.assertEqual(2, len(qs))
        for c in qs: self.assertEqual(True, c.name in cities)

        # Note: Wellington has an 'X' value of 174, so it will not be considered
        # to the left of CO.
        vic = City.objects.get(point__left=co_border)
        self.assertEqual('Victoria', vic.name)

        cities = ['Pueblo', 'Victoria']
        qs = City.objects.filter(point__left=ks_border)
        self.assertEqual(2, len(qs))
        for c in qs: self.assertEqual(True, c.name in cities)

    # The left/right lookup tests are known failures on PostGIS 2.0+
    # until the following bug is fixed:
    # http://trac.osgeo.org/postgis/ticket/2035
    # TODO: Ensure fixed in 2.0.2, else modify upper bound for version here.
    # NOTE: this runs at class-definition time and wraps the method above.
    if (2, 0, 0) <= connection.ops.spatial_version <= (2, 0, 1):
        test_left_right_lookups = unittest.expectedFailure(test_left_right_lookups)

    def test_equals_lookups(self):
        "Testing the 'same_as' and 'equals' lookup types."
        pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326)
        c1 = City.objects.get(point=pnt)
        c2 = City.objects.get(point__same_as=pnt)
        c3 = City.objects.get(point__equals=pnt)
        for c in [c1, c2, c3]: self.assertEqual('Houston', c.name)

    @no_mysql
    def test_null_geometries(self):
        "Testing NULL geometry support, and the `isnull` lookup type."
        # Creating a state with a NULL boundary.
        State.objects.create(name='Puerto Rico')

        # Querying for both NULL and Non-NULL values.
        nullqs = State.objects.filter(poly__isnull=True)
        validqs = State.objects.filter(poly__isnull=False)

        # Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
        self.assertEqual(1, len(nullqs))
        self.assertEqual('Puerto Rico', nullqs[0].name)

        # The valid states should be Colorado & Kansas
        self.assertEqual(2, len(validqs))
        state_names = [s.name for s in validqs]
        self.assertEqual(True, 'Colorado' in state_names)
        self.assertEqual(True, 'Kansas' in state_names)

        # Saving another commonwealth w/a NULL geometry.
        nmi = State.objects.create(name='Northern Mariana Islands', poly=None)
        self.assertEqual(nmi.poly, None)

        # Assigning a geomery and saving -- then UPDATE back to NULL.
        nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))'
        nmi.save()
        State.objects.filter(name='Northern Mariana Islands').update(poly=None)
        self.assertEqual(None, State.objects.get(name='Northern Mariana Islands').poly)

    @no_mysql
    def test_relate_lookup(self):
        "Testing the 'relate' lookup type."
        # To make things more interesting, we will have our Texas reference point in
        # different SRIDs.
        pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847)
        pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326)

        # Not passing in a geometry as first param shoud
        # raise a type error when initializing the GeoQuerySet
        self.assertRaises(ValueError, Country.objects.filter, mpoly__relate=(23, 'foo'))

        # Making sure the right exception is raised for the given
        # bad arguments.
        for bad_args, e in [((pnt1, 0), ValueError), ((pnt2, 'T*T***FF*', 0), ValueError)]:
            qs = Country.objects.filter(mpoly__relate=bad_args)
            self.assertRaises(e, qs.count)

        # Relate works differently for the different backends.
        if postgis or spatialite:
            contains_mask = 'T*T***FF*'
            within_mask = 'T*F**F***'
            intersects_mask = 'T********'
        elif oracle:
            contains_mask = 'contains'
            within_mask = 'inside'
            # TODO: This is not quite the same as the PostGIS mask above
            intersects_mask = 'overlapbdyintersect'

        # Testing contains relation mask.
        self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name)
        self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name)

        # Testing within relation mask.
        ks = State.objects.get(name='Kansas')
        self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, within_mask)).name)

        # Testing intersection relation mask.
        if not oracle:
            self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name)
            self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name)
            self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name)
class GeoQuerySetTest(TestCase):
# Please keep the tests in GeoQuerySet method's alphabetic order
@no_mysql
def test_centroid(self):
"Testing the `centroid` GeoQuerySet method."
qs = State.objects.exclude(poly__isnull=True).centroid()
if oracle:
tol = 0.1
elif spatialite:
tol = 0.000001
else:
tol = 0.000000001
for s in qs:
self.assertEqual(True, s.poly.centroid.equals_exact(s.centroid, tol))
    @no_mysql
    def test_diff_intersection_union(self):
        "Testing the `difference`, `intersection`, `sym_difference`, and `union` GeoQuerySet methods."
        geom = Point(5, 23)
        tol = 1  # NOTE(review): tol appears unused in this test.
        qs = Country.objects.all().difference(geom).sym_difference(geom).union(geom)

        # XXX For some reason SpatiaLite does something screwey with the Texas geometry here.  Also,
        # XXX it doesn't like the null intersection.
        if spatialite:
            qs = qs.exclude(name='Texas')
        else:
            qs = qs.intersection(geom)

        for c in qs:
            if oracle:
                # Should be able to execute the queries; however, they won't be the same
                # as GEOS (because Oracle doesn't use GEOS internally like PostGIS or
                # SpatiaLite).
                pass
            else:
                self.assertEqual(c.mpoly.difference(geom), c.difference)
                if not spatialite:
                    self.assertEqual(c.mpoly.intersection(geom), c.intersection)
                self.assertEqual(c.mpoly.sym_difference(geom), c.sym_difference)
                self.assertEqual(c.mpoly.union(geom), c.union)
@no_mysql
@no_spatialite # SpatiaLite does not have an Extent function
def test_extent(self):
"Testing the `extent` GeoQuerySet method."
# Reference query:
# `SELECT ST_extent(point) FROM geoapp_city WHERE (name='Houston' or name='Dallas');`
# => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203)
expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820)
qs = City.objects.filter(name__in=('Houston', 'Dallas'))
extent = qs.extent()
for val, exp in zip(extent, expected):
self.assertAlmostEqual(exp, val, 4)
    @no_mysql
    @no_oracle
    @no_spatialite
    def test_force_rhr(self):
        "Testing GeoQuerySet.force_rhr()."
        # Input rings and their expected right-hand-rule rewinding.
        rings = (((0, 0), (5, 0), (0, 5), (0, 0)),
                 ((1, 1), (1, 3), (3, 1), (1, 1)),
                 )
        rhr_rings = (((0, 0), (0, 5), (5, 0), (0, 0)),
                     ((1, 1), (3, 1), (1, 3), (1, 1)),
                     )
        State.objects.create(name='Foo', poly=Polygon(*rings))
        s = State.objects.force_rhr().get(name='Foo')
        self.assertEqual(rhr_rings, s.force_rhr.coords)
    @no_mysql
    @no_oracle
    @no_spatialite
    def test_geohash(self):
        "Testing GeoQuerySet.geohash()."
        # Skip entirely when the backend has no geohash support.
        if not connection.ops.geohash: return
        # Reference query:
        # SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston';
        # SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston';
        ref_hash = '9vk1mfq8jx0c8e0386z6'
        h1 = City.objects.geohash().get(name='Houston')
        h2 = City.objects.geohash(precision=5).get(name='Houston')
        self.assertEqual(ref_hash, h1.geohash)
        self.assertEqual(ref_hash[:5], h2.geohash)
    def test_geojson(self):
        "Testing GeoJSON output from the database using GeoQuerySet.geojson()."
        # Only PostGIS 1.3.4+ supports GeoJSON.
        if not connection.ops.geojson:
            self.assertRaises(NotImplementedError, Country.objects.all().geojson, field_name='mpoly')
            return

        # Reference output differs between PostGIS 1.4+ and older versions.
        if connection.ops.spatial_version >= (1, 4, 0):
            pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
            houston_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
            victoria_json = '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],"coordinates":[-123.305196,48.462611]}'
            chicago_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
        else:
            pueblo_json = '{"type":"Point","coordinates":[-104.60925200,38.25500100]}'
            houston_json = '{"type":"Point","crs":{"type":"EPSG","properties":{"EPSG":4326}},"coordinates":[-95.36315100,29.76337400]}'
            victoria_json = '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],"coordinates":[-123.30519600,48.46261100]}'
            chicago_json = '{"type":"Point","crs":{"type":"EPSG","properties":{"EPSG":4326}},"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'

        # Precision argument should only be an integer
        self.assertRaises(TypeError, City.objects.geojson, precision='foo')

        # Reference queries and values.
        # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
        self.assertEqual(pueblo_json, City.objects.geojson().get(name='Pueblo').geojson)

        # 1.3.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
        # 1.4.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
        # This time we want to include the CRS by using the `crs` keyword.
        self.assertEqual(houston_json, City.objects.geojson(crs=True, model_att='json').get(name='Houston').json)

        # 1.3.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Victoria';
        # 1.4.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
        # This time we include the bounding box by using the `bbox` keyword.
        self.assertEqual(victoria_json, City.objects.geojson(bbox=True).get(name='Victoria').geojson)

        # 1.(3|4).x: SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Chicago';
        # Finally, we set every available keyword.
        self.assertEqual(chicago_json, City.objects.geojson(bbox=True, crs=True, precision=5).get(name='Chicago').geojson)
    def test_gml(self):
        "Testing GML output from the database using GeoQuerySet.gml()."
        if mysql or (spatialite and not connection.ops.gml):
            self.assertRaises(NotImplementedError, Country.objects.all().gml, field_name='mpoly')
            return

        # Should throw a TypeError when tyring to obtain GML from a
        # non-geometry field.
        qs = City.objects.all()
        self.assertRaises(TypeError, qs.gml, field_name='name')
        ptown1 = City.objects.gml(field_name='point', precision=9).get(name='Pueblo')
        ptown2 = City.objects.gml(precision=9).get(name='Pueblo')

        # The expected GML markup varies per backend/version.
        if oracle:
            # No precision parameter for Oracle :-/
            gml_regex = re.compile(r'^<gml:Point srsName="SDO:4326" xmlns:gml="http://www.opengis.net/gml"><gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ </gml:coordinates></gml:Point>')
        elif spatialite and connection.ops.spatial_version < (3, 0, 0):
            # Spatialite before 3.0 has extra colon in SrsName
            gml_regex = re.compile(r'^<gml:Point SrsName="EPSG::4326"><gml:coordinates decimal="\." cs="," ts=" ">-104.609251\d+,38.255001</gml:coordinates></gml:Point>')
        else:
            gml_regex = re.compile(r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>')

        for ptown in [ptown1, ptown2]:
            self.assertTrue(gml_regex.match(ptown.gml))

        # PostGIS < 1.5 doesn't include dimension im GMLv3 output.
        if postgis and connection.ops.spatial_version >= (1, 5, 0):
            self.assertIn('<gml:pos srsDimension="2">',
                          City.objects.gml(version=3).get(name='Pueblo').gml)
    def test_kml(self):
        "Testing KML output from the database using GeoQuerySet.kml()."
        # Only PostGIS and Spatialite (>=2.4.0-RC4) support KML serialization
        if not (postgis or (spatialite and connection.ops.kml)):
            self.assertRaises(NotImplementedError, State.objects.all().kml, field_name='poly')
            return

        # Should throw a TypeError when trying to obtain KML from a
        # non-geometry field.
        qs = City.objects.all()
        self.assertRaises(TypeError, qs.kml, 'name')

        # The reference KML depends on the version of PostGIS used
        # (the output stopped including altitude in 1.3.3).
        if connection.ops.spatial_version >= (1, 3, 3):
            ref_kml = '<Point><coordinates>-104.609252,38.255001</coordinates></Point>'
        else:
            ref_kml = '<Point><coordinates>-104.609252,38.255001,0</coordinates></Point>'

        # Ensuring the KML is as expected.
        ptown1 = City.objects.kml(field_name='point', precision=9).get(name='Pueblo')
        ptown2 = City.objects.kml(precision=9).get(name='Pueblo')
        for ptown in [ptown1, ptown2]:
            self.assertEqual(ref_kml, ptown.kml)
    # Only PostGIS has support for the MakeLine aggregate.
    @no_mysql
    @no_oracle
    @no_spatialite
    def test_make_line(self):
        "Testing the `make_line` GeoQuerySet method."
        # Ensuring that a `TypeError` is raised on models without PointFields.
        self.assertRaises(TypeError, State.objects.make_line)
        self.assertRaises(TypeError, Country.objects.make_line)
        # Reference query:
        # SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
        ref_line = GEOSGeometry('LINESTRING(-95.363151 29.763374,-96.801611 32.782057,-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)', srid=4326)
        self.assertEqual(ref_line, City.objects.make_line())
    @no_mysql
    def test_num_geom(self):
        "Testing the `num_geom` GeoQuerySet method."
        # Both 'countries' only have two geometries.
        for c in Country.objects.num_geom():
            self.assertEqual(2, c.num_geom)
        for c in City.objects.filter(point__isnull=False).num_geom():
            # Oracle and PostGIS 2.0+ will return 1 for the number of
            # geometries on non-collections, whereas PostGIS < 2.0.0
            # will return None.
            if postgis and connection.ops.spatial_version < (2, 0, 0):
                self.assertIsNone(c.num_geom)
            else:
                self.assertEqual(1, c.num_geom)
@no_mysql
@no_spatialite # SpatiaLite can only count vertices in LineStrings
def test_num_points(self):
"Testing the `num_points` GeoQuerySet method."
for c in Country.objects.num_points():
self.assertEqual(c.mpoly.num_points, c.num_points)
if not oracle:
# Oracle cannot count vertices in Point geometries.
for c in City.objects.num_points(): self.assertEqual(1, c.num_points)
    @no_mysql
    def test_point_on_surface(self):
        "Testing the `point_on_surface` GeoQuerySet method."
        # Reference values.
        if oracle:
            # SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05)) FROM GEOAPP_COUNTRY;
            ref = {'New Zealand': fromstr('POINT (174.616364 -36.100861)', srid=4326),
                   'Texas': fromstr('POINT (-103.002434 36.500397)', srid=4326),
                   }
        elif postgis or spatialite:
            # Using GEOSGeometry to compute the reference point on surface values
            # -- since PostGIS also uses GEOS these should be the same.
            ref = {'New Zealand': Country.objects.get(name='New Zealand').mpoly.point_on_surface,
                   'Texas': Country.objects.get(name='Texas').mpoly.point_on_surface
                   }

        for c in Country.objects.point_on_surface():
            if spatialite:
                # XXX This seems to be a WKT-translation-related precision issue?
                tol = 0.00001
            else:
                tol = 0.000000001
            self.assertEqual(True, ref[c.name].equals_exact(c.point_on_surface, tol))
    @no_mysql
    @no_spatialite
    def test_reverse_geom(self):
        "Testing GeoQuerySet.reverse_geom()."
        coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
        Track.objects.create(name='Foo', line=LineString(coords))
        t = Track.objects.reverse_geom().get(name='Foo')
        coords.reverse()
        self.assertEqual(tuple(coords), t.reverse_geom.coords)
        if oracle:
            # Oracle cannot reverse non-linestring geometries.
            self.assertRaises(TypeError, State.objects.reverse_geom)
    @no_mysql
    @no_oracle
    def test_scale(self):
        "Testing the `scale` GeoQuerySet method."
        xfac, yfac = 2, 3
        tol = 5  # XXX The low precision tolerance is for SpatiaLite
        qs = Country.objects.scale(xfac, yfac, model_att='scaled')
        # Compare every coordinate of every ring of every polygon against
        # the locally-scaled original.
        for c in qs:
            for p1, p2 in zip(c.mpoly, c.scaled):
                for r1, r2 in zip(p1, p2):
                    for c1, c2 in zip(r1.coords, r2.coords):
                        self.assertAlmostEqual(c1[0] * xfac, c2[0], tol)
                        self.assertAlmostEqual(c1[1] * yfac, c2[1], tol)
@no_mysql
@no_oracle
@no_spatialite
def test_snap_to_grid(self):
    "Testing GeoQuerySet.snap_to_grid()."
    # Let's try and break snap_to_grid() with bad combinations of arguments.
    for bad_args in ((), range(3), range(5)):
        self.assertRaises(ValueError, Country.objects.snap_to_grid, *bad_args)
    for bad_args in (('1.0',), (1.0, None), tuple(map(six.text_type, range(4)))):
        self.assertRaises(TypeError, Country.objects.snap_to_grid, *bad_args)
    # Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org
    # from the world borders dataset he provides.
    wkt = ('MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,'
           '12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,'
           '12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,'
           '12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,'
           '12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,'
           '12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,'
           '12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,'
           '12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))')
    sm = Country.objects.create(name='San Marino', mpoly=fromstr(wkt))
    # Because floating-point arithmetic isn't exact, we set a tolerance
    # to pass into GEOS `equals_exact`.
    tol = 0.000000001
    # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
    ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))')
    self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.1).get(name='San Marino').snap_to_grid, tol))
    # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
    ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))')
    self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23).get(name='San Marino').snap_to_grid, tol))
    # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
    ref = fromstr('MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))')
    self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23, 0.5, 0.17).get(name='San Marino').snap_to_grid, tol))
def test_svg(self):
    "Testing SVG output using GeoQuerySet.svg()."
    # svg() is not implemented on these backends.
    if mysql or oracle:
        self.assertRaises(NotImplementedError, City.objects.svg)
        return
    self.assertRaises(TypeError, City.objects.svg, precision='foo')
    # SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo';
    svg1 = 'cx="-104.609252" cy="-38.255001"'
    # Even though relative, only one point so it's practically the same except for
    # the 'c' letter prefix on the x,y values.
    svg2 = svg1.replace('c', '')
    self.assertEqual(svg1, City.objects.svg().get(name='Pueblo').svg)
    self.assertEqual(svg2, City.objects.svg(relative=5).get(name='Pueblo').svg)
@no_mysql
def test_transform(self):
    "Testing the transform() GeoQuerySet method."
    # Pre-transformed points for Houston and Pueblo.
    htown = fromstr('POINT(1947516.83115183 6322297.06040572)', srid=3084)
    ptown = fromstr('POINT(992363.390841912 481455.395105533)', srid=2774)
    prec = 3 # Precision is low due to version variations in PROJ and GDAL.
    # Asserting the result of the transform operation with the values in
    # the pre-transformed points. Oracle does not have the 3084 SRID.
    if not oracle:
        h = City.objects.transform(htown.srid).get(name='Houston')
        self.assertEqual(3084, h.point.srid)
        self.assertAlmostEqual(htown.x, h.point.x, prec)
        self.assertAlmostEqual(htown.y, h.point.y, prec)
    # Positional and keyword SRID arguments should produce the same result.
    p1 = City.objects.transform(ptown.srid, field_name='point').get(name='Pueblo')
    p2 = City.objects.transform(srid=ptown.srid).get(name='Pueblo')
    for p in [p1, p2]:
        self.assertEqual(2774, p.point.srid)
        self.assertAlmostEqual(ptown.x, p.point.x, prec)
        self.assertAlmostEqual(ptown.y, p.point.y, prec)
@no_mysql
@no_oracle
def test_translate(self):
    "Testing the `translate` GeoQuerySet method."
    xfac, yfac = 5, -23
    qs = Country.objects.translate(xfac, yfac, model_att='translated')
    # Compare every coordinate against its locally-translated value.
    for c in qs:
        for p1, p2 in zip(c.mpoly, c.translated):
            for r1, r2 in zip(p1, p2):
                for c1, c2 in zip(r1.coords, r2.coords):
                    # XXX The low precision is for SpatiaLite
                    self.assertAlmostEqual(c1[0] + xfac, c2[0], 5)
                    self.assertAlmostEqual(c1[1] + yfac, c2[1], 5)
@no_mysql
def test_unionagg(self):
    "Testing the `unionagg` (aggregate union) GeoQuerySet method."
    tx = Country.objects.get(name='Texas').mpoly
    # Houston, Dallas -- Oracle has different order.
    # NOTE(review): union1 and union2 are currently identical WKT strings,
    # so the oracle branch below is redundant -- confirm the intended
    # Oracle point ordering before relying on this distinction.
    union1 = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)')
    union2 = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)')
    qs = City.objects.filter(point__within=tx)
    self.assertRaises(TypeError, qs.unionagg, 'name')
    # Using `field_name` keyword argument in one query and specifying an
    # order in the other (which should not be used because this is
    # an aggregate method on a spatial column)
    u1 = qs.unionagg(field_name='point')
    u2 = qs.order_by('name').unionagg()
    tol = 0.00001
    if oracle:
        union = union2
    else:
        union = union1
    self.assertEqual(True, union.equals_exact(u1, tol))
    self.assertEqual(True, union.equals_exact(u2, tol))
    # Aggregate union over an empty queryset is expected to be None.
    qs = City.objects.filter(name='NotACity')
    self.assertEqual(None, qs.unionagg(field_name='point'))
| |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from cairis.core.ARM import *
from cairis.core.Obstacle import Obstacle
from cairis.core.Vulnerability import Vulnerability
from cairis.core.ObstacleEnvironmentProperties import ObstacleEnvironmentProperties
from cairis.core.VulnerabilityEnvironmentProperties import VulnerabilityEnvironmentProperties
from cairis.core.ObstacleParameters import ObstacleParameters
from cairis.core.VulnerabilityParameters import VulnerabilityParameters
from cairis.core.GoalAssociationParameters import GoalAssociationParameters
from cairis.daemon.CairisHTTPError import ObjectNotFoundHTTPError, MalformedJSONHTTPError, ARMHTTPError, MissingParameterHTTPError, OverwriteNotAllowedHTTPError
from cairis.misc.KaosModel import KaosModel
from cairis.core.ValueType import ValueType
from cairis.core.ValueTypeParameters import ValueTypeParameters
from cairis.data.CairisDAO import CairisDAO
from cairis.tools.JsonConverter import json_serialize, json_deserialize
from cairis.tools.ModelDefinitions import ObstacleEnvironmentPropertiesModel, ObstacleModel, RefinementModel
from cairis.tools.SessionValidator import check_required_keys, get_fonts
__author__ = 'Shamal Faily'
class ObstacleDAO(CairisDAO):
    """Data access object for obstacles and their obstacle value types.

    Mediates between the HTTP API layer and the CAIRIS database proxy,
    converting model objects to/from JSON-friendly forms and translating
    database/model exceptions into HTTP errors.
    """

    def __init__(self, session_id):
        CairisDAO.__init__(self, session_id, 'obstacle')

    def get_objects(self, constraint_id=-1, simplify=True):
        """Return a dict of obstacles keyed by name.

        When simplify is True, each obstacle is stripped for serialization.
        """
        try:
            obstacles = self.db_proxy.getObstacles(constraint_id)
        except DatabaseProxyException as ex:
            self.close()
            raise ARMHTTPError(ex)
        if simplify:
            for key, value in list(obstacles.items()):
                obstacles[key] = self.simplify(value)
        return obstacles

    def get_objects_summary(self):
        """Return summary records for all obstacles."""
        try:
            obs = self.db_proxy.getObstaclesSummary()
        except DatabaseProxyException as ex:
            self.close()
            raise ARMHTTPError(ex)
        return obs

    def get_object_by_name(self, name, simplify=True):
        """Return the named obstacle or raise ObjectNotFoundHTTPError."""
        obsId = self.db_proxy.getDimensionId(name, 'obstacle')
        found_obstacle = None
        obstacles = self.get_objects(obsId, simplify=False)
        if obstacles is not None:
            found_obstacle = obstacles.get(name)
        if found_obstacle is None:
            self.close()
            raise ObjectNotFoundHTTPError('The provided obstacle name')
        if simplify:
            found_obstacle = self.simplify(found_obstacle)
        return found_obstacle

    def add_object(self, obstacle):
        """Add a new obstacle; refuse to overwrite an existing one.

        Returns the new obstacle's database id.
        """
        obsParams = ObstacleParameters(
            obsName=obstacle.theName,
            obsOrig=obstacle.theOriginator,
            tags=obstacle.theTags,
            properties=obstacle.theEnvironmentProperties)
        if not self.check_existing_obstacle(obstacle.theName):
            obstacle_id = self.db_proxy.addObstacle(obsParams)
        else:
            self.close()
            raise OverwriteNotAllowedHTTPError('The provided obstacle name')
        return obstacle_id

    def update_object(self, obstacle, name):
        """Replace the obstacle currently called `name` with new values."""
        old_obstacle = self.get_object_by_name(name, simplify=False)
        id = old_obstacle.theId
        params = ObstacleParameters(
            obsName=obstacle.theName,
            obsOrig=obstacle.theOriginator,
            tags=obstacle.theTags,
            properties=obstacle.theEnvironmentProperties)
        params.setId(id)
        try:
            self.db_proxy.updateObstacle(params)
        except DatabaseProxyException as ex:
            self.close()
            raise ARMHTTPError(ex)

    def delete_object(self, name):
        """Delete the named obstacle."""
        try:
            obsId = self.db_proxy.getDimensionId(name, 'obstacle')
            self.db_proxy.deleteObstacle(obsId)
        except DatabaseProxyException as ex:
            self.close()
            raise ARMHTTPError(ex)
        except ARMException as ex:
            self.close()
            raise ARMHTTPError(ex)

    def check_existing_obstacle(self, name):
        """Return True when an obstacle with this name already exists."""
        try:
            self.db_proxy.nameCheck(name, 'obstacle')
            return False
        except ARMException as ex:
            # nameCheck signals an existing record via the exception message.
            if str(ex.value).find('already exists') > -1:
                return True
            self.close()
            raise ARMHTTPError(ex)

    def get_obstacle_model(self, environment_name, obstacle_name, pathValues=None):
        """Return the KAOS obstacle model as Graphviz dot code.

        pathValues is accepted for API compatibility but unused; the
        previous mutable default ([]) has been replaced with None.
        """
        fontName, fontSize, apFontName = get_fonts(session_id=self.session_id)
        if obstacle_name == 'all':
            obstacle_name = ''
        try:
            obstacle_filter = 0
            associationDictionary = self.db_proxy.obstacleModel(environment_name, obstacle_name, obstacle_filter)
            associations = KaosModel(list(associationDictionary.values()), environment_name, 'obstacle', obstacle_name, db_proxy=self.db_proxy, font_name=fontName, font_size=fontSize)
            dot_code = associations.graph()
            return dot_code
        except DatabaseProxyException as ex:
            self.close()
            raise ARMHTTPError(ex)

    # region Obstacle types
    def get_obstacle_types(self, environment_name=''):
        """Return the obstacle_type value types for an environment."""
        try:
            obstacle_types = self.db_proxy.getValueTypes('obstacle_type', environment_name)
            return obstacle_types
        except DatabaseProxyException as ex:
            raise ARMHTTPError(ex)
        except ARMException as ex:
            raise ARMHTTPError(ex)

    def get_obstacle_type_by_name(self, name, environment_name=''):
        """Return the named obstacle type or raise ObjectNotFoundHTTPError."""
        found_type = None
        obstacle_types = self.get_obstacle_types(environment_name=environment_name)
        if obstacle_types is None or len(obstacle_types) < 1:
            raise ObjectNotFoundHTTPError('Obstacle types')
        idx = 0
        while found_type is None and idx < len(obstacle_types):
            if obstacle_types[idx].theName == name:
                found_type = obstacle_types[idx]
            idx += 1
        if found_type is None:
            raise ObjectNotFoundHTTPError('The provided obstacle type name')
        return found_type

    def add_obstacle_type(self, obstacle_type, environment_name=''):
        """Add a new obstacle type; refuse to overwrite an existing one."""
        assert isinstance(obstacle_type, ValueType)
        type_exists = self.check_existing_obstacle_type(obstacle_type.theName, environment_name=environment_name)
        if type_exists:
            raise OverwriteNotAllowedHTTPError(obj_name='The obstacle type')
        params = ValueTypeParameters(vtName=obstacle_type.theName, vtDesc=obstacle_type.theDescription, vType='obstacle_type', envName=environment_name, vtScore=obstacle_type.theScore, vtRat=obstacle_type.theRationale)
        try:
            return self.db_proxy.addValueType(params)
        except DatabaseProxyException as ex:
            raise ARMHTTPError(ex)
        except ARMException as ex:
            raise ARMHTTPError(ex)

    def update_obstacle_type(self, obstacle_type, name, environment_name=''):
        """Update the named obstacle type with new values."""
        assert isinstance(obstacle_type, ValueType)
        found_type = self.get_obstacle_type_by_name(name, environment_name)
        params = ValueTypeParameters(vtName=obstacle_type.theName, vtDesc=obstacle_type.theDescription, vType='obstacle_type', envName=environment_name, vtScore=obstacle_type.theScore, vtRat=obstacle_type.theRationale)
        params.setId(found_type.theId)
        try:
            self.db_proxy.updateValueType(params)
        except DatabaseProxyException as ex:
            raise ARMHTTPError(ex)
        except ARMException as ex:
            raise ARMHTTPError(ex)

    def delete_obstacle_type(self, name, environment_name=''):
        """Delete the named obstacle type."""
        found_type = self.get_obstacle_type_by_name(name, environment_name)
        try:
            self.db_proxy.deleteObstacleType(found_type.theId)
        except DatabaseProxyException as ex:
            raise ARMHTTPError(ex)
        except ARMException as ex:
            raise ARMHTTPError(ex)

    def check_existing_obstacle_type(self, name, environment_name):
        """Return True when the named obstacle type exists."""
        try:
            self.get_obstacle_type_by_name(name, environment_name)
            return True
        except ObjectNotFoundHTTPError:
            return False
    # endregion

    # region Goal values
    def get_obstacle_values(self, environment_name=''):
        """Return the obstacle_value value types for an environment."""
        try:
            obstacle_values = self.db_proxy.getValueTypes('obstacle_value', environment_name)
            return obstacle_values
        except DatabaseProxyException as ex:
            raise ARMHTTPError(ex)
        except ARMException as ex:
            raise ARMHTTPError(ex)

    def get_obstacle_value_by_name(self, name, environment_name=''):
        """Return the named obstacle value or raise ObjectNotFoundHTTPError."""
        found_value = None
        obstacle_values = self.get_obstacle_values(environment_name=environment_name)
        if obstacle_values is None or len(obstacle_values) < 1:
            raise ObjectNotFoundHTTPError('Obstacle values')
        idx = 0
        while found_value is None and idx < len(obstacle_values):
            if obstacle_values[idx].theName == name:
                found_value = obstacle_values[idx]
            idx += 1
        if found_value is None:
            raise ObjectNotFoundHTTPError('The provided obstacle value name')
        return found_value

    def update_obstacle_value(self, obstacle_value, name, environment_name=''):
        """Update the named obstacle value with new values."""
        assert isinstance(obstacle_value, ValueType)
        found_value = self.get_obstacle_value_by_name(name, environment_name)
        # BUG FIX: this previously built the parameters from an undefined
        # `obstacle_type` variable (copy-paste from update_obstacle_type),
        # which raised NameError at runtime, and tagged the record with
        # vType='obstacle_type' instead of 'obstacle_value'.
        params = ValueTypeParameters(vtName=obstacle_value.theName, vtDesc=obstacle_value.theDescription, vType='obstacle_value', envName=environment_name, vtScore=obstacle_value.theScore, vtRat=obstacle_value.theRationale)
        params.setId(found_value.theId)
        try:
            self.db_proxy.updateValueType(params)
        except DatabaseProxyException as ex:
            raise ARMHTTPError(ex)
        except ARMException as ex:
            raise ARMHTTPError(ex)

    def check_existing_obstacle_value(self, name, environment_name):
        """Return True when the named obstacle value exists."""
        try:
            self.get_obstacle_value_by_name(name, environment_name)
            return True
        except ObjectNotFoundHTTPError:
            return False
    # endregion

    def convert_properties(self, real_props=None, fake_props=None):
        """Convert environment properties between model objects and JSON dicts.

        Exactly one of real_props (model objects, prepared for serialization)
        or fake_props (JSON dicts, converted to model objects) must be given;
        otherwise MissingParameterHTTPError is raised.
        """
        new_props = []
        if real_props is not None:
            for real_prop in real_props:
                assert isinstance(real_prop, ObstacleEnvironmentProperties)
                del real_prop.theLabel
                # Re-wrap refinement tuples as RefinementModel objects.
                new_goal_refinements = []
                for gr in real_prop.theGoalRefinements:
                    new_goal_refinements.append(RefinementModel(gr[0], gr[1], gr[2], gr[3], gr[4]))
                new_subgoal_refinements = []
                for sgr in real_prop.theSubGoalRefinements:
                    new_subgoal_refinements.append(RefinementModel(sgr[0], sgr[1], sgr[2], sgr[3], sgr[4]))
                real_prop.theGoalRefinements = new_goal_refinements
                real_prop.theSubGoalRefinements = new_subgoal_refinements
                new_props.append(real_prop)
        elif fake_props is not None:
            for fake_prop in fake_props:
                if fake_prop is not None:
                    check_required_keys(fake_prop, ObstacleEnvironmentPropertiesModel.required)
                    # Unpack refinement dicts into the tuple form the model expects.
                    new_goal_refinements = []
                    for gr in fake_prop['theGoalRefinements']:
                        new_goal_refinements.append((gr['theEndName'], gr['theEndType'], gr['theRefType'], gr['isAlternate'], gr['theRationale']))
                    new_subgoal_refinements = []
                    for sgr in fake_prop['theSubGoalRefinements']:
                        new_subgoal_refinements.append((sgr['theEndName'], sgr['theEndType'], sgr['theRefType'], sgr['isAlternate'], sgr['theRationale']))
                    new_prop = ObstacleEnvironmentProperties(environmentName=fake_prop['theEnvironmentName'], lbl='', definition=fake_prop['theDefinition'], category=fake_prop['theCategory'], gRefs=new_goal_refinements, sgRefs=new_subgoal_refinements, concs=fake_prop['theConcerns'])
                    new_prop.theProbability = fake_prop['theProbability']
                    new_prop.theProbabilityRationale = fake_prop['theProbabilityRationale']
                    new_props.append(new_prop)
        else:
            self.close()
            raise MissingParameterHTTPError(param_names=['real_props', 'fake_props'])
        return new_props

    def from_json(self, request):
        """Deserialize an Obstacle from a Flask request's JSON payload."""
        json = request.get_json(silent=True)
        if json is False or json is None:
            self.close()
            raise MalformedJSONHTTPError(data=request.get_data())
        json_dict = json['object']
        check_required_keys(json_dict, ObstacleModel.required)
        json_dict['__python_obj__'] = Obstacle.__module__ + '.' + Obstacle.__name__
        props_list = json_dict.pop('theEnvironmentProperties', [])
        json_dict.pop('theEnvironmentDictionary', None)
        real_props = self.convert_properties(fake_props=props_list)
        new_json_obstacle = json_serialize(json_dict)
        new_json_obstacle = json_deserialize(new_json_obstacle)
        new_json_obstacle.theEnvironmentProperties = real_props
        if not isinstance(new_json_obstacle, Obstacle):
            self.close()
            raise MalformedJSONHTTPError(data=request.get_data())
        else:
            return new_json_obstacle

    def simplify(self, obstacle):
        """Strip database-only attributes so the obstacle serializes cleanly."""
        obstacle.theEnvironmentProperties = self.convert_properties(real_props=obstacle.theEnvironmentProperties)
        assert isinstance(obstacle, Obstacle)
        del obstacle.theId
        del obstacle.theEnvironmentDictionary
        return obstacle

    def generate_vulnerability(self, name, pathValues=None):
        """Create a vulnerability (plus goal associations) derived from the
        named obstacle.  pathValues is accepted for API compatibility but
        unused; the previous mutable default ([]) is now None.
        """
        obs = self.db_proxy.dimensionObject(name, 'obstacle')
        vps = []
        gaps = []
        for op in obs.environmentProperties():
            vps.append(VulnerabilityEnvironmentProperties(op.name(), 'Negligible', op.concerns()))
            gaps.append(GoalAssociationParameters(op.name(), obs.name(), 'obstacle', 'and', obs.name() + '(V)', 'vulnerability'))
        v = VulnerabilityParameters(obs.name() + '(V)', obs.name(), self.db_proxy.defaultValue('vulnerability_type'), [], vps)
        self.db_proxy.addVulnerability(v)
        for gap in gaps:
            self.db_proxy.addGoalAssociation(gap)
| |
import re
import math
import numpy as np
from parsimonious.grammar import Grammar
from parsimonious.nodes import NodeVisitor
grammar = Grammar(
r"""
expr_and = expr (and_expr)*
and_expr = wsp andor wsp expr
expr = biexpr / unexpr / value
biexpr = value ws binaryop_no_andor ws expr
unexpr = unaryop expr
value = parenval /
number /
boolean /
function /
string /
attr
parenval = "(" ws expr ws ")"
function = fname "(" ws arg_list? ws ")"
arg_list = expr (ws "," ws expr)*
number = ~"\d*\.?\d+"i
string = ~"\'\w*\'"i
attr = ~"\w[\w\d]*"i
fname = ~"\w[\w\d]*"i
boolean = "true" / "false"
binaryop_no_andor = "+" / "-" / "*" / "/" / "=" / "<>" /
"<=" / ">=" / "<" / ">"
andor = "and" / "or"
unaryop = "+" / "-" / "not"
ws = ~"\s*"i
wsp = ~"\s+"i
"""
)
tree = grammar.parse("a = f(1,2) + f(1 , a)")
def unary(op, v):
    """Apply unary operator `op` ("+", "-", or case-insensitive "not")
    to value `v`.  Returns None for an unrecognized operator."""
    lowered = op.lower()
    if lowered == "not":
        return not v
    if op == "-":
        return -v
    if op == "+":
        return v
def binary(op, l, r):
    """Apply binary operator `op` to operands `l` and `r`.

    Unrecognized operators evaluate to True (the original fall-through
    behavior).
    """
    dispatch = {
        "+":   lambda a, b: a + b,
        "/":   lambda a, b: a / b,
        "*":   lambda a, b: a * b,
        "-":   lambda a, b: a - b,
        "=":   lambda a, b: a == b,
        "<>":  lambda a, b: a != b,
        "and": lambda a, b: a and b,
        "or":  lambda a, b: a or b,
        "<":   lambda a, b: a < b,
        ">":   lambda a, b: a > b,
        "<=":  lambda a, b: a <= b,
        ">=":  lambda a, b: a >= b,
    }
    fn = dispatch.get(op)
    if fn is None:
        return True
    return fn(l, r)
class Expr(object):
    """AST node for a unary (r is None) or binary expression.

    l and r are callables taking (tup, tup2); calling the node evaluates
    its operands and applies `op` via the module-level unary/binary helpers.
    """

    def __init__(self, op, l, r=None):
        self.op = op
        self.l = l
        self.r = r

    def __str__(self):
        if not self.r:
            return "%s %s" % (self.op, self.l)
        return "%s %s %s" % (self.l, self.op, self.r)

    def __call__(self, tup, tup2=None):
        left = self.l(tup, tup2)
        if self.r is None:
            return unary(self.op, left)
        return binary(self.op, left, self.r(tup, tup2))
class Func(object):
    """
    Callable AST node for function application.

    This object needs to deal with scalar AND aggregation functions:
    aggregation functions consume a "__group__" list of tuples, while
    scalar functions evaluate their arguments against a single tuple.
    """
    # Aggregation functions: applied to a column of grouped values.
    agg_func_lookup = dict(
        avg=np.mean,
        count=len,
        sum=np.sum,
        std=np.std,
        stddev=np.std
    )
    # Scalar functions: applied to per-tuple argument values.
    scalar_func_lookup = dict(
        lower=lambda s: str(s).lower()
    )

    def __init__(self, name, args):
        self.name = name.lower()
        self.args = args

    def __str__(self):
        args = ",".join(map(str, self.args))
        return "%s(%s)" % (self.name, args)

    def __call__(self, tup, tup2=None):
        f = Func.agg_func_lookup.get(self.name, None)
        if f:
            if "__group__" not in tup:
                # BUG FIX: the message previously had an unfilled %s placeholder.
                raise Exception("aggregation function %s called but input is not a group!" % self.name)
            args = []
            for gtup in tup["__group__"]:
                args.append([arg(gtup) for arg in self.args])
            # make the arguments columnar:
            # [ (a,a,a,a), (b,b,b,b) ]
            args = zip(*args)
            return f(*args)
        # BUG FIX: this was `agg_func_lookup.get(...)` -- an unqualified name
        # (NameError) that also re-checked the aggregate table instead of the
        # scalar one, so scalar functions like lower() were unreachable.
        f = Func.scalar_func_lookup.get(self.name, None)
        if f:
            args = [arg(tup, tup2) for arg in self.args]
            # BUG FIX: was f(args), which passed the whole list as one argument.
            return f(*args)
        raise Exception("I don't recognize function %s" % self.name)
class Literal(object):
    """Constant-value AST node; ignores the tuple arguments when called."""

    def __init__(self, v):
        self.v = v

    def __call__(self, tup=None, tup2=None):
        return self.v

    def __str__(self):
        # Quote string constants (py2 `basestring` check preserved).
        return "'%s'" % self.v if isinstance(self.v, basestring) else str(self.v)
class Attr(object):
    """AST node that resolves an attribute name against one of two tuples
    (dicts), checking `tup` first and then `tup2`."""

    def __init__(self, attr):
        self.attr = attr

    def __str__(self):
        return self.attr

    def __call__(self, tup, tup2=None):
        name = self.attr
        if name in tup:
            return tup[name]
        if tup2 and name in tup2:
            return tup2[name]
        raise Exception("couldn't find %s in either tuple" % name)
def flatten(children, sidx, lidx):
    """Return a list of children[sidx] followed by the truthy items of
    children[lidx] (listified if it is not already a list)."""
    rest = children[lidx]
    if not isinstance(rest, list):
        rest = [rest]
    result = [children[sidx]]
    for item in rest:
        if item:
            result.append(item)
    return result
class Visitor(NodeVisitor):
    """Transforms a parsimonious parse tree into a callable AST built from
    Expr/Func/Literal/Attr nodes."""
    grammar = grammar

    def visit_expr_and(self, node, children):
        # Fold "e1 and e2 or e3 ..." left-to-right into nested Expr nodes.
        l = flatten(children, 0, 1)
        ret = l[0]
        for op, expr in l[1:]:
            ret = Expr(op, ret, expr)
        return ret

    def visit_and_expr(self, node, children):
        # (operator text, right-hand expression)
        return (children[1], children[3])

    def visit_expr(self, node, children):
        return children[0]

    def visit_attr(self, node, children):
        return Attr(node.text)

    def visit_binaryop_no_andor(self, node, children):
        return node.text

    def visit_andor(self, node, children):
        return node.text

    def visit_biexpr(self, node, children):
        return Expr(children[2], children[0], children[-1])

    def visit_unexpr(self, node, children):
        return Expr(children[0], children[1])

    def visit_function(self, node, children):
        fname = children[0]
        arglist = children[3]
        return Func(fname, arglist)

    def visit_fname(self, node, children):
        return node.text

    def visit_arg_list(self, node, children):
        args = []
        e = children[0]
        l = filter(bool, children[1])
        args.append(e)
        args.extend(l)
        return args

    def visit_number(self, node, children):
        return Literal(float(node.text))

    def visit_string(self, node, children):
        # Keeps the surrounding quotes in the literal's value.
        return Literal(node.text)

    def visit_parenval(self, node, children):
        # BUG FIX: this method was defined twice with identical bodies;
        # the redundant duplicate has been removed.
        return children[2]

    def visit_value(self, node, children):
        return children[0]

    def visit_boolean(self, node, children):
        if node.text == "true":
            return Literal(True)
        return Literal(False)

    def generic_visit(self, node, children):
        # list() keeps len() working on Python 3 where filter is lazy;
        # behavior is identical on Python 2.
        children = list(filter(bool, children))
        if len(children) == 1:
            return children[0]
        return children
def parse(s):
    """Parse expression string `s` into a callable AST using the grammar Visitor."""
    visitor = Visitor()
    return visitor.parse(s)
# Ad-hoc smoke test (Python 2 print statements): evaluate two expressions
# against the tuple {'a': 2}.
if __name__ == "__main__":
    print parse("a < a+1 and a > 10 and a < 9")(dict(a=2))
    print parse("a+1")(dict(a=2))
| |
"""
Driver for PDB2PQR
This module takes a PDB file as input and performs optimizations
before yielding a new PDB-style file as output.
Ported to Python by Todd Dolinsky (todd@ccb.wustl.edu)
Washington University in St. Louis
Parsing utilities provided by Nathan A. Baker (baker@biochem.wustl.edu)
Washington University in St. Louis
Copyright (c) 2002-2010, Jens Erik Nielsen, University College Dublin;
Nathan A. Baker, Washington University in St. Louis; Paul Czodrowski &
Gerhard Klebe, University of Marburg
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names of University College Dublin, Washington University in
St. Louis, or University of Marburg nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE.
"""
__date__ = "5 April 2010"
__author__ = "Todd Dolinsky, Nathan Baker, Jens Nielsen, Paul Czodrowski, Jan Jensen, Samir Unni, Yong Huang"
__version__ = "1.6"
import string
import sys
import getopt
import os
import time
import httplib
import re
import glob
import tempfile
from src import pdb
from src import utilities
from src import structures
from src import routines
from src import protein
#from src import server
from src.pdb import *
from src.utilities import *
from src.structures import *
from src.definitions import *
from src.forcefield import *
from src.routines import *
from src.protein import *
from src.server import *
from src.hydrogens import *
from src.aconf import *
from StringIO import *
from main import *
def printHeader(pagetitle,have_opal=None,jobid=None):
"""
Function to print html headers
"""
if jobid:
if have_opal:
print "Location: querystatus.cgi?jobid=%s&typeofjob=opal\n" % (jobid,typeOfJob)
else:
print "Location: querystatus.cgi?jobid=%s&typeofjob=local\n" & (jobid,typeOfJob)
#print "Content-type: text/html\n"
print "<HTML>"
print "<HEAD>"
print "\t<TITLE>%s</TITLE>" % pagetitle
print "\t<link rel=\"stylesheet\" href=\"%s\" type=\"text/css\">\n" % STYLESHEET
print "</HEAD>"
return
def redirector(name):
    """
    Prints a page which redirects the user to querystatus.cgi and writes starting time to file
    """
    # Record the job start time for later status/elapsed-time reporting.
    starttimefile = open('%s%s%s/pdb2pqr_start_time' % (INSTALLDIR, TMPDIR, name), 'w')
    starttimefile.write(str(time.time()))
    starttimefile.close()
    # Meta-refresh page that immediately redirects to the status page.
    lines = [
        "<html>\n",
        "\t<head>\n",
        "\t\t<meta http-equiv=\"Refresh\" content=\"0; url=%squerystatus.cgi?jobid=%s&calctype=pdb2pqr\">\n" % (WEBSITE, name),
        "\t</head>\n",
        "</html>\n",
    ]
    return "".join(lines)
def mainCGI():
"""
Main driver for running PDB2PQR from a web page
"""
print "Content-type: text/html\n"
import cgi
import cgitb
cgitb.enable()
form = cgi.FieldStorage()
ff = form["FF"].value
input = 0
apbs_input = form.has_key("INPUT")
typemap = form.has_key("TYPEMAP")
neutraln = form.has_key("NEUTRALN")
neutralc = form.has_key("NEUTRALC")
if HAVE_PDB2PQR_OPAL=="1":
have_opal = True
# Opal-specific import statments
from AppService_client import AppServiceLocator, getAppMetadataRequest, launchJobRequest, launchJobBlockingRequest, getOutputAsBase64ByNameRequest
from AppService_types import ns0
from ZSI.TC import String
else:
have_opal = False
if have_opal:
options = {}
options["ff"] = ff
fffile = None
namesfile = None
else:
options = {"extensions":{}}
if form.has_key("DEBUMP"):
options["debump"] = 1
else:
options["debump"] = 0
if form.has_key("OPT"):
options["opt"] = 1
else:
options["opt"] = 0
if form.has_key("PROPKA"):
try:
ph = float(form["PH"].value)
if ph < 0.0 or ph > 14.0: raise ValueError
options["ph"] = ph
except ValueError:
text = "The entered pH of %.2f is invalid! " % form["PH"].value
text += "Please choose a pH between 0.0 and 14.0."
#print "Content-type: text/html\n"
print text
sys.exit(2)
if form.has_key("PDBID"):
pdbfile = getPDBFile(form["PDBID"].value)
pdbfilename = form["PDBID"].value
elif form.has_key("PDB"):
pdbfile = StringIO(form["PDB"].value)
pdbfilename = form["PDB"].filename
pdbfilename = re.split(r'[/\\]',pdbfilename)[-1]
if form.has_key("INPUT"):
input = 1
options["apbs"] = 1
if form.has_key("USERFF"):
if have_opal:
ffname = form["USERFF"].filename
ffname = re.split(r'[/\\]',ffname)[-1]
if ffname[-4:] == ".DAT":
ffname = ffname[:-4]
fffile = StringIO(form["USERFF"].value)
namesfile = StringIO(form["USERNAMES"].value)
options["ff"] = ffname
options["userff"] = fffile
options["usernames"] = namesfile
else:
userff = StringIO(form["USERFF"].value)
usernames = StringIO(form["USERNAMES"].value)
options["ff"] = "user-defined"
options["userff"] = userff
options["usernames"] = usernames
if form.has_key("FFOUT"):
if form["FFOUT"].value != "internal":
options["ffout"] = form["FFOUT"].value
if form.has_key("CHAIN"):
options["chain"] = 1
if form.has_key("WHITESPACE"):
options["whitespace"] = 1
if form.has_key("TYPEMAP"):
options["typemap"] = 1
if form.has_key("NEUTRALN"):
options["neutraln"] = 1
if form.has_key("NEUTRALC"):
options["neutralc"] = 1
if form.has_key("LIGAND"):
if have_opal:
ligandfilename=str(form["LIGAND"].filename)
ligandfilename=re.split(r'[/\\]',ligandfilename)[-1]
# for Windows-style newline compatibility
templigandfilename = tempfile.mkstemp()[1]
templigandfile = open(templigandfilename,'w')
templigandfile.write(form["LIGAND"].value)
templigandfile.close()
templigandfile = open(templigandfilename,'rU')
if have_opal:
options["ligand"] = templigandfile.read()
else:
templigandstring = templigandfile.read() # this variable is used again later to write this file to output
options["ligand"] = StringIO(templigandstring)
templigandfile.close()
if not have_opal:
pdbfilestring = pdbfile.read()
pdblist, errlist = readPDB(StringIO(pdbfilestring))
dummydef = Definition()
dummyprot = Protein(pdblist, dummydef)
if len(pdblist) == 0 and len(errlist) == 0:
text = "Unable to find PDB file - Please make sure this is "
text += "a valid PDB file ID!"
#print "Content-type: text/html\n"
print text
sys.exit(2)
elif dummyprot.numAtoms() > MAXATOMS and "opt" in options:
text = "<HTML><HEAD>"
text += "<TITLE>PDB2PQR Error</title>"
text += "<link rel=\"stylesheet\" href=\"%s\" type=\"text/css\">" % STYLESHEET
text += "</HEAD><BODY><H2>PDB2PQR Error</H2><P>"
text += "Due to server limits, we are currently unable to optimize "
text += "proteins of greater than MAXATOMS atoms on the server (PDB2PQR "
text += "found %s atoms in the selected PDB file). If you " % dummyprot.numAtoms()
text += "want to forgo optimization please try the server again.<P>"
text += "Otherwise you may use the standalone version of PDB2PQR that "
text += "is available from the <a href=\"http://pdb2pqr.sourceforge.net\">"
text += "PDB2PQR SourceForge project page</a>."
text += "<script type=\"text/javascript\">"
text += "var gaJsHost = ((\"https:\" == document.location.protocol) ? \"https://ssl.\" : \"http://www.\");"
text += "document.write(unescape(\"%3Cscript src=\'\" + gaJsHost + \"google-analytics.com/ga.js\' type=\'text/javascript\'%3E%3C/script%3E\"));"
text += "</script>"
text += "<script type=\"text/javascript\">"
text += "try {"
text += "var pageTracker = _gat._getTracker(\"UA-11026338-3\");"
for key in options:
text += "pageTracker._trackPageview(\"/main_cgi/has_%s_%s.html\");" % (key, options[key])
text += "pageTracker._trackPageview();"
text += "} catch(err) {}</script>"
text += "</BODY></HTML>"
#print "Content-type: text/html\n"
print text
sys.exit(2)
try:
if have_opal:
ligandFile=None
ffFile=None
namesFile=None
#else:
starttime = time.time()
name = setID(starttime)
os.makedirs('%s%s%s' % (INSTALLDIR, TMPDIR, name))
apbsInputFile = open('%s%s%s/apbs_input' % (INSTALLDIR, TMPDIR, name),'w')
apbsInputFile.write(str(apbs_input))
apbsInputFile.close()
typemapInputFile = open('%s%s%s/typemap' % (INSTALLDIR, TMPDIR, name),'w')
typemapInputFile.write(str(typemap))
typemapInputFile.close()
if have_opal:
myopts=""
for key in options:
if key=="opt":
if options[key]==0:
# user does not want optimization
key="noopt"
else:
# pdb2pqr optimizes by default, don't bother with flag
continue
elif key=="debump":
if options[key]==0:
# user does not want debumping
key="nodebump"
else:
# pdb2pqr debumps by default, so change this flag to --nodebump
continue
elif key=="ph":
val=options[key]
key="with-ph=%s" % val
elif key=="ffout":
val=options[key]
key="ffout=%s" % val
elif key=="ligand":
val=ligandfilename
key="ligand=%s" % val
ligandFile = ns0.InputFileType_Def('inputFile')
ligandFile._name = val
ligandFile._contents = options["ligand"]
elif key=="apbs":
key="apbs-input"
elif key=="chain":
key="chain"
elif key=="whitespace":
key="whitespace"
elif key=="typemap":
key="typemap"
elif key=="ff":
val=options[key]
key="ff=%s" % val
if fffile:
ffFile = ns0.InputFileType_Def('inputFile')
ffFile._name = val + ".DAT"
ffFileString = fffile.read()
ffFile._contents = ffFileString
if namesfile:
namesFile = ns0.InputFileType_Def('inputFile')
namesFile._name = val + ".names"
namesFileString = namesfile.read()
namesFile._contents = namesFileString
if key not in ["userff", "usernames"]:
myopts+="--"+str(key)+" "
myopts+=str(pdbfilename)+" "
if pdbfilename[-4:]==".pdb":
myopts+="%s.pqr" % str(pdbfilename[:-4])
else:
myopts+="%s.pqr" % str(pdbfilename)
appLocator = AppServiceLocator()
appServicePort = appLocator.getAppServicePort(PDB2PQR_OPAL_URL)
# launch job
req = launchJobRequest()
req._argList = myopts
inputFiles = []
pdbOpalFile = ns0.InputFileType_Def('inputFile')
pdbOpalFile._name = pdbfilename
pdbOpalFile._contents = pdbfile.read()
pdbfile.close()
inputFiles.append(pdbOpalFile)
if ligandFile:
inputFiles.append(ligandFile)
if ffFile:
inputFiles.append(ffFile)
if namesFile:
inputFiles.append(namesFile)
req._inputFile=inputFiles
try:
resp=appServicePort.launchJob(req)
except Exception, e:
printHeader("PDB2PQR Job Submission - Error")
print "<BODY>\n<P>"
print "There was an error with your job submission<br>"
print "</P>"
print "<script type=\"text/javascript\">"
print "var gaJsHost = ((\"https:\" == document.location.protocol) ? \"https://ssl.\" : \"http://www.\");"
print "document.write(unescape(\"%3Cscript src=\'\" + gaJsHost + \"google-analytics.com/ga.js\' type=\'text/javascript\'%3E%3C/script%3E\"));"
print "</script>"
print "<script type=\"text/javascript\">"
print "try {"
print "var pageTracker = _gat._getTracker(\"UA-11026338-3\");"
for key in options:
print "pageTracker._trackPageview(\"/main_cgi/has_%s_%s.html\");" % (key, options[key])
print "pageTracker._trackPageview();"
print "} catch(err) {}</script>"
print "</BODY>"
print "</HTML>"
sys.exit(2)
#printHeader("PDB2PQR Job Submission",have_opal,jobid=resp._jobID)
pdb2pqrOpalJobIDFile = open('%s%s%s/pdb2pqr_opal_job_id' % (INSTALLDIR, TMPDIR, name), 'w')
pdb2pqrOpalJobIDFile.write(resp._jobID)
pdb2pqrOpalJobIDFile.close()
print redirector(name)
if options.has_key("userff"):
options["userff"] = ffFileString
if options.has_key("usernames"):
options["usernames"] = namesFileString
# Recording CGI run information for PDB2PQR Opal
pdb2pqrOpalLogFile = open('%s%s%s/pdb2pqr_opal_log' % (INSTALLDIR, TMPDIR, name), 'w')
pdb2pqrOpalLogFile.write(str(options)+'\n'+str(ff)+'\n'+str(os.environ["REMOTE_ADDR"]))
pdb2pqrOpalLogFile.close()
else:
#pqrpath = startServer(name)
statusfile = open('%s%s%s/pdb2pqr_status' % (INSTALLDIR, TMPDIR, name), 'w')
statusfile.write('running')
statusfile.close()
pid = os.fork()
if pid:
print redirector(name)
sys.exit()
else:
currentdir = os.getcwd()
os.chdir("/")
os.setsid()
os.umask(0)
os.chdir(currentdir)
os.close(1) # not sure if these
os.close(2) # two lines are necessary
pqrpath = '%s%s%s/%s.pqr' % (INSTALLDIR, TMPDIR, name, name)
options["outname"] = pqrpath
options["verbose"] = ""
orig_stdout = sys.stdout
orig_stderr = sys.stderr
sys.stdout = open('%s%s%s/pdb2pqr_stdout.txt' % (INSTALLDIR, TMPDIR, name), 'w')
sys.stderr = open('%s%s%s/pdb2pqr_stderr.txt' % (INSTALLDIR, TMPDIR, name), 'w')
header, lines, missedligands = runPDB2PQR(pdblist, ff, options)
sys.stdout.close()
sys.stderr.close()
sys.stdout = orig_stdout
sys.stderr = orig_stderr
endtimefile = open('%s%s%s/pdb2pqr_end_time' % (INSTALLDIR, TMPDIR, name), 'w')
endtimefile.write(str(time.time()))
endtimefile.close()
pqrfile = open(pqrpath, "w")
pqrfile.write(header)
for line in lines:
# Adding whitespaces if --whitespace is in the options
if "whitespace" in options.keys() and options["whitespace"] == 1:
if line[0:4] == 'ATOM':
newline = line[0:16] + ' ' + line[16:38] + ' ' + line[38:46] + ' ' + line[46:]
pqrfile.write("%s\n" % string.strip(newline))
elif line[0:6] == 'HETATM':
newline = line[0:16] + ' ' + line[16:38] + ' ' + line[38:46] + ' ' + line[46:]
pqrfile.write("%s\n" % string.strip(newline))
else:
pqrfile.write("%s\n" % string.strip(line))
pqrfile.close()
if input:
from src import inputgen
from src import psize
method = "mg-auto"
size = psize.Psize()
size.parseInput(pqrpath)
size.runPsize(pqrpath)
async = 0 # No async files here!
myinput = inputgen.Input(pqrpath, size, method, async)
myinput.printInputFiles()
myinput.dumpPickle()
endtime = time.time() - starttime
#createResults(header, input, name, endtime, missedligands)
logRun(options, endtime, len(lines), ff, os.environ["REMOTE_ADDR"])
#printHeader("PDB2PQR Job Submission",have_opal,jobid=name)
if form.has_key("LIGAND"):
outputligandfile = open('%s%s%s/%s.mol2' % (INSTALLDIR,TMPDIR, name, name),'w')
outputligandfile.write(templigandstring)
outputligandfile.close()
outputpdbfile = open('%s%s%s/%s.pdb' % (INSTALLDIR,TMPDIR,name,name),'w')
outputpdbfile.write(pdbfilestring)
outputpdbfile.close()
statusfile = open('%s%s%s/pdb2pqr_status' % (INSTALLDIR, TMPDIR, name), 'w')
statusfile.write('complete\n')
filelist = glob.glob('%s%s%s/%s*' % (INSTALLDIR, TMPDIR, name, name))
for filename in filelist:
statusfile.write(filename+'\n')
statusfile.close()
except StandardError, details:
print details
createError(name, details)
| |
# Copyright (c) 2016, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe, re, frappe.utils
from frappe.desk.notifications import get_notifications
from frappe import _
@frappe.whitelist()
def get_bot_reply(question):
	'''Whitelisted API endpoint: return the bot's reply to *question*.'''
	bot = BotReply()
	return bot.get_reply(question)
class BotParser(object):
	'''Base class for bot query parsers.

	Subclasses implement ``get_reply`` and return a markdown string when
	they can answer the query, or ``None`` to let other parsers try.
	'''
	def __init__(self, reply, query):
		# `reply` is the coordinating BotReply; keep references to the
		# tables it identified so parsers can resolve doctype names
		self.query = query
		self.reply = reply
		self.tables = reply.tables
		self.doctype_names = reply.doctype_names

	def has(self, *words):
		'''Return True if any of the given words appears as a whole word
		in the query (None otherwise).'''
		for word in words:
			# re.escape so a word containing regex metacharacters
			# (e.g. "c++") cannot break or hijack the pattern
			if re.search(r'\b{0}\b'.format(re.escape(word)), self.query):
				return True

	def startswith(self, *words):
		'''Return True if the query starts with any of the given words
		(None otherwise).'''
		for w in words:
			if self.query.startswith(w):
				return True

	def strip_words(self, query, *words):
		'''Remove every whole-word occurrence of the given words from
		*query* and return the stripped result.'''
		for word in words:
			query = re.sub(r'\b{0}\b'.format(re.escape(word)), '', query)
		return query.strip()

	def format_list(self, data):
		'''Format a list of records as a markdown line of desk links.'''
		return _('I found these: ') + ', '.join([' [{title}](#Form/{doctype}/{name})'.format(
			title = d.title or d.name,
			doctype=self.get_doctype(),
			name=d.name) for d in data])

	def get_doctype(self):
		'''Return the proper-cased doctype name of the first identified table.'''
		return self.doctype_names[self.tables[0]]
class ShowNotificationBot(BotParser):
	'''Reply with the user's open notifications.'''
	def get_reply(self):
		# only respond to greeting / notification style queries
		if not self.has("whatsup", "what's up", "wassup", "whats up",
				'notifications', 'open tasks'):
			return
		counts = get_notifications().get('open_count_doctype')
		open_items = sorted(counts.items())
		if not open_items:
			return 'Take it easy, nothing urgent needs your attention'
		links = ["{0} [{1}](#List/{1})".format(item[1], item[0])
			for item in open_items if item[1] > 0]
		return "Following items need your attention:\n\n" + "\n\n".join(links)
class GetOpenListBot(BotParser):
	'''List "open" documents of a doctype (e.g. "open issues").'''
	def get_reply(self):
		if not self.startswith('open', 'show open', 'list open', 'get open'):
			return
		if not self.tables:
			return
		from frappe.desk.notifications import get_notification_config
		doctype = self.get_doctype()
		filters = get_notification_config().get('for_doctype').get(doctype, None)
		if not filters:
			return _("Can't identify open {0}. Try something else.").format(doctype)
		if isinstance(filters, dict):
			records = frappe.get_list(doctype, filters=filters)
		else:
			# `filters` is a dotted path to a function returning (name, title) rows
			records = [{'name': row[0], 'title': row[1]}
				for row in frappe.get_attr(filters)(as_list=True)]
		return ", ".join('[{title}](#Form/{doctype}/{name})'.format(doctype=doctype,
			name=rec.get('name'), title=rec.get('title') or rec.get('name')) for rec in records)
class ListBot(BotParser):
	'''Answer "list <doctype>" / "show <doctype> [like <text>]" queries.'''
	def get_reply(self):
		# normalize a trailing "... list" into a leading "list ..."
		if self.query.endswith(' ' + _('list')) and self.startswith(_('list')):
			self.query = _('list') + ' ' + self.query.replace(' ' + _('list'), '')
		if not self.startswith(_('list'), _('show')):
			return
		like = None
		needle = ' ' + _('like') + ' '
		if needle in self.query:
			self.query, like = self.query.split(needle)
		self.tables = self.reply.identify_tables(self.query.split(None, 1)[1])
		if not self.tables:
			return
		doctype = self.get_doctype()
		meta = frappe.get_meta(doctype)
		fields = ['name']
		if meta.title_field:
			fields.append('`{0}` as title'.format(meta.title_field))
		filters = {}
		if like:
			# match the "like" text against the title field (or name)
			filters = {
				meta.title_field or 'name': ('like', '%' + like + '%')
			}
		return self.format_list(frappe.get_list(self.get_doctype(), fields=fields, filters=filters))
class CountBot(BotParser):
	'''Answer "how many <doctype>" with a record count.'''
	def get_reply(self):
		if not self.startswith('how many'):
			return
		self.tables = self.reply.identify_tables(self.query.split(None, 1)[1])
		if not self.tables:
			return
		# the doctype comes from the known-doctype list, so embedding it
		# in the table name is safe here
		count = frappe.db.sql('select count(*) from `tab{0}`'.format(self.get_doctype()))[0][0]
		return str(count)
class FindBot(BotParser):
	'''Search records by name or title, e.g. "find asian in sales orders".'''
	def get_reply(self):
		if not self.startswith('find', 'search'):
			return
		query = self.query.split(None, 1)[1]
		# Split "<text> from|in <table>". Use word-boundary re.split so
		# words that merely contain "in"/"from" (e.g. "printer") are not
		# split apart, and limit to one split so repeated occurrences
		# cannot raise ValueError. `text`/`table` were previously left
		# unbound when neither keyword matched (NameError).
		text = table = None
		if self.has('from'):
			text, table = re.split(r'\bfrom\b', query, maxsplit=1)
		if self.has('in'):
			text, table = re.split(r'\bin\b', query, maxsplit=1)
		if not table:
			# was assigned to self.out and never returned; return the hint
			return _("You can find things by asking 'find orange in customers'")
		text = text.strip()
		self.tables = self.reply.identify_tables(table.strip())
		if not self.tables:
			# was assigned to self.out and never returned
			return _("Could not identify {0}").format(table)
		filters = {'name': ('like', '%{0}%'.format(text))}
		or_filters = None
		title_field = frappe.get_meta(self.get_doctype()).title_field
		if title_field and title_field != 'name':
			# also match against the title field when one exists
			or_filters = {'title': ('like', '%{0}%'.format(text))}
		data = frappe.get_list(self.get_doctype(),
			filters=filters, or_filters=or_filters)
		if data:
			return self.format_list(data)
		return _("Could not find {0} in {1}").format(text, self.get_doctype())
class BotReply(object):
	'''Build a reply for the bot by calling all registered parsers.'''
	def __init__(self):
		self.tables = []

	def get_reply(self, query):
		'''Return a reply string for *query*: built-in answers first,
		then every parser registered via the `bot_parsers` hook.'''
		self.query = query.lower()
		self.setup()
		self.pre_process()

		# basic replies
		if self.query.split()[0] in ("hello", "hi"):
			return _("Hello {0}").format(frappe.utils.get_fullname())
		if self.query == "help":
			return help_text.format(frappe.utils.get_fullname())

		# build using parsers
		replies = []
		for parser in frappe.get_hooks('bot_parsers'):
			reply = None
			try:
				reply = frappe.get_attr(parser)(self, query).get_reply()
			except frappe.PermissionError:
				reply = _("Oops, you are not allowed to know that")
			if reply:
				replies.append(reply)
		if replies:
			return '\n\n'.join(replies)
		# was `if not reply: ...`, which raised NameError when no
		# bot_parsers hooks were registered; fall back unconditionally
		return _("Don't know, ask 'help'")

	def setup(self):
		self.setup_tables()
		self.identify_tables()

	def pre_process(self):
		# drop a trailing question mark and expand the "todo" shorthand
		if self.query.endswith("?"):
			self.query = self.query[:-1]
		if self.query in ("todo", "to do"):
			self.query = "open todo"

	def setup_tables(self):
		'''Cache lower-cased names of all non-child doctypes.'''
		tables = frappe.get_all("DocType", {"istable": 0})
		self.all_tables = [d.name.lower() for d in tables]
		self.doctype_names = {d.name.lower(): d.name for d in tables}

	def identify_tables(self, query=None):
		'''Return (and store in self.tables) the doctypes mentioned in
		*query*; also matches a naive singular formed by dropping the
		last character (e.g. "customer" for "customers").'''
		if not query:
			query = self.query
		self.tables = []
		for t in self.all_tables:
			if t in query or t[:-1] in query:
				self.tables.append(t)
		return self.tables
# Canned reply for the "help" query; BotReply.get_reply fills {0} with the
# current user's full name.
help_text = """Hello {0}, I am a K.I.S.S Bot, not AI, so be kind. I can try answering a few questions like,
- "todo": list my todos
- "show customers": list customers
- "show customers like giant": list customer containing giant
- "locate shirt": find where to find item "shirt"
- "open issues": find open issues, try "open sales orders"
- "how many users": count number of users
- "find asian in sales orders": find sales orders where name or title has "asian"
have fun!
"""
| |
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Thu Apr 11 15:06:29 2013 by generateDS.py version 2.9a.
#
import sys
from mixbox.binding_utils import *
from stix.bindings import lookup_extension, register_extension
import stix.bindings.stix_common as stix_common_binding
import stix.bindings.data_marking as data_marking_binding
XML_NS = "http://stix.mitre.org/ThreatActor-1"
#
# Data representation classes.
#
class ObservedTTPsType(stix_common_binding.GenericRelationshipListType):
    """Binding for the ThreatActor ``Observed_TTPs`` element: a
    GenericRelationshipList whose members are ``Observed_TTP`` children.

    NOTE: generateDS.py-generated code — keep the export/build logic in
    lock-step with the other generated list types in this module.
    """
    subclass = None
    superclass = stix_common_binding.GenericRelationshipListType
    def __init__(self, scope='exclusive', Observed_TTP=None):
        super(ObservedTTPsType, self).__init__(scope=scope)
        # fresh list per instance (no shared mutable default)
        if Observed_TTP is None:
            self.Observed_TTP = []
        else:
            self.Observed_TTP = Observed_TTP
    def factory(*args_, **kwargs_):
        # instantiate the registered subclass when one is installed
        if ObservedTTPsType.subclass:
            return ObservedTTPsType.subclass(*args_, **kwargs_)
        else:
            return ObservedTTPsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # generated accessor/mutator API for the Observed_TTP child list
    def get_Observed_TTP(self): return self.Observed_TTP
    def set_Observed_TTP(self, Observed_TTP): self.Observed_TTP = Observed_TTP
    def add_Observed_TTP(self, value): self.Observed_TTP.append(value)
    def insert_Observed_TTP(self, index, value): self.Observed_TTP[index] = value
    def hasContent_(self):
        # True when this class's children or any superclass content is set
        if (
            self.Observed_TTP or
            super(ObservedTTPsType, self).hasContent_()
            ):
            return True
        else:
            return False
    def export(self, lwrite, level, nsmap, namespace_=XML_NS, name_='ObservedTTPsType', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML via the *lwrite* callable."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(lwrite, level, pretty_print)
        lwrite('<%s:%s%s' % (nsmap[namespace_], name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(lwrite, level, already_processed, namespace_, name_='ObservedTTPsType')
        if self.hasContent_():
            lwrite('>%s' % (eol_, ))
            self.exportChildren(lwrite, level + 1, nsmap, XML_NS, name_, pretty_print=pretty_print)
            showIndent(lwrite, level, pretty_print)
            lwrite('</%s:%s>%s' % (nsmap[namespace_], name_, eol_))
        else:
            # no content: self-closing tag
            lwrite('/>%s' % (eol_, ))
    def exportAttributes(self, lwrite, level, already_processed, namespace_='ta:', name_='ObservedTTPsType'):
        # all attributes come from the superclass
        super(ObservedTTPsType, self).exportAttributes(lwrite, level, already_processed, namespace_, name_='ObservedTTPsType')
    def exportChildren(self, lwrite, level, nsmap, namespace_=XML_NS, name_='ObservedTTPsType', fromsubclass_=False, pretty_print=True):
        # superclass children are written first, then our Observed_TTP list
        super(ObservedTTPsType, self).exportChildren(lwrite, level, nsmap, namespace_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Observed_TTP_ in self.Observed_TTP:
            Observed_TTP_.export(lwrite, level, nsmap, namespace_, name_='Observed_TTP', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node*."""
        self.__sourcenode__ = node
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # local tag name without the XML namespace (Tag_pattern_ comes
            # from the mixbox.binding_utils star import)
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(ObservedTTPsType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Observed_TTP':
            obj_ = stix_common_binding.RelatedTTPType.factory()
            obj_.build(child_)
            self.Observed_TTP.append(obj_)
        # delegate any other child elements to the superclass
        super(ObservedTTPsType, self).buildChildren(child_, node, nodeName_, True)
# end class ObservedTTPsType
class AssociatedCampaignsType(stix_common_binding.GenericRelationshipListType):
    """Binding for the ThreatActor ``Associated_Campaigns`` element: a
    GenericRelationshipList whose members are ``Associated_Campaign``
    children.

    NOTE: generateDS.py-generated code — keep the export/build logic in
    lock-step with the other generated list types in this module.
    """
    subclass = None
    superclass = stix_common_binding.GenericRelationshipListType
    def __init__(self, scope='exclusive', Associated_Campaign=None):
        super(AssociatedCampaignsType, self).__init__(scope=scope)
        # fresh list per instance (no shared mutable default)
        if Associated_Campaign is None:
            self.Associated_Campaign = []
        else:
            self.Associated_Campaign = Associated_Campaign
    def factory(*args_, **kwargs_):
        # instantiate the registered subclass when one is installed
        if AssociatedCampaignsType.subclass:
            return AssociatedCampaignsType.subclass(*args_, **kwargs_)
        else:
            return AssociatedCampaignsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # generated accessor/mutator API for the Associated_Campaign child list
    def get_Associated_Campaign(self): return self.Associated_Campaign
    def set_Associated_Campaign(self, Associated_Campaign): self.Associated_Campaign = Associated_Campaign
    def add_Associated_Campaign(self, value): self.Associated_Campaign.append(value)
    def insert_Associated_Campaign(self, index, value): self.Associated_Campaign[index] = value
    def hasContent_(self):
        # True when this class's children or any superclass content is set
        if (
            self.Associated_Campaign or
            super(AssociatedCampaignsType, self).hasContent_()
            ):
            return True
        else:
            return False
    def export(self, lwrite, level, nsmap, namespace_=XML_NS, name_='AssociatedCampaignsType', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML via the *lwrite* callable."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(lwrite, level, pretty_print)
        lwrite('<%s:%s%s' % (nsmap[namespace_], name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(lwrite, level, already_processed, namespace_, name_='AssociatedCampaignsType')
        if self.hasContent_():
            lwrite('>%s' % (eol_, ))
            self.exportChildren(lwrite, level + 1, nsmap, XML_NS, name_, pretty_print=pretty_print)
            showIndent(lwrite, level, pretty_print)
            lwrite('</%s:%s>%s' % (nsmap[namespace_], name_, eol_))
        else:
            # no content: self-closing tag
            lwrite('/>%s' % (eol_, ))
    def exportAttributes(self, lwrite, level, already_processed, namespace_='ta:', name_='AssociatedCampaignsType'):
        # all attributes come from the superclass
        super(AssociatedCampaignsType, self).exportAttributes(lwrite, level, already_processed, namespace_, name_='AssociatedCampaignsType')
    def exportChildren(self, lwrite, level, nsmap, namespace_=XML_NS, name_='AssociatedCampaignsType', fromsubclass_=False, pretty_print=True):
        # superclass children are written first, then our Associated_Campaign list
        super(AssociatedCampaignsType, self).exportChildren(lwrite, level, nsmap, namespace_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Associated_Campaign_ in self.Associated_Campaign:
            Associated_Campaign_.export(lwrite, level, nsmap, namespace_, name_='Associated_Campaign', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node*."""
        self.__sourcenode__ = node
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # local tag name without the XML namespace (Tag_pattern_ comes
            # from the mixbox.binding_utils star import)
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(AssociatedCampaignsType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Associated_Campaign':
            obj_ = stix_common_binding.RelatedCampaignType.factory()
            obj_.build(child_)
            self.Associated_Campaign.append(obj_)
        # delegate any other child elements to the superclass
        super(AssociatedCampaignsType, self).buildChildren(child_, node, nodeName_, True)
# end class AssociatedCampaignsType
class AssociatedActorsType(stix_common_binding.GenericRelationshipListType):
    """Binding for the ThreatActor ``Associated_Actors`` element: a
    GenericRelationshipList whose members are ``Associated_Actor``
    children.

    NOTE: generateDS.py-generated code — keep the export/build logic in
    lock-step with the other generated list types in this module.
    """
    subclass = None
    superclass = stix_common_binding.GenericRelationshipListType
    def __init__(self, scope='exclusive', Associated_Actor=None):
        super(AssociatedActorsType, self).__init__(scope=scope)
        # fresh list per instance (no shared mutable default)
        if Associated_Actor is None:
            self.Associated_Actor = []
        else:
            self.Associated_Actor = Associated_Actor
    def factory(*args_, **kwargs_):
        # instantiate the registered subclass when one is installed
        if AssociatedActorsType.subclass:
            return AssociatedActorsType.subclass(*args_, **kwargs_)
        else:
            return AssociatedActorsType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # generated accessor/mutator API for the Associated_Actor child list
    def get_Associated_Actor(self): return self.Associated_Actor
    def set_Associated_Actor(self, Associated_Actor): self.Associated_Actor = Associated_Actor
    def add_Associated_Actor(self, value): self.Associated_Actor.append(value)
    def insert_Associated_Actor(self, index, value): self.Associated_Actor[index] = value
    def hasContent_(self):
        # True when this class's children or any superclass content is set
        if (
            self.Associated_Actor or
            super(AssociatedActorsType, self).hasContent_()
            ):
            return True
        else:
            return False
    def export(self, lwrite, level, nsmap, namespace_=XML_NS, name_='AssociatedActorsType', namespacedef_='', pretty_print=True):
        """Write this element and its children as XML via the *lwrite* callable."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        showIndent(lwrite, level, pretty_print)
        lwrite('<%s:%s%s' % (nsmap[namespace_], name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(lwrite, level, already_processed, namespace_, name_='AssociatedActorsType')
        if self.hasContent_():
            lwrite('>%s' % (eol_, ))
            self.exportChildren(lwrite, level + 1, nsmap, XML_NS, name_, pretty_print=pretty_print)
            showIndent(lwrite, level, pretty_print)
            lwrite('</%s:%s>%s' % (nsmap[namespace_], name_, eol_))
        else:
            # no content: self-closing tag
            lwrite('/>%s' % (eol_, ))
    def exportAttributes(self, lwrite, level, already_processed, namespace_='ta:', name_='AssociatedActorsType'):
        # all attributes come from the superclass
        super(AssociatedActorsType, self).exportAttributes(lwrite, level, already_processed, namespace_, name_='AssociatedActorsType')
    def exportChildren(self, lwrite, level, nsmap, namespace_=XML_NS, name_='AssociatedActorsType', fromsubclass_=False, pretty_print=True):
        # superclass children are written first, then our Associated_Actor list
        super(AssociatedActorsType, self).exportChildren(lwrite, level, nsmap, namespace_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for Associated_Actor_ in self.Associated_Actor:
            Associated_Actor_.export(lwrite, level, nsmap, namespace_, name_='Associated_Actor', pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an etree *node*."""
        self.__sourcenode__ = node
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # local tag name without the XML namespace (Tag_pattern_ comes
            # from the mixbox.binding_utils star import)
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(AssociatedActorsType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Associated_Actor':
            obj_ = stix_common_binding.RelatedThreatActorType.factory()
            obj_.build(child_)
            self.Associated_Actor.append(obj_)
        # delegate any other child elements to the superclass
        super(AssociatedActorsType, self).buildChildren(child_, node, nodeName_, True)
# end class AssociatedActorsType
@register_extension
class ThreatActorType(stix_common_binding.ThreatActorBaseType):
"""Specifies the relevant STIX-ThreatActor schema version for this
content."""
subclass = None
superclass = stix_common_binding.ThreatActorBaseType
xmlns = "http://stix.mitre.org/ThreatActor-1"
xmlns_prefix = "ta"
xml_type = "ThreatActorType"
xsi_type = "%s:%s" % (xmlns_prefix, xml_type)
def __init__(self, idref=None, id=None, timestamp=None, version=None, Title=None, Description=None, Short_Description=None, Identity=None, Type=None, Motivation=None, Sophistication=None, Intended_Effect=None, Planning_And_Operational_Support=None, Observed_TTPs=None, Associated_Campaigns=None, Associated_Actors=None, Handling=None, Confidence=None, Information_Source=None, Related_Packages=None):
super(ThreatActorType, self).__init__(idref=idref, id=id, timestamp=timestamp)
self.version = _cast(None, version)
self.Title = Title
if Description is None:
self.Description = []
else:
self.Description = Description
if Short_Description is None:
self.Short_Description = []
else:
self.Short_Description = Short_Description
self.Identity = Identity
if Type is None:
self.Type = []
else:
self.Type = Type
if Motivation is None:
self.Motivation = []
else:
self.Motivation = Motivation
if Sophistication is None:
self.Sophistication = []
else:
self.Sophistication = Sophistication
if Intended_Effect is None:
self.Intended_Effect = []
else:
self.Intended_Effect = Intended_Effect
if Planning_And_Operational_Support is None:
self.Planning_And_Operational_Support = []
else:
self.Planning_And_Operational_Support = Planning_And_Operational_Support
self.Observed_TTPs = Observed_TTPs
self.Associated_Campaigns = Associated_Campaigns
self.Associated_Actors = Associated_Actors
self.Handling = Handling
self.Confidence = Confidence
self.Information_Source = Information_Source
self.Related_Packages = Related_Packages
def factory(*args_, **kwargs_):
if ThreatActorType.subclass:
return ThreatActorType.subclass(*args_, **kwargs_)
else:
return ThreatActorType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Title(self): return self.Title
def set_Title(self, Title): self.Title = Title
def insert_Description(self, index, value): self.Description[index] = value
def add_Description(self, Description): self.Description.append(Description)
def get_Description(self): return self.Description
def set_Description(self, Description): self.Description = Description
def insert_Short_Description(self, index, value): self.Short_Description[index] = value
def add_Short_Description(self, Short_Description): self.Short_Description.append(Short_Description)
def get_Short_Description(self): return self.Short_Description
def set_Short_Description(self, Short_Description): self.Short_Description = Short_Description
def get_Identity(self): return self.Identity
def set_Identity(self, Identity): self.Identity = Identity
def get_Type(self): return self.Type
def set_Type(self, Type): self.Type = Type
def add_Type(self, value): self.Type.append(value)
def insert_Type(self, index, value): self.Type[index] = value
def get_Motivation(self): return self.Motivation
def set_Motivation(self, Motivation): self.Motivation = Motivation
def add_Motivation(self, value): self.Motivation.append(value)
def insert_Motivation(self, index, value): self.Motivation[index] = value
def get_Sophistication(self): return self.Sophistication
def set_Sophistication(self, Sophistication): self.Sophistication = Sophistication
def add_Sophistication(self, value): self.Sophistication.append(value)
def insert_Sophistication(self, index, value): self.Sophistication[index] = value
def get_Intended_Effect(self): return self.Intended_Effect
def set_Intended_Effect(self, Intended_Effect): self.Intended_Effect = Intended_Effect
def add_Intended_Effect(self, value): self.Intended_Effect.append(value)
def insert_Intended_Effect(self, index, value): self.Intended_Effect[index] = value
def get_Planning_And_Operational_Support(self): return self.Planning_And_Operational_Support
def set_Planning_And_Operational_Support(self, Planning_And_Operational_Support): self.Planning_And_Operational_Support = Planning_And_Operational_Support
def add_Planning_And_Operational_Support(self, value): self.Planning_And_Operational_Support.append(value)
def insert_Planning_And_Operational_Support(self, index, value): self.Planning_And_Operational_Support[index] = value
def get_Observed_TTPs(self): return self.Observed_TTPs
def set_Observed_TTPs(self, Observed_TTPs): self.Observed_TTPs = Observed_TTPs
def get_Associated_Campaigns(self): return self.Associated_Campaigns
def set_Associated_Campaigns(self, Associated_Campaigns): self.Associated_Campaigns = Associated_Campaigns
def get_Associated_Actors(self): return self.Associated_Actors
def set_Associated_Actors(self, Associated_Actors): self.Associated_Actors = Associated_Actors
def get_Handling(self): return self.Handling
def set_Handling(self, Handling): self.Handling = Handling
def get_Confidence(self): return self.Confidence
def set_Confidence(self, Confidence): self.Confidence = Confidence
def get_Information_Source(self): return self.Information_Source
def set_Information_Source(self, Information_Source): self.Information_Source = Information_Source
def get_Related_Packages(self): return self.Related_Packages
def set_Related_Packages(self, Related_Packages): self.Related_Packages = Related_Packages
def get_version(self): return self.version
def set_version(self, version): self.version = version
def hasContent_(self):
if (
self.Title is not None or
self.Description is not None or
self.Short_Description is not None or
self.Identity is not None or
self.Type or
self.Motivation or
self.Sophistication or
self.Intended_Effect or
self.Planning_And_Operational_Support or
self.Observed_TTPs is not None or
self.Associated_Campaigns is not None or
self.Associated_Actors is not None or
self.Handling is not None or
self.Confidence is not None or
self.Information_Source is not None or
self.Related_Packages is not None or
super(ThreatActorType, self).hasContent_()
):
return True
else:
return False
def export(self, lwrite, level, nsmap, namespace_=XML_NS, name_='Threat_Actor', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(lwrite, level, pretty_print)
lwrite('<%s:%s%s' % (nsmap[namespace_], name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(lwrite, level, already_processed, namespace_, name_='Threat_Actor')
if self.hasContent_():
lwrite('>%s' % (eol_, ))
self.exportChildren(lwrite, level + 1, nsmap, XML_NS, name_, pretty_print=pretty_print)
showIndent(lwrite, level, pretty_print)
lwrite('</%s:%s>%s' % (nsmap[namespace_], name_, eol_))
else:
lwrite('/>%s' % (eol_, ))
def exportAttributes(self, lwrite, level, already_processed, namespace_='ta:', name_='Threat_Actor'):
super(ThreatActorType, self).exportAttributes(lwrite, level, already_processed, namespace_, name_='Threat_Actor')
# if 'xmlns' not in already_processed:
# already_processed.add('xmlns')
# xmlns = " xmlns:%s='%s'" % (self.xmlns_prefix, self.xmlns)
# lwrite(xmlns)
if 'xsi:type' not in already_processed:
already_processed.add('xsi:type')
xsi_type = " xsi:type='%s:%s'" % (self.xmlns_prefix, self.xml_type)
lwrite(xsi_type)
if self.version is not None and 'version' not in already_processed:
already_processed.add('version')
lwrite(' version=%s' % (quote_attrib(self.version), ))
def exportChildren(self, lwrite, level, nsmap, namespace_=XML_NS, name_='ThreatActorType', fromsubclass_=False, pretty_print=True):
super(ThreatActorType, self).exportChildren(lwrite, level, nsmap, namespace_, name_, True, pretty_print=pretty_print)
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.Title is not None:
showIndent(lwrite, level, pretty_print)
lwrite('<%s:Title>%s</%s:Title>%s' % (nsmap[namespace_], quote_xml(self.Title), nsmap[namespace_], eol_))
for Description in self.Description:
Description.export(lwrite, level, nsmap, namespace_, name_='Description', pretty_print=pretty_print)
for Short_Description in self.Short_Description:
Short_Description.export(lwrite, level, nsmap, namespace_, name_='Short_Description', pretty_print=pretty_print)
if self.Identity is not None:
self.Identity.export(lwrite, level, nsmap, namespace_, name_='Identity', pretty_print=pretty_print)
for Type_ in self.Type:
Type_.export(lwrite, level, nsmap, namespace_, name_='Type', pretty_print=pretty_print)
for Motivation_ in self.Motivation:
Motivation_.export(lwrite, level, nsmap, namespace_, name_='Motivation', pretty_print=pretty_print)
for Sophistication_ in self.Sophistication:
Sophistication_.export(lwrite, level, nsmap, namespace_, name_='Sophistication', pretty_print=pretty_print)
for Intended_Effect_ in self.Intended_Effect:
Intended_Effect_.export(lwrite, level, nsmap, namespace_, name_='Intended_Effect', pretty_print=pretty_print)
for Planning_And_Operational_Support_ in self.Planning_And_Operational_Support:
Planning_And_Operational_Support_.export(lwrite, level, nsmap, namespace_, name_='Planning_And_Operational_Support', pretty_print=pretty_print)
if self.Observed_TTPs is not None:
self.Observed_TTPs.export(lwrite, level, nsmap, namespace_, name_='Observed_TTPs', pretty_print=pretty_print)
if self.Associated_Campaigns is not None:
self.Associated_Campaigns.export(lwrite, level, nsmap, namespace_, name_='Associated_Campaigns', pretty_print=pretty_print)
if self.Associated_Actors is not None:
self.Associated_Actors.export(lwrite, level, nsmap, namespace_, name_='Associated_Actors', pretty_print=pretty_print)
if self.Handling is not None:
self.Handling.export(lwrite, level, nsmap, namespace_, name_='Handling', pretty_print=pretty_print)
if self.Confidence is not None:
self.Confidence.export(lwrite, level, nsmap, namespace_, name_='Confidence', pretty_print=pretty_print)
if self.Information_Source is not None:
self.Information_Source.export(lwrite, level, nsmap, namespace_, name_='Information_Source', pretty_print=pretty_print)
if self.Related_Packages is not None:
self.Related_Packages.export(lwrite, level, nsmap, namespace_, name_='Related_Packages', pretty_print=pretty_print)
def build(self, node):
self.__sourcenode__ = node
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('version', node)
if value is not None and 'version' not in already_processed:
already_processed.add('version')
self.version = value
super(ThreatActorType, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one parsed child element into the matching member slot.

        Called once per child by build(); *nodeName_* is the child's tag with
        any namespace prefix already stripped.  Names not matched below fall
        through to the superclass handler (always invoked, with
        fromsubclass_=True).
        """
        if nodeName_ == 'Title':
            # Plain text content, validated as a string.
            Title_ = child_.text
            Title_ = self.gds_validate_string(Title_, node, 'Title')
            self.Title = Title_
        elif nodeName_ == 'Description':
            obj_ = stix_common_binding.StructuredTextType.factory()
            obj_.build(child_)
            self.add_Description(obj_)
        elif nodeName_ == 'Short_Description':
            obj_ = stix_common_binding.StructuredTextType.factory()
            obj_.build(child_)
            self.add_Short_Description(obj_)
        elif nodeName_ == 'Identity':
            # NOTE(review): the import appears to exist only for its side
            # effect of registering the CIQ identity extension so that
            # lookup_extension can resolve subtyped elements — confirm.
            from .extensions.identity import ciq_identity_3_0
            obj_ = lookup_extension(child_, stix_common_binding.IdentityType).factory()
            obj_.build(child_)
            self.set_Identity(obj_)
        elif nodeName_ == 'Type':
            obj_ = stix_common_binding.StatementType.factory()
            obj_.build(child_)
            self.Type.append(obj_)
        elif nodeName_ == 'Motivation':
            obj_ = stix_common_binding.StatementType.factory()
            obj_.build(child_)
            self.Motivation.append(obj_)
        elif nodeName_ == 'Sophistication':
            obj_ = stix_common_binding.StatementType.factory()
            obj_.build(child_)
            self.Sophistication.append(obj_)
        elif nodeName_ == 'Intended_Effect':
            obj_ = stix_common_binding.StatementType.factory()
            obj_.build(child_)
            self.Intended_Effect.append(obj_)
        elif nodeName_ == 'Planning_And_Operational_Support':
            obj_ = stix_common_binding.StatementType.factory()
            obj_.build(child_)
            self.Planning_And_Operational_Support.append(obj_)
        elif nodeName_ == 'Observed_TTPs':
            obj_ = ObservedTTPsType.factory()
            obj_.build(child_)
            self.set_Observed_TTPs(obj_)
        elif nodeName_ == 'Associated_Campaigns':
            obj_ = AssociatedCampaignsType.factory()
            obj_.build(child_)
            self.set_Associated_Campaigns(obj_)
        elif nodeName_ == 'Associated_Actors':
            obj_ = AssociatedActorsType.factory()
            obj_.build(child_)
            self.set_Associated_Actors(obj_)
        elif nodeName_ == 'Handling':
            obj_ = data_marking_binding.MarkingType.factory()
            obj_.build(child_)
            self.set_Handling(obj_)
        elif nodeName_ == 'Confidence':
            obj_ = stix_common_binding.ConfidenceType.factory()
            obj_.build(child_)
            self.set_Confidence(obj_)
        elif nodeName_ == 'Information_Source':
            obj_ = stix_common_binding.InformationSourceType.factory()
            obj_.build(child_)
            self.set_Information_Source(obj_)
        elif nodeName_ == 'Related_Packages':
            obj_ = stix_common_binding.RelatedPackageRefsType.factory()
            obj_.build(child_)
            self.set_Related_Packages(obj_)
        super(ThreatActorType, self).buildChildren(child_, node, nodeName_, True)
# end class ThreatActorType
# Maps XML root-tag names to their binding classes.  Left empty here, so
# get_root_tag() falls back to looking the tag up in this module's globals().
GDSClassesMapping = {}

# Command-line help text printed by usage().
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""
def usage():
    """Print the command-line usage text and exit with status 1."""
    print(USAGE_TEXT)
    sys.exit(1)
def get_root_tag(node):
    """Return (tag, bindingClass) for an element; class is None if unknown.

    The tag is looked up first in GDSClassesMapping and then, failing that,
    among this module's global names.
    """
    tag = Tag_pattern_.match(node.tag).groups()[-1]
    cls = GDSClassesMapping.get(tag)
    if cls is None:
        cls = globals().get(tag)
    return tag, cls
def parse(inFileName):
    """Parse the XML file *inFileName* and return the root binding object.

    Falls back to ThreatActorType when the root tag is not recognized.
    """
    doc = parsexml_(inFileName)
    root = doc.getroot()
    tag, cls = get_root_tag(root)
    if cls is None:
        tag = 'Threat_Actor'
        cls = ThreatActorType
    obj = cls.factory()
    obj.build(root)
    # Drop the reference so Python can collect the space used by the DOM.
    doc = None
    return obj
def parseEtree(inFileName):
    """Parse *inFileName*, echo the re-serialized XML to stdout, and return
    ``(rootObj, rootElement)``.

    Falls back to ThreatActorType when the root tag is not recognized.
    """
    doc = parsexml_(inFileName)
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        rootTag = 'Threat_Actor'
        rootClass = ThreatActorType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    rootElement = rootObj.to_etree(None, name_=rootTag)
    content = etree_.tostring(rootElement, pretty_print=True,
                              xml_declaration=True, encoding="utf-8")
    # tostring() returns *bytes* when an explicit encoding is given; writing
    # bytes to the text-mode sys.stdout is a TypeError on Python 3, so decode
    # first (a str on Python 2 passes through unchanged).
    if isinstance(content, bytes):
        content = content.decode('utf-8')
    sys.stdout.write(content)
    sys.stdout.write('\n')
    return rootObj, rootElement
def parseString(inString):
    """Parse the XML document in *inString* and return the root binding object.

    Falls back to ThreatActorType when the root tag is not recognized.
    """
    from mixbox.vendor.six import StringIO
    doc = parsexml_(StringIO(inString))
    root = doc.getroot()
    tag, cls = get_root_tag(root)
    if cls is None:
        tag = 'Threat_Actor'
        cls = ThreatActorType
    obj = cls.factory()
    obj.build(root)
    # Drop the reference so Python can collect the space used by the DOM.
    doc = None
    return obj
def main():
    """Entry point: parse the single XML file named on the command line."""
    args = sys.argv[1:]
    if len(args) != 1:
        usage()  # prints help and exits with status 1
    parse(args[0])
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()

# Public API of this generated binding module.
__all__ = [
    "ObservedTTPsType",
    "AssociatedCampaignsType",
    "AssociatedActorsType",
    "ThreatActorType"
]
| |
import importlib
import json
import time
import traceback

import modular
import modules.gzlogging
import modules.eval
# import modules.repeat
import modules.mapper

# Force-reload the project modules so code edits take effect when this
# profile script is re-imported by the running client process.
importlib.reload(modular)
importlib.reload(modules.gzlogging)
importlib.reload(modules.eval)
# importlib.reload(modules.repeat)
importlib.reload(modules.mapper)
# Maps each honeable skill/spell name to the practice group it belongs to;
# honed() uses this to send the matching 'prac <group>' command.
honeToType = {
    'heal light': 'cleric',
    'harm light': 'cleric',
    'armor': 'cleric',
    'bless': 'cleric',
    'rain brimstone': 'cleric',
    'clot': 'cleric',
    'create food': 'cleric',
    'create water': 'cleric',
    'cure poison': 'cleric',
    'salve': 'cleric',
    'heal serious': 'cleric',
    'sterilize': 'cleric',
    'remove curse': 'cleric',
    'cure disease': 'cleric',
    'refresh': 'cleric',
    'heal critical': 'cleric',
    'harm serious': 'cleric',
    'cure blindness': 'cleric',
    'cleric repair': 'cleric',
    'expel': 'cleric',
    'flamestrike': 'cleric',
    'curse': 'cleric',
    'disease': 'cleric',
    'harm critical': 'cleric',
    'numb': 'cleric',
    'poison': 'cleric',
    'infect': 'cleric',
    'heal': 'cleric',
    'summon': 'cleric',
    'harm': 'cleric',
    'plague of locusts': 'cleric',
    'word of recall': 'cleric',
    'blindness': 'cleric',
    'paralyze limb': 'cleric',
    'knit bone': 'cleric',
    'penance': 'theolog',
    'attune': 'theolog',
    'blunt proficiency': 'combat',
    'slash proficiency': 'combat',
    'pierce proficiency': 'combat',
    'barehand proficiency': 'combat',
    'ranged proficiency': 'combat',
    'sharpen': 'combat',
    'smooth': 'combat',
    'tactics': 'advent',
    'bandage': 'advent',
    'ride': 'advent',
    'dissect': 'advent',
    'butcher': 'advent',
    'swim': 'advent',
    'know animal': 'advent',
    'defense': 'advent',
    'know people': 'advent',
    'lumberjack': 'advent',
    'fishing': 'advent',
    'alcoholism': 'advent',
    'offense': 'advent',
    'read magic': 'advent',
    'climbing': 'advent',
    'know veggie': 'advent',
    'sign': 'advent',
    'mend': 'advent',
    'encamp': 'advent',
    'know reptile': 'advent',
    'know giantkin': 'advent',
    'whittle': 'advent',
    'evaluate': 'advent',
    'know other': 'advent',
    'know undead': 'advent',
    'gutter cant': 'advent',
    'gnoll jargon': 'advent',
    'troglodyte pidgin': 'advent',
    'know demon': 'advent',
    'devotion': 'faith',
}
def honed(mud, groups):
    """Trigger handler fired when the server reports a skill being honed.

    Logs the hone, re-practices the matching group, records the hone time,
    invokes any autohone callback, and schedules a cooldown reminder.
    """
    skill = groups[0]
    honing = mud.state.pop('honing', None)
    if honing is not None:
        mud.log("Honed {} in {} tries".format(skill, honing[1]))
    if skill in honeToType:
        mud.send('prac ' + honeToType[skill])
    mud.state.setdefault('hones', {})[skill] = time.time()
    if 'hone_on_success' in mud.state:
        mud.state['hone_on_success'](skill)
    reminder = lambda m: mud.log("You can now hone " + skill)
    mud.timers["hone_again_notification_for_" + skill] = mud.mkdelay(301, reminder)
def showHones(mud, _):
    """#hones alias: list per-skill cooldowns for hones in the last 300s.

    Expired entries are pruned; the 'hones' dict itself is removed once empty.
    """
    found = False
    if 'hones' in mud.state:
        now = time.time()
        hones = mud.state['hones']
        expired = [skill for skill, honetime in hones.items()
                   if now - honetime > 300]
        for skill, honetime in hones.items():
            if now - honetime <= 300:
                found = True
                mud.show("{}: {}s remaining\n".format(skill, 300 - int(now - honetime)))
        for skill in expired:
            del hones[skill]
        if not hones:
            del mud.state['hones']
    if not found:
        mud.show("No skills honed recently")
def setSkillLevel(mud, groups):
    """Trigger handler: record a skill's learned/potential levels.

    Scraped from 'prac' output; when the potential column is missing the
    skill is treated as 'maxed'.
    """
    levels = mud.state.setdefault('skillLevels', {})
    skill, learned = groups[0], groups[1]
    potential = groups[2] if len(groups) >= 3 else 'maxed'
    mud.log('scraped {} at {}/{}'.format(skill, learned, potential))
    levels[skill] = (learned, potential)
# Command aliases: values are either a literal command string to send or a
# callable invoked as handler(mud, regex_groups).
ALIASES={
    'sc': 'score',
    '#hones': showHones,
}

# Regex triggers matched against server output; values are either a reply
# string or a handler callable invoked with the regex groups.
TRIGGERS={
    r'^You are thirsty\.$': 'drink waterskin',
    r'^\*\*\* PRESS RETURN:': '',
    r'^You feel your skills honing in regards to (.+)\.': honed,
    r'^You feel Mezan, the father favoring you more in respects to (.+)\.': honed,
    r'^(.+?) +Current: \((.+)\) +Potential: \((.+)\)': setSkillLevel,
    r'^(.+?) +Current: \((.+)\) *': setSkillLevel,
}
class Sneezy(modular.ModularClient):
    """SneezyMUD client profile.

    Loads the helper modules (logging, eval, mapper), installs the ALIASES
    and TRIGGERS tables plus login automation, and implements the
    #autohone / #killify commands.
    """

    def __init__(self, mud, name):
        self.name = name
        self.logfname = '{}.log.gz'.format(name)
        self.mapfname = 'sneezy.map'
        self.modules = {}
        mods = {
            'eval': (modules.eval.Eval, []),
            # 'repeat': (modules.repeat.Repeat, []),
            'gzlogging': (modules.gzlogging.GzLogging, [self.logfname]),
            'mapper': (modules.mapper.Mapper, [True, self.mapfname, True]),
        }
        for modname, module in mods.items():
            try:
                constructor, args = module
                args = [mud] + args
                self.modules[modname] = constructor(*args)
            except Exception:
                # One broken module should not prevent the rest from loading.
                traceback.print_exc()
        super().__init__(mud)
        self.aliases.update(ALIASES)
        self.triggers.update(TRIGGERS)
        # Login/password triggers live in an untracked JSON file.
        with open('passwords_sneezy.json', 'rb') as pws:
            self.triggers.update(json.load(pws))
        self.triggers["Type 'C' to connect with an existing character, or <enter> to see account menu."] = 'c\n' + name
        self.aliases['#autohone ([^,]+), (.+)'] = lambda mud, groups: self.startAutoHone(groups[0], groups[1])
        self.aliases['#killify'] = self.killify

    def killify(self, mud, groups):
        """#killify alias: attack the highest-level mob in the room that is
        still at or below our level (mob data comes from GMCP room info)."""
        maxLevelsAboveMine = 0
        myLevel = 2
        mobs = self.gmcp['room']['mobs']
        lowLevelMobs = list(filter(lambda x: x['level'] + maxLevelsAboveMine <= myLevel, mobs))
        if not lowLevelMobs:
            self.log('No candidate mobs')
            return
        mob = sorted(lowLevelMobs, key=lambda x: x['level'])[-1]
        mud.send('k {}'.format(mob['name']))

    def startAutoHone(self, skill, cmd):
        """Start autohoning *skill*: run *cmd* once per hone cooldown."""
        self.log("Autohoning {} as {}".format(skill, cmd))
        self.timers['autohone_' + cmd] = self.mktimernow(60*5 + 1, lambda mud: self.honeTimer(skill, cmd))

    def honeTimer(self, skill, cmd):
        """Timer body: arm the hone bookkeeping so honed() can report back."""
        def onHoneSuccess(skillHoned):
            if skill != skillHoned:
                return
            # TODO: check for maxed skills.
            # Skill levels are scraped as *strings* by setSkillLevel; the old
            # code compared the raw string against the int 99 (a TypeError on
            # Python 3) and raised KeyError when nothing had been scraped yet.
            levels = self.state.get('skillLevels', {})
            learned = levels.get(skill, ('0',))[0]
            try:
                maxed = int(learned) >= 99
            except ValueError:
                maxed = False  # unparseable scrape; keep honing
            if maxed:
                self.log("Removing " + skill + " from autohone")
                del self.timers['autohone_' + cmd]
            else:
                self.setTimerRemaining('autohone_' + cmd, 301)
        # multi-hone timers need work
        self.state['hone_on_success'] = onHoneSuccess
        self.state['honing'] = (cmd, 1)

    def getHostPort(self):
        """Server address for modular.ModularClient to connect to."""
        return 'sneezymud.org', 7900
def getClass():
    # Hook for the client loader: returns the profile class to instantiate.
    return Sneezy
| |
import os,platform,json,subprocess
class Configuration():
    """
    Create a Configuration instance, ready to use with or without authentication

    :param username: If authentication is required, the username to use
    :type username: str
    :param password: If authentication is required, the password to use
    :type password: str

    **No authentication example**

    >>> import jbclipy
    >>> conf = jbclipy.Configuration()
    >>> conf.print_execution()
    jboss-cli.sh -c --commands=batch,<commands>,run-batch

    **Authentication example**

    >>> import jbclipy
    >>> conf = jbclipy.Configuration("username","password")
    >>> conf.print_execution()
    jboss-cli.sh -c --user=username --password=password --commands=batch,<commands>,run-batch

    .. Note::
        On Windows, jbclipy will automatically be configured to use `jboss-cli.bat`
    """

    #Constructor
    def __init__(self, username=None, password=None):
        # Pick the platform-appropriate CLI launcher under $JBOSS_HOME/bin.
        if platform.system() == 'Windows':
            self.connect = [os.environ["JBOSS_HOME"] + '/bin/jboss-cli.bat', '-c']
        else:
            self.connect = [os.environ["JBOSS_HOME"] + '/bin/jboss-cli.sh', '-c']
        if username and password:
            self.connect.append('--user=%s' % username)
            self.connect.append('--password=%s' % password)
        # Pending CLI commands, executed as one batch by execute().
        self.commands = []

    def _dict2params(self, dictionary):
        """
        Private method to convert dictionaries to parameter strings

        Entries whose value is None are skipped.  Keys may use underscores in
        place of hyphens (so they can be passed as Python keyword arguments);
        they are normalized to hyphens here for every value type, not just
        lists as before.
        """
        s = ''
        for key in dictionary:
            value = dictionary[key]
            if value is None:  # was '== None'; identity comparison is correct
                continue
            name = key.replace('_', '-')
            if isinstance(value, dict):
                # Fixed: the old code appended a literal '=%s' before the JSON
                # instead of formatting, producing 'key=%s{...}'.
                s = s + ',' + name + '=' + json.dumps(value, separators=(',', '=>'))
            elif isinstance(value, list):
                if len(value) > 0:
                    s = s + ',' + name + '=["%s"]' % '","'.join(item for item in value)
            else:
                s = s + ',' + name + '="%s"' % value
        return s

    # User Actions
    def execute(self):
        """
        Call to execute commands batch

        Writes the batch to a temporary CLI script, invokes jboss-cli with
        it, removes the script, and clears the command list.
        """
        if platform.system() == 'Windows':
            name = r'C:\WINDOWS\TEMP\execute.cli'
        else:
            name = r'/tmp/execute.cli'
        # 'with' guarantees the script is flushed and closed (even on error)
        # before the CLI process reads it.
        with open(name, 'w') as fh:
            fh.write('batch\n%s\nrun-batch' % '\n'.join(self.commands))
        subprocess.call(self.connect + ['--file=' + name])
        os.remove(name)
        self.reset()

    def reset(self):
        """
        Removes all commands in the batch
        """
        self.commands = []

    def print_execution(self):
        """
        Prints the execution string
        """
        print(' '.join(self.connect + ['--commands=batch,%s,run-batch' % ','.join(self.commands)]))

    #Build Methods
    def custom(self, cmd):
        """
        Allows a user to add a custom command to the command list

        :param cmd: The command to add for execution
        :type cmd: str
        """
        self.commands.append(cmd)

    def _add_resource(self, base, params):
        """
        Private helper method to build command strings

        :raises: TypeError if *params* is not a dictionary
        """
        if isinstance(params, dict):
            # The replace() drops the leading comma when *base* ends in '('.
            self.commands.append((base + self._dict2params(params) + ')').replace('(,', '('))
        else:
            raise TypeError('Input [params] type should be a dictionary')

    def remove_subsystem(self, subsystem):
        """
        Removes a subsystem from JBoss

        :param subsystem: The name of the subsystem to remove
        :type subsystem: str

        Example of removing the modcluster subsystem::

            conf = jbclipy.Configuration()
            conf.remove_subsystem('modcluster')
            conf.execute()

        .. warning::
            You should call :func:`remove_extension` in addition to removing the subsystem
        """
        self.commands.append('/subsystem=%s:remove()' % subsystem)

    def add_extension(self, extension):
        """
        Adds an extension to JBoss

        :param extension: The name of the extension to add
        :type extension: str

        Example of adding the modcluster extension::

            conf = jbclipy.Configuration()
            conf.add_extension('org.jboss.as.modcluster')
            conf.execute()

        .. note::
            You should make sure that a subsystem is present before calling :func:`add_extension`
        """
        self.commands.append('/extension=%s:add()' % extension)

    def remove_extension(self, extension):
        """
        Removes an extension from JBoss

        :param extension: The name of the extension to remove
        :type extension: str

        Example of removing the modcluster extension::

            conf = jbclipy.Configuration()
            conf.remove_extension('modcluster')
            conf.execute()

        .. warning::
            You should call :func:`remove_subsystem` before calling :func:`remove_extension`
        """
        self.commands.append('/extension=%s:remove()' % extension)

    def add_socket_binding(self):
        raise NotImplementedError('This method will be available at a future date')

    def remove_socket_binding(self, binding):
        """
        Removes a socket binding

        :param binding: Name of the socket-binding to remove
        :type binding: str

        .. warning::
            You should only remove a socket binding that is not being referenced in any subsystems
        """
        self.commands.append('/socket-binding-group=standard-sockets/socket-binding=%s:remove()' % binding)

    def add_connector(self, name, protocol, scheme, socket_binding, *args, **kwargs):
        """
        Add a connector

        :param name: The name of the connector
        :type name: str
        :param protocol: The protocol to use
        :type protocol: str
        :param scheme: The scheme to direct traffic under
        :type scheme: str
        :param socket_binding: The name of the socket binding to use for traffic
        :type socket_binding: str
        :param args: An optional dictionary with default overrides
        :type args: dict
        :param kwargs: Instead of a dict, supply keyword args for optional parameters
        :type kwargs: various
        :raises: TypeError

        **Optional Parameters:**

        +----------------+--------+-------------------------+-------------------------+
        | Parameter Name | Type   | Default                 | Example                 |
        +================+========+=========================+=========================+
        | enable-lookups | str    | None                    | 'true'                  |
        +----------------+--------+-------------------------+-------------------------+
        | max-connections| str    | None                    | '512'                   |
        +----------------+--------+-------------------------+-------------------------+
        | proxy-port     | str    | None                    | '1234'                  |
        +----------------+--------+-------------------------+-------------------------+
        | secure         | str    | None                    | 'true'                  |
        +----------------+--------+-------------------------+-------------------------+
        | enabled        | str    | None                    | 'true'                  |
        +----------------+--------+-------------------------+-------------------------+
        | redirect-port  | str    | None                    | '1234'                  |
        +----------------+--------+-------------------------+-------------------------+
        | executor       | str    | None                    | 'myExecutor'            |
        +----------------+--------+-------------------------+-------------------------+
        | max-post-size  | str    | None                    | '2097152'               |
        +----------------+--------+-------------------------+-------------------------+
        | max-save-post- |        |                         |                         |
        | size           | str    | None                    | '4096'                  |
        +----------------+--------+-------------------------+-------------------------+
        | proxy-name     | str    | None                    | 'myProxy'               |
        +----------------+--------+-------------------------+-------------------------+
        | virtual-server | list   | None                    | ['server1','server2']   |
        +----------------+--------+-------------------------+-------------------------+
        """
        params = {'protocol': protocol, 'scheme': scheme, 'socket-binding': socket_binding}
        if len(args):
            if isinstance(args[0], dict):
                params.update(args[0])
            else:
                raise TypeError('argument must be a dictionary')
        else:
            params.update(kwargs)
        self._add_resource('/subsystem=web/connector=%s:add(name="%s"' % (name, name), params)

    def remove_connector(self, name):
        """
        Removes a connector

        :param name: Name of the connector to remove
        :type name: str
        """
        self.commands.append('/subsystem=web/connector=%s:remove()' % name)

    def add_console_handler(self, name, *args, **kwargs):
        """
        Add a console handler

        :param name: A name for the console handler
        :type name: str
        :param args: An optional dictionary with default overrides
        :type args: dict
        :param kwargs: Instead of a dict, supply keyword args for optional parameters
        :type kwargs: various
        :raises: TypeError

        **Optional Parameters:**

        +----------------+--------+-------------------------+-------------------------+
        | Parameter Name | Type   | Default                 | Example                 |
        +================+========+=========================+=========================+
        | level          | str    | 'INFO'                  | 'DEBUG'                 |
        +----------------+--------+-------------------------+-------------------------+
        | formatter      | str    | '%d{HH:mm:ss,SSS}       | '%d{HH:mm:ss,SSS}       |
        |                |        | %-5p [%c] (%t) %s%E%n'  | %-5p [%c] (%t) %s%E%n'  |
        +----------------+--------+-------------------------+-------------------------+
        | autoflush      | str    | None                    | 'true'                  |
        +----------------+--------+-------------------------+-------------------------+
        | encoding       | str    | None                    | 'UTF-8'                 |
        +----------------+--------+-------------------------+-------------------------+
        | filter         | dict   | None                    | See :func:`make_filter` |
        +----------------+--------+-------------------------+-------------------------+
        | target         | str    | 'System.out'            | 'System.err'            |
        +----------------+--------+-------------------------+-------------------------+
        """
        params = {
            'level': 'INFO',
            'formatter': '%d{HH:mm:ss,SSS} %-5p [%c] (%t) %s%E%n',
            'target': 'System.out'
        }
        if len(args):
            if isinstance(args[0], dict):
                params.update(args[0])
            else:
                raise TypeError('argument must be a dictionary')
        else:
            params.update(kwargs)
        self._add_resource('/subsystem=logging/console-handler=%s:add(name="%s"' % (name, name), params)

    def remove_console_handler(self, name):
        """
        Removes a console handler

        :param name: Name of the console handler to remove
        :type name: str

        .. warning::
            You must make sure the handler is not part of a logger or root logger to remove
        """
        self.commands.append('/subsystem=logging/console-handler=%s:remove()' % name)

    def add_periodic_rotating_file_handler(self, name, params):
        """
        Adds a periodic rotating file handler

        :param name: A name for the handler
        :type name: str
        :param params: Handler attributes (see the JBoss CLI for valid keys)
        :type params: dict
        """
        self._add_resource('/subsystem=logging/periodic_rotating_file_handler=%s:add(name="%s"' % (name, name), params)

    def remove_periodic_rotating_file_handler(self, name):
        """
        Removes a periodic rotating file handler

        :param name: Name of the periodic rotating file handler to remove
        :type name: str

        .. warning::
            You must make sure the handler is not part of a logger or root logger to remove
        """
        self.commands.append('/subsystem=logging/periodic_rotating_file_handler=%s:remove()' % name)

    def add_size_rotating_file_handler(self, name, params):
        """
        Adds a size rotating file handler

        :param name: A name for the handler
        :type name: str
        :param params: Handler attributes (see the JBoss CLI for valid keys)
        :type params: dict
        """
        # Fixed: the old base string had '%s' placeholders but never applied
        # the % formatting, so the literal '%s' ended up in the CLI command
        # (and it had a stray trailing comma instead of the name parameter).
        self._add_resource('/subsystem=logging/size_rotating_file_handler=%s:add(name="%s"' % (name, name), params)

    def remove_size_rotating_file_handler(self, name):
        """
        Removes a size rotating file handler

        :param name: Name of the size rotating file handler to remove
        :type name: str

        .. warning::
            You must make sure the handler is not part of a logger or root logger to remove
        """
        self.commands.append('/subsystem=logging/size_rotating_file_handler=%s:remove()' % name)

    def add_logger(self, name, params):
        """
        Adds a logger category

        :param name: The logger category name
        :type name: str
        :param params: Logger attributes (see the JBoss CLI for valid keys)
        :type params: dict
        """
        self._add_resource('/subsystem=logging/logger=%s:add(' % name, params)

    def add_handler_to_root_logger(self, name):
        """
        Assigns a handler to the root logger

        :param name: Name of the handler to assign
        :type name: str
        """
        self.commands.append('/subsystem=logging/root-logger=ROOT:root-logger-assign-handler(name="%s")' % name)

    def remove_handler_from_root_logger(self, name):
        """
        Removes a handler from the root logger

        :param name: Name of the handler reference to remove
        :type name: str
        """
        self.commands.append('/subsystem=logging/root-logger=ROOT:root-logger-unassign-handler(name="%s")' % name)

    def add_jdbc_driver(self, name, params):
        """
        Adds a JDBC driver

        :param name: The driver name
        :type name: str
        :param params: Driver attributes (see the JBoss CLI for valid keys)
        :type params: dict
        """
        self._add_resource('/subsystem=datasources/jdbc-driver=%s:add(driver-name="%s"' % (name, name), params)

    def remove_jdbc_driver(self, name):
        """
        Removes a jdbc driver

        :param name: Name of the driver to remove
        :type name: str

        .. warning::
            You must remove any datasources using the driver before the driver can be removed.
            See :func:`remove_datasource` and :func:`remove_xa_datasource`.
        """
        self.commands.append('/subsystem=datasources/jdbc-driver=%s:remove()' % name)

    def add_datasource(self, name, params):
        """
        Adds a datasource

        :param name: The datasource name
        :type name: str
        :param params: Datasource attributes (see the JBoss CLI for valid keys)
        :type params: dict
        """
        self._add_resource('/subsystem=datasources/data-source=%s:add(' % (name), params)

    def remove_datasource(self, name):
        """
        Removes a datasource

        :param name: Name of the datasource to remove
        :type name: str
        """
        self.commands.append('/subsystem=datasources/data-source=%s:remove()' % name)

    def enable_datasource(self, name):
        """
        Enables a datasource for application use

        :param name: The name of the datasource to enable
        :type name: str

        .. Note::
            It is a good idea to run this command after :func:`add_datasource`
        """
        self.commands.append('/subsystem=datasources/data-source=%s:enable()' % name)

    def disable_datasource(self, name):
        """
        Disables a datasource from application use

        :param name: The name of the datasource to disable
        :type name: str

        .. Warning::
            Applications **cannot** use a disabled datasource
        """
        self.commands.append('/subsystem=datasources/data-source=%s:disable()' % name)

    def test_datasource(self, name):
        """
        Tests a datasource's connection

        :param name: The name of the datasource to test
        :type name: str
        """
        self.commands.append('/subsystem=datasources/data-source=%s:test-connection-in-pool' % name)

    def add_xa_datasource(self, name, params):
        """
        Adds an xa-datasource

        :param name: The xa-datasource name
        :type name: str
        :param params: Datasource attributes (see the JBoss CLI for valid keys)
        :type params: dict
        """
        self._add_resource('/subsystem=datasources/xa-data-source=%s:add(' % name, params)

    def remove_xa_datasource(self, name):
        """
        Removes an xa-datasource

        :param name: Name of the xa-datasource to remove
        :type name: str
        """
        self.commands.append('/subsystem=datasources/xa-data-source=%s:remove()' % name)

    def enable_xa_datasource(self, name):
        """
        Enables an xa-datasource for application use

        :param name: The name of the xa-datasource to enable
        :type name: str

        .. Note::
            It is a good idea to run this command after :func:`add_xa_datasource`
        """
        self.commands.append('/subsystem=datasources/xa-data-source=%s:enable()' % name)

    def disable_xa_datasource(self, name):
        """
        Disables an xa-datasource from application use

        :param name: The name of the xa-datasource to disable
        :type name: str

        .. Warning::
            Applications **cannot** use a disabled xa-datasource
        """
        self.commands.append('/subsystem=datasources/xa-data-source=%s:disable()' % name)

    def test_xa_datasource(self, name):
        """
        Tests an xa-datasource's connection

        :param name: The name of the xa-datasource to test
        :type name: str
        """
        self.commands.append('/subsystem=datasources/xa-data-source=%s:test-connection-in-pool' % name)

    #TODO: Convert to dictionary input
    def setup_vault(self, directory, url, password, alias, salt, iteration):
        """
        Configures the JBoss password vault

        :param directory: Directory for the encrypted files (ENC_FILE_DIR)
        :param url: Keystore URL
        :param password: Keystore password
        :param alias: Keystore alias
        :param salt: Salt for the masked password
        :param iteration: Iteration count for the masked password
        """
        self.commands.append('/core-service=vault:add(vault-options=[KEYSTORE_URL=%s,KEYSTORE_PASSWORD=%s,KEYSTORE_ALIAS=%s,SALT=%s,ITERATION_COUNT=%s,ENC_FILE_DIR=%s])' % (url, password, alias, salt, iteration, directory))

    def take_snapshot(self):
        """
        Takes a snapshot of the current JBoss profile

        .. Note::
            This is highly recommended to do before adding/removing any resources
        """
        self.commands.append(':take-snapshot')

    def delete_snapshot(self, name):
        """
        Deletes a stored snapshot

        :param name: Name of the snapshot to delete
        :type name: str
        """
        self.commands.append(':delete-snapshot(name=%s)' % name)

    #TODO: Convert to dictionary input
    def add_jms_queue(self, name, entries, selector=None, durable=None):
        """
        Adds a JMS Queue

        :param name: The queue name
        :type name: str
        :param entries: JNDI entries for the queue
        :type entries: list
        :param selector: Optional message selector
        :param durable: Optional durability flag
        """
        s = '/subsystem=messaging/hornetq-server=default/jms-queue=%s:add(' % name
        if entries:
            s = s + 'entries=["' + '","'.join(entries) + '"]'
        if selector:
            s = s + ',selector=%s' % selector
        if durable:
            s = s + ',durable=%s' % durable
        self.commands.append(s + ')')

    def remove_jms_queue(self, name):
        """
        Removes a JMS Queue

        :param name: Name of the queue to remove
        :type name: str
        """
        self.commands.append('/subsystem=messaging/hornetq-server=default/jms-queue=%s:remove()' % name)

    #TODO: Convert to dictionary input
    def add_jms_topic(self, name, entries):
        """
        Adds a JMS Topic

        :param name: The topic name
        :type name: str
        :param entries: JNDI entries for the topic
        :type entries: list
        """
        s = '/subsystem=messaging/hornetq-server=default/jms-topic=%s:add(' % name
        if entries:
            s = s + 'entries=["' + '","'.join(entries) + '"]'
        self.commands.append(s + ')')

    def remove_jms_topic(self, name):
        """
        Removes a JMS Topic

        :param name: Name of the topic to remove
        :type name: str
        """
        self.commands.append('/subsystem=messaging/hornetq-server=default/jms-topic=%s:remove()' % name)

    """Bulk Methods"""
    def remove_jgroups(self):
        """
        Removes the jgroups subsystem

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_subsystem('jgroups')
            conf.remove_extension('org.jboss.as.clustering.jgroups')
            conf.remove_socket_binding('jgroups-mping')
            conf.remove_socket_binding('jgroups-tcp')
            conf.remove_socket_binding('jgroups-tcp-fd')
            conf.remove_socket_binding('jgroups-udp')
            conf.remove_socket_binding('jgroups-udp-fd')
        """
        self.remove_subsystem('jgroups')
        self.remove_extension('org.jboss.as.clustering.jgroups')
        self.remove_socket_binding('jgroups-mping')
        self.remove_socket_binding('jgroups-tcp')
        self.remove_socket_binding('jgroups-tcp-fd')
        self.remove_socket_binding('jgroups-udp')
        self.remove_socket_binding('jgroups-udp-fd')

    def remove_modcluster(self):
        """
        Removes the modcluster subsystem

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_subsystem('modcluster')
            conf.remove_extension('org.jboss.as.modcluster')
            conf.remove_socket_binding('modcluster')
        """
        self.remove_subsystem('modcluster')
        self.remove_extension('org.jboss.as.modcluster')
        self.remove_socket_binding('modcluster')

    def remove_clustering(self):
        """
        Removes all of Clustering (jgroups + modcluster)

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_jgroups()
            conf.remove_modcluster()
        """
        self.remove_jgroups()
        self.remove_modcluster()

    def add_ajp_connector(self, https=False):
        """
        Adds the AJP Connector

        :param https: Use https if True, http if false
        :type https: bool

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.add_connector('ajp','AJP/1.3','http','ajp')
            # http would be https if https field is true
        """
        self.add_connector('ajp', 'AJP/1.3', 'https' if https else 'http', 'ajp')

    def remove_messaging(self):
        """
        Removes the messaging subsystem

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_subsystem('messaging')
            conf.remove_extension('org.jboss.as.messaging')
            conf.remove_socket_binding('messaging')
            conf.remove_socket_binding('messaging-group')
            conf.remove_socket_binding('messaging-throughput')
        """
        self.remove_subsystem('messaging')
        self.remove_extension('org.jboss.as.messaging')
        self.remove_socket_binding('messaging')
        self.remove_socket_binding('messaging-group')
        self.remove_socket_binding('messaging-throughput')

    def remove_mail(self):
        """
        Removes the mail subsystem

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_subsystem('mail')
            conf.remove_extension('org.jboss.as.mail')
            conf.custom('/socket-binding-group=standard-sockets/remote-destination-outbound-socket-binding=mail-smtp:remove()')
        """
        self.remove_subsystem('mail')
        self.remove_extension('org.jboss.as.mail')
        self.custom('/socket-binding-group=standard-sockets/remote-destination-outbound-socket-binding=mail-smtp:remove()')

    def remove_cmp(self):
        """
        Removes the cmp subsystem

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_subsystem('cmp')
            conf.remove_extension('org.jboss.as.cmp')
        """
        self.remove_subsystem('cmp')
        self.remove_extension('org.jboss.as.cmp')

    def remove_jacorb(self):
        """
        Removes the jacorb subsystem

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_subsystem('jacorb')
            conf.remove_extension('org.jboss.as.jacorb')
            conf.remove_socket_binding('jacorb')
            conf.remove_socket_binding('jacorb-ssl')
        """
        self.remove_subsystem('jacorb')
        self.remove_extension('org.jboss.as.jacorb')
        self.remove_socket_binding('jacorb')
        self.remove_socket_binding('jacorb-ssl')

    def remove_jaxr(self):
        """
        Removes the JAXR subsystem

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_subsystem('jaxr')
            conf.remove_extension('org.jboss.as.jaxr')
        """
        self.remove_subsystem('jaxr')
        self.remove_extension('org.jboss.as.jaxr')

    def remove_jsr77(self):
        """
        Removes the JSR77 subsystem

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_subsystem('jsr77')
            conf.remove_extension('org.jboss.as.jsr77')
        """
        self.remove_subsystem('jsr77')
        self.remove_extension('org.jboss.as.jsr77')

    def remove_h2(self):
        """
        Removes the Hypersonic database and driver

        Equivalent to::

            conf = jbclipy.Configuration()
            conf.remove_datasource('ExampleDS')
            conf.remove_jdbc_driver('h2')
        """
        self.remove_datasource('ExampleDS')
        self.remove_jdbc_driver('h2')

    def make_filter(self):
        """
        Makes a logging filter object

        .. warning::
            Method currently not available

        :returns: dict
        :raises: NotImplementedError
        """
        raise NotImplementedError('This has not yet been implemented')
| |
# Copyright 2011,2012,2013 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Classes and utilities for addresses of various types.
"""
from __future__ import print_function
import struct
import socket
# Slightly tested attempt at Python 3 friendliness
import sys

# 'long' was removed in Python 3 (int covers arbitrary precision).  The old
# check poked at sys.modules['__builtin__'], which raises KeyError on
# Python 3 where that module is named 'builtins'; probing the name directly
# works on both interpreter lines.
try:
    long
except NameError:
    long = int
"""
# Unfinished oui name stuff formerly from packet library.
oui = int(a[0]) << 16 | int(a[1]) << 8 | int(a[2])
# check if globally unique
if resolve_name and not (a[0] & 0x2):
if _ethoui2name.has_key(oui):
return "(%s):%02x:%02x:%02x" %( _ethoui2name[oui], a[3],a[4],a[5])
"""
# OUI (24-bit vendor prefix) -> organization name, loaded from oui.txt.
_eth_oui_to_name = {}
def _load_oui_names ():
  """
  Populate _eth_oui_to_name from an IEEE 'oui.txt' file expected to sit
  next to this module.

  Best effort only: any problem opening or parsing the file just logs a
  warning and leaves the table (possibly partially) filled.
  """
  import inspect
  import os.path
  filename = os.path.join(os.path.dirname(inspect.stack()[0][1]), 'oui.txt')
  try:
    # 'with' guarantees the handle is closed even when a parse error is
    # raised mid-file (the original leaked the handle in that case).
    with open(filename) as f:
      for line in f:
        if len(line) < 1:
          continue
        if line[0].isspace():
          continue
        split = line.split(' ')
        if not '-' in split[0]:
          continue
        # grab 3-byte OUI
        oui_str = split[0].replace('-','')
        # strip off (hex) identifer and keep rest of name
        end = ' '.join(split[1:]).strip()
        end = end.split('\t')
        end.remove('(hex)')
        oui_name = ' '.join(end)
        # convert oui to int
        oui = int(oui_str, 16)
        _eth_oui_to_name[oui] = oui_name.strip()
  except Exception:
    # Narrowed from a bare 'except' (which also swallowed SystemExit and
    # KeyboardInterrupt); .warn() is a deprecated alias of .warning().
    import logging
    logging.getLogger().warning("Could not load OUI list")
_load_oui_names()
class EthAddr (object):
  """
  An Ethernet (MAC) address type.

  The address is stored internally as six raw bytes and instances are
  immutable (see __setattr__).

  NOTE(review): this class is Python 2 code -- it relies on basestring,
  xrange, cmp(), and chr/ord over byte strings.
  """
  def __init__ (self, addr):
    """
    Understands Ethernet address is various forms. Hex strings, raw byte
    strings, etc.

    Accepted forms: a 6-byte raw string; hex ("xx:xx:xx:xx:xx:xx",
    "xx-xx-...", 12 bare hex digits, or colon-separated groups of one or
    two digits); another EthAddr; a length-6 iterable of byte values; or
    None (all zeroes).  Raises RuntimeError for anything else.
    """
    # Always stores as a 6 character string
    if isinstance(addr, bytes) or isinstance(addr, basestring):
      if len(addr) == 6:
        # raw
        pass
      elif len(addr) == 17 or len(addr) == 12 or addr.count(':') == 5:
        # hex
        if len(addr) == 17:
          # Every third character must be the (single) separator
          if addr[2::3] != ':::::' and addr[2::3] != '-----':
            raise RuntimeError("Bad format for ethernet address")
          # Address of form xx:xx:xx:xx:xx:xx
          # Pick out the hex digits only
          addr = ''.join((addr[x*3:x*3+2] for x in xrange(0,6)))
        elif len(addr) == 12:
          pass
        else:
          # Assume it's hex digits but they may not all be in two-digit
          # groupings (e.g., xx:x:x:xx:x:x). This actually comes up.
          addr = ''.join(["%02x" % (int(x,16),) for x in addr.split(":")])
        # We should now have 12 hex digits (xxxxxxxxxxxx).
        # Convert to 6 raw bytes.
        addr = b''.join((chr(int(addr[x*2:x*2+2], 16)) for x in range(0,6)))
      else:
        raise RuntimeError("Expected ethernet address string to be 6 raw "
                           "bytes or some hex")
      self._value = addr
    elif isinstance(addr, EthAddr):
      # Copy constructor
      self._value = addr.toRaw()
    elif type(addr) == list or (hasattr(addr, '__len__') and len(addr) == 6
        and hasattr(addr, '__iter__')):
      # A sequence of six byte values
      self._value = b''.join( (chr(x) for x in addr) )
    elif addr is None:
      self._value = b'\x00' * 6
    else:
      raise RuntimeError("Expected ethernet address to be a string of 6 raw "
                         "bytes or some hex")
  def isBridgeFiltered (self):
    """
    Checks if address is an IEEE 802.1D MAC Bridge Filtered MAC Group Address

    This range is 01-80-C2-00-00-00 to 01-80-C2-00-00-0F. MAC frames that
    have a destination MAC address within this range are not relayed by
    bridges conforming to IEEE 802.1D
    """
    return ((ord(self._value[0]) == 0x01)
            and (ord(self._value[1]) == 0x80)
            and (ord(self._value[2]) == 0xC2)
            and (ord(self._value[3]) == 0x00)
            and (ord(self._value[4]) == 0x00)
            and (ord(self._value[5]) <= 0x0F))
  @property
  def is_bridge_filtered (self):
    # snake_case alias for isBridgeFiltered()
    return self.isBridgeFiltered()
  def isGlobal (self):
    """
    Returns True if this is a globally unique (OUI enforced) address.
    """
    return not self.isLocal()
  def isLocal (self):
    """
    Returns True if this is a locally-administered (non-global) address.
    """
    # Locally-administered bit is bit 1 of the first octet
    return True if (ord(self._value[0]) & 2) else False
  @property
  def is_local (self):
    # snake_case alias for isLocal()
    return self.isLocal()
  @property
  def is_global (self):
    # snake_case alias for isGlobal()
    return self.isGlobal()
  def isMulticast (self):
    """
    Returns True if this is a multicast address.
    """
    # Group bit is bit 0 of the first octet
    return True if (ord(self._value[0]) & 1) else False
  @property
  def is_multicast (self):
    # snake_case alias for isMulticast()
    return self.isMulticast()
  def toRaw (self):
    # Legacy alias for the .raw property
    return self.raw
  @property
  def raw (self):
    """
    Returns the address as a 6-long bytes object.
    """
    return self._value
  def toTuple (self):
    """
    Returns a 6-entry long tuple where each entry is the numeric value
    of the corresponding byte of the address.
    """
    return tuple((ord(x) for x in self._value))
  def toStr (self, separator = ':', resolveNames = False):
    """
    Returns the address as string consisting of 12 hex chars separated
    by separator.

    If resolveNames is True, it may return company names based on
    the OUI. (Currently unimplemented)
    """
    #TODO: show OUI info from packet lib ?
    return separator.join(('%02x' % (ord(x),) for x in self._value))
  def __str__ (self):
    return self.toStr()
  def __cmp__ (self, other):
    """
    Python 2 three-way comparison on the raw bytes; non-EthAddr operands
    are coerced through the EthAddr constructor.
    """
    #TODO: Revisit this and other __cmp__ in Python 3.4
    try:
      if type(other) == EthAddr:
        other = other._value
      elif type(other) == bytes:
        pass
      else:
        other = EthAddr(other)._value
      return cmp(self._value, other)
    except:
      # Coercion failed; fall back to the reflected comparison
      return -cmp(other, self)
  def __hash__ (self):
    # Hash of the raw bytes -- consistent with __cmp__ equality
    return self._value.__hash__()
  def __repr__ (self):
    return self.__class__.__name__ + "('" + self.toStr() + "')"
  def __len__ (self):
    # An Ethernet address is always six bytes
    return 6
  def __setattr__ (self, a, v):
    # Enforce immutability: once _value is set, no attribute may change
    if hasattr(self, '_value'):
      raise TypeError("This object is immutable")
    object.__setattr__(self, a, v)
class IPAddr (object):
  """
  Represents an IPv4 address.

  Stored internally as a signed int in network byte order; instances are
  immutable (see __setattr__).

  NOTE(review): Python 2 code (basestring, cmp semantics).
  """
  def __init__ (self, addr, networkOrder = False):
    """
    Initialize using several possible formats

    If addr is an int/long, then it is assumed to be in host byte order
    unless networkOrder = True

    Stored in network byte order as a signed int
    """
    # Always stores as a signed network-order int
    if isinstance(addr, basestring) or isinstance(addr, bytes):
      if len(addr) != 4:
        # dotted quad
        self._value = struct.unpack('i', socket.inet_aton(addr))[0]
      else:
        # Exactly four bytes: treat as the raw packed address
        self._value = struct.unpack('i', addr)[0]
    elif isinstance(addr, IPAddr):
      # Copy constructor
      self._value = addr._value
    elif isinstance(addr, int) or isinstance(addr, long):
      addr = addr & 0xffFFffFF # unsigned long
      # Pack as unsigned (swapping to network order if needed), then
      # reinterpret the same bytes as a signed network-order int.
      self._value = struct.unpack("!i",
          struct.pack(('!' if networkOrder else '') + "I", addr))[0]
    else:
      raise RuntimeError("Unexpected IP address format")
  def toSignedN (self):
    """ A shortcut """
    return self.toSigned(networkOrder = True)
  def toUnsignedN (self):
    """ A shortcut """
    return self.toUnsigned(networkOrder = True)
  def toSigned (self, networkOrder = False):
    """ Return the address as a signed int """
    if networkOrder:
      return self._value
    # Swap to host order, then reinterpret the unsigned value as signed
    v = socket.htonl(self._value & 0xffFFffFF)
    return struct.unpack("i", struct.pack("I", v))[0]
  def toRaw (self):
    # Legacy alias for the .raw property
    return self.raw
  @property
  def raw (self):
    """
    Returns the address as a four-character byte string.
    """
    return struct.pack("i", self._value)
  def toUnsigned (self, networkOrder = False):
    """
    Returns the address as an integer in either network or host (the
    default) byte order.
    """
    if not networkOrder:
      return socket.htonl(self._value & 0xffFFffFF)
    return self._value & 0xffFFffFF
  def toStr (self):
    """ Return dotted quad representation """
    return socket.inet_ntoa(self.toRaw())
  def in_network (self, *args, **kw):
    # snake_case alias for inNetwork()
    return self.inNetwork(*args, **kw)
  def inNetwork (self, network, netmask = None):
    """
    Returns True if this network is in the specified network.
    network is a dotted quad (with or without a CIDR or normal style
    netmask, which can also be specified separately via the netmask
    parameter), or it can be a tuple of (address,network-bits) like that
    returned by parse_cidr().
    """
    if type(network) is not tuple:
      if netmask is not None:
        network = str(network)
        network += "/" + str(netmask)
      n,b = parse_cidr(network)
    else:
      n,b = network
      if type(n) is not IPAddr:
        n = IPAddr(n)
    # Compare only the network (non-wildcarded) bits
    return (self.toUnsigned() & ~((1 << (32-b))-1)) == n.toUnsigned()
  @property
  def is_multicast (self):
    # Class D: top four bits are 1110
    return ((self.toSigned(networkOrder = False) >> 24) & 0xe0) == 0xe0
  @property
  def multicast_ethernet_address (self):
    """
    Returns corresponding multicast EthAddr

    Assumes this is, in fact, a multicast IP address!
    """
    if not self.is_multicast:
      raise RuntimeError("No multicast EthAddr for non-multicast IPAddr!")
    # Low 23 bits of the group address map into the 01:00:5e OUI block
    n = self.toUnsigned(networkOrder = False) & 0x7fffff
    return EthAddr("01005e" + ("%06x" % (n)))
  def __str__ (self):
    return self.toStr()
  def __cmp__ (self, other):
    """
    Python 2 three-way comparison on the unsigned host-order value;
    non-IPAddr operands are coerced through the IPAddr constructor.
    """
    if other is None: return 1
    try:
      if not isinstance(other, IPAddr):
        other = IPAddr(other)
      return cmp(self.toUnsigned(), other.toUnsigned())
    except:
      # Coercion failed; fall back to the reflected comparison
      return -other.__cmp__(self)
  def __hash__ (self):
    # Hash of the stored signed int -- consistent with __cmp__ equality
    return self._value.__hash__()
  def __repr__ (self):
    return self.__class__.__name__ + "('" + self.toStr() + "')"
  def __len__ (self):
    # An IPv4 address is always four bytes
    return 4
  def __setattr__ (self, a, v):
    # Enforce immutability: once _value is set, no attribute may change
    if hasattr(self, '_value'):
      raise TypeError("This object is immutable")
    object.__setattr__(self, a, v)
class IPAddr6 (object):
  """
  Represents an IPv6 address.

  Stored internally as 16 raw bytes; instances are immutable (see
  __setattr__).

  NOTE(review): Python 2 code (unicode, xrange, cmp semantics).
  """
  @classmethod
  def from_raw (cls, raw):
    """ Construct from 16 raw bytes. """
    return cls(raw, raw=True)
  @classmethod
  def from_num (cls, num):
    """ Construct from a 128-bit integer. """
    o = b''
    for i in xrange(16):
      o = chr(num & 0xff) + o
      num >>= 8
    return cls.from_raw(o)
  def __init__ (self, addr = None, raw = False, network_order = False):
    """
    Initialize from a textual IPv6 address, another IPAddr6, an IPAddr
    (mapped into ::ffff:0:0:a.b.c.d form), raw bytes (with raw=True or
    when passed as the 'raw' argument), or None (the undefined address).
    """
    # When we move to Python 3, we can use bytes to infer raw.
    if addr is None and isinstance(raw, (bytes,bytearray)):
      addr = raw
      raw = True
    if addr is None:
      # Default-construct as the undefined address ("::").
      # BUGFIX: the original did 'return self.UNDEFINED' -- returning a
      # non-None value from __init__ raises TypeError, so IPAddr6() could
      # never actually be constructed without arguments.
      self._value = b'\x00' * 16
      return
    if isinstance(addr, unicode) or (isinstance(addr, bytes) and not raw):
      # Textual form, possibly with a trailing dotted-quad IPv4 part
      ip4part = None
      if '.' in addr:
        addr,ip4part = addr.rsplit(':',1)
        if '.' in addr:
          raise RuntimeError('IPv4-compatible representation unimplemented')
        if ':' in ip4part:
          raise RuntimeError('Bad address format')
        # The IPv4 part occupies the last two 16-bit groups; pad with
        # placeholders and patch the real bytes in below.
        addr += ':0:0'
      segs = addr.split(':')
      if addr.count('::') > 1:
        raise RuntimeError("Bad address format " + str(addr))
      if len(segs) < 3 or len(segs) > 8:
        raise RuntimeError("Bad address format " + str(addr))
      # p[0] holds groups before a '::' gap, p[1] those after it
      p = ([],[])
      side = 0
      for i,s in enumerate(segs):
        if len(s) == 0:
          side = 1
          continue
        s = int(s,16)
        if s < 0 or s > 0xffff:
          raise RuntimeError("Bad address format " + str(addr))
        p[side].append(s)
      # Expand the '::' gap with zero groups to reach eight groups total
      o = p[0] + ([0] * (8-len(p[0])-len(p[1]))) + p[1]
      v = b''
      for b in o:
        v += struct.pack('!H', b)
      if ip4part is not None:
        v = v[:-4] + IPAddr(ip4part).toRaw()
      self._value = v
    elif isinstance(addr, type(self)):
      # Copy constructor
      self._value = addr._value
    elif isinstance(addr, IPAddr):
      #FIXME: This is hacky.
      # BUGFIX: store the mapped address's raw *bytes*; the original
      # assigned the intermediate IPAddr6 object itself to _value, which
      # broke raw/num/to_str for instances built from an IPAddr.
      self._value = IPAddr6("::ffff:0:0:" + str(addr)).raw
    elif isinstance(addr, bytearray):
      self._value = bytes(addr)
    elif isinstance(addr, bytes):
      self._value = addr
    else:
      raise RuntimeError("Unexpected IP address format")
  @property
  def raw (self):
    """ The address as 16 raw bytes. """
    return self._value
  @property
  def ipv4 (self):
    # Unchecked shortcut for to_ipv4()
    return self.to_ipv4(check_ipv4=False)
  def to_ipv4 (self, check_ipv4 = True):
    """
    Only makes sense if this address is ipv4 mapped/compatible

    Returns the low four bytes as an IPAddr.
    """
    if check_ipv4:
      if not self.is_ipv4:
        raise RuntimeError('Not an IPv4ish IPv6 address')
    return IPAddr(self._value[-4:])
  @property
  def num (self):
    """ The address as a 128-bit integer. """
    o = 0
    for b in self._value:
      o = (o << 8) | ord(b)
    return o
  @property
  def is_multicast (self):
    return self.in_network('ff00::/8')
  @property
  def is_global_unicast (self):
    return self.in_network('2000::/3')
  @property
  def is_unique_local_unicast (self):
    return self.in_network('fc00::/7')
  @property
  def is_link_unicast (self):
    return self.in_network('fe80::/10')
  @property
  def is_ipv4 (self):
    return self.in_network('::/80')
  @property
  def is_ipv4_compatible (self):
    return self.in_network('::/96')
  @property
  def is_ipv4_mapped (self):
    return self.in_network('::ffff:0:0/96')
  @property
  def is_reserved (self):
    #TODO
    raise RuntimeError("Not implemented")
  @staticmethod
  def netmask_to_cidr (dq):
    """
    Takes a netmask as either an IPAddr or a string, and returns the number
    of network bits. e.g., 255.255.255.0 -> 24
    Raise exception if subnet mask is not CIDR-compatible.
    """
    if isinstance(dq, basestring):
      dq = IPAddr6(dq)
    v = dq.num
    c = 0
    # Count leading one bits; any stray one bit left afterwards means the
    # mask was not a contiguous prefix.
    while v & (1<<127):
      c += 1
      v <<= 1
    v = v & ((1<<128)-1)
    if v != 0:
      raise RuntimeError("Netmask %s is not CIDR-compatible" % (dq,))
    return c
  @staticmethod
  def cidr_to_netmask (bits):
    """
    Takes a number of network bits, and returns the corresponding netmask
    as an IPAddr6.
    """
    # 'bits' ones followed by (128-bits) zeroes
    v = (1 << bits) - 1
    v = v << (128-bits)
    return IPAddr6.from_num(v)
  @staticmethod
  def parse_cidr (addr_and_net, allow_host = False):
    """
    Parses addr/netbits or addr/netmask

    Returns (IPAddr6,netbits)
    """
    addr = addr_and_net
    def check (r0, r1):
      # r1 is the count of wildcarded (host) bits; verify the host part
      # is zero unless allow_host, then convert to network-bit count.
      a = r0.num
      b = r1
      if (not allow_host) and (a & ((1<<b)-1)):
        raise RuntimeError("Host part of CIDR address is not zero (%s)"
                           % (addr,))
      return (r0,128-r1)
    addr = addr.split('/', 2)
    if len(addr) == 1:
      # No netmask given -- treat as a host (/128)
      return check(IPAddr6(addr[0]), 0)
    try:
      wild = 128-int(addr[1])
    except:
      # Maybe they passed a netmask
      m = IPAddr6(addr[1]).num
      b = 0
      while m & (1<<127):
        b += 1
        m <<= 1
      if m & ((1<<127)-1) != 0:
        raise RuntimeError("Netmask " + str(addr[1])
                           + " is not CIDR-compatible")
      wild = 128-b
      assert wild >= 0 and wild <= 128
      return check(IPAddr6(addr[0]), wild)
    assert wild >= 0 and wild <= 128
    return check(IPAddr6(addr[0]), wild)
  def in_network (self, network, netmask = None):
    """
    Returns True if this address is in the specified network.
    network can be specified as:
      IPAddr6 with numeric netbits or netmask in netmask parameter
      textual network with numeric netbits or netmask in netmask parameter
      textual network with netbits or netmask separated by a slash
      tuple of textual address and numeric netbits
      tuple of IPAddr6 and numeric netbits
    """
    if type(network) is not tuple:
      if netmask is not None:
        network = str(network) + "/" + str(netmask)
      n,b = self.parse_cidr(network)
    else:
      n,b = network
      if type(n) is not IPAddr6:
        n = IPAddr6(n)
    # Compare only the network (non-wildcarded) bits
    return (self.num & ~((1 << (128-b))-1)) == n.num
  def to_str (self, zero_drop = True, section_drop = True, ipv4 = None):
    """
    Textual representation.  zero_drop removes leading zeroes within a
    group; section_drop compresses the longest zero run to '::'; ipv4
    forces (or forbids) dotted-quad output of the low 32 bits.
    """
    # Sixteen bytes -> eight 16-bit groups
    o = [ord(lo) | (ord(hi)<<8) for hi,lo in
         (self._value[i:i+2] for i in xrange(0,16,2))]
    if (ipv4 is None and self.is_ipv4_mapped) or ipv4:
      ip4part = o[-2:]
      # Replace the IPv4 groups with nonzero placeholders so the '::'
      # compression below cannot swallow them; finalize() strips them
      # back off and appends the dotted quad.
      o[-2:] = [1,1]
      def finalize (s):
        s = s.rsplit(':',2)[0]
        return s + ":" + str(IPAddr(self.raw[-4:]))
    else:
      def finalize (s):
        return s
    if zero_drop:
      def fmt (n):
        return ':'.join('%x' % (b,) for b in n)
    else:
      def fmt (n):
        return ':'.join('%04x' % (b,) for b in n)
    if section_drop:
      z = [] # [length,pos] of zero run
      run = None
      for i,b in enumerate(o):
        if b == 0:
          if run is None:
            run = [1,i]
            z.append(run)
          else:
            run[0] += 1
        else:
          run = None
      if len(z):
        # Sloppy!
        max_len = max([length for length,pos in z])
        if max_len > 1:
          # Compress the first of the longest zero runs
          z = [pos for length,pos in z if length == max_len]
          z.sort()
          pos = z[0]
          return finalize('::'.join((fmt(o[:pos]),fmt(o[pos+max_len:]))))
    return finalize(fmt(o))
  def __str__ (self):
    return self.to_str()
  def __cmp__ (self, other):
    """
    Python 2 three-way comparison on the raw bytes; non-IPAddr6 operands
    are coerced through the constructor.
    """
    if other is None: return 1
    try:
      if not isinstance(other, type(self)):
        other = type(self)(other)
      return cmp(self._value, other._value)
    except:
      # Coercion failed; fall back to the reflected comparison
      return -cmp(other,self)
  def __hash__ (self):
    # Hash of the raw bytes -- consistent with __cmp__ equality
    return self._value.__hash__()
  def __repr__ (self):
    return type(self).__name__ + "('" + self.to_str() + "')"
  def __len__ (self):
    # An IPv6 address is always sixteen bytes
    return 16
  def __setattr__ (self, a, v):
    # Enforce immutability: once _value is set, no attribute may change
    if hasattr(self, '_value'):
      raise TypeError("This object is immutable")
    object.__setattr__(self, a, v)
  def set_mac (self, eth):
    """
    Return a new IPAddr6 with the interface identifier (low 64 bits)
    derived from the given MAC via modified EUI-64.
    """
    e = list(EthAddr(eth).toTuple())
    e[0] ^= 2            # flip the universal/local bit
    e[3:3] = [0xff,0xfe] # insert the EUI-64 filler bytes
    e = ''.join(chr(b) for b in e)
    return IPAddr6.from_raw(self._value[:8]+e)
IPAddr6.UNDEFINED = IPAddr6('::')
IPAddr6.ALL_NODES_LINK_LOCAL = IPAddr6('ff02::1')
IPAddr6.ALL_ROUTERS_LINK_LOCAL = IPAddr6('ff02::2')
IPAddr6.ALL_NODES_INTERFACE_LOCAL = IPAddr6('ff01::1')
IPAddr6.ALL_ROUTERS_INTERFACE_LOCAL = IPAddr6('ff01::2')
#ff02::1:3 link local multicast name resolution
#ff02::1:ff00:0/104 solicited-node
#ff02::2:ff00:0/104 node information query
def netmask_to_cidr (dq):
  """
  Takes a netmask as either an IPAddr or a string, and returns the number
  of network bits. e.g., 255.255.255.0 -> 24
  Raise exception if subnet mask is not CIDR-compatible.
  """
  if isinstance(dq, basestring):
    dq = IPAddr(dq)
  bits = dq.toUnsigned(networkOrder=False)
  count = 0
  # Count the contiguous run of leading one bits.
  while bits & 0x80000000:
    count += 1
    bits = (bits << 1) & 0xffFFffFF
  # Any one bit remaining means the mask was not a contiguous prefix.
  if bits != 0:
    raise RuntimeError("Netmask %s is not CIDR-compatible" % (dq,))
  return count
def cidr_to_netmask (bits):
  """
  Takes a number of network bits, and returns the corresponding netmask
  as an IPAddr. e.g., 24 -> 255.255.255.0
  """
  # A run of 'bits' ones followed by (32-bits) zeroes.
  mask = ((1 << bits) - 1) << (32 - bits)
  return IPAddr(mask, networkOrder = False)
def parse_cidr (addr, infer=True, allow_host=False):
  """
  Takes a CIDR address or plain dotted-quad, and returns a tuple of address
  and count-of-network-bits.
  Can infer the network bits based on network classes if infer=True.
  Can also take a string in the form 'address/netmask', as long as the
  netmask is representable in CIDR.

  FIXME: This function is badly named.
  """
  def check (r0, r1):
    # r1 is the number of *wildcarded* (host) bits.  Verify the host part
    # is all-zero unless allow_host, then convert to network-bit count.
    a = r0.toUnsigned()
    b = r1
    if (not allow_host) and (a & ((1<<b)-1)):
      raise RuntimeError("Host part of CIDR address is not zero (%s)"
                         % (addr,))
    return (r0,32-r1)
  addr = addr.split('/', 2)
  if len(addr) == 1:
    if infer is False:
      # No netmask given and no inference -- treat as a host (/32)
      return check(IPAddr(addr[0]), 0)
    addr = IPAddr(addr[0])
    b = 32-infer_netmask(addr)
    m = (1<<b)-1
    if (addr.toUnsigned() & m) == 0:
      # All bits in wildcarded part are 0, so we'll use the wildcard
      return check(addr, b)
    else:
      # Some bits in the wildcarded part are set, so we'll assume it's a host
      return check(addr, 0)
  try:
    wild = 32-int(addr[1])
  except:
    # Maybe they passed a netmask
    m = IPAddr(addr[1]).toUnsigned()
    b = 0
    # Count leading one bits of the mask
    while m & (1<<31):
      b += 1
      m <<= 1
    # Any remaining one bit means the mask was not a contiguous prefix
    if m & 0x7fffffff != 0:
      raise RuntimeError("Netmask " + str(addr[1]) + " is not CIDR-compatible")
    wild = 32-b
    assert wild >= 0 and wild <= 32
    return check(IPAddr(addr[0]), wild)
  assert wild >= 0 and wild <= 32
  return check(IPAddr(addr[0]), wild)
def infer_netmask (addr):
  """
  Uses network classes to guess the number of network bits

  Returns the classful network-bit count for the given IPAddr: /8 for
  class A, /16 for B, /24 for C, exact-match (/32) for D and E, and /0
  for the all-zeroes default network.
  """
  addr = addr.toUnsigned()
  if addr == 0:
    # Special case -- default network: everything wildcarded
    return 0
  # The high nibble determines the address class
  top = addr >> 28
  if (top & 0x8) == 0:
    return 8   # Class A (0xxx)
  if (top & 0xC) == 0x8:
    return 16  # Class B (10xx)
  if (top & 0xE) == 0xC:
    return 24  # Class C (110x)
  # Class D (1110, multicast) and Class E (1111, experimental): exact match
  return 32
IP_ANY       = IPAddr("0.0.0.0")          # Wildcard / unspecified address
IP_BROADCAST = IPAddr("255.255.255.255")  # Limited broadcast address
if __name__ == '__main__':
  # A couple sanity checks
  #TODO: move to tests
  import code
  a = IPAddr('255.0.0.1')
  # Each tuple is (constructor argument, networkOrder flag); the same
  # address is built from a string, a network-order int, and a host-order
  # int, and each print shows the actual value next to the expected one.
  for v in [('255.0.0.1',True), (0xff000001, True), (0x010000ff, False)]:
    print("== " + str(v) + " =======================")
    a = IPAddr(v[0],v[1])
    print(a._value,-16777215)
    #print(hex(a._value),'ff000001')
    print(str(a),'255.0.0.1')
    print(hex(a.toUnsigned()),'010000ff')
    print(hex(a.toUnsigned(networkOrder=True)),'ff000001')
    print(a.toSigned(),16777471)
    print(a.toSigned(networkOrder=True),-16777215)
    print("----")
  # All three forms should parse as /24 networks
  print([parse_cidr(x)[1]==24 for x in
         ["192.168.101.0","192.168.102.0/24","1.1.168.103/255.255.255.0"]])
  # Drop into an interactive shell for manual poking
  code.interact(local=locals())
| |
import os
import os.path
import re
from multiprocessing import Pool
from unittest.mock import patch, MagicMock, Mock
import pytest
from ichnaea.models.content import DataMap, encode_datamap_grid
from ichnaea.scripts import datamap
from ichnaea.scripts.datamap import (
csv_to_quadtree,
csv_to_quadtrees,
export_to_csv,
generate,
main,
merge_quadtrees,
render_tiles,
)
from ichnaea import util
@pytest.fixture
def temp_dir():
    """Yield a temporary directory that is removed after the test."""
    with util.selfdestruct_tempdir() as tmp:
        yield tmp
@pytest.fixture
def mock_main_fixtures():
    """Patch datamap.generate and datamap.check_bucket; yield both mocks."""
    patched_generate = patch.object(datamap, "generate", return_value={})
    patched_check = patch.object(datamap, "check_bucket", return_value=(True, None))
    with patched_generate as mock_generate:
        with patched_check as mock_check_bucket:
            yield (mock_generate, mock_check_bucket)
@pytest.fixture
def mock_db_worker_session():
    """
    Mock the db_worker_session used in export_to_csv()

    Other tests use the database test fixtures, but they can't be used in
    export_to_csv when called from export_to_csvs, because the test
    fixtures can't be pickled. This complicated mock works around that
    limitation by patching db_worker_session directly.
    """

    class FakeQueryItem:
        """A fake query row with .grid and .num"""

        def __init__(self, lat, lon):
            self.grid = encode_datamap_grid(*DataMap.scale(lat, lon))
            self.num = 0

    # Test data, by database table.  Each table maps to a list of result
    # batches; get_test_data() pops one batch per query until exhausted.
    test_data = {
        "datamap_ne": [],
        "datamap_nw": [],
        "datamap_se": [
            [FakeQueryItem(lat=12.345, lon=12.345)],
            [FakeQueryItem(lat=0, lon=12.345)],
        ],
        "datamap_sw": [[FakeQueryItem(lat=-10.000, lon=-11.000)]],
    }

    # The expected SQL statement, with binding placeholders
    re_stmt = re.compile(
        r"SELECT `grid`,"
        r" CAST\(ROUND\(DATEDIFF\(CURDATE\(\), `modified`\) / 30\) AS UNSIGNED\) as `num`"
        r" FROM (?P<tablename>datamap_[ns][ew])"
        r" WHERE `grid` > :grid ORDER BY `grid` LIMIT :limit "
    )

    def get_test_data(statement):
        """
        Validate the SQL call and return test data.

        The tablename is extracted from the SQL statement.
        On the first call, the test data, if any, is returned.
        On the second call, an empty list is returned.
        """
        match = re_stmt.match(statement.text)
        assert match
        tablename = match.group("tablename")
        # Result object only allows the methods a real cursor-result has
        result = Mock(spec_set=("fetchall", "close"))
        try:
            data = test_data[tablename].pop(0)
        except IndexError:
            data = []  # No more data, end DB read loop
        result.fetchall.return_value = data
        return result

    # db_worker_session() returns a context manager
    mock_context = MagicMock(spec_set=("__enter__", "__exit__"))
    # The context manager returns a session
    mock_session = Mock(spec_set=["execute"])
    mock_context.__enter__.return_value = mock_session
    # session.execute(SQL_STATEMENT) returns rows of data
    mock_session.execute.side_effect = get_test_data
    with patch("ichnaea.scripts.datamap.db_worker_session") as mock_db_worker_session:
        mock_db_worker_session.return_value = mock_context
        yield mock_db_worker_session
class TestMap(object):
    """End-to-end and argument-handling tests for the datamap script."""

    def _check_quadtree(self, path):
        """Assert that *path* looks like a rendered quadtree directory."""
        assert os.path.isdir(path)
        for name in ("1,0", "meta"):
            assert os.path.isfile(os.path.join(path, name))

    def test_files(self, temp_dir, mock_db_worker_session):
        """Full pipeline: CSV export -> quadtrees -> merge -> tiles."""
        lines = []
        rows = 0
        csvdir = os.path.join(temp_dir, "csv")
        os.mkdir(csvdir)
        quaddir = os.path.join(temp_dir, "quadtrees")
        os.mkdir(quaddir)
        shapes = os.path.join(temp_dir, "shapes")
        tiles = os.path.join(temp_dir, "tiles")
        # Per-shard (row_count, file_count) expected from the mocked DB
        expected = {"ne": (0, 0), "nw": (0, 0), "se": (12, 1), "sw": (6, 1)}
        for shard_id, shard in DataMap.shards().items():
            filename = f"map_{shard_id}.csv"
            filepath = os.path.join(csvdir, filename)
            row_count, file_count = export_to_csv(filename, csvdir, shard.__tablename__)
            assert row_count == expected[shard_id][0]
            assert file_count == expected[shard_id][1]
            if not row_count:
                # Empty shards should not leave a CSV behind
                assert not os.path.isfile(filepath)
                continue
            rows += row_count
            with open(filepath, "r") as fd:
                written = fd.read()
            lines.extend([line.split(",") for line in written.split()])
            csv_to_quadtree(filename, csvdir, quaddir)
            quadfolder = os.path.join(quaddir, "map_" + shard_id)
            assert os.path.isdir(quadfolder)
            self._check_quadtree(quadfolder)
        assert rows
        merge_quadtrees(quaddir, shapes)
        self._check_quadtree(shapes)
        with Pool() as pool:
            render_tiles(pool, shapes, tiles, max_zoom=2)
        assert sorted(os.listdir(tiles)) == ["0", "1", "2"]
        assert sorted(os.listdir(os.path.join(tiles, "0", "0"))) == [
            "0.png",
            "0@2x.png",
        ]
        assert rows == 18
        assert len(lines) == 18
        lats = [round(float(line[0]), 2) for line in lines]
        longs = [round(float(line[1]), 2) for line in lines]
        assert set(lats) == set([-10.0, 0.0, 12.35])
        assert set(longs) == set([-11.0, 12.35])

    def test_multiple_csv(self, temp_dir, raven, mock_db_worker_session):
        """export_to_csv creates multiple CSVs at the file_limit."""
        expected = {"ne": (0, 0), "nw": (0, 0), "se": (12, 2), "sw": (6, 1)}
        csv_dir = os.path.join(temp_dir, "csv")
        os.mkdir(csv_dir)
        for shard_id, shard in DataMap.shards().items():
            filename = f"map_{shard_id}.csv"
            filepath = os.path.join(csv_dir, filename)
            row_count, file_count = export_to_csv(
                filename, csv_dir, shard.__tablename__, file_limit=1
            )
            assert row_count == expected[shard_id][0]
            assert file_count == expected[shard_id][1]
            if not row_count:
                assert not os.path.isfile(filepath)
            elif file_count == 1:
                assert os.path.isfile(filepath)
            else:
                # Split output: numbered submap files instead of one CSV
                assert not os.path.isfile(filepath)
                for num in range(1, file_count + 1):
                    filename_n = f"submap_{shard_id}_{num:04}.csv"
                    filepath_n = os.path.join(csv_dir, filename_n)
                    assert os.path.isfile(filepath_n)
        quad_dir = os.path.join(temp_dir, "quadtrees")
        os.mkdir(quad_dir)
        with Pool() as pool:
            result = csv_to_quadtrees(pool, csv_dir, quad_dir)
        csv_count, intermediate_quad_count, final_quad_count = result
        assert csv_count == 3
        assert intermediate_quad_count == 2
        assert final_quad_count == 2

    def test_generate(self, temp_dir, raven, mock_db_worker_session):
        """generate() calls the steps for tile generation."""
        result = generate(
            temp_dir,
            "bucket_name",
            raven,
            create=True,
            upload=False,
            concurrency=1,
            max_zoom=2,
        )
        assert set(result.keys()) == {
            "csv_count",
            "csv_converted_count",
            "export_duration_s",
            "intermediate_quadtree_count",
            "merge_duration_s",
            "quadtree_count",
            "quadtree_duration_s",
            "render_duration_s",
            "row_count",
            "tile_count",
        }
        assert result["quadtree_count"] == 2
        assert result["row_count"] == 18
        assert result["tile_count"] == 6
        assert result["csv_count"] == 2
        assert result["csv_converted_count"] == 2
        assert result["intermediate_quadtree_count"] == 0

    def test_main(self, raven, temp_dir, mock_main_fixtures):
        """main() calls generate with passed arguments"""
        mock_generate, mock_check_bucket = mock_main_fixtures
        argv = [
            "--create",
            "--upload",
            "--concurrency=1",
            f"--output={temp_dir}",
        ]
        main(argv, _raven_client=raven, _bucket_name="bucket")
        assert len(mock_generate.mock_calls) == 1
        args, kw = mock_generate.call_args
        assert args == (temp_dir, "bucket", raven)
        assert kw == {"concurrency": 1, "create": True, "upload": True}
        mock_check_bucket.assert_called_once_with("bucket")

    def test_main_create_only(self, raven, temp_dir, mock_main_fixtures):
        """main() can just generate tiles."""
        mock_generate, mock_check_bucket = mock_main_fixtures
        argv = ["--create", "--concurrency=1", f"--output={temp_dir}"]
        main(argv, _raven_client=raven, _bucket_name="bucket")
        assert len(mock_generate.mock_calls) == 1
        args, kw = mock_generate.call_args
        assert args == (temp_dir, "bucket", raven)
        assert kw == {"concurrency": 1, "create": True, "upload": False}
        # Without --upload the bucket should never be checked
        assert not mock_check_bucket.mock_calls

    def test_main_upload_only(self, raven, temp_dir, mock_main_fixtures):
        """main() can upload if tiles subfolder exists."""
        mock_generate, mock_check_bucket = mock_main_fixtures
        tiles = os.path.join(temp_dir, "tiles")
        os.makedirs(tiles)
        argv = [
            "--upload",
            f"--output={temp_dir}",
            "--concurrency=1",
        ]
        result = main(argv, _raven_client=raven, _bucket_name="bucket")
        assert result == 0
        assert len(mock_generate.mock_calls) == 1
        args, kw = mock_generate.call_args
        assert args == (temp_dir, "bucket", raven)
        assert kw == {"concurrency": 1, "create": False, "upload": True}
        mock_check_bucket.assert_called_once_with("bucket")

    def test_main_tmp_dir(self, raven, mock_main_fixtures):
        """main() generates a temporary directory if --output omitted."""
        mock_generate, mock_check_bucket = mock_main_fixtures
        argv = ["--create", "--upload"]
        result = main(argv, _raven_client=raven, _bucket_name="bucket")
        assert result == 0
        assert len(mock_generate.mock_calls) == 1
        args, kw = mock_generate.call_args
        assert args[0]  # Some system-specific temporary folder
        # The temporary folder must be cleaned up after main() returns
        assert not os.path.exists(args[0])
        assert args[1:] == ("bucket", raven)
        # Default concurrency matches the usable CPU count
        affinity = len(os.sched_getaffinity(0))
        assert kw == {"concurrency": affinity, "create": True, "upload": True}
        mock_check_bucket.assert_called_once_with("bucket")

    @pytest.mark.parametrize(
        "argv,exitcode",
        (([], 0), (["--create"], 1), (["--upload"], 1)),
        ids=("no args", "create no output", "upload no output"),
    )
    def test_main_early_exit(self, raven, mock_main_fixtures, argv, exitcode):
        """main() exits early for some argument combinations"""
        mock_generate, mock_check_bucket = mock_main_fixtures
        result = main(argv, _raven_client=raven, _bucket_name="bucket")
        assert result == exitcode
        assert not mock_generate.mock_calls
        assert not mock_check_bucket.mock_calls

    def test_main_upload_no_tiles_exits(self, raven, tmpdir, mock_main_fixtures):
        """main() exits early if upload folder has no tiles."""
        mock_generate, mock_check_bucket = mock_main_fixtures
        argv = ["--upload", f"--output={tmpdir}"]
        result = main(argv, _raven_client=raven, _bucket_name="bucket")
        assert result == 1
        assert not mock_generate.mock_calls
        assert not mock_check_bucket.mock_calls
| |
import unittest
from vdf import VDFDict
class VDFDictCase(unittest.TestCase):
def test_init(self):
    """Constructing from a non-mapping (string, int, flat tuple) fails."""
    for bad_input in ("asd zxc", 5, (('1', 1), ('2', 2))):
        with self.assertRaises(ValueError):
            VDFDict(bad_input)
def test_repr(self):
    """repr() of a VDFDict yields a plain string."""
    empty = VDFDict()
    self.assertIsInstance(repr(empty), str)
def test_len(self):
    """len() reflects the number of stored entries."""
    empty = VDFDict()
    single = VDFDict({'1': 1})
    self.assertEqual(len(empty), 0)
    self.assertEqual(len(single), 1)
def test_verify_key_tuple(self):
    """_verify_key_tuple rejects wrong lengths and wrong member types."""
    d = VDFDict()
    for wrong_size in ([], (1,), (1, 1, 1)):
        with self.assertRaises(ValueError):
            d._verify_key_tuple(wrong_size)
    for wrong_types in ((None, 'asd'), ('1', 'asd'), (1, 1), (1, None)):
        with self.assertRaises(TypeError):
            d._verify_key_tuple(wrong_types)
def test_normalize_key(self):
    """A bare key normalizes to index 0; a full (index, key) passes through."""
    d = VDFDict()
    self.assertEqual(d._normalize_key('AAA'), (0, 'AAA'))
    self.assertEqual(d._normalize_key((5, 'BBB')), (5, 'BBB'))
def test_normalize_key_exception(self):
    """Keys that are neither strings nor tuples are rejected."""
    d = VDFDict()
    for bad_key in (5, [], None):
        with self.assertRaises(TypeError):
            d._normalize_key(bad_key)
def test_setitem(self):
    """Insertion order is preserved for distinct keys."""
    expected = list(zip(map(str, range(5, 0, -1)), range(50, 0, -10)))
    d = VDFDict()
    for key, value in expected:
        d[key] = value
    self.assertEqual(expected, list(d.items()))
def test_setitem_with_duplicates(self):
    """Assigning the same key repeatedly appends new entries in order."""
    expected = list(zip(['5'] * 5, range(50, 0, -10)))
    d = VDFDict()
    for key, value in expected:
        d[key] = value
    self.assertEqual(expected, list(d.items()))
def test_setitem_key_exceptions(self):
with self.assertRaises(TypeError):
VDFDict()[5] = None
with self.assertRaises(TypeError):
VDFDict()[(0, 5)] = None
with self.assertRaises(ValueError):
VDFDict()[(0, '5', 1)] = None
def test_setitem_key_valid_types(self):
VDFDict()['5'] = None
VDFDict({'5': None})[(0, '5')] = None
def test_setitem_keyerror_fullkey(self):
with self.assertRaises(KeyError):
VDFDict([("1", None)])[(1, "1")] = None
def test_getitem(self):
a = VDFDict([('1',2), ('1',3)])
self.assertEqual(a['1'], 2)
self.assertEqual(a[(0, '1')], 2)
self.assertEqual(a[(1, '1')], 3)
def test_del(self):
a = VDFDict([("1",1),("1",2),("5",51),("1",3),("5",52)])
b = [("1",1),("1",2),("1",3),("5",52)]
del a["5"]
self.assertEqual(list(a.items()), b)
def test_del_by_fullkey(self):
a = VDFDict([("1",1),("1",2),("5",51),("1",3),("5",52)])
b = [("1",1),("1",2),("1",3),("5",52)]
del a[(0, "5")]
self.assertEqual(list(a.items()), b)
def test_del_first_duplicate(self):
a = [("1",1),("1",2),("1",3),("1",4)]
b = VDFDict(a)
del b["1"]
del b["1"]
del b[(0, "1")]
del b[(0, "1")]
self.assertEqual(len(b), 0)
def test_del_exception(self):
with self.assertRaises(KeyError):
a = VDFDict()
del a["1"]
with self.assertRaises(KeyError):
a = VDFDict({'1':1})
del a[(1, "1")]
def test_iter(self):
a = VDFDict({"1": 1})
iter(a).__iter__
self.assertEqual(len(list(iter(a))), 1)
def test_in(self):
a = VDFDict({"1":2, "3":4, "5":6})
self.assertTrue('1' in a)
self.assertTrue((0, '1') in a)
self.assertFalse('6' in a)
self.assertFalse((1, '1') in a)
def test_eq(self):
self.assertEqual(VDFDict(), VDFDict())
self.assertNotEqual(VDFDict(), VDFDict({'1':1}))
self.assertNotEqual(VDFDict(), {'1':1})
a = [("a", 1), ("b", 5), ("a", 11)]
self.assertEqual(VDFDict(a), VDFDict(a))
self.assertNotEqual(VDFDict(a), VDFDict(a[1:]))
def test_clear(self):
a = VDFDict([("1",2),("1",2),("5",3),("1",2)])
a.clear()
self.assertEqual(len(a), 0)
self.assertEqual(len(a.keys()), 0)
self.assertEqual(len(list(a.iterkeys())), 0)
self.assertEqual(len(a.values()), 0)
self.assertEqual(len(list(a.itervalues())), 0)
self.assertEqual(len(a.items()), 0)
self.assertEqual(len(list(a.iteritems())), 0)
def test_get(self):
a = VDFDict([('1',11), ('1',22)])
self.assertEqual(a.get('1'), 11)
self.assertEqual(a.get((1, '1')), 22)
self.assertEqual(a.get('2', 33), 33)
self.assertEqual(a.get((0, '2'), 44), 44)
def test_setdefault(self):
a = VDFDict([('1',11), ('1',22)])
self.assertEqual(a.setdefault('1'), 11)
self.assertEqual(a.setdefault((0, '1')), 11)
self.assertEqual(a.setdefault('2'), None)
self.assertEqual(a.setdefault((0, '2')), None)
self.assertEqual(a.setdefault('3', 33), 33)
def test_pop(self):
a = VDFDict([('1',11),('2',22),('1',33),('2',44),('2',55)])
self.assertEqual(a.pop('1'), 11)
self.assertEqual(a.pop('1'), 33)
with self.assertRaises(KeyError):
a.pop('1')
self.assertEqual(a.pop((1, '2')), 44)
self.assertEqual(a.pop((1, '2')), 55)
def test_popitem(self):
a = [('1',11),('2',22),('1',33)]
b = VDFDict(a)
self.assertEqual(b.popitem(), a.pop())
self.assertEqual(b.popitem(), a.pop())
self.assertEqual(b.popitem(), a.pop())
with self.assertRaises(KeyError):
b.popitem()
def test_update(self):
a = VDFDict([("1",2),("1",2),("5",3),("1",2)])
b = VDFDict()
b.update([("1",2),("1",2)])
b.update([("5",3),("1",2)])
self.assertEqual(list(a.items()), list(b.items()))
def test_update_exceptions(self):
a = VDFDict()
with self.assertRaises(TypeError):
a.update(None)
with self.assertRaises(TypeError):
a.update(1)
with self.assertRaises(TypeError):
a.update("asd zxc")
with self.assertRaises(ValueError):
a.update([(1,1,1), (2,2,2)])
map_test = [
("1", 2),
("4", 3),("4", 3),("4", 2),
("7", 2),
("1", 2),
]
def test_keys(self):
_dict = VDFDict(self.map_test)
self.assertSequenceEqual(
list(_dict.keys()),
list(x[0] for x in self.map_test))
def test_values(self):
_dict = VDFDict(self.map_test)
self.assertSequenceEqual(
list(_dict.values()),
list(x[1] for x in self.map_test))
def test_items(self):
_dict = VDFDict(self.map_test)
self.assertSequenceEqual(
list(_dict.items()),
self.map_test)
def test_direct_access_get(self):
b = dict()
a = VDFDict({"1":2, "3":4, "5":6})
for k,v in a.items():
b[k] = v
self.assertEqual(dict(a.items()), b)
def test_duplicate_keys(self):
items = [('key1', 1), ('key1', 2), ('key3', 3), ('key1', 1)]
keys = [x[0] for x in items]
values = [x[1] for x in items]
_dict = VDFDict(items)
self.assertEqual(list(_dict.items()), items)
self.assertEqual(list(_dict.keys()), keys)
self.assertEqual(list(_dict.values()), values)
def test_same_type_init(self):
self.assertSequenceEqual(
tuple(VDFDict(self.map_test).items()),
tuple(VDFDict(VDFDict(self.map_test)).items()))
def test_get_all_for(self):
a = VDFDict([("1",2),("1",2**31),("5",3),("1",2)])
self.assertEqual(
list(a.get_all_for("1")),
[2,2**31,2],
)
def test_get_all_for_invalid_key(self):
a = VDFDict()
with self.assertRaises(TypeError):
a.get_all_for(None)
with self.assertRaises(TypeError):
a.get_all_for(5)
with self.assertRaises(TypeError):
a.get_all_for((0, '5'))
def test_remove_all_for(self):
a = VDFDict([("1",2),("1",2),("5",3),("1",2)])
a.remove_all_for("1")
self.assertEqual(list(a.items()), [("5",3)])
self.assertEqual(len(a), 1)
def test_remove_all_for_invalid_key(self):
a = VDFDict()
with self.assertRaises(TypeError):
a.remove_all_for(None)
with self.assertRaises(TypeError):
a.remove_all_for(5)
with self.assertRaises(TypeError):
a.remove_all_for((0, '5'))
def test_has_duplicates(self):
# single level duplicate
a = [('1', 11), ('1', 22)]
b = VDFDict(a)
self.assertTrue(b.has_duplicates())
# duplicate in nested
c = VDFDict({'1': b})
self.assertTrue(c.has_duplicates())
# duplicate in nested dict
d = VDFDict({'1': {'2': {'3': b}}})
self.assertTrue(d.has_duplicates())
# duplicate in nested dict
d = VDFDict({'1': {'2': {'3': None}}})
self.assertFalse(d.has_duplicates())
| |
"""Common operations on Posix pathnames.
Instead of importing this module directly, import os and refer to
this module as os.path. The "os.path" name is an alias for this
module on Posix systems; on other systems (e.g. Mac, Windows),
os.path provides the same operations in a manner specific to that
platform, and is an alias to another module (e.g. macpath, ntpath).
Some of this can actually be useful on non-Posix systems too, e.g.
for manipulation of the pathname component of URLs.
"""
import os
import sys
import stat
import genericpath
import warnings
from genericpath import *
from genericpath import _unicode
# Public API surface: the names exported by "from posixpath import *"
# and expected of any os.path implementation.
__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
           "basename","dirname","commonprefix","getsize","getmtime",
           "getatime","getctime","islink","exists","lexists","isdir","isfile",
           "ismount","walk","expanduser","expandvars","normpath","abspath",
           "samefile","sameopenfile","samestat",
           "curdir","pardir","sep","pathsep","defpath","altsep","extsep",
           "devnull","realpath","supports_unicode_filenames","relpath"]
# strings representing various path-related bits and pieces
curdir = '.'    # reference to the current directory
pardir = '..'   # reference to the parent directory
extsep = '.'    # separator between a filename root and its extension
sep = '/'       # the POSIX path-component separator
pathsep = ':'   # separator between entries in $PATH-style lists
defpath = ':/bin:/usr/bin'  # fallback executable search path
altsep = None   # POSIX has no alternative separator (cf. '\\' on Windows)
devnull = '/dev/null'       # path of the null device
# Normalize the case of a pathname. Trivial in Posix, string.lower on Mac.
# On MS-DOS this may also turn slashes into backslashes; however, other
# normalizations (such as optimizing '../' away) are not allowed
# (another function should be defined to do that).
def normcase(s):
    """Normalize the case of a pathname.

    POSIX filenames are case-sensitive, so the path is returned unchanged.
    """
    return s
# Return whether a path is absolute.
# Trivial in Posix, harder on the Mac or MS-DOS.
def isabs(s):
    """Test whether a path is absolute, i.e. begins with a slash."""
    return s[:1] == '/'
# Join pathnames.
# Ignore the previous parts if a part is absolute.
# Insert a '/' unless the first part is empty or already ends in '/'.
def join(a, *p):
    """Join two or more pathname components, inserting '/' as needed.

    If any component is an absolute path, all previous components are
    discarded.  An empty last part yields a path ending in a separator.
    """
    result = a
    for part in p:
        if part.startswith('/'):
            # Absolute component: restart the path from it.
            result = part
        elif not result or result.endswith('/'):
            result = result + part
        else:
            result = result + '/' + part
    return result
# Split a path in head (everything up to the last '/') and tail (the
# rest). If the path ends in '/', tail will be empty. If there is no
# '/' in the path, head will be empty.
# Trailing '/'es are stripped from head unless it is the root.
def split(p):
    """Split a pathname into "(head, tail)" around the final slash.

    Either part may be empty; trailing slashes are stripped from head
    unless head is entirely slashes (the root).
    """
    cut = p.rfind('/') + 1
    head, tail = p[:cut], p[cut:]
    if head and head.strip('/'):
        head = head.rstrip('/')
    return head, tail
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
def splitext(p):
    # Delegate to the shared implementation in genericpath, passing this
    # platform's separator characters so the split happens at the last
    # dot of the final path component.
    return genericpath._splitext(p, sep, altsep, extsep)
# Reuse the generic implementation's docstring verbatim.
splitext.__doc__ = genericpath._splitext.__doc__
# Split a pathname into a drive specification and the rest of the
# path. Useful on DOS/Windows/NT; on Unix, the drive is always empty.
def splitdrive(p):
    """Split a pathname into drive and path.

    POSIX has no drive letters, so the drive part is always empty.
    """
    return '', p
# Return the tail (basename) part of a path, same as split(path)[1].
def basename(p):
    """Return the final component of a pathname (same as split(p)[1])."""
    return p[p.rfind('/') + 1:]
# Return the head (dirname) part of a path, same as split(path)[0].
def dirname(p):
    """Return the directory component of a pathname (same as split(p)[0])."""
    head = p[:p.rfind('/') + 1]
    # Strip trailing slashes unless head is entirely slashes (the root).
    if head and head.strip('/'):
        head = head.rstrip('/')
    return head
# Is a path a symbolic link?
# This will always return false on systems where os.lstat doesn't exist.
def islink(path):
    """Test whether a path is a symbolic link."""
    try:
        mode = os.lstat(path).st_mode
    except (os.error, AttributeError):
        # Nonexistent path, or a platform with no os.lstat at all.
        return False
    return stat.S_ISLNK(mode)
# Being true for dangling symbolic links is also useful.
def lexists(path):
    """Test whether a path exists.  Returns True for broken symlinks."""
    try:
        # lstat (not stat) so a dangling symlink still counts as existing.
        os.lstat(path)
    except os.error:
        return False
    else:
        return True
# Are two filenames really pointing to the same file?
def samefile(f1, f2):
    """Test whether two pathnames reference the same actual file."""
    # Stat both paths and compare device/inode via samestat().
    return samestat(os.stat(f1), os.stat(f2))
# Are two open files really referencing the same file?
# (Not necessarily the same file descriptor!)
def sameopenfile(fp1, fp2):
    """Test whether two open file descriptors reference the same file."""
    # fstat takes file descriptors (not necessarily the same one).
    return samestat(os.fstat(fp1), os.fstat(fp2))
# Are two stat buffers (obtained from stat, fstat or lstat)
# describing the same file?
def samestat(s1, s2):
    """Test whether two stat buffers reference the same file.

    Two stat results describe one file exactly when both the inode
    number and the device id match.
    """
    return (s1.st_ino, s1.st_dev) == (s2.st_ino, s2.st_dev)
# Is a path a mount point?
# (Does this work for all UNIXes? Is it even guaranteed to work by Posix?)
def ismount(path):
    """Test whether a path is a mount point."""
    if islink(path):
        # A symlink can never be a mount point.
        return False
    try:
        this = os.lstat(path)
        parent = os.lstat(join(path, '..'))
    except os.error:
        # It doesn't exist -- so not a mount point.
        return False
    if this.st_dev != parent.st_dev:
        # path/.. lives on a different device than path.
        return True
    # On a mount point, path and path/.. share the same i-node.
    return this.st_ino == parent.st_ino
# Directory tree walk.
# For each directory under top (including top itself, but excluding
# '.' and '..'), func(arg, dirname, filenames) is called, where
# dirname is the name of the directory and filenames is the list
# of files (and subdirectories etc.) in the directory.
# The func may modify the filenames list, to implement a filter,
# or to impose a different order of visiting.
def walk(top, func, arg):
    """Directory tree walk with callback function.

    For each directory in the tree rooted at *top* (including top itself,
    excluding '.' and '..'), call func(arg, dirname, fnames) where fnames
    is the list of entries in dirname.  func may prune fnames in place to
    control recursion.  *arg* is passed through unchanged.  Deprecated in
    favor of os.walk.
    """
    warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.",
                      stacklevel=2)
    try:
        entries = os.listdir(top)
    except os.error:
        # Unreadable/nonexistent directory: nothing to visit.
        return
    func(arg, top, entries)
    for entry in entries:
        full = join(top, entry)
        try:
            st = os.lstat(full)
        except os.error:
            continue
        # Recurse into real subdirectories only (lstat: don't follow links).
        if stat.S_ISDIR(st.st_mode):
            walk(full, func, arg)
# Expand paths beginning with '~' or '~user'.
# '~' means $HOME; '~user' means that user's home directory.
# If the path doesn't begin with '~', or if the user or $HOME is unknown,
# the path is returned unchanged (leaving error reporting to whatever
# function is called with the expanded path as argument).
# See also module 'glob' for expansion of *, ? and [...] in pathnames.
# (A function should also be defined to do full *sh-style environment
# variable expansion.)
def expanduser(path):
    """Expand ~ and ~user constructions.

    If the user or $HOME is unknown, the path is returned unchanged.
    """
    if not path.startswith('~'):
        return path
    slash = path.find('/', 1)
    if slash < 0:
        slash = len(path)
    if slash == 1:
        # Bare '~': prefer $HOME, fall back to the password database.
        if 'HOME' in os.environ:
            userhome = os.environ['HOME']
        else:
            import pwd
            userhome = pwd.getpwuid(os.getuid()).pw_dir
    else:
        # '~user': look the named user up in the password database.
        import pwd
        try:
            entry = pwd.getpwnam(path[1:slash])
        except KeyError:
            # Unknown user: leave the path untouched.
            return path
        userhome = entry.pw_dir
    userhome = userhome.rstrip('/')
    # If everything stripped away (e.g. HOME='/'), the result is root.
    return (userhome + path[slash:]) or '/'
# Expand paths containing shell variable substitutions.
# This expands the forms $variable and ${variable} only.
# Non-existent variables are left unchanged.
# Lazily-compiled regexes matching $var / ${var} references: one for
# byte-string input, one for unicode input (compiled on first use in
# expandvars).
_varprog = None
_uvarprog = None
def expandvars(path):
    """Expand shell variables of form $var and ${var}.

    Unknown variables are left unchanged in the result.
    """
    global _varprog, _uvarprog
    if '$' not in path:
        # Fast path: nothing to expand.
        return path
    if isinstance(path, _unicode):
        # Unicode input: compile (once) a unicode-aware pattern and
        # decode environment values using the filesystem encoding.
        if not _uvarprog:
            import re
            _uvarprog = re.compile(ur'\$(\w+|\{[^}]*\})', re.UNICODE)
        varprog = _uvarprog
        encoding = sys.getfilesystemencoding()
    else:
        if not _varprog:
            import re
            _varprog = re.compile(r'\$(\w+|\{[^}]*\})')
        varprog = _varprog
        encoding = None
    i = 0
    while True:
        m = varprog.search(path, i)
        if not m:
            break
        i, j = m.span(0)
        name = m.group(1)
        # Strip the braces of the ${var} form.
        if name.startswith('{') and name.endswith('}'):
            name = name[1:-1]
        if encoding:
            # os.environ keys are byte strings on Python 2.
            name = name.encode(encoding)
        if name in os.environ:
            # Splice the variable's value in place of the reference,
            # then resume scanning after the inserted value.
            tail = path[j:]
            value = os.environ[name]
            if encoding:
                value = value.decode(encoding)
            path = path[:i] + value
            i = len(path)
            path += tail
        else:
            # Unknown variable: keep the literal text, continue after it.
            i = j
    return path
# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
# It should be understood that this may change the meaning of the path
# if it contains symbolic links!
def normpath(path):
    """Normalize a path: collapse '//' and 'A/./B', resolve 'A/foo/../B'.

    This is purely lexical and may change the meaning of a path that
    contains symbolic links.
    """
    # Preserve the unicode-ness of the input throughout.
    slash, dot = (u'/', u'.') if isinstance(path, _unicode) else ('/', '.')
    if path == '':
        return dot
    initial_slashes = path.startswith('/')
    # POSIX treats exactly two leading slashes specially; three or more
    # collapse to a single slash.
    if (initial_slashes and
        path.startswith('//') and not path.startswith('///')):
        initial_slashes = 2
    kept = []
    for comp in path.split('/'):
        if comp in ('', '.'):
            continue
        if comp == '..' and kept and kept[-1] != '..':
            # '..' cancels the preceding real component.
            kept.pop()
        elif comp == '..' and not kept and initial_slashes:
            # '..' at the root stays at the root.
            continue
        else:
            kept.append(comp)
    path = slash.join(kept)
    if initial_slashes:
        path = slash * initial_slashes + path
    return path or dot
def abspath(path):
    """Return a normalized, absolute version of *path*."""
    if isabs(path):
        return normpath(path)
    # Anchor relative paths at the current working directory, keeping
    # unicode paths unicode via os.getcwdu().
    cwd = os.getcwdu() if isinstance(path, _unicode) else os.getcwd()
    return normpath(join(cwd, path))
# Return a canonical path (i.e. the absolute location of a file on the
# filesystem).
def realpath(filename):
    """Return the canonical path of *filename*, eliminating any symbolic
    links encountered in the path."""
    # The ok flag only matters inside the recursive helper; even when a
    # symlink loop is detected we return the best-effort resolution.
    resolved, _ok = _joinrealpath('', filename, {})
    return abspath(resolved)
# Join two paths, normalizing ang eliminating any symbolic links
# encountered in the second path.
def _joinrealpath(path, rest, seen):
    # Join the already-resolved /path/ with /rest/, resolving symlinks in
    # /rest/ along the way.  /seen/ maps visited symlink paths to their
    # resolution -- None while resolution is in progress, which is how a
    # symlink loop is detected.  Returns (resolved_path, ok); ok is False
    # when a loop forced the remainder to stay unresolved.
    if isabs(rest):
        rest = rest[1:]
        path = sep
    while rest:
        name, _, rest = rest.partition(sep)
        if not name or name == curdir:
            # current dir
            continue
        if name == pardir:
            # parent dir
            if path:
                path, name = split(path)
                if name == pardir:
                    # Parent of an unresolved '..' is one more '..'.
                    path = join(path, pardir, pardir)
            else:
                path = pardir
            continue
        newpath = join(path, name)
        if not islink(newpath):
            path = newpath
            continue
        # Resolve the symbolic link
        if newpath in seen:
            # Already seen this path
            path = seen[newpath]
            if path is not None:
                # use cached value
                continue
            # The symlink is not resolved, so we must have a symlink loop.
            # Return already resolved part + rest of the path unchanged.
            return join(newpath, rest), False
        seen[newpath] = None # not resolved symlink
        path, ok = _joinrealpath(path, os.readlink(newpath), seen)
        if not ok:
            return join(path, rest), False
        seen[newpath] = path # resolved symlink
    return path, True
# Whether arbitrary unicode filenames are supported natively; this
# implementation claims it only on darwin (OS X).
supports_unicode_filenames = (sys.platform == 'darwin')
def relpath(path, start=curdir):
    """Return a relative version of a path"""
    if not path:
        raise ValueError("no path specified")
    start_parts = [part for part in abspath(start).split(sep) if part]
    path_parts = [part for part in abspath(path).split(sep) if part]
    # Length of the common ancestry shared by start and path.
    common = len(commonprefix([start_parts, path_parts]))
    # Climb out of start's remainder, then descend into path's.
    hops = [pardir] * (len(start_parts) - common) + path_parts[common:]
    if not hops:
        return curdir
    return join(*hops)
| |
# -*- coding: utf-8 -*-
from django.db import models, migrations
import datetime
import django.utils.timezone
import devilry.apps.core.models.abstract_is_examiner
import devilry.apps.core.models.abstract_is_candidate
import devilry.apps.core.models.custom_db_fields
import devilry.apps.core.models.static_feedback
import devilry.apps.core.models.basenode
import devilry.apps.core.models.model_utils
import django.db.models.deletion
from django.conf import settings
import devilry.apps.core.models.abstract_is_admin
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Assignment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('short_name', devilry.apps.core.models.custom_db_fields.ShortNameField(help_text='A short name with at most 20 letters. Can only contain lowercase english letters (a-z), numbers, underscore ("_") and hyphen ("-"). This is used when the the regular name takes to much space. Be VERY careful about changing the short name - it is typically used as an identifier when importing and exporting data from Devilry.', max_length=20, verbose_name='Short name')),
('long_name', devilry.apps.core.models.custom_db_fields.LongNameField(max_length=100, verbose_name='Name', db_index=True)),
('publishing_time', models.DateTimeField(help_text='The time when the assignment is to be published (visible to students and examiners).', verbose_name='Publishing time')),
('anonymous', models.BooleanField(default=False, help_text='On anonymous assignments, examiners and students can NOT see each other and they can NOT communicate. If an assignment is anonymous, examiners see candidate-id instead of any personal information about the students. This means that anonymous assignments is perfect for exams, and for assignments where you do not want prior experiences with a student to affect results.', verbose_name='Anonymous?')),
('students_can_see_points', models.BooleanField(default=True, verbose_name='Students can see points')),
('delivery_types', models.PositiveIntegerField(default=0, help_text='This option controls what types of deliveries this assignment accepts. See the Delivery documentation for more info.', choices=[(0, 'Electronic'), (1, 'Non electronic'), (2, 'Alias')])),
('deadline_handling', models.PositiveIntegerField(default=0, help_text='With HARD deadlines, students will be unable to make deliveries when a deadline has expired. With SOFT deadlines students will be able to make deliveries after the deadline has expired. All deliveries after their deadline are clearly highligted. NOTE: Devilry is designed from the bottom up to gracefully handle SOFT deadlines. Students have to perform an extra confirm-step when adding deliveries after their active deadline, and assignments where the deadline has expired is clearly marked for both students and examiners.', verbose_name='Deadline handling', choices=[(0, 'Soft deadlines'), (1, 'Hard deadlines')])),
('scale_points_percent', models.PositiveIntegerField(default=100, help_text='Percent to scale points on this assignment by for period overviews. The default is 100, which means no change to the points.')),
('first_deadline', models.DateTimeField(null=True, blank=True)),
('max_points', models.PositiveIntegerField(default=1, help_text='Specify the maximum number of points possible for this assignment.', null=True, verbose_name='Maximum points', blank=True)),
('passing_grade_min_points', models.PositiveIntegerField(default=1, null=True, verbose_name='Minumum number of points required to pass', blank=True)),
('points_to_grade_mapper', models.CharField(default='passed-failed', max_length=25, null=True, blank=True, choices=[('passed-failed', 'As passed or failed'), ('raw-points', 'As points'), ('custom-table', 'As a text looked up in a custom table')])),
('grading_system_plugin_id', models.CharField(default='devilry_gradingsystemplugin_approved', max_length=300, null=True, blank=True)),
('students_can_create_groups', models.BooleanField(default=False, help_text='Select this if students should be allowed to join/leave groups. Even if this is not selected, you can still organize your students in groups manually.', verbose_name='Students can create project groups?')),
('students_can_not_create_groups_after', models.DateTimeField(default=None, help_text='Students can not create project groups after this time. Ignored if "Students can create project groups" is not selected.', null=True, verbose_name='Students can not create project groups after', blank=True)),
('feedback_workflow', models.CharField(default='', max_length=50, verbose_name='Feedback workflow', blank=True, choices=[('', 'Simple - Examiners write feedback, and publish it whenever they want. Does not handle coordination of multiple examiners at all.'), ('trusted-cooperative-feedback-editing', 'Trusted cooperative feedback editing - Examiners can only save feedback drafts. Examiners share the same feedback drafts, which means that one examiner can start writing feedback and another can continue. When an administrator is notified by their examiners that they have finished correcting, they can publish the drafts via the administrator UI. If you want one examiner to do the bulk of the work, and just let another examiner read it over and adjust the feedback, make the first examiner the only examiner, and reassign the students to the other examiner when the first examiner is done.')])),
('admins', models.ManyToManyField(to=settings.AUTH_USER_MODEL, verbose_name='Administrators', blank=True)),
],
options={
'ordering': ['short_name'],
},
bases=(models.Model, devilry.apps.core.models.basenode.BaseNode, devilry.apps.core.models.abstract_is_examiner.AbstractIsExaminer, devilry.apps.core.models.abstract_is_candidate.AbstractIsCandidate),
),
migrations.CreateModel(
name='AssignmentGroup',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text='An optional name for the group. Typically used a project name on project assignments.', max_length=30, null=True, blank=True)),
('is_open', models.BooleanField(default=True, help_text='If this is checked, the group can add deliveries.')),
('etag', models.DateTimeField(auto_now_add=True)),
('delivery_status', models.CharField(blank=True, max_length=30, null=True, help_text='The delivery_status of a group', choices=[('no-deadlines', 'No deadlines'), ('corrected', 'Corrected'), ('closed-without-feedback', 'Closed without feedback'), ('waiting-for-something', 'Waiting for something')])),
('created_datetime', models.DateTimeField(default=django.utils.timezone.now, blank=True)),
('copied_from', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.AssignmentGroup', null=True)),
],
options={
'ordering': ['id'],
},
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin, devilry.apps.core.models.abstract_is_examiner.AbstractIsExaminer, devilry.apps.core.models.model_utils.Etag),
),
migrations.CreateModel(
name='AssignmentGroupTag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
# ('tag', models.SlugField(help_text='A tag can contain a-z, A-Z, 0-9 and "_".', max_length=20)),
# ('assignment_group', models.ForeignKey(related_name='tags', to='core.AssignmentGroup')),
('tag', models.SlugField(help_text='A tag can contain a-z, A-Z, 0-9 and "_".', max_length=20)),
('assignment_group', models.ForeignKey(related_name='tags', to='core.AssignmentGroup', on_delete=models.CASCADE)),
],
options={
'ordering': ['tag'],
},
),
migrations.CreateModel(
name='Candidate',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('candidate_id', models.CharField(help_text='An optional candidate id. This can be anything as long as it is less than 30 characters. Used to show the user on anonymous assignmens.', max_length=30, null=True, blank=True)),
('automatic_anonymous_id', models.CharField(default='', help_text='An automatically generated anonymous ID.', max_length=255, blank=True)),
('assignment_group', models.ForeignKey(related_name='candidates', to='core.AssignmentGroup', on_delete=models.CASCADE)),
('student', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
),
migrations.CreateModel(
name='Deadline',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('deadline', models.DateTimeField(help_text='The time of the deadline.')),
('text', models.TextField(help_text='An optional text to show to students and examiners.', null=True, blank=True)),
('deliveries_available_before_deadline', models.BooleanField(default=False, help_text='Should deliveries on this deadline be available to examiners before thedeadline expires? This is set by students.')),
('why_created', models.CharField(default=None, max_length=50, null=True, blank=True, choices=[(None, 'Unknown.'), ('examiner-gave-another-chance', 'Examiner gave the student another chance.')])),
('added_by', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to=settings.AUTH_USER_MODEL, null=True)),
('assignment_group', models.ForeignKey(related_name='deadlines', to='core.AssignmentGroup', on_delete=models.CASCADE)),
],
options={
'ordering': ['-deadline'],
'verbose_name': 'Deadline',
'verbose_name_plural': 'Deadlines',
},
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin, devilry.apps.core.models.abstract_is_examiner.AbstractIsExaminer, devilry.apps.core.models.abstract_is_candidate.AbstractIsCandidate),
),
migrations.CreateModel(
name='Delivery',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('delivery_type', models.PositiveIntegerField(default=0, help_text='0: Electronic delivery, 1: Non-electronic delivery, 2: Alias delivery. Default: 0.', verbose_name='Type of delivery')),
('time_of_delivery', models.DateTimeField(default=datetime.datetime.now, help_text='Holds the date and time the Delivery was uploaded.', verbose_name='Time of delivery')),
('number', models.PositiveIntegerField(help_text='The delivery-number within this assignment-group. This number is automatically incremented within each AssignmentGroup, starting from 1. Always unique within the assignment-group.')),
('successful', models.BooleanField(default=True, help_text='Has the delivery and all its files been uploaded successfully?')),
('alias_delivery', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.Delivery', help_text='Links to another delivery. Used when delivery_type is Alias.', null=True)),
('copy_of', models.ForeignKey(related_name='copies', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.Delivery', help_text='Link to a delivery that this delivery is a copy of. This is set by the copy-method.', null=True)),
('deadline', models.ForeignKey(related_name='deliveries', on_delete=models.CASCADE, verbose_name='Deadline', to='core.Deadline')),
('delivered_by', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.Candidate', help_text='The candidate that delivered this delivery. If this is None, the delivery was made by an administrator for a student.', null=True)),
],
options={
'ordering': ['-time_of_delivery'],
'verbose_name': 'Delivery',
'verbose_name_plural': 'Deliveries',
},
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin, devilry.apps.core.models.abstract_is_candidate.AbstractIsCandidate, devilry.apps.core.models.abstract_is_examiner.AbstractIsExaminer),
),
migrations.CreateModel(
name='DevilryUserProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('full_name', models.CharField(max_length=300, null=True, blank=True)),
('languagecode', models.CharField(max_length=100, null=True, blank=True)),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
),
migrations.CreateModel(
name='Examiner',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
# ('automatic_anonymous_id', models.CharField(default='', help_text='An automatically generated anonymous ID.', max_length=255, blank=True)),
# ('assignmentgroup', models.ForeignKey(related_name='examiners', to='core.AssignmentGroup')),
# ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
('automatic_anonymous_id', models.CharField(default='', help_text='An automatically generated anonymous ID.', max_length=255, blank=True)),
('assignmentgroup', models.ForeignKey(related_name='examiners', to='core.AssignmentGroup', on_delete=models.CASCADE)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
options={
'db_table': 'core_assignmentgroup_examiners',
},
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin),
),
migrations.CreateModel(
name='FileMeta',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
# ('filename', models.CharField(help_text='Name of the file.', max_length=255)),
# ('size', models.IntegerField(help_text='Size of the file in bytes.')),
# ('delivery', models.ForeignKey(related_name='filemetas', to='core.Delivery')),
('filename', models.CharField(help_text='Name of the file.', max_length=255)),
('size', models.IntegerField(help_text='Size of the file in bytes.')),
('delivery', models.ForeignKey(related_name='filemetas', to='core.Delivery', on_delete=models.CASCADE)),
],
options={
'ordering': ['filename'],
'verbose_name': 'FileMeta',
'verbose_name_plural': 'FileMetas',
},
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin, devilry.apps.core.models.abstract_is_examiner.AbstractIsExaminer, devilry.apps.core.models.abstract_is_candidate.AbstractIsCandidate),
),
migrations.CreateModel(
name='GroupInvite',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('sent_datetime', models.DateTimeField(default=datetime.datetime.now)),
('accepted', models.NullBooleanField(default=None)),
('responded_datetime', models.DateTimeField(default=None, null=True, blank=True)),
('group', models.ForeignKey(to='core.AssignmentGroup', on_delete=models.CASCADE)),
('sent_by', models.ForeignKey(related_name='groupinvite_sent_by_set', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
('sent_to', models.ForeignKey(related_name='groupinvite_sent_to_set', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
),
migrations.CreateModel(
name='Node',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('short_name', devilry.apps.core.models.custom_db_fields.ShortNameField(help_text='A short name with at most 20 letters. Can only contain lowercase english letters (a-z), numbers, underscore ("_") and hyphen ("-"). This is used when the the regular name takes to much space. Be VERY careful about changing the short name - it is typically used as an identifier when importing and exporting data from Devilry.', max_length=20, verbose_name='Short name')),
('long_name', devilry.apps.core.models.custom_db_fields.LongNameField(max_length=100, verbose_name='Name', db_index=True)),
('etag', models.DateTimeField(auto_now=True)),
('admins', models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True)),
('parentnode', models.ForeignKey(related_name='child_nodes', blank=True, to='core.Node', null=True, on_delete=models.CASCADE)),
],
options={
'ordering': ['short_name'],
},
bases=(models.Model, devilry.apps.core.models.basenode.BaseNode, devilry.apps.core.models.model_utils.Etag),
),
migrations.CreateModel(
name='Period',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('short_name', devilry.apps.core.models.custom_db_fields.ShortNameField(help_text='A short name with at most 20 letters. Can only contain lowercase english letters (a-z), numbers, underscore ("_") and hyphen ("-"). This is used when the the regular name takes to much space. Be VERY careful about changing the short name - it is typically used as an identifier when importing and exporting data from Devilry.', max_length=20, verbose_name='Short name')),
('long_name', devilry.apps.core.models.custom_db_fields.LongNameField(max_length=100, verbose_name='Name', db_index=True)),
('start_time', models.DateTimeField(help_text='Start time and end time defines when the period is active.')),
('end_time', models.DateTimeField(help_text='Start time and end time defines when the period is active.')),
('etag', models.DateTimeField(auto_now_add=True)),
('admins', models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True)),
],
options={
'ordering': ['short_name'],
},
bases=(models.Model, devilry.apps.core.models.basenode.BaseNode, devilry.apps.core.models.abstract_is_examiner.AbstractIsExaminer, devilry.apps.core.models.abstract_is_candidate.AbstractIsCandidate, devilry.apps.core.models.model_utils.Etag),
),
migrations.CreateModel(
name='PeriodApplicationKeyValue',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
# ('application', models.CharField(help_text='Application identifier. Max 300 chars.', max_length=300, db_index=True)),
# ('key', models.CharField(help_text='Key. Max 300 chars.', max_length=300, db_index=True)),
# ('value', models.TextField(help_text='Value.', null=True, db_index=True, blank=True)),
# ('period', models.ForeignKey(help_text='The period where this metadata belongs.', to='core.Period')),
('application', models.CharField(help_text='Application identifier. Max 300 chars.', max_length=300, db_index=True)),
('key', models.CharField(help_text='Key. Max 300 chars.', max_length=300, db_index=True)),
('value', models.TextField(help_text='Value.', null=True, db_index=True, blank=True)),
('period', models.ForeignKey(help_text='The period where this metadata belongs.', to='core.Period', on_delete=models.CASCADE)),
],
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin),
),
migrations.CreateModel(
name='PointRangeToGrade',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('minimum_points', models.PositiveIntegerField()),
('maximum_points', models.PositiveIntegerField()),
('grade', models.CharField(max_length=12)),
],
options={
'ordering': ['minimum_points'],
},
),
migrations.CreateModel(
name='PointToGradeMap',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('invalid', models.BooleanField(default=True)),
('assignment', models.OneToOneField(to='core.Assignment', on_delete=models.CASCADE)),
],
),
migrations.CreateModel(
name='RelatedExaminer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tags', models.TextField(help_text='Comma-separated list of tags. Each tag is a word with the following letters allowed: a-z, 0-9, ``_`` and ``-``. Each word is separated by a comma, and no whitespace.', null=True, blank=True)),
('period', models.ForeignKey(verbose_name='Period', to='core.Period', help_text='The period.', on_delete=models.CASCADE)),
('user', models.ForeignKey(help_text='The related user.', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
options={
'abstract': False,
},
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin),
),
migrations.CreateModel(
name='RelatedExaminerSyncSystemTag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tag', models.CharField(max_length=15, db_index=True)),
('relatedexaminer', models.ForeignKey(to='core.RelatedExaminer', on_delete=models.CASCADE)),
],
),
migrations.CreateModel(
name='RelatedStudent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tags', models.TextField(help_text='Comma-separated list of tags. Each tag is a word with the following letters allowed: a-z, 0-9, ``_`` and ``-``. Each word is separated by a comma, and no whitespace.', null=True, blank=True)),
('candidate_id', models.CharField(max_length=30, null=True, blank=True)),
('automatic_anonymous_id', models.CharField(default='', max_length=255, editable=False, blank=True)),
('period', models.ForeignKey(verbose_name='Period', to='core.Period', help_text='The period.', on_delete=models.CASCADE)),
('user', models.ForeignKey(help_text='The related user.', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
options={
'abstract': False,
},
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin),
),
migrations.CreateModel(
name='RelatedStudentKeyValue',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
# ('application', models.CharField(help_text='Application identifier. Max 300 chars.', max_length=300, db_index=True)),
# ('key', models.CharField(help_text='Key. Max 300 chars.', max_length=300, db_index=True)),
# ('value', models.TextField(help_text='Value.', null=True, db_index=True, blank=True)),
# ('student_can_read', models.BooleanField(default=False, help_text='Specifies if a student can read the value or not.')),
# ('relatedstudent', models.ForeignKey(to='core.RelatedStudent')),
('application', models.CharField(help_text='Application identifier. Max 300 chars.', max_length=300, db_index=True)),
('key', models.CharField(help_text='Key. Max 300 chars.', max_length=300, db_index=True)),
('value', models.TextField(help_text='Value.', null=True, db_index=True, blank=True)),
('student_can_read', models.BooleanField(default=False, help_text='Specifies if a student can read the value or not.')),
('relatedstudent', models.ForeignKey(to='core.RelatedStudent', on_delete=models.CASCADE)),
],
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin),
),
migrations.CreateModel(
name='RelatedStudentSyncSystemTag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tag', models.CharField(max_length=15, db_index=True)),
('relatedstudent', models.ForeignKey(to='core.RelatedStudent', on_delete=models.CASCADE)),
],
),
migrations.CreateModel(
name='StaticFeedback',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('rendered_view', models.TextField(help_text='A rendered HTML version of the feedback, containing whatever the grade-editor chose to dump in this field.', blank=True)),
('grade', models.CharField(help_text='The rendered grade, such as "A" or "approved".', max_length=12)),
('points', models.PositiveIntegerField(help_text='Number of points given on this feedback.')),
('is_passing_grade', models.BooleanField(default=False, help_text='Is this a passing grade?')),
('save_timestamp', models.DateTimeField(help_text='Time when this feedback was saved. Since StaticFeedback is immutable, this never changes.', null=True, blank=True)),
('delivery', models.ForeignKey(related_name='feedbacks', to='core.Delivery', on_delete=models.CASCADE)),
('saved_by', models.ForeignKey(help_text='The user (examiner) who saved this feedback', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
options={
'ordering': ['-save_timestamp'],
'verbose_name': 'Static feedback',
'verbose_name_plural': 'Static feedbacks',
},
bases=(models.Model, devilry.apps.core.models.abstract_is_admin.AbstractIsAdmin, devilry.apps.core.models.abstract_is_examiner.AbstractIsExaminer, devilry.apps.core.models.abstract_is_candidate.AbstractIsCandidate),
),
migrations.CreateModel(
name='StaticFeedbackFileAttachment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('filename', models.TextField()),
('file', models.FileField(upload_to=devilry.apps.core.models.static_feedback.staticfeedback_fileattachment_upload_to)),
('staticfeedback', models.ForeignKey(related_name='files', to='core.StaticFeedback', on_delete=models.CASCADE)),
],
options={
'ordering': ['filename'],
'verbose_name': 'Static feedback file attachment',
'verbose_name_plural': 'Static feedback file attachments',
},
),
migrations.CreateModel(
name='Subject',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('short_name', devilry.apps.core.models.custom_db_fields.ShortNameField(help_text='A short name with at most 20 letters. Can only contain lowercase english letters (a-z), numbers, underscore ("_") and hyphen ("-"). This is used when the the regular name takes to much space. Be VERY careful about changing the short name - it is typically used as an identifier when importing and exporting data from Devilry.', unique=True, max_length=20, verbose_name='Short name')),
('long_name', devilry.apps.core.models.custom_db_fields.LongNameField(max_length=100, verbose_name='Name', db_index=True)),
('etag', models.DateTimeField(auto_now_add=True)),
('admins', models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True)),
('parentnode', models.ForeignKey(related_name='subjects', to='core.Node', on_delete=models.CASCADE)),
],
options={
'ordering': ['short_name'],
'verbose_name': 'Course',
'verbose_name_plural': 'Courses',
},
bases=(models.Model, devilry.apps.core.models.basenode.BaseNode, devilry.apps.core.models.abstract_is_examiner.AbstractIsExaminer, devilry.apps.core.models.abstract_is_candidate.AbstractIsCandidate, devilry.apps.core.models.model_utils.Etag),
),
migrations.AddField(
model_name='pointrangetograde',
name='point_to_grade_map',
field=models.ForeignKey(to='core.PointToGradeMap', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='period',
name='parentnode',
field=models.ForeignKey(related_name='periods', verbose_name='Subject', to='core.Subject', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='delivery',
name='last_feedback',
field=models.OneToOneField(related_name='latest_feedback_for_delivery', null=True, blank=True, to='core.StaticFeedback', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='assignmentgroup',
name='feedback',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.StaticFeedback'),
),
migrations.AddField(
model_name='assignmentgroup',
name='last_deadline',
field=models.OneToOneField(related_name='last_deadline_for_group', null=True, on_delete=django.db.models.deletion.SET_NULL, blank=True, to='core.Deadline'),
),
migrations.AddField(
model_name='assignmentgroup',
name='parentnode',
field=models.ForeignKey(related_name='assignmentgroups', to='core.Assignment', on_delete=models.CASCADE),
),
migrations.AddField(
model_name='assignment',
name='parentnode',
field=models.ForeignKey(related_name='assignments', verbose_name='Period', to='core.Period', on_delete=models.CASCADE),
),
migrations.AlterUniqueTogether(
name='relatedstudentsyncsystemtag',
unique_together=set([('relatedstudent', 'tag')]),
),
migrations.AlterUniqueTogether(
name='relatedstudentkeyvalue',
unique_together=set([('relatedstudent', 'application', 'key')]),
),
migrations.AlterUniqueTogether(
name='relatedstudent',
unique_together=set([('period', 'user')]),
),
migrations.AlterUniqueTogether(
name='relatedexaminersyncsystemtag',
unique_together=set([('relatedexaminer', 'tag')]),
),
migrations.AlterUniqueTogether(
name='relatedexaminer',
unique_together=set([('period', 'user')]),
),
migrations.AlterUniqueTogether(
name='pointrangetograde',
unique_together=set([('point_to_grade_map', 'grade')]),
),
migrations.AlterUniqueTogether(
name='periodapplicationkeyvalue',
unique_together=set([('period', 'application', 'key')]),
),
migrations.AlterUniqueTogether(
name='period',
unique_together=set([('short_name', 'parentnode')]),
),
migrations.AlterUniqueTogether(
name='node',
unique_together=set([('short_name', 'parentnode')]),
),
migrations.AlterUniqueTogether(
name='filemeta',
unique_together=set([('delivery', 'filename')]),
),
migrations.AlterUniqueTogether(
name='examiner',
unique_together=set([('user', 'assignmentgroup')]),
),
migrations.AlterUniqueTogether(
name='assignmentgrouptag',
unique_together=set([('assignment_group', 'tag')]),
),
migrations.AlterUniqueTogether(
name='assignment',
unique_together=set([('short_name', 'parentnode')]),
),
]
| |
from __future__ import print_function
import numpy as np
import astropy.units as u
_quantity = u.Quantity
import os
from .utils import QuantityOff,ImmutableDict,unitless,grouper
from . import utils
from astropy import units as u
from astropy import constants
from astropy import log
import astropy.table
# maybe an ABC?
class RadiativeTransferApproximator(object):
    """
    Abstract base class for escape-probability radiative-transfer
    approximators (RADEX-like solvers).

    Subclasses must implement the properties that raise
    ``NotImplementedError`` here (``density``, ``molpath``, the ``datapath``
    setter, ``escapeProbGeom``, ``tbg``, ``background_brightness``,
    ``source_brightness``, ``source_brightness_beta``, ``beta``).  This base
    class provides the shared collider/column/abundance bookkeeping and the
    unit conversions.
    """

    # Unit conventions used throughout the class.
    _u_brightness = (u.erg * u.s**-1 * u.cm**-2 * u.Hz**-1 * u.sr**-1)
    _u_sc = u.cm**-2                        # surface (column) density
    _u_cc = u.cm**-3                        # volume density
    _u_gradient = u.cm**-2 / (u.km/u.s) / u.pc
    _u_kms = u.km/u.s
    _u_cms = u.cm/u.s

    @property
    def locked_parameter(self):
        """Which of column/density/abundance is currently held fixed."""
        return self._locked_parameter

    def _lock_param(self, parname):
        self._locked_parameter = parname

    # Canonical capitalization for collider names as they appear in LAMDA
    # data files (keys are the upper-cased forms).
    _all_valid_colliders = {'H2': 'H2',
                            'PH2': 'pH2',
                            'OH2': 'oH2',
                            'E': 'e',
                            'H': 'H',
                            'HE': 'He',
                            'H+': 'H+'}

    @property
    def density(self):
        raise NotImplementedError

    @density.setter
    def density(self, collider_density):
        raise NotImplementedError

    @property
    def valid_colliders(self):
        """Colliders for which the loaded data file has collision rates."""
        return self._valid_colliders

    def _h2_weight_flags(self):
        """
        Return ``(useh2, useoph2)``: whether to count the bulk 'H2' collider
        or the ortho/para split when summing densities.  Exactly one family
        is counted so H2 is not double-counted when both appear.
        """
        vc = [x.lower() for x in self.valid_colliders]
        if 'h2' in vc:
            return 1, 0
        elif 'oh2' in vc or 'ph2' in vc:
            return 0, 1
        else:
            # Weird case: no H2 colliders at all
            return 0, 0

    @property
    def total_density(self):
        """
        The total density *by number of particles*

        The *mass density* can be dramatically different!
        """
        useh2, useoph2 = self._h2_weight_flags()
        weights = {'H2': useh2,
                   'PH2': useoph2,
                   'OH2': useoph2,
                   'E': 1,
                   'H': 1,
                   'He': 1,
                   'H+': 1,}
        return u.Quantity([self.density[k]*weights[k] for k in self.density]).sum()

    @property
    def mass_density(self):
        """
        Total *mass* density: proton mass times the weighted particle count
        (H2 counts as 2, He as 4, electrons as m_e/m_p ~ 1/1836).
        """
        useh2, useoph2 = self._h2_weight_flags()
        weights = {'H2': 2*useh2,
                   'PH2': 2*useoph2,
                   'OH2': 2*useoph2,
                   'E': 1/1836.,
                   'H': 1,
                   'He': 4,
                   'H+': 1,}
        # BUGFIX: summed as a Quantity array (consistent with
        # total_density); the old np.sum(<generator>) relied on a
        # deprecated numpy special case for generators.
        return u.Quantity([self.density[k]*weights[k]
                           for k in self.density]).sum() * constants.m_p

    @property
    def opr(self):
        """Ortho-to-para H2 ratio (KeyError if oH2/pH2 are not set)."""
        return self.density['OH2']/self.density['PH2']

    @property
    def oprh2(self):
        """Alias for :attr:`opr`."""
        return self.opr

    @property
    def species(self):
        return self._species

    @species.setter
    def species(self, species):
        # No-op when the species is unchanged (avoids re-reading the file).
        if hasattr(self, '_species') and self._species == species:
            return
        self._species = species
        try:
            self.molpath = os.path.join(self.datapath, species+'.dat')
        except IOError:
            # The molpath setter (subclass) raises IOError when the LAMDA
            # file is missing; fetch it and retry.
            log.warning("Did not find data file for species {0} "
                        "in path {1}. Downloading it.".format(species,
                                                              self.datapath))
            utils.get_datafile(species, self.datapath)
            self.molpath = os.path.join(self.datapath, species+'.dat')
        self._valid_colliders = utils.get_colliders(self.molpath)
        vc = [x.lower() for x in self._valid_colliders]
        if 'h2' in vc and ('oh2' in vc or 'ph2' in vc):
            log.warning("oH2/pH2 and h2 are both in the datafile: "
                        "The resulting density/total density are invalid.")

    @property
    def molpath(self):
        raise NotImplementedError

    @molpath.setter
    def molpath(self, molfile):
        raise NotImplementedError

    @property
    def datapath(self):
        """Directory containing the molecular data file."""
        return os.path.dirname(self.molpath)

    @datapath.setter
    def datapath(self, radat):
        raise NotImplementedError

    @property
    def escapeProbGeom(self):
        raise NotImplementedError

    @escapeProbGeom.setter
    def escapeProbGeom(self, escapeProbGeom):
        raise NotImplementedError

    @property
    def column(self):
        """Alias for :attr:`column_per_bin`."""
        return self.column_per_bin

    @column.setter
    def column(self, value):
        self.column_per_bin = value

    @property
    def column_per_kms_perpc(self):
        """Column density per unit velocity gradient (N / (dv * length))."""
        return self.column_per_bin / self.deltav

    @column_per_kms_perpc.setter
    def column_per_kms_perpc(self, cddv):
        cddv = u.Quantity(cddv, self._u_gradient)
        # BUGFIX: ``length`` is a property returning a Quantity; the
        # original called it (``self.length()``) which raises TypeError.
        self.column_per_bin = cddv * u.Quantity(self.deltav, self._u_kms) * self.length

    @property
    def abundance(self):
        return self._abundance

    @abundance.setter
    def abundance(self, abund):
        # Setting the abundance updates whichever of column/density is NOT
        # the locked parameter, so that N = n * L * X stays consistent.
        # NOTE(review): assumes ``self._is_locked`` was initialized by the
        # subclass constructor — confirm.
        self._abundance = abund
        if not self._is_locked:
            assert self.locked_parameter in ('column', 'abundance', 'density')
            if self.locked_parameter == 'abundance':  # self is locked, still need to update
                if hasattr(self, '_previous_locked_parameter'):
                    self._lock_param(self._previous_locked_parameter)
                else:
                    self._lock_param('density')  # choose arbitrarily
            self._is_locked = True
            if self.locked_parameter == 'column':
                dens = self.column_per_bin / self.length / abund
                self.density = dens
            elif self.locked_parameter == 'density':
                col = self.total_density*self.length*abund
                self.column_per_bin = u.Quantity(col, u.cm**-2)
            self._lock_param('abundance')
            self._is_locked = False
        # Sanity check: n / (N/L) must equal 1/X after the update.
        np.testing.assert_almost_equal((self.total_density / (self.column /
                                                              self.length)),
                                       1/self.abundance)

    @property
    def deltav(self):
        return self._deltav

    @deltav.setter
    def deltav(self, dv):
        self._deltav = u.Quantity(dv, self._u_kms)

    @property
    def length(self):
        """ Hard-coded, assumed length-scale """
        return u.Quantity(1, u.pc)

    @property
    def tbg(self):
        raise NotImplementedError

    def _validate_colliders(self):
        """
        Check whether the density of at least one collider in the associated
        LAMDA data file is nonzero

        Raises ValueError if no valid collider has density > 0, or if a
        collider with density > 0 has no collision rates in the file.
        """
        valid_colliders = [x.lower() for x in self.valid_colliders]
        density = self.density
        OK = False
        matched_colliders = []
        for collider in valid_colliders:
            if unitless(density[self._all_valid_colliders[collider.upper()]]) > 0:
                OK = True
                matched_colliders.append(collider.lower())
        if not OK:
            raise ValueError("The colliders in the data file {0} ".format(self.molpath)
                             + "have density 0.")
        bad_colliders = []
        for collider in density:
            if (unitless(density[collider]) > 0
                    and (collider.lower() not in valid_colliders)):
                if (collider.lower() in ('oh2', 'ph2') and 'h2' in
                        matched_colliders):
                    # All is OK: we're allowed to have mismatches of this sort
                    continue
                elif (collider.lower() == 'h2' and ('oh2' in matched_colliders
                                                    or 'ph2' in
                                                    matched_colliders)):
                    # again, all OK
                    continue
                bad_colliders.append(collider)
                OK = False
        if not OK:
            raise ValueError("There are colliders with specified densities >0 "
                             "that do not have corresponding collision rates."
                             " The bad colliders are {0}".format(bad_colliders))

    @property
    def source_area(self):
        # Returns None when no source area has been set.
        if hasattr(self, '_source_area'):
            return self._source_area

    @source_area.setter
    def source_area(self, source_area):
        self._source_area = source_area

    @property
    def source_line_surfbrightness(self):
        """Background-subtracted line surface brightness."""
        return self.source_brightness - self.background_brightness

    def line_brightness_temperature(self, beamsize):
        """
        Return the line surface brightness in kelvins for a given beam area
        (Assumes the frequencies are rest frequencies)
        """
        # because each line has a different frequency, have to loop it
        try:
            return u.Quantity([x.to(u.K, u.brightness_temperature(beamsize, f)).value
                               for x, f in zip(self.line_flux_density, self.frequency)
                               ],
                              unit=u.K)
        except AttributeError as ex:
            raise NotImplementedError("line brightness temperature is not implemented "
                                      "without reference to astropy units yet")

    @property
    def source_line_brightness_temperature(self):
        """
        The surface brightness of the source assuming it is observed with a
        beam matched to its size and it has ff=1

        (this is consistent with the online RADEX calculator)
        """
        # because each line has a different frequency, have to loop it
        return ((self.source_line_surfbrightness*u.sr).
                to(u.K, u.brightness_temperature(1*u.sr,
                                                 self.frequency)))

    @property
    def T_B(self):
        """Alias for :attr:`source_line_brightness_temperature`."""
        return self.source_line_brightness_temperature

    @property
    def background_brightness(self):
        raise NotImplementedError

    @property
    def flux_density(self):
        """
        Convert the source surface brightness to a flux density by specifying
        the emitting area of the source (in steradian-equivalent units)

        This is the non-background-subtracted version
        """
        if not self.source_area:
            raise AttributeError("Need to specify a source area in order to compute the flux density")
        return self.source_brightness * self.source_area

    @property
    def line_flux_density(self):
        """
        Background-subtracted version of flux_density
        """
        if not self.source_area:
            raise AttributeError("Need to specify a source area in order to compute the flux density")
        return self.source_line_surfbrightness * self.source_area

    @property
    def source_brightness(self):
        """
        RADEX compat? (check)
        """
        raise NotImplementedError

    @property
    def source_brightness_beta(self):
        raise NotImplementedError

    @property
    def beta(self):
        raise NotImplementedError

    def get_table(self):
        """Assemble the per-transition results into an astropy Table."""
        columns = [
            astropy.table.Column(name='Tex', data=self.tex, unit=u.K),
            astropy.table.Column(name='tau', data=self.tau, unit=''),
            astropy.table.Column(name='frequency', data=self.frequency,
                                 unit=u.GHz),
            astropy.table.Column(name='upperstateenergy',
                                 data=self.upperstateenergy, unit=u.K),
            astropy.table.Column(name='upperlevel',
                                 data=self.upperlevelnumber,
                                 unit=''),
            astropy.table.Column(name='lowerlevel',
                                 data=self.lowerlevelnumber,
                                 unit=''),
            astropy.table.Column(name='upperlevelpop',
                                 data=self.upperlevelpop,
                                 unit=''),
            astropy.table.Column(name='lowerlevelpop',
                                 data=self.lowerlevelpop,
                                 unit=''),
            astropy.table.Column(name='brightness',
                                 data=self.source_line_surfbrightness),
            astropy.table.Column(name='T_B', data=self.T_B),  # T_B is pre-masked
        ]
        if self.source_area:
            # BUGFIX: the original indexed with an undefined name ``mask``
            # (NameError); the flux column is now added unmasked.
            columns.append(astropy.table.Column(name='flux',
                                                data=self.line_flux_density))
        T = astropy.table.Table(columns)
        return T

    def get_synthspec(self, fmin, fmax, npts=1000, **kwargs):
        """
        Generate a synthetic spectrum of the selected molecule over the
        specified frequency range. This task is good for quick-looks but has a
        lot of overhead for generating models and should not be used for
        fitting (unless you have a conveniently small amount of data)

        Parameters
        ----------
        fmin : `~astropy.units.Quantity`
        fmax : `~astropy.units.Quantity`
            Frequency-equivalent quantity
        """
        # NOTE(review): ``synthspec`` is not imported in this module's
        # visible imports — confirm it is provided elsewhere, otherwise
        # this raises NameError at call time.
        wcs = synthspec.FrequencyArray(fmin, fmax, npts)
        S = synthspec.SyntheticSpectrum.from_RADEX(wcs, self, **kwargs)
        return S
| |
# uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: _threading.py
"""Thread module emulating a subset of Java's threading model."""
import sys as _sys
try:
import thread
except ImportError:
del _sys.modules[__name__]
raise
import warnings
from time import time as _time, sleep as _sleep
from traceback import format_exc as _format_exc
from collections import deque
# Public API re-exported by this module (mirrors the stdlib threading API).
__all__ = [
    'activeCount', 'active_count', 'Condition', 'currentThread',
    'current_thread', 'enumerate', 'Event',
    'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread',
    'Timer', 'setprofile', 'settrace', 'local', 'stack_size']

# Grab the low-level primitives from the C `thread` module, then delete the
# module reference so users go through this wrapper instead.
_start_new_thread = thread.start_new_thread
_allocate_lock = thread.allocate_lock
_get_ident = thread.get_ident
ThreadError = thread.error
del thread

# sys.exc_clear is deprecated in Python 2.6+; silence the warning triggered
# by the cleanup in Thread.__bootstrap_inner.
warnings.filterwarnings('ignore', category=DeprecationWarning, module='threading', message='sys.exc_clear')

# Debug-verbosity switch retained from the original threading.py; the
# stripped _Verbose class below makes it a no-op.
_VERBOSE = False
class _Verbose(object):
    """No-op stand-in for threading.py's debug-verbosity helper.

    The original module routed debug traces through this class when built
    with verbosity support; this build keeps the interface (a constructor
    flag plus ``_note``) and discards all output.
    """

    def __init__(self, verbose=None):
        # The verbosity flag is accepted for interface compatibility only.
        pass

    def _note(self, *args):
        # Debug messages are silently dropped.
        pass
# Module-global profile/trace hooks, installed on every new thread during
# its bootstrap (see Thread.__bootstrap_inner).
_profile_hook = None
_trace_hook = None
def setprofile(func):
    """Install *func* as the profile hook for all threads started hereafter."""
    global _profile_hook
    _profile_hook = func
def settrace(func):
    """Install *func* as the trace hook for all threads started hereafter."""
    global _trace_hook
    _trace_hook = func
# A Lock is simply the raw (non-reentrant) lock type from the C thread module.
Lock = _allocate_lock
def RLock(*args, **kwargs):
    """Factory: return a new reentrant lock (an ``_RLock`` instance)."""
    return _RLock(*args, **kwargs)
class _RLock(_Verbose):
    """Reentrant lock: may be acquired repeatedly by the thread that owns it."""

    def __init__(self, verbose=None):
        _Verbose.__init__(self, verbose)
        self.__block = _allocate_lock()   # underlying non-reentrant lock
        self.__owner = None               # ident of the owning thread, if any
        self.__count = 0                  # recursion depth held by the owner
        return

    def __repr__(self):
        owner = self.__owner
        try:
            # Show the owner's thread name when it is a registered thread.
            owner = _active[owner].name
        except KeyError:
            pass
        return '<%s owner=%r count=%d>' % (
            self.__class__.__name__, owner, self.__count)

    def acquire(self, blocking=1):
        me = _get_ident()
        if self.__owner == me:
            # Re-entrant acquire by the current owner: bump depth, no block.
            self.__count = self.__count + 1
            return 1
        rc = self.__block.acquire(blocking)
        if rc:
            self.__owner = me
            self.__count = 1
        return rc

    __enter__ = acquire

    def release(self):
        if self.__owner != _get_ident():
            raise RuntimeError('cannot release un-acquired lock')
        self.__count = count = self.__count - 1
        if not count:
            # Depth hit zero: give up ownership and wake one waiter.
            self.__owner = None
            self.__block.release()
        return

    def __exit__(self, t, v, tb):
        self.release()

    # _acquire_restore/_release_save/_is_owned implement the protocol that
    # _Condition uses to fully release a recursive lock around a wait() and
    # re-establish the saved recursion state afterwards.
    def _acquire_restore(self, count_owner):
        count, owner = count_owner
        self.__block.acquire()
        self.__count = count
        self.__owner = owner

    def _release_save(self):
        count = self.__count
        self.__count = 0
        owner = self.__owner
        self.__owner = None
        self.__block.release()
        return (
            count, owner)

    def _is_owned(self):
        return self.__owner == _get_ident()
def Condition(*args, **kwargs):
    """Factory: return a new condition variable (a ``_Condition`` instance)."""
    return _Condition(*args, **kwargs)
class _Condition(_Verbose):
    """Condition variable: lets threads wait until notified.

    Wraps a lock (a fresh RLock by default); callers must hold the lock
    when invoking wait()/notify().
    """

    def __init__(self, lock=None, verbose=None):
        _Verbose.__init__(self, verbose)
        if lock is None:
            lock = RLock()
        self.__lock = lock
        # Export the lock's acquire/release methods directly.
        self.acquire = lock.acquire
        self.release = lock.release
        # If the lock implements the save/restore protocol (RLock does),
        # borrow its versions; otherwise the plain-lock fallbacks defined
        # below remain in effect.
        try:
            self._release_save = lock._release_save
        except AttributeError:
            pass
        try:
            self._acquire_restore = lock._acquire_restore
        except AttributeError:
            pass
        try:
            self._is_owned = lock._is_owned
        except AttributeError:
            pass
        self.__waiters = []   # one private lock per currently-waiting thread
        return

    def __enter__(self):
        return self.__lock.__enter__()

    def __exit__(self, *args):
        return self.__lock.__exit__(*args)

    def __repr__(self):
        return '<Condition(%s, %d)>' % (self.__lock, len(self.__waiters))

    # Fallbacks used when the wrapped lock is a plain (non-reentrant) lock.
    def _release_save(self):
        self.__lock.release()

    def _acquire_restore(self, x):
        self.__lock.acquire()

    def _is_owned(self):
        # A plain lock is "owned" if a non-blocking acquire fails.
        if self.__lock.acquire(0):
            self.__lock.release()
            return False
        else:
            return True

    def wait(self, timeout=None):
        if not self._is_owned():
            raise RuntimeError('cannot wait on un-acquired lock')
        # Each waiter blocks on its own freshly-acquired private lock until
        # notify() releases it.
        waiter = _allocate_lock()
        waiter.acquire()
        self.__waiters.append(waiter)
        saved_state = self._release_save()
        try:
            if timeout is None:
                waiter.acquire()
            else:
                # Poll with exponential backoff (capped at 50 ms), because a
                # raw lock acquire cannot time out in Python 2.
                endtime = _time() + timeout
                delay = 0.0005
                while True:
                    gotit = waiter.acquire(0)
                    if gotit:
                        break
                    remaining = endtime - _time()
                    if remaining <= 0:
                        break
                    delay = min(delay * 2, remaining, 0.05)
                    _sleep(delay)
                if not gotit:
                    # Timed out: withdraw from the waiter list; notify() may
                    # have removed us concurrently, hence the ValueError.
                    try:
                        self.__waiters.remove(waiter)
                    except ValueError:
                        pass
        finally:
            # Always restore the caller's lock state, even on timeout.
            self._acquire_restore(saved_state)
        return

    def notify(self, n=1):
        if not self._is_owned():
            raise RuntimeError('cannot notify on un-acquired lock')
        __waiters = self.__waiters
        waiters = __waiters[:n]
        if not waiters:
            return
        self._note('%s.notify(): notifying %d waiter%s', self, n, n != 1 and 's' or '')
        for waiter in waiters:
            # Releasing the private lock wakes the corresponding waiter.
            waiter.release()
            try:
                __waiters.remove(waiter)
            except ValueError:
                pass

    def notifyAll(self):
        self.notify(len(self.__waiters))

    notify_all = notifyAll
def Semaphore(*args, **kwargs):
    """Factory: return a new counting semaphore (a ``_Semaphore`` instance)."""
    return _Semaphore(*args, **kwargs)
class _Semaphore(_Verbose):
    """Counting semaphore implemented with a Condition around a plain Lock."""

    def __init__(self, value=1, verbose=None):
        if value < 0:
            raise ValueError('semaphore initial value must be >= 0')
        _Verbose.__init__(self, verbose)
        self.__cond = Condition(Lock())
        self.__value = value   # number of currently-available units

    def acquire(self, blocking=1):
        rc = False
        self.__cond.acquire()
        # while/else: the else branch runs only when the loop exits because
        # the counter is positive (i.e. not via the non-blocking break).
        while self.__value == 0:
            if not blocking:
                break
            self.__cond.wait()
        else:
            self.__value = self.__value - 1
            rc = True
        self.__cond.release()
        return rc

    __enter__ = acquire

    def release(self):
        self.__cond.acquire()
        self.__value = self.__value + 1
        self.__cond.notify()
        self.__cond.release()

    def __exit__(self, t, v, tb):
        self.release()
def BoundedSemaphore(*args, **kwargs):
    """Factory: return a new bounded semaphore (a ``_BoundedSemaphore``)."""
    return _BoundedSemaphore(*args, **kwargs)
class _BoundedSemaphore(_Semaphore):
    """Semaphore that checks that # releases is <= # acquires"""

    def __init__(self, value=1, verbose=None):
        _Semaphore.__init__(self, value, verbose)
        self._initial_value = value   # ceiling the counter may not exceed

    def release(self):
        """Release a unit; raise ValueError if released above the initial value."""
        if self._Semaphore__value >= self._initial_value:
            # FIX: use call-style raise, consistent with the rest of this
            # module (the 'raise X, msg' comma form is Python-2-only syntax).
            raise ValueError('Semaphore released too many times')
        return _Semaphore.release(self)
def Event(*args, **kwargs):
    """Factory: return a new event object (an ``_Event`` instance)."""
    return _Event(*args, **kwargs)
class _Event(_Verbose):
    """Event: a boolean flag that threads can set, clear and wait on."""

    def __init__(self, verbose=None):
        _Verbose.__init__(self, verbose)
        self.__cond = Condition(Lock())
        self.__flag = False   # the event state

    def _reset_internal_locks(self):
        # Re-create the condition's lock after a fork() (see Thread).
        self.__cond.__init__()

    def isSet(self):
        return self.__flag

    is_set = isSet

    def set(self):
        self.__cond.acquire()
        try:
            self.__flag = True
            # Wake everyone currently blocked in wait().
            self.__cond.notify_all()
        finally:
            self.__cond.release()

    def clear(self):
        self.__cond.acquire()
        try:
            self.__flag = False
        finally:
            self.__cond.release()

    def wait(self, timeout=None):
        self.__cond.acquire()
        try:
            if not self.__flag:
                self.__cond.wait(timeout)
            # May still be False if the wait timed out.
            return self.__flag
        finally:
            self.__cond.release()
# Monotonic counter feeding auto-generated thread names ("Thread-1", ...).
_counter = 0

def _newname(template='Thread-%d'):
    """Return the next auto-generated thread name for *template*."""
    global _counter
    _counter += 1
    return template % _counter
# Thread registry: _active maps thread ident -> Thread for running threads;
# _limbo holds threads between start() and their bootstrap.  Both dicts are
# guarded by _active_limbo_lock.
_active_limbo_lock = _allocate_lock()
_active = {}
_limbo = {}
class Thread(_Verbose):
__initialized = False
__exc_info = _sys.exc_info
__exc_clear = _sys.exc_clear
def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, verbose=None):
_Verbose.__init__(self, verbose)
if kwargs is None:
kwargs = {}
self.__target = target
self.__name = str(name or _newname())
self.__args = args
self.__kwargs = kwargs
self.__daemonic = self._set_daemon()
self.__ident = None
self.__started = Event()
self.__stopped = False
self.__block = Condition(Lock())
self.__initialized = True
self.__stderr = _sys.stderr
return
def _reset_internal_locks(self):
if hasattr(self, '_Thread__block'):
self.__block.__init__()
self.__started._reset_internal_locks()
@property
def _block(self):
return self.__block
def _set_daemon(self):
return current_thread().daemon
def __repr__(self):
status = 'initial'
if self.__started.is_set():
status = 'started'
if self.__stopped:
status = 'stopped'
if self.__daemonic:
status += ' daemon'
if self.__ident is not None:
status += ' %s' % self.__ident
return '<%s(%s, %s)>' % (self.__class__.__name__, self.__name, status)
def start(self):
global _active_limbo_lock
if not self.__initialized:
raise RuntimeError('thread.__init__() not called')
if self.__started.is_set():
raise RuntimeError('threads can only be started once')
with _active_limbo_lock:
_limbo[self] = self
try:
_start_new_thread(self.__bootstrap, ())
except Exception:
with _active_limbo_lock:
del _limbo[self]
raise
self.__started.wait()
def run(self):
try:
if self.__target:
self.__target(*self.__args, **self.__kwargs)
finally:
del self.__target
del self.__args
del self.__kwargs
def __bootstrap(self):
try:
self.__bootstrap_inner()
except:
if self.__daemonic and _sys is None:
return
raise
return
def _set_ident(self):
self.__ident = _get_ident()
def __bootstrap_inner(self):
try:
self._set_ident()
self.__started.set()
with _active_limbo_lock:
_active[self.__ident] = self
del _limbo[self]
if _trace_hook:
self._note('%s.__bootstrap(): registering trace hook', self)
_sys.settrace(_trace_hook)
if _profile_hook:
self._note('%s.__bootstrap(): registering profile hook', self)
_sys.setprofile(_profile_hook)
try:
try:
self.run()
except SystemExit:
pass
except:
if _sys:
_sys.stderr.write('Exception in thread %s:\n%s\n' % (
self.name, _format_exc()))
else:
exc_type, exc_value, exc_tb = self.__exc_info()
try:
print >> self.__stderr, 'Exception in thread ' + self.name + ' (most likely raised during interpreter shutdown):'
print >> self.__stderr, 'Traceback (most recent call last):'
while exc_tb:
print >> self.__stderr, ' File "%s", line %s, in %s' % (
exc_tb.tb_frame.f_code.co_filename,
exc_tb.tb_lineno,
exc_tb.tb_frame.f_code.co_name)
exc_tb = exc_tb.tb_next
print >> self.__stderr, '%s: %s' % (exc_type, exc_value)
finally:
del exc_type
del exc_value
del exc_tb
finally:
self.__exc_clear()
finally:
with _active_limbo_lock:
self.__stop()
try:
del _active[_get_ident()]
except:
pass
def __stop(self):
self.__block.acquire()
self.__stopped = True
self.__block.notify_all()
self.__block.release()
def __delete(self):
"""Remove current thread from the dict of currently running threads."""
try:
with _active_limbo_lock:
del _active[_get_ident()]
except KeyError:
if 'dummy_threading' not in _sys.modules:
raise
def join(self, timeout=None):
if not self.__initialized:
raise RuntimeError('Thread.__init__() not called')
if not self.__started.is_set():
raise RuntimeError('cannot join thread before it is started')
if self is current_thread():
raise RuntimeError('cannot join current thread')
self.__block.acquire()
try:
if timeout is None:
while not self.__stopped:
self.__block.wait()
else:
deadline = _time() + timeout
while not self.__stopped:
delay = deadline - _time()
if delay <= 0:
break
self.__block.wait(delay)
finally:
self.__block.release()
return
@property
def name(self):
return self.__name
@name.setter
def name(self, name):
self.__name = str(name)
@property
def ident(self):
return self.__ident
def isAlive(self):
return self.__started.is_set() and not self.__stopped
is_alive = isAlive
@property
def daemon(self):
return self.__daemonic
@daemon.setter
def daemon(self, daemonic):
if not self.__initialized:
raise RuntimeError('Thread.__init__() not called')
if self.__started.is_set():
raise RuntimeError('cannot set daemon status of active thread')
self.__daemonic = daemonic
def isDaemon(self):
    # Legacy camelCase API; prefer the `daemon` property.
    return self.daemon

def setDaemon(self, daemonic):
    # Legacy camelCase API; prefer assigning the `daemon` property.
    self.daemon = daemonic

def getName(self):
    # Legacy camelCase API; prefer the `name` property.
    return self.name

def setName(self, name):
    # Legacy camelCase API; prefer assigning the `name` property.
    self.name = name
def Timer(*args, **kwargs):
    """Factory for _Timer instances, kept callable like a class for
    API compatibility."""
    timer = _Timer(*args, **kwargs)
    return timer
class _Timer(Thread):
    """Call a function after a specified number of seconds:

    t = Timer(30.0, f, args=[], kwargs={})
    t.start()
    t.cancel()  # stop the timer's action if it's still waiting
    """

    def __init__(self, interval, function, args=None, kwargs=None):
        Thread.__init__(self)
        self.interval = interval
        self.function = function
        # Fix the mutable-default-argument pitfall: the original signature
        # (args=[], kwargs={}) made every timer created without explicit
        # arguments share one list/dict object, so mutations leaked across
        # instances.  None sentinels are backward-compatible for callers.
        self.args = args if args is not None else []
        self.kwargs = kwargs if kwargs is not None else {}
        self.finished = Event()

    def cancel(self):
        """Stop the timer if it hasn't finished yet."""
        self.finished.set()

    def run(self):
        # Sleep out the interval; cancel() sets `finished` early, which
        # suppresses the callback.
        self.finished.wait(self.interval)
        if not self.finished.is_set():
            self.function(*self.args, **self.kwargs)
        self.finished.set()
class _MainThread(Thread):
    """Thread object representing the main interpreter thread."""

    def __init__(self):
        Thread.__init__(self, name='MainThread')
        self._Thread__started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[_get_ident()] = self

    def _set_daemon(self):
        # The main thread is never daemonic.
        return False

    def _exitfunc(self):
        # Interpreter shutdown hook: mark the main thread stopped, then
        # wait for every remaining non-daemon thread before deregistering.
        # (The original contained a dead `if t: pass` no-op, removed here.)
        self._Thread__stop()
        t = _pickSomeNonDaemonThread()
        while t:
            t.join()
            t = _pickSomeNonDaemonThread()
        self._Thread__delete()
def _pickSomeNonDaemonThread():
    """Return an arbitrary live non-daemon thread, or None if none remain.

    Note: `enumerate` here is this module's thread-enumeration function,
    not the builtin.
    """
    return next(
        (candidate for candidate in enumerate()
         if not candidate.daemon and candidate.is_alive()),
        None)
class _DummyThread(Thread):
    """Stand-in Thread object for threads not created through this module."""

    def __init__(self):
        Thread.__init__(self, name=_newname('Dummy-%d'))
        # Dummy threads can never be joined, so the Condition used for
        # join() bookkeeping is dropped entirely.
        del self._Thread__block
        self._Thread__started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[_get_ident()] = self

    def _set_daemon(self):
        # Foreign threads are treated as daemonic: we cannot wait on them.
        return True

    def join(self, timeout=None):
        # There is no way to detect a foreign thread's termination, so
        # join() is a no-op for dummy threads.
        pass
def currentThread():
    """Return the Thread for the caller, or a _DummyThread if the caller's
    thread was not created through this module."""
    try:
        return _active[_get_ident()]
    except KeyError:
        # Foreign thread: create (and register) a dummy record for it.
        return _DummyThread()

current_thread = currentThread  # PEP 8-style alias
def activeCount():
    """Return the number of Thread objects alive (started plus in-limbo)."""
    with _active_limbo_lock:
        return len(_active) + len(_limbo)

active_count = activeCount  # PEP 8-style alias
def _enumerate():
    # Lock-free variant of enumerate(); for internal use when the caller
    # already holds _active_limbo_lock or during interpreter shutdown.
    return _active.values() + _limbo.values()
def enumerate():
    """Return a list of all Thread objects currently alive.

    NOTE: deliberately shadows the builtin enumerate() inside this module.
    """
    with _active_limbo_lock:
        return _active.values() + _limbo.values()
from thread import stack_size

# Create the main-thread record and expose its exit hook; the interpreter
# invokes _shutdown during finalization to wait for non-daemon threads.
_shutdown = _MainThread()._exitfunc

# Thread-local storage: prefer the fast C implementation, falling back to
# the pure-Python one when unavailable.
try:
    from thread import _local as local
except ImportError:
    from _threading_local import local
def _after_fork():
    # Called in the child process after os.fork(): only the forking thread
    # survives, so the module's bookkeeping must be rebuilt from scratch.
    # Replace _active_limbo_lock outright — it may have been held by a
    # thread that no longer exists in the child, which would deadlock.
    global _active_limbo_lock
    _active_limbo_lock = _allocate_lock()
    new_active = {}
    current = current_thread()
    with _active_limbo_lock:
        for thread in _active.itervalues():
            if thread is current:
                # The surviving thread gets a fresh ident, and fresh
                # internal locks (parent lock state is meaningless here).
                ident = _get_ident()
                thread._Thread__ident = ident
                if hasattr(thread, '_reset_internal_locks'):
                    thread._reset_internal_locks()
                new_active[ident] = thread
            else:
                # Every other thread is gone in the child; mark it stopped.
                thread._Thread__stopped = True
        _limbo.clear()
        _active.clear()
        _active.update(new_active)
def _test():
    """Module self-test: a bounded queue exercised by producer/consumer
    threads (Python 2 syntax; uses the module's _Verbose/_note plumbing)."""

    class BoundedQueue(_Verbose):
        # Monitor-style bounded buffer: one RLock shared by two condition
        # variables — `rc` signals "not empty", `wc` signals "not full".
        def __init__(self, limit):
            _Verbose.__init__(self)
            self.mon = RLock()
            self.rc = Condition(self.mon)
            self.wc = Condition(self.mon)
            self.limit = limit
            self.queue = deque()

        def put(self, item):
            self.mon.acquire()
            while len(self.queue) >= self.limit:
                self._note('put(%s): queue full', item)
                self.wc.wait()
            self.queue.append(item)
            self._note('put(%s): appended, length now %d', item, len(self.queue))
            self.rc.notify()
            self.mon.release()

        def get(self):
            self.mon.acquire()
            while not self.queue:
                self._note('get(): queue empty')
                self.rc.wait()
            item = self.queue.popleft()
            self._note('get(): got %s, %d left', item, len(self.queue))
            self.wc.notify()
            self.mon.release()
            return item

    class ProducerThread(Thread):
        # Pushes `quota` items onto the queue with tiny random pauses.
        def __init__(self, queue, quota):
            Thread.__init__(self, name='Producer')
            self.queue = queue
            self.quota = quota

        def run(self):
            from random import random
            counter = 0
            while counter < self.quota:
                counter = counter + 1
                self.queue.put('%s.%d' % (self.name, counter))
                _sleep(random() * 1e-05)

    class ConsumerThread(Thread):
        # Pops `count` items off the queue, printing each (Python 2 print).
        def __init__(self, queue, count):
            Thread.__init__(self, name='Consumer')
            self.queue = queue
            self.count = count

        def run(self):
            while self.count > 0:
                item = self.queue.get()
                print item
                self.count = self.count - 1

    NP = 3   # number of producer threads
    QL = 4   # queue capacity limit
    NI = 5   # items produced per producer
    Q = BoundedQueue(QL)
    P = []
    for i in range(NP):
        t = ProducerThread(Q, NI)
        t.name = 'Producer-%d' % (i + 1)
        P.append(t)
    # One consumer drains everything the producers will make.
    C = ConsumerThread(Q, NI * NP)
    for t in P:
        t.start()
        _sleep(1e-06)
    C.start()
    for t in P:
        t.join()
    C.join()
# Run the self-test when executed directly as a script.
if __name__ == '__main__':
    _test()
| |
from __future__ import unicode_literals
import warnings
from django.contrib.admin import TabularInline, ModelAdmin
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.auth.models import User, Permission
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase, override_settings, RequestFactory
from django.utils.encoding import force_text
# local test models
from .admin import InnerInline, site as admin_site
from .models import (Holder, Inner, Holder2, Inner2, Holder3, Inner3, Person,
OutfitItem, Fashionista, Teacher, Parent, Child, Author, Book, Profile,
ProfileCollection, ParentModelWithCustomPk, ChildModel1, ChildModel2,
Sighting, Novel, Chapter, FootNote, BinaryTree, SomeParentModel,
SomeChildModel, Poll, Question, Inner4Stacked, Inner4Tabular, Holder4)
INLINE_CHANGELINK_HTML = 'class="inlinechangelink">Change</a>'
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_inlines.urls")
class TestInline(TestCase):
    """Integration tests for admin inline formsets on add/change views.

    Assertions are made against exact HTML fragments emitted by the admin
    templates, so the expected strings must track the template output.
    """
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # A Holder with one Inner gives the basic change view used below.
        holder = Holder(dummy=13)
        holder.save()
        Inner(dummy=42, holder=holder).save()
        self.change_url = '/admin/admin_inlines/holder/%i/' % holder.id
        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)
        self.factory = RequestFactory()

    def tearDown(self):
        self.client.logout()

    def test_can_delete(self):
        """
        can_delete should be passed to inlineformset factory.
        """
        response = self.client.get(self.change_url)
        inner_formset = response.context['inline_admin_formsets'][0].formset
        expected = InnerInline.can_delete
        actual = inner_formset.can_delete
        self.assertEqual(expected, actual, 'can_delete must be equal')

    def test_readonly_stacked_inline_label(self):
        """Bug #13174."""
        holder = Holder.objects.create(dummy=42)
        Inner.objects.create(holder=holder, dummy=42, readonly='')
        response = self.client.get('/admin/admin_inlines/holder/%i/'
                                   % holder.id)
        self.assertContains(response, '<label>Inner readonly label:</label>')

    def test_many_to_many_inlines(self):
        "Autogenerated many-to-many inlines are displayed correctly (#13407)"
        response = self.client.get('/admin/admin_inlines/author/add/')
        # The heading for the m2m inline block uses the right text
        self.assertContains(response, '<h2>Author-book relationships</h2>')
        # The "add another" label is correct
        self.assertContains(response, 'Add another Author-book relationship')
        # The '+' is dropped from the autogenerated form prefix (Author_books+)
        self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')

    def test_inline_primary(self):
        person = Person.objects.create(firstname='Imelda')
        item = OutfitItem.objects.create(name='Shoes')
        # Imelda likes shoes, but can't carry her own bags.
        data = {
            'shoppingweakness_set-TOTAL_FORMS': 1,
            'shoppingweakness_set-INITIAL_FORMS': 0,
            'shoppingweakness_set-MAX_NUM_FORMS': 0,
            '_save': 'Save',
            'person': person.id,
            'max_weight': 0,
            'shoppingweakness_set-0-item': item.id,
        }
        response = self.client.post('/admin/admin_inlines/fashionista/add/', data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1)

    def test_tabular_non_field_errors(self):
        """
        Ensure that non_field_errors are displayed correctly, including the
        right value for colspan. Refs #13510.
        """
        data = {
            'title_set-TOTAL_FORMS': 1,
            'title_set-INITIAL_FORMS': 0,
            'title_set-MAX_NUM_FORMS': 0,
            '_save': 'Save',
            'title_set-0-title1': 'a title',
            'title_set-0-title2': 'a different title',
        }
        response = self.client.post('/admin/admin_inlines/titlecollection/add/', data)
        # Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox.
        self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>The two titles must be the same</li></ul></td></tr>')

    def test_no_parent_callable_lookup(self):
        """Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
        # Identically named callable isn't present in the parent ModelAdmin,
        # rendering of the add view shouldn't explode
        response = self.client.get('/admin/admin_inlines/novel/add/')
        self.assertEqual(response.status_code, 200)
        # View should have the child inlines section
        self.assertContains(response, '<div class="inline-group" id="chapter_set-group">')

    def test_callable_lookup(self):
        """Admin inline should invoke local callable when its name is listed in readonly_fields"""
        response = self.client.get('/admin/admin_inlines/poll/add/')
        self.assertEqual(response.status_code, 200)
        # Add parent object view should have the child inlines section
        self.assertContains(response, '<div class="inline-group" id="question_set-group">')
        # The right callable should be used for the inline readonly_fields
        # column cells
        self.assertContains(response, '<p>Callable in QuestionInline</p>')

    def test_help_text(self):
        """
        Ensure that the inlines' model field help texts are displayed when
        using both the stacked and tabular layouts.
        Ref #8190.
        """
        response = self.client.get('/admin/admin_inlines/holder4/add/')
        self.assertContains(response, '<p class="help">Awesome stacked help text is awesome.</p>', 4)
        self.assertContains(response, '<img src="/static/admin/img/icon-unknown.gif" class="help help-tooltip" width="10" height="10" alt="(Awesome tabular help text is awesome.)" title="Awesome tabular help text is awesome." />', 1)
        # ReadOnly fields
        response = self.client.get('/admin/admin_inlines/capofamiglia/add/')
        self.assertContains(response, '<img src="/static/admin/img/icon-unknown.gif" class="help help-tooltip" width="10" height="10" alt="(Help text for ReadOnlyInline)" title="Help text for ReadOnlyInline" />', 1)

    def test_inline_hidden_field_no_column(self):
        """#18263 -- Make sure hidden fields don't get a column in tabular inlines"""
        parent = SomeParentModel.objects.create(name='a')
        SomeChildModel.objects.create(name='b', position='0', parent=parent)
        SomeChildModel.objects.create(name='c', position='1', parent=parent)
        response = self.client.get('/admin/admin_inlines/someparentmodel/%s/' % parent.pk)
        self.assertNotContains(response, '<td class="field-position">')
        self.assertContains(response, (
            '<input id="id_somechildmodel_set-1-position" '
            'name="somechildmodel_set-1-position" type="hidden" value="1" />'))

    def test_non_related_name_inline(self):
        """
        Ensure that multiple inlines with related_name='+' have correct form
        prefixes. Bug #16838.
        """
        response = self.client.get('/admin/admin_inlines/capofamiglia/add/')
        self.assertContains(response,
            '<input type="hidden" name="-1-0-id" id="id_-1-0-id" />', html=True)
        self.assertContains(response,
            '<input type="hidden" name="-1-0-capo_famiglia" id="id_-1-0-capo_famiglia" />', html=True)
        self.assertContains(response,
            '<input id="id_-1-0-name" type="text" class="vTextField" '
            'name="-1-0-name" maxlength="100" />', html=True)
        self.assertContains(response,
            '<input type="hidden" name="-2-0-id" id="id_-2-0-id" />', html=True)
        self.assertContains(response,
            '<input type="hidden" name="-2-0-capo_famiglia" id="id_-2-0-capo_famiglia" />', html=True)
        self.assertContains(response,
            '<input id="id_-2-0-name" type="text" class="vTextField" '
            'name="-2-0-name" maxlength="100" />', html=True)

    @override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
    def test_localize_pk_shortcut(self):
        """
        Ensure that the "View on Site" link is correct for locales that use
        thousand separators
        """
        holder = Holder.objects.create(pk=123456789, dummy=42)
        inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='')
        response = self.client.get('/admin/admin_inlines/holder/%i/' % holder.id)
        inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk)
        self.assertContains(response, inner_shortcut)

    def test_custom_pk_shortcut(self):
        """
        Ensure that the "View on Site" link is correct for models with a
        custom primary key field. Bug #18433.
        """
        parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
        child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
        child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
        response = self.client.get('/admin/admin_inlines/parentmodelwithcustompk/foo/')
        child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk)
        child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk)
        self.assertContains(response, child1_shortcut)
        self.assertContains(response, child2_shortcut)

    def test_create_inlines_on_inherited_model(self):
        """
        Ensure that an object can be created with inlines when it inherits
        another class. Bug #19524.
        """
        data = {
            'name': 'Martian',
            'sighting_set-TOTAL_FORMS': 1,
            'sighting_set-INITIAL_FORMS': 0,
            'sighting_set-MAX_NUM_FORMS': 0,
            'sighting_set-0-place': 'Zone 51',
            '_save': 'Save',
        }
        response = self.client.post('/admin/admin_inlines/extraterrestrial/add/', data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Sighting.objects.filter(et__name='Martian').count(), 1)

    def test_custom_get_extra_form(self):
        bt_head = BinaryTree.objects.create(name="Tree Head")
        BinaryTree.objects.create(name="First Child", parent=bt_head)
        # The maximum number of forms should respect 'get_max_num' on the
        # ModelAdmin
        max_forms_input = '<input id="id_binarytree_set-MAX_NUM_FORMS" name="binarytree_set-MAX_NUM_FORMS" type="hidden" value="%d" />'
        # The total number of forms will remain the same in either case
        total_forms_hidden = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="2" />'
        response = self.client.get('/admin/admin_inlines/binarytree/add/')
        self.assertContains(response, max_forms_input % 3)
        self.assertContains(response, total_forms_hidden)
        response = self.client.get("/admin/admin_inlines/binarytree/%d/" % bt_head.id)
        self.assertContains(response, max_forms_input % 2)
        self.assertContains(response, total_forms_hidden)

    def test_min_num(self):
        """
        Ensure that min_num and extra determine number of forms.
        """
        class MinNumInline(TabularInline):
            model = BinaryTree
            min_num = 2
            extra = 3

        modeladmin = ModelAdmin(BinaryTree, admin_site)
        modeladmin.inlines = [MinNumInline]
        min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="2" />'
        total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="5" />'
        request = self.factory.get('/admin/admin_inlines/binarytree/add/')
        request.user = User(username='super', is_superuser=True)
        response = modeladmin.changeform_view(request)
        self.assertContains(response, min_forms)
        self.assertContains(response, total_forms)

    def test_custom_min_num(self):
        """
        Ensure that get_min_num is called and used correctly.
        """
        bt_head = BinaryTree.objects.create(name="Tree Head")
        BinaryTree.objects.create(name="First Child", parent=bt_head)

        class MinNumInline(TabularInline):
            model = BinaryTree
            extra = 3

            def get_min_num(self, request, obj=None, **kwargs):
                if obj:
                    return 5
                return 2

        modeladmin = ModelAdmin(BinaryTree, admin_site)
        modeladmin.inlines = [MinNumInline]
        min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="%d" />'
        total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="%d" />'
        request = self.factory.get('/admin/admin_inlines/binarytree/add/')
        request.user = User(username='super', is_superuser=True)
        response = modeladmin.changeform_view(request)
        self.assertContains(response, min_forms % 2)
        self.assertContains(response, total_forms % 5)
        request = self.factory.get("/admin/admin_inlines/binarytree/%d/" % bt_head.id)
        request.user = User(username='super', is_superuser=True)
        response = modeladmin.changeform_view(request, object_id=str(bt_head.id))
        self.assertContains(response, min_forms % 5)
        self.assertContains(response, total_forms % 8)

    def test_inline_nonauto_noneditable_pk(self):
        response = self.client.get('/admin/admin_inlines/author/add/')
        self.assertContains(response,
            '<input id="id_nonautopkbook_set-0-rand_pk" name="nonautopkbook_set-0-rand_pk" type="hidden" />',
            html=True)
        self.assertContains(response,
            '<input id="id_nonautopkbook_set-2-0-rand_pk" name="nonautopkbook_set-2-0-rand_pk" type="hidden" />',
            html=True)

    def test_inline_editable_pk(self):
        response = self.client.get('/admin/admin_inlines/author/add/')
        self.assertContains(response,
            '<input class="vIntegerField" id="id_editablepkbook_set-0-manual_pk" name="editablepkbook_set-0-manual_pk" type="text" />',
            html=True, count=1)
        self.assertContains(response,
            '<input class="vIntegerField" id="id_editablepkbook_set-2-0-manual_pk" name="editablepkbook_set-2-0-manual_pk" type="text" />',
            html=True, count=1)

    def test_stacked_inline_edit_form_contains_has_original_class(self):
        holder = Holder.objects.create(dummy=1)
        holder.inner_set.create(dummy=1)
        response = self.client.get('/admin/admin_inlines/holder/%s/' % holder.pk)
        # Existing rows get the has_original class; extra (empty) rows don't.
        self.assertContains(
            response,
            '<div class="inline-related has_original" id="inner_set-0">',
            count=1
        )
        self.assertContains(
            response,
            '<div class="inline-related" id="inner_set-1">',
            count=1
        )

    def test_inlines_show_change_link_registered(self):
        "Inlines `show_change_link` for registered models when enabled."
        holder = Holder4.objects.create(dummy=1)
        item1 = Inner4Stacked.objects.create(dummy=1, holder=holder)
        item2 = Inner4Tabular.objects.create(dummy=1, holder=holder)
        items = (
            ('inner4stacked', item1.pk),
            ('inner4tabular', item2.pk),
        )
        response = self.client.get('/admin/admin_inlines/holder4/%s/' % holder.pk)
        self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
        for model, pk in items:
            url = '/admin/admin_inlines/%s/%s/' % (model, pk)
            self.assertContains(response, '<a href="%s" %s' % (url, INLINE_CHANGELINK_HTML))

    def test_inlines_show_change_link_unregistered(self):
        "Inlines `show_change_link` disabled for unregistered models."
        parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
        ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
        ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
        response = self.client.get('/admin/admin_inlines/parentmodelwithcustompk/foo/')
        self.assertFalse(response.context['inline_admin_formset'].opts.has_registered_model)
        self.assertNotContains(response, INLINE_CHANGELINK_HTML)

    def test_tabular_inline_show_change_link_false_registered(self):
        "Inlines `show_change_link` disabled by default."
        poll = Poll.objects.create(name="New poll")
        Question.objects.create(poll=poll)
        response = self.client.get('/admin/admin_inlines/poll/%s/' % poll.pk)
        self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
        self.assertNotContains(response, INLINE_CHANGELINK_HTML)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_inlines.urls")
class TestInlineMedia(TestCase):
    """Verify which JavaScript media assets each inline configuration loads."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        self.assertEqual(
            self.client.login(username='super', password='secret'), True)

    def tearDown(self):
        self.client.logout()

    def test_inline_media_only_base(self):
        # Only the parent ModelAdmin declares media.
        owner = Holder(dummy=13)
        owner.save()
        Inner(dummy=42, holder=owner).save()
        response = self.client.get('/admin/admin_inlines/holder/%i/' % owner.id)
        self.assertContains(response, 'my_awesome_admin_scripts.js')

    def test_inline_media_only_inline(self):
        # Only the inline declares media.
        owner = Holder3(dummy=13)
        owner.save()
        Inner3(dummy=42, holder=owner).save()
        response = self.client.get('/admin/admin_inlines/holder3/%i/' % owner.id)
        self.assertContains(response, 'my_awesome_inline_scripts.js')

    def test_all_inline_media(self):
        # Both parent and inline declare media; both sets must be present.
        owner = Holder2(dummy=13)
        owner.save()
        Inner2(dummy=42, holder=owner).save()
        response = self.client.get('/admin/admin_inlines/holder2/%i/' % owner.id)
        self.assertContains(response, 'my_awesome_admin_scripts.js')
        self.assertContains(response, 'my_awesome_inline_scripts.js')
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlineAdminForm(TestCase):

    def test_immutable_content_type(self):
        """Regression for #9362
        The problem depends only on InlineAdminForm and its "original"
        argument, so we can safely set the other arguments to None/{}. We just
        need to check that the content_type argument of Child isn't altered by
        the internals of the inline form."""
        sally = Teacher.objects.create(name='Sally')
        john = Parent.objects.create(name='John')
        joe = Child.objects.create(name='Joe', teacher=sally, parent=john)
        iaf = InlineAdminForm(None, None, {}, {}, joe)
        parent_ct = ContentType.objects.get_for_model(Parent)
        self.assertEqual(iaf.original.content_type, parent_ct)

    def test_original_content_type_id_deprecated(self):
        """
        #23444 -- Verify a warning is raised when accessing
        `original_content_type_id` attribute of `InlineAdminForm` object.
        """
        # iaf has no original object -> attribute access raises, but must
        # still emit the deprecation warning first.
        iaf = InlineAdminForm(None, None, {}, {}, None)
        poll = Poll.objects.create(name="poll")
        iaf2 = InlineAdminForm(None, None, {}, {}, poll)
        poll_ct = ContentType.objects.get_for_model(Poll)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.filterwarnings('always')
            with self.assertRaises(AttributeError):
                iaf.original_content_type_id
            # The warning recorded during the failed access carries the
            # deprecation message.
            msg = force_text(recorded.pop().message)
            self.assertEqual(
                msg,
                'InlineAdminForm.original_content_type_id is deprecated and will be '
                'removed in Django 2.0. If you were using this attribute to construct '
                'the "view on site" URL, use the `absolute_url` attribute instead.'
            )
            self.assertEqual(iaf2.original_content_type_id, poll_ct.id)
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_inlines.urls")
class TestInlineProtectedOnDelete(TestCase):
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)

    def tearDown(self):
        self.client.logout()

    def test_deleting_inline_with_protected_delete_does_not_validate(self):
        lotr = Novel.objects.create(name='Lord of the rings')
        chapter = Chapter.objects.create(novel=lotr, name='Many Meetings')
        foot_note = FootNote.objects.create(chapter=chapter, note='yadda yadda')
        change_url = '/admin/admin_inlines/novel/%i/' % lotr.id
        # NOTE(review): this GET's response is discarded — presumably it just
        # exercises the change view before the POST; confirm it is intended.
        response = self.client.get(change_url)
        data = {
            'name': lotr.name,
            'chapter_set-TOTAL_FORMS': 1,
            'chapter_set-INITIAL_FORMS': 1,
            'chapter_set-MAX_NUM_FORMS': 1000,
            '_save': 'Save',
            'chapter_set-0-id': chapter.id,
            'chapter_set-0-name': chapter.name,
            'chapter_set-0-novel': lotr.id,
            'chapter_set-0-DELETE': 'on'
        }
        # Deleting the chapter is blocked by the protected FootNote, so the
        # form re-renders (200) with the protection error message.
        response = self.client.post(change_url, data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "Deleting chapter %s would require deleting "
                            "the following protected related objects: foot note %s"
                            % (chapter, foot_note))
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlinePermissions(TestCase):
"""
Make sure the admin respects permissions for objects that are edited
inline. Refs #8060.
"""
def setUp(self):
self.user = User(username='admin')
self.user.is_staff = True
self.user.is_active = True
self.user.set_password('secret')
self.user.save()
self.author_ct = ContentType.objects.get_for_model(Author)
self.holder_ct = ContentType.objects.get_for_model(Holder2)
self.book_ct = ContentType.objects.get_for_model(Book)
self.inner_ct = ContentType.objects.get_for_model(Inner2)
# User always has permissions to add and change Authors, and Holders,
# the main (parent) models of the inlines. Permissions on the inlines
# vary per test.
permission = Permission.objects.get(codename='add_author', content_type=self.author_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_author', content_type=self.author_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='add_holder2', content_type=self.holder_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_holder2', content_type=self.holder_ct)
self.user.user_permissions.add(permission)
author = Author.objects.create(pk=1, name='The Author')
book = author.books.create(name='The inline Book')
self.author_change_url = '/admin/admin_inlines/author/%i/' % author.id
# Get the ID of the automatically created intermediate model for the Author-Book m2m
author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=book)
self.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk
holder = Holder2.objects.create(dummy=13)
inner2 = Inner2.objects.create(dummy=42, holder=holder)
self.holder_change_url = '/admin/admin_inlines/holder2/%i/' % holder.id
self.inner2_id = inner2.id
self.assertEqual(
self.client.login(username='admin', password='secret'),
True)
def tearDown(self):
self.client.logout()
def test_inline_add_m2m_noperm(self):
response = self.client.get('/admin/admin_inlines/author/add/')
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_noperm(self):
response = self.client.get('/admin/admin_inlines/holder2/add/')
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_change_m2m_noperm(self):
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_change_fk_noperm(self):
response = self.client.get(self.holder_change_url)
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_add_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get('/admin/admin_inlines/author/add/')
# No change permission on Books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get('/admin/admin_inlines/holder2/add/')
# Add permission on inner2s, so we get the inline
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)
def test_inline_change_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
self.assertNotContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_m2m_change_perm(self):
permission = Permission.objects.get(codename='change_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# We have change perm on books, so we can add/change/delete inlines
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(response, 'Add another Author-book relationship')
self.assertContains(response, '<input type="hidden" id="id_Author_books-TOTAL_FORMS" '
'value="4" name="Author_books-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_Author_books-0-id" '
'value="%i" name="Author_books-0-id" />' % self.author_book_auto_m2m_intermediate_id, html=True)
self.assertContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add permission on inner2s, so we can add but not modify existing
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
# 3 extra forms only, not the existing instance form
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertNotContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
def test_inline_change_fk_change_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change permission on inner2s, so we can change existing but not add new
self.assertContains(response, '<h2>Inner2s</h2>')
# Just the one form for existing instances
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
# max-num 0 means we can't add new ones
self.assertContains(response, '<input type="hidden" id="id_inner2_set-MAX_NUM_FORMS" '
'value="0" name="inner2_set-MAX_NUM_FORMS" />', html=True)
def test_inline_change_fk_add_change_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add/change perm, so we can add new and change existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance and three extra for new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
def test_inline_change_fk_change_del_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change/delete perm on inner2s, so we can change/delete existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance only, no new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
def test_inline_change_fk_all_perms(self):
    """With add + change + delete permission on Inner2, the inline shows
    the existing form (deletable) plus extra blank forms for new objects.
    """
    for codename in ('add_inner2', 'change_inner2', 'delete_inner2'):
        perm = Permission.objects.get(codename=codename,
                                      content_type=self.inner_ct)
        self.user.user_permissions.add(perm)
    response = self.client.get(self.holder_change_url)
    # All perms on inner2s, so we can add/change/delete
    self.assertContains(response, '<h2>Inner2s</h2>')
    # One form for existing instance only, three for new
    self.assertContains(
        response,
        '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
        'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
    self.assertContains(
        response,
        '<input type="hidden" id="id_inner2_set-0-id" '
        'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
    self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF="admin_inlines.urls")
class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase):
    """Browser-driven tests for dynamic inline formsets ("Add another"
    links, inline delete links, alternating row styling), run against
    Firefox via Selenium. Subclasses only swap ``webdriver_class`` to run
    the same suite against other browsers.
    """
    available_apps = ['admin_inlines'] + AdminSeleniumWebDriverTestCase.available_apps
    fixtures = ['admin-views-users.xml']
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def _inner4stacked_rows_length(self, inline_id):
        # Number of dynamic rows currently rendered in the given stacked
        # inline group. (Was a duplicated `rows_length = lambda: ...`
        # assignment in two tests -- PEP 8 E731.)
        return len(self.selenium.find_elements_by_css_selector(
            '%s .dynamic-inner4stacked_set' % inline_id))

    def test_add_stackeds(self):
        """
        Ensure that the "Add another XXX" link correctly adds items to the
        stacked formset.
        """
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            '/admin/admin_inlines/holder4/add/'))
        inline_id = '#inner4stacked_set-group'
        # Three extra forms are rendered initially.
        self.assertEqual(self._inner4stacked_rows_length(inline_id), 3)
        add_button = self.selenium.find_element_by_link_text(
            'Add another Inner4 stacked')
        add_button.click()
        self.assertEqual(self._inner4stacked_rows_length(inline_id), 4)

    def test_delete_stackeds(self):
        """Deleting dynamically added stacked rows restores the initial
        number of extra forms."""
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            '/admin/admin_inlines/holder4/add/'))
        inline_id = '#inner4stacked_set-group'
        self.assertEqual(self._inner4stacked_rows_length(inline_id), 3)
        add_button = self.selenium.find_element_by_link_text(
            'Add another Inner4 stacked')
        add_button.click()
        add_button.click()
        self.assertEqual(self._inner4stacked_rows_length(inline_id), 5,
                         msg="sanity check")
        # Only dynamically added rows get a delete link, so this removes
        # exactly the two rows added above.
        for delete_link in self.selenium.find_elements_by_css_selector(
                '%s .inline-deletelink' % inline_id):
            delete_link.click()
        self.assertEqual(self._inner4stacked_rows_length(inline_id), 3)

    def test_add_inlines(self):
        """
        Ensure that the "Add another XXX" link correctly adds items to the
        inline form.
        """
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            '/admin/admin_inlines/profilecollection/add/'))
        # Check that there's only one inline to start with and that it has the
        # correct ID.
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')), 1)
        self.assertEqual(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')[0].get_attribute('id'),
            'profile_set-0')
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]')), 1)
        # Add an inline
        self.selenium.find_element_by_link_text('Add another Profile').click()
        # Check that the inline has been added, that it has the right id, and
        # that it contains the right fields.
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')), 2)
        self.assertEqual(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1')
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]')), 1)
        # Let's add another one to be sure
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')), 3)
        self.assertEqual(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2')
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]')), 1)
        # Enter some data and click 'Save'
        self.selenium.find_element_by_name('profile_set-0-first_name').send_keys('0 first name 1')
        self.selenium.find_element_by_name('profile_set-0-last_name').send_keys('0 last name 2')
        self.selenium.find_element_by_name('profile_set-1-first_name').send_keys('1 first name 1')
        self.selenium.find_element_by_name('profile_set-1-last_name').send_keys('1 last name 2')
        self.selenium.find_element_by_name('profile_set-2-first_name').send_keys('2 first name 1')
        self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        # Check that the objects have been created in the database
        self.assertEqual(ProfileCollection.objects.all().count(), 1)
        self.assertEqual(Profile.objects.all().count(), 3)

    def test_delete_inlines(self):
        """Clicking inline delete links removes rows and re-sequences the
        remaining form IDs."""
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            '/admin/admin_inlines/profilecollection/add/'))
        # Add a few inlines
        for _ in range(4):
            self.selenium.find_element_by_link_text('Add another Profile').click()
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '#profile_set-group table tr.dynamic-profile_set')), 5)
        for i in range(5):
            self.assertEqual(len(self.selenium.find_elements_by_css_selector(
                'form#profilecollection_form tr.dynamic-profile_set#profile_set-%d' % i)), 1)
        # Click on a few delete buttons
        self.selenium.find_element_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a').click()
        self.selenium.find_element_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a').click()
        # Verify that they're gone and that the IDs have been re-sequenced
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '#profile_set-group table tr.dynamic-profile_set')), 3)
        for i in range(3):
            self.assertEqual(len(self.selenium.find_elements_by_css_selector(
                'form#profilecollection_form tr.dynamic-profile_set#profile_set-%d' % i)), 1)

    def test_alternating_rows(self):
        """Dynamically added tabular inline rows receive alternating
        row1/row2 CSS classes."""
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            '/admin/admin_inlines/profilecollection/add/'))
        # Add a few inlines
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.selenium.find_element_by_link_text('Add another Profile').click()
        row_selector = 'form#profilecollection_form tr.dynamic-profile_set'
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            "%s.row1" % row_selector)), 2, msg="Expect two row1 styled rows")
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            "%s.row2" % row_selector)), 1, msg="Expect one row2 styled row")
class SeleniumChromeTests(SeleniumFirefoxTests):
    """Re-run the inherited Selenium inline tests against Chrome."""
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumIETests(SeleniumFirefoxTests):
    """Re-run the inherited Selenium inline tests against Internet Explorer."""
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| |
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import btp_annotation231
except ImportError:
btp_annotation231 = sys.modules["onshape_client.oas.models.btp_annotation231"]
try:
from onshape_client.oas.models import btp_argument_declaration232
except ImportError:
btp_argument_declaration232 = sys.modules[
"onshape_client.oas.models.btp_argument_declaration232"
]
try:
from onshape_client.oas.models import btp_identifier8
except ImportError:
btp_identifier8 = sys.modules["onshape_client.oas.models.btp_identifier8"]
try:
from onshape_client.oas.models import btp_operator_declaration264_all_of
except ImportError:
btp_operator_declaration264_all_of = sys.modules[
"onshape_client.oas.models.btp_operator_declaration264_all_of"
]
try:
from onshape_client.oas.models import btp_procedure_declaration_base266
except ImportError:
btp_procedure_declaration_base266 = sys.modules[
"onshape_client.oas.models.btp_procedure_declaration_base266"
]
try:
from onshape_client.oas.models import btp_space10
except ImportError:
btp_space10 = sys.modules["onshape_client.oas.models.btp_space10"]
try:
from onshape_client.oas.models import btp_statement269
except ImportError:
btp_statement269 = sys.modules["onshape_client.oas.models.btp_statement269"]
try:
from onshape_client.oas.models import btp_statement_block271
except ImportError:
btp_statement_block271 = sys.modules[
"onshape_client.oas.models.btp_statement_block271"
]
try:
from onshape_client.oas.models import btp_type_name290
except ImportError:
btp_type_name290 = sys.modules["onshape_client.oas.models.btp_type_name290"]
class BTPOperatorDeclaration264(ModelComposed):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # Closed enum value sets for string-typed properties; the key is the
    # tuple path to the attribute within the model.
    allowed_values = {
        ("operator",): {
            "NONE": "NONE",
            "PLUS": "PLUS",
            "MINUS": "MINUS",
            "TIMES": "TIMES",
            "DIVIDE": "DIVIDE",
            "MODULUS": "MODULUS",
            "POWER": "POWER",
            "NEGATE": "NEGATE",
            "OR": "OR",
            "AND": "AND",
            "NOT": "NOT",
            "EQUAL_TO": "EQUAL_TO",
            "NOT_EQUAL_TO": "NOT_EQUAL_TO",
            "GREATER": "GREATER",
            "LESS": "LESS",
            "GREATER_OR_EQUAL": "GREATER_OR_EQUAL",
            "LESS_OR_EQUAL": "LESS_OR_EQUAL",
            "CONCATENATE": "CONCATENATE",
            "CONDITIONAL": "CONDITIONAL",
        },
        ("documentation_type",): {
            "FUNCTION": "FUNCTION",
            "PREDICATE": "PREDICATE",
            "CONSTANT": "CONSTANT",
            "ENUM": "ENUM",
            "USER_TYPE": "USER_TYPE",
            "FEATURE_DEFINITION": "FEATURE_DEFINITION",
            "FILE_HEADER": "FILE_HEADER",
            "UNDOCUMENTABLE": "UNDOCUMENTABLE",
            "UNKNOWN": "UNKNOWN",
        },
    }

    # No length/range/regex validations are declared for this model.
    validations = {}

    # No schema is declared for additional (undeclared) properties.
    additional_properties_type = None

    @staticmethod
    def openapi_types():
        """
        This must be a class method so a model may have properties that are
        of type self, this ensures that we don't create a cyclic import

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "bt_type": (str,),  # noqa: E501
            "operator": (str,),  # noqa: E501
            "space_after_operator": (btp_space10.BTPSpace10,),  # noqa: E501
            "space_before_operator": (btp_space10.BTPSpace10,),  # noqa: E501
            "atomic": (bool,),  # noqa: E501
            "documentation_type": (str,),  # noqa: E501
            "end_source_location": (int,),  # noqa: E501
            "node_id": (str,),  # noqa: E501
            "short_descriptor": (str,),  # noqa: E501
            "space_after": (btp_space10.BTPSpace10,),  # noqa: E501
            "space_before": (btp_space10.BTPSpace10,),  # noqa: E501
            "space_default": (bool,),  # noqa: E501
            "start_source_location": (int,),  # noqa: E501
            "annotation": (btp_annotation231.BTPAnnotation231,),  # noqa: E501
            "arguments_to_document": (
                [btp_argument_declaration232.BTPArgumentDeclaration232],
            ),  # noqa: E501
            "deprecated": (bool,),  # noqa: E501
            "deprecated_explanation": (str,),  # noqa: E501
            "for_export": (bool,),  # noqa: E501
            "space_after_export": (btp_space10.BTPSpace10,),  # noqa: E501
            "symbol_name": (btp_identifier8.BTPIdentifier8,),  # noqa: E501
            "arguments": (
                [btp_argument_declaration232.BTPArgumentDeclaration232],
            ),  # noqa: E501
            "body": (btp_statement_block271.BTPStatementBlock271,),  # noqa: E501
            "precondition": (btp_statement269.BTPStatement269,),  # noqa: E501
            "return_type": (btp_type_name290.BTPTypeName290,),  # noqa: E501
            "space_after_arglist": (btp_space10.BTPSpace10,),  # noqa: E501
            "space_in_empty_list": (btp_space10.BTPSpace10,),  # noqa: E501
        }

    @staticmethod
    def discriminator():
        # This composed model declares no discriminator property.
        return None

    # Maps python attribute names to the camelCase keys used in the JSON
    # wire format.
    attribute_map = {
        "bt_type": "btType",  # noqa: E501
        "operator": "operator",  # noqa: E501
        "space_after_operator": "spaceAfterOperator",  # noqa: E501
        "space_before_operator": "spaceBeforeOperator",  # noqa: E501
        "atomic": "atomic",  # noqa: E501
        "documentation_type": "documentationType",  # noqa: E501
        "end_source_location": "endSourceLocation",  # noqa: E501
        "node_id": "nodeId",  # noqa: E501
        "short_descriptor": "shortDescriptor",  # noqa: E501
        "space_after": "spaceAfter",  # noqa: E501
        "space_before": "spaceBefore",  # noqa: E501
        "space_default": "spaceDefault",  # noqa: E501
        "start_source_location": "startSourceLocation",  # noqa: E501
        "annotation": "annotation",  # noqa: E501
        "arguments_to_document": "argumentsToDocument",  # noqa: E501
        "deprecated": "deprecated",  # noqa: E501
        "deprecated_explanation": "deprecatedExplanation",  # noqa: E501
        "for_export": "forExport",  # noqa: E501
        "space_after_export": "spaceAfterExport",  # noqa: E501
        "symbol_name": "symbolName",  # noqa: E501
        "arguments": "arguments",  # noqa: E501
        "body": "body",  # noqa: E501
        "precondition": "precondition",  # noqa: E501
        "return_type": "returnType",  # noqa: E501
        "space_after_arglist": "spaceAfterArglist",  # noqa: E501
        "space_in_empty_list": "spaceInEmptyList",  # noqa: E501
    }

    # Internal bookkeeping attributes set directly on the instance (not
    # routed through the composed-model attribute machinery).
    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_from_server",
            "_path_to_item",
            "_configuration",
            "_composed_instances",
            "_var_name_to_model_instances",
            "_additional_properties_model_instances",
        ]
    )

    def __init__(
        self,
        _check_type=True,
        _from_server=False,
        _path_to_item=(),
        _configuration=None,
        **kwargs
    ):  # noqa: E501
        """btp_operator_declaration264.BTPOperatorDeclaration264 - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _from_server (bool): True if the data is from the server
                                False if the data is from the client (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            bt_type (str): [optional]  # noqa: E501
            operator (str): [optional]  # noqa: E501
            space_after_operator (btp_space10.BTPSpace10): [optional]  # noqa: E501
            space_before_operator (btp_space10.BTPSpace10): [optional]  # noqa: E501
            atomic (bool): [optional]  # noqa: E501
            documentation_type (str): [optional]  # noqa: E501
            end_source_location (int): [optional]  # noqa: E501
            node_id (str): [optional]  # noqa: E501
            short_descriptor (str): [optional]  # noqa: E501
            space_after (btp_space10.BTPSpace10): [optional]  # noqa: E501
            space_before (btp_space10.BTPSpace10): [optional]  # noqa: E501
            space_default (bool): [optional]  # noqa: E501
            start_source_location (int): [optional]  # noqa: E501
            annotation (btp_annotation231.BTPAnnotation231): [optional]  # noqa: E501
            arguments_to_document ([btp_argument_declaration232.BTPArgumentDeclaration232]): [optional]  # noqa: E501
            deprecated (bool): [optional]  # noqa: E501
            deprecated_explanation (str): [optional]  # noqa: E501
            for_export (bool): [optional]  # noqa: E501
            space_after_export (btp_space10.BTPSpace10): [optional]  # noqa: E501
            symbol_name (btp_identifier8.BTPIdentifier8): [optional]  # noqa: E501
            arguments ([btp_argument_declaration232.BTPArgumentDeclaration232]): [optional]  # noqa: E501
            body (btp_statement_block271.BTPStatementBlock271): [optional]  # noqa: E501
            precondition (btp_statement269.BTPStatement269): [optional]  # noqa: E501
            return_type (btp_type_name290.BTPTypeName290): [optional]  # noqa: E501
            space_after_arglist (btp_space10.BTPSpace10): [optional]  # noqa: E501
            space_in_empty_list (btp_space10.BTPSpace10): [optional]  # noqa: E501
        """
        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration

        constant_args = {
            "_check_type": _check_type,
            "_path_to_item": _path_to_item,
            "_from_server": _from_server,
            "_configuration": _configuration,
        }
        # This model declares no required (constructor-enforced) properties.
        required_args = {}
        # remove args whose value is Null because they are unset
        required_arg_names = list(required_args.keys())
        for required_arg_name in required_arg_names:
            if required_args[required_arg_name] is nulltype.Null:
                del required_args[required_arg_name]
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        # Distribute the supplied values across the composed (allOf) schema
        # instances and collect any args no schema claimed.
        composed_info = validate_get_composed_info(constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]

        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in six.iteritems(kwargs):
            if (
                var_name in unused_args
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and not self._additional_properties_model_instances
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)

    @staticmethod
    def _composed_schemas():
        # we need this here to make our import statements work
        # we must store _composed_schemas in here so the code is only run
        # when we invoke this method. If we kept this at the class
        # level we would get an error beause the class level
        # code would be run when this module is imported, and these composed
        # classes don't exist yet because their module has not finished
        # loading
        return {
            "anyOf": [],
            "allOf": [
                btp_operator_declaration264_all_of.BTPOperatorDeclaration264AllOf,
                btp_procedure_declaration_base266.BTPProcedureDeclarationBase266,
            ],
            "oneOf": [],
        }
| |
from __future__ import absolute_import, division, print_function, with_statement
from __future__ import unicode_literals
from deepstreampy.constants import topic as topic_constants
from deepstreampy.constants import actions as action_constants
from deepstreampy.constants import event as event_constants
from deepstreampy.constants import connection_state
from deepstreampy.message import message_parser, message_builder
from deepstreampy.utils import ResubscribeNotifier, SingleNotifier, Listener
from deepstreampy.utils import str_types
from deepstreampy.constants import merge_strategies
from deepstreampy import jsonpath
from pyee import EventEmitter
from tornado import gen, concurrent
import json
from functools import partial
from copy import deepcopy
# Internal emitter event names. ALL_EVENT fires on any change to a record's
# data (see Record._apply_change / _complete_change). The ENTRY_* constants
# presumably correspond to List entry changes -- their consumers are not
# visible in this portion of the file.
ALL_EVENT = 'ALL_EVENT'
ENTRY_ADDED_EVENT = 'ENTRY_ADDED_EVENT'
ENTRY_REMOVED_EVENT = 'ENTRY_REMOVED_EVENT'
ENTRY_MOVED_EVENT = 'ENTRY_MOVED_EVENT'
class Record(EventEmitter, object):
def __init__(self, name, connection, options, client):
super(Record, self).__init__()
self.name = name
self.usages = 0
self._connection = connection
self._client = client
self._options = options
self._has_provider = False
self._is_ready = False
self._is_destroyed = False
self._data = {}
self._version = None
self._old_value = None
self._old_path_values = None
self._queued_method_calls = list()
self._write_callbacks = {}
self.merge_strategy = merge_strategies.remote_wins
self._emitter = EventEmitter()
if 'merge_strategy' in options:
self.merge_strategy = options['merge_strategy']
self._resubscribe_notifier = ResubscribeNotifier(
client, self._send_read)
record_read_ack_timeout = options.get("recordReadAckTimeout", 15)
self._read_ack_timeout = client.io_loop.call_later(
record_read_ack_timeout,
partial(self._on_timeout, event_constants.ACK_TIMEOUT))
record_read_timeout = options.get("recordReadTimeout", 15)
self._read_timeout = client.io_loop.call_later(
record_read_timeout,
partial(self._on_timeout, event_constants.RESPONSE_TIMEOUT))
self._record_delete_timeout = options.get("recordDeleteTimeout", 15)
self._delete_ack_timeout = None
self._discard_timeout = None
def get(self, path=None):
"""
Returns a copy of either the entire dataset of the record or, if called
with a path - the value of that path within the record's dataset.
Returning a copy rather than the actual value helps to prevent the
record getting out of sync due to unintentional changes to its data.
Args:
path (str, optional): a JSON path
"""
return self._get_path(path)
def set(self, data, path=None, callback=None):
"""
Sets the value of either the entire dataset or of a specific path within
the record and submits the changes to the server.
If the new data is equal to the current data, nothing happens.
Args:
data: the new value of the data
path (str, optional): a JSON path
callback (callable)
"""
config = {}
if callback:
state = self._client.connection_state
if state in (connection_state.CLOSED,
connection_state.RECONNECTING):
callback('Connection error: error updating record as '
'connection was closed')
return
else:
config['writeSuccess'] = True
self._set_up_callback(self.version, callback)
if path is None and not isinstance(data, (dict, list)):
raise ValueError(
"Invalid record data {0}: Record data must be a dict or list.")
if self._check_destroyed('set'):
return
if not self._is_ready:
self._queued_method_calls.append(partial(self.set, data, path))
return
old_value = self._data
deep_copy = self._options.get('recordDeepCopy', True)
new_value = jsonpath.set(old_value, path, data, deep_copy)
if new_value == old_value:
if callback:
callback(None)
return
self._send_update(path, data, config)
self._apply_change(new_value)
def subscribe(self, callback, path=None, trigger_now=False):
"""
Subscribe to changes to the record's dataset.
When called with a path it will only subscribe to updates to that path,
rather than the entire record.
Args:
callback (callable)
path (str, optional): a JSON path to subscribe for
trigger_now (bool): specifies whether the callback should be invoked
immediately with the current value
"""
if self._check_destroyed('subscribe'):
return
if path is None:
event = ALL_EVENT
else:
event = path
self._emitter.on(event, callback)
if trigger_now and self._is_ready:
if path:
callback(jsonpath.get(self._data, path, True))
else:
callback(self._data)
def unsubscribe(self, callback, path=None):
"""
Remove a subscription that was previously made.
Args:
callback (callable)
path (str, optional): the JSON path to unsibscribe for
"""
if self._check_destroyed('unsubscribe'):
return
if path is None:
event = ALL_EVENT
else:
event = path
self._emitter.remove_listener(event, callback)
def discard(self):
"""
Remove all change listeners and notify the server that the client is no
longer interested in updates for this record.
"""
future = concurrent.Future()
if self._check_destroyed('discard'):
return
def ready_callback(record):
self.usages -= 1
if self.usages <= 0:
self.emit('destroyPending')
self._discard_timeout = self._client.io_loop.call_later(
1, partial(self._on_timeout, event_constants.ACK_TIMEOUT))
send_future = self._connection.send_message(
topic_constants.RECORD, action_constants.UNSUBSCRIBE,
[self.name])
send_future.add_done_callback(
lambda f: future.set_result(f.result()))
self.when_ready(ready_callback)
return future
def delete(self):
"""
Delete the record on the server.
"""
future = concurrent.Future()
if self._check_destroyed('delete'):
return
def ready_callback(record):
self.emit('destroyPending')
self._delete_ack_timeout = self._client.io_loop.call_later(
self._record_delete_timeout,
partial(self._on_timeout, event_constants.DELETE_TIMEOUT))
send_future = self._connection.send_message(
topic_constants.RECORD, action_constants.DELETE, [self.name])
send_future.add_done_callback(
lambda f: future.set_result(f.result()))
self.when_ready(ready_callback)
return future
def when_ready(self, callback):
if self._is_ready:
callback(self)
else:
self.once('ready', partial(callback, self))
def _set_up_callback(self, current_version, callback):
new_version = (current_version or 0) + 1
self._write_callbacks[new_version] = callback
def _on_message(self, message):
action = message['action']
if action == action_constants.READ:
if self.version is None:
self._client.io_loop.remove_timeout(self._read_timeout)
self._on_read(message)
else:
self._apply_update(message)
elif action == action_constants.ACK:
self._process_ack_message(message)
elif action in (action_constants.UPDATE, action_constants.PATCH):
self._apply_update(message)
elif action == action_constants.WRITE_ACKNOWLEDGEMENT:
versions = json.loads(message['data'][1])
for version in versions:
if version in self._write_callbacks:
callback = self._write_callbacks[version]
callback(
message_parser.convert_typed(message['data'][2],
self._client))
del self._write_callbacks[version]
elif message['data'][0] == event_constants.VERSION_EXISTS:
self._recover_record(message['data'][2],
json.loads(message['data'][3]), message)
elif action == event_constants.MESSAGE_DENIED:
self._clear_timeouts()
elif action == action_constants.SUBSCRIPTION_HAS_PROVIDER:
has_provider = message_parser.convert_typed(
message['data'][1], self._client)
self._has_provider = has_provider
self.emit('hasProviderChanged', has_provider)
def _recover_record(self, remote_version, remote_data, message):
if self.merge_strategy:
self.merge_strategy(self, remote_data, remote_version,
partial(self._on_record_recovered,
remote_version, remote_data, message))
else:
self.emit('error', event_constants.VERSION_EXISTS,
'received update for {0} but version is {1}'.format(
remote_version, self.version))
def _on_record_recovered(self, remote_version, remote_data, message, error,
data):
if not error:
old_version = self.version
self._version = int(remote_version)
old_value = self._data
new_value = jsonpath.set(old_value, None, data, True)
if data == remote_data:
self._apply_change(data)
callback = self._write_callbacks.get(self.version, None)
if callback:
callback(None)
if remote_version in self._write_callbacks.keys():
del self._write_callbacks[remote_version]
return
config = message['data'][4] if len(message['data']) >= 5 else None
if config and json.loads(config)['writeSuccess']:
callback = self._write_callbacks[old_version]
del self._write_callbacks[old_version]
self._set_up_callback(self.version, callback)
self._send_update(None, data, config)
self._apply_change(new_value)
else:
self.emit('error', event_constants.VERSION_EXISTS,
'received update for {0} but version is {1}'.format(
remote_version, self.version))
def _process_ack_message(self, message):
acknowledge_action = message['data'][0]
if acknowledge_action == action_constants.SUBSCRIBE:
self._client.io_loop.remove_timeout(self._read_ack_timeout)
elif acknowledge_action == action_constants.DELETE:
self.emit('delete')
self._destroy()
elif acknowledge_action == action_constants.UNSUBSCRIBE:
self.emit('discard')
self._destroy()
def _apply_update(self, message):
version = int(message['data'][1])
if message['action'] == action_constants.PATCH:
data = message_parser.convert_typed(message['data'][3],
self._client)
else:
data = json.loads(message['data'][2])
if self.version is None:
self._version = version
elif self.version + 1 != version:
if message['action'] == action_constants.PATCH:
self._connection.send_message(topic_constants.RECORD,
action_constants.SNAPSHOT,
[self.name])
else:
self._recover_record(version, data, message)
return
self._begin_change()
self._version = version
if message['action'] == action_constants.PATCH:
jsonpath.set(self._data, message['data'][2], data, False)
else:
self._data = data
self._complete_change()
def _send_update(self, path, data, config):
self._version += 1
if not path:
if config:
msg_data = [self.name, self.version, data, config]
else:
msg_data = [self.name, self.version, data]
self._connection.send_message(topic_constants.RECORD,
action_constants.UPDATE, msg_data)
else:
if config:
msg_data = [
self.name, self.version, path,
message_builder.typed(data), config
]
else:
msg_data = [
self.name, self.version, path,
message_builder.typed(data)
]
self._connection.send_message(topic_constants.RECORD,
action_constants.PATCH, msg_data)
def _apply_change(self, new_data):
if self.is_destroyed:
return
old_data = self._data
self._data = new_data
if not self._emitter._events:
return
paths = self._emitter._events.keys()
for path in paths:
if path == 'new_listener':
continue
if path == 'ALL_EVENT' and new_data != old_data:
self._emitter.emit(ALL_EVENT, new_data)
continue
new_value = jsonpath.get(new_data, path, False)
old_value = jsonpath.get(old_data, path, False)
if new_value != old_value:
self._emitter.emit(path, self._get_path(path))
def _on_read(self, message):
self._begin_change()
self._version = int(message['data'][1])
self._data = json.loads(message['data'][2])
self._complete_change()
self._set_ready()
def _set_ready(self):
self._is_ready = True
for call in self._queued_method_calls:
call()
self._queued_method_calls = []
self.emit('ready')
def _send_read(self):
"""
Sends the read message, either initially at record creation or after a
lost connection has been re-established.
"""
return self._connection.send_message(
topic_constants.RECORD, action_constants.CREATEORREAD, [self.name])
def _get_path(self, path=None):
deep_copy = self._options.get('recordDeepCopy', False)
return jsonpath.get(self._data, path, deep_copy)
def _begin_change(self):
if not self._emitter._events:
return
# Hacky way of getting active listeners, except a special one
paths = [
event for event in self._emitter._events.keys()
if event != 'new_listener'
]
self._old_path_values = dict()
if self._emitter.listeners(ALL_EVENT):
self._old_value = deepcopy(self._data)
for path in paths:
if path != ALL_EVENT:
self._old_path_values[path] = jsonpath.get(
self._data, path, True)
def _complete_change(self):
    """Compare post-change data with _begin_change() snapshots and emit
    change events for the whole record and for each changed path."""
    if (self._emitter.listeners(ALL_EVENT)
            and self._old_value != self._data):
        self._emitter.emit(ALL_EVENT, self._data)
    self._old_value = None
    if not self._old_path_values:
        return
    for path in self._old_path_values:
        current_value = self._get_path(
            path) #jsonpath.get(self._data, path, True)
        if current_value != self._old_path_values[path]:
            self._emitter.emit(path, current_value)
    self._old_path_values = None
def _clear_timeouts(self):
if self._read_ack_timeout:
self._client.io_loop.remove_timeout(self._read_ack_timeout)
if self._discard_timeout:
self._client.io_loop.remove_timeout(self._discard_timeout)
if self._delete_ack_timeout:
self._client.io_loop.remove_timeout(self._delete_ack_timeout)
def _check_destroyed(self, method_name):
if self._is_destroyed:
self.emit(
'error',
"Can't invoke {0}. Record {1} is already destroyed".format(
method_name, self.name))
return True
return False
def _on_timeout(self, timeout_type):
    """Cancel outstanding ack timeouts and surface the timeout as an error."""
    self._clear_timeouts()
    self.emit('error', timeout_type)
def _destroy(self):
    """Tear down the record: cancel timeouts, drop listeners and references."""
    self._clear_timeouts()
    self._emitter.remove_all_listeners()
    self._resubscribe_notifier.destroy()
    self._is_destroyed = True
    self._is_ready = False
    # Break reference cycles to the client and connection.
    self._client = None
    self._connection = None
@property
def has_provider(self):
    """Whether the server reports an active provider for this record."""
    return self._has_provider

@property
def is_destroyed(self):
    """True once _destroy() has run; the record must not be used again."""
    return self._is_destroyed

@property
def is_ready(self):
    """True after the initial READ response has been applied."""
    return self._is_ready

@property
def version(self):
    """Server-side version number of the record data."""
    return self._version
class List(Record):
    """A Record whose payload is a list of record names.

    Adds entry-level manipulation (add/remove) and emits
    ENTRY_ADDED_EVENT / ENTRY_REMOVED_EVENT / ENTRY_MOVED_EVENT when the
    underlying list structure changes.
    """

    def __init__(self, name, connection, options, client):
        super(List, self).__init__(name, connection, options, client)
        self._before_structure = None
        # Cached listener flags, refreshed in _before_change().
        self._has_add_listener = None
        self._has_remove_listener = None
        self._has_move_listener = None

    def get(self):
        """
        Return the list of entries or an empty array if the list hasn't been
        populated yet.
        """
        entries = super(List, self).get()
        if not isinstance(entries, list):
            return []
        return entries

    def set(self, entries):
        """
        Update the list with a new set of entries.

        Args:
            entries (list): the new entries
        """
        error_msg = 'entries must be a list of record names'
        if not isinstance(entries, list):
            raise ValueError(error_msg)
        for entry in entries:
            if not isinstance(entry, str_types):
                raise ValueError(error_msg)
        if not self.is_ready:
            self._queued_method_calls.append(partial(self.set, entries))
        else:
            self._before_change()
            super(List, self).set(entries)
            self._after_change()

    def remove_entry(self, entry):
        """
        Remove the entry from the list.

        Args:
            entry (str): the entry to remove
        """
        if not self.is_ready:
            # Bug fix: queue and stop. Previously execution fell through and
            # the removal also ran immediately against not-yet-loaded data.
            self._queued_method_calls.append(partial(self.remove_entry, entry))
            return
        current_entries = deepcopy(super(List, self).get())
        current_entries.remove(entry)
        self.set(current_entries)

    def remove_at(self, index):
        """
        Remove the entry at the specified index.

        Args:
            index (int): the index of the entry to remove
        """
        if not self.is_ready:
            # Bug fix: queue and stop (see remove_entry).
            self._queued_method_calls.append(partial(self.remove_at, index))
            return
        current_entries = deepcopy(super(List, self).get())
        del current_entries[index]
        self.set(current_entries)

    def add_entry(self, entry, index=None):
        """
        Add an entry to the list.

        Args:
            entry (str): the entry to add
            index (int): the index at which to add the entry
        """
        if not self.is_ready:
            # Bug fix: queue and stop. Falling through used to apply the
            # entry twice (once via this queued partial and once via the
            # set() call below, which queued itself again).
            self._queued_method_calls.append(
                partial(self.add_entry, entry, index))
            return
        entries = deepcopy(self.get())
        if index is not None:
            entries.insert(index, entry)
        else:
            entries.append(entry)
        self.set(entries)

    def subscribe(self, callback):
        """
        Proxies the underlying Record's subscribe method.
        """
        super(List, self).subscribe(callback)

    def unsubscribe(self, callback):
        """
        Proxies the underlying Record's unsubscribe method.
        """
        super(List, self).unsubscribe(callback)

    def _apply_update(self, message):
        """Apply a server UPDATE; coerce non-list payloads to an empty list."""
        if message['action'] == action_constants.PATCH:
            raise ValueError('PATCH is not supported for Lists')
        if message['data'][2][0] != '[':
            message['data'][2] = '[]'
        self._before_change()
        super(List, self)._apply_update(message)
        self._after_change()

    def _before_change(self):
        """Capture the entry->indices structure before a change, but only
        when someone is listening for entry events."""
        self._has_add_listener = len(self.listeners(ENTRY_ADDED_EVENT)) > 0
        self._has_remove_listener = len(
            self.listeners(ENTRY_REMOVED_EVENT)) > 0
        self._has_move_listener = len(self.listeners(ENTRY_MOVED_EVENT)) > 0
        if (self._has_add_listener or self._has_remove_listener
                or self._has_move_listener):
            self._before_structure = self._get_structure()
        else:
            self._before_structure = None

    def _after_change(self):
        """Diff structures captured around a change and emit entry events."""
        if self._before_structure is None:
            return
        after = self._get_structure()
        before = self._before_structure
        if self._has_remove_listener:
            self._after_change_remove_listener(before, after)
        if self._has_add_listener or self._has_move_listener:
            self._after_change_move_add_listener(before, after)

    def _after_change_remove_listener(self, before, after):
        """Emit ENTRY_REMOVED_EVENT for every index that disappeared."""
        for entry in before:
            if (entry not in after or len(after[entry]) < len(before[entry])):
                for n in before[entry]:
                    if entry not in after or n not in after[entry]:
                        self.emit(ENTRY_REMOVED_EVENT, entry, n)

    def _after_change_move_add_listener(self, before, after):
        """Emit ENTRY_ADDED_EVENT for new indices and ENTRY_MOVED_EVENT for
        indices that shifted without the entry count changing."""
        for entry in after:
            if entry not in before:
                for n in after[entry]:
                    self.emit(ENTRY_ADDED_EVENT, entry, n)
            elif before[entry] != after[entry]:
                added = len(before[entry]) != len(after[entry])
                for n in after[entry]:
                    if added and n not in before[entry]:
                        self.emit(ENTRY_ADDED_EVENT, entry, n)
                    elif not added:
                        self.emit(ENTRY_MOVED_EVENT, entry, n)

    def _get_structure(self):
        """Map each entry name to the list of indices it occupies."""
        structure = {}
        entries = super(List, self).get()
        for i, entry in enumerate(entries):
            if entry in structure:
                structure[entry].append(i)
            else:
                structure[entry] = [i]
        return structure

    @property
    def is_empty(self):
        """True when the list has no entries (or isn't populated yet)."""
        return len(self.get()) == 0
class RecordHandler(EventEmitter, object):
    """Creates, caches and routes messages to Record and List instances."""

    def __init__(self, connection, client, **options):
        super(RecordHandler, self).__init__()
        self._options = options
        self._connection = connection
        self._client = client
        self._records = {}
        self._lists = {}
        self._listeners = {}
        self._destroy_emitter = EventEmitter()
        record_read_timeout = options.get("recordReadTimeout", 15)
        # Registries matching has()/snapshot() requests to their responses.
        self._has_registry = SingleNotifier(
            client, connection, topic_constants.RECORD, action_constants.HAS,
            record_read_timeout)
        self._snapshot_registry = SingleNotifier(
            client, connection, topic_constants.RECORD,
            action_constants.SNAPSHOT, record_read_timeout)

    @gen.coroutine
    def get_record(self, name):
        """
        Return an existing record or create a new one.

        Args:
            name (str): the unique name of the record
        """
        if name in self._records:
            record = self._records[name]
        else:
            record = Record(name, self._connection, self._options,
                            self._client)
            record.on('error', partial(self._on_record_error, name))
            record.on('destroyPending', partial(self._on_destroy_pending,
                                                name))
            record.on('delete', partial(self._remove_record, name))
            record.on('discard', partial(self._remove_record, name))
            self._records[name] = record
        record.usages += 1
        # The read is (re-)sent on every acquisition.
        yield record._send_read()
        raise gen.Return(record)

    @gen.coroutine
    def get_list(self, name):
        """
        Return an exising list or create a new one.

        Args:
            name (str): the unique name of the list
            list_options (dict): a dict of parameters for this particular list
        """
        if name in self._lists:
            _list = self._lists[name]
        else:
            _list = List(name, self._connection, self._options, self._client)
            self._lists[name] = _list
        # Lists are also tracked in the record cache for message routing.
        if name not in self._records:
            self._records[name] = _list
            _list.on('error', partial(self._on_record_error, name))
            _list.on('destroyPending', partial(self._on_destroy_pending, name))
            _list.on('delete', partial(self._remove_record, name))
            _list.on('discard', partial(self._remove_record, name))
        self._records[name].usages += 1
        yield _list._send_read()
        raise gen.Return(_list)

    def get_anonymous_record(self):
        """
        Return an anonymous record.
        """
        future = concurrent.Future()
        future.set_result(AnonymousRecord(self))
        return future

    def listen(self, pattern, callback):
        """
        Listen for record subscriptions made by this or other clients. This is
        useful to create "active" data providers, e.g. providers that only
        provide data for a particular record if a user is actually interested in
        it.

        Args:
            pattern (str): A combination of alpha numeric characters and
                wildcards(*)
            callback (callable):
        """
        if pattern in self._listeners:
            self._client._on_error(topic_constants.RECORD,
                                   event_constants.LISTENER_EXISTS, pattern)
            future = concurrent.Future()
            future.set_result(None)
        else:
            listener = Listener(topic_constants.RECORD, pattern, callback,
                                self._options, self._client, self._connection)
            self._listeners[pattern] = listener
            future = listener.send_future
        return future

    def unlisten(self, pattern):
        """
        Remove a listener that was previously registered with `listen`.

        Args:
            pattern (str): A combination of alpha numeric characters and
                wildcards(*)
        """
        if pattern in self._listeners:
            listener = self._listeners[pattern]
            if not listener.destroy_pending:
                # Ask the server first; actual destruction happens on ack.
                listener.send_destroy()
                future = concurrent.Future()
                future.set_result(None)
            else:
                future = listener.destroy()
                del self._listeners[pattern]
        else:
            self._client._on_error(topic_constants.RECORD,
                                   event_constants.NOT_LISTENING, pattern)
            future = concurrent.Future()
            future.set_result(None)
        return future

    def snapshot(self, name, callback):
        """
        Retrieve the current record data without subscribing to changes.

        Args:
            name (str): the unique name of the record
            callback (callable):
        """
        # Serve from the local cache when the record is already loaded.
        if name in self._records and self._records[name].is_ready:
            callback(None, self._records[name].get())
            future = concurrent.Future()
            future.set_result(None)
        else:
            future = self._snapshot_registry.request(name, callback)
        return future

    def has(self, name, callback):
        """
        Check whether the record exists.

        Args:
            name (str): the unique name of the record
            callback (callable):
        """
        if name in self._records:
            callback(None, True)
            future = concurrent.Future()
            future.set_result(None)
        else:
            future = self._has_registry.request(name, callback)
        return future

    def _process_message(self, message, name):
        """Dispatch non-record-instance messages (snapshot/has/listener).

        Returns True when the message found a consumer.
        """
        action = message['action']
        data = message['data']
        processed = False
        if (action == action_constants.READ
                and self._snapshot_registry.has_request(name)):
            processed = True
            self._snapshot_registry.receive(name, None, json.loads(data[2]))
        if (action == action_constants.HAS
                and self._has_registry.has_request(name)):
            processed = True
            record_exists = message_parser.convert_typed(data[1], self._client)
            self._has_registry.receive(name, None, record_exists)
        listener = self._listeners.get(name, None)
        if (action == action_constants.ACK
                and data[0] == action_constants.UNLISTEN and listener
                and listener.destroy_pending):
            processed = True
            listener.destroy()
            del self._listeners[name]
            del listener
        elif listener:
            processed = True
            listener._on_message(message)
        elif action in (action_constants.SUBSCRIPTION_FOR_PATTERN_REMOVED,
                        action_constants.SUBSCRIPTION_HAS_PROVIDER):
            # Informational only; swallow so it isn't reported unsolicited.
            processed = True
        return processed

    def handle(self, message):
        """Route an incoming RECORD-topic message to the right destination."""
        action = message['action']
        data = message['data']
        if (action == action_constants.ERROR
                and data[0] not in (event_constants.VERSION_EXISTS,
                                    action_constants.SNAPSHOT,
                                    action_constants.HAS)):
            message['processedError'] = True
            self._client._on_error(topic_constants.RECORD, message['data'][0],
                                   message['data'][1])
            return
        if action in (action_constants.ACK, action_constants.ERROR):
            # For ack/error messages the record name sits at index 1.
            name = data[1]
            if data[0] in (action_constants.DELETE,
                           action_constants.UNSUBSCRIBE):
                name = message['data'][1]
                self._destroy_emitter.emit('destroy_ack_' + name, message)
                if (message['data'][0] == action_constants.DELETE
                        and name in self._records):
                    self._records[name]._on_message(message)
                return
            if data[0] in (action_constants.SNAPSHOT, action_constants.HAS):
                message['processedError'] = True
                error = message['data'][2]
                # Bug fix: HAS errors used to be delivered to the snapshot
                # registry, leaving the pending has() request hanging.
                if data[0] == action_constants.SNAPSHOT:
                    self._snapshot_registry.receive(name, error, None)
                else:
                    self._has_registry.receive(name, error, None)
                return
        else:
            name = message['data'][0]
        processed = False
        if name in self._records:
            processed = True
            self._records[name]._on_message(message)
        processed = self._process_message(message, name) or processed
        if not processed:
            self._client._on_error(topic_constants.RECORD,
                                   event_constants.UNSOLICITED_MESSAGE, name)

    def _on_record_error(self, record_name, error):
        """Forward a record-level error to the client error handler."""
        message = "No ACK message received in time for {}".format(record_name)
        self._client._on_error(topic_constants.RECORD, error, message)

    def _on_destroy_pending(self, record_name):
        """Arrange for the record to see its destroy ack, then drop it."""
        on_message = self._records[record_name]._on_message
        self._destroy_emitter.once('destroy_ack_' + record_name, on_message)
        self._remove_record(record_name)

    def _remove_record(self, record_name):
        """Drop a record (or list) from the local cache."""
        if record_name in self._records:
            del self._records[record_name]
        elif record_name in self._lists:
            # NOTE(review): lists are registered in _records too, so this
            # branch is unreachable for them and _lists entries persist --
            # confirm whether _lists should also be purged above.
            del self._lists[record_name]
class AnonymousRecord(EventEmitter, object):
    """A record proxy whose backing record is chosen by setting `name`.

    Subscriptions registered here are transparently moved to whichever
    record is currently attached.
    """

    def __init__(self, record_handler):
        super(AnonymousRecord, self).__init__()
        self._record_handler = record_handler
        self._name = None
        self._record = None
        # (callback, path) pairs to re-apply whenever the record changes.
        self._subscriptions = []
        # Methods forwarded verbatim to the underlying record.
        self._proxy_method('delete')
        self._proxy_method('set')
        self._proxy_method('discard')

    def get(self, path=None):
        """
        Proxies the actual record's get method.

        Args:
            path (str, optional): a JSON path. If not provided, the entire
                record is returned
        """
        if self._record is None:
            return None
        return self._record.get(path)

    def subscribe(self, callback, path=None):
        """
        Proxies the actual record's subscribe method.

        Args:
            callback (callable):
            path (str): a JSON path. If not provided, the subscription is for
                the entire record.
        """
        self._subscriptions.append((callback, path))
        if self._record is not None:
            self._record.subscribe(callback, path, True)

    def unsubscribe(self, callback, path=None):
        """
        Proxies the actual record's unsubscribe method.

        Args:
            callback (callable):
            path (str): a JSON path. If not provided, the subscription is for
                the entire record.
        """
        self._subscriptions.remove((callback, path))
        if self._record is not None:
            self._record.unsubscribe(callback, path)

    def _on_record_get(self, record):
        # Callback used when a record future resolves.
        self._record = record

    @property
    def name(self):
        """Name of the currently attached record (None before first set)."""
        return self._name

    @name.setter
    @gen.coroutine
    def name(self, value):
        """Attach to the record *value*, migrating all subscriptions."""
        self._name = value
        if self._record is not None and not self._record.is_destroyed:
            # Detach cleanly from the previous record.
            for subscription in self._subscriptions:
                self._record.unsubscribe(*subscription)
            self._record.discard()
        record_future = self._record_handler.get_record(value)
        self._record = yield record_future
        for subscription in self._subscriptions:
            self._record.subscribe(*subscription, trigger_now=True)
        self._record.when_ready(partial(self.emit, "ready"))
        self.emit("nameChanged", value)

    def _proxy_method(self, method_name):
        # Bind an instance attribute that forwards to the backing record.
        method = partial(self._call_method_on_record, method_name)
        setattr(self, method_name, method)

    def _call_method_on_record(self, method_name, *args, **kwargs):
        """Forward *method_name* to the backing record, if one is attached."""
        if self._record is None:
            # Bug fix: the {} placeholder was never formatted before.
            raise AttributeError(
                "Can't invoke {}. AnonymousRecord not initialised. "
                "Set `name` first.".format(method_name))
        getattr(self._record, method_name)(*args, **kwargs)
| |
import numpy as np
import time
import cv2
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from lib import utils
# from scipy import ndimage
from .ui_recorder import UIRecorder
from .ui_color import UIColor
from .ui_sketch import UISketch
from .ui_warp import UIWarp
class GUIDraw(QWidget):
    """Interactive drawing canvas.

    Color / sketch / warp strokes are converted into constraints for
    opt_engine, whose generated image is rendered back onto the widget.
    Note: string comparisons previously used `is` (identity), which only
    works because CPython interns small literals; replaced with `==`.
    """

    def __init__(self, opt_engine, win_size=320, img_size=64, topK=16, useAverage=False, shadow=False):
        QWidget.__init__(self)
        self.isPressed = False
        self.points = []  # stroke points of the current drag
        self.topK = topK
        self.lastDraw = 0
        self.model = None
        self.shadow = shadow  # (was assigned twice; redundant init removed)
        self.init_color(shadow)
        self.opt_engine = opt_engine
        self.pos = None
        self.nps = win_size
        self.scale = win_size / float(img_size)
        self.brushWidth = int(2 * self.scale)
        self.show_nn = True
        self.type = 'edge' if self.shadow else 'color'
        self.show_ui = True
        self.uir = UIRecorder(shadow=shadow)
        nc = 1 if shadow else 3
        self.uiColor = UIColor(img_size=img_size, scale=self.scale, nc=nc)
        self.uiSketch = UISketch(img_size=img_size, scale=self.scale, nc=nc)
        self.uiWarp = UIWarp(img_size=img_size, scale=self.scale, nc=nc)
        self.img_size = img_size
        self.move(win_size, win_size)
        self.useAverage = useAverage
        if self.shadow:
            self.setMouseTracking(True)
        self.movie = True
        self.frame_id = -1
        self.image_id = 0

    def change_average_mode(self):
        """Toggle averaged-image display and repaint."""
        self.useAverage = not self.useAverage
        self.update()

    def update_opt_engine(self):
        """Push the current UI constraints into the optimization engine."""
        if self.type in ['color', 'edge']:
            [im_c, mask_c] = self.uiColor.get_constraints()
            [im_e, mask_e] = self.uiSketch.get_constraints()
        else:
            [im_c, mask_c] = self.uiWarp.get_constraints()
            [im_e, mask_e] = self.uiWarp.get_edge_constraints()
        self.opt_engine.set_constraints([im_c, mask_c, im_e, mask_e])
        self.opt_engine.update()
        self.frame_id = -1

    def update_im(self):
        """Repaint and let Qt process pending events."""
        self.update()
        QApplication.processEvents()

    def update_ui(self):
        """Feed the current stroke into the active UI tool."""
        if self.opt_engine.is_fixed():
            self.set_frame_id(-1)
            self.set_image_id(0)
            self.emit(SIGNAL('update_image_id'), 0)
            self.opt_engine.update_fix()
        # Bug fix: `is 'color'` etc. replaced with equality comparisons.
        if self.type == 'color':
            self.uiColor.update(self.points, self.color)
        if self.type == 'edge':
            self.uiSketch.update(self.points, self.color)
        if self.type == 'warp':
            self.uiWarp.update(self.pos)

    def set_image_id(self, image_id):
        """Switch the displayed image, repainting only on change."""
        if self.image_id != image_id:
            self.image_id = image_id
            self.update()

    def set_frame_id(self, frame_id):
        """Switch the displayed frame, repainting only on change."""
        if self.frame_id != frame_id:
            self.frame_id = frame_id
            self.update()

    def reset(self):
        """Clear all strokes, constraints and display state."""
        self.isPressed = False
        self.points = []
        self.lastDraw = 0
        self.uir.reset()
        self.uiSketch.reset()
        self.uiColor.reset()
        self.uiWarp.reset()
        self.frame_id = -1
        self.image_id = 0
        self.update()

    def round_point(self, pnt):
        """Round a QPoint(F) to integer pixel coordinates."""
        x = int(np.round(pnt.x()))
        y = int(np.round(pnt.y()))
        return QPoint(x, y)

    def init_color(self, shadow):
        """Set the initial brush color for the given mode."""
        if shadow:
            self.color = QColor(0, 0, 0)  # shadow mode: default color black
        else:
            self.color = QColor(0, 255, 0)  # default color green
        self.prev_color = self.color

    def change_color(self):
        """Pick a new brush color (toggle b/w in shadow mode; dialog else)."""
        if self.shadow:
            if self.color == QColor(0, 0, 0):
                self.color = QColor(255, 255, 255)
            else:
                self.color = QColor(0, 0, 0)
        else:
            color = QColorDialog.getColor(parent=self)
            self.color = color
            self.prev_color = self.color
        self.emit(SIGNAL('update_color'), QString('background-color: %s' % self.color.name()))

    def get_image_id(self):
        return self.image_id

    def get_frame_id(self):
        return self.frame_id

    def get_z(self):
        """Return the latent vector for the current image/frame."""
        print('get z from image %d, frame %d'%(self.get_image_id(), self.get_frame_id()))
        return self.opt_engine.get_z(self.get_image_id(), self.get_frame_id())

    def shadow_image(self, img, pos):
        """Brighten *img* away from *pos* using a distance-transform falloff."""
        if img is None:
            return None
        weighted_img = np.ones((img.shape[0], img.shape[1]), np.uint8)
        x = int(pos.x() / self.scale)
        y = int(pos.y() / self.scale)
        weighted_img[y, x] = 0
        dist_img = cv2.distanceTransform(weighted_img, distanceType=cv2.cv.CV_DIST_L2, maskSize=5).astype(np.float32)
        dist_sigma = self.img_size/2.0
        dist_img_f = np.exp(-dist_img / dist_sigma)
        dist_img_f = np.tile(dist_img_f[..., np.newaxis], [1,1,3])
        l = 0.25
        img_f = img.astype(np.float32)
        rst_f = (img_f * l + (1-l) * (img_f * dist_img_f + (1-dist_img_f)*255.0))
        rst = rst_f.astype(np.uint8)
        return rst

    def paintEvent(self, event):
        """Render the engine image, the in-progress stroke and the cursor."""
        painter = QPainter()
        painter.begin(self)
        painter.fillRect(event.rect(), Qt.white)
        painter.setRenderHint(QPainter.Antialiasing)
        im = self.opt_engine.get_image(self.get_image_id(), self.get_frame_id(), self.useAverage)
        if self.shadow and self.useAverage:
            im = self.shadow_image(im, self.pos)
        if im is not None:
            bigim = cv2.resize(im, (self.nps, self.nps))
            qImg = QImage(bigim.tostring(), self.nps, self.nps, QImage.Format_RGB888)
            painter.drawImage(0, 0, qImg)
        # draw path
        if self.isPressed and self.type in ['color', 'edge'] and self.show_ui:
            if self.type == 'edge':
                if self.shadow:
                    painter.setPen(QPen(self.color, 10, cap=Qt.RoundCap, join=Qt.RoundJoin))
                else:
                    painter.setPen(QPen(Qt.gray, 10, Qt.DotLine, cap=Qt.RoundCap, join=Qt.RoundJoin))
            else:
                painter.setPen(QPen(self.color, int(self.brushWidth), cap=Qt.RoundCap, join=Qt.RoundJoin))
            n_pnts = len(self.points)
            # Connect every fifth point to keep the preview cheap.
            for i in range(0, n_pnts-5, 5):
                painter.drawLine(self.points[i], self.points[i + 5])
            self.lastDraw = n_pnts
        # draw cursor
        if self.pos is not None:
            w = self.brushWidth
            c = self.color
            ca = QColor(255, 255, 255, 127)
            pnt = QPointF(self.pos.x(), self.pos.y())
            if self.type == 'color':
                ca = QColor(c.red(), c.green(), c.blue(), 127)
            if self.type == 'edge':
                ca = QColor(0, 0, 0, 127)
            if self.type == 'warp':
                ca = QColor(0, 0, 0, 127)
            painter.setPen(QPen(ca, 1))
            painter.setBrush(ca)
            if self.type == 'warp':
                if self.show_ui:
                    painter.drawRect(int(self.pos.x()-w/2.0),int(self.pos.y() - w/2.0), w, w)
            else:
                painter.drawEllipse(pnt, w, w)
        # warp mode: dotted line from the grabbed point to the cursor
        if self.type == 'warp' and self.show_ui:
            color = Qt.green
            w = 10
            painter.setPen(QPen(color, w, Qt.DotLine, cap=Qt.RoundCap, join=Qt.RoundJoin)) # ,)
            pnt1 = self.uiWarp.StartPoint()
            if pnt1 is not None:
                pnt1f = QPointF(pnt1[0]*self.scale, pnt1[1]*self.scale)
                pnt2f = QPointF(self.pos.x(), self.pos.y())
                painter.drawLine(pnt1f, pnt2f)
        if self.show_ui:
            self.uir.draw(painter)
        painter.end()

    def update_msg(self, painter):
        """Draw the mode / frame status text onto *painter*."""
        if self.type == 'color':
            msg = 'coloring: (%d, %d, %d)' % (self.color.red(), self.color.green(), self.color.blue())
        if self.type == 'edge':
            msg = 'sketching'
        if self.type == 'warp':
            msg = 'warping'
        painter.setPen(QColor(0, 0, 0))
        fontSz = 10
        border = 3
        painter.setFont(QFont('Decorative', fontSz))
        painter.drawText(QPoint(border, fontSz + border), QString(msg))
        num_frames = self.opt_engine.get_num_frames()
        num_images = self.opt_engine.get_num_images()
        if num_frames > 0 and num_images > 0:
            d_frame_id = (self.get_frame_id())%num_frames + 1
            d_show_id = (self.get_image_id())% num_images + 1
            msg = 'frame %2d/%2d, image %2d/%2d'%(d_frame_id, num_frames, d_show_id, num_images)
            painter.setPen(QColor(0, 0, 0))
            fontSz = 10
            border = 3
            painter.setFont(QFont('Decorative', fontSz))
            painter.drawText(QPoint(border, 2 * fontSz + border), QString(msg))

    def wheelEvent(self, event):
        """Resize the active tool's brush with the mouse wheel."""
        d = event.delta() / 120
        if self.type == 'edge':
            self.brushWidth = self.uiSketch.update_width(d, self.color)
        if self.type == 'color':
            self.brushWidth = self.uiColor.update_width(d)
        if self.type == 'warp':
            self.brushWidth = self.uiWarp.update_width(d)
        self.update()

    def mousePressEvent(self, event):
        """Left button starts a stroke / warp grab; right changes color."""
        self.pos = self.round_point(event.pos())
        if event.button() == Qt.LeftButton:
            self.isPressed = True
            self.points.append(self.pos)
            self.update_opt_engine()
            self.update_ui()
            self.update()
        if event.button() == Qt.RightButton:
            if self.type in ['edge', 'color']:
                self.change_color()
            if self.type == 'warp':
                im = self.opt_engine.get_image(self.get_image_id(), self.get_frame_id())
                self.uiWarp.AddPoint(event.pos(), im)
                self.brushWidth = self.uiWarp.update_width(0)
            self.update()

    def mouseMoveEvent(self, event):
        """Extend the active stroke while the left button is held."""
        self.pos = self.round_point(event.pos())
        if self.isPressed:
            if self.type in ['color','edge']:
                self.points.append(self.pos)
            self.update_ui()
            self.update_opt_engine()
            self.update()

    def mouseReleaseEvent(self, event):
        """Commit the finished stroke and reset the per-stroke state."""
        if event.button() == Qt.LeftButton and self.isPressed:
            self.update()
            if self.type == 'color' or self.type == 'edge':
                self.uir.save_record(self.points, self.color, self.brushWidth, self.type)
            self.opt_engine.save_constraints()
            self.uiColor.reset()
            self.uiSketch.reset()
            self.uiWarp.reset()
            del self.points[:]
            self.isPressed = False
            self.lastDraw = 0

    def sizeHint(self):
        return QSize(self.nps, self.nps) # 28 * 8

    def update_frame(self, dif):
        """Step the displayed frame by *dif*, wrapping around."""
        num_frames = self.opt_engine.get_num_frames()
        if num_frames > 0:
            self.frame_id = (self.frame_id+dif) % num_frames
            print("show frame id = %d"%self.frame_id)

    def fix_z(self):
        """Pin the engine's latent vector to the current frame/image."""
        self.opt_engine.init_z(self.get_frame_id(), self.get_image_id())

    def morph_seq(self):
        """Play the interpolation sequence frame by frame."""
        self.frame_id=0
        num_frames = self.opt_engine.get_num_frames()
        print('show %d frames' % num_frames)
        for n in range(num_frames):
            self.update()
            QApplication.processEvents()
            fps = 10
            time.sleep(1/float(fps))
            self.emit(SIGNAL('update_frame_id'),self.frame_id)
            if n < num_frames-1: # stop at last frame
                self.update_frame(1)

    def use_color(self):
        """Switch to coloring mode, restoring the previous brush color."""
        print('coloring')
        self.type = 'color'
        self.color = self.prev_color
        self.emit(SIGNAL('update_color'), QString('background-color: %s' % self.color.name()))
        self.brushWidth = self.uiColor.update_width(0)
        self.update()

    def use_edge(self):
        """Switch to sketching mode with the mode's default pen color."""
        print('sketching')
        self.type = 'edge'
        self.color = QColor(0, 0, 0) if self.shadow else QColor(128, 128, 128)
        self.emit(SIGNAL('update_color'), QString('background-color: %s' % self.color.name()))
        self.brushWidth = self.uiSketch.update_width(0, self.color)
        self.update()

    def use_warp(self):
        """Switch to warping mode."""
        self.type = 'warp'
        self.color = QColor(128, 128, 128)
        self.emit(SIGNAL('update_color'), QString('background-color: %s' % self.color.name()))
        self.brushWidth = self.uiWarp.update_width(0)
        print('warp brush: %d' % self.brushWidth)
        self.update()

    def show_edits(self):
        """Toggle drawing of stroke overlays on the canvas."""
        self.show_ui = not self.show_ui
        self.update()
| |
# Usage:
# - python this_script path_to_image
#
# Purpose: This script generates random test points using Mitchel's Best Candidate-II
# algorithm. These points are displayed on the input image.
#
# The user can also designate rectangles for those random points to be
# generated within.
#
# We use this utility to create random points within areas of a building we
# have mapped.
import sys
from sys import argv, stderr, exit
from PIL import Image, ImageTk
from random import random
import Tkinter
import requests
from requests.exceptions import HTTPError
import json
import os
import math
from server_interface import ServerInterface
APP = {} # contains global information needed by tkinter functions
NUM_POINTS = 2 # number of vertices in a rectangle
NUM_TEST_POINTS = 20 # Number of test points we want
NUM_CANDIDATES = 20 # Number of attempts per test point chosen
SERVER_URL = "http://mapbuilder.herokuapp.com/" # NOTE(review): appears unused; the URL comes from ServerInterface
FID = -1 # Floor ID (resource URI once resolved in main())
SERVER = None # ServerInterface instance, set in main()
FLOOR_NAME = None # "<building> <floor>" label used when naming points
class Point(object):
    """ Point Object """
    def __init__(self, x, y):
        # x/y are pixel coordinates on the floor image
        self.x = x
        self.y = y
    def save(self, fid, c):
        """Given a floor id and counter will create a point in the database"""
        # NOTE(review): the `fid` parameter is ignored -- the payload uses
        # the global FID instead. Confirm whether that is intentional.
        n = c()  # next sequence number from the shared counter
        name = '{} TEST: {}'.format(FLOOR_NAME, n)
        data = {
            'x_coordinate': self.x,
            'y_coordinate': self.y,
            'short_name': n,
            'verbose_name': name,
            'direction': 0,
            'floor': FID
        }
        try:
            SERVER.post(ServerInterface.LOCATION, data)
            print('Saved {} at ({},{})'.format(name, self.x, self.y))
        except HTTPError, e:  # Python 2 exception syntax (whole script is py2)
            print >> sys.stderr, e
            print >> sys.stderr, 'Unable to save {}'.format(name)
class Rectangle(object):
    """Axis-aligned rectangle defined by two opposite corner points."""
    def __init__(self, corners):
        """corners: sequence of exactly two objects with .x/.y attributes."""
        if len(corners) != 2:
            stderr.write("not enough corners.\n")
            exit()
        first, second = corners
        # Normalize so XL/YT is the top-left and XR/YB the bottom-right.
        self.XL = min(first.x, second.x)
        self.XR = max(first.x, second.x)
        self.YT = min(first.y, second.y)
        self.YB = max(first.y, second.y)
    def corners(self):
        """Return the four corner Points, clockwise from top-left."""
        return [
            Point(self.XL, self.YT),
            Point(self.XR, self.YT),
            Point(self.XR, self.YB),
            Point(self.XL, self.YB),
        ]
    def contains(self, point):
        """ Checks whether the given rectangle contains the given point """
        return (self.XL <= point.x <= self.XR and
                self.YT <= point.y <= self.YB)
def getRandomPoints():
    """Generate NUM_TEST_POINTS dispersed random Points via best-candidate."""
    points = []
    # Each new point is chosen relative to the ones already placed.
    for _ in range(NUM_TEST_POINTS):
        points.append(bestCandidate(points))
    return points
def bestCandidate(test_points):
    """ Runs Mitchell's Best-Candidate II Algorithm to generate a dispersed random Point """
    if not test_points:
        return getCandidatePoint()
    best = None
    # Bug fix: start below any possible distance so the first candidate is
    # always kept; previously `best` was unbound (UnboundLocalError) when
    # every candidate tied at distance 0.
    bestDistance = -1
    for i in range(NUM_CANDIDATES):
        c = getCandidatePoint()
        # Keep the candidate farthest from its nearest existing point.
        d = distance(findClosest(test_points, c), c)
        if d > bestDistance:
            best = c
            bestDistance = d
    return best
def getCandidatePoint():
    """ Returns a random Point in the image within at least one of the rectangles """
    global APP
    # No rectangles drawn: any point on the image is acceptable.
    if APP['rectangles'] == []:
        return RandomPoint()
    attempts = 0 # Caps total number of tries allowed
    # Rejection-sample until a point lands inside some rectangle.
    while(attempts < 1000000):
        point = RandomPoint()
        for rectangle in APP['rectangles']:
            if rectangle.contains(point):
                return point
        attempts += 1
    stderr.write("rectangle space too small to find point\n")
    exit()
def findClosest(test_points, point):
    """ Given a set of Points, and a point returns the closest Point to the point """
    # Start at the image diagonal: the largest possible in-image distance.
    min_distance = math.sqrt(APP['dims']['w'] ** 2 + APP['dims']['h'] ** 2)
    closest = None
    # NOTE(review): if every candidate is exactly min_distance away (corner
    # to opposite corner) this returns None, which would crash distance()
    # in bestCandidate -- verify this edge case is acceptable.
    for test_point in test_points:
        cur_distance = distance(point, test_point)
        if cur_distance < min_distance:
            closest = test_point
            min_distance = cur_distance
    return closest
#--------------------------------
# Misc Utility Functions Below
#--------------------------------
def distance(a, b):
    """Euclidean distance between two point-like objects (with .x/.y)."""
    return math.hypot(a.x - b.x, a.y - b.y)
def rand(minimum, maximum):
    """Uniform random float in the half-open interval [minimum, maximum)."""
    span = maximum - minimum
    return minimum + random() * span
def counter():
    """Return a callable yielding 1, 2, 3, ... on successive calls."""
    # One-element list as mutable closure state (py2-compatible; no nonlocal).
    state = [0]
    def advance():
        state[0] += 1
        return state[0]
    return advance
def RandomPoint():
    """ generates a Point object within the image space """
    global APP
    # Uniform over the full image extent recorded in APP['dims'].
    return Point(rand(0, APP['dims']['w']), rand(0, APP['dims']['h']))
def get_floor_info(imageName):
    """Requests a building name and floor number from the user
    If a floor with the supplied name and number exists, get the resource ID for it
    If it does not exists, create a new one and return its resource ID.
    Args:
        imageName: A string with the location of an image for a floor
    Returns:
        The Resource ID for the floor and the name to use when posting new points
    """
    global SERVER
    try:
        building_name = raw_input("Building: ")
        floor_number = raw_input("Floor Number: ")
        # NOTE(review): str.replace returns a new string; this call discards
        # its result, so spaces are NOT actually replaced -- confirm intent.
        building_name.replace(' ', '_')
        combined_name = "{} {}".format(building_name, floor_number)
        lookup_params = {
            'building_name__iexact': building_name, # iexact to account for capitalization
            'floor_number': floor_number
        }
        found, floor_info = SERVER.get_single_item(ServerInterface.FLOOR,
                                                   lookup_params)
        if found:
            return floor_info['resource_uri'], combined_name
        else:
            # Floor not found: upload the image and create a new floor.
            with open(imageName, 'rb') as image:
                post_payload = {
                    'building_name': building_name,
                    'floor_number': floor_number,
                }
                files = {
                    'image': image
                }
                floor_info = SERVER.post_with_files(ServerInterface.FLOOR,
                                                    post_payload,
                                                    files)
                return floor_info['resource_uri'], combined_name
    except HTTPError, e:  # Python 2 exception syntax
        print >> sys.stderr, e
        exit("Error finding floor id")
#--------------------------------
# TKinter Application Code Below
#--------------------------------
def initializeApp(image_path):
    """ Initializes data for app Binds tkinter buttons """
    global APP
    image = Image.open(image_path)
    width, height = image.size[0], image.size[1]
    APP['window'] = Tkinter.Tk()
    APP['frame'] = Tkinter.Frame(APP['window'])
    # Keep a local reference to the PhotoImage: Tk only holds a weak link,
    # and this frame stays alive for the duration of mainloop().
    image_tk = ImageTk.PhotoImage(image)
    APP['canvas'] = Tkinter.Canvas(APP['frame'], width=width, height=height)
    APP['canvas'].create_image(width // 2, height // 2, image=image_tk)
    # Bug fix: 'h' previously stored width, skewing findClosest's diagonal
    # and RandomPoint's vertical range.
    APP['dims'] = {'w': width, 'h': height}
    APP['buttons'] = getButtons()
    APP['rectangles'] = []
    APP['points'] = []
    APP['canvas_list'] = []
    APP['frame'].pack()
    APP['canvas'].pack()
    APP['buttons']['reset_btn'].pack(side='right')
    APP['canvas'].bind("<Button-1>", handle_click)
    APP['window'].mainloop()
def getButtons():
    """ Returns dict of buttons; will be added to app object"""
    # Buttons are created unpacked; pack()/pack_forget() toggles visibility.
    buttons = {'log_btn': Tkinter.Button(APP['frame'], text="Log", command=log),
               'done_btn': Tkinter.Button(APP['frame'], text="Done", command=done),
               'reset_btn': Tkinter.Button(APP['frame'], text="Reset", command=reset)}
    return buttons
def draw_point(p, color):
    """ draws a point at the coordinates with the specified color """
    global APP
    radius = 5 # point radius
    new_canvas = APP['canvas'].create_oval(
        p.x - radius, p.y - radius, p.x + radius, p.y + radius, fill=color)
    # Track drawn items so reset() can delete them later.
    APP['points'].append(p)
    APP['canvas_list'].append(new_canvas)
def draw_rectangle(rectangle, outline_color):
    """ draws a rectangle at the coordinates with the specified color """
    global APP
    corners = rectangle.corners()
    p1 = corners[0] # top-left corner
    p2 = corners[2] # bottom-right corner
    new_canvas = APP['canvas'].create_rectangle(
        p1.x, p1.y, p2.x, p2.y, outline=outline_color, width=2)
    APP['rectangles'].append(rectangle)
    APP['canvas_list'].append(new_canvas)
def handle_click(click):
    """ Adds a point to the canvas; if there are enough points, allows logging """
    global APP
    point = Point(click.x, click.y)
    num_points = len(APP['points']) + 1
    if num_points > NUM_POINTS:
        # Too many corners selected: wipe the selection and hide Log.
        reset()
        APP['buttons']['log_btn'].pack_forget()
    elif num_points == NUM_POINTS:
        # Enough corners for a rectangle: reveal the Log button.
        APP['buttons']['log_btn'].pack(side='left')
    draw_point(point, 'blue')
def log():
    """Turn the collected points into a rectangle and draw it.

    Confirmation is currently hard-wired to 'Y'; the interactive
    prompt is left commented out for reference.
    """
    global APP
    APP['canvas'].unbind("<Button-1>")
    rectangle = Rectangle(APP['points'])
    reset()
    # command = raw_input("Confirm Points? [Y/N]")
    command = 'Y'
    if command.upper() != 'Y':
        # Rejected: discard the markers and hide the Log button again.
        reset()
        APP['buttons']['log_btn'].pack_forget()
    else:
        draw_rectangle(rectangle, 'red')
        APP['buttons']['done_btn'].pack(side='right')
        APP['points'] = []
        APP['canvas_list'] = []
    # Re-enable clicks so further rectangles can be placed.
    APP['canvas'].bind("<Button-1>", handle_click)
def done():
    """Run the algorithm on the logged rectangles.

    Generates random test points inside the logged rectangles, draws
    them in green, and (outside debug mode) saves each one under the
    current floor id.
    """
    global FID  # hoisted: global declarations are function-wide anyway
    APP['buttons']['done_btn'].pack_forget()
    APP['buttons']['log_btn'].pack_forget()
    # Hoisted out of the loop: this is loop-invariant, and the original
    # only hid the Reset button if at least one point was generated.
    APP['buttons']['reset_btn'].pack_forget()
    APP['canvas'].unbind("<Button-1>")
    test_points = getRandomPoints()
    c = counter()
    for point in test_points:
        draw_point(point, 'green')
        if not debug:
            # Persist the generated point under the current floor id.
            point.save(FID, c)
def reset():
    """Remove all un-logged markers from the canvas and forget them."""
    global APP
    canvas = APP['canvas']
    while APP['canvas_list']:
        canvas.delete(APP['canvas_list'].pop())
    APP['points'] = []
    APP['canvas_list'] = []
def main(argv, debug):
    """Entry point: parse CLI arguments and launch the annotation UI.

    :param argv: sys.argv-style list: [script, filename, username, password]
    :param debug: when True, skip server setup and point persistence
    """
    if len(argv) != 4:
        # Parenthesized print so this line is valid under both
        # Python 2 and Python 3 (the original used a py2-only statement).
        print("Usage: python build_locations filename username password")
        exit(1)
    filename = argv[1]
    username = argv[2]
    password = argv[3]
    if not debug:
        global FID
        global FLOOR_NAME
        global SERVER
        SERVER = ServerInterface(username, password)
        FID, FLOOR_NAME = get_floor_info(filename)
    image_path = argv[1]
    initializeApp(image_path)
if __name__ == '__main__':
    # Run in non-debug mode by default (server setup + persistence on).
    debug = False
    # NOTE(review): `argv` is not defined anywhere in this chunk --
    # presumably `from sys import argv` appears earlier in the file;
    # verify, otherwise this raises NameError at startup.
    main(argv, debug)
| |
# Copyright 2014-2016 Insight Software Consortium.
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
#
# The initial version of the directory_cache_t class was written
# by Matthias Baas (baas@ira.uka.de).
"""
directory cache implementation.
This module contains the implementation of a cache that uses individual
files, stored in a dedicated cache directory, to store the cached contents.
The :class:`parser.directory_cache_t` class instance could be passed as the
`cache` argument of the :func:`parser.parse` function.
"""
import os
import os.path
import gzip
import hashlib
try:
import cPickle as pickle
except ImportError:
import pickle
from . import declarations_cache
class index_entry_t(object):
    """One record of the directory-cache index table.

    Every cached header file (i.e. every .cache file on disk) is
    described by one instance; it carries the information needed to
    decide whether the cached declarations are still up to date.

    Helper class for directory_cache_t.
    """

    def __init__(self, filesigs, configsig):
        """
        :param filesigs: a list of tuples( `fileid`, `sig`)...
        :param configsig: the signature of the configuration object.
        """
        self.filesigs = filesigs
        self.configsig = configsig

    def __getstate__(self):
        # Pickle as a plain tuple to keep the on-disk index compact.
        return self.filesigs, self.configsig

    def __setstate__(self, state):
        (self.filesigs, self.configsig) = state
class directory_cache_t (declarations_cache.cache_base_t):

    """cache class that stores its data as multiple files inside a directory.

    The cache stores one index file called `index.dat` which is always
    read by the cache when the cache object is created. Each header file
    will have its corresponding .cache file that stores the declarations
    found in the header file. The index file is used to determine whether
    a .cache file is still valid or not (by checking if one of the dependent
    files (i.e. the header file itself and all included files) have been
    modified since the last run).
    """

    def __init__(self, dir="cache", compression=False, sha1_sigs=True):
        """
        :param dir: cache directory path, it is created, if it does not exist
        :param compression: if `True`, the cache files will be compressed
                            using `gzip`
        :param sha1_sigs: `sha1_sigs` determines whether file modifications is
                          checked by computing a `sha1` digest or by checking
                          the modification date
        """
        declarations_cache.cache_base_t.__init__(self)
        # Cache directory (always stored as an absolute path)
        self.__dir = os.path.abspath(dir)
        # Flag that determines whether the cache files will be compressed
        self.__compression = compression
        # Flag that determines whether the signature is a sha1 digest or
        # the modification time
        # (this flag is passed to the filename_repository_t class)
        self.__sha1_sigs = sha1_sigs
        # Filename repository: maps file names to ids and tracks signatures
        self.__filename_rep = filename_repository_t(self.__sha1_sigs)
        # Index dictionary (Key is the value returned by _create_cache_key()
        # (which is based on the header file name) and value is an
        # index_entry_t object)
        self.__index = {}
        # Flag that indicates whether the index was modified since the
        # last save; _save() is a no-op while this is False
        self.__modified_flag = False
        # Check if dir refers to an existing file...
        if os.path.isfile(self.__dir):
            raise ValueError((
                "Cannot use %s as cache directory. There is already a file " +
                "with that name.") % self.__dir)
        # Load the cache or create the cache directory...
        if os.path.isdir(self.__dir):
            self._load()
        else:
            # Create the cache directory...
            os.mkdir(self.__dir)

    def flush(self):
        """Save the index table to disk."""
        self._save()

    def update(self, source_file, configuration, declarations, included_files):
        """Replace a cache entry by a new value.

        :param source_file: a C++ source file name.
        :type source_file: str
        :param configuration: configuration object.
        :type configuration: :class:`xml_generator_configuration_t`
        :param declarations: declarations contained in the `source_file`
        :type declarations: pickable object
        :param included_files: included files
        :type included_files: list of str
        """
        # Normalize all paths...
        source_file = os.path.normpath(source_file)
        included_files = [os.path.normpath(p) for p in included_files]
        # Create the list of dependent files. This is the included_files list
        # + the source file. Duplicate names are removed.
        dependent_files = {}
        for name in [source_file] + included_files:
            dependent_files[name] = 1
        dependent_files = list(dependent_files.keys())
        key = self._create_cache_key(source_file)
        # Remove an existing entry (if there is one)
        # After calling this method, it is guaranteed that __index[key]
        # does not exist anymore.
        self._remove_entry(source_file, key)
        # Create a new entry...
        # Create the sigs of all dependent files...
        filesigs = []
        for filename in dependent_files:
            id_, sig = self.__filename_rep.acquire_filename(filename)
            filesigs.append((id_, sig))
        configsig = self._create_config_signature(configuration)
        entry = index_entry_t(filesigs, configsig)
        self.__index[key] = entry
        self.__modified_flag = True
        # Write the declarations into the cache file...
        cachefilename = self._create_cache_filename(source_file)
        self._write_file(cachefilename, declarations)

    def cached_value(self, source_file, configuration):
        """Return the cached declarations or None.

        :param source_file: Header file name
        :type source_file: str
        :param configuration: Configuration object
        :type configuration: :class:`parser.xml_generator_configuration_t`
        :rtype: Cached declarations or None
        """
        # Check if the cache contains an entry for source_file
        key = self._create_cache_key(source_file)
        entry = self.__index.get(key)
        if entry is None:
            # print "CACHE: %s: Not cached"%source_file
            return None
        # Check if the entry is still valid. It is not valid if:
        # - the source_file has been updated
        # - the configuration object has changed (i.e. the header is parsed
        #   by gccxml with different settings which may influence the
        #   declarations)
        # - the included files have been updated
        #   (this list is part of the cache entry as it cannot be known
        #   by the caller when cached_value() is called. It was instead
        #   passed to update())
        # Check if the config is different...
        configsig = self._create_config_signature(configuration)
        if configsig != entry.configsig:
            # print "CACHE: %s: Config mismatch"%source_file
            return None
        # Check if any of the dependent files has been modified...
        for id_, sig in entry.filesigs:
            if self.__filename_rep.is_file_modified(id_, sig):
                # print "CACHE: %s: Entry not up to date"%source_file
                return None
        # Load and return the cached declarations
        cachefilename = self._create_cache_filename(source_file)
        decls = self._read_file(cachefilename)
        # print "CACHE: Using cached decls for",source_file
        return decls

    def _load(self):
        """Load the cache.

        Loads the `index.dat` file, which contains the index table and the
        file name repository.

        This method is called by the :meth:`__init__`
        """
        indexfilename = os.path.join(self.__dir, "index.dat")
        if os.path.exists(indexfilename):
            # index.dat pickles a (index, filename_repository) pair
            data = self._read_file(indexfilename)
            self.__index = data[0]
            self.__filename_rep = data[1]
            if self.__filename_rep._sha1_sigs != self.__sha1_sigs:
                # The on-disk setting wins over the constructor argument
                print((
                    "CACHE: Warning: sha1_sigs stored in the cache is set " +
                    "to %s.") % self.__filename_rep._sha1_sigs)
                print("Please remove the cache to change this setting.")
                self.__sha1_sigs = self.__filename_rep._sha1_sigs
        else:
            # No index yet: start with an empty cache
            self.__index = {}
            self.__filename_rep = filename_repository_t(self.__sha1_sigs)
        self.__modified_flag = False

    def _save(self):
        """
        save the cache index, in case it was modified.

        Saves the index table and the file name repository in the file
        `index.dat`
        """
        if self.__modified_flag:
            self.__filename_rep.update_id_counter()
            indexfilename = os.path.join(self.__dir, "index.dat")
            self._write_file(
                indexfilename,
                (self.__index,
                 self.__filename_rep))
            self.__modified_flag = False

    def _read_file(self, filename):
        """
        read a Python object from a cache file.

        Reads a pickled object from disk and returns it.

        :param filename: Name of the file that should be read.
        :type filename: str
        :rtype: object
        """
        # NOTE(review): unpickling is only safe because the cache files
        # are produced by this class itself; never point the cache at
        # untrusted data.
        if self.__compression:
            f = gzip.GzipFile(filename, "rb")
        else:
            f = open(filename, "rb")
        res = pickle.load(f)
        f.close()
        return res

    def _write_file(self, filename, data):
        """Write a data item into a file.

        The data object is written to a file using the pickle mechanism.

        :param filename: Output file name
        :type filename: str
        :param data: A Python object that will be pickled
        """
        if self.__compression:
            f = gzip.GzipFile(filename, "wb")
        else:
            f = open(filename, "wb")
        pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
        f.close()

    def _remove_entry(self, source_file, key):
        """Remove an entry from the cache.

        source_file is the name of the header and key is its corresponding
        cache key (obtained by a call to :meth:_create_cache_key ).
        The entry is removed from the index table, any referenced file
        name is released and the cache file is deleted.

        If key references a non-existing entry, the method returns
        immediately.

        :param source_file: Header file name
        :type source_file: str
        :param key: Key value for the specified header file
        :type key: hash table object
        """
        entry = self.__index.get(key)
        if entry is None:
            return
        # Release the referenced files...
        for id_, sig in entry.filesigs:
            self.__filename_rep.release_filename(id_)
        # Remove the cache entry...
        del self.__index[key]
        self.__modified_flag = True
        # Delete the corresponding cache file...
        cachefilename = self._create_cache_filename(source_file)
        try:
            os.remove(cachefilename)
        except OSError as e:
            # Best effort: a missing cache file is not fatal
            print("Could not remove cache file (%s)" % e)

    @staticmethod
    def _create_cache_key(source_file):
        """
        return the cache key for a header file.

        The key combines the base name with a hash of the directory so
        that identically named headers in different directories do not
        collide.

        :param source_file: Header file name
        :type source_file: str
        :rtype: str
        """
        path, name = os.path.split(source_file)
        return name + str(hash(path))

    def _create_cache_filename(self, source_file):
        """
        return the cache file name for a header file.

        :param source_file: Header file name
        :type source_file: str
        :rtype: str
        """
        res = self._create_cache_key(source_file) + ".cache"
        return os.path.join(self.__dir, res)

    @staticmethod
    def _create_config_signature(config):
        """
        return the signature for a config object.

        The signature is computed as sha1 digest of the contents of
        working_directory, include_paths, define_symbols and
        undefine_symbols.

        :param config: Configuration object
        :type config: :class:`parser.xml_generator_configuration_t`
        :rtype: str
        """
        # NOTE(review): hashlib's update() requires bytes under Python 3;
        # these config attributes look like str here -- confirm encoding
        # before porting this module.
        m = hashlib.sha1()
        m.update(config.working_directory)
        for p in config.include_paths:
            m.update(p)
        for p in config.define_symbols:
            m.update(p)
        for p in config.undefine_symbols:
            m.update(p)
        for p in config.cflags:
            m.update(p)
        return m.digest()
class filename_entry_t(object):
    """Reference-counted file-name record used by filename_repository_t.

    Bundles a file name with its reference count and a lazily cached
    signature. Only the name and the count survive pickling; the cached
    signature is recomputed on demand after loading.
    """

    def __init__(self, filename):
        # The stored file name.
        self.filename = filename
        # How many cache entries currently reference this file;
        # starts at zero, callers bump it via inc_ref_count().
        self.refcount = 0
        # Cached signature for the file. It is only meaningful while
        # sig_valid is True; both attributes are deliberately excluded
        # from the pickled state.
        self.sig_valid = False
        self.signature = None

    def __getstate__(self):
        # Persist only the name and the reference count.
        return self.filename, self.refcount

    def __setstate__(self, state):
        (self.filename, self.refcount) = state
        # Force the signature to be recomputed after unpickling.
        self.sig_valid = False
        self.signature = None

    def inc_ref_count(self):
        """Bump the reference count by one."""
        self.refcount += 1

    def dec_ref_count(self):
        """Drop the reference count by one and return the new value."""
        self.refcount -= 1
        return self.refcount
class filename_repository_t(object):
    """Registry of file names with modification tracking.

    Every stored name receives a numeric id and a signature (a sha1
    digest of the file contents, or the modification time, depending on
    `sha1_sigs`). acquire_filename() registers a name and returns its
    (id, signature) pair; is_file_modified() compares the file against a
    previously obtained signature; release_filename() drops a reference
    so unused entries can be removed.
    """

    def __init__(self, sha1_sigs):
        """
        :param sha1_sigs: if True signatures are sha1 digests of file
                          contents, otherwise file modification times
        """
        self._sha1_sigs = sha1_sigs
        # filename -> id lookup table.
        self.__id_lut = {}
        # id -> filename_entry_t; the authoritative store. Invariant:
        # the keys of __id_lut are exactly the names stored in __entries.
        self.__entries = {}
        # Next id to hand out.
        self.__next_id = 1

    def acquire_filename(self, name):
        """Register `name` and return its (id, signature) pair."""
        id_ = self.__id_lut.get(name)
        if id_ is None:
            # First time this name is seen: allocate an id and an entry.
            id_ = self.__next_id
            self.__next_id += 1
            self.__id_lut[name] = id_
            self.__entries[id_] = entry = filename_entry_t(name)
        else:
            entry = self.__entries[id_]
        entry.inc_ref_count()
        return id_, self._get_signature(entry)

    def release_filename(self, id_):
        """Drop one reference to `id_`, removing the entry at zero."""
        entry = self.__entries.get(id_)
        if entry is None:
            raise ValueError("Invalid filename id (%d)" % id_)
        if entry.dec_ref_count() == 0:
            # Nobody references this file any more: purge both tables.
            del self.__id_lut[entry.filename]
            del self.__entries[id_]

    def is_file_modified(self, id_, signature):
        """Return True if the file behind `id_` no longer matches
        `signature`.
        """
        entry = self.__entries.get(id_)
        if entry is None:
            raise ValueError("Invalid filename id_ (%d)" % id_)
        if not entry.sig_valid:
            # Compute the current signature once and memoize it.
            entry.signature = self._get_signature(entry)
            entry.sig_valid = True
        return entry.signature != signature

    def update_id_counter(self):
        """Keep the id counter from growing without bound."""
        if self.__entries:
            self.__next_id = max(self.__entries.keys()) + 1
        else:
            self.__next_id = 1

    def _get_signature(self, entry):
        """Compute the signature (digest or mtime) of `entry`'s file.

        Returns None when the file is missing or unreadable.
        """
        if not self._sha1_sigs:
            # Modification-time mode.
            try:
                return os.path.getmtime(entry.filename)
            except OSError:
                return None
        # Digest mode: sha1 over the whole file content.
        if not os.path.exists(entry.filename):
            return None
        try:
            f = open(entry.filename)
        except IOError as e:
            print("Cannot determine sha1 digest:", e)
            return None
        data = f.read()
        f.close()
        return hashlib.sha1(data).digest()

    def _dump(self):
        """Print the repository contents (debugging/testing helper)."""
        divider = 70 * "-"
        print(divider)
        print("ID lookup table:")
        for name in self.__id_lut:
            id_ = self.__id_lut[name]
            print(" %s -> %d" % (name, id_))
        print(divider)
        print("%-4s %-60s %s" % ("ID", "Filename", "Refcount"))
        print(divider)
        for id_ in self.__entries:
            entry = self.__entries[id_]
            print("%04d %-60s %d" % (id_, entry.filename, entry.refcount))
| |
from __future__ import absolute_import, unicode_literals
import unittest
from mopidy.core import PlaybackState
from mopidy.models import Track
from tests.mpd import protocol
PAUSED = PlaybackState.PAUSED
PLAYING = PlaybackState.PLAYING
STOPPED = PlaybackState.STOPPED
class PlaybackOptionsHandlerTest(protocol.BaseTestCase):

    """Protocol-level tests for MPD playback option commands.

    Exercises the consume/random/repeat/single toggles (with and without
    MPD-style quoting of the argument), setvol clamping at 0 and 100,
    and the crossfade / replay-gain commands, which are unimplemented
    and expected to answer with an ACK.
    """

    # --- consume ---

    def test_consume_off(self):
        self.send_request('consume "0"')
        self.assertFalse(self.core.tracklist.consume.get())
        self.assertInResponse('OK')

    def test_consume_off_without_quotes(self):
        self.send_request('consume 0')
        self.assertFalse(self.core.tracklist.consume.get())
        self.assertInResponse('OK')

    def test_consume_on(self):
        self.send_request('consume "1"')
        self.assertTrue(self.core.tracklist.consume.get())
        self.assertInResponse('OK')

    def test_consume_on_without_quotes(self):
        self.send_request('consume 1')
        self.assertTrue(self.core.tracklist.consume.get())
        self.assertInResponse('OK')

    # --- crossfade (not implemented) ---

    def test_crossfade(self):
        self.send_request('crossfade "10"')
        self.assertInResponse('ACK [0@0] {crossfade} Not implemented')

    # --- random ---

    def test_random_off(self):
        self.send_request('random "0"')
        self.assertFalse(self.core.tracklist.random.get())
        self.assertInResponse('OK')

    def test_random_off_without_quotes(self):
        self.send_request('random 0')
        self.assertFalse(self.core.tracklist.random.get())
        self.assertInResponse('OK')

    def test_random_on(self):
        self.send_request('random "1"')
        self.assertTrue(self.core.tracklist.random.get())
        self.assertInResponse('OK')

    def test_random_on_without_quotes(self):
        self.send_request('random 1')
        self.assertTrue(self.core.tracklist.random.get())
        self.assertInResponse('OK')

    # --- repeat ---

    def test_repeat_off(self):
        self.send_request('repeat "0"')
        self.assertFalse(self.core.tracklist.repeat.get())
        self.assertInResponse('OK')

    def test_repeat_off_without_quotes(self):
        self.send_request('repeat 0')
        self.assertFalse(self.core.tracklist.repeat.get())
        self.assertInResponse('OK')

    def test_repeat_on(self):
        self.send_request('repeat "1"')
        self.assertTrue(self.core.tracklist.repeat.get())
        self.assertInResponse('OK')

    def test_repeat_on_without_quotes(self):
        self.send_request('repeat 1')
        self.assertTrue(self.core.tracklist.repeat.get())
        self.assertInResponse('OK')

    # --- setvol: volume is clamped to the range [0, 100] ---

    def test_setvol_below_min(self):
        self.send_request('setvol "-10"')
        self.assertEqual(0, self.core.playback.volume.get())
        self.assertInResponse('OK')

    def test_setvol_min(self):
        self.send_request('setvol "0"')
        self.assertEqual(0, self.core.playback.volume.get())
        self.assertInResponse('OK')

    def test_setvol_middle(self):
        self.send_request('setvol "50"')
        self.assertEqual(50, self.core.playback.volume.get())
        self.assertInResponse('OK')

    def test_setvol_max(self):
        self.send_request('setvol "100"')
        self.assertEqual(100, self.core.playback.volume.get())
        self.assertInResponse('OK')

    def test_setvol_above_max(self):
        self.send_request('setvol "110"')
        self.assertEqual(100, self.core.playback.volume.get())
        self.assertInResponse('OK')

    def test_setvol_plus_is_ignored(self):
        # A leading '+' is not a relative adjustment; "+10" sets 10.
        self.send_request('setvol "+10"')
        self.assertEqual(10, self.core.playback.volume.get())
        self.assertInResponse('OK')

    def test_setvol_without_quotes(self):
        self.send_request('setvol 50')
        self.assertEqual(50, self.core.playback.volume.get())
        self.assertInResponse('OK')

    # --- single ---

    def test_single_off(self):
        self.send_request('single "0"')
        self.assertFalse(self.core.tracklist.single.get())
        self.assertInResponse('OK')

    def test_single_off_without_quotes(self):
        self.send_request('single 0')
        self.assertFalse(self.core.tracklist.single.get())
        self.assertInResponse('OK')

    def test_single_on(self):
        self.send_request('single "1"')
        self.assertTrue(self.core.tracklist.single.get())
        self.assertInResponse('OK')

    def test_single_on_without_quotes(self):
        self.send_request('single 1')
        self.assertTrue(self.core.tracklist.single.get())
        self.assertInResponse('OK')

    # --- replay gain (not implemented, except status default) ---

    def test_replay_gain_mode_off(self):
        self.send_request('replay_gain_mode "off"')
        self.assertInResponse('ACK [0@0] {replay_gain_mode} Not implemented')

    def test_replay_gain_mode_track(self):
        self.send_request('replay_gain_mode "track"')
        self.assertInResponse('ACK [0@0] {replay_gain_mode} Not implemented')

    def test_replay_gain_mode_album(self):
        self.send_request('replay_gain_mode "album"')
        self.assertInResponse('ACK [0@0] {replay_gain_mode} Not implemented')

    def test_replay_gain_status_default(self):
        self.send_request('replay_gain_status')
        self.assertInResponse('OK')
        self.assertInResponse('off')

    # NOTE(review): decorating with the SkipTest exception class is
    # unconventional -- @unittest.skip("...") is the standard way to
    # mark placeholders; left as-is to preserve behavior.
    @unittest.SkipTest
    def test_replay_gain_status_off(self):
        pass

    @unittest.SkipTest
    def test_replay_gain_status_track(self):
        pass

    @unittest.SkipTest
    def test_replay_gain_status_album(self):
        pass
class PlaybackControlHandlerTest(protocol.BaseTestCase):

    """Protocol-level tests for MPD playback control commands.

    Covers next/previous/stop, pause (explicit and toggle), play and
    playid (including the special "-1" argument), and the seek family
    (seek/seekid/seekcur with absolute and relative positions).

    Fixes relative to the original: `assertEquals` (deprecated alias)
    is unified to `assertEqual`, and `test_seek_in_current_track` now
    resolves `time_position` with `.get()` before comparing (the
    original compared the unresolved future object to an int).
    """

    def test_next(self):
        self.send_request('next')
        self.assertInResponse('OK')

    def test_pause_off(self):
        self.core.tracklist.add([Track(uri='dummy:a')])
        self.send_request('play "0"')
        self.send_request('pause "1"')
        self.send_request('pause "0"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertInResponse('OK')

    def test_pause_on(self):
        self.core.tracklist.add([Track(uri='dummy:a')])
        self.send_request('play "0"')
        self.send_request('pause "1"')
        self.assertEqual(PAUSED, self.core.playback.state.get())
        self.assertInResponse('OK')

    def test_pause_toggle(self):
        self.core.tracklist.add([Track(uri='dummy:a')])
        self.send_request('play "0"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertInResponse('OK')
        self.send_request('pause')
        self.assertEqual(PAUSED, self.core.playback.state.get())
        self.assertInResponse('OK')
        self.send_request('pause')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertInResponse('OK')

    def test_play_without_pos(self):
        self.core.tracklist.add([Track(uri='dummy:a')])
        self.send_request('play')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertInResponse('OK')

    def test_play_with_pos(self):
        self.core.tracklist.add([Track(uri='dummy:a')])
        self.send_request('play "0"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertInResponse('OK')

    def test_play_with_pos_without_quotes(self):
        self.core.tracklist.add([Track(uri='dummy:a')])
        self.send_request('play 0')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertInResponse('OK')

    def test_play_with_pos_out_of_bounds(self):
        self.core.tracklist.add([])
        self.send_request('play "0"')
        self.assertEqual(STOPPED, self.core.playback.state.get())
        self.assertInResponse('ACK [2@0] {play} Bad song index')

    def test_play_minus_one_plays_first_in_playlist_if_no_current_track(self):
        self.assertEqual(self.core.playback.current_track.get(), None)
        self.core.tracklist.add([Track(uri='dummy:a'), Track(uri='dummy:b')])
        self.send_request('play "-1"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertEqual(
            'dummy:a', self.core.playback.current_track.get().uri)
        self.assertInResponse('OK')

    def test_play_minus_one_plays_current_track_if_current_track_is_set(self):
        self.core.tracklist.add([Track(uri='dummy:a'), Track(uri='dummy:b')])
        self.assertEqual(self.core.playback.current_track.get(), None)
        self.core.playback.play()
        self.core.playback.next()
        self.core.playback.stop()
        self.assertNotEqual(self.core.playback.current_track.get(), None)
        self.send_request('play "-1"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertEqual(
            'dummy:b', self.core.playback.current_track.get().uri)
        self.assertInResponse('OK')

    def test_play_minus_one_on_empty_playlist_does_not_ack(self):
        self.core.tracklist.clear()
        self.send_request('play "-1"')
        self.assertEqual(STOPPED, self.core.playback.state.get())
        self.assertEqual(None, self.core.playback.current_track.get())
        self.assertInResponse('OK')

    def test_play_minus_is_ignored_if_playing(self):
        self.core.tracklist.add([Track(uri='dummy:a', length=40000)])
        self.core.playback.seek(30000)
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.send_request('play "-1"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertInResponse('OK')

    def test_play_minus_one_resumes_if_paused(self):
        self.core.tracklist.add([Track(uri='dummy:a', length=40000)])
        self.core.playback.seek(30000)
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.core.playback.pause()
        self.assertEqual(PAUSED, self.core.playback.state.get())
        self.send_request('play "-1"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertInResponse('OK')

    def test_playid(self):
        self.core.tracklist.add([Track(uri='dummy:a')])
        self.send_request('playid "0"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertInResponse('OK')

    def test_playid_without_quotes(self):
        self.core.tracklist.add([Track(uri='dummy:a')])
        self.send_request('playid 0')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertInResponse('OK')

    def test_playid_minus_1_plays_first_in_playlist_if_no_current_track(self):
        self.assertEqual(self.core.playback.current_track.get(), None)
        self.core.tracklist.add([Track(uri='dummy:a'), Track(uri='dummy:b')])
        self.send_request('playid "-1"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertEqual(
            'dummy:a', self.core.playback.current_track.get().uri)
        self.assertInResponse('OK')

    def test_playid_minus_1_plays_current_track_if_current_track_is_set(self):
        self.core.tracklist.add([Track(uri='dummy:a'), Track(uri='dummy:b')])
        self.assertEqual(self.core.playback.current_track.get(), None)
        self.core.playback.play()
        self.core.playback.next()
        self.core.playback.stop()
        self.assertNotEqual(None, self.core.playback.current_track.get())
        self.send_request('playid "-1"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertEqual(
            'dummy:b', self.core.playback.current_track.get().uri)
        self.assertInResponse('OK')

    def test_playid_minus_one_on_empty_playlist_does_not_ack(self):
        self.core.tracklist.clear()
        self.send_request('playid "-1"')
        self.assertEqual(STOPPED, self.core.playback.state.get())
        self.assertEqual(None, self.core.playback.current_track.get())
        self.assertInResponse('OK')

    def test_playid_minus_is_ignored_if_playing(self):
        self.core.tracklist.add([Track(uri='dummy:a', length=40000)])
        self.core.playback.seek(30000)
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.send_request('playid "-1"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertInResponse('OK')

    def test_playid_minus_one_resumes_if_paused(self):
        self.core.tracklist.add([Track(uri='dummy:a', length=40000)])
        self.core.playback.seek(30000)
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.core.playback.pause()
        self.assertEqual(PAUSED, self.core.playback.state.get())
        self.send_request('playid "-1"')
        self.assertEqual(PLAYING, self.core.playback.state.get())
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertInResponse('OK')

    def test_playid_which_does_not_exist(self):
        self.core.tracklist.add([Track(uri='dummy:a')])
        self.send_request('playid "12345"')
        self.assertInResponse('ACK [50@0] {playid} No such song')

    def test_previous(self):
        self.send_request('previous')
        self.assertInResponse('OK')

    def test_seek_in_current_track(self):
        seek_track = Track(uri='dummy:a', length=40000)
        self.core.tracklist.add([seek_track])
        self.core.playback.play()
        self.send_request('seek "0" "30"')
        self.assertEqual(self.core.playback.current_track.get(), seek_track)
        # Fixed: resolve the future with .get() -- the original compared
        # the future object itself to an int.
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertInResponse('OK')

    def test_seek_in_another_track(self):
        seek_track = Track(uri='dummy:b', length=40000)
        self.core.tracklist.add(
            [Track(uri='dummy:a', length=40000), seek_track])
        self.core.playback.play()
        self.assertNotEqual(self.core.playback.current_track.get(), seek_track)
        self.send_request('seek "1" "30"')
        self.assertEqual(self.core.playback.current_track.get(), seek_track)
        self.assertInResponse('OK')

    def test_seek_without_quotes(self):
        self.core.tracklist.add([Track(uri='dummy:a', length=40000)])
        self.core.playback.play()
        self.send_request('seek 0 30')
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertInResponse('OK')

    def test_seekid_in_current_track(self):
        seek_track = Track(uri='dummy:a', length=40000)
        self.core.tracklist.add([seek_track])
        self.core.playback.play()
        self.send_request('seekid "0" "30"')
        self.assertEqual(self.core.playback.current_track.get(), seek_track)
        self.assertGreaterEqual(
            self.core.playback.time_position.get(), 30000)
        self.assertInResponse('OK')

    def test_seekid_in_another_track(self):
        seek_track = Track(uri='dummy:b', length=40000)
        self.core.tracklist.add(
            [Track(uri='dummy:a', length=40000), seek_track])
        self.core.playback.play()
        self.send_request('seekid "1" "30"')
        self.assertEqual(1, self.core.playback.current_tl_track.get().tlid)
        self.assertEqual(seek_track, self.core.playback.current_track.get())
        self.assertInResponse('OK')

    def test_seekcur_absolute_value(self):
        self.core.tracklist.add([Track(uri='dummy:a', length=40000)])
        self.core.playback.play()
        self.send_request('seekcur "30"')
        self.assertGreaterEqual(self.core.playback.time_position.get(), 30000)
        self.assertInResponse('OK')

    def test_seekcur_positive_diff(self):
        self.core.tracklist.add([Track(uri='dummy:a', length=40000)])
        self.core.playback.play()
        self.core.playback.seek(10000)
        self.assertGreaterEqual(self.core.playback.time_position.get(), 10000)
        self.send_request('seekcur "+20"')
        self.assertGreaterEqual(self.core.playback.time_position.get(), 30000)
        self.assertInResponse('OK')

    def test_seekcur_negative_diff(self):
        self.core.tracklist.add([Track(uri='dummy:a', length=40000)])
        self.core.playback.play()
        self.core.playback.seek(30000)
        self.assertGreaterEqual(self.core.playback.time_position.get(), 30000)
        self.send_request('seekcur "-20"')
        self.assertLessEqual(self.core.playback.time_position.get(), 15000)
        self.assertInResponse('OK')

    def test_stop(self):
        self.send_request('stop')
        self.assertEqual(STOPPED, self.core.playback.state.get())
        self.assertInResponse('OK')
| |
##########################################################################
#
# Copyright (c) 2008-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import os.path
import IECore
import IECoreGL
IECoreGL.init( False )
class TestSelection( unittest.TestCase ) :
    """Tests for IECoreGL scene selection.

    Each test builds a deferred-mode scene of named primitives, then queries
    Scene.select() with a normalized screen-space Box2f and checks which
    names come back.  Three selection modes are exercised: GLSelect,
    IDRender, and OcclusionQuery.
    """

    def testSelect( self ) :
        """All three spheres fall inside a full-screen selection box."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.setAttribute( "name", IECore.StringData( "one" ) )
            r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 1, 0, 0 ) ) } )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( -1, 0, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "two" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 2, 0, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "three" ) )
            r.geometry( "sphere", {}, {} )
        s = r.scene()
        s.setCamera( IECoreGL.PerspectiveCamera() )
        ss = s.select( IECoreGL.Selector.Mode.GLSelect, IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 1 ) ) )
        names = [ IECoreGL.NameStateComponent.nameFromGLName( x.name ) for x in ss ]
        self.assertEqual( len( names ), 3 )
        self.assert_( "one" in names )
        self.assert_( "two" in names )
        self.assert_( "three" in names )

    def testRegionSelect( self ) :
        """A quarter-screen selection box hits only the sphere in that quadrant."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 1, 0, 0 ) ) } )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( -2, -2, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "red" ) )
            r.geometry( "sphere", {}, {} )
            r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 0, 1, 0 ) ) } )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 4, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "green" ) )
            r.geometry( "sphere", {}, {} )
            r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 0, 0, 1 ) ) } )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 4, 0, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "blue" ) )
            r.geometry( "sphere", {}, {} )
            r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 1, 1, 1 ) ) } )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, -4, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "white" ) )
            r.geometry( "sphere", {}, {} )
        s = r.scene()
        s.setCamera( IECoreGL.PerspectiveCamera() )
        # Box2f coordinates are normalized raster space: y increases downwards,
        # so the (0, 0.5)-(0.5, 1) box is the bottom-left quadrant.
        ss = s.select( IECoreGL.Selector.Mode.GLSelect, IECore.Box2f( IECore.V2f( 0, 0.5 ), IECore.V2f( 0.5, 1 ) ) )
        self.assertEqual( len( ss ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( ss[0].name ), "red" )
        ss = s.select( IECoreGL.Selector.Mode.GLSelect, IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 0.5 ) ) )
        self.assertEqual( len( ss ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( ss[0].name ), "green" )
        ss = s.select( IECoreGL.Selector.Mode.GLSelect, IECore.Box2f( IECore.V2f( 0.5, 0 ), IECore.V2f( 1, 0.5 ) ) )
        self.assertEqual( len( ss ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( ss[0].name ), "blue" )
        ss = s.select( IECoreGL.Selector.Mode.GLSelect, IECore.Box2f( IECore.V2f( 0.5 ), IECore.V2f( 1 ) ) )
        self.assertEqual( len( ss ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( ss[0].name ), "white" )

    def testIDSelect( self ) :
        """IDRender mode reports only the frontmost (visible) object per pixel."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( -1, 0, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "frontLeft" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -1 ) ) )
            r.setAttribute( "name", IECore.StringData( "backLeft" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 2, 0, 1 ) ) )
            r.setAttribute( "name", IECore.StringData( "frontRight" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -1 ) ) )
            r.setAttribute( "name", IECore.StringData( "backRight" ) )
            r.geometry( "sphere", {}, {} )
        s = r.scene()
        s.setCamera( IECoreGL.OrthographicCamera() )
        ss = s.select( IECoreGL.Selector.Mode.IDRender, IECore.Box2f( IECore.V2f( 0.25, 0.5 ), IECore.V2f( 0.26, 0.51 ) ) )
        self.assertEqual( len( ss ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( ss[0].name ), "frontLeft" )
        ss = s.select( IECoreGL.Selector.Mode.IDRender, IECore.Box2f( IECore.V2f( 0.75, 0.5 ), IECore.V2f( 0.76, 0.51 ) ) )
        self.assertEqual( len( ss ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( ss[0].name ), "frontRight" )

    def testIDSelectDepths( self ) :
        """GLSelect and IDRender agree on the reported minimum hit depth."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.setAttribute( "name", IECore.StringData( "ball" ) )
            r.geometry( "sphere", {}, {} )
        scene = r.scene()
        scene.setCamera( IECoreGL.OrthographicCamera() )
        s1 = scene.select( IECoreGL.Selector.Mode.GLSelect, IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 1 ) ) )
        self.assertEqual( len( s1 ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( s1[0].name ), "ball" )
        s2 = scene.select( IECoreGL.Selector.Mode.IDRender, IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 1 ) ) )
        self.assertEqual( len( s2 ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( s2[0].name ), "ball" )
        self.assertAlmostEqual( s1[0].depthMin, s2[0].depthMin, 5 )

    def testOcclusionQuerySelect( self ) :
        """OcclusionQuery mode reports occluded objects too, not just the front one."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( -1, 0, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "frontLeft" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -1 ) ) )
            r.setAttribute( "name", IECore.StringData( "backLeft" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 2, 0, 1 ) ) )
            r.setAttribute( "name", IECore.StringData( "frontRight" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -1 ) ) )
            r.setAttribute( "name", IECore.StringData( "backRight" ) )
            r.geometry( "sphere", {}, {} )
        s = r.scene()
        s.setCamera( IECoreGL.OrthographicCamera() )
        ss = s.select( IECoreGL.Selector.Mode.OcclusionQuery, IECore.Box2f( IECore.V2f( 0, 0 ), IECore.V2f( 0.25, 1 ) ) )
        self.assertEqual( len( ss ), 2 )
        self.assertEqual( set( [ IECoreGL.NameStateComponent.nameFromGLName( x.name ) for x in ss ] ), set( ( "frontLeft", "backLeft" ) ) )
        ss = s.select( IECoreGL.Selector.Mode.OcclusionQuery, IECore.Box2f( IECore.V2f( 0.75, 0 ), IECore.V2f( 1, 1 ) ) )
        self.assertEqual( len( ss ), 2 )
        self.assertEqual( set( [ IECoreGL.NameStateComponent.nameFromGLName( x.name ) for x in ss ] ), set( ( "frontRight", "backRight" ) ) )

    def testIDSelectWithAdditionalDisplayStyles( self ) :
        """Extra display styles (wireframe/bound/outline/points) must not break IDRender selection."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        with IECore.WorldBlock( r ) :
            r.setAttribute( "gl:primitive:wireframe", IECore.BoolData( True ) )
            r.setAttribute( "gl:primitive:bound", IECore.BoolData( True ) )
            r.setAttribute( "gl:primitive:outline", IECore.BoolData( True ) )
            r.setAttribute( "gl:primitive:points", IECore.BoolData( True ) )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( -1, 0, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "frontLeft" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -1 ) ) )
            r.setAttribute( "name", IECore.StringData( "backLeft" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 2, 0, 1 ) ) )
            r.setAttribute( "name", IECore.StringData( "frontRight" ) )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -1 ) ) )
            r.setAttribute( "name", IECore.StringData( "backRight" ) )
            r.geometry( "sphere", {}, {} )
        s = r.scene()
        s.setCamera( IECoreGL.OrthographicCamera() )
        ss = s.select( IECoreGL.Selector.Mode.IDRender, IECore.Box2f( IECore.V2f( 0.25, 0.5 ), IECore.V2f( 0.26, 0.51 ) ) )
        self.assertEqual( len( ss ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( ss[0].name ), "frontLeft" )
        ss = s.select( IECoreGL.Selector.Mode.IDRender, IECore.Box2f( IECore.V2f( 0.75, 0.5 ), IECore.V2f( 0.76, 0.51 ) ) )
        self.assertEqual( len( ss ), 1 )
        self.assertEqual( IECoreGL.NameStateComponent.nameFromGLName( ss[0].name ), "frontRight" )

    def testPointsPrimitiveSelect( self ) :
        """PointsPrimitives are selectable in every selection mode."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.setAttribute( "name", IECore.StringData( "pointsNeedSelectingToo" ) )
            r.points( 1, { "P" : IECore.PrimitiveVariable( IECore.PrimitiveVariable.Interpolation.Vertex, IECore.V3fVectorData( [ IECore.V3f( 0 ) ] ) ) } )
        s = r.scene()
        s.setCamera( IECoreGL.PerspectiveCamera() )
        for mode in ( IECoreGL.Selector.Mode.GLSelect, IECoreGL.Selector.Mode.OcclusionQuery, IECoreGL.Selector.Mode.IDRender ) :
            ss = s.select( mode, IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 1 ) ) )
            names = [ IECoreGL.NameStateComponent.nameFromGLName( x.name ) for x in ss ]
            self.assertEqual( len( names ), 1 )
            self.assertEqual( names[0], "pointsNeedSelectingToo" )

    def testCurvesPrimitiveSelect( self ) :
        """CurvesPrimitives (default rendering) are selectable in every mode."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.setAttribute( "name", IECore.StringData( "curvesNeedSelectingToo" ) )
            r.curves(
                IECore.CubicBasisf.linear(),
                False,
                IECore.IntVectorData( [ 2 ] ),
                {
                    "P" : IECore.PrimitiveVariable(
                        IECore.PrimitiveVariable.Interpolation.Vertex,
                        IECore.V3fVectorData( [ IECore.V3f( -1, -1, 0, ), IECore.V3f( 1, 1, 0 ) ] )
                    )
                }
            )
        s = r.scene()
        s.setCamera( IECoreGL.PerspectiveCamera() )
        for mode in ( IECoreGL.Selector.Mode.GLSelect, IECoreGL.Selector.Mode.OcclusionQuery, IECoreGL.Selector.Mode.IDRender ) :
            ss = s.select( mode, IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 1 ) ) )
            names = [ IECoreGL.NameStateComponent.nameFromGLName( x.name ) for x in ss ]
            self.assertEqual( len( names ), 1 )
            self.assertEqual( names[0], "curvesNeedSelectingToo" )

    def testCurvesPrimitiveSelectUsingLines( self ) :
        """Curves rendered as GL lines are still selectable in every mode."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.setAttribute( "name", IECore.StringData( "curvesNeedSelectingToo" ) )
            r.setAttribute( "gl:curvesPrimitive:useGLLines", IECore.BoolData( True ) )
            r.curves(
                IECore.CubicBasisf.linear(),
                False,
                IECore.IntVectorData( [ 2 ] ),
                {
                    "P" : IECore.PrimitiveVariable(
                        IECore.PrimitiveVariable.Interpolation.Vertex,
                        IECore.V3fVectorData( [ IECore.V3f( -1, -1, 0, ), IECore.V3f( 1, 1, 0 ) ] )
                    )
                }
            )
        s = r.scene()
        s.setCamera( IECoreGL.PerspectiveCamera() )
        for mode in ( IECoreGL.Selector.Mode.GLSelect, IECoreGL.Selector.Mode.OcclusionQuery, IECoreGL.Selector.Mode.IDRender ) :
            ss = s.select( mode, IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 1 ) ) )
            names = [ IECoreGL.NameStateComponent.nameFromGLName( x.name ) for x in ss ]
            self.assertEqual( len( names ), 1 )
            self.assertEqual( names[0], "curvesNeedSelectingToo" )

    def testCurvesPrimitiveSelectUsingWireframeLines( self ) :
        """Wireframe-only curves (solid off) are still selectable in every mode."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.setAttribute( "name", IECore.StringData( "curvesNeedSelectingToo" ) )
            r.setAttribute( "gl:curvesPrimitive:useGLLines", IECore.BoolData( True ) )
            r.setAttribute( "gl:primitive:wireframe", IECore.BoolData( True ) )
            r.setAttribute( "gl:primitive:solid", IECore.BoolData( False ) )
            r.curves(
                IECore.CubicBasisf.linear(),
                False,
                IECore.IntVectorData( [ 2 ] ),
                {
                    "P" : IECore.PrimitiveVariable(
                        IECore.PrimitiveVariable.Interpolation.Vertex,
                        IECore.V3fVectorData( [ IECore.V3f( -1, -1, 0, ), IECore.V3f( 1, 1, 0 ) ] )
                    )
                }
            )
        s = r.scene()
        s.setCamera( IECoreGL.PerspectiveCamera() )
        for mode in ( IECoreGL.Selector.Mode.GLSelect, IECoreGL.Selector.Mode.OcclusionQuery, IECoreGL.Selector.Mode.IDRender ) :
            ss = s.select( mode, IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 1 ) ) )
            names = [ IECoreGL.NameStateComponent.nameFromGLName( x.name ) for x in ss ]
            self.assertEqual( len( names ), 1 )
            self.assertEqual( names[0], "curvesNeedSelectingToo" )

    def testContextManager( self ) :
        """Selector can be driven directly as a context manager, filling a hits list."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.setAttribute( "name", IECore.StringData( "one" ) )
            r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 1, 0, 0 ) ) } )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( -1, 0, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "two" ) )
            r.geometry( "sphere", {}, {} )
        scene = r.scene()
        # Render the camera manually rather than via the scene, so the
        # selector sees the raw root render below.
        scene.setCamera( None )
        IECoreGL.PerspectiveCamera().render( IECoreGL.State.defaultState() )
        hits = []
        with IECoreGL.Selector( IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 1 ) ), IECoreGL.Selector.Mode.IDRender, hits ) as selector :
            IECoreGL.State.bindBaseState()
            selector.baseState().bind()
            scene.root().render( selector.baseState() )
        names = [ IECoreGL.NameStateComponent.nameFromGLName( x.name ) for x in hits ]
        self.assertEqual( len( names ), 2 )
        self.assert_( "one" in names )
        self.assert_( "two" in names )

    def testSelectableFlag( self ) :
        """Objects with gl:primitive:selectable == False are excluded from all modes."""
        r = IECoreGL.Renderer()
        r.setOption( "gl:mode", IECore.StringData( "deferred" ) )
        r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
        with IECore.WorldBlock( r ) :
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, -5 ) ) )
            r.setAttribute( "name", IECore.StringData( "selectableObj" ) )
            r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 1, 0, 0 ) ) } )
            r.geometry( "sphere", {}, {} )
            r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( -1, 0, 0 ) ) )
            r.setAttribute( "name", IECore.StringData( "unselectableObj" ) )
            r.setAttribute( "gl:primitive:selectable", IECore.BoolData( False ) )
            r.geometry( "sphere", {}, {} )
        s = r.scene()
        s.setCamera( IECoreGL.PerspectiveCamera() )
        for mode in ( IECoreGL.Selector.Mode.GLSelect, IECoreGL.Selector.Mode.OcclusionQuery, IECoreGL.Selector.Mode.IDRender ) :
            ss = s.select( mode, IECore.Box2f( IECore.V2f( 0 ), IECore.V2f( 1 ) ) )
            names = [ IECoreGL.NameStateComponent.nameFromGLName( x.name ) for x in ss ]
            self.assertEqual( names, [ "selectableObj" ] )
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| |
#!/usr/bin/python3.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Implement an abstraction of a protocol state machine.
"""
import sys
import re
import fnmatch
from pycopia.stringmatch import compile_exact
class ProtocolExit(Exception):
    """Signals that the protocol has run to completion."""
class ProtocolError(Exception):
    """Signals an invalid or undefined protocol transition."""
RESET = 0       # conventional initial/reset state for every StateMachine
ANY = object()  # sentinel symbol: transition matches any input
def transition_error(text):
    """Default transition action: reject an input symbol with no transition.

    Raises:
        ProtocolError: always, naming the offending symbol.
    """
    raise ProtocolError('Symbol {!r} is undefined.'.format(text))
class StateMachine:
    """Table-driven finite state machine.

    step() consults three transition tables in priority order: exact
    symbol matches, per-state regex lists, then per-state ANY transitions,
    finally falling back to ``default_transition``.  Two general-purpose
    stacks are provided for use by action callbacks.
    """
    ANY = ANY # place here for easy access from other modules
    RESET = RESET

    def __init__(self, initial_state=RESET):
        # (symbol, state) -> (compiled exact pattern, action, next state)
        self._exact_transitions = {}
        # state -> (action, next state); consulted when nothing else matches
        self._any_transitions = {}
        # state -> list of (compiled regex, action, next state)
        self._re_transitions = {}
        self.default_transition = (transition_error, initial_state)
        self.initial_state = initial_state
        self.reset()

    def __str__(self):
        return "StateMachine: state=%r" % (self.current_state,)

    def reset(self):
        """Return to the initial state and clear both user stacks."""
        self.current_state = self.initial_state
        self.stack = [] # primary stack
        self.altstack = [] # alternate stack

    # stacks for user
    def push(self, v):
        self.stack.append(v)

    def pop(self):
        return self.stack.pop()

    def pushalt(self, v):
        self.altstack.append(v)

    def popalt(self):
        return self.altstack.pop()

    def step(self, symbol):
        """Feed one input symbol: find a transition, move state, run its action.

        Exact-match actions receive the regex match object; ANY and default
        actions receive the raw symbol.
        """
        state = self.current_state
        try:
            cre, action, next = self._exact_transitions[(symbol, state)]
            mo = cre.search(symbol)
            if mo:
                self.current_state = next
                if action:
                    action(mo)
                return
        except KeyError:
            pass
        try:
            rel = self._re_transitions[state]
            for cre, action, next in rel:
                mo = cre.search(symbol)
                if mo:
                    self.current_state = next
                    if action:
                        action(mo)
                    return
        except KeyError:
            pass
        try:
            action, next = self._any_transitions[state]
            self.current_state = next
            if action:
                action(symbol)
        except KeyError:
            # No transition at all: state still advances per the default.
            action, next = self.default_transition
            self.current_state = next
            if action:
                action(symbol)

    # transition constructors
    def set_default_transition(self, action, next_state):
        self.default_transition = (action, next_state)

    def add_exact(self, symbol, state, action, next_state):
        cre = compile_exact(symbol)
        self._exact_transitions[(symbol, state)] = (cre, action, next_state)

    # general add method that knows what you want. ;-)
    def add(self, symbol, state, action, next_state):
        """Register a transition, dispatching on the kind of *symbol*."""
        if symbol is ANY:
            self._any_transitions[state] = (action, next_state)
        elif is_exact(symbol):
            self.add_exact(symbol, state, action, next_state)
        else:
            self.add_regex(symbol, state, action, next_state)

    def add_any(self, state, action, next_state):
        self._any_transitions[state] = (action, next_state)

    def add_glob(self, expression, state, action, next_state):
        # fnmatch.translate converts the shell-style glob into regex source.
        self.add_regex(fnmatch.translate(expression),
                       state, action, next_state)

    def add_regex(self, expression, state, action, next_state,
                  ignore_case=False, multiline=False):
        cre = re.compile(expression, _get_re_flags(ignore_case, multiline))
        try:
            rel = self._re_transitions[state]
            rel.append((cre, action, next_state))
        except KeyError:
            self._re_transitions[state] = [(cre, action, next_state)]

    def add_list(self, expression_list, state, action, next_state):
        """Register the same exact-match transition for every listed symbol."""
        for input_symbol in expression_list:
            self.add_exact(input_symbol, state, action, next_state)
def is_exact(pattern):
    """Return True if *pattern* contains no regex metacharacters.

    Used by StateMachine.add() to decide between exact-match and regex
    transitions.  The original implementation only worked for ``bytes``
    patterns (``c in rb"..."`` raises TypeError when iterating a ``str``);
    this version handles both str and bytes symbols.

    Args:
        pattern: a str or bytes candidate symbol.

    Returns:
        bool: True when every character is literal (no regex special chars).
    """
    # Pick the character set matching the pattern's type, since mixing
    # str and bytes in containment tests raises TypeError in Python 3.
    if isinstance(pattern, bytes):
        special = rb".^$*?+\{}(),[]|"
    else:
        special = r".^$*?+\{}(),[]|"
    for c in pattern:
        if c in special:
            return False
    return True
class Protocol:
    """Implement the actions for the state machine. Add bound methods to it.

    Subclasses must override initialize() to populate the StateMachine and
    may override start() to emit an opening message.  The active iostream
    and optional user data are only bound while run()/step() executes.
    """
    EOL = b"\n"  # default line terminator appended by writeln()

    def __init__(self, eol=None):
        self.states = StateMachine()
        self.iostream = None
        self.data = None # extra data action handlers might use.
        self.eol = eol or self.EOL
        self.initialize(self.states)

    def __str__(self):
        if self.iostream is None:
            return "Protocol: fsm: {}. current: {}, Not running.".format(
                self.states, self.states.current_state)
        else:
            return "Protocol: fsm: {},\n current: {},\n iostream: {}".format(
                self.states, self.states.current_state, self.iostream)

    def log(self, *args):
        """Write diagnostic output to stderr."""
        print(*args, file=sys.stderr)

    def run(self, iostream, data=None):
        """Drive the state machine line-by-line from *iostream* until EOF.

        The stream and *data* are released in a finally block so the
        protocol object does not keep them alive after the run.
        """
        self.iostream = iostream
        self.data = data
        states = self.states
        states.reset()
        self.start()
        try:
            while 1:
                nextline = iostream.readline()
                if nextline:
                    states.step(nextline)
                else:
                    break
        finally:
            self.iostream = None
            self.data = None

    def step(self, iostream, data=None):
        """Process exactly one line from *iostream*.

        Raises:
            ProtocolExit: when the stream yields no more data.
        """
        self.iostream = iostream
        self.data = data
        try:
            nextline = iostream.readline()
            if nextline:
                self.states.step(nextline)
            else:
                raise ProtocolExit("No more data")
        finally:
            self.iostream = None
            self.data = None

    def close(self):
        """Drop references to the state machine and stream."""
        self.states = None
        self.iostream = None

    def initialize(self, states):
        """Fill this in with state transitions."""
        raise NotImplementedError()

    def start(self):
        # Optional hook invoked at the top of run(); subclasses override it
        # to send a greeting or prime the conversation.
        return NotImplemented

    def reset(self):
        self.states.reset()

    def writeln(self, data):
        """Write *data* followed by the configured EOL to the iostream."""
        self.iostream.write(data + self.eol)
def _get_re_flags(ignore_case, multiline):
flags = 0
if ignore_case:
flags |= re.I
if multiline:
flags |= re.M
return flags
if __name__ == "__main__":
    # Manual smoke test: run a trivial greeting protocol on the console.
    from pycopia import autodebug
    from pycopia import IO

    class TestProtocol(Protocol):
        # Accept b"GREETINGS\n" from RESET, then say BYE and exit;
        # anything else triggers the error handler.
        def initialize(self, fsm):
            fsm.set_default_transition(self._error, fsm.RESET)
            fsm.add(b"GREETINGS\n", fsm.RESET, self._bye, 2)
            fsm.add(fsm.ANY, 2, self._bye, fsm.RESET)

        def start(self):
            self.writeln(b"HELLO type GREETINGS")

        def _bye(self, match):
            self.writeln(b"BYE")
            raise ProtocolExit

        def _error(self, symbol):
            self.writeln(b"ERROR")

    proto = TestProtocol()
    try:
        proto.run(IO.ConsoleIO(binary=True))
    except ProtocolExit:
        print("exited")
| |
# Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
from coal_mine.business_logic import (
AlreadyExistsError,
AlreadyPausedError,
AlreadyUnpausedError,
CanaryNotFoundError,
BusinessLogic,
)
from coal_mine.memory_store import MemoryStore
from coal_mine.mongo_store import MongoStore
from datetime import datetime, timedelta
import signal
import smtplib
import time
from unittest import TestCase
from unittest.mock import patch
import uuid
class MemoryStoreTester(object):
    """Mixin supplying an in-memory store for the BusinessLogic test suite."""

    def get_store(self):
        """Build a fresh, empty MemoryStore for each test."""
        return MemoryStore()

    def free_store(self):
        """The in-memory store needs no explicit cleanup."""
        pass
class MongoStoreTester(object):
    """Mixin supplying a throwaway MongoDB-backed store for the test suite."""

    def get_store(self):
        # Unique database name per run so concurrent or aborted runs
        # don't collide with each other.
        self.db_hosts = ['localhost']
        self.db_name = "coal-mine-test-" + str(uuid.uuid4())
        return MongoStore(self.db_hosts, self.db_name, None, None)

    def free_store(self):
        # NOTE(review): relies on self.store being assigned by the test
        # class's setUp() — confirm against BusinessLogicTests.setUp.
        self.store.db.client.drop_database(self.db_name)
class BusinessLogicTests(object):
def setUp(self):
    """Create a store (via the mixin) and a BusinessLogic wired to it."""
    self.store = self.get_store()
    self.logic = BusinessLogic(self.store, 'example@example.com')

def tearDown(self):
    """Cancel any pending alarm, restore SIGALRM, and release the store."""
    signal.alarm(0)
    signal.signal(signal.SIGALRM, signal.SIG_DFL)
    self.free_store()

def test_noop(self):
    # Just tests that setUp() and tearDown() don't crash.
    pass
def test_create(self):
    """A freshly created canary round-trips through get() unchanged."""
    created = self.logic.create(name='test_create', periodicity=12345)
    fetched = self.logic.get(created['id'])
    self.assertEqual(created, fetched)

def test_create_invalid(self):
    """create() rejects bad names, duplicate names, and bad field types."""
    with self.assertRaises(TypeError):
        self.logic.create(name=2, periodicity=12346)
    with self.assertRaises(TypeError):
        self.logic.create(name='', periodicity=12346)
    self.logic.create(name='test_create_invalid', periodicity=12346)
    with self.assertRaises(AlreadyExistsError):
        self.logic.create(name='test_create_invalid', periodicity=12346)
    with self.assertRaises(TypeError):
        self.logic.create(name='test_create_invalid2', periodicity='abc')
    with self.assertRaises(TypeError):
        self.logic.create(name='test_create_invalid2', periodicity=-1)
    with self.assertRaises(TypeError):
        self.logic.create(name='test_create_invalid2', periodicity=12346,
                          description=2)
    with self.assertRaises(TypeError):
        self.logic.create(name='test_create_invalid2', periodicity=12346,
                          emails='test_create_invalid@example.com')
    with self.assertRaises(TypeError):
        self.logic.create(name='test_create_invalid2', periodicity=12346,
                          paused='abc')

def test_create_emails_none(self):
    """emails=None is explicitly accepted by create()."""
    self.logic.create(name='test_create_emails_none',
                      periodicity=12347,
                      emails=None)
def test_update(self):
    """update() changes name, periodicity, description, and emails."""
    created = self.logic.create(name='test_update',
                                periodicity=12347)
    self.logic.update(created['id'],
                      name='test_update2',
                      periodicity=12348,
                      description='test_update2 description',
                      emails=['test_update@example.com'])
    fetched = self.logic.get(created['id'])
    self.assertEqual(fetched['name'], 'test_update2')
    self.assertEqual(fetched['periodicity'], 12348)
    self.assertEqual(fetched['description'], 'test_update2 description')
    self.assertEqual(fetched['emails'], ['test_update@example.com'])

def test_update_same_slug(self):
    """Renaming with only case changes keeps the same slug."""
    created = self.logic.create(name='test_update_same_slug',
                                periodicity=5)
    self.logic.update(created['id'], name='Test_Update_Same_Slug')
    fetched = self.logic.get(created['id'])
    self.assertEqual(fetched['name'], 'Test_Update_Same_Slug')
    self.assertEqual(created['slug'], fetched['slug'])

def test_update_paused(self):
    """Updating a paused canary's periodicity does not set a deadline."""
    created = self.logic.create('test_update_paused', periodicity=5)
    self.logic.pause(created['id'])
    self.logic.update(created['id'], periodicity=10)
    fetched = self.logic.get(created['id'])
    self.assertNotIn('deadline', fetched)

def test_update_invalid(self):
    """update() rejects bad names, name collisions, and bad field types."""
    created = self.logic.create(name='test_update_invalid',
                                periodicity=12349)
    with self.assertRaises(TypeError):
        self.logic.update(created['id'], name=2)
    with self.assertRaises(TypeError):
        self.logic.update(created['id'], name='')
    self.logic.create(name='test_update_invalid2', periodicity=12350)
    with self.assertRaises(AlreadyExistsError):
        self.logic.update(created['id'], name='test_update_invalid2')
    with self.assertRaises(TypeError):
        self.logic.update(created['id'], periodicity='abc')
    with self.assertRaises(TypeError):
        self.logic.update(created['id'], periodicity=-1)
    with self.assertRaises(TypeError):
        self.logic.update(created['id'], description=2)
    with self.assertRaises(TypeError):
        self.logic.update(created['id'],
                          emails='test_update_invalid@example.com')
    with self.assertRaises(ValueError):
        # An update with no fields at all is an error.
        self.logic.update(created['id'])

def test_update_late_change(self):
    """Shrinking periodicity below the elapsed time flips the canary late."""
    created = self.logic.create(name='test_update_late_change',
                                periodicity=12351)
    time.sleep(1)
    self.logic.update(created['id'], periodicity=1)
    fetched = self.logic.get(created['id'])
    # Note that this test is mostly for code coverage, but we should at
    # least check that the change we expected is there.
    self.assertNotEqual(created['periodicity'], fetched['periodicity'])

def test_update_not_found(self):
    """update() of an unknown identifier raises CanaryNotFoundError."""
    with self.assertRaises(CanaryNotFoundError):
        self.logic.update('testunfo', name='test_update_not_found')

def test_store_unset(self):
    """Pausing works immediately after creation (coverage for store state)."""
    created = self.logic.create('foo', 20)
    self.logic.pause(created['id'])
def test_trigger(self):
    """trigger() works with and without a comment."""
    created = self.logic.create(name='test_trigger', periodicity=12352)
    self.logic.trigger(created['id'])
    self.logic.trigger(created['id'], comment='test_trigger comment')

def test_trigger_late(self):
    """Triggering after the deadline has passed is still accepted."""
    created = self.logic.create(name='test_trigger_late', periodicity=1)
    time.sleep(1.1)
    self.logic.trigger(created['id'])

def test_trigger_paused(self):
    """Triggering a paused canary is accepted."""
    created = self.logic.create(name='test_trigger_paused',
                                periodicity=12353,
                                paused=True)
    self.logic.trigger(created['id'])

def test_trigger_not_found(self):
    with self.assertRaises(CanaryNotFoundError):
        self.logic.trigger('testtnfo')

def test_pause(self):
    """pause()/unpause() toggle state and reject redundant transitions."""
    created = self.logic.create(name='test_pause', periodicity=1)
    time.sleep(1.1)
    self.logic.pause(created['id'])
    with self.assertRaises(AlreadyPausedError):
        self.logic.pause(created['id'])
    self.logic.unpause(created['id'])
    with self.assertRaises(AlreadyUnpausedError):
        self.logic.unpause(created['id'])
    self.logic.pause(created['id'], comment='test_pause pause comment')
    self.logic.unpause(created['id'],
                       comment='test_pause unpause comment')

def test_pause_not_found(self):
    with self.assertRaises(CanaryNotFoundError):
        self.logic.pause('testpnfo')

def test_unpause_not_found(self):
    with self.assertRaises(CanaryNotFoundError):
        self.logic.unpause('testunfo')

def test_delete(self):
    """After delete(), get() raises CanaryNotFoundError."""
    created = self.logic.create(name='test_delete', periodicity=12354)
    self.logic.get(created['id'])
    self.logic.delete(created['id'])
    with self.assertRaises(CanaryNotFoundError):
        self.logic.get(created['id'])

def test_delete_not_found(self):
    with self.assertRaises(CanaryNotFoundError):
        self.logic.delete('testdnfo')
def test_list(self):
    """list() runs without arguments."""
    self.logic.list()

def test_list_no_paused_canaries(self):
    """paused filter excludes an unpaused canary only when paused=True."""
    self.logic.create('not-paused', 20)
    self.assertEqual(next(self.logic.list())['name'], 'not-paused')
    self.assertEqual(next(self.logic.list(paused=False))['name'],
                     'not-paused')
    with self.assertRaises(StopIteration):
        next(self.logic.list(paused=True))

def test_list_only_paused_canary(self):
    """paused filter excludes a paused canary only when paused=False."""
    self.logic.create('paused', 20, paused=True)
    self.assertEqual(next(self.logic.list())['name'], 'paused')
    self.assertEqual(next(self.logic.list(paused=True))['name'],
                     'paused')
    with self.assertRaises(StopIteration):
        next(self.logic.list(paused=False))

def test_list_paused_and_unpaused_canary(self):
    """paused filter partitions a mixed population correctly."""
    self.logic.create('not-paused', 10)
    self.logic.create('paused', 20, paused=True)
    iterator = self.logic.list()
    self.assertEqual(set((next(iterator)['name'], next(iterator)['name'])),
                     set(('not-paused', 'paused')))
    iterator = self.logic.list(paused=True)
    self.assertEqual(next(iterator)['name'], 'paused')
    with self.assertRaises(StopIteration):
        next(iterator)
    iterator = self.logic.list(paused=False)
    self.assertEqual(next(iterator)['name'], 'not-paused')
    with self.assertRaises(StopIteration):
        next(iterator)

def test_list_no_late_canaries(self):
    """late filter excludes an on-time canary only when late=True."""
    self.logic.create('not-late', 20)
    self.assertEqual(next(self.logic.list())['name'], 'not-late')
    self.assertEqual(next(self.logic.list(late=False))['name'],
                     'not-late')
    with self.assertRaises(StopIteration):
        next(self.logic.list(late=True))

def test_list_only_late_canary(self):
    """late filter includes a canary whose deadline has passed."""
    self.logic.create('late', 1)
    time.sleep(1.1)
    self.assertEqual(next(self.logic.list())['name'], 'late')
    self.assertEqual(next(self.logic.list(late=True))['name'],
                     'late')
    with self.assertRaises(StopIteration):
        next(self.logic.list(late=False))

def test_list_late_and_not_late_canary(self):
    """late filter partitions a mixed population correctly."""
    self.logic.create('late', 1)
    self.logic.create('not-late', 20)
    time.sleep(1.1)
    iterator = self.logic.list()
    self.assertEqual(set((next(iterator)['name'], next(iterator)['name'])),
                     set(('not-late', 'late')))
    iterator = self.logic.list(late=True)
    self.assertEqual(next(iterator)['name'], 'late')
    with self.assertRaises(StopIteration):
        next(iterator)
    iterator = self.logic.list(late=False)
    self.assertEqual(next(iterator)['name'], 'not-late')
    with self.assertRaises(StopIteration):
        next(iterator)

def test_list_search(self):
    """search filter matches by name; verbose listing also works."""
    self.logic.create('foo', 20)
    next(self.logic.list(search='foo'))
    with self.assertRaises(StopIteration):
        next(self.logic.list(search='froodlefreedle'))
    next(self.logic.list(verbose=True))
def test_notify(self):
with patch('smtplib.SMTP'):
created = self.logic.create(name='test_notify',
periodicity=1,
emails=['test_notify@example.com'])
time.sleep(1.1)
self.logic.trigger(created['id'])
with patch.object(smtplib.SMTP, 'connect', side_effect=Exception):
time.sleep(1.1)
self.logic.trigger(created['id'])
# Not needed for test, but let's clean up after ourselves to avoid
# unwanted notifications while other tests are running!
self.logic.delete(created['id'])
def test_find_identifier(self):
created = self.logic.create(name='test_find_identifier',
periodicity=12355)
self.assertEqual(created['id'],
self.logic.find_identifier(identifier=created['id']))
self.assertEqual(created['id'],
self.logic.find_identifier(name=created['name']))
    def test_find_identifier_invalid(self):
        """Passing no selector, or more than one selector, is rejected."""
        with self.assertRaisesRegexp(Exception, 'Must specify'):
            self.logic.find_identifier()
        with self.assertRaisesRegexp(Exception, 'Specify only one'):
            self.logic.find_identifier(name='foo', slug='bar')
    def test_find_identifier_slug_not_found(self):
        """An unknown slug raises CanaryNotFoundError naming that slug."""
        with self.assertRaisesRegexp(
                CanaryNotFoundError,
                r"'slug': 'test-find-identifier-slug-not-found'"):
            self.logic.find_identifier(
                slug='test-find-identifier-slug-not-found')
    def test_add_history(self):
        """add_history accepts None and many string entries without error."""
        history = []
        self.logic.add_history(history, None)
        for i in range(1000):
            self.logic.add_history(history, str(i))
    def test_add_history_invalid(self):
        """Non-string history entries are rejected with TypeError."""
        history = []
        with self.assertRaises(TypeError):
            self.logic.add_history(history, 2)
    def test_schedule_next_deadline(self):
        """schedule_next_deadline must be a no-op when no canaries exist."""
        # Make sure StopIteration is handled properly when there are no
        # active canaries.
        self.logic.schedule_next_deadline()
    def test_periodicity_numeric(self):
        """A numeric periodicity sets the deadline that many seconds out."""
        created = self.logic.create(name='test_periodicity_numeric',
                                    periodicity=1200)
        delta = (created['deadline'] - datetime.utcnow()).total_seconds()
        # Compare in tens of seconds to tolerate test execution time.
        self.assertAlmostEqual(delta / 10, 120, places=0)
    def test_periodicity_schedule_inactive(self):
        """A schedule active only tomorrow anchors the deadline at tomorrow."""
        now = datetime.utcnow()
        midnight_tomorrow = (now + timedelta(days=1)).replace(
            hour=0, minute=0, second=0, microsecond=0)
        # Build a schedule whose day-of-week field is tomorrow's weekday.
        tomorrow_schedule = '* * * * {} 1200'.format(
            midnight_tomorrow.isoweekday())
        created = self.logic.create(name='test_periodicity_schedule_inactive',
                                    periodicity=tomorrow_schedule)
        delta = (created['deadline'] - midnight_tomorrow).total_seconds()
        self.assertAlmostEqual(delta / 10, 120, places=0)
    def test_periodicity_schedule_active(self):
        """An always-active schedule behaves like a plain numeric periodicity."""
        now = datetime.utcnow()
        created = self.logic.create(name='test_periodicity_schedule_active',
                                    periodicity='* * * * * 1200')
        delta = (created['deadline'] - now).total_seconds()
        self.assertAlmostEqual(delta / 10, 120, places=0)
    def test_periodicity_invalid(self):
        """A five-field schedule (missing the period) is rejected."""
        with self.assertRaises(TypeError):
            self.logic.create(name='test_periodicity_invalid',
                              periodicity='* * * * 1200')
    def test_periodicity_invalid_newline(self):
        """Newline-separated schedule entries are rejected."""
        with self.assertRaises(TypeError):
            self.logic.create(name='test_periodicity_invalid_newline',
                              periodicity='* * * * sat 1200\n* * * * sun 400')
    def test_periodicity_invalid_command(self):
        """A non-numeric period field is rejected."""
        with self.assertRaises(TypeError):
            self.logic.create(name='test_periodicity_invalid_command',
                              periodicity='* * * * * froodle')
    def test_periodicity_invalid_negative(self):
        """A negative period is rejected."""
        with self.assertRaises(TypeError):
            self.logic.create(name='test_periodicity_invalid_negative',
                              periodicity='* * * * * -1')
    def test_periodicity_invalid_overlapping(self):
        """Two schedule entries covering the same times are rejected."""
        with self.assertRaises(TypeError):
            self.logic.create(name='test_periodicity_invalid_overlapping',
                              periodicity='* * * * * 30; * * * * * 60')
    def test_periodicity_delta_case_2(self):
        """Whence past the hour-0 window rolls to the next day's window."""
        periodicity = '* 0 * * * 120'
        whence = datetime(2016, 6, 30, 1, 0)
        delta = self.logic.calculate_periodicity_delta(periodicity, whence)
        next_deadline = whence + delta
        self.assertEqual(next_deadline, datetime(2016, 7, 1, 0, 2))
    def test_periodicity_delta_case_3(self):
        """A deadline that would land outside the window rolls to the next window."""
        periodicity = '* 0 * * * 120'
        # 00:59 + 120s = 01:01, outside the hour-0 window, so the deadline
        # lands at the start of the next day's window plus the period.
        whence = datetime(2016, 6, 30, 0, 59)
        delta = self.logic.calculate_periodicity_delta(periodicity, whence)
        next_deadline = whence + delta
        self.assertEqual(next_deadline, datetime(2016, 7, 1, 0, 2))
    def test_periodicity_delta_case_4(self):
        """With an adjacent hour-1 window, the deadline stays same-day."""
        periodicity = '* 0 * * * 120; * 1 * * * 600'
        # 00:59 + 600s = 01:09, valid under the hour-1 schedule entry.
        whence = datetime(2016, 6, 30, 0, 59)
        delta = self.logic.calculate_periodicity_delta(periodicity, whence)
        next_deadline = whence + delta
        self.assertEqual(next_deadline, datetime(2016, 6, 30, 1, 9))
    def test_deadline_handler_next_deadline(self):
        """After 'sooner' goes late, 'later' holds the next upcoming deadline."""
        self.logic.create(name='sooner', periodicity=1)
        later = self.logic.create(name='later', periodicity=2)
        time.sleep(1.1)
        next_deadline = next(self.store.upcoming_deadlines())
        self.assertEqual(later['name'], next_deadline['name'])
class BusinessLogicMemoryTests(MemoryStoreTester, BusinessLogicTests,
                               TestCase):
    """Run the shared business-logic suite against the in-memory store."""
    pass
class BusinessLogicMongoTests(MongoStoreTester, BusinessLogicTests,
                              TestCase):
    """Run the shared business-logic suite against the MongoDB store."""
    pass
| |
import datetime
import random
import re
import sha
from django.conf import settings
from django.db import models
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
SHA1_RE = re.compile('^[a-f0-9]{40}$')
class RegistrationManager(models.Manager):
    """
    Custom manager for the ``RegistrationProfile`` model.
    The methods defined here provide shortcuts for account creation
    and activation (including generation and emailing of activation
    keys), and for cleaning out expired inactive accounts.
    """
    def activate_user(self, activation_key):
        """
        Validate an activation key and activate the corresponding
        ``User`` if valid.
        If the key is valid and has not expired, return the ``User``
        after activating.
        If the key is not valid or has expired, return ``False``.
        If the key is valid but the ``User`` is already active,
        return ``False``.
        To prevent reactivation of an account which has been
        deactivated by site administrators, the activation key is
        reset to the string ``ALREADY_ACTIVATED`` after successful
        activation.
        """
        # Make sure the key we're trying conforms to the pattern of a
        # SHA1 hash; if it doesn't, no point trying to look it up in
        # the database.
        if SHA1_RE.search(activation_key):
            try:
                profile = self.get(activation_key=activation_key)
            except self.model.DoesNotExist:
                return False
            if not profile.activation_key_expired():
                user = profile.user
                user.is_active = True
                user.save()
                profile.activation_key = self.model.ACTIVATED
                profile.save()
                return user
        return False
    def create_inactive_user(self, username, password, email,
                             send_email=True, profile_callback=None):
        """
        Create a new, inactive ``User``, generates a
        ``RegistrationProfile`` and email its activation key to the
        ``User``, returning the new ``User``.
        To disable the email, call with ``send_email=False``.
        The activation email will make use of two templates:
        ``registration/activation_email_subject.txt``
            This template will be used for the subject line of the
            email. It receives one context variable, ``site``, which
            is the currently-active
            ``django.contrib.sites.models.Site`` instance. Because it
            is used as the subject line of an email, this template's
            output **must** be only a single line of text; output
            longer than one line will be forcibly joined into only a
            single line.
        ``registration/activation_email.txt``
            This template will be used for the body of the email. It
            will receive three context variables: ``activation_key``
            will be the user's activation key (for use in constructing
            a URL to activate the account), ``expiration_days`` will
            be the number of days for which the key will be valid and
            ``site`` will be the currently-active
            ``django.contrib.sites.models.Site`` instance.
        To enable creation of a custom user profile along with the
        ``User`` (e.g., the model specified in the
        ``AUTH_PROFILE_MODULE`` setting), define a function which
        knows how to create and save an instance of that model with
        appropriate default values, and pass it as the keyword
        argument ``profile_callback``. This function should accept one
        keyword argument:
        ``user``
            The ``User`` to relate the profile to.
        """
        new_user = User.objects.create_user(username, email, password)
        # The account stays inactive until the emailed key is used.
        new_user.is_active = False
        new_user.save()
        registration_profile = self.create_profile(new_user)
        if profile_callback is not None:
            profile_callback(user=new_user)
        if send_email:
            # Prefer django-mailer's queued send_mail when it is installed.
            if "mailer" in settings.INSTALLED_APPS:
                from mailer import send_mail
            else:
                from django.core.mail import send_mail
            current_site = Site.objects.get_current()
            subject = render_to_string('registration/activation_email_subject.txt',
                                       { 'site': current_site })
            # Email subject *must not* contain newlines
            subject = ''.join(subject.splitlines())
            message = render_to_string('registration/activation_email.txt',
                                       { 'activation_key': registration_profile.activation_key,
                                         'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
                                         'site': current_site })
            send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, [new_user.email])
        return new_user
    def create_profile(self, user):
        """
        Create a ``RegistrationProfile`` for a given
        ``User``, and return the ``RegistrationProfile``.
        The activation key for the ``RegistrationProfile`` will be a
        SHA1 hash, generated from a combination of the ``User``'s
        username and a random salt.
        """
        # Use hashlib instead of the long-deprecated ``sha`` module
        # (removed in Python 3); the resulting hex digest is identical.
        import hashlib
        # NOTE(review): random.random() is not cryptographically strong;
        # consider os.urandom/secrets for the salt. Kept for compatibility.
        salt = hashlib.sha1(str(random.random()).encode('utf-8')).hexdigest()[:5]
        activation_key = hashlib.sha1(
            (salt + user.username).encode('utf-8')).hexdigest()
        return self.create(user=user,
                           activation_key=activation_key)
    def delete_expired_users(self):
        """
        Remove expired instances of ``RegistrationProfile`` and their
        associated ``User``s.
        Accounts to be deleted are identified by searching for
        instances of ``RegistrationProfile`` with expired activation
        keys, and then checking to see if their associated ``User``
        instances have the field ``is_active`` set to ``False``; any
        ``User`` who is both inactive and has an expired activation
        key will be deleted.
        It is recommended that this method be executed regularly as
        part of your routine site maintenance; this application
        provides a custom management command which will call this
        method, accessible as ``manage.py cleanupregistration``.
        Regularly clearing out accounts which have never been
        activated serves two useful purposes:
        1. It alleviates the ocasional need to reset a
           ``RegistrationProfile`` and/or re-send an activation email
           when a user does not receive or does not act upon the
           initial activation email; since the account will be
           deleted, the user will be able to simply re-register and
           receive a new activation key.
        2. It prevents the possibility of a malicious user registering
           one or more accounts and never activating them (thus
           denying the use of those usernames to anyone else); since
           those accounts will be deleted, the usernames will become
           available for use again.
        If you have a troublesome ``User`` and wish to disable their
        account while keeping it in the database, simply delete the
        associated ``RegistrationProfile``; an inactive ``User`` which
        does not have an associated ``RegistrationProfile`` will not
        be deleted.
        """
        for profile in self.all():
            if profile.activation_key_expired():
                user = profile.user
                # Deleting the User cascades to the profile as well.
                if not user.is_active:
                    user.delete()
class RegistrationProfile(models.Model):
    """
    A simple profile which stores an activation key for use during
    user account registration.
    Generally, you will not want to interact directly with instances
    of this model; the provided manager includes methods
    for creating and activating new accounts, as well as for cleaning
    out accounts which have never been activated.
    While it is possible to use this model as the value of the
    ``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do
    so. This model's sole purpose is to store data temporarily during
    account registration and activation, and a mechanism for
    automatically creating an instance of a site-specific profile
    model is provided via the ``create_inactive_user`` on
    ``RegistrationManager``.
    """
    # Sentinel stored in ``activation_key`` after successful activation,
    # so a deactivated account cannot be re-activated with the old key.
    ACTIVATED = u"ALREADY_ACTIVATED"
    user = models.ForeignKey(User, unique=True, verbose_name=_('user'))
    # 40 chars == length of a SHA1 hex digest.
    activation_key = models.CharField(_('activation key'), max_length=40)
    objects = RegistrationManager()
    class Meta:
        verbose_name = _('registration profile')
        verbose_name_plural = _('registration profiles')
    def __unicode__(self):
        return u"Registration information for %s" % self.user
    def activation_key_expired(self):
        """
        Determine whether this ``RegistrationProfile``'s activation
        key has expired, returning a boolean -- ``True`` if the key
        has expired.
        Key expiration is determined by a two-step process:
        1. If the user has already activated, the key will have been
           reset to the string ``ALREADY_ACTIVATED``. Re-activating is
           not permitted, and so this method returns ``True`` in this
           case.
        2. Otherwise, the date the user signed up is incremented by
           the number of days specified in the setting
           ``ACCOUNT_ACTIVATION_DAYS`` (which should be the number of
           days after signup during which a user is allowed to
           activate their account); if the result is less than or
           equal to the current date, the key has expired and this
           method returns ``True``.
        """
        expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
        return self.activation_key == self.ACTIVATED or \
               (self.user.date_joined + expiration_date <= datetime.datetime.now())
    # Render as a boolean icon in the Django admin changelist.
    activation_key_expired.boolean = True
| |
#!/usr/bin/env python3
# Copyright (c) 2016-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import re
import fnmatch
import sys
import subprocess
import datetime
import os
################################################################################
# file filtering
################################################################################
# fnmatch globs for files that must never be touched by this tool.
EXCLUDE = [
    # auto generated:
    'src/qt/bitcoinstrings.cpp',
    'src/chainparamsseeds.h',
    # other external copyrights:
    'src/reverse_iterator.h',
    'src/test/fuzz/FuzzedDataProvider.h',
    'src/tinyformat.h',
    'src/bench/nanobench.h',
    'test/functional/test_framework/bignum.py',
    # python init:
    '*__init__.py',
]
# The globs are translated into one alternation regex so a single
# match() call decides exclusion per filename.
EXCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in EXCLUDE]))
EXCLUDE_DIRS = [
    # git subtrees
    "src/crypto/ctaes/",
    "src/leveldb/",
    "src/minisketch",
    "src/secp256k1/",
    "src/univalue/",
    "src/crc32c/",
]
# File extensions this tool operates on, compiled the same way.
INCLUDE = ['*.h', '*.cpp', '*.cc', '*.c', '*.mm', '*.py', '*.sh', '*.bash-completion']
INCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in INCLUDE]))
def applies_to_file(filename):
    """Return True when *filename* passes the include/exclude filters."""
    # Subtree directories are skipped wholesale.
    if any(filename.startswith(subtree) for subtree in EXCLUDE_DIRS):
        return False
    return (EXCLUDE_COMPILED.match(filename) is None and
            INCLUDE_COMPILED.match(filename) is not None)
################################################################################
# obtain list of files in repo according to INCLUDE and EXCLUDE
################################################################################
# Pre-split argument vectors for subprocess (no shell is involved).
GIT_LS_CMD = 'git ls-files --full-name'.split(' ')
GIT_TOPLEVEL_CMD = 'git rev-parse --show-toplevel'.split(' ')
def call_git_ls(base_directory):
    """Return the non-empty repo-relative paths listed by `git ls-files`."""
    raw = subprocess.check_output([*GIT_LS_CMD, base_directory]).decode("utf-8")
    return [name for name in raw.split('\n') if name]
def call_git_toplevel():
    """Return the absolute path to the project root."""
    raw = subprocess.check_output(GIT_TOPLEVEL_CMD)
    return raw.strip().decode("utf-8")
def get_filenames_to_examine(base_directory):
    """Return sorted absolute paths of project files under *base_directory*
    that pass the include/exclude filters."""
    root = call_git_toplevel()
    kept = (name for name in call_git_ls(base_directory)
            if applies_to_file(name))
    return sorted(os.path.join(root, name) for name in kept)
################################################################################
# define and compile regexes for the patterns we are looking for
################################################################################
# Regex fragments: 'Copyright' with and without the literal '(c)'.
COPYRIGHT_WITH_C = r'Copyright \(c\)'
COPYRIGHT_WITHOUT_C = 'Copyright'
ANY_COPYRIGHT_STYLE = '(%s|%s)' % (COPYRIGHT_WITH_C, COPYRIGHT_WITHOUT_C)
# Years 2000-2099, as a single year / range ('2016', '2009-2016') or a
# comma-separated list ('2009, 2010, 2012').
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
YEAR_LIST = '(%s)(, %s)+' % (YEAR, YEAR)
ANY_YEAR_STYLE = '(%s|%s)' % (YEAR_RANGE, YEAR_LIST)
ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE = ("%s %s" % (ANY_COPYRIGHT_STYLE,
                                                ANY_YEAR_STYLE))
# Matches any copyright notice with any year style, holder ignored.
ANY_COPYRIGHT_COMPILED = re.compile(ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE)
def compile_copyright_regex(copyright_style, year_style, name):
    """Compile a full copyright-line regex for the given notice style,
    year pattern and holder name (trailing ' *' tolerated for C blocks)."""
    pattern = r'%s %s,? %s( +\*)?\n' % (copyright_style, year_style, name)
    return re.compile(pattern)
# Copyright holders we expect to see; any other holder is reported as
# "unexpected" by the report subcommand.
EXPECTED_HOLDER_NAMES = [
    r"Satoshi Nakamoto",
    r"The Bitcoin Core developers",
    r"BitPay Inc\.",
    r"University of Illinois at Urbana-Champaign\.",
    r"Pieter Wuille",
    r"Wladimir J\. van der Laan",
    r"Jeff Garzik",
    r"Jan-Klaas Kollhof",
    r"ArtForz -- public domain half-a-node",
    r"Intel Corporation ?",
    r"The Zcash developers",
    r"Jeremy Rubin",
]
# Per-holder compiled regexes, one dict per recognized notice style.
DOMINANT_STYLE_COMPILED = {}
YEAR_LIST_STYLE_COMPILED = {}
WITHOUT_C_STYLE_COMPILED = {}
for holder_name in EXPECTED_HOLDER_NAMES:
    DOMINANT_STYLE_COMPILED[holder_name] = (
        compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_RANGE, holder_name))
    YEAR_LIST_STYLE_COMPILED[holder_name] = (
        compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_LIST, holder_name))
    WITHOUT_C_STYLE_COMPILED[holder_name] = (
        compile_copyright_regex(COPYRIGHT_WITHOUT_C, ANY_YEAR_STYLE,
                                holder_name))
################################################################################
# search file contents for copyright message of particular category
################################################################################
def get_count_of_copyrights_of_any_style_any_holder(contents):
    """Count every copyright-plus-year occurrence, regardless of holder."""
    matches = ANY_COPYRIGHT_COMPILED.findall(contents)
    return len(matches)
def file_has_dominant_style_copyright_for_holder(contents, holder_name):
    """True when *contents* has a 'Copyright (c) <year-range>' notice
    for the given holder."""
    return DOMINANT_STYLE_COMPILED[holder_name].search(contents) is not None
def file_has_year_list_style_copyright_for_holder(contents, holder_name):
    """True when *contents* has a 'Copyright (c) <year>, <year>, ...'
    notice for the given holder."""
    return YEAR_LIST_STYLE_COMPILED[holder_name].search(contents) is not None
def file_has_without_c_style_copyright_for_holder(contents, holder_name):
    """True when *contents* has a 'Copyright <years>' notice (no '(c)')
    for the given holder."""
    return WITHOUT_C_STYLE_COMPILED[holder_name].search(contents) is not None
################################################################################
# get file info
################################################################################
def read_file(filename):
    """Return the full contents of *filename* decoded as UTF-8."""
    # Use a context manager so the handle is closed promptly; the
    # original leaked it until garbage collection.
    with open(filename, 'r', encoding="utf8") as f:
        return f.read()
def gather_file_info(filename):
    """Read *filename* and classify its copyright notices per holder.

    Returns a dict with the raw contents, the total notice count, one
    boolean map per notice style keyed by holder, and the number of
    holders matched by at least one style ('classified_copyrights').
    """
    contents = read_file(filename)
    info = {
        'filename': filename,
        'contents': contents,
        'all_copyrights': get_count_of_copyrights_of_any_style_any_holder(contents),
        'classified_copyrights': 0,
        'dominant_style': {},
        'year_list_style': {},
        'without_c_style': {},
    }
    for holder in EXPECTED_HOLDER_NAMES:
        dominant = file_has_dominant_style_copyright_for_holder(contents, holder)
        year_list = file_has_year_list_style_copyright_for_holder(contents, holder)
        without_c = file_has_without_c_style_copyright_for_holder(contents, holder)
        info['dominant_style'][holder] = dominant
        info['year_list_style'][holder] = year_list
        info['without_c_style'][holder] = without_c
        if dominant or year_list or without_c:
            info['classified_copyrights'] += 1
    return info
################################################################################
# report execution
################################################################################
SEPARATOR = '-'.join(['' for _ in range(80)])
def print_filenames(filenames, verbose):
    """Print one tab-indented filename per line, only when verbose is set."""
    if verbose:
        for name in filenames:
            print("\t%s" % name)
def print_report(file_infos, verbose):
    """Print the copyright report for the given per-file info dicts.

    Sections: counts of files by number of notices (0-3, 4+), then
    per-holder breakdowns for each of the three recognized notice
    styles, then files containing unrecognized holders.  When verbose,
    every section also lists its filenames.
    """
    print(SEPARATOR)
    examined = [i['filename'] for i in file_infos]
    print("%d files examined according to INCLUDE and EXCLUDE fnmatch rules" %
          len(examined))
    print_filenames(examined, verbose)
    print(SEPARATOR)
    print('')
    # Bucket files by how many copyright notices they contain.
    zero_copyrights = [i['filename'] for i in file_infos if
                       i['all_copyrights'] == 0]
    print("%4d with zero copyrights" % len(zero_copyrights))
    print_filenames(zero_copyrights, verbose)
    one_copyright = [i['filename'] for i in file_infos if
                     i['all_copyrights'] == 1]
    print("%4d with one copyright" % len(one_copyright))
    print_filenames(one_copyright, verbose)
    two_copyrights = [i['filename'] for i in file_infos if
                      i['all_copyrights'] == 2]
    print("%4d with two copyrights" % len(two_copyrights))
    print_filenames(two_copyrights, verbose)
    three_copyrights = [i['filename'] for i in file_infos if
                        i['all_copyrights'] == 3]
    print("%4d with three copyrights" % len(three_copyrights))
    print_filenames(three_copyrights, verbose)
    four_or_more_copyrights = [i['filename'] for i in file_infos if
                               i['all_copyrights'] >= 4]
    print("%4d with four or more copyrights" % len(four_or_more_copyrights))
    print_filenames(four_or_more_copyrights, verbose)
    print('')
    print(SEPARATOR)
    print('Copyrights with dominant style:\ne.g. "Copyright (c)" and '
          '"<year>" or "<startYear>-<endYear>":\n')
    for holder_name in EXPECTED_HOLDER_NAMES:
        dominant_style = [i['filename'] for i in file_infos if
                          i['dominant_style'][holder_name]]
        if len(dominant_style) > 0:
            print("%4d with '%s'" % (len(dominant_style),
                                     holder_name.replace('\n', '\\n')))
            print_filenames(dominant_style, verbose)
    print('')
    print(SEPARATOR)
    print('Copyrights with year list style:\ne.g. "Copyright (c)" and '
          '"<year1>, <year2>, ...":\n')
    for holder_name in EXPECTED_HOLDER_NAMES:
        year_list_style = [i['filename'] for i in file_infos if
                           i['year_list_style'][holder_name]]
        if len(year_list_style) > 0:
            print("%4d with '%s'" % (len(year_list_style),
                                     holder_name.replace('\n', '\\n')))
            print_filenames(year_list_style, verbose)
    print('')
    print(SEPARATOR)
    print('Copyrights with no "(c)" style:\ne.g. "Copyright" and "<year>" or '
          '"<startYear>-<endYear>":\n')
    for holder_name in EXPECTED_HOLDER_NAMES:
        without_c_style = [i['filename'] for i in file_infos if
                           i['without_c_style'][holder_name]]
        if len(without_c_style) > 0:
            print("%4d with '%s'" % (len(without_c_style),
                                     holder_name.replace('\n', '\\n')))
            print_filenames(without_c_style, verbose)
    print('')
    print(SEPARATOR)
    # Files where some notice matched no expected holder in any style.
    unclassified_copyrights = [i['filename'] for i in file_infos if
                               i['classified_copyrights'] < i['all_copyrights']]
    print("%d with unexpected copyright holder names" %
          len(unclassified_copyrights))
    print_filenames(unclassified_copyrights, verbose)
    print(SEPARATOR)
def exec_report(base_directory, verbose):
    """Gather copyright info for every relevant file and print the report."""
    infos = [gather_file_info(name)
             for name in get_filenames_to_examine(base_directory)]
    print_report(infos, verbose)
################################################################################
# report cmd
################################################################################
REPORT_USAGE = """
Produces a report of all copyright header notices found inside the source files
of a repository.
Usage:
$ ./copyright_header.py report <base_directory> [verbose]
Arguments:
<base_directory> - The base directory of a bitcoin source code repository.
[verbose] - Includes a list of every file of each subcategory in the report.
"""
def report_cmd(argv):
    """Validate 'report' subcommand arguments and run the report.

    argv layout: [script, 'report', <base_directory>, ['verbose']].
    Exits with usage/error text on bad arguments.
    """
    if len(argv) == 2:
        sys.exit(REPORT_USAGE)
    base_directory = argv[2]
    if not os.path.exists(base_directory):
        sys.exit("*** bad <base_directory>: %s" % base_directory)
    if len(argv) == 3:
        verbose = False
    elif argv[3] == 'verbose':
        verbose = True
    else:
        # Bug fix: report the argument that was actually unrecognized
        # (argv[3]), not the base directory (argv[2]).
        sys.exit("*** unknown argument: %s" % argv[3])
    exec_report(base_directory, verbose)
################################################################################
# query git for year of last change
################################################################################
# '%%ai' escapes to '%ai' (author date, ISO 8601) once the filename is
# substituted in below.
GIT_LOG_CMD = "git log --pretty=format:%%ai %s"
def call_git_log(filename):
    """Return the author-date line for each commit touching *filename*."""
    # NOTE(review): a filename containing spaces would be split into
    # several arguments here; acceptable for this repository's tree.
    out = subprocess.check_output((GIT_LOG_CMD % filename).split(' '))
    return out.decode("utf-8").split('\n')
def get_git_change_years(filename):
    """Return the year (as a string) of every commit touching *filename*.

    Falls back to the current year when the file has no git history.
    """
    # Drop empty lines: `git log` on an untracked/uncommitted file
    # produces empty output, which the original turned into [''].
    git_log_lines = [line for line in call_git_log(filename) if line]
    if not git_log_lines:
        # Bug fix: return the year as a string (the original returned an
        # int here but strings on the normal path, so the later string
        # comparison against parsed header years could TypeError).
        return [str(datetime.date.today().year)]
    # timestamp is in ISO 8601 format. e.g. "2016-09-05 14:25:32 -0600"
    return [line.split(' ')[0].split('-')[0] for line in git_log_lines]
def get_most_recent_git_change_year(filename):
    """Return the most recent year in *filename*'s git history."""
    change_years = get_git_change_years(filename)
    return max(change_years)
################################################################################
# read and write to file
################################################################################
def read_file_lines(filename):
    """Return the lines of *filename* (UTF-8), keeping line endings."""
    # Context manager instead of manual open/close for exception safety.
    with open(filename, 'r', encoding="utf8") as f:
        return f.readlines()
def write_file_lines(filename, file_lines):
    """Write *file_lines* (strings including newlines) to *filename* as UTF-8."""
    # Context manager instead of manual open/close for exception safety.
    with open(filename, 'w', encoding="utf8") as f:
        f.write(''.join(file_lines))
################################################################################
# update header years execution
################################################################################
# NOTE: YEAR and YEAR_RANGE intentionally re-bind the report-section
# globals above; the values are identical.
COPYRIGHT = r'Copyright \(c\)'
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
HOLDER = 'The Bitcoin Core developers'
# Matches a core-developers copyright line whose year(s) can be bumped.
UPDATEABLE_LINE_COMPILED = re.compile(' '.join([COPYRIGHT, YEAR_RANGE, HOLDER]))
def get_updatable_copyright_line(file_lines):
    """Return (index, line) of the first updatable core copyright line,
    or (None, None) when no line matches."""
    # enumerate() replaces the original's manually maintained counter.
    for index, line in enumerate(file_lines):
        if UPDATEABLE_LINE_COMPILED.search(line) is not None:
            return index, line
    return None, None
def parse_year_range(year_range):
    """Split 'YYYY' or 'YYYY-YYYY' into a (start, end) pair of strings;
    a single year is returned as both start and end."""
    parts = year_range.split('-')
    if len(parts) == 1:
        return parts[0], parts[0]
    return parts[0], parts[1]
def year_range_to_str(start_year, end_year):
    """Format a (start, end) pair back into 'YYYY' or 'YYYY-YYYY',
    collapsing identical endpoints to a single year."""
    if start_year != end_year:
        return "%s-%s" % (start_year, end_year)
    return start_year
def create_updated_copyright_line(line, last_git_change_year):
    """Return *line* with its end year bumped to *last_git_change_year*,
    or the unchanged line when it is already current."""
    copyright_splitter = 'Copyright (c) '
    copyright_split = line.split(copyright_splitter)
    # Preserve characters on line that are ahead of the start of the copyright
    # notice - they are part of the comment block and vary from file-to-file.
    before_copyright = copyright_split[0]
    after_copyright = copyright_split[1]
    space_split = after_copyright.split(' ')
    year_range = space_split[0]
    start_year, end_year = parse_year_range(year_range)
    # Years compare as strings; lexicographic order matches numeric
    # order for four-digit '20xx' years.
    if end_year >= last_git_change_year:
        return line
    # The trailing '\n' survives inside the last space_split element.
    return (before_copyright + copyright_splitter +
            year_range_to_str(start_year, last_git_change_year) + ' ' +
            ' '.join(space_split[1:]))
def update_updatable_copyright(filename):
    """Rewrite *filename*'s core copyright line if git shows newer changes,
    printing one status line describing the action taken."""
    file_lines = read_file_lines(filename)
    index, line = get_updatable_copyright_line(file_lines)
    if not line:
        print_file_action_message(filename, "No updatable copyright.")
        return
    last_git_change_year = get_most_recent_git_change_year(filename)
    new_line = create_updated_copyright_line(line, last_git_change_year)
    if line == new_line:
        print_file_action_message(filename, "Copyright up-to-date.")
        return
    # Only write the file back when the line actually changed.
    file_lines[index] = new_line
    write_file_lines(filename, file_lines)
    print_file_action_message(filename,
                              "Copyright updated! -> %s" % last_git_change_year)
def exec_update_header_year(base_directory):
    """Update the core copyright year on every relevant file under
    *base_directory*."""
    filenames = get_filenames_to_examine(base_directory)
    for name in filenames:
        update_updatable_copyright(name)
################################################################################
# update cmd
################################################################################
UPDATE_USAGE = """
Updates all the copyright headers of "The Bitcoin Core developers" which were
changed in a year more recent than is listed. For example:
// Copyright (c) <firstYear>-<lastYear> The Bitcoin Core developers
will be updated to:
// Copyright (c) <firstYear>-<lastModifiedYear> The Bitcoin Core developers
where <lastModifiedYear> is obtained from the 'git log' history.
This subcommand also handles copyright headers that have only a single year. In those cases:
// Copyright (c) <year> The Bitcoin Core developers
will be updated to:
// Copyright (c) <year>-<lastModifiedYear> The Bitcoin Core developers
where the update is appropriate.
Usage:
$ ./copyright_header.py update <base_directory>
Arguments:
<base_directory> - The base directory of a bitcoin source code repository.
"""
def print_file_action_message(filename, action):
    """Print a filename and the action taken on it, in aligned columns."""
    print(f"{filename:<52} {action}")
def update_cmd(argv):
    """Validate 'update' subcommand arguments and run the year update.

    argv layout: [script, 'update', <base_directory>].
    """
    if len(argv) != 3:
        sys.exit(UPDATE_USAGE)
    base_directory = argv[2]
    if os.path.exists(base_directory):
        exec_update_header_year(base_directory)
    else:
        sys.exit("*** bad base_directory: %s" % base_directory)
################################################################################
# inserted copyright header format
################################################################################
def get_header_lines(header, start_year, end_year):
    """Instantiate a header template into newline-terminated lines,
    substituting the year range into the first line."""
    # Strip the blank first/last entries produced by the template's
    # leading and trailing newlines.
    lines = header.split('\n')[1:-1]
    lines[0] = lines[0] % year_range_to_str(start_year, end_year)
    return ['%s\n' % line for line in lines]
# C++-style header template; '%s' receives the year range.
CPP_HEADER = '''
// Copyright (c) %s The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
def get_cpp_header_lines_to_insert(start_year, end_year):
    """C++-style header lines, reversed for insertion at index 0."""
    lines = get_header_lines(CPP_HEADER, start_year, end_year)
    return reversed(lines)
# Script-style (shell/python) header template; '%s' receives the year range.
SCRIPT_HEADER = '''
# Copyright (c) %s The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
def get_script_header_lines_to_insert(start_year, end_year):
    """Script-style header lines, reversed for insertion at a fixed index."""
    lines = get_header_lines(SCRIPT_HEADER, start_year, end_year)
    return reversed(lines)
################################################################################
# query git for year of last change
################################################################################
def get_git_change_year_range(filename):
    """Return (earliest, latest) year from *filename*'s git history."""
    change_years = get_git_change_years(filename)
    return (min(change_years), max(change_years))
################################################################################
# check for existing core copyright
################################################################################
def file_already_has_core_copyright(file_lines):
    """True when an updatable core-developers copyright line exists."""
    line_index = get_updatable_copyright_line(file_lines)[0]
    return line_index is not None
################################################################################
# insert header execution
################################################################################
def file_has_hashbang(file_lines):
    """True when the file opens with a '#!' line longer than two chars."""
    if not file_lines:
        return False
    first_line = file_lines[0]
    return len(first_line) > 2 and first_line[:2] == '#!'
def insert_script_header(filename, file_lines, start_year, end_year):
    """Insert the script-style header (after any hashbang) and write the file."""
    insert_idx = 1 if file_has_hashbang(file_lines) else 0
    # The header lines arrive reversed, so repeated inserts at a fixed
    # index leave them in the correct order.
    for header_line in get_script_header_lines_to_insert(start_year, end_year):
        file_lines.insert(insert_idx, header_line)
    write_file_lines(filename, file_lines)
def insert_cpp_header(filename, file_lines, start_year, end_year):
    """Insert the C++-style header plus a blank line at the top and write
    the file."""
    file_lines.insert(0, '\n')
    # Reversed header lines + repeated insert at 0 yields correct order.
    for header_line in get_cpp_header_lines_to_insert(start_year, end_year):
        file_lines.insert(0, header_line)
    write_file_lines(filename, file_lines)
def exec_insert_header(filename, style):
    """Insert a core copyright header into *filename* in the given style,
    refusing files that already carry one."""
    file_lines = read_file_lines(filename)
    if file_already_has_core_copyright(file_lines):
        sys.exit('*** %s already has a copyright by The Bitcoin Core developers'
                 % (filename))
    start_year, end_year = get_git_change_year_range(filename)
    if style in ['python', 'shell']:
        inserter = insert_script_header
    else:
        inserter = insert_cpp_header
    inserter(filename, file_lines, start_year, end_year)
################################################################################
# insert cmd
################################################################################
INSERT_USAGE = """
Inserts a copyright header for "The Bitcoin Core developers" at the top of the
file in either Python or C++ style as determined by the file extension. If the
file is a Python file and it has a '#!' starting the first line, the header is
inserted in the line below it.
The copyright dates will be set to be:
"<year_introduced>-<current_year>"
where <year_introduced> is according to the 'git log' history. If
<year_introduced> is equal to <current_year>, the date will be set to be:
"<current_year>"
If the file already has a copyright for "The Bitcoin Core developers", the
script will exit.
Usage:
$ ./copyright_header.py insert <file>
Arguments:
<file> - A source file in the bitcoin repository.
"""
def insert_cmd(argv):
    """Entry point for the 'insert' subcommand.

    Validates the argument list and the file, maps the file extension to
    a header style, and delegates to exec_insert_header. Exits with a
    usage/error message on any invalid input.
    """
    if len(argv) != 3:
        sys.exit(INSERT_USAGE)
    filename = argv[2]
    if not os.path.isfile(filename):
        sys.exit("*** bad filename: %s" % filename)
    _, extension = os.path.splitext(filename)
    style_by_extension = {'.py': 'python',
                          '.sh': 'shell',
                          '.h': 'cpp',
                          '.cpp': 'cpp',
                          '.cc': 'cpp',
                          '.c': 'cpp'}
    if extension not in style_by_extension:
        sys.exit("*** cannot insert for file extension %s" % extension)
    exec_insert_header(filename, style_by_extension[extension])
################################################################################
# UI
################################################################################

# Top-level usage text printed when no (or an unknown) subcommand is given.
USAGE = """
copyright_header.py - utilities for managing copyright headers of 'The Bitcoin
Core developers' in repository source files.
Usage:
    $ ./copyright_header <subcommand>
Subcommands:
    report
    update
    insert
To see subcommand usage, run them without arguments.
"""

# Recognized subcommands; each is dispatched to its *_cmd handler below.
SUBCOMMANDS = ['report', 'update', 'insert']

if __name__ == "__main__":
    # No subcommand at all -> print usage and exit.
    if len(sys.argv) == 1:
        sys.exit(USAGE)
    subcommand = sys.argv[1]
    if subcommand not in SUBCOMMANDS:
        sys.exit(USAGE)
    # Each handler parses the remaining arguments itself.
    if subcommand == 'report':
        report_cmd(sys.argv)
    elif subcommand == 'update':
        update_cmd(sys.argv)
    elif subcommand == 'insert':
        insert_cmd(sys.argv)
| |
#! /usr/bin/env python3
"""A simple python script that grades MC output from Scantron forms.
Requires two files. (1) The grading key, a matrix with point values for
each answer deliminated by spaces, one row per question. Zeroes must be
included. (2) The raw output from IT. Correctly handles blank answers
("."). An optional key Scantron can be included (with ID KEY_ID).
Optionally, a scramble file can be specified to allow multiple forms to
be handled. The scramble file is a simple matrix with columns of
matching question numbers. It does have to include a column for Form 1
if it is being used.
"""
import numpy as np
import argparse
# Top and bottom cutoffs for analysis of answers, as fraction. IT uses
# 27% for some reason.
ANALYSIS_THRESHOLD = 0.27
# Sentinel student ID marking a Scantron sheet as the answer key rather
# than a real student.
KEY_ID = "00000000"
def generate_responses_array(answers):
    """Convert a raw answer string into a one-hot response matrix.

    Args:
        answers (string): A string in the format "010123120...", where
            each character is a digit 0-4 (the chosen option) or "."
            for a blank answer.

    Returns:
        numpy.array with one 5-wide row per question; the selected
        option's column holds 1 (e.g. [0,1,0,0,0] means B), and a blank
        answer yields an all-zero row. Multiplying this by the key and
        summing gives the total score.
    """
    def one_hot(mark):
        # "." (blank) stays all zeros; otherwise set the chosen column.
        row = [0] * 5
        if mark != ".":
            row[int(mark)] = 1
        return row

    return np.array([one_hot(mark) for mark in answers])
def descramble(responses, formnum, scramble):
    """Reorder one student's responses into form-1 question order.

    Args:
        responses (numpy.array): One-hot response rows as produced by
            generate_responses_array.
        formnum (int): Zero-based form index for the student.
        scramble (numpy.array): scramble[formnum] lists, for each form-1
            question position, the (1-based) question number on that form.

    Returns:
        numpy.array of the same rows, permuted so every form can be
        graded against the single form-1 key.
    """
    order = [int(question) - 1 for question in scramble[formnum]]
    return np.array([responses[index] for index in order])
def convert_response_to_letter(response):
    """Return the letter (A-E) of the first marked option in *response*,
    or "." if the row is all zeros (a blank answer).
    """
    for position, mark in enumerate(response.tolist()):
        if mark == 1:
            return chr(65 + position)
    return "."
def main(key_file_name, answers_file_name, title="Graded Exam",
         scramble_file_name=None):
    """Processes raw Scantron output and returns the grades and
    statistics.
    Args:
        key_file_name (string): Name of the file with the key. The key
            is a matrix in the format [ 0 1 0 0 0; 0 0 1 0 0; ... ]
            where the first row would award 1 point for B. Partial
            credit and double answers are fine.
        answers_file_name (string): Name of the file containing the raw
            Scantron output.
        title="Graded Exam" (string): Title to be printed at the top of
            the output.
        scramble_file_name=None (string): Optional filename of file
            containing the scramble. If not included, everyone is graded
            as the same form as laid out in the key. Format is a matrix
            in the format [ 1 2; 2 3; 3 1; ... ] where form 2 would have
            questions 1, 2, 3 from form 1 as numbers 2, 3, 1.
    Returns:
        A string containing formatted output with exam statistics and
        student scores, and an array of individual student responses.
    """
    # Load the key. Result is a numpy array.
    with open(key_file_name) as key_file:
        ans_key = np.loadtxt(key_file)
    num_questions = len(ans_key)
    # Load and process the scramble file, if available. Transposes so
    # that scramble[0] returns an array of questions for form 1,
    # scramble[1] returns array of questions for form 2, etc.
    if scramble_file_name:
        with open(scramble_file_name) as scramble_file:
            scramble = np.loadtxt(scramble_file).transpose()
    else:
        # Identity scramble: every form graded as form 1.
        # NOTE(review): hard-coded for at most 4 forms — confirm no form
        # number above 4 appears in the raw data when no scramble file is
        # given, otherwise the per-student output below would index past it.
        scramble = np.array([range(1,num_questions+1) for n in range(4)])
    # Load the student info. Characters 0-7 in the input file are the
    # student's uniqueID. Character 9 is the form number. Characters
    # 10-? are the recorded answers. Descrambles the responses. For
    # student n, students[n]['name'] is the student's name,
    # students[n]['responses'] is the set of responses, as an array.
    # Only collects the responses portion of the string (up to
    # 10+num_questions), because the Scantron system can append extra
    # characters.
    students = []
    key_response = {} # Form with the key
    with open(answers_file_name) as answers_file:
        for line in answers_file:
            uniqueid = line[0:8]
            # A blank/"." form field defaults to form 1 (index 0).
            if line[9] == " " or line[9] == ".":
                form_num = 0
            else:
                form_num = int(line[9]) - 1
            responses = generate_responses_array(
                line[10:10 + num_questions].replace("\n", ""))
            if scramble_file_name:
                responses = descramble(responses, form_num, scramble)
            if uniqueid == KEY_ID:
                key_response = {'name': uniqueid, 'responses': responses,
                                'form': form_num}
            else:
                students.append({'name': uniqueid, 'responses': responses,
                                 'form': form_num})
    num_students = len(students)
    # Number of students in the top/bottom slices used for item analysis.
    num_students_analysis = round(ANALYSIS_THRESHOLD * num_students)
    # Actually determines score for each student. Multiplies sets of
    # responses by the key, then sums over whole array. Score is stored
    # as students[n]['score']
    for stu_num in range(len(students)):
        students[stu_num]['score'] = (students[stu_num]['responses'] *
                                      ans_key).sum()
    # Same for key.
    if key_response:
        key_response['score'] = (key_response['responses'] * ans_key).sum()
    # The maximum possible score, determined from the key.
    max_score = sum([ans_row.max() for ans_row in ans_key])
    # Generates a new array, students_sorted_grade, that is just sorted
    # by grades.
    students_sorted_grade = sorted(students,
                                   key=lambda s: s['score'], reverse=True)
    # Determines number of each response, by question, for all students,
    # and for the top and bottom students in the class. Values are given
    # as fractions.
    all_answers_frac = (sum(n['responses']
                        for n in students_sorted_grade[:]) / num_students)
    top_answers_frac = (sum(n['responses']
                        for n in students_sorted_grade[:num_students_analysis])
                        / num_students_analysis)
    bot_answers_frac = (sum(n['responses']
                        for n in students_sorted_grade[-num_students_analysis:])
                        / num_students_analysis)
    # List of all grades. Students only.
    all_grades = [s['score'] for s in students]
    # The score for the Scantron key, as a check to make sure it was
    # assessed properly.
    if key_response:
        print("\nCheck: the Scantron key (uniqueID = {}) scores {:.2f} "\
            "out of {:.2f}.\n".format(key_response['name'],
                                      key_response['score'], max_score))
    # Variable output_text is the actual textual output of the function.
    output_text = ""
    output_text += "{}\n".format(title)
    output_text += "{}\n\n".format("=" * len(title))
    # The overall averages, max, and min.
    output_text += " Overall average: {:.2f} "\
        "out of {:.2f} points ({:.2%})\n".format(
            np.mean(all_grades),
            max_score,
            np.mean(all_grades) / max_score)
    output_text += "Standard deviation: {:.2f}\n".format(np.std(all_grades))
    output_text += " High: {}\n".format(max(all_grades))
    output_text += " Low: {}\n".format(min(all_grades))
    output_text += "\n"
    # Breakdown by question. Includes both average score for the
    # question, the overall performance, and the performance of the
    # strongest and weakest students.
    output_text += "Average Scores by Question\n"
    output_text += "{}\n\n".format("-" * 26)
    for n in range(num_questions):
        output_text += "{:3}: {:.2f} Key: ".format(
            n + 1, sum(all_answers_frac[n] * ans_key[n]))
        # Point values for each option; "-" marks zero-credit options.
        for m in range(len(ans_key[n])):
            if ans_key[n][m] != 0:
                output_text += "{:6.1f} ".format(ans_key[n][m])
            else:
                output_text += " - "
        output_text += "\n Frequency: "
        for m in range(len(all_answers_frac[n])):
            output_text += "{:5.1f} ".format(all_answers_frac[n][m] * 100)
        output_text += "(%)\n Top {:2.0f}%: ".format(
            ANALYSIS_THRESHOLD * 100)
        for m in range(len(top_answers_frac[n])):
            output_text += "{:5.1f} ".format(top_answers_frac[n][m] * 100)
        output_text += "\n Bot {:2.0f}%: ".format(
            ANALYSIS_THRESHOLD * 100)
        for m in range(len(bot_answers_frac[n])):
            output_text += "{:5.1f} ".format(bot_answers_frac[n][m] * 100)
        output_text += "\n\n"
    # Actual student scores.
    students_sorted_name = sorted(students, key=lambda s: s['name'])
    output_text += "\nStudent Scores\n"
    output_text += "{}\n".format("-" * 14)
    for student in students_sorted_name:
        output_text += "{:8}\t{:.1f}\n".format(student['name'],
                                               student['score'])
    # Generate individual student output.
    student_output_text = {}
    for student in students:
        student_output = "{}\n".format(student['name'])
        student_output += "{}\n".format(title)
        student_output += "Form: {}\n".format(student['form'] + 1)
        student_output += "Q# (F1) Response Score | Q# (F1) Response Score\n"
        student_output += "======= ======== ===== | ======= ======== =====\n"
        # Question output stored in a list that can be sorted later.
        # Necessary otherwise descrambling leaves the questions in the
        # wrong order.
        question_output = []
        for n in range(num_questions):
            if 1 in student['responses'][n]:
                question_output.append("{:2.0f} ({:2.0f})"\
                    " {} {:1.2f}".format(
                        scramble[student['form']][n],
                        scramble[0][n],
                        convert_response_to_letter(student['responses'][n]),
                        ans_key[n][student['responses'][n].tolist().index(1)]))
            else:
                question_output.append("{:2.0f} ({:2.0f})"\
                    " {} {:1.2f}".format(
                        scramble[student['form']][n],
                        scramble[0][n],
                        "Blank", 0))
        # Lexicographic sort works because question numbers are
        # right-aligned to width 2.
        question_output = sorted(question_output)
        # Print in 2 columns.
        for n in range(num_questions//2 + num_questions%2):
            if n < num_questions//2:
                student_output += "{} | {}\n".format(
                    question_output[n],
                    question_output[n+(num_questions+1)//2])
            else: # Last row if odd number of questions
                student_output += "{} |\n".format(question_output[n])
        student_output += "\nTotal: {} / {}".format(
            student['score'], max_score)
        student_output_text[student['name']] = student_output
    return(output_text, student_output_text)
def process_grades():
    """Command-line entry point: parse arguments, grade the exam via
    main(), write '<title>.txt', and optionally one '<uniqueID>_out.txt'
    per student.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "key_file",
        help="Filename for key")
    arg_parser.add_argument(
        "raw_file",
        help="Filename for student responses")
    arg_parser.add_argument(
        "title",
        help="Title for results")
    arg_parser.add_argument(
        "-s", "--scramble_file",
        help="Filename for multiple forms scramble")
    arg_parser.add_argument(
        "-o", "--individual_output",
        help="Output individual responses as uniqueID.txt",
        action='store_true')
    args = arg_parser.parse_args()

    scramble_file = args.scramble_file if args.scramble_file else None
    output, student_output = main(args.key_file, args.raw_file, args.title,
                                  scramble_file)

    with open(args.title + ".txt", 'w') as output_file:
        output_file.write(output)
    if args.individual_output:
        for student_id in sorted(student_output):
            with open("{}_out.txt".format(student_id), 'w') as output_file:
                output_file.write(student_output[student_id])
| |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.learn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
import tensorflow.python.platform # pylint: disable=unused-import,g-bad-import-order
import numpy as np
import six
import tensorflow as tf
from tensorflow.python.framework import tensor_util
def assert_summary_scope(regexp):
    """Assert that all generated summaries match regexp."""
    for summary in tf.get_collection(tf.GraphKeys.SUMMARIES):
        # Recover the python string value of the summary's (constant) tag
        # tensor. Fix: use tensor_util.constant_value — imported at the top
        # of this file but previously unused — instead of the nonexistent
        # tf.unsupported.constant_value attribute.
        tag = tensor_util.constant_value(summary.op.inputs[0])
        assert tag is not None, 'All summaries must have constant tags'
        tag = str(tag)
        # tag[0] is a single character of the tag string.
        assert isinstance(tag[0], six.string_types), tag[0]
        assert re.match(regexp, tag), "tag doesn't match %s: %s" % (regexp, tag)
class FullyConnectedTest(tf.test.TestCase):
    """Tests for tf.learn.fully_connected."""

    def setUp(self):
        tf.test.TestCase.setUp(self)
        tf.set_random_seed(1234)
        self.input = tf.constant([[1., 2., 3.], [-4., 5., -6.]])
        assert not tf.get_collection(tf.GraphKeys.SUMMARIES)

    def test_basic_use(self):
        output = tf.learn.fully_connected(self.input, 8, activation_fn=tf.nn.relu)
        with tf.Session() as sess:
            # Variables are not initialized yet, so running must fail.
            with self.assertRaises(tf.errors.FailedPreconditionError):
                sess.run(output)
            tf.initialize_all_variables().run()
            out_value = sess.run(output)
        self.assertEqual(output.get_shape().as_list(), [2, 8])
        self.assertTrue(np.all(out_value >= 0),
                        'Relu should have capped all values.')
        # Fix: compare the collection's length, not the list itself —
        # list-vs-int comparison raises TypeError on Python 3.
        self.assertGreater(len(tf.get_collection(tf.GraphKeys.SUMMARIES)), 0,
                           'Some summaries should have been added.')
        self.assertEqual(2,
                         len(tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)))
        self.assertEqual(0,
                         len(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)))
        assert_summary_scope('fully_connected')

    def test_variable_reuse_with_scope(self):
        with tf.variable_scope('test') as vs:
            output1 = tf.learn.fully_connected(self.input,
                                               8,
                                               activation_fn=tf.nn.relu)
            output2 = tf.learn.fully_connected(self.input,
                                               8,
                                               activation_fn=tf.nn.relu)
        with tf.variable_scope(vs, reuse=True):
            output3 = tf.learn.fully_connected(self.input,
                                               8,
                                               activation_fn=tf.nn.relu)
        with tf.Session() as sess:
            tf.initialize_all_variables().run()
            out_value1, out_value2, out_value3 = sess.run([output1, output2, output3])
        # Two calls in the same scope create distinct variables; reuse
        # shares output1's variables.
        self.assertFalse(np.allclose(out_value1, out_value2))
        self.assertAllClose(out_value1, out_value3)

    def test_variable_reuse_with_template(self):
        tmpl1 = tf.make_template('test',
                                 tf.learn.fully_connected,
                                 num_output_nodes=8)
        output1 = tmpl1(self.input)
        output2 = tmpl1(self.input)
        with tf.Session() as sess:
            tf.initialize_all_variables().run()
            out_value1, out_value2 = sess.run([output1, output2])
        self.assertAllClose(out_value1, out_value2)
        assert_summary_scope(r'test(_\d)?/fully_connected')

    def test_custom_initializers(self):
        output = tf.learn.fully_connected(self.input,
                                          2,
                                          activation_fn=tf.nn.relu,
                                          weight_init=tf.constant_initializer(2.0),
                                          bias_init=tf.constant_initializer(1.0))
        with tf.Session() as sess:
            tf.initialize_all_variables().run()
            out_value = sess.run(output)
        # (1+2+3)*2+1 = 13; (-4+5-6)*2+1 = -9 -> relu -> 0.
        self.assertAllClose(np.array([[13.0, 13.0], [0.0, 0.0]]), out_value)

    def test_custom_collections(self):
        tf.learn.fully_connected(self.input,
                                 2,
                                 activation_fn=tf.nn.relu,
                                 weight_collections=['unbiased'],
                                 bias_collections=['biased'])
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(1, len(tf.get_collection('unbiased')))
        self.assertEqual(1, len(tf.get_collection('biased')))

    def test_all_custom_collections(self):
        tf.learn.fully_connected(self.input,
                                 2,
                                 activation_fn=tf.nn.relu,
                                 weight_collections=['unbiased', 'all'],
                                 bias_collections=['biased', 'all'])
        self.assertEqual(1, len(tf.get_collection('unbiased')))
        self.assertEqual(1, len(tf.get_collection('biased')))
        self.assertEqual(2, len(tf.get_collection('all')))
        self.assertEqual(tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES),
                         tf.get_collection('all'))

    def test_no_summaries(self):
        tf.learn.fully_connected(self.input,
                                 2,
                                 activation_fn=tf.nn.relu,
                                 create_summaries=False)
        self.assertEqual([], tf.get_collection(tf.GraphKeys.SUMMARIES))

    # Verify fix of a bug where no_summaries + activation_fn=None led to a
    # NoneType exception.
    def test_no_summaries_no_activation(self):
        tf.learn.fully_connected(self.input,
                                 2,
                                 activation_fn=None,
                                 create_summaries=False)
        self.assertEqual([], tf.get_collection(tf.GraphKeys.SUMMARIES))

    def test_regularizer(self):
        cnt = [0]
        tensor = tf.constant(5.0)
        def test_fn(_):
            cnt[0] += 1
            return tensor
        tf.learn.fully_connected(self.input, 2, weight_regularizer=test_fn)
        # The regularizer must be invoked exactly once and its result
        # collected as a regularization loss.
        self.assertEqual([tensor],
                         tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))
        self.assertEqual(1, cnt[0])

    def test_shape_enforcement(self):
        place = tf.placeholder(tf.float32)
        with self.assertRaises(ValueError):
            tf.learn.fully_connected(place, 8)
        tf.learn.fully_connected(place, 8, num_input_nodes=5)  # No error
        place.set_shape([None, None])
        with self.assertRaises(ValueError):
            tf.learn.fully_connected(place, 8)
        tf.learn.fully_connected(place, 8, num_input_nodes=5)  # No error
        place.set_shape([None, 6])
        tf.learn.fully_connected(place, 8)  # No error
        with self.assertRaises(ValueError):
            tf.learn.fully_connected(place, 8, num_input_nodes=5)
        place = tf.placeholder(tf.float32)
        place.set_shape([2, 6, 5])
        with self.assertRaises(ValueError):
            tf.learn.fully_connected(place, 8)

    def test_no_bias(self):
        tf.learn.fully_connected(self.input, 2, bias_init=None)
        # Only the weight variable should be created.
        self.assertEqual(1,
                         len(tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)))
class Convolution2dTest(tf.test.TestCase):
    """Tests for tf.learn.convolution2d."""

    def setUp(self):
        tf.test.TestCase.setUp(self)
        tf.set_random_seed(1234)
        self.input = tf.constant(np.arange(2 * 3 * 3 * 4).reshape(
            [2, 3, 3, 4]).astype(np.float32))
        assert not tf.get_collection(tf.GraphKeys.SUMMARIES)

    def test_basic_use(self):
        output = tf.learn.convolution2d(self.input, 8, (3, 3),
                                        activation_fn=tf.nn.relu)
        with tf.Session() as sess:
            # Variables are not initialized yet, so running must fail.
            with self.assertRaises(tf.errors.FailedPreconditionError):
                sess.run(output)
            tf.initialize_all_variables().run()
            out_value = sess.run(output)
        self.assertEqual(output.get_shape().as_list(), [2, 3, 3, 8])
        self.assertTrue(np.all(out_value >= 0),
                        'Relu should have capped all values.')
        # Fix: compare the collection's length, not the list itself —
        # list-vs-int comparison raises TypeError on Python 3.
        self.assertGreater(len(tf.get_collection(tf.GraphKeys.SUMMARIES)), 0,
                           'Some summaries should have been added.')
        self.assertEqual(2,
                         len(tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)))
        self.assertEqual(0,
                         len(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)))
        assert_summary_scope('convolution2d')

    def test_variable_reuse_with_scope(self):
        with tf.variable_scope('test') as vs:
            output1 = tf.learn.convolution2d(self.input,
                                             8, (3, 3),
                                             activation_fn=tf.nn.relu)
            output2 = tf.learn.convolution2d(self.input,
                                             8, (3, 3),
                                             activation_fn=tf.nn.relu)
        with tf.variable_scope(vs, reuse=True):
            output3 = tf.learn.convolution2d(self.input,
                                             8, (3, 3),
                                             activation_fn=tf.nn.relu)
        with tf.Session() as sess:
            tf.initialize_all_variables().run()
            out_value1, out_value2, out_value3 = sess.run([output1, output2, output3])
        # Two calls in the same scope create distinct variables; reuse
        # shares output1's variables.
        self.assertFalse(np.allclose(out_value1, out_value2))
        self.assertAllClose(out_value1, out_value3)

    def test_variable_reuse_with_template(self):
        tmpl1 = tf.make_template('test',
                                 tf.learn.convolution2d,
                                 kernel_size=(3, 3),
                                 num_output_channels=8)
        output1 = tmpl1(self.input)
        output2 = tmpl1(self.input)
        with tf.Session() as sess:
            tf.initialize_all_variables().run()
            out_value1, out_value2 = sess.run([output1, output2])
        self.assertAllClose(out_value1, out_value2)
        assert_summary_scope(r'test(_\d)?/convolution2d')

    def test_custom_initializers(self):
        output = tf.learn.convolution2d(self.input,
                                        2,
                                        (3, 3),
                                        activation_fn=tf.nn.relu,
                                        weight_init=tf.constant_initializer(2.0),
                                        bias_init=tf.constant_initializer(1.0),
                                        padding='VALID')
        with tf.Session() as sess:
            tf.initialize_all_variables().run()
            out_value = sess.run(output)
        self.assertAllClose(
            np.array([[[[1261., 1261.]]], [[[3853., 3853.]]]]), out_value)

    def test_custom_collections(self):
        tf.learn.convolution2d(self.input,
                               2, (3, 3),
                               activation_fn=tf.nn.relu,
                               weight_collections=['unbiased'],
                               bias_collections=['biased'])
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(1, len(tf.get_collection('unbiased')))
        self.assertEqual(1, len(tf.get_collection('biased')))

    def test_all_custom_collections(self):
        tf.learn.convolution2d(self.input,
                               2, (3, 3),
                               activation_fn=tf.nn.relu,
                               weight_collections=['unbiased', 'all'],
                               bias_collections=['biased', 'all'])
        self.assertEqual(1, len(tf.get_collection('unbiased')))
        self.assertEqual(1, len(tf.get_collection('biased')))
        self.assertEqual(2, len(tf.get_collection('all')))
        self.assertEqual(tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES),
                         tf.get_collection('all'))

    def test_no_summaries(self):
        tf.learn.convolution2d(self.input,
                               2, (3, 3),
                               activation_fn=tf.nn.relu,
                               create_summaries=False)
        self.assertEqual([], tf.get_collection(tf.GraphKeys.SUMMARIES))

    def test_regularizer(self):
        cnt = [0]
        tensor = tf.constant(5.0)
        def test_fn(_):
            cnt[0] += 1
            return tensor
        tf.learn.convolution2d(self.input, 2, (3, 3), weight_regularizer=test_fn)
        # The regularizer must be invoked exactly once and its result
        # collected as a regularization loss.
        self.assertEqual([tensor],
                         tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))
        self.assertEqual(1, cnt[0])

    def test_shape_enforcement(self):
        place = tf.placeholder(tf.float32)
        with self.assertRaises(ValueError):
            tf.learn.convolution2d(place, 8, (3, 3))
        tf.learn.convolution2d(place, 8, (3, 3), num_input_channels=5)  # No error
        place.set_shape([None, None, None, None])
        with self.assertRaises(ValueError):
            tf.learn.convolution2d(place, 8, (3, 3))
        tf.learn.convolution2d(place, 8, (3, 3), num_input_channels=5)  # No error
        place.set_shape([None, None, None, 6])
        tf.learn.convolution2d(place, 8, (3, 3))  # No error
        with self.assertRaises(ValueError):
            tf.learn.convolution2d(place, 8, (3, 3), num_input_channels=5)
        place = tf.placeholder(tf.float32)
        place.set_shape([2, 6, 5])
        with self.assertRaises(ValueError):
            tf.learn.convolution2d(place, 8, (3, 3))

    def test_no_bias(self):
        tf.learn.convolution2d(self.input, 2, (3, 3), bias_init=None)
        # Only the kernel variable should be created.
        self.assertEqual(1,
                         len(tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)))
class RegularizerTest(tf.test.TestCase):
    """Tests for the l1/l2 weight-regularizer factories in tf.learn."""

    def test_l1(self):
        # NOTE(review): scales of 2. and -1. are both expected to raise —
        # presumably the scale must lie in [0, 1); confirm against the
        # tf.learn regularizer contract.
        with self.assertRaises(ValueError):
            tf.learn.l1_regularizer(2.)
        with self.assertRaises(ValueError):
            tf.learn.l1_regularizer(-1.)
        # NOTE(review): integer 0 is rejected while float 0. (below) yields
        # a no-op regularizer returning None — confirm this asymmetry is
        # intended.
        with self.assertRaises(ValueError):
            tf.learn.l1_regularizer(0)
        self.assertIsNone(tf.learn.l1_regularizer(0.)(None))
        values = np.array([1., -1., 4., 2.])
        weights = tf.constant(values)
        with tf.Session() as sess:
            result = sess.run(tf.learn.l1_regularizer(.5)(weights))
        # L1 penalty: scale * sum(|w|).
        self.assertAllClose(np.abs(values).sum() * .5, result)

    def test_l2(self):
        # Same scale-validation expectations as test_l1.
        with self.assertRaises(ValueError):
            tf.learn.l2_regularizer(2.)
        with self.assertRaises(ValueError):
            tf.learn.l2_regularizer(-1.)
        with self.assertRaises(ValueError):
            tf.learn.l2_regularizer(0)
        self.assertIsNone(tf.learn.l2_regularizer(0.)(None))
        values = np.array([1., -1., 4., 2.])
        weights = tf.constant(values)
        with tf.Session() as sess:
            result = sess.run(tf.learn.l2_regularizer(.42)(weights))
        # L2 penalty: scale * sum(w^2) / 2.
        self.assertAllClose(np.power(values, 2).sum() / 2.0 * .42, result)
if __name__ == '__main__':
    # Run all test cases in this module under the TensorFlow test runner.
    tf.test.main()
| |
from cxio.element import Element
from cxio.aspect_element import AspectElement
from cxio.cx_constants import CxConstants
class ElementMaker(object):
""" Static methods for creating (aspect) element instances.
"""
@staticmethod
def create_nodes_aspect_element(node_id, node_name=None, node_represents=None):
""" Convenience method to create a nodes aspect element
:rtype: AspectElement
"""
if node_name is None and node_represents is None:
e = {'@id': node_id}
elif node_represents is None:
e = {'@id': node_id, 'n': node_name}
else:
e = {'@id': node_id, 'n': node_name, 'r': node_represents}
return AspectElement(CxConstants.NODES, e)
@staticmethod
def create_edges_aspect_element(edge_id, source_id, target_id, interaction):
"""
:rtype: AspectElement
"""
e = {'@id': edge_id,
's': source_id,
't': target_id,
'i': interaction
}
return AspectElement(CxConstants.EDGES, e)
@staticmethod
def create_cartesian_layout_element(node_id, view_id, x, y, z=None):
"""
:rtype: AspectElement
"""
e = {'node': node_id,
'x': x,
'y': y
}
if view_id:
e['view'] = view_id
if z:
e['z'] = z
return AspectElement(CxConstants.CARTESIAN_LAYOUT, e)
@staticmethod
def create_network_attributes_aspect_element(sub_network_id, name, value, data_type=None):
"""
:rtype: AspectElement
"""
e = {'n': name}
if isinstance(value, list):
if data_type is None:
raise IOError('data type missing for (list) network attributes "' + name + '"')
if data_type not in CxConstants.LIST_ATTRIBUTE_TYPES:
raise IOError('illegal data type for (list) network attributes "' + name + '": ' + data_type)
e['d'] = data_type
e['v'] = value
else:
if data_type:
if data_type not in CxConstants.SINGLE_ATTRIBUTE_TYPES:
raise IOError('illegal data type for (single) network attributes "' + name + '": ' + data_type)
if data_type != CxConstants.DATA_TYPE_STRING:
e['d'] = data_type
e['v'] = str(value)
if sub_network_id:
e['s'] = sub_network_id
return AspectElement(CxConstants.NETWORK_ATTRIBUTES, e)
@staticmethod
def create_hidden_attributes_aspect_element(sub_network_id, name, value, data_type=None):
"""
:rtype: AspectElement
"""
e = {'n': name}
if isinstance(value, list):
if data_type is None:
raise IOError('data type missing for (list) hidden attributes "' + name + '"')
if data_type not in CxConstants.LIST_ATTRIBUTE_TYPES:
raise IOError('illegal data type for (list) hidden attributes "' + name + '": ' + data_type)
e['d'] = data_type
e['v'] = value
else:
if data_type:
if data_type not in CxConstants.SINGLE_ATTRIBUTE_TYPES:
raise IOError('illegal data type for (single) hidden attributes "' + name + '": ' + data_type)
if data_type != CxConstants.DATA_TYPE_STRING:
e['d'] = data_type
e['v'] = str(value)
if sub_network_id:
e['s'] = sub_network_id
return AspectElement(CxConstants.HIDDEN_ATTRIBUTES, e)
@staticmethod
def create_edge_attributes_aspect_element(sub_network_id, edge_id, name, value, data_type=None):
"""
:rtype: AspectElement
"""
e = {'po': edge_id,
'n': name
}
if isinstance(value, list):
if data_type is None:
raise IOError('data type missing for (list) edge attributes "' + name + '"')
if data_type not in CxConstants.LIST_ATTRIBUTE_TYPES:
raise IOError('illegal data type for (list) edge attributes "' + name + '": ' + data_type)
e['d'] = data_type
e['v'] = value
else:
if data_type:
if data_type not in CxConstants.SINGLE_ATTRIBUTE_TYPES:
raise IOError('illegal data type for (single) edge attributes "' + name + '": ' + data_type)
if data_type != CxConstants.DATA_TYPE_STRING:
e['d'] = data_type
e['v'] = str(value)
if sub_network_id:
e['s'] = sub_network_id
return AspectElement(CxConstants.EDGE_ATTRIBUTES, e)
@staticmethod
def create_node_attributes_aspect_element(sub_network_id, node_id, name, value, data_type=None):
"""
:rtype: AspectElement
"""
e = {'po': node_id,
'n': name
}
if isinstance(value, list):
if data_type is None:
raise IOError('data type missing for (list) node attributes "' + name + '"')
if data_type not in CxConstants.LIST_ATTRIBUTE_TYPES:
raise IOError('illegal data type for (list) node attributes "' + name + '": ' + data_type)
e['d'] = data_type
e['v'] = value
else:
if data_type:
if data_type not in CxConstants.SINGLE_ATTRIBUTE_TYPES:
raise IOError('illegal data type for (single) node attributes "' + name + '": ' + data_type)
if data_type != CxConstants.DATA_TYPE_STRING:
e['d'] = data_type
e['v'] = str(value)
if sub_network_id:
e['s'] = sub_network_id
return AspectElement(CxConstants.NODE_ATTRIBUTES, e)
@staticmethod
def create_sub_networks_aspect_element(sub_network_id, node_ids, edge_ids):
"""
:rtype: AspectElement
"""
e = {'@id': sub_network_id,
'nodes': node_ids,
'edges': edge_ids
}
return AspectElement(CxConstants.SUB_NETWORKS, e)
@staticmethod
def create_views_aspect_element(view_id, sub_network_id):
"""
:rtype: AspectElement
"""
e = {'@id': view_id,
's': sub_network_id
}
return AspectElement(CxConstants.VIEWS, e)
@staticmethod
def create_network_relations_aspect_element(child, parent, relationship=None, name=None):
"""
:rtype: AspectElement
"""
e = {'c': child}
if parent:
e['p'] = parent
if relationship:
if (relationship != CxConstants.RELATIONSHIP_TYPE_VIEW) and (
relationship != CxConstants.RELATIONSHIP_TYPE_SUBNETWORK):
raise IOError('illegal relationship type: ' + relationship)
e['r'] = relationship
if name:
e['name'] = name
return AspectElement(CxConstants.NETWORK_RELATIONS, e)
@staticmethod
def create_groups_aspect_element(group_id, view_id, name, nodes, external_edges, internal_edges):
"""
:rtype: AspectElement
"""
e = {'@id': group_id,
'view': view_id,
'name': name,
'nodes': nodes,
'external_edges': external_edges,
'internal_edges': internal_edges
}
return AspectElement(CxConstants.GROUPS, e)
@staticmethod
def create_table_column_aspect_element(sub_network, applies_to, name, data_type):
"""
:rtype: AspectElement
"""
if (data_type not in CxConstants.SINGLE_ATTRIBUTE_TYPES) and (
data_type not in CxConstants.LIST_ATTRIBUTE_TYPES):
raise IOError('illegal data type for "' + name + '": ' + data_type)
e = {'s': sub_network,
'n': name,
'applies_to': applies_to,
'd': data_type
}
return AspectElement(CxConstants.TABLE_COLUMN, e)
@staticmethod
def create_visual_properties_aspect_element(properties_of, applies_to, view, properties, dependencies=None,
mappings=None):
"""
:rtype: AspectElement
"""
if properties_of not in CxConstants.VP_PROPERTIES_OF:
raise IOError('illegal properties of: ' + properties_of)
e = {'properties_of': properties_of,
'applies_to': applies_to,
}
if view:
e['view'] = view
if properties:
e['properties'] = properties
if dependencies:
e['dependencies'] = dependencies
if mappings:
e['mappings'] = mappings
return AspectElement(CxConstants.VISUAL_PROPERTIES, e)
@staticmethod
def create_pre_metadata_element(aspect_name, consistency_group, version, last_update, properties, id_counter,
element_count=None):
"""
:rtype: Element
"""
e = {'name': str(aspect_name),
'consistencyGroup': consistency_group,
'version': str(version),
'lastUpdate': last_update,
'properties': properties,
'idCounter': id_counter
}
if element_count:
e['elementCount'] = element_count
return Element(CxConstants.META_DATA, e)
@staticmethod
def create_post_metadata_element(aspect_name, id_counter):
"""
:rtype: Element
"""
e = {'name': aspect_name,
'idCounter': id_counter,
}
return Element(CxConstants.META_DATA, e)
@staticmethod
def create_number_verification_element():
""" Convenience method to create a number verification element
:rtype: Element
"""
e = [dict(longNumber=CxConstants.NUMBER_VERIFICATION_VALUE)]
return Element(CxConstants.NUMBER_VERIFICATION, e)
@staticmethod
def create_status_element(success=True, error_msg=''):
""" Convenience method to create a status element
:rtype: Element
"""
e = [{'error': error_msg,
'success': success,
}]
return Element(CxConstants.STATUS, e)
@staticmethod
def create_ndex_citation_aspect_element(citation_id, citation_type, title, contributors, identifier, description):
"""
:rtype: AspectElement
"""
e = {'@id': citation_id,
'dc:title': title,
'dc:contributor': contributors,
'dc:identifier': identifier,
'dc:type': citation_type,
'dc:description': description,
'attributes': []
}
return AspectElement('citations', e)
@staticmethod
def create_ndex_support_aspect_element(support_id, cx_citation_id, text):
"""
:rtype: AspectElement
"""
e = {'@id': support_id,
'citation': cx_citation_id,
'text': text,
'attributes': []
}
return AspectElement('supports', e)
@staticmethod
def create_ndex_function_term_aspect_element(function_term):
"""
:rtype: AspectElement
"""
e = {'function_term': function_term}
return AspectElement('functionTerms', e)
@staticmethod
def create_ndex_node_citation_aspect_element(node_id, citation_id):
    """Build an NDEx 'nodeCitations' element tying one node to one citation.

    :rtype: AspectElement
    """
    body = dict([
        ('citations', [citation_id]),
        ('po', [node_id]),
    ])
    return AspectElement('nodeCitations', body)
@staticmethod
def create_ndex_edge_citation_aspect_element(edge_id, citation_id):
    """Build an NDEx 'edgeCitations' element tying one edge to one citation.

    :rtype: AspectElement
    """
    body = dict([
        ('citations', [citation_id]),
        ('po', [edge_id]),
    ])
    return AspectElement('edgeCitations', body)
@staticmethod
def create_ndex_node_support_aspect_element(node_id, support_id):
    """Build an NDEx 'nodeSupports' element tying one node to one support.

    :rtype: AspectElement
    """
    body = dict([
        ('supports', [support_id]),
        ('po', [node_id]),
    ])
    return AspectElement('nodeSupports', body)
@staticmethod
def create_ndex_edge_support_aspect_element(edge_id, support_id):
    """Build an NDEx 'edgeSupports' element tying one edge to one support.

    :rtype: AspectElement
    """
    body = dict([
        ('supports', [support_id]),
        ('po', [edge_id]),
    ])
    return AspectElement('edgeSupports', body)
@staticmethod
def create_ndex_context_element(contexts):
    """Wrap a namespace-context mapping in an '@context' element.

    :rtype: Element
    """
    return Element('@context', contexts)
| |
from collections.abc import Mapping
import os
import numpy as np
import pytest
import openmc
import openmc.exceptions as exc
import openmc.lib
from tests import cdtemp
@pytest.fixture(scope='module')
def pincell_model():
    """Set up a model to test with and delete files when done"""
    openmc.reset_auto_ids()
    pincell = openmc.examples.pwr_pin_cell()
    pincell.settings.verbosity = 1
    # Add a tally
    filter1 = openmc.MaterialFilter(pincell.materials)
    filter2 = openmc.EnergyFilter([0.0, 1.0, 1.0e3, 20.0e6])
    mat_tally = openmc.Tally()
    mat_tally.filters = [filter1, filter2]
    mat_tally.nuclides = ['U235', 'U238']
    mat_tally.scores = ['total', 'elastic', '(n,gamma)']
    pincell.tallies.append(mat_tally)
    # Add an expansion tally
    zernike_tally = openmc.Tally()
    filter3 = openmc.ZernikeFilter(5, r=.63)
    cells = pincell.geometry.root_universe.cells
    filter4 = openmc.CellFilter(list(cells.values()))
    zernike_tally.filters = [filter3, filter4]
    zernike_tally.scores = ['fission']
    pincell.tallies.append(zernike_tally)
    # Add an energy function tally
    energyfunc_tally = openmc.Tally()
    energyfunc_filter = openmc.EnergyFunctionFilter(
        [0.0, 20e6], [0.0, 20e6])
    energyfunc_tally.scores = ['fission']
    energyfunc_tally.filters = [energyfunc_filter]
    pincell.tallies.append(energyfunc_tally)
    # Write XML files in tmpdir; cdtemp cleans the directory up on teardown.
    with cdtemp():
        pincell.export_to_xml()
        yield
@pytest.fixture(scope='module')
def uo2_trigger_model():
    """Set up a simple UO2 model with k-eff trigger"""
    model = openmc.model.Model()
    m = openmc.Material(name='UO2')
    m.add_nuclide('U235', 1.0)
    m.add_nuclide('O16', 2.0)
    m.set_density('g/cm3', 10.0)
    model.materials.append(m)
    cyl = openmc.ZCylinder(r=1.0, boundary_type='vacuum')
    c = openmc.Cell(fill=m, region=-cyl)
    model.geometry.root_universe = openmc.Universe(cells=[c])
    model.settings.batches = 10
    model.settings.inactive = 5
    model.settings.particles = 100
    model.settings.source = openmc.Source(space=openmc.stats.Box(
        [-0.5, -0.5, -1], [0.5, 0.5, 1], only_fissionable=True))
    model.settings.verbosity = 1
    # Trigger: stop when k-eff std-dev drops below threshold (or max batches).
    model.settings.keff_trigger = {'type': 'std_dev', 'threshold': 0.001}
    model.settings.trigger_active = True
    model.settings.trigger_max_batches = 10
    model.settings.trigger_batch_interval = 1
    # Write XML files in tmpdir
    with cdtemp():
        model.export_to_xml()
        yield
@pytest.fixture(scope='module')
def lib_init(pincell_model, mpi_intracomm):
    """Module fixture: initialize the C API library, finalize on teardown."""
    openmc.lib.init(intracomm=mpi_intracomm)
    yield
    openmc.lib.finalize()
@pytest.fixture(scope='module')
def lib_simulation_init(lib_init):
    """Module fixture: put the initialized library into simulation mode."""
    openmc.lib.simulation_init()
    yield
@pytest.fixture(scope='module')
def lib_run(lib_simulation_init):
    """Module fixture: run a complete simulation once initialized."""
    openmc.lib.run()
def test_cell_mapping(lib_init):
    """The cells mapping behaves like a dict of id -> Cell."""
    cells = openmc.lib.cells
    assert isinstance(cells, Mapping)
    assert len(cells) == 3
    for cell_id, cell in cells.items():
        assert isinstance(cell, openmc.lib.Cell)
        assert cell_id == cell.id
def test_cell(lib_init):
    """Cell fill and name are readable and writable through the C API."""
    cell = openmc.lib.cells[1]
    assert isinstance(cell.fill, openmc.lib.Material)
    cell.fill = openmc.lib.materials[1]
    # str() reports the internal (0-based) index, not the user id.
    assert str(cell) == 'Cell[0]'
    assert cell.name == "Fuel"
    cell.name = "Not fuel"
    assert cell.name == "Not fuel"
def test_cell_temperature(lib_init):
    """Cell temperature can be set per-instance or for all instances."""
    cell = openmc.lib.cells[1]
    cell.set_temperature(100.0, 0)
    assert cell.get_temperature(0) == pytest.approx(100.0)
    cell.set_temperature(200)
    assert cell.get_temperature() == pytest.approx(200.0)
def test_new_cell(lib_init):
    """Creating a cell with a taken ID fails; new cells are registered."""
    with pytest.raises(exc.AllocationError):
        openmc.lib.Cell(1)
    new_cell = openmc.lib.Cell()
    new_cell_with_id = openmc.lib.Cell(10)
    assert len(openmc.lib.cells) == 5
def test_material_mapping(lib_init):
    """The materials mapping behaves like a dict of id -> Material."""
    mats = openmc.lib.materials
    assert isinstance(mats, Mapping)
    assert len(mats) == 3
    for mat_id, mat in mats.items():
        assert isinstance(mat, openmc.lib.Material)
        assert mat_id == mat.id
def test_material(lib_init):
    """Material densities, volume, density and name round-trip via the C API."""
    m = openmc.lib.materials[3]
    assert m.nuclides == ['H1', 'O16', 'B10', 'B11']
    old_dens = m.densities
    test_dens = [1.0e-1, 2.0e-1, 2.5e-1, 1.0e-3]
    m.set_densities(m.nuclides, test_dens)
    assert m.densities == pytest.approx(test_dens)
    assert m.volume is None
    m.volume = 10.0
    assert m.volume == 10.0
    # Unknown density units must be rejected.
    with pytest.raises(exc.OpenMCError):
        m.set_density(1.0, 'goblins')
    rho = 2.25e-2
    m.set_density(rho)
    assert sum(m.densities) == pytest.approx(rho)
    m.set_density(0.1, 'g/cm3')
    assert m.density == pytest.approx(0.1)
    assert m.name == "Hot borated water"
    m.name = "Not hot borated water"
    assert m.name == "Not hot borated water"
def test_material_add_nuclide(lib_init):
    """Adding a nuclide appends it with the requested density."""
    m = openmc.lib.materials[3]
    m.add_nuclide('Xe135', 1e-12)
    assert m.nuclides[-1] == 'Xe135'
    assert m.densities[-1] == 1e-12
def test_new_material(lib_init):
    """Creating a material with a taken ID fails; new ones are registered."""
    with pytest.raises(exc.AllocationError):
        openmc.lib.Material(1)
    new_mat = openmc.lib.Material()
    new_mat_with_id = openmc.lib.Material(10)
    assert len(openmc.lib.materials) == 5
def test_nuclide_mapping(lib_init):
    """The nuclides mapping behaves like a dict of name -> Nuclide."""
    nucs = openmc.lib.nuclides
    assert isinstance(nucs, Mapping)
    assert len(nucs) == 13
    for name, nuc in nucs.items():
        assert isinstance(nuc, openmc.lib.Nuclide)
        assert name == nuc.name
def test_settings(lib_init):
    """Settings exposed by the C API match the exported model; seed is writable."""
    settings = openmc.lib.settings
    assert settings.inactive == 5
    assert settings.generations_per_batch == 1
    assert settings.particles == 100
    assert settings.seed == 1
    settings.seed = 11
def test_tally_mapping(lib_init):
    """The tallies mapping behaves like a dict of id -> Tally."""
    tallies = openmc.lib.tallies
    assert isinstance(tallies, Mapping)
    assert len(tallies) == 3
    for tally_id, tally in tallies.items():
        assert isinstance(tally, openmc.lib.Tally)
        assert tally_id == tally.id
def test_energy_function_filter(lib_init):
    """Test special __new__ and __init__ for EnergyFunctionFilter"""
    efunc = openmc.lib.EnergyFunctionFilter([0.0, 1.0], [0.0, 2.0])
    assert len(efunc.energy) == 2
    assert (efunc.energy == [0.0, 1.0]).all()
    assert len(efunc.y) == 2
    assert (efunc.y == [0.0, 2.0]).all()
def test_tally(lib_init):
    """Tally filters, nuclides and scores are readable and replaceable."""
    t = openmc.lib.tallies[1]
    assert t.type == 'volume'
    assert len(t.filters) == 2
    assert isinstance(t.filters[0], openmc.lib.MaterialFilter)
    assert isinstance(t.filters[1], openmc.lib.EnergyFilter)
    # Create new filter and replace existing
    with pytest.raises(exc.AllocationError):
        openmc.lib.MaterialFilter(uid=1)
    mats = openmc.lib.materials
    f = openmc.lib.MaterialFilter([mats[2], mats[1]])
    assert f.bins[0] == mats[2]
    assert f.bins[1] == mats[1]
    t.filters = [f]
    assert t.filters == [f]
    assert t.nuclides == ['U235', 'U238']
    # Unknown nuclide names must be rejected.
    with pytest.raises(exc.DataError):
        t.nuclides = ['Zr2']
    t.nuclides = ['U234', 'Zr90']
    assert t.nuclides == ['U234', 'Zr90']
    assert t.scores == ['total', '(n,elastic)', '(n,gamma)']
    new_scores = ['scatter', 'fission', 'nu-fission', '(n,2n)']
    t.scores = new_scores
    assert t.scores == new_scores
    t2 = openmc.lib.tallies[2]
    assert len(t2.filters) == 2
    assert isinstance(t2.filters[0], openmc.lib.ZernikeFilter)
    assert isinstance(t2.filters[1], openmc.lib.CellFilter)
    assert len(t2.filters[1].bins) == 3
    assert t2.filters[0].order == 5
    t3 = openmc.lib.tallies[3]
    assert len(t3.filters) == 1
    t3_f = t3.filters[0]
    assert isinstance(t3_f, openmc.lib.EnergyFunctionFilter)
    assert len(t3_f.energy) == 2
    assert len(t3_f.y) == 2
    t3_f.set_data([0.0, 1.0, 2.0], [0.0, 1.0, 4.0])
    assert len(t3_f.energy) == 3
    assert len(t3_f.y) == 3
def test_new_tally(lib_init):
    """Creating a tally with a taken ID fails; new tallies are registered."""
    with pytest.raises(exc.AllocationError):
        # Was openmc.lib.Material(1) — a copy-paste bug from test_new_material;
        # this test is about tallies, and tally id 1 is already allocated.
        openmc.lib.Tally(1)
    new_tally = openmc.lib.Tally()
    new_tally.scores = ['flux']
    new_tally_with_id = openmc.lib.Tally(10)
    new_tally_with_id.scores = ['flux']
    assert len(openmc.lib.tallies) == 5
def test_tally_activate(lib_simulation_init):
    """Tallies start inactive and can be activated."""
    t = openmc.lib.tallies[1]
    assert not t.active
    t.active = True
    assert t.active
def test_tally_writable(lib_simulation_init):
    """The writable flag of a tally can be toggled."""
    t = openmc.lib.tallies[1]
    assert t.writable
    t.writable = False
    assert not t.writable
    # Revert tally to writable state for lib_run fixtures
    t.writable = True
def test_tally_results(lib_run):
    """After a run, tally means/std-devs are populated and consistent."""
    t = openmc.lib.tallies[1]
    assert t.num_realizations == 10  # t was made active in test_tally_activate
    assert np.all(t.mean >= 0)
    nonzero = (t.mean > 0.0)
    assert np.all(t.std_dev[nonzero] >= 0)
    assert np.all(t.ci_width()[nonzero] >= 1.95*t.std_dev[nonzero])
    t2 = openmc.lib.tallies[2]
    n = 5
    assert t2.mean.size == (n + 1) * (n + 2) // 2 * 3  # Number of Zernike coeffs * 3 cells
def test_global_tallies(lib_run):
    """Global tallies have one realization per active batch."""
    assert openmc.lib.num_realizations() == 5
    gt = openmc.lib.global_tallies()
    for mean, std_dev in gt:
        assert mean >= 0
def test_statepoint(lib_run):
    """statepoint_write creates the requested file."""
    openmc.lib.statepoint_write('test_sp.h5')
    assert os.path.exists('test_sp.h5')
def test_source_bank(lib_run):
    """Source bank particles have positive energy, unit weight, unit direction."""
    source = openmc.lib.source_bank()
    assert np.all(source['E'] > 0.0)
    assert np.all(source['wgt'] == 1.0)
    assert np.allclose(np.linalg.norm(source['u'], axis=1), 1.0)
def test_by_batch(lib_run):
    """Batches can be advanced one at a time via next_batch/iter_batches."""
    openmc.lib.hard_reset()
    # Running next batch before simulation is initialized should raise an
    # exception
    with pytest.raises(exc.AllocationError):
        openmc.lib.next_batch()
    openmc.lib.simulation_init()
    try:
        for _ in openmc.lib.iter_batches():
            # Make sure we can get k-effective during inactive/active batches
            mean, std_dev = openmc.lib.keff()
            assert 0.0 < mean < 2.5
            assert std_dev > 0.0
        assert openmc.lib.num_realizations() == 5
        for i in range(3):
            openmc.lib.next_batch()
        assert openmc.lib.num_realizations() == 8
    finally:
        openmc.lib.simulation_finalize()
def test_set_n_batches(lib_run):
    """n_batches can be extended mid-run but not set below limits."""
    # Run simulation_init so that current_batch reset to 0
    openmc.lib.hard_reset()
    openmc.lib.simulation_init()
    settings = openmc.lib.settings
    assert settings.get_batches() == 10
    # Setting n_batches less than n_inactive should raise error
    with pytest.raises(exc.InvalidArgumentError):
        settings.set_batches(3)
    # n_batches should stay the same
    assert settings.get_batches() == 10
    for i in range(7):
        openmc.lib.next_batch()
    # Setting n_batches less than current_batch should raise error
    with pytest.raises(exc.InvalidArgumentError):
        settings.set_batches(6)
    # n_batches should stay the same
    assert settings.get_batches() == 10
    # Change n_batches from 10 to 20
    settings.set_batches(20)
    for _ in openmc.lib.iter_batches():
        pass
    openmc.lib.simulation_finalize()
    # n_active should have been overwritten from 5 to 15
    assert openmc.lib.num_realizations() == 15
    # Ensure statepoint created at new value of n_batches
    assert os.path.exists('statepoint.20.h5')
def test_reset(lib_run):
    """reset() clears accumulated tally results but keeps the simulation alive."""
    # Init and run 10 batches.
    openmc.lib.hard_reset()
    openmc.lib.simulation_init()
    try:
        for i in range(20):
            openmc.lib.next_batch()
        # Make sure there are 15 realizations for the 15 active batches.
        assert openmc.lib.num_realizations() == 15
        assert openmc.lib.tallies[2].num_realizations == 15
        _, keff_sd1 = openmc.lib.keff()
        tally_sd1 = openmc.lib.tallies[2].std_dev[0]
        # Reset and run 3 more batches. Check the number of realizations.
        openmc.lib.reset()
        for i in range(3):
            openmc.lib.next_batch()
        assert openmc.lib.num_realizations() == 3
        assert openmc.lib.tallies[2].num_realizations == 3
        # Check the tally std devs to make sure results were cleared.
        _, keff_sd2 = openmc.lib.keff()
        tally_sd2 = openmc.lib.tallies[2].std_dev[0]
        assert keff_sd2 > keff_sd1
        assert tally_sd2 > tally_sd1
    finally:
        openmc.lib.simulation_finalize()
def test_reproduce_keff(lib_init):
    """Two runs from the same seed reproduce the same k-effective."""
    # Get k-effective after run
    openmc.lib.hard_reset()
    openmc.lib.run()
    keff0 = openmc.lib.keff()
    # Reset, run again, and get k-effective again. they should match
    openmc.lib.hard_reset()
    openmc.lib.run()
    keff1 = openmc.lib.keff()
    assert keff0 == pytest.approx(keff1)
def test_find_cell(lib_init):
    """find_cell locates the correct cell; outside geometry raises."""
    cell, instance = openmc.lib.find_cell((0., 0., 0.))
    assert cell is openmc.lib.cells[1]
    cell, instance = openmc.lib.find_cell((0.4, 0., 0.))
    assert cell is openmc.lib.cells[2]
    with pytest.raises(exc.GeometryError):
        openmc.lib.find_cell((100., 100., 100.))
def test_find_material(lib_init):
    """find_material locates the material filling the cell at a point."""
    mat = openmc.lib.find_material((0., 0., 0.))
    assert mat is openmc.lib.materials[1]
    mat = openmc.lib.find_material((0.4, 0., 0.))
    assert mat is openmc.lib.materials[2]
def test_regular_mesh(lib_init):
    """RegularMesh parameters, mapping registration and mesh filters work."""
    mesh = openmc.lib.RegularMesh()
    mesh.dimension = (2, 3, 4)
    assert mesh.dimension == (2, 3, 4)
    with pytest.raises(exc.AllocationError):
        mesh2 = openmc.lib.RegularMesh(mesh.id)
    # Make sure each combination of parameters works
    ll = (0., 0., 0.)
    ur = (10., 10., 10.)
    width = (1., 1., 1.)
    mesh.set_parameters(lower_left=ll, upper_right=ur)
    assert mesh.lower_left == pytest.approx(ll)
    assert mesh.upper_right == pytest.approx(ur)
    mesh.set_parameters(lower_left=ll, width=width)
    assert mesh.lower_left == pytest.approx(ll)
    assert mesh.width == pytest.approx(width)
    mesh.set_parameters(upper_right=ur, width=width)
    assert mesh.upper_right == pytest.approx(ur)
    assert mesh.width == pytest.approx(width)
    meshes = openmc.lib.meshes
    assert isinstance(meshes, Mapping)
    assert len(meshes) == 1
    for mesh_id, mesh in meshes.items():
        assert isinstance(mesh, openmc.lib.RegularMesh)
        assert mesh_id == mesh.id
    # Mesh filters built on the mesh accept a translation.
    translation = (1.0, 2.0, 3.0)
    mf = openmc.lib.MeshFilter(mesh)
    assert mf.mesh == mesh
    mf.translation = translation
    assert mf.translation == translation
    msf = openmc.lib.MeshSurfaceFilter(mesh)
    assert msf.mesh == mesh
    msf.translation = translation
    assert msf.translation == translation
def test_rectilinear_mesh(lib_init):
    """RectilinearMesh grids, derived properties and mesh filters work."""
    mesh = openmc.lib.RectilinearMesh()
    x_grid = [-10., 0., 10.]
    y_grid = [0., 10., 20.]
    z_grid = [10., 20., 30.]
    mesh.set_grid(x_grid, y_grid, z_grid)
    assert np.all(mesh.lower_left == (-10., 0., 10.))
    assert np.all(mesh.upper_right == (10., 20., 30.))
    assert np.all(mesh.dimension == (2, 2, 2))
    for i, diff_x in enumerate(np.diff(x_grid)):
        for j, diff_y in enumerate(np.diff(y_grid)):
            for k, diff_z in enumerate(np.diff(z_grid)):
                assert np.all(mesh.width[i, j, k, :] == (10, 10, 10))
    with pytest.raises(exc.AllocationError):
        mesh2 = openmc.lib.RectilinearMesh(mesh.id)
    meshes = openmc.lib.meshes
    assert isinstance(meshes, Mapping)
    assert len(meshes) == 2
    mesh = meshes[mesh.id]
    assert isinstance(mesh, openmc.lib.RectilinearMesh)
    mf = openmc.lib.MeshFilter(mesh)
    assert mf.mesh == mesh
    msf = openmc.lib.MeshSurfaceFilter(mesh)
    assert msf.mesh == mesh
def test_restart(lib_init, mpi_intracomm):
    """Restarting from a statepoint reproduces the straight-through k-eff."""
    # Finalize and re-init to make internal state consistent with XML.
    openmc.lib.hard_reset()
    openmc.lib.finalize()
    openmc.lib.init(intracomm=mpi_intracomm)
    openmc.lib.simulation_init()
    # Run for 7 batches then write a statepoint.
    for i in range(7):
        openmc.lib.next_batch()
    openmc.lib.statepoint_write('restart_test.h5', True)
    # Run 3 more batches and copy the keff.
    for i in range(3):
        openmc.lib.next_batch()
    keff0 = openmc.lib.keff()
    # Restart the simulation from the statepoint and the 3 remaining active batches.
    openmc.lib.simulation_finalize()
    openmc.lib.hard_reset()
    openmc.lib.finalize()
    openmc.lib.init(args=('-r', 'restart_test.h5'))
    openmc.lib.simulation_init()
    for i in range(3):
        openmc.lib.next_batch()
    keff1 = openmc.lib.keff()
    openmc.lib.simulation_finalize()
    # Compare the keff values.
    assert keff0 == pytest.approx(keff1)
def test_load_nuclide(lib_init):
    """Nuclide data can be loaded on demand; unknown names raise DataError."""
    # load multiple nuclides
    openmc.lib.load_nuclide('H3')
    assert 'H3' in openmc.lib.nuclides
    openmc.lib.load_nuclide('Pu239')
    assert 'Pu239' in openmc.lib.nuclides
    # load non-existent nuclide
    with pytest.raises(exc.DataError):
        openmc.lib.load_nuclide('Pu3')
def test_id_map(lib_init):
    """plot.id_map returns the (cell id, material id) raster for a slice."""
    expected_ids = np.array([[(3, 3), (2, 2), (3, 3)],
                             [(2, 2), (1, 1), (2, 2)],
                             [(3, 3), (2, 2), (3, 3)]], dtype='int32')
    # create a plot object
    s = openmc.lib.plot._PlotBase()
    s.width = 1.26
    s.height = 1.26
    s.v_res = 3
    s.h_res = 3
    s.origin = (0.0, 0.0, 0.0)
    s.basis = 'xy'
    s.level = -1
    ids = openmc.lib.plot.id_map(s)
    assert np.array_equal(expected_ids, ids)
def test_property_map(lib_init):
    """plot.property_map returns (temperature, density) rasters for a slice."""
    expected_properties = np.array(
        [[(293.6, 0.740582), (293.6, 6.55), (293.6, 0.740582)],
         [ (293.6, 6.55), (293.6, 10.29769), (293.6, 6.55)],
         [(293.6, 0.740582), (293.6, 6.55), (293.6, 0.740582)]], dtype='float')
    # create a plot object
    s = openmc.lib.plot._PlotBase()
    s.width = 1.26
    s.height = 1.26
    s.v_res = 3
    s.h_res = 3
    s.origin = (0.0, 0.0, 0.0)
    s.basis = 'xy'
    s.level = -1
    properties = openmc.lib.plot.property_map(s)
    assert np.allclose(expected_properties, properties, atol=1e-04)
def test_position(lib_init):
    """_Position supports tuple conversion and item assignment."""
    pos = openmc.lib.plot._Position(1.0, 2.0, 3.0)
    assert tuple(pos) == (1.0, 2.0, 3.0)
    pos[0] = 1.3
    pos[1] = 2.3
    pos[2] = 3.3
    assert tuple(pos) == (1.3, 2.3, 3.3)
def test_global_bounding_box(lib_init):
    """The global bounding box matches the pin-cell extents (infinite in z)."""
    expected_llc = (-0.63, -0.63, -np.inf)
    expected_urc = (0.63, 0.63, np.inf)
    llc, urc = openmc.lib.global_bounding_box()
    assert tuple(llc) == expected_llc
    assert tuple(urc) == expected_urc
def test_trigger_set_n_batches(uo2_trigger_model, mpi_intracomm):
    """set_batches distinguishes n_batches from n_max_batches when triggers are on."""
    openmc.lib.finalize()
    openmc.lib.init(intracomm=mpi_intracomm)
    openmc.lib.simulation_init()
    settings = openmc.lib.settings
    # Change n_batches to 12 and n_max_batches to 20
    settings.set_batches(12, set_max_batches=False, add_sp_batch=False)
    settings.set_batches(20, set_max_batches=True, add_sp_batch=True)
    assert settings.get_batches(get_max_batches=False) == 12
    assert settings.get_batches(get_max_batches=True) == 20
    for _ in openmc.lib.iter_batches():
        pass
    openmc.lib.simulation_finalize()
    # n_active should have been overwritten from 5 to 15
    assert openmc.lib.num_realizations() == 15
    # Ensure statepoint was created only at batch 20 when calling set_batches
    assert not os.path.exists('statepoint.12.h5')
    assert os.path.exists('statepoint.20.h5')
| |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
r"""
@author: Martin Klapproth <martin.klapproth@googlemail.com>
"""
import argparse
from copy import deepcopy
from genericpath import isfile, isdir
from glob import glob
import os
from os.path import join, islink
from subprocess import Popen, PIPE, STDOUT
import sys
# Command-line interface: optional job names plus mode/verbosity/config flags.
parser = argparse.ArgumentParser(description='HouseKeeper keeps your house clean')
parser.add_argument('job', metavar='JOB', type=str, nargs='*')
parser.add_argument('-n', "--noop", action="store_true")
parser.add_argument('-r', "--run", action="store_true")
parser.add_argument('-s', "--silent", action="store_true")
parser.add_argument('-v', "--verbose", action="store_true")
parser.add_argument('-c', "--config", action="store")
args = parser.parse_args()
# Logging to stdout; default level WARNING, -v switches to DEBUG.
import logging
logger = logging.getLogger()
sh = logging.StreamHandler(sys.stdout)
sh.setFormatter(logging.Formatter("%(asctime)s %(levelname)-8s %(name)-12s %(message)s", datefmt='%Y-%m-%d %H:%M:%S'))
logger.addHandler(sh)
logger.setLevel(logging.WARNING)
if args.verbose:
    logger.setLevel(logging.DEBUG)
# Configuration source: -c may name a directory of config files or a single
# file; default is /etc/housekeeper.
CONFIG_DIR = "/etc/housekeeper"
CONFIG_FILE = None
if args.config:
    if isdir(args.config):
        CONFIG_DIR = args.config
    elif isfile(args.config) or islink(args.config):
        CONFIG_DIR = None
        CONFIG_FILE = args.config
    else:
        print("Invalid config argument '%s', must be a file or directory" % args.config)
        sys.exit(1)
# Dry-run by default; only --run actually deletes anything.
NOOP = True
if args.run:
    NOOP = False
def output(text):
    """Print *text* to stdout unless --silent was given."""
    if not args.silent:
        print(text)
class Job(object):
    """Base class for housekeeping jobs: a name plus its config dict."""

    # Subclasses override this with their per-type defaults.
    DEFAULT_CONFIG = {}

    def __init__(self, name, config):
        self.name = name
        self.config = config

    def execute(self):
        """Perform the job's work; overridden by subclasses."""

    def noop(self):
        """Report what the job would do, without doing it; overridden by subclasses."""
class FindRemoveJob(Job):
    """Job that removes files matched by a generated ``find`` command."""

    PARAMETERS = ["depth", "match", "older", "recurse", "root"]
    DEFAULT_CONFIG = {"recurse": False}

    def __init__(self, *args, **kwargs):
        super(FindRemoveJob, self).__init__(*args, **kwargs)
        self.roots = []
        try:
            root_expression = self.config["root"]
        except KeyError:
            raise RuntimeError("%s: field 'root' needs to be defined" % self.name)
        # A glob pattern expands to several roots; a plain path is used as-is.
        if "*" in root_expression:
            self.roots = glob(root_expression)
        else:
            self.roots = [root_expression]
        self.roots = [r.rstrip("/") for r in self.roots]
        self.roots.sort()
        for root in self.roots:
            if not isdir(root):
                raise RuntimeError("%s: root %s needs to be an existing directory" % (self.name, root))
        # 'recurse' may arrive as the string "True"/"False" from an ini file.
        # Parse it explicitly instead of eval()'ing config text (eval on
        # config input can execute arbitrary code), and use .get() so a
        # missing key (job built without the DEFAULT_CONFIG merge) does not
        # raise KeyError here.
        recurse = self.config.get("recurse")
        if type(recurse) == str:
            self.config["recurse"] = recurse.strip().lower() in ("true", "1", "yes")

    def execute(self):
        """Run ``find ... -delete`` under every configured root."""
        for root in self.roots:
            cmd = self.generate_find_command(root)
            cmd.append("-delete")
            output("%s: execute command: %s (do not copy-paste this command)" % (self.name, " ".join(cmd)))
            popen = Popen(cmd, stdout=PIPE, stderr=STDOUT)
            popen.communicate()

    def noop(self):
        """List the files that would be removed, without deleting anything."""
        for root in self.roots:
            cmd = self.generate_find_command(root)
            if NOOP:
                output("%s: execute command: %s" % (self.name, " ".join(cmd)))
            # Text mode so matched paths print as str, not bytes reprs.
            popen = Popen(cmd, stdout=PIPE, stderr=STDOUT, universal_newlines=True)
            out, _ = popen.communicate()
            for line in out.splitlines():
                output("%s: would remove: %s" % (self.name, line))

    def generate_find_command(self, root, type="f"):
        """Build the ``find`` argument list for *root*.

        :param root: directory to search
        :param type: value for find's -type; one of "f", "d", "l"
        :return: list of command-line arguments
        :raises RuntimeError: if neither 'match' nor 'older' is configured
        """
        if not "match" in self.config and not "older" in self.config:
            raise RuntimeError("either 'match' or 'older' needs to be defined")
        cmd = ["find", root]
        # maxdepth: an explicit 'depth' wins; otherwise recurse=False caps at 1.
        if "depth" in self.config:
            # str(): Popen and " ".join require string arguments, and the old
            # self.config.get("depth", self.config["depth"]) was a no-op.
            cmd += ["-maxdepth", str(self.config["depth"])]
        elif "recurse" in self.config:
            if self.config["recurse"] is False:
                cmd += ["-maxdepth", "1"]
        # type
        if type:
            assert type in ["f", "d", "l"]
            cmd += ["-type", type]
        # name
        if "match" in self.config:
            cmd += ["-name", self.config["match"]]
        # mtime: 'older' is a day count, optionally suffixed d/w/m/y.
        if "older" in self.config:
            prefix = "+"
            context = self.config["older"]
            if context.endswith("d"):
                context = context[:-1]
            elif context.endswith("w"):
                context = int(context[:-1]) * 7
            elif context.endswith("m"):
                context = int(context[:-1]) * 30
            elif context.endswith("y"):
                context = int(context[:-1]) * 365
            cmd += ["-mtime", "%s%s" % (prefix, context)]
        return cmd
class Keep(FindRemoveJob):
    """
    Keeps <keep> amount of files (newest ones) and removes the rest.
    """

    PARAMETERS = ["depth", "match", "older", "recurse", "root", "keep"]

    def get_file_list(self, root):
        """Return the matched files under *root*, or [] if find failed."""
        cmd = self.generate_find_command(root)
        if NOOP:
            output("%s: execute command: %s" % (self.name, " ".join(cmd)))
        # Text mode so returned paths are str, usable with os.stat and output.
        popen = Popen(cmd, stdout=PIPE, stderr=STDOUT, universal_newlines=True)
        out, _ = popen.communicate()
        if popen.returncode == 0:
            return out.splitlines()
        # Previously returned None on failure, which crashed the caller's loop.
        return []

    def get_files_to_consider(self):
        """Collect all matched files except the newest <keep> per root."""
        consider_list = []
        for root in self.roots:
            file_list = self.get_file_list(root)
            order = []
            for file in file_list:
                order.append((os.stat(file).st_mtime, file))
            order.sort()
            keep = int(self.config["keep"])
            # keep == 0 means keep nothing: order[:-0] would wrongly yield [].
            if keep > 0:
                order = order[:-keep]
            consider_list += [o[1] for o in order]
        return consider_list

    def execute(self):
        """Remove every file that is not among the newest <keep>."""
        file_list = self.get_files_to_consider()
        for f in file_list:
            if isfile(f):
                os.remove(f)

    def noop(self):
        """Report which files would be removed."""
        file_list = self.get_files_to_consider()
        for f in file_list:
            output("%s: would remove: %s" % (self.name, f))
# Maps the config 'type' field to the implementing job class.
JOB_TYPE_MAPPING = {
    "find-remove": FindRemoveJob,
    "keep": Keep
}
class HouseKeeper(object):
    """Loads job definitions from config files and runs every job."""

    def __init__(self):
        """
        :return:
        """
        self.config = {}
        self.read_config()

    def read_config(self):
        """Populate self.config from the config directory or single file.

        :return:
        """
        if CONFIG_DIR:
            logger.debug("Looking for config files at %s" % CONFIG_DIR)
            config_files = glob(join(CONFIG_DIR, "*"))
            logger.debug("Found %s config files" % len(config_files))
        else:
            config_files = [CONFIG_FILE]
        for file in config_files:
            if not isfile(file):
                logger.debug("Config file '%s' is not a file, ignoring it" % file)
                continue
            # Skip editor backup files.
            if file.endswith("~"):
                logger.debug("Ignore config file '%s'" % file)
                continue
            if file.endswith(".yaml"):
                self._read_config_file_yaml(file)
            elif file.endswith(".ini"):
                self._read_config_file_ini(file)
            else:
                output("unrecognized config file %s" % file)

    def _read_config_file_yaml(self, path):
        """Merge a YAML config file into self.config.

        :param path:
        :return:
        """
        import yaml
        # safe_load: config files must not be able to construct arbitrary
        # Python objects (plain yaml.load without a Loader is unsafe).
        with open(path) as f:
            c = yaml.safe_load(f)
        self.config.update(c)

    def _read_config_file_ini(self, path):
        """Merge an ini config file into self.config (one job per section).

        :param path:
        :return:
        """
        logger.info("Reading config file %s" % path)

        def as_dict(config):
            """
            Converts a ConfigParser object into a dictionary.

            The resulting dictionary has sections as keys which point to a dict of the
            sections options as key => value pairs.
            """
            the_dict = {}
            for section in config.sections():
                the_dict[section] = {}
                for key, val in config.items(section):
                    the_dict[section][key] = val
            return the_dict

        if sys.version_info[0] == 2:
            from ConfigParser import ConfigParser
        else:
            from configparser import ConfigParser
        config = ConfigParser()
        config.read(path)
        self.config.update(as_dict(config))

    def start(self):
        """Instantiate and run every configured job.

        :return:
        """
        if not self.config:
            output("No configuration loaded, exiting")
            sys.exit(0)
        for job, job_config in self.config.items():
            job_type = job_config.get("type", "find-remove")
            # determine job class; index (not .get) so an unknown type raises
            # KeyError and produces the error below instead of a confusing
            # "'NoneType' object is not callable" later.
            try:
                job_class = JOB_TYPE_MAPPING[job_type]
            except KeyError:
                raise RuntimeError("%s: no such type definition: '%s'" % (job, job_type))
            # default config
            if hasattr(job_class, "DEFAULT_CONFIG"):
                new_config = deepcopy(job_class.DEFAULT_CONFIG)
                new_config.update(job_config)
                job_config = new_config
            # validate configuration parameters; list() because dict key views
            # have no .remove() on Python 3.
            job_parameter_names = list(job_config.keys())
            try:
                job_parameter_names.remove("type")
            except ValueError:
                pass
            for job_parameter_name in job_parameter_names:
                if not job_parameter_name in job_class.PARAMETERS:
                    raise RuntimeError("%s: invalid field '%s' in job definition for job class %s" % (job, job_parameter_name, job_class.__name__))
            job_instance = job_class(job, job_config)
            if NOOP:
                output("Executing job %s (noop)" % job)
                job_instance.noop()
            else:
                output("Executing job %s" % job)
                job_instance.execute()
# Script entry point: load configuration and run all jobs.
if __name__ == "__main__":
    h = HouseKeeper()
    h.start()
| |
# -*- coding: utf-8 -*-
# Copyright 2017 IBM RESEARCH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""
A collection of useful quantum information functions.
Currently this file is very sparse. More functions will be added
over time.
"""
import math
import numpy as np
import scipy.linalg as la
from qiskit.tools.qi.pauli import pauli_group
###############################################################
# circuit manipulation.
###############################################################
# Define methods for making QFT circuits
def qft(circ, q, n):
    """n-qubit QFT on q in circ."""
    for target in range(n):
        # Controlled phase rotations from each earlier qubit, then a Hadamard.
        for control in range(target):
            circ.cu1(math.pi / float(2 ** (target - control)), q[target], q[control])
        circ.h(q[target])
###############################################################
# State manipulation.
###############################################################
def partial_trace(state, sys, dims=None, reverse=True):
    """
    Partial trace over subsystems of multi-partite matrix.

    Note that subsystems are ordered as rho012 = rho0(x)rho1(x)rho2.

    Args:
        state (NxN matrix_like): a matrix
        sys (list(int): a list of subsystems (starting from 0) to trace over
        dims (list(int), optional): a list of the dimensions of the subsystems.
            If this is not set it will assume all subsystems are qubits.
        reverse (bool, optional): ordering of systems in operator.
            If True system-0 is the right most system in tensor product.
            If False system-0 is the left most system in tensor product.

    Returns:
        A matrix with the appropriate subsytems traced over.
    """
    # convert op to density matrix
    rho = np.array(state)
    if rho.ndim == 1:
        rho = outer(rho)  # convert state vector to density mat

    # compute dims if not specified
    if dims is None:
        n = int(np.log2(len(rho)))
        dims = [2 for i in range(n)]
        if len(rho) != 2 ** n:
            raise Exception("Input is not a multi-qubit state, \
            specifify input state dims")
    else:
        dims = list(dims)

    # reverse sort trace sys: tracing highest-index systems first keeps the
    # lower indices valid as 'dims' shrinks in the loop below.
    if isinstance(sys, int):
        sys = [sys]
    else:
        sys = sorted(sys, reverse=True)

    # trace out subsystems
    for j in sys:
        # get trace dims: split 'dims' around system j; with reverse=True
        # system-0 is the right-most factor, so pre/post are swapped.
        if reverse:
            dpre = dims[j + 1:]
            dpost = dims[:j]
        else:
            dpre = dims[:j]
            dpost = dims[j + 1:]
        dim1 = int(np.prod(dpre))
        dim2 = int(dims[j])
        dim3 = int(np.prod(dpost))
        # dims with sys-j removed
        dims = dpre + dpost
        # do the trace over j
        rho = __trace_middle(rho, dim1, dim2, dim3)
    return rho
def __trace_middle_dims(sys, dims, reverse=True):
    """
    Get system dimensions for __trace_middle.

    Args:
        sys (int): system to trace over.
        dims(list[int]): dimensions of all subsystems.
        reverse (bool): if true system-0 is right-most system tensor product.

    Returns:
        Tuple (dim1, dim2, dim3)
    """
    left, right = dims[:sys], dims[sys + 1:]
    # With reverse ordering system-0 sits at the right of the tensor product.
    if reverse:
        left, right = right, left
    return (int(np.prod(left)), int(dims[sys]), int(np.prod(right)))
def __trace_middle(op, dim1=1, dim2=1, dim3=1):
    """
    Partial trace over middle system of tripartite state.

    Args:
        op (NxN matrix_like): a tri-partite matrix
        dim1: dimension of the first subsystem
        dim2: dimension of the second (traced over) subsystem
        dim3: dimension of the third subsystem

    Returns:
        A (D,D) matrix where D = dim1 * dim3
    """
    # Expose the three subsystems on both row and column indices, then
    # contract the middle pair (axes 1 and 4).
    reshaped = op.reshape(dim1, dim2, dim3, dim1, dim2, dim3)
    out_dim = dim1 * dim3
    return reshaped.trace(axis1=1, axis2=4).reshape(out_dim, out_dim)
def vectorize(rho, method='col'):
    """Flatten an operator to a vector in a specified basis.

    Args:
        rho (ndarray): a density matrix.
        method (str): the method of vectorization. Allowed values are
            - 'col' (default) flattens to column-major vector.
            - 'row' flattens to row-major vector.
            - 'pauli' flattens in the n-qubit Pauli basis.
            - 'pauli_weights' flattens in the n-qubit Pauli basis ordered by
              weight.

    Returns:
        ndarray: the resulting vector.

    Raises:
        Exception: if rho is not an n-qubit state for the Pauli methods.
    """
    rho = np.array(rho)
    if method == 'col':
        return rho.flatten(order='F')
    elif method == 'row':
        return rho.flatten(order='C')
    elif method in ['pauli', 'pauli_weights']:
        num = int(np.log2(len(rho)))  # number of qubits
        if len(rho) != 2**num:
            raise Exception('Input state must be n-qubit state')
        # '==' instead of 'is': identity comparison of string literals is a
        # CPython interning accident, not a guaranteed equality test.
        if method == 'pauli_weights':
            pgroup = pauli_group(num, case=0)
        else:
            pgroup = pauli_group(num, case=1)
        vals = [np.trace(np.dot(p.to_matrix(), rho)) for p in pgroup]
        return np.array(vals)
def devectorize(vec, method='col'):
    """Devectorize a vectorized square matrix.

    Args:
        vec (ndarray): a vectorized density matrix.
        method (str): the method of devectorization. Allowed values are
            - 'col' (default): unflattens from a column-major vector.
            - 'row': unflattens from a row-major vector.
            - 'pauli': unflattens from the n-qubit Pauli basis.
            - 'pauli_weights': unflattens from the n-qubit Pauli basis ordered
              by weight.

    Returns:
        ndarray: the resulting matrix.

    Raises:
        Exception: if vec is not a vectorized square (n-qubit) matrix.
    """
    vec = np.array(vec)
    d = int(np.sqrt(vec.size))  # the dimension of the matrix
    if len(vec) != d*d:
        raise Exception('Input is not a vectorized square matrix')

    if method == 'col':
        return vec.reshape(d, d, order='F')
    elif method == 'row':
        return vec.reshape(d, d, order='C')
    elif method in ['pauli', 'pauli_weights']:
        num = int(np.log2(d))  # number of qubits
        if d != 2 ** num:
            raise Exception('Input state must be n-qubit state')
        # '==' instead of 'is': string identity is an interning accident.
        if method == 'pauli_weights':
            pgroup = pauli_group(num, case=0)
        else:
            pgroup = pauli_group(num, case=1)
        pbasis = np.array([p.to_matrix() for p in pgroup]) / 2 ** num
        return np.tensordot(vec, pbasis, axes=1)
def choi_to_rauli(choi, order=1):
    r"""Convert a Choi-matrix to a Pauli-basis superoperator.

    Note that this function assumes that the Choi-matrix is defined in the
    standard column-stacking convention and is normalized to have trace 1.
    For a channel E this is defined as: choi = (I \otimes E)(bell_state).

    The resulting 'rauli' R acts on input states as
        |rho_out>_p = R.|rho_in>_p
    where |rho> = vectorize(rho, method='pauli') for order=1
    and |rho> = vectorize(rho, method='pauli_weights') for order=0.

    Args:
        choi (matrix): the input Choi-matrix.
        order (int, optional): ordering of the Pauli group vector.
            order=1 (default) is standard lexicographic ordering.
                Eg: [II, IX, IY, IZ, XI, XX, XY,...]
            order=0 is ordered by weights.
                Eg. [II, IX, IY, IZ, XI, XY, XZ, XX, XY,...]

    Returns:
        A superoperator in the Pauli basis.
    """
    # The Choi matrix of an n-qubit channel is 4^n x 4^n.
    num = int(np.log2(np.sqrt(len(choi))))
    pgp = pauli_group(num, case=order)
    # R[i, j] = Tr[choi . (P_j^T (x) P_i)]
    rauli = [
        np.trace(np.dot(choi, np.kron(j.to_matrix().T, i.to_matrix())))
        for i in pgp for j in pgp
    ]
    return np.array(rauli).reshape(4 ** num, 4 ** num)
def chop(op, epsilon=1e-10):
"""
Truncate small values of a complex array.
Args:
op (array_like): array to truncte small values.
epsilon (float): threshold.
Returns:
A new operator with small values set to zero.
"""
op.real[abs(op.real) < epsilon] = 0.0
op.imag[abs(op.imag) < epsilon] = 0.0
return op
def outer(v1, v2=None):
    """Construct the outer product of two vectors.

    The second vector argument is optional; when it is omitted the
    projector of the first vector is returned.

    Args:
        v1 (ndarray): the first vector.
        v2 (ndarray): the (optional) second vector.

    Returns:
        The matrix |v1><v2|.
    """
    conjugated = np.array(v1 if v2 is None else v2).conj()
    return np.outer(v1, conjugated)
###############################################################
# Measures.
###############################################################
def funm_svd(a, func):
    """Apply a real scalar function to the singular values of a matrix.

    Args:
        a : (N, N) array_like
            Matrix at which to evaluate the function.
        func : callable
            Callable object that evaluates a scalar function f.

    Returns:
        funm : (N, N) ndarray
            Value of the matrix function specified by func evaluated at `A`.
    """
    # Decompose a = U.diag(s).Vh, map the singular values, and rebuild.
    left, sing_vals, right = la.svd(a, lapack_driver='gesvd')
    mapped = np.diag(func(sing_vals))
    return left.dot(mapped).dot(right)
def state_fidelity(state1, state2):
    """Return the state fidelity between two quantum states.

    Either input may be a state vector or a density matrix.

    Args:
        state1: a quantum state vector or density matrix.
        state2: a quantum state vector or density matrix.

    Returns:
        The state fidelity F(state1, state2).
    """
    s1 = np.array(state1)
    s2 = np.array(state2)
    if s1.ndim == 1:
        if s2.ndim == 1:
            # Two state vectors: |<s2|s1>|.
            return np.abs(s2.conj().dot(s1))
        # Vector s1 against density matrix s2: sqrt(|<s1|s2|s1>|).
        return np.sqrt(np.abs(s1.conj().dot(s2).dot(s1)))
    if s2.ndim == 1:
        # Vector s2 against density matrix s1.
        return np.sqrt(np.abs(s2.conj().dot(s1).dot(s2)))
    # Two density matrices: nuclear norm of sqrt(s1).sqrt(s2).
    sqrt1 = funm_svd(s1, np.sqrt)
    sqrt2 = funm_svd(s2, np.sqrt)
    return np.linalg.norm(sqrt1.dot(sqrt2), ord='nuc')
def purity(state):
    """Calculate the purity Tr[rho^2] of a quantum state.

    Args:
        state (np.array): a quantum state (vector or density matrix).

    Returns:
        purity.
    """
    density = np.array(state)
    if density.ndim == 1:
        # Promote a state vector to its projector.
        density = outer(density)
    return np.real(np.trace(np.dot(density, density)))
def concurrence(state):
    """Calculate the concurrence of a two-qubit state.

    Args:
        state (np.array): a quantum state (vector or density matrix).

    Returns:
        concurrence.
    """
    rho = np.array(state)
    if rho.ndim == 1:
        rho = outer(state)
    if len(state) != 4:
        raise Exception("Concurence is not defined for more than two qubits")
    # Spin-flip operator sigma_y (x) sigma_y as an anti-diagonal matrix.
    yy_mat = np.fliplr(np.diag([-1, 1, 1, -1]))
    a_mat = rho.dot(yy_mat).dot(rho.conj()).dot(yy_mat)
    # Square roots of the (clamped non-negative) eigenvalues, ascending.
    w = np.sqrt(np.maximum(la.eigh(a_mat, eigvals_only=True), 0))
    return max(0.0, w[-1] - np.sum(w[0:-1]))
###############################################################
# Other.
###############################################################
def is_pos_def(x):
    """Return True if every eigenvalue of x is strictly positive."""
    eigenvalues = np.linalg.eigvals(x)
    return np.all(eigenvalues > 0)
| |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests the graph quantization script.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
from tensorflow.core.framework import graph_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops as ops_lib
from tensorflow.python.platform import flags as flags_lib
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
from tensorflow.tools.quantization import quantize_graph
flags = flags_lib
FLAGS = flags.FLAGS
def run_graph_def(graph_def, input_map, outputs):
  """Import graph_def into a fresh graph and evaluate the given outputs.

  Note that input_map is applied as the session feed dict, not as an
  import-time input_map (which is deliberately empty here).
  """
  graph = ops_lib.Graph()
  with graph.as_default():
    importer.import_graph_def(graph_def, input_map={}, name="")
  with session.Session(graph=graph) as sess:
    return sess.run(outputs, feed_dict=input_map)
def test_mat_mul(m, n, k, a, b):
  """Tests a MatMul replacement."""
  float_graph_def = graph_pb2.GraphDef()
  # Two constant operands feeding a float32 MatMul.
  a_node = quantize_graph.create_constant_node(
      "a_constant", value=a, dtype=dtypes.float32, shape=[m, k])
  b_node = quantize_graph.create_constant_node(
      "b_constant", value=b, dtype=dtypes.float32, shape=[k, n])
  mat_mul_node = quantize_graph.create_node("MatMul", "mat_mul",
                                            [a_node.name, b_node.name])
  quantize_graph.set_attr_dtype(mat_mul_node, "T", dtypes.float32)
  quantize_graph.set_attr_bool(mat_mul_node, "transpose_a", False)
  quantize_graph.set_attr_bool(mat_mul_node, "transpose_b", False)
  float_graph_def.node.extend([a_node, b_node, mat_mul_node])
  test_graph(float_graph_def, {}, [mat_mul_node.name])
def test_conv(depth, image_width, image_height, image_batch_count, filter_size,
              filter_count, stride, padding, input_values, filter_values):
  """Tests a Conv replacement."""
  float_graph_def = graph_pb2.GraphDef()
  # NHWC image constant and HWIO filter constant feeding a Conv2D.
  image_node = quantize_graph.create_constant_node(
      "input_constant",
      value=input_values,
      dtype=dtypes.float32,
      shape=[image_batch_count, image_height, image_width, depth])
  kernel_node = quantize_graph.create_constant_node(
      "filter_constant",
      value=filter_values,
      dtype=dtypes.float32,
      shape=[filter_size, filter_size, depth, filter_count])
  conv_node = quantize_graph.create_node(
      "Conv2D", "conv", [image_node.name, kernel_node.name])
  quantize_graph.set_attr_dtype(conv_node, "T", dtypes.float32)
  quantize_graph.set_attr_int_list(conv_node, "strides", [1, stride, stride, 1])
  quantize_graph.set_attr_string(conv_node, "padding", padding)
  float_graph_def.node.extend([image_node, kernel_node, conv_node])
  test_graph(float_graph_def, {}, [conv_node.name])
def are_tensors_near(a, b, tolerance):
  """Tests whether two tensors are nearly identical.

  This is a specialized comparison function designed to help debug problems
  with quantization. On failure it prints statistics about the differences
  between the tensors, paying special attention to possible biases via the
  mean and mean-absolute errors.

  Args:
    a: First comparison tensor.
    b: Second comparison tensor.
    tolerance: Float value indicating how large an error between values is ok.

  Returns:
    Boolean indicating whether the two inputs were close enough.
  """
  flat_a = a.flatten()
  flat_b = b.flatten()
  if len(flat_a) != len(flat_b):
    print("Tensors are different sizes: " + str(len(flat_a)) + " vs " + str(
        len(flat_b)))
    return False
  value_count = len(flat_a)
  how_many_different = 0
  total_difference = 0
  total_abs_difference = 0
  for a_value, b_value in zip(flat_a, flat_b):
    difference = a_value - b_value
    total_difference += difference
    total_abs_difference += abs(difference)
    if abs(difference) > tolerance:
      how_many_different += 1
  if how_many_different == 0:
    return True
  mean_difference = total_difference / value_count
  mean_abs_difference = total_abs_difference / value_count
  proportion_different = (how_many_different * 1.0) / value_count
  print("Tensors have {0} different values ({1}%), with mean difference"
        " {2} and mean absolute difference {3}".format(
            how_many_different, proportion_different * 100, mean_difference,
            mean_abs_difference))
  return False
def get_top_value(input_values):
  """Returns the (flat index, value) of the largest entry in a tensor.

  Args:
    input_values: an ndarray-like object supporting flatten().

  Returns:
    Tuple of (index into the flattened tensor, maximum value).
  """
  max_value = None
  max_index = None
  for index, value in enumerate(input_values.flatten()):
    # Bug fix: the original compared against the builtin `max` function
    # (`value > max`), which raises a TypeError in Python 3 and never
    # tracked the running maximum.
    if max_value is None or value > max_value:
      max_value = value
      max_index = index
  return max_index, max_value
def test_graph(float_graph_def, input_map, output_names, log_graph=False):
  """Runs the float graph through the rewriter and tests the results."""
  output_tensors = [output_name + ":0" for output_name in output_names]
  float_results = run_graph_def(float_graph_def, input_map, output_tensors)
  # TODO(petewarden): round test is currently failing because there is no
  # RoundToSteps op available.
  # round_rewriter = quantize_graph.GraphRewriter(float_graph_def, "round")
  # round_graph_def = round_rewriter.rewrite(output_name)
  # round_results = run_graph_def(round_graph_def, input_map,
  #                               [output_name + ":0"])
  # assert are_tensors_near(expected, round_results[0], 1.0)
  #
  # TODO(petewarden): Add test for "quantize" mode.
  eightbit_rewriter = quantize_graph.GraphRewriter(
      float_graph_def, "eightbit", quantized_input_range=None)
  eightbit_graph_def = eightbit_rewriter.rewrite(output_names)
  eightbit_results = run_graph_def(eightbit_graph_def, input_map,
                                   output_tensors)
  for expected, result in zip(float_results, eightbit_results):
    assert are_tensors_near(expected, result, 1.0)
  if log_graph:
    tf_logging.info("8bit:\n%s", str(eightbit_graph_def))
  # Test the weights_rounded mode. This uses the default bit_depth.
  weights_rounded_rewriter = quantize_graph.GraphRewriter(
      float_graph_def, "weights_rounded", quantized_input_range=None)
  weights_rounded_graph_def = weights_rounded_rewriter.rewrite(output_names)
  weights_rounded_results = run_graph_def(weights_rounded_graph_def, input_map,
                                          output_tensors)
  for expected, result in zip(float_results, weights_rounded_results):
    assert are_tensors_near(expected, result, 1.0)
class QuantizeGraphTest(test.TestCase):
  def test_negative_const_problem(self):
    """Quantizing a negative scalar constant must produce a 4-node result."""
    shape_constant_name = "shape_constant"
    shape_constant = quantize_graph.create_constant_node(
        shape_constant_name, value=-0.8, dtype=dtypes.float32, shape=[1])
    quantization_result = quantize_graph.quantize_weight_eightbit(
        shape_constant, b"MIN_COMBINED")
    self.assertEqual(4, len(quantization_result))
  def test_odd_padding_problem(self):
    """Tests one error case we ran into in a real graph."""
    # 4x4 single-channel image, 3x3 filter, stride 2, SAME padding.
    test_conv(1, 4, 4, 1, 3, 1, 2, b"SAME",
              [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
              [1, 2, 3, 4, 5, 6, 7, 8, 9])
  def test_mat_mul_tiny(self):
    """Quantizes tiny MatMuls, including degenerate constant ranges."""
    # These tests are added to test the generate case where
    # min(matrix) == max(matrix), which used to cause problems.
    test_mat_mul(1, 1, 1, [2], [3])
    test_mat_mul(1, 2, 1, [1], [2, 3])
    test_mat_mul(1, 1, 2, [1, 1], [1, 1])
    test_mat_mul(1, 1, 2, [0, 0], [1, 1])
    # The general case.
    test_mat_mul(1, 1, 2, [1, 2], [1, 2])
  def test_mat_mul_small(self):
    """Quantizes a small 2x3 by 3x4 MatMul."""
    test_mat_mul(2, 4, 3, [1, 2, 3, 4, 5, 6],
                 [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18])
  def test_conv(self):
    """Quantizes a basic 3x3 convolution over a 4x3 image."""
    test_conv(1, 4, 3, 1, 3, 1, 1, b"SAME",
              [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
              [1, 4, 7, 2, 5, 8, 3, 6, 9])
  def test_reshape(self):
    """Tests that MatMul->Reshape->MatMul avoids extra quantize/dequantize."""

    def make_matmul(name, a, b):
      # Helper: build a float32 MatMul node multiplying nodes a and b.
      n = quantize_graph.create_node("MatMul", name, [a.name, b.name])
      quantize_graph.set_attr_dtype(n, "T", dtypes.float32)
      quantize_graph.set_attr_bool(n, "transpose_a", False)
      quantize_graph.set_attr_bool(n, "transpose_b", False)
      return n

    # matmul_1 = input*weight_1
    input_node = quantize_graph.create_constant_node(
        "input", value=[0, 1, 2, 3], dtype=dtypes.float32, shape=[4, 1])
    weight_1_node = quantize_graph.create_constant_node(
        "weight_1",
        value=[.5, .6, .7, .8, .9],
        dtype=dtypes.float32,
        shape=[1, 5])
    matmul_1_node = make_matmul("matmul_1", input_node, weight_1_node)
    # Reshape 4x5 to 10x2.
    new_shape_node = quantize_graph.create_constant_node(
        "new_shape_node", value=[10, 2], dtype=dtypes.int32, shape=[2])
    reshape_node = quantize_graph.create_node(
        "Reshape", "reshape", [matmul_1_node.name, new_shape_node.name])
    quantize_graph.set_attr_dtype(reshape_node, "T", dtypes.float32)
    # matmul_2_node = reshape*weight_2
    weight_2_node = quantize_graph.create_constant_node(
        "weight_2", value=[1.5, 2.5], dtype=dtypes.float32, shape=[2, 1])
    matmul_2_node = make_matmul("matmul_2", reshape_node, weight_2_node)
    g = graph_pb2.GraphDef()
    g.node.extend([
        input_node, weight_1_node, matmul_1_node, new_shape_node, reshape_node,
        weight_2_node, matmul_2_node
    ])
    # Test the graph
    test_graph(g, {}, ["matmul_2"])
    # Verify there is only one Quantize and one Requantize op.
    eightbit_rewriter = quantize_graph.GraphRewriter(
        g, "eightbit", quantized_input_range=None)
    eightbit_graph_def = eightbit_rewriter.rewrite(["matmul_2"])
    ops = [node.op for node in eightbit_graph_def.node]
    # No quantize since all inputs are const and can be quantized up-front.
    self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
    self.assertEqual(1, ops.count("QuantizedReshape"))
    # One dequantize at the end.
    self.assertEqual(1, ops.count("Dequantize"))
  def test_quantize_array(self):
    """Exercises quantize_array on invalid, trivial and normal inputs."""
    # Test invalid parameters (empty array, or 0 buckets).
    self.assertRaises(ValueError, quantize_graph.quantize_array, np.array([]),
                      2)
    self.assertRaises(ValueError, quantize_graph.quantize_array,
                      np.array([1, 2]), 0)
    # Test input array of length 1.
    arr = np.array([1])
    qarr = quantize_graph.quantize_array(arr, 1)
    self.assertEqual(arr, qarr)
    qarr = quantize_graph.quantize_array(arr, 2)
    self.assertEqual(arr, qarr)
    # Test input array with all elements equal.
    arr = np.array([1, 1, 1])
    qarr = quantize_graph.quantize_array(arr, 10)
    self.assertTrue((np.array([1, 1, 1]) == qarr).all())
    # Test "normal" input arrays.
    arr = np.array([0, 0.3, 0.6, 1])
    qarr = quantize_graph.quantize_array(arr, 1)
    self.assertTrue((np.array([0.5, 0.5, 0.5, 0.5]) == qarr).all())
    qarr = quantize_graph.quantize_array(arr, 2)
    self.assertTrue((np.array([0.25, 0.25, 0.75, 0.75]) == qarr).all())
    qarr = quantize_graph.quantize_array(arr.reshape((2, 2)), 2)
    self.assertTrue((np.array([[0.25, 0.25], [0.75, 0.75]]) == qarr).all())
  def test_non_float_concat(self):
    """An int32 Concat should pass through the rewriter unquantized."""
    concat_dim = quantize_graph.create_constant_node(
        "concat_dim", value=0, dtype=dtypes.int32, shape=[])
    a = quantize_graph.create_constant_node(
        "a",
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.int32,
        shape=[2, 2, 3])
    b = quantize_graph.create_constant_node(
        "b",
        value=[13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24],
        dtype=dtypes.int32,
        shape=[2, 2, 3])
    concat = quantize_graph.create_node("Concat", "concat",
                                        [concat_dim.name, a.name, b.name])
    quantize_graph.set_attr_int(concat, "N", 2)
    quantize_graph.set_attr_dtype(concat, "T", dtypes.int32)
    g = graph_pb2.GraphDef()
    g.node.extend([concat_dim, a, b, concat])
    test_graph(g, {}, [concat.name])
  def test_non_float_reshape(self):
    """An int32 Reshape should pass through the rewriter unquantized."""
    a = quantize_graph.create_constant_node(
        "a",
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.int32,
        shape=[2, 2, 3])
    shape = quantize_graph.create_constant_node(
        "shape", value=[12], dtype=dtypes.int32, shape=[1])
    reshape = quantize_graph.create_node("Reshape", "reshape",
                                         [a.name, shape.name])
    quantize_graph.set_attr_dtype(reshape, "T", dtypes.int32)
    g = graph_pb2.GraphDef()
    g.node.extend([a, shape, reshape])
    test_graph(g, {}, [reshape.name])
  def test_concat(self):
    """A float Concat should be rewritten into a QuantizedConcat."""
    shape_constant_name = "shape_constant"
    a_constant_name = "a_constant"
    b_constant_name = "b_constant"
    concat_name = "concat"
    float_graph_def = graph_pb2.GraphDef()
    shape_constant = quantize_graph.create_constant_node(
        shape_constant_name, value=0, dtype=dtypes.int32, shape=[])
    float_graph_def.node.extend([shape_constant])
    a_constant = quantize_graph.create_constant_node(
        a_constant_name,
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.float32,
        shape=[2, 2, 3])
    float_graph_def.node.extend([a_constant])
    b_constant = quantize_graph.create_constant_node(
        b_constant_name,
        value=[13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24],
        dtype=dtypes.float32,
        shape=[2, 2, 3])
    float_graph_def.node.extend([b_constant])
    concat_node = quantize_graph.create_node(
        "Concat", concat_name,
        [shape_constant_name, a_constant_name, b_constant_name])
    quantize_graph.set_attr_int(concat_node, "N", 2)
    quantize_graph.set_attr_dtype(concat_node, "T", dtypes.float32)
    float_graph_def.node.extend([concat_node])
    test_graph(float_graph_def, {}, [concat_name])
    # Verify the concat is quantized.
    eightbit_rewriter = quantize_graph.GraphRewriter(
        float_graph_def, "eightbit", quantized_input_range=None)
    eightbit_graph_def = eightbit_rewriter.rewrite([concat_name])
    ops = [node.op for node in eightbit_graph_def.node]
    self.assertEqual(1, ops.count("QuantizedConcat"))
  def test_multiple_outputs(self):
    """Quantizes a graph where one node (Split) has multiple output ports."""
    input_constant_name = "input_constant"
    split_constant_name = "split_constant"
    split_name = "split"
    concat_constant_name = "concat_constant"
    concat_name = "concat"
    float_graph_def = graph_pb2.GraphDef()
    input_constant = quantize_graph.create_constant_node(
        input_constant_name,
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.float32,
        shape=[2, 6])
    float_graph_def.node.extend([input_constant])
    split_constant = quantize_graph.create_constant_node(
        split_constant_name, value=1, dtype=dtypes.int32, shape=[])
    float_graph_def.node.extend([split_constant])
    split_node = quantize_graph.create_node(
        "Split", split_name, [split_constant_name, input_constant_name])
    quantize_graph.set_attr_int(split_node, "num_split", 2)
    quantize_graph.set_attr_dtype(split_node, "T", dtypes.float32)
    float_graph_def.node.extend([split_node])
    concat_constant = quantize_graph.create_constant_node(
        concat_constant_name, value=1, dtype=dtypes.int32, shape=[])
    float_graph_def.node.extend([concat_constant])
    # Concat consumes both output ports of the Split (":0" and ":1").
    concat_node = quantize_graph.create_node(
        "Concat", concat_name,
        [concat_constant_name, split_name + ":0", split_name + ":1"])
    quantize_graph.set_attr_int(concat_node, "N", 2)
    quantize_graph.set_attr_dtype(concat_node, "T", dtypes.float32)
    float_graph_def.node.extend([concat_node])
    test_graph(float_graph_def, {}, [concat_name])
  def test_node_name_from_input(self):
    """Strips the control-edge prefix and output-port suffix from a name."""
    self.assertEqual("SomeName",
                     quantize_graph.node_name_from_input("^SomeName:2"))
  def test_unique_node_name_from_input(self):
    """Encodes the control prefix and port into a unique flat name."""
    self.assertEqual("__hat__SomeName__port__2",
                     quantize_graph.unique_node_name_from_input("^SomeName:2"))
  def test_identity(self):
    """Quantizes a graph with an Identity feeding both inputs of a Mul."""
    input_constant_name = "input_constant"
    identity_name = "identity"
    float_graph_def = graph_pb2.GraphDef()
    input_constant = quantize_graph.create_constant_node(
        input_constant_name,
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.float32,
        shape=[2, 6])
    float_graph_def.node.extend([input_constant])
    identity_node = quantize_graph.create_node("Identity", identity_name,
                                               [input_constant_name])
    quantize_graph.set_attr_dtype(identity_node, "T", dtypes.float32)
    float_graph_def.node.extend([identity_node])
    mul_name = "mul"
    mul_node = quantize_graph.create_node("Mul", mul_name,
                                          [identity_name, identity_name])
    quantize_graph.set_attr_dtype(mul_node, "T", dtypes.float32)
    float_graph_def.node.extend([mul_node])
    test_graph(float_graph_def, {}, [mul_name])
  def test_keep_control_edges(self):
    """Control edges must survive remove_training_nodes/extract_sub_graph.

    CheckNumerics nodes are stripped, but the NoOp control dependency on
    a_identity must be preserved in the resulting graph.
    """
    no_op_name = "no_op"
    a_constant_name = "a_constant"
    b_constant_name = "b_constant"
    a_check_name = "a_check"
    b_check_name = "b_check"
    a_identity_name = "a_identity"
    b_identity_name = "b_identity"
    add_name = "add"
    graph_def = graph_pb2.GraphDef()
    no_op = quantize_graph.create_node("NoOp", no_op_name, [])
    graph_def.node.extend([no_op])
    a_constant = quantize_graph.create_constant_node(
        a_constant_name, value=1, dtype=dtypes.float32, shape=[])
    graph_def.node.extend([a_constant])
    a_check_node = quantize_graph.create_node("CheckNumerics", a_check_name,
                                              [a_constant_name])
    graph_def.node.extend([a_check_node])
    a_identity_node = quantize_graph.create_node(
        "Identity", a_identity_name,
        [a_constant_name, "^" + a_check_name, "^" + no_op_name])
    graph_def.node.extend([a_identity_node])
    b_constant = quantize_graph.create_constant_node(
        b_constant_name, value=1, dtype=dtypes.float32, shape=[])
    graph_def.node.extend([b_constant])
    b_check_node = quantize_graph.create_node("CheckNumerics", b_check_name,
                                              [b_constant_name])
    graph_def.node.extend([b_check_node])
    b_identity_node = quantize_graph.create_node(
        "Identity", b_identity_name, [b_constant_name, "^" + b_check_name])
    graph_def.node.extend([b_identity_node])
    add_node = quantize_graph.create_node("Add", add_name,
                                          [a_identity_name, b_identity_name])
    quantize_graph.set_attr_dtype(add_node, "T", dtypes.float32)
    graph_def.node.extend([add_node])
    # Expected graph: CheckNumerics gone, NoOp control edge kept on
    # a_identity, and b's identity folded away entirely.
    expected_output = graph_pb2.GraphDef()
    no_op = quantize_graph.create_node("NoOp", no_op_name, [])
    expected_output.node.extend([no_op])
    a_constant = quantize_graph.create_constant_node(
        a_constant_name, value=1, dtype=dtypes.float32, shape=[])
    expected_output.node.extend([a_constant])
    a_identity_node = quantize_graph.create_node(
        "Identity", a_identity_name, [a_constant_name, "^" + no_op_name])
    expected_output.node.extend([a_identity_node])
    b_constant = quantize_graph.create_constant_node(
        b_constant_name, value=1, dtype=dtypes.float32, shape=[])
    expected_output.node.extend([b_constant])
    add_node = quantize_graph.create_node("Add", add_name,
                                          [a_identity_name, b_constant_name])
    quantize_graph.set_attr_dtype(add_node, "T", dtypes.float32)
    expected_output.node.extend([add_node])
    output = graph_util.remove_training_nodes(graph_def)
    stripped_output = graph_util.extract_sub_graph(output, [add_name])
    self.assertProtoEquals(expected_output, stripped_output)
  def test_batch_norm(self):
    """Quantizes a BatchNormWithGlobalNormalization over constant inputs."""
    input_constant_name = "input_constant"
    mean_constant_name = "mean_constant"
    variance_constant_name = "variance_constant"
    beta_constant_name = "beta_constant"
    gamma_constant_name = "gamma_constant"
    batch_norm_name = "batch_norm"
    float_graph_def = graph_pb2.GraphDef()
    input_constant = quantize_graph.create_constant_node(
        input_constant_name,
        value=[1, 4, 2, 5, 3, 6, -1, -4, -2, -5, -3, -6],
        dtype=dtypes.float32,
        shape=[1, 1, 6, 2])
    float_graph_def.node.extend([input_constant])
    mean_constant = quantize_graph.create_constant_node(
        mean_constant_name, value=[10, 20], dtype=dtypes.float32, shape=[2])
    float_graph_def.node.extend([mean_constant])
    variance_constant = quantize_graph.create_constant_node(
        variance_constant_name,
        value=[0.25, 0.5],
        dtype=dtypes.float32,
        shape=[2])
    float_graph_def.node.extend([variance_constant])
    beta_constant = quantize_graph.create_constant_node(
        beta_constant_name, value=[0.1, 0.6], dtype=dtypes.float32, shape=[2])
    float_graph_def.node.extend([beta_constant])
    gamma_constant = quantize_graph.create_constant_node(
        gamma_constant_name, value=[0, 0], dtype=dtypes.float32, shape=[2])
    float_graph_def.node.extend([gamma_constant])
    batch_norm_node = quantize_graph.create_node(
        "BatchNormWithGlobalNormalization", batch_norm_name, [
            input_constant_name, mean_constant_name, variance_constant_name,
            beta_constant_name, gamma_constant_name
        ])
    quantize_graph.set_attr_dtype(batch_norm_node, "T", dtypes.float32)
    quantize_graph.set_attr_bool(batch_norm_node, "scale_after_normalization",
                                 False)
    quantize_graph.set_attr_float(batch_norm_node, "variance_epsilon", 0.001)
    float_graph_def.node.extend([batch_norm_node])
    test_graph(float_graph_def, {}, [batch_norm_name])
  def test_max_pool(self):
    """Quantizes a 2x2 MaxPool over a constant input."""
    input_constant_name = "input_constant"
    max_pool_name = "max_pool"
    float_graph_def = graph_pb2.GraphDef()
    input_constant = quantize_graph.create_constant_node(
        input_constant_name,
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.float32,
        shape=[1, 2, 6, 1])
    float_graph_def.node.extend([input_constant])
    max_pool_node = quantize_graph.create_node("MaxPool", max_pool_name,
                                               [input_constant_name])
    quantize_graph.set_attr_int_list(max_pool_node, "ksize", [1, 2, 2, 1])
    quantize_graph.set_attr_int_list(max_pool_node, "strides", [1, 1, 1, 1])
    quantize_graph.set_attr_string(max_pool_node, "padding", b"SAME")
    float_graph_def.node.extend([max_pool_node])
    test_graph(float_graph_def, {}, [max_pool_name])
  def test_avg_pool(self):
    """Quantizes a 2x2 AvgPool over a constant input."""
    input_constant_name = "input_constant"
    avg_pool_name = "avg_pool"
    float_graph_def = graph_pb2.GraphDef()
    input_constant = quantize_graph.create_constant_node(
        input_constant_name,
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.float32,
        shape=[1, 2, 6, 1])
    float_graph_def.node.extend([input_constant])
    avg_pool_node = quantize_graph.create_node("AvgPool", avg_pool_name,
                                               [input_constant_name])
    quantize_graph.set_attr_dtype(avg_pool_node, "T", dtypes.float32)
    quantize_graph.set_attr_int_list(avg_pool_node, "ksize", [1, 2, 2, 1])
    quantize_graph.set_attr_int_list(avg_pool_node, "strides", [1, 1, 1, 1])
    quantize_graph.set_attr_string(avg_pool_node, "padding", b"SAME")
    float_graph_def.node.extend([avg_pool_node])
    test_graph(float_graph_def, {}, [avg_pool_name])
  def test_relu(self):
    """Quantizes a Relu over a constant input."""
    input_constant_name = "input_constant"
    relu_name = "relu"
    float_graph_def = graph_pb2.GraphDef()
    input_constant = quantize_graph.create_constant_node(
        input_constant_name,
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.float32,
        shape=[1, 2, 6, 1])
    float_graph_def.node.extend([input_constant])
    relu_node = quantize_graph.create_node("Relu", relu_name,
                                           [input_constant_name])
    quantize_graph.set_attr_dtype(relu_node, "T", dtypes.float32)
    float_graph_def.node.extend([relu_node])
    test_graph(float_graph_def, {}, [relu_name])
  def test_relu_w_fake_quant_w_min_max_vars(self):
    """Quantizes Relu followed by FakeQuantWithMinMaxVars.

    The FakeQuant node supplies an explicit [0, 12] range, so the rewritten
    graph should need no runtime Quantize ops and exactly one Dequantize.
    """
    input_node = quantize_graph.create_constant_node(
        "input",
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.float32,
        shape=[1, 2, 6, 1])
    relu_node = quantize_graph.create_node("Relu", "relu", [input_node.name])
    quantize_graph.set_attr_dtype(relu_node, "T", dtypes.float32)
    min_node = quantize_graph.create_constant_node(
        "min_bias_add", value=0, dtype=dtypes.float32, shape=[])
    max_node = quantize_graph.create_constant_node(
        "max_bias_add", value=12, dtype=dtypes.float32, shape=[])
    fake_quant_node = quantize_graph.create_node(
        "FakeQuantWithMinMaxVars", "fake_quant",
        [relu_node.name, min_node.name, max_node.name])
    float_graph_def = graph_pb2.GraphDef()
    float_graph_def.node.extend(
        [input_node, relu_node, min_node, max_node, fake_quant_node])
    test_graph(float_graph_def, {}, [fake_quant_node.name], log_graph=True)
    # Verify there is only one Quantize and one Requantize op.
    eightbit_rewriter = quantize_graph.GraphRewriter(
        float_graph_def, "eightbit", quantized_input_range=None)
    eightbit_graph_def = eightbit_rewriter.rewrite([fake_quant_node.name])
    ops = [node.op for node in eightbit_graph_def.node]
    # No quantize since all inputs are const and can be quantized up-front.
    self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
    # One dequantize at the end.
    self.assertEqual(1, ops.count("Dequantize"))
  def test_relu6(self):
    """Quantizes a Relu6 over a constant input."""
    input_constant_name = "input_constant"
    relu6_name = "relu6"
    float_graph_def = graph_pb2.GraphDef()
    input_constant = quantize_graph.create_constant_node(
        input_constant_name,
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.float32,
        shape=[1, 2, 6, 1])
    float_graph_def.node.extend([input_constant])
    relu6_node = quantize_graph.create_node("Relu6", relu6_name,
                                            [input_constant_name])
    quantize_graph.set_attr_dtype(relu6_node, "T", dtypes.float32)
    float_graph_def.node.extend([relu6_node])
    test_graph(float_graph_def, {}, [relu6_name])
  def test_bias_add(self):
    """Quantizes a BiasAdd over constant inputs."""
    input_constant_name = "input_constant"
    offset_constant_name = "offset_constant"
    bias_add_name = "bias_add"
    float_graph_def = graph_pb2.GraphDef()
    input_constant = quantize_graph.create_constant_node(
        input_constant_name,
        value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        dtype=dtypes.float32,
        shape=[1, 1, 2, 6])
    float_graph_def.node.extend([input_constant])
    offset_constant = quantize_graph.create_constant_node(
        offset_constant_name,
        value=[1, 2, 3, 4, 5, 6],
        dtype=dtypes.float32,
        shape=[6])
    float_graph_def.node.extend([offset_constant])
    bias_add_node = quantize_graph.create_node(
        "BiasAdd", bias_add_name, [input_constant_name, offset_constant_name])
    quantize_graph.set_attr_dtype(bias_add_node, "T", dtypes.float32)
    float_graph_def.node.extend([bias_add_node])
    test_graph(float_graph_def, {}, [bias_add_name])
  def test_quantized_input_range_errors(self):
    """GraphRewriter rejects a quantized input range in invalid settings."""
    with self.assertRaises(ValueError):
      # Invalid mode.
      quantize_graph.GraphRewriter(graph_pb2.GraphDef(), "weights_rounded",
                                   [0, 1])
    with self.assertRaises(ValueError):
      # Invalid range.
      quantize_graph.GraphRewriter(graph_pb2.GraphDef(), "eightbit", [0, -1])
  def test_quantized_input_range_bias_add(self):
    """Runs the quantized-input-range checks on a placeholder BiasAdd graph."""
    input_shape = [1, 1, 2, 6]
    input_n = quantize_graph.create_node("PlaceholderV2", "input", [])
    quantize_graph.set_attr_dtype(input_n, "dtype", dtypes.float32)
    quantize_graph.set_attr_shape(input_n, "shape", input_shape)
    offset_n = quantize_graph.create_constant_node(
        "offset", value=[1, 2, 3, 4, 5, 6], dtype=dtypes.float32, shape=[6])
    bias_add_n = quantize_graph.create_node("BiasAdd", "bias_add",
                                            [input_n.name, offset_n.name])
    quantize_graph.set_attr_dtype(bias_add_n, "T", dtypes.float32)
    float_graph_def = graph_pb2.GraphDef()
    float_graph_def.node.extend([input_n, offset_n, bias_add_n])
    input_map = {
        input_n.name + ":0":
            np.reshape([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], input_shape)
    }
    self._RunTestsForQuantizedInputRange(float_graph_def, input_map,
                                         [bias_add_n.name], [-1, 20.])
    self._RunTestsForQuantizedInputRange(float_graph_def, input_map,
                                         [bias_add_n.name], [0, 12.])
  def test_quantized_input_range_mat_mul(self):
    """Runs the quantized-input-range checks on a two-placeholder MatMul."""
    shapes = [[3, 2], [2, 4]]
    inputs = []
    for i, shape in enumerate(shapes):
      node = quantize_graph.create_node("PlaceholderV2", "input_%s" % i, [])
      quantize_graph.set_attr_dtype(node, "dtype", dtypes.float32)
      quantize_graph.set_attr_shape(node, "shape", shape)
      inputs.append(node)
    mat_mul_node = quantize_graph.create_node("MatMul", "mat_mul",
                                              [n.name for n in inputs])
    quantize_graph.set_attr_dtype(mat_mul_node, "T", dtypes.float32)
    float_graph_def = graph_pb2.GraphDef()
    float_graph_def.node.extend(inputs + [mat_mul_node])
    input_map = {
        inputs[0].name + ":0":
            np.reshape([1, 2, 3, 4, 5, 6], shapes[0]),
        inputs[1].name + ":0":
            np.reshape([.8, .7, .6, .5, .4, .3, .2, .1], shapes[1])
    }
    self._RunTestsForQuantizedInputRange(float_graph_def, input_map,
                                         [mat_mul_node.name], [-1, 20.])
    self._RunTestsForQuantizedInputRange(float_graph_def, input_map,
                                         [mat_mul_node.name], [0, 6.])
def _RunTestsForQuantizedInputRange(self, float_graph_def, input_map,
                                    output_names, input_range):
  """Quantizes float_graph_def twice and compares results with float run.

  First pass treats the inputs as already quantized into `input_range`, so
  the rewritten graph must contain no Quantize ops; second pass leaves
  quantized_input_range unset, so the rewriter must insert one quantize op
  per input. Both passes must produce results close to the float graph.
  """
  if sys.version_info[0] == 3:
    # uint8->quint8 conversion for numpy is not working currently.
    return
  # Manually quantize the float inputs into uint8 using input_range,
  # mimicking what an upstream quantized producer would feed this graph.
  quantized_input_map = {}
  for k, v in input_map.items():
    arr = [
        int(
            round((n - input_range[0]) * 255 / (input_range[1] - input_range[
                0]))) for n in v.flat
    ]
    arr = np.array(arr, np.uint8)
    arr = arr.reshape(v.shape)
    arr = arr.astype(dtypes.quint8.as_numpy_dtype)
    quantized_input_map[k] = arr
  output_tensors = [output_name + ":0" for output_name in output_names]
  float_results = run_graph_def(float_graph_def, input_map, output_tensors)
  # Quantize treating the input as quantized in range <input_range>.
  rewriter = quantize_graph.GraphRewriter(float_graph_def, "eightbit",
                                          input_range)
  graph_def = rewriter.rewrite(output_names)
  results = run_graph_def(graph_def, quantized_input_map, output_tensors)
  for expected, result in zip(float_results, results):
    assert are_tensors_near(expected, result, .5)
  ops = [node.op for node in graph_def.node]
  # No Quantize ops may remain: the graph consumes pre-quantized inputs.
  self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
  self.assertEqual(len(output_names), ops.count("Dequantize"))
  # Quantize without treating input as quantized.
  rewriter = quantize_graph.GraphRewriter(
      float_graph_def, "eightbit", quantized_input_range=None)
  graph_def = rewriter.rewrite(output_names)
  results = run_graph_def(graph_def, input_map, output_tensors)
  for expected, result in zip(float_results, results):
    assert are_tensors_near(expected, result, .5)
  ops = [node.op for node in graph_def.node]
  # Exactly one quantize op must have been inserted per float input.
  self.assertEqual(
      len(input_map), ops.count("QuantizeV2") + ops.count("Quantize"))
  self.assertEqual(len(output_names), ops.count("Dequantize"))
def test_bias_add_w_fake_quant_w_min_max_vars(self):
  """Checks that FakeQuantWithMinMaxVars ranges win over the fallback range.

  All inputs are constants, so the eightbit rewrite should quantize them
  up-front (no Quantize ops) and the fallback range constants should never
  be materialized in the rewritten graph.
  """
  input_node = quantize_graph.create_constant_node(
      "input",
      value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
      dtype=dtypes.float32,
      shape=[1, 1, 2, 5])
  offset_node = quantize_graph.create_constant_node(
      "offset", value=[1, 2, 3, 4, 5], dtype=dtypes.float32, shape=[5])
  bias_add_node = quantize_graph.create_node(
      "BiasAdd", "bias_add", [input_node.name, offset_node.name])
  quantize_graph.set_attr_dtype(bias_add_node, "T", dtypes.float32)
  # Explicit min/max scalars consumed by the FakeQuant op below.
  min_node = quantize_graph.create_constant_node(
      "min_bias_add", value=-.5, dtype=dtypes.float32, shape=[])
  max_node = quantize_graph.create_constant_node(
      "max_bias_add", value=15.5, dtype=dtypes.float32, shape=[])
  fake_quant_node = quantize_graph.create_node(
      "FakeQuantWithMinMaxVars", "fake_quant",
      [bias_add_node.name, min_node.name, max_node.name])
  float_graph_def = graph_pb2.GraphDef()
  float_graph_def.node.extend([
      input_node, offset_node, bias_add_node, min_node, max_node,
      fake_quant_node
  ])
  test_graph(float_graph_def, {}, [fake_quant_node.name], log_graph=True)
  # Verify there is only one Quantize and one Requantize op.
  # Pass in fallback_quantization_range, although it will have no effect
  # because the FakeQuantWithMinMaxVars are used instead.
  eightbit_rewriter = quantize_graph.GraphRewriter(
      float_graph_def,
      "eightbit",
      quantized_input_range=None,
      fallback_quantization_range=[-100, 100])
  eightbit_graph_def = eightbit_rewriter.rewrite([fake_quant_node.name])
  ops = [node.op for node in eightbit_graph_def.node]
  node_names = [node.name for node in eightbit_graph_def.node]
  # No quantize since all inputs are const and can be quantized up-front.
  self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
  # One dequantize at the end.
  self.assertEqual(1, ops.count("Dequantize"))
  # The fallback constants are not in the graph.
  self.assertEqual(0, node_names.count("fallback_quantization_min_value"))
  self.assertEqual(0, node_names.count("fallback_quantization_max_value"))
def test_bias_add_w_fallback_min_max_vars(self):
  """Checks that the fallback quantization range is used when no FakeQuant
  op provides a range.

  The fallback constants must appear in the rewritten graph and no
  RequantizationRange op may be emitted.
  """
  input_node = quantize_graph.create_constant_node(
      "input",
      value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
      dtype=dtypes.float32,
      shape=[1, 1, 2, 5])
  offset_node = quantize_graph.create_constant_node(
      "offset", value=[1, 2, 3, 4, 5], dtype=dtypes.float32, shape=[5])
  bias_add_node = quantize_graph.create_node(
      "BiasAdd", "bias_add", [input_node.name, offset_node.name])
  quantize_graph.set_attr_dtype(bias_add_node, "T", dtypes.float32)
  float_graph_def = graph_pb2.GraphDef()
  float_graph_def.node.extend([input_node, offset_node, bias_add_node])
  test_graph(float_graph_def, {}, [bias_add_node.name], log_graph=True)
  # Verify there is only one Quantize, one Requantize op, and no
  # RequantizationRange op.
  eightbit_rewriter = quantize_graph.GraphRewriter(
      float_graph_def,
      "eightbit",
      quantized_input_range=None,
      fallback_quantization_range=[-.5, 15.5])
  eightbit_graph_def = eightbit_rewriter.rewrite([bias_add_node.name])
  ops = [node.op for node in eightbit_graph_def.node]
  node_names = [node.name for node in eightbit_graph_def.node]
  # No quantize since all inputs are const and can be quantized up-front.
  self.assertEqual(0, ops.count("QuantizeV2") + ops.count("Quantize"))
  # One dequantize at the end.
  self.assertEqual(1, ops.count("Dequantize"))
  # No RequantizationRange
  self.assertEqual(0, ops.count("RequantizationRange"))
  # The fallback constants are in the graph.
  self.assertEqual(1, node_names.count("fallback_quantization_min_value"))
  self.assertEqual(1, node_names.count("fallback_quantization_max_value"))
def test_remove_redundant_quantization(self):
  """remove_redundant_quantization collapses Dequantize->QuantizeV2 pairs.

  Builds a graph where two already-quantized constants are dequantized and
  immediately requantized before feeding a QuantizedMatMul, and verifies
  that the rewriter wires the constants (and their min/max scalars)
  straight into the matmul instead.
  """

  def make_quantized_constant(graph, prefix, range_value):
    # Append <prefix>_constant plus its min/max range scalars to `graph`
    # and return their node names.
    const_name = prefix + "_constant"
    min_name = prefix + "_constant_min"
    max_name = prefix + "_constant_max"
    graph.node.extend([
        quantize_graph.create_constant_node(
            const_name, value=(0,), dtype=dtypes.quint8, shape=[])
    ])
    graph.node.extend([
        quantize_graph.create_constant_node(
            min_name, value=range_value, dtype=dtypes.float32, shape=[])
    ])
    graph.node.extend([
        quantize_graph.create_constant_node(
            max_name, value=range_value, dtype=dtypes.float32, shape=[])
    ])
    return const_name, min_name, max_name

  def add_dequantize_quantize(graph, prefix, const_name, min_name, max_name):
    # Append the redundant Dequantize -> QuantizeV2 round trip that the
    # rewriter is expected to remove; returns the QuantizeV2 node name.
    dequantize_name = prefix + "_dequantize"
    quantize_name = prefix + "_quantize"
    dequantize_node = quantize_graph.create_node(
        "Dequantize", dequantize_name, [const_name, min_name, max_name])
    quantize_graph.set_attr_dtype(dequantize_node, "T", dtypes.uint8)
    graph.node.extend([dequantize_node])
    quantize_node = quantize_graph.create_node(
        "QuantizeV2", quantize_name,
        [dequantize_name, dequantize_name + ":1", dequantize_name + ":2"])
    quantize_graph.set_attr_dtype(quantize_node, "T", dtypes.uint8)
    graph.node.extend([quantize_node])
    return quantize_name

  def add_quantized_mat_mul(graph, name, input_names):
    # Append a QuantizedMatMul consuming the six given tensors.
    node = quantize_graph.create_node("QuantizedMatMul", name, input_names)
    quantize_graph.set_attr_dtype(node, "T1", dtypes.uint8)
    quantize_graph.set_attr_dtype(node, "T2", dtypes.int32)
    graph.node.extend([node])

  mat_mul_name = "mat_mul"

  # Input graph: both operands take the redundant round trip.
  graph_def = graph_pb2.GraphDef()
  a_const, a_min, a_max = make_quantized_constant(graph_def, "a", 2)
  a_quant = add_dequantize_quantize(graph_def, "a", a_const, a_min, a_max)
  b_const, b_min, b_max = make_quantized_constant(graph_def, "b", 3)
  b_quant = add_dequantize_quantize(graph_def, "b", b_const, b_min, b_max)
  add_quantized_mat_mul(graph_def, mat_mul_name, [
      a_quant, b_quant, a_quant + ":1", a_quant + ":2", b_quant + ":1",
      b_quant + ":2"
  ])

  # Expected graph: constants and their ranges feed the matmul directly.
  expected_output = graph_pb2.GraphDef()
  make_quantized_constant(expected_output, "a", 2)
  make_quantized_constant(expected_output, "b", 3)
  add_quantized_mat_mul(
      expected_output, mat_mul_name,
      [a_const, b_const, a_min, a_max, b_min, b_max])

  rewriter = quantize_graph.GraphRewriter(
      graph_def, [mat_mul_name], quantized_input_range=None)
  output = rewriter.remove_redundant_quantization(graph_def)
  stripped_output = graph_util.extract_sub_graph(output, [mat_mul_name])
  self.assertProtoEquals(expected_output, stripped_output)
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  test.main()
| |
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# prepare for Python 3
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
sys.dont_write_bytecode = True # prevent creation of .pyc files
try:
import Queue
except ImportError: # Queue was renamed to queue in Python 3
import queue as Queue
import logging
import os
import re
import resource
import subprocess
import threading
import time
from .model import CORELIMIT, MEMLIMIT, TIMELIMIT, SOFTTIMELIMIT
from . import cgroups
from .resources import *
from .runexecutor import RunExecutor
from .systeminfo import * # @UnusedWildImport
from . import util as util
# All _Worker threads created during execute_benchmark(); stop() iterates this.
WORKER_THREADS = []
# Set to True by stop(); checked by workers and the main loop to terminate early.
STOPPED_BY_INTERRUPT = False
_BYTE_FACTOR = 1000 # byte in kilobyte
def init(config, benchmark):
    """Prepare the benchmark for execution and warn about parallel instances.

    Resolves the tool executable and its version on the benchmark object.
    `config` is accepted for interface compatibility with other executor
    modules and is currently unused here.
    """
    benchmark.executable = benchmark.tool.executable()
    benchmark.tool_version = benchmark.tool.version(benchmark.executable)

    try:
        processes = subprocess.Popen(['ps', '-eo', 'cmd'],
                                     stdout=subprocess.PIPE).communicate()[0]
        # Raw string for the regex: "\." in a plain string literal is an
        # invalid escape sequence (DeprecationWarning on Python >= 3.6).
        if len(re.findall(r"python.*benchmark\.py",
                          util.decode_to_string(processes))) > 1:
            logging.warning("Already running instance of this script detected. "
                            "Please make sure to not interfere with somebody else's benchmarks.")
    except OSError:
        pass # this does not work on Windows
def get_system_info():
    """Return a fresh SystemInfo instance describing this host (see .systeminfo)."""
    return SystemInfo()
def execute_benchmark(benchmark, output_handler):
    """Execute all run sets of `benchmark` with a pool of worker threads.

    For each run set that should be executed, all runs are put into the
    shared worker queue, `benchmark.num_of_threads` workers are spawned,
    and aggregated wall time, CPU time, and energy are reported through
    `output_handler`. Respects the global STOPPED_BY_INTERRUPT flag for
    graceful termination on Ctrl+C.
    """
    run_sets_executed = 0
    logging.debug("I will use {0} threads.".format(benchmark.num_of_threads))
    my_cgroups = cgroups.find_my_cgroups()
    coreAssignment = None # cores per run
    memoryAssignment = None # memory banks per run
    if CORELIMIT in benchmark.rlimits:
        # Pinning runs to cores requires the cpuset cgroup subsystem.
        if not my_cgroups.require_subsystem(cgroups.CPUSET):
            sys.exit("Cgroup subsystem cpuset is required for limiting the number of CPU cores/memory nodes.")
        coreAssignment = get_cpu_cores_per_run(benchmark.rlimits[CORELIMIT], benchmark.num_of_threads, my_cgroups)
        memoryAssignment = get_memory_banks_per_run(coreAssignment, my_cgroups)
    if MEMLIMIT in benchmark.rlimits:
        # check whether we have enough memory in the used memory banks for all runs
        memLimit = benchmark.rlimits[MEMLIMIT] * _BYTE_FACTOR * _BYTE_FACTOR # MB to Byte
        check_memory_size(memLimit, benchmark.num_of_threads, memoryAssignment, my_cgroups)
    if benchmark.num_of_threads > 1 and is_turbo_boost_enabled():
        logging.warning("Turbo boost of CPU is enabled. Starting more than one benchmark in parallel affects the CPU frequency and thus makes the performance unreliable.")
    # iterate over run sets
    for runSet in benchmark.run_sets:
        if STOPPED_BY_INTERRUPT: break
        if not runSet.should_be_executed():
            output_handler.output_for_skipping_run_set(runSet)
        elif not runSet.runs:
            output_handler.output_for_skipping_run_set(runSet, "because it has no files")
        else:
            run_sets_executed += 1
            # get times before runSet
            ruBefore = resource.getrusage(resource.RUSAGE_CHILDREN)
            walltime_before = time.time()
            energyBefore = util.measure_energy()
            output_handler.output_before_run_set(runSet)
            # put all runs into a queue
            for run in runSet.runs:
                _Worker.working_queue.put(run)
            # create some workers
            for i in range(benchmark.num_of_threads):
                cores = coreAssignment[i] if coreAssignment else None
                memBanks = memoryAssignment[i] if memoryAssignment else None
                # Workers start themselves in __init__ and pull from the queue.
                WORKER_THREADS.append(_Worker(benchmark, cores, memBanks, output_handler))
            # wait until all tasks are done,
            # instead of queue.join(), we use a loop and sleep(1) to handle KeyboardInterrupt
            finished = False
            while not finished and not STOPPED_BY_INTERRUPT:
                try:
                    # Inspect the queue's internal counter under its own lock.
                    _Worker.working_queue.all_tasks_done.acquire()
                    finished = (_Worker.working_queue.unfinished_tasks == 0)
                finally:
                    _Worker.working_queue.all_tasks_done.release()
                try:
                    time.sleep(0.1) # sleep some time
                except KeyboardInterrupt:
                    stop()
            # get times after runSet
            walltime_after = time.time()
            energy = util.measure_energy(energyBefore)
            usedWallTime = walltime_after - walltime_before
            ruAfter = resource.getrusage(resource.RUSAGE_CHILDREN)
            # CPU time of all child processes spawned during this run set.
            usedCpuTime = (ruAfter.ru_utime + ruAfter.ru_stime) \
                        - (ruBefore.ru_utime + ruBefore.ru_stime)
            if STOPPED_BY_INTERRUPT:
                output_handler.set_error('interrupted', runSet)
            output_handler.output_after_run_set(runSet, cputime=usedCpuTime, walltime=usedWallTime, energy=energy)
    output_handler.output_after_benchmark(STOPPED_BY_INTERRUPT)
def stop():
    """Abort the benchmark: signal all workers and wait until they exit."""
    global STOPPED_BY_INTERRUPT
    STOPPED_BY_INTERRUPT = True

    util.printOut("killing subprocesses...")
    # First ask every worker to abort asynchronously, so all of them stop
    # as soon as possible ...
    for thread in WORKER_THREADS:
        thread.stop()
    # ... then block until each worker thread has actually terminated.
    for thread in WORKER_THREADS:
        thread.join()
class _Worker(threading.Thread):
    """
    A Worker is a daemonic thread that takes jobs from the shared
    working_queue and executes them with its own RunExecutor.
    """
    # Class-level queue shared by all workers; filled by execute_benchmark().
    working_queue = Queue.Queue()

    def __init__(self, benchmark, my_cpus, my_memory_nodes, output_handler):
        """Create the worker and start it immediately.

        @param benchmark: the benchmark whose runs are executed
        @param my_cpus: CPU cores this worker may use, or None
        @param my_memory_nodes: memory banks this worker may use, or None
        @param output_handler: receives per-run output callbacks
        """
        threading.Thread.__init__(self) # constructor of superclass
        self.benchmark = benchmark
        self.my_cpus = my_cpus
        self.my_memory_nodes = my_memory_nodes
        self.output_handler = output_handler
        self.run_executor = RunExecutor()
        self.setDaemon(True)
        self.start()

    def run(self):
        # Drain the queue until it is empty or the benchmark was interrupted.
        while not _Worker.working_queue.empty() and not STOPPED_BY_INTERRUPT:
            currentRun = _Worker.working_queue.get_nowait()
            try:
                self.execute(currentRun)
            except SystemExit as e:
                logging.critical(e)
            except BaseException as e:
                logging.exception('Exception during run execution')
            # Always mark the task done so the main loop can terminate.
            _Worker.working_queue.task_done()

    def execute(self, run):
        """
        This function executes the tool with a sourcefile with options.
        It also calls functions for output before and after the run.
        """
        self.output_handler.output_before_run(run)
        benchmark = self.benchmark

        memlimit = None
        if MEMLIMIT in benchmark.rlimits:
            memlimit = benchmark.rlimits[MEMLIMIT] * _BYTE_FACTOR * _BYTE_FACTOR # MB to Byte

        maxLogfileSize = benchmark.config.maxLogfileSize
        # BUGFIX: test for the "unlimited" marker -1 first. Previously the
        # truthiness check came first, and since -1 is truthy the
        # "== -1" branch was unreachable, so -1 got scaled into a bogus
        # negative byte limit instead of disabling the limit.
        if maxLogfileSize == -1:
            maxLogfileSize = None # -1 means no limit on the log-file size
        elif maxLogfileSize:
            maxLogfileSize *= _BYTE_FACTOR * _BYTE_FACTOR # MB to Byte

        result = \
            self.run_executor.execute_run(
                run.cmdline(), run.log_file,
                hardtimelimit=benchmark.rlimits.get(TIMELIMIT),
                softtimelimit=benchmark.rlimits.get(SOFTTIMELIMIT),
                cores=self.my_cpus,
                memory_nodes=self.my_memory_nodes,
                memlimit=memlimit,
                environments=benchmark.environment(),
                workingDir=benchmark.working_directory(),
                maxLogfileSize=maxLogfileSize)

        # Copy the measurement values onto the run object; unknown keys are
        # stored with an '@' prefix so they are treated as hidden columns.
        for key, value in result.items():
            if key == 'walltime':
                run.walltime = value
            elif key == 'cputime':
                run.cputime = value
            elif key == 'memory':
                run.values['memUsage'] = result['memory']
            elif key == 'energy':
                for ekey, evalue in value.items():
                    run.values['energy-'+ekey] = evalue
            else:
                run.values['@' + key] = value

        if self.my_cpus:
            run.values['@cpuCores'] = self.my_cpus
        if self.my_memory_nodes:
            run.values['@memoryNodes'] = self.my_memory_nodes

        if self.run_executor.PROCESS_KILLED:
            # If the run was interrupted, we ignore the result and cleanup.
            run.walltime = 0
            run.cputime = 0
            try:
                if benchmark.config.debug:
                    # Keep the partial log for debugging purposes.
                    os.rename(run.log_file, run.log_file + ".killed")
                else:
                    os.remove(run.log_file)
            except OSError:
                pass
            return

        run.after_execution(result['exitcode'], termination_reason=result.get('terminationreason', None))
        self.output_handler.output_after_run(run)

    def stop(self):
        # asynchronous call to runexecutor,
        # the worker will stop asap, but not within this method.
        self.run_executor.stop()
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
# Signature of the optional callback callers may pass via the `cls` kwarg to
# post-process (pipeline_response, deserialized, response_headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class TriggersOperations:
"""TriggersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.databoxedge.v2019_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
    """Store the pipeline client, configuration, and (de)serializers.

    Instantiated by the service client, not by user code.
    """
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
def list_by_data_box_edge_device(
    self,
    device_name: str,
    resource_group_name: str,
    expand: Optional[str] = None,
    **kwargs
) -> AsyncIterable["_models.TriggerList"]:
    """Lists all the triggers configured in the device.

    :param device_name: The device name.
    :type device_name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param expand: Specify $filter='CustomContextTag eq :code:`<tag>`' to filter on custom context
     tag property.
    :type expand: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either TriggerList or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databoxedge.v2019_03_01.models.TriggerList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # NOTE(review): generated by AutoRest — keep manual edits minimal, they
    # are lost when the code is regenerated.
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.TriggerList"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-03-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request; the first page uses the templated URL, all
        # following pages use the service-provided next_link verbatim.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_by_data_box_edge_device.metadata['url']  # type: ignore
            path_format_arguments = {
                'deviceName': self._serialize.url("device_name", device_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            if expand is not None:
                query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and hand back (next-page link, items).
        deserialized = self._deserialize('TriggerList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, mapping non-200 responses to exceptions.
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_by_data_box_edge_device.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/triggers'}  # type: ignore
async def get(
    self,
    device_name: str,
    name: str,
    resource_group_name: str,
    **kwargs
) -> "_models.Trigger":
    """Get a specific trigger by name.

    :param device_name: The device name.
    :type device_name: str
    :param name: The trigger name.
    :type name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: Trigger, or the result of cls(response)
    :rtype: ~azure.mgmt.databoxedge.v2019_03_01.models.Trigger
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # NOTE(review): generated by AutoRest — keep manual edits minimal, they
    # are lost when the code is regenerated.
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.Trigger"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-03-01"
    accept = "application/json"

    # Construct URL
    url = self.get.metadata['url']  # type: ignore
    path_format_arguments = {
        'deviceName': self._serialize.url("device_name", device_name, 'str'),
        'name': self._serialize.url("name", name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('Trigger', pipeline_response)

    if cls:
        # Let the caller's callback see the raw response as well.
        return cls(pipeline_response, deserialized, {})

    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/triggers/{name}'}  # type: ignore
async def _create_or_update_initial(
    self,
    device_name: str,
    name: str,
    resource_group_name: str,
    trigger: "_models.Trigger",
    **kwargs
) -> Optional["_models.Trigger"]:
    """Issue the initial PUT of the create-or-update long-running operation.

    Returns the deserialized Trigger for a 200 response, or None for a 202
    (operation accepted, still in progress). Used by begin_create_or_update.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.Trigger"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-03-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self._create_or_update_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'deviceName': self._serialize.url("device_name", device_name, 'str'),
        'name': self._serialize.url("name", name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(trigger, 'Trigger')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('Trigger', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/triggers/{name}'}  # type: ignore
async def begin_create_or_update(
    self,
    device_name: str,
    name: str,
    resource_group_name: str,
    trigger: "_models.Trigger",
    **kwargs
) -> AsyncLROPoller["_models.Trigger"]:
    """Creates or updates a trigger.

    :param device_name: The device name.
    :type device_name: str
    :param name: The trigger name.
    :type name: str
    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param trigger: The trigger.
    :type trigger: ~azure.mgmt.databoxedge.v2019_03_01.models.Trigger
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either Trigger or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databoxedge.v2019_03_01.models.Trigger]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.Trigger"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    # Only issue the initial PUT when not resuming from a continuation token.
    if cont_token is None:
        raw_result = await self._create_or_update_initial(
            device_name=device_name,
            name=name,
            resource_group_name=resource_group_name,
            trigger=trigger,
            cls=lambda x,y,z: x,
            **kwargs
        )

    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final polling response into a Trigger.
        deserialized = self._deserialize('Trigger', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'deviceName': self._serialize.url("device_name", device_name, 'str'),
        'name': self._serialize.url("name", name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
    }

    if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/triggers/{name}'}  # type: ignore
async def _delete_initial(
    self,
    device_name: str,
    name: str,
    resource_group_name: str,
    **kwargs
) -> None:
    """Issue the initial DELETE of the delete long-running operation.

    Accepts 200/202/204; any other status is mapped to an exception.
    Used by begin_delete.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-03-01"
    accept = "application/json"

    # Construct URL
    url = self._delete_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'deviceName': self._serialize.url("device_name", device_name, 'str'),
        'name': self._serialize.url("name", name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/triggers/{name}'}  # type: ignore
    async def begin_delete(
        self,
        device_name: str,
        name: str,
        resource_group_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the trigger on the gateway device.

        :param device_name: The device name.
        :type device_name: str
        :param name: The trigger name.
        :type name: str
        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # Fresh operation: fire the initial DELETE. cls=lambda preserves
            # the raw pipeline response so the poller can read LRO headers.
            raw_result = await self._delete_initial(
                device_name=device_name,
                name=name,
                resource_group_name=resource_group_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs were consumed by the initial call and must not be
        # forwarded to the polling method below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete has no response body; only invoke a custom deserializer.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'deviceName': self._serialize.url("device_name", device_name, 'str'),
            'name': self._serialize.url("name", name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a poller previously saved via continuation_token().
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/triggers/{name}'}  # type: ignore
| |
from collections import OrderedDict, namedtuple
from copy import copy
from datetime import datetime
from classytags.arguments import (
Argument, MultiKeywordArgument, MultiValueArgument,
)
from classytags.core import Options, Tag
from classytags.helpers import AsTag, InclusionTag
from classytags.parser import Parser
from classytags.utils import flatten_context
from classytags.values import ListValue, StringValue
from django import template
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail import mail_managers
from django.db.models import Model
from django.middleware.common import BrokenLinkEmailsMiddleware
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.encoding import force_str, smart_str
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.translation import (
get_language, gettext_lazy as _, override as force_language,
)
from sekizai.templatetags.sekizai_tags import RenderBlock, SekizaiParser
from cms.cache.page import get_page_url_cache, set_page_url_cache
from cms.exceptions import PlaceholderNotFound
from cms.models import (
CMSPlugin, Page, Placeholder as PlaceholderModel, StaticPlaceholder,
)
from cms.plugin_pool import plugin_pool
from cms.toolbar.utils import get_toolbar_from_request
from cms.utils import get_current_site, get_language_from_request, get_site_id
from cms.utils.moderator import use_draft
from cms.utils.page import get_page_queryset
from cms.utils.placeholder import validate_placeholder_name
from cms.utils.urlutils import admin_reverse
# Sentinel for "no value supplied" (distinct from None, which is a valid value).
NULL = object()
# Placeholder declarations reported by template scanning (see get_declaration()).
DeclaredPlaceholder = namedtuple('DeclaredPlaceholder', ['slot', 'inherit'])
DeclaredStaticPlaceholder = namedtuple('DeclaredStaticPlaceholder', ['slot', 'site_bound'])
# Template tag registry for this module.
register = template.Library()
def _get_page_by_untyped_arg(page_lookup, request, site_id):
    """
    The `page_lookup` argument can be of any of the following types:
    - Integer: interpreted as `pk` of the desired page
    - String: interpreted as `reverse_id` of the desired page
    - `dict`: a dictionary containing keyword arguments to find the desired page
    (for instance: `{'pk': 1}`)
    - `Page`: you can also pass a Page object directly, in which case there will be no database lookup.
    - `None`: the current page will be used

    Returns the resolved Page or None; raises Page.DoesNotExist on a failed
    lookup when settings.DEBUG is True.
    """
    if page_lookup is None:
        return request.current_page
    if isinstance(page_lookup, Page):
        # Prefer the request's page object when it is the same page, so
        # request-bound state (toolbar annotations etc.) is kept.
        if request.current_page and request.current_page.pk == page_lookup.pk:
            return request.current_page
        return page_lookup
    # Normalize scalar lookups into queryset keyword arguments.
    if isinstance(page_lookup, str):
        page_lookup = {'reverse_id': page_lookup}
    elif isinstance(page_lookup, int):
        page_lookup = {'pk': page_lookup}
    elif not isinstance(page_lookup, dict):
        raise TypeError('The page_lookup argument can be either a Dictionary, Integer, Page, or String.')
    site = Site.objects._get_site_by_id(site_id)
    try:
        if 'pk' in page_lookup:
            # pk lookups bypass the site-filtered queryset; the draft/public
            # counterpart is then chosen to match the request's edit mode.
            page = Page.objects.select_related('node').get(**page_lookup)
            if request and use_draft(request):
                if page.publisher_is_draft:
                    return page
                else:
                    return page.publisher_draft
            else:
                if page.publisher_is_draft:
                    return page.publisher_public
                else:
                    return page
        else:
            pages = get_page_queryset(site, draft=use_draft(request))
            return pages.select_related('node').get(**page_lookup)
    except Page.DoesNotExist:
        subject = _('Page not found on %(domain)s') % {'domain': site.domain}
        body = _("A template tag couldn't find the page with lookup arguments `%(page_lookup)s\n`. "
                 "The URL of the request was: http://%(host)s%(path)s") \
               % {'page_lookup': repr(page_lookup), 'host': site.domain, 'path': request.path_info}
        if settings.DEBUG:
            raise Page.DoesNotExist(body)
        else:
            # In production, optionally notify the managers of the broken
            # link instead of failing the render.
            mw = settings.MIDDLEWARE
            if getattr(settings, 'SEND_BROKEN_LINK_EMAILS', False):
                mail_managers(subject, body, fail_silently=True)
            elif 'django.middleware.common.BrokenLinkEmailsMiddleware' in mw:
                # Reuse the middleware's ignore rules so we do not spam
                # managers for bot/garbage requests.
                middle = BrokenLinkEmailsMiddleware()
                domain = request.get_host()
                path = request.get_full_path()
                referer = force_str(request.META.get('HTTP_REFERER', ''), errors='replace')
                if not middle.is_ignorable_request(request, path, domain, referer):
                    mail_managers(subject, body, fail_silently=True)
    return None
def _show_placeholder_by_id(context, placeholder_name, reverse_id,
                            lang=None, site=None, use_cache=True):
    """Render the ``placeholder_name`` slot of the page identified by
    ``reverse_id`` (any lookup accepted by ``_get_page_by_untyped_arg``).

    Returns the rendered content, or '' when the page or slot is missing
    (unless settings.DEBUG, in which case a missing slot raises).
    """
    validate_placeholder_name(placeholder_name)
    request = context['request']
    renderer = get_toolbar_from_request(request).get_content_renderer()

    if site:
        # Backwards compatibility: `site` may be a Site object or a raw pk.
        site_id = getattr(site, 'pk', site)
    else:
        site_id = renderer.current_site.pk

    page = _get_page_by_untyped_arg(reverse_id, request, site_id)
    if not page:
        return ''

    try:
        placeholder = page.placeholders.get(slot=placeholder_name)
    except PlaceholderModel.DoesNotExist:
        if settings.DEBUG:
            raise
        return ''

    # Save a query later on: cache the page on the placeholder.
    placeholder.page = page
    return renderer.render_placeholder(
        placeholder=placeholder,
        context=context,
        language=lang,
        page=page,
        editable=False,
        use_cache=use_cache,
    )
def _show_uncached_placeholder_by_id(context, *args, **kwargs):
    """Same as ``_show_placeholder_by_id`` but always bypasses the cache."""
    return _show_placeholder_by_id(context, *args, **dict(kwargs, use_cache=False))
@register.simple_tag(takes_context=True)
def render_extra_menu_items(context, obj, template='cms/toolbar/dragitem_extra_menu.html'):
    """Render the extra toolbar menu items plugins contribute for ``obj``
    (a CMSPlugin or a Placeholder); '' when nothing is contributed."""
    request = context['request']
    toolbar = get_toolbar_from_request(request)
    template = toolbar.templates.get_cached_template(template)

    # Pick the plugin classes and hook name matching the object type.
    if isinstance(obj, CMSPlugin):
        candidates = plugin_pool.plugins_with_extra_menu
        hook = 'get_extra_plugin_menu_items'
    elif isinstance(obj, PlaceholderModel):
        candidates = plugin_pool.plugins_with_extra_placeholder_menu
        hook = 'get_extra_placeholder_menu_items'
    else:
        candidates, hook = (), None

    items = []
    for plugin_class in candidates:
        contributed = getattr(plugin_class, hook)(request, obj)
        if contributed:
            items.extend(contributed)

    if not items:
        return ''
    return template.render({'items': items})
@register.simple_tag(takes_context=True)
def render_plugin(context, plugin):
    """Render a single plugin instance with the current content renderer."""
    renderer = get_toolbar_from_request(context['request']).get_content_renderer()
    return renderer.render_plugin(
        instance=plugin,
        context=context,
        editable=renderer._placeholders_are_editable,
    )
class PageUrl(AsTag):
    """{% page_url %} — resolve the absolute URL of a page given a lookup.

    The ``as varname`` form never raises, regardless of settings.DEBUG;
    the plain form may raise Page.DoesNotExist in DEBUG (via
    ``_get_page_by_untyped_arg``).
    """
    name = 'page_url'
    options = Options(
        Argument('page_lookup'),
        Argument('lang', required=False, default=None),
        Argument('site', required=False, default=None),
        'as',
        Argument('varname', required=False, resolve=False),
    )

    def get_value_for_context(self, context, **kwargs):
        # Design decision agreed with several active members of the
        # django-cms community: the 'as' form of this tag must swallow
        # DoesNotExist regardless of settings.DEBUG, while the plain form
        # keeps the historical behaviour of raising only in DEBUG.
        try:
            return super().get_value_for_context(context, **kwargs)
        except Page.DoesNotExist:
            return ''

    def get_value(self, context, page_lookup, lang, site):
        site_id = get_site_id(site)
        request = context.get('request', False)
        if not request:
            return ''
        if lang is None:
            lang = get_language_from_request(request)

        # Try the URL cache first; on a miss, resolve the page and prime it.
        url = get_page_url_cache(page_lookup, lang, site_id)
        if url is None:
            page = _get_page_by_untyped_arg(page_lookup, request, site_id)
            if page:
                url = page.get_absolute_url(language=lang)
                set_page_url_cache(page_lookup, lang, site_id, url)
        return url or ''
class PlaceholderParser(Parser):
    # Parser for {% placeholder %}: only consume the enclosed block up to
    # {% endplaceholder %} when the 'or' flag is present; otherwise the tag
    # is self-closing.
    def parse_blocks(self):
        # extra_bits may be a resolved ListValue (with .value) or a raw list
        # of tokens, hence the getattr() fallbacks on both levels.
        for bit in getattr(self.kwargs['extra_bits'], 'value', self.kwargs['extra_bits']):
            if getattr(bit, 'value', bit.var.value) == 'or':
                return super().parse_blocks()
        return
class PlaceholderOptions(Options):
    # Options subclass that wires in PlaceholderParser, making the tag's
    # enclosed block optional (only parsed when the 'or' flag is used).
    def get_parser_class(self):
        return PlaceholderParser
class Placeholder(Tag):
    """
    Outputs the content of a named placeholder slot; the admin also scans
    for this tag to generate placeholder input fields dynamically.

    eg: {% placeholder "placeholder_name" %}

        {% placeholder "sidebar" inherit %}

        {% placeholder "footer" inherit or %}
            <a href="/about/">About us</a>
        {% endplaceholder %}

    Keyword arguments:
    name -- the name of the placeholder
    inherit -- optional flag; inherit the content of the same slot from
               parent pages when this page's slot is empty
    or -- optional flag; turns the tag into a block tag whose body is
          rendered when the placeholder is empty
    """
    name = 'placeholder'
    options = PlaceholderOptions(
        Argument('name', resolve=False),
        MultiValueArgument('extra_bits', required=False, resolve=False),
        blocks=[
            ('endplaceholder', 'nodelist'),
        ],
    )

    def render_tag(self, context, name, extra_bits, nodelist=None):
        request = context.get('request')
        if not request:
            return ''
        validate_placeholder_name(name)
        renderer = get_toolbar_from_request(request).get_content_renderer()
        try:
            content = renderer.render_page_placeholder(
                slot=name,
                context=context,
                inherit='inherit' in extra_bits,
                nodelist=nodelist,
            )
        except PlaceholderNotFound:
            content = ''
        if content:
            return content
        # Empty placeholder: fall back to the 'or' block when present.
        if nodelist:
            return nodelist.render(context)
        return content

    def get_declaration(self):
        # Report this tag's slot/flags for template scanning.
        slot = self.kwargs['name'].var.value.strip('"').strip("'")
        flags = self.kwargs['extra_bits']
        inherit = (
            isinstance(flags, ListValue) and
            any(extra.var.value.strip() == 'inherit' for extra in flags)
        )
        return DeclaredPlaceholder(slot=slot, inherit=inherit)
class RenderPluginBlock(InclusionTag):
    """
    Like the CMS's 'render_model_block' tag but targeting a plugin instead
    of a model: wraps a block of markup so it links to the plugin's
    changeform.

    Useful when a plugin is hidden in preview mode but still needs an edit
    entry point, or simply to offer an alternate trigger for its change
    form.
    """
    name = 'render_plugin_block'
    template = "cms/toolbar/plugin.html"
    options = Options(
        Argument('plugin'),
        blocks=[('endrender_plugin_block', 'nodelist')],
    )

    def get_context(self, context, plugin, nodelist):
        rendered = nodelist.render(context)
        context['content'] = rendered
        context['instance'] = plugin
        return context
class PageAttribute(AsTag):
    """
    This template node is used to output an attribute from a page such
    as its title or slug.

    Synopsis
         {% page_attribute "field-name" %}
         {% page_attribute "field-name" as varname %}
         {% page_attribute "field-name" page_lookup %}
         {% page_attribute "field-name" page_lookup as varname %}

    Example
         {# Output current page's page_title attribute: #}
         {% page_attribute "page_title" %}
         {# Output page_title attribute of the page with reverse_id "the_page": #}
         {% page_attribute "page_title" "the_page" %}
         {# Output slug attribute of the page with pk 10: #}
         {% page_attribute "slug" 10 %}
         {# Assign page_title attribute to a variable: #}
         {% page_attribute "page_title" as title %}

    Keyword arguments:
    field-name -- the name of the field to output. Use one of:
    - title
    - menu_title
    - page_title
    - slug
    - meta_description
    - changed_date
    - changed_by

    page_lookup -- lookup argument for Page, if omitted field-name of current page is returned.
    See _get_page_by_untyped_arg() for detailed information on the allowed types and their interpretation
    for the page_lookup argument.

    varname -- context variable name. Output will be added to template context as this variable.
    This argument is required to follow the 'as' keyword.
    """
    name = 'page_attribute'
    options = Options(
        Argument('name', resolve=False),
        Argument('page_lookup', required=False, default=None),
        'as',
        Argument('varname', required=False, resolve=False)
    )

    # Whitelist of attributes that may be read through this tag.
    valid_attributes = [
        "title",
        "slug",
        "meta_description",
        "page_title",
        "menu_title",
        "changed_date",
        "changed_by",
    ]

    def get_value(self, context, name, page_lookup):
        # Idiom fix: use the readable membership test instead of `not ... in`.
        if 'request' not in context:
            return ''
        name = name.lower()
        request = context['request']
        lang = get_language_from_request(request)
        page = _get_page_by_untyped_arg(page_lookup, request, get_site_id(None))
        if page and name in self.valid_attributes:
            # Delegate to the page's get_<name>() accessor.
            func = getattr(page, "get_%s" % name)
            ret_val = func(language=lang, fallback=True)
            if not isinstance(ret_val, datetime):
                # Escape everything except datetimes (they are formatted by
                # the template layer, not rendered as raw HTML).
                ret_val = escape(ret_val)
            return ret_val
        return ''
class CMSToolbar(RenderBlock):
    # Renders the page body block and, when the toolbar should be shown,
    # wraps it with the CMS toolbar structure.
    name = 'cms_toolbar'
    options = Options(
        Argument('name', required=False), # just here so sekizai thinks this is a RenderBlock
        parser_class=SekizaiParser,
    )

    def render_tag(self, context, name, nodelist):
        # Without a request there can be no toolbar; just render the body.
        request = context.get('request')
        if not request:
            return nodelist.render(context)
        toolbar = get_toolbar_from_request(request)
        if toolbar and toolbar.show_toolbar:
            toolbar.init_toolbar(request)
            return toolbar.render_with_structure(context, nodelist)
        return nodelist.render(context)
class CMSEditableObject(InclusionTag):
    """
    Templatetag that links a content extracted from a generic django model
    to the model admin changeform.

    In edit mode the rendered attribute is wrapped with the toolbar plugin
    template (so it can trigger the changeform); otherwise the plain
    content template is used.
    """
    template = 'cms/toolbar/content.html'
    edit_template = 'cms/toolbar/plugin.html'
    name = 'render_model'
    options = Options(
        Argument('instance'),
        Argument('attribute'),
        Argument('edit_fields', default=None, required=False),
        Argument('language', default=None, required=False),
        Argument('filters', default=None, required=False),
        Argument('view_url', default=None, required=False),
        Argument('view_method', default=None, required=False),
        'as',
        Argument('varname', required=False, resolve=False),
    )

    def __init__(self, parser, tokens):
        # Keep the parser around: _get_content() uses it to compile the
        # optional template filters applied to the rendered attribute.
        self.parser = parser
        super().__init__(parser, tokens)

    def _is_editable(self, request):
        # Editable only with a toolbar present and in edit mode.
        return (request and hasattr(request, 'toolbar') and request.toolbar.edit_mode_active)

    def get_template(self, context, **kwargs):
        # Edit mode swaps the plain content template for the plugin wrapper.
        if self._is_editable(context.get('request', None)):
            return self.edit_template
        return self.template

    def render_tag(self, context, **kwargs):
        """
        Overridden from InclusionTag to push / pop context to avoid leaks
        """
        context.push()
        template = self.get_template(context, **kwargs)
        data = self.get_context(context, **kwargs)
        output = render_to_string(template, flatten_context(data)).strip()
        context.pop()
        if kwargs.get('varname'):
            context[kwargs['varname']] = output
            return ''
        else:
            return output

    def _get_editable_context(self, context, instance, language, edit_fields,
                              view_method, view_url, querystring, editmode=True):
        """
        Populate the context with the requested attributes to trigger the changeform
        """
        request = context['request']
        if hasattr(request, 'toolbar'):
            lang = request.toolbar.toolbar_language
        else:
            lang = get_language()
        opts = instance._meta
        # Django < 1.10 creates dynamic proxy model subclasses when fields are
        # deferred using .only()/.exclude(). Make sure to use the underlying
        # model options when it's the case.
        if getattr(instance, '_deferred', False):
            opts = opts.proxy_for_model._meta
        with force_language(lang):
            extra_context = {}
            if edit_fields == 'changelist':
                instance.get_plugin_name = "%s %s list" % (smart_str(_('Edit')), smart_str(opts.verbose_name))
                extra_context['attribute_name'] = 'changelist'
            elif editmode:
                instance.get_plugin_name = "%s %s" % (smart_str(_('Edit')), smart_str(opts.verbose_name))
                if not context.get('attribute_name', None):
                    # Make sure CMS.Plugin object will not clash in the frontend.
                    # BUGFIX: test the *variable* edit_fields, not the string
                    # literal 'edit_fields' (which is always a str and made
                    # the '-'.join() branch unreachable, leaving a list of
                    # field names unjoined in the context).
                    extra_context['attribute_name'] = '-'.join(edit_fields) \
                        if not isinstance(edit_fields, str) else edit_fields
            else:
                instance.get_plugin_name = "%s %s" % (smart_str(_('Add')), smart_str(opts.verbose_name))
                extra_context['attribute_name'] = 'add'
            extra_context['instance'] = instance
            extra_context['generic'] = opts
            # view_method has the precedence and we retrieve the corresponding
            # attribute in the instance class.
            # If view_method refers to a method it will be called passing the
            # request; if it's an attribute, it's stored for later use
            if view_method:
                method = getattr(instance, view_method)
                if callable(method):
                    url_base = method(context['request'])
                else:
                    url_base = method
            else:
                # The default view_url is the default admin changeform for the
                # current instance
                if not editmode:
                    view_url = 'admin:%s_%s_add' % (
                        opts.app_label, opts.model_name)
                    url_base = reverse(view_url)
                elif not edit_fields:
                    if not view_url:
                        view_url = 'admin:%s_%s_change' % (
                            opts.app_label, opts.model_name)
                    if isinstance(instance, Page):
                        url_base = reverse(view_url, args=(instance.pk, language))
                    else:
                        url_base = reverse(view_url, args=(instance.pk,))
                else:
                    if not view_url:
                        view_url = 'admin:%s_%s_edit_field' % (
                            opts.app_label, opts.model_name)
                    if view_url.endswith('_changelist'):
                        url_base = reverse(view_url)
                    else:
                        url_base = reverse(view_url, args=(instance.pk, language))
                    querystring['edit_fields'] = ",".join(context['edit_fields'])
            if editmode:
                extra_context['edit_url'] = "%s?%s" % (url_base, urlencode(querystring))
            else:
                extra_context['edit_url'] = "%s" % url_base
            extra_context['refresh_page'] = True
            # We may be outside the CMS (e.g.: an application which is not attached via Apphook)
            # in this case we may only go back to the home page
            if getattr(context['request'], 'current_page', None):
                extra_context['redirect_on_close'] = context['request'].current_page.get_absolute_url(language)
            else:
                extra_context['redirect_on_close'] = ''
        return extra_context

    def _get_content(self, context, instance, attribute, language, filters):
        """
        Renders the requested attribute
        """
        extra_context = copy(context)
        attr_value = None
        # django-parler style translated models expose lazy getters.
        if hasattr(instance, 'lazy_translation_getter'):
            attr_value = instance.lazy_translation_getter(attribute, '')
        if not attr_value:
            attr_value = getattr(instance, attribute, '')
        extra_context['content'] = attr_value
        # This allow the requested item to be a method, a property or an
        # attribute
        if callable(extra_context['content']):
            if isinstance(instance, Page):
                extra_context['content'] = extra_context['content'](language)
            else:
                extra_context['content'] = extra_context['content'](context['request'])
        if filters:
            # Apply the user-supplied template filters to the content.
            expression = self.parser.compile_filter("content|%s" % (filters))
            extra_context['content'] = expression.resolve(extra_context)
        return extra_context

    def _get_data_context(self, context, instance, attribute, edit_fields,
                          language, filters, view_url, view_method):
        """
        Renders the requested attribute and attach changeform trigger to it

        Uses `_get_empty_context`
        """
        if not attribute:
            return context
        attribute = attribute.strip()
        # ugly-ish: map the convenience Page attribute names to their
        # accessor methods and the fields the changeform should expose.
        if isinstance(instance, Page):
            if attribute == 'title':
                attribute = 'get_title'
                if not edit_fields:
                    edit_fields = 'title'
            elif attribute == 'page_title':
                attribute = 'get_page_title'
                if not edit_fields:
                    edit_fields = 'page_title'
            elif attribute == 'menu_title':
                attribute = 'get_menu_title'
                if not edit_fields:
                    edit_fields = 'menu_title'
            elif attribute == 'titles':
                attribute = 'get_title'
                if not edit_fields:
                    edit_fields = 'title,page_title,menu_title'
                view_url = 'admin:cms_page_edit_title_fields'
        extra_context = copy(context)
        extra_context['attribute_name'] = attribute
        extra_context = self._get_empty_context(extra_context, instance,
                                                edit_fields, language, view_url,
                                                view_method)
        extra_context.update(self._get_content(extra_context, instance, attribute,
                                               language, filters))
        # content is for non-edit template content.html
        # rendered_content is for edit template plugin.html
        # in this templatetag both hold the same content
        extra_context['rendered_content'] = extra_context['content']
        return extra_context

    def _get_empty_context(self, context, instance, edit_fields, language,
                           view_url, view_method, editmode=True):
        """
        Inject in a copy of the context the data requested to trigger the edit.

        `content` and `rendered_content` is emptied.
        """
        if not language:
            language = get_language_from_request(context['request'])
        # This allow the requested item to be a method, a property or an
        # attribute
        if not instance and editmode:
            return context
        extra_context = copy(context)
        # ugly-ish
        if instance and isinstance(instance, Page):
            if edit_fields == 'titles':
                edit_fields = 'title,page_title,menu_title'
                view_url = 'admin:cms_page_edit_title_fields'
            if edit_fields == 'changelist':
                view_url = 'admin:%s_%s_changelist' % (
                    instance._meta.app_label, instance._meta.model_name)
        querystring = OrderedDict((('language', language),))
        if edit_fields:
            extra_context['edit_fields'] = edit_fields.strip().split(",")
        # If the toolbar is not enabled the following part is just skipped: it
        # would cause a perfomance hit for no reason
        if self._is_editable(context.get('request', None)):
            extra_context.update(self._get_editable_context(
                extra_context, instance, language, edit_fields, view_method,
                view_url, querystring, editmode))
        # content is for non-edit template content.html
        # rendered_content is for edit template plugin.html
        # in this templatetag both hold the same content
        extra_context['content'] = ''
        extra_context['rendered_content'] = ''
        return extra_context

    def get_context(self, context, **kwargs):
        """
        Uses _get_data_context to render the requested attributes
        """
        kwargs.pop('varname')
        extra_context = self._get_data_context(context, **kwargs)
        extra_context['render_model'] = True
        return extra_context
class CMSEditableObjectIcon(CMSEditableObject):
    """
    Variant of ``render_model`` whose whole output is just the icon that
    triggers the admin changeform for the given instance.
    """
    name = 'render_model_icon'
    options = Options(
        Argument('instance'),
        Argument('edit_fields', default=None, required=False),
        Argument('language', default=None, required=False),
        Argument('view_url', default=None, required=False),
        Argument('view_method', default=None, required=False),
        'as',
        Argument('varname', required=False, resolve=False),
    )

    def get_context(self, context, **kwargs):
        """Build an empty-content edit context flagged with ``render_model_icon``."""
        kwargs.pop('varname')
        icon_context = self._get_empty_context(context, **kwargs)
        icon_context['render_model_icon'] = True
        return icon_context
class CMSEditableObjectAdd(CMSEditableObject):
    """
    Templatetag that links a content extracted from a generic django model
    to the model admin addform.

    The output of this templatetag is just an icon to trigger the addform.
    """
    name = 'render_model_add'
    options = Options(
        Argument('instance'),
        Argument('language', default=None, required=False),
        Argument('view_url', default=None, required=False),
        Argument('view_method', default=None, required=False),
        'as',
        Argument('varname', required=False, resolve=False),
    )

    def get_context(self, context, instance, language, view_url, view_method,
                    varname):
        """
        Uses _get_empty_context and adds the `render_model_add` variable.
        """
        # An unsaved instance still needs *some* pk for URL reversing; 0 is
        # used as a placeholder.
        if isinstance(instance, Model) and not instance.pk:
            instance.pk = 0
        extra_context = self._get_empty_context(context, instance, None,
                                                language, view_url,
                                                view_method, editmode=False)
        extra_context['render_model_add'] = True
        return extra_context
class CMSEditableObjectAddBlock(CMSEditableObject):
    """
    Templatetag that links arbitrary content to the addform for the specified
    model (based on the provided model instance).
    """
    name = 'render_model_add_block'
    options = Options(
        Argument('instance'),
        Argument('language', default=None, required=False),
        Argument('view_url', default=None, required=False),
        Argument('view_method', default=None, required=False),
        'as',
        Argument('varname', required=False, resolve=False),
        blocks=[('endrender_model_add_block', 'nodelist')],
    )

    def render_tag(self, context, **kwargs):
        """
        Renders the block and then inject the resulting HTML in the template
        context
        """
        context.push()
        template = self.get_template(context, **kwargs)
        data = self.get_context(context, **kwargs)
        # The enclosed block is the rendered content of this tag.
        data['content'] = kwargs['nodelist'].render(data)
        data['rendered_content'] = data['content']
        output = render_to_string(template, flatten_context(data))
        context.pop()
        if kwargs.get('varname'):
            context[kwargs['varname']] = output
            return ''
        else:
            return output

    def get_context(self, context, **kwargs):
        """
        Uses _get_empty_context and adds the `render_model_add` variable.
        """
        instance = kwargs.pop('instance')
        # An unsaved instance still needs *some* pk for URL reversing; 0 is
        # used as a placeholder.
        if isinstance(instance, Model) and not instance.pk:
            instance.pk = 0
        kwargs.pop('varname')
        kwargs.pop('nodelist')
        extra_context = self._get_empty_context(context, instance, None,
                                                editmode=False, **kwargs)
        extra_context['render_model_add'] = True
        return extra_context
class CMSEditableObjectBlock(CMSEditableObject):
    """
    Variant of ``render_model`` whose rendered content is the enclosed
    template block rather than a model attribute.
    """
    name = 'render_model_block'
    options = Options(
        Argument('instance'),
        Argument('edit_fields', default=None, required=False),
        Argument('language', default=None, required=False),
        Argument('view_url', default=None, required=False),
        Argument('view_method', default=None, required=False),
        'as',
        Argument('varname', required=False, resolve=False),
        blocks=[('endrender_model_block', 'nodelist')],
    )

    def render_tag(self, context, **kwargs):
        """Render the enclosed block, then wrap it with the edit template."""
        context.push()
        template = self.get_template(context, **kwargs)
        data = self.get_context(context, **kwargs)
        rendered = kwargs['nodelist'].render(data)
        data['content'] = rendered
        data['rendered_content'] = rendered
        output = render_to_string(template, flatten_context(data))
        context.pop()
        varname = kwargs.get('varname')
        if varname:
            context[varname] = output
            return ''
        return output

    def get_context(self, context, **kwargs):
        """
        Attach the instance to an empty-content edit context; this context
        is what the enclosed nodelist is rendered against.
        """
        kwargs.pop('varname')
        kwargs.pop('nodelist')
        block_context = self._get_empty_context(context, **kwargs)
        block_context['instance'] = kwargs.get('instance')
        block_context['render_model_block'] = True
        return block_context
class StaticPlaceholderNode(Tag):
    # {% static_placeholder "code" [site] %} — renders a placeholder shared
    # across pages, looked up (or created) by its code.
    name = 'static_placeholder'
    options = PlaceholderOptions(
        Argument('code', required=True),
        MultiValueArgument('extra_bits', required=False, resolve=False),
        blocks=[
            ('endstatic_placeholder', 'nodelist'),
        ]
    )

    def render_tag(self, context, code, extra_bits, nodelist=None):
        # Render the static placeholder `code`; fall back to the enclosed
        # block (if any) when the code or the request is unavailable.
        request = context.get('request')
        if not code or not request:
            # an empty string was passed in or the variable is not available in the context
            if nodelist:
                return nodelist.render(context)
            return ''
        toolbar = get_toolbar_from_request(request)
        renderer = toolbar.get_content_renderer()
        if isinstance(code, StaticPlaceholder):
            static_placeholder = code
        else:
            kwargs = {
                'code': code,
                'defaults': {'creation_method': StaticPlaceholder.CREATION_BY_TEMPLATE}
            }
            # The 'site' flag binds the static placeholder to the current
            # site; otherwise a site-independent instance is used.
            if 'site' in extra_bits:
                kwargs['site'] = get_current_site()
            else:
                kwargs['site_id__isnull'] = True
            static_placeholder = StaticPlaceholder.objects.get_or_create(**kwargs)[0]
        content = renderer.render_static_placeholder(
            static_placeholder,
            context=context,
            nodelist=nodelist,
        )
        return content

    def get_declaration(self, context):
        # Report this tag's slot/flags for template scanning.
        flags = self.kwargs['extra_bits']
        slot = self.kwargs['code'].resolve(context)
        if isinstance(flags, ListValue):
            site_bound = any(extra.var.value.strip() == 'site' for extra in flags)
            return DeclaredStaticPlaceholder(slot=slot, site_bound=site_bound)
        return DeclaredStaticPlaceholder(slot=slot, site_bound=False)
class RenderPlaceholder(AsTag):
    """
    Render the content of the plugins contained in a placeholder.

    The result can be assigned to a variable within the template's context
    by using the `as` keyword. It behaves in the same way as the
    `PageAttribute` class, check its docstring for more details.
    """
    name = 'render_placeholder'
    options = Options(
        Argument('placeholder'),
        Argument('width', default=None, required=False),
        'language',
        Argument('language', default=None, required=False),
        'as',
        Argument('varname', required=False, resolve=False)
    )

    def _get_value(self, context, editable=True, **kwargs):
        request = context['request']
        renderer = get_toolbar_from_request(request).get_content_renderer()
        placeholder = kwargs.get('placeholder')
        if not placeholder:
            return ''
        # A string argument is a slot name; resolve it to the model object.
        if isinstance(placeholder, str):
            placeholder = PlaceholderModel.objects.get(slot=placeholder)
        return renderer.render_placeholder(
            placeholder=placeholder,
            context=context,
            language=kwargs.get('language'),
            editable=editable,
            use_cache=not kwargs.get('nocache', False),
            width=kwargs.get('width'),
        )

    def get_value_for_context(self, context, **kwargs):
        # The 'as' form never renders the edit markup.
        return self._get_value(context, editable=False, **kwargs)

    def get_value(self, context, **kwargs):
        return self._get_value(context, **kwargs)
class RenderUncachedPlaceholder(RenderPlaceholder):
    """
    Cache-bypassing version of ``RenderPlaceholder``: it neither reads the
    cached value for the given placeholder nor updates it.
    """
    name = 'render_uncached_placeholder'

    def _get_value(self, context, editable=True, **kwargs):
        # Force nocache regardless of what the caller passed.
        uncached_kwargs = dict(kwargs, nocache=True)
        return super()._get_value(context, editable, **uncached_kwargs)
class EmptyListValue(list, StringValue):
    """
    A list of template variables for easy resolving; may start empty
    (the NULL sentinel) or seeded with a single value.
    """
    def __init__(self, value=NULL):
        list.__init__(self)
        if value is not NULL:
            self.append(value)

    def resolve(self, context):
        return self.clean([item.resolve(context) for item in self])
class MultiValueArgumentBeforeKeywordArgument(MultiValueArgument):
    # A MultiValueArgument that stops consuming tokens at the first
    # keyword-style token (one containing '='), so a MultiKeywordArgument
    # can follow it in the same tag signature.
    sequence_class = EmptyListValue

    def parse(self, parser, token, tagname, kwargs):
        if '=' in token:
            # Keyword argument reached: make sure our (possibly empty) value
            # exists, then return False so the next argument gets the token.
            if self.name not in kwargs:
                kwargs[self.name] = self.sequence_class()
            return False
        return super().parse(
            parser,
            token,
            tagname,
            kwargs
        )
class CMSAdminURL(AsTag):
    """
    ``{% cms_admin_url %}``: reverse a CMS admin view by name, with
    optional positional and keyword arguments.
    """
    name = 'cms_admin_url'
    options = Options(
        Argument('viewname'),
        MultiValueArgumentBeforeKeywordArgument('args', required=False),
        MultiKeywordArgument('kwargs', required=False),
        'as',
        Argument('varname', resolve=False, required=False)
    )

    def get_value(self, context, viewname, args, kwargs):
        """Return the reversed admin URL for ``viewname``."""
        return admin_reverse(viewname, args=args, kwargs=kwargs)
# Register all classytags-based template tags defined in this module.
register.tag('page_attribute', PageAttribute)
register.tag('render_plugin_block', RenderPluginBlock)
register.tag('placeholder', Placeholder)
register.tag('cms_toolbar', CMSToolbar)
register.tag('page_url', PageUrl)
# 'page_id_url' is an alias: the same PageUrl tag class is registered
# under both names.
register.tag('page_id_url', PageUrl)
register.tag('render_model_block', CMSEditableObjectBlock)
register.tag('render_model_add_block', CMSEditableObjectAddBlock)
register.tag('render_model_add', CMSEditableObjectAdd)
register.tag('render_model_icon', CMSEditableObjectIcon)
register.tag('render_model', CMSEditableObject)
# Each show_placeholder variant is registered under two names sharing
# one implementation function.
register.simple_tag(
    _show_placeholder_by_id,
    takes_context=True,
    name='show_placeholder',
)
register.simple_tag(
    _show_placeholder_by_id,
    takes_context=True,
    name='show_placeholder_by_id',
)
register.simple_tag(
    _show_uncached_placeholder_by_id,
    takes_context=True,
    name='show_uncached_placeholder',
)
register.simple_tag(
    _show_uncached_placeholder_by_id,
    takes_context=True,
    name='show_uncached_placeholder_by_id',
)
register.tag('cms_admin_url', CMSAdminURL)
register.tag('render_placeholder', RenderPlaceholder)
register.tag('render_uncached_placeholder', RenderUncachedPlaceholder)
register.tag('static_placeholder', StaticPlaceholderNode)
| |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import numbers
import re
from heat.engine import resources
class InvalidSchemaError(Exception):
    """Raised when a schema or one of its constraints is ill-formed."""
    pass
class Schema(collections.Mapping):
    """
    Schema base class for validating properties or parameters.

    Schema objects are serialisable to dictionaries following a superset of
    the HOT input Parameter schema using dict().

    Serialises to JSON in the form::

        {
            'type': 'list',
            'required': False
            'constraints': [
                {
                    'length': {'min': 1},
                    'description': 'List must not be empty'
                }
            ],
            'schema': {
                '*': {
                    'type': 'string'
                }
            },
            'description': 'An example list property.'
        }
    """

    KEYS = (
        TYPE, DESCRIPTION, DEFAULT, SCHEMA, REQUIRED, CONSTRAINTS,
    ) = (
        'type', 'description', 'default', 'schema', 'required', 'constraints',
    )

    # Keywords for data types; each Schema subclass can define its respective
    # type name used in templates
    TYPE_KEYS = (
        INTEGER_TYPE, STRING_TYPE, NUMBER_TYPE, BOOLEAN_TYPE, MAP_TYPE,
        LIST_TYPE,
    ) = (
        'INTEGER', 'STRING', 'NUMBER', 'BOOLEAN', 'MAP',
        'LIST',
    )

    # Default type names for data types used in templates; can be overridden by
    # subclasses
    TYPES = (
        INTEGER, STRING, NUMBER, BOOLEAN, MAP, LIST,
    ) = (
        'Integer', 'String', 'Number', 'Boolean', 'Map', 'List',
    )

    def __init__(self, data_type, description=None,
                 default=None, schema=None,
                 required=False, constraints=None):
        """Initialise the schema.

        :param data_type: one of the names in TYPES (e.g. 'String').
        :param description: optional human-readable description.
        :param default: optional default value, validated on creation.
        :param schema: a nested Schema (applied to every list member) or
            a mapping of sub-schemas (for maps).
        :param required: whether a value must be supplied.
        :param constraints: list of Constraint objects to apply.
        :raises InvalidSchemaError: on any inconsistency.
        """
        # BUG FIX: 'constraints' previously defaulted to a mutable list
        # ([]), which is shared between every call relying on the
        # default. Default to None and create a fresh list instead.
        if constraints is None:
            constraints = []
        self._len = None
        self.type = data_type
        if self.type not in self.TYPES:
            raise InvalidSchemaError(_('Invalid type (%s)') % self.type)

        self.description = description
        self.required = required

        if isinstance(schema, type(self)):
            # A single sub-schema applies to every index of a list.
            if self.type != self.LIST:
                msg = _('Single schema valid only for '
                        '%(ltype)s, not %(utype)s') % dict(ltype=self.LIST,
                                                           utype=self.type)
                raise InvalidSchemaError(msg)

            self.schema = AnyIndexDict(schema)
        else:
            self.schema = schema
        if self.schema is not None and self.type not in (self.LIST,
                                                         self.MAP):
            msg = _('Schema valid only for %(ltype)s or '
                    '%(mtype)s, not %(utype)s') % dict(ltype=self.LIST,
                                                       mtype=self.MAP,
                                                       utype=self.type)
            raise InvalidSchemaError(msg)

        self.constraints = constraints
        for c in constraints:
            if not self._is_valid_constraint(c):
                err_msg = _('%(name)s constraint '
                            'invalid for %(utype)s') % dict(
                                name=type(c).__name__,
                                utype=self.type)
                raise InvalidSchemaError(err_msg)

        self.default = default
        self._validate_default()

    def _validate_default(self):
        # The default value, when given, must itself pass every constraint.
        if self.default is not None:
            try:
                self.validate_constraints(self.default)
            except (ValueError, TypeError) as exc:
                raise InvalidSchemaError(_('Invalid default '
                                           '%(default)s (%(exc)s)') %
                                         dict(default=self.default, exc=exc))

    def set_default(self, default=None):
        """Set the default value for this Schema object."""
        self.default = default

    def _is_valid_constraint(self, constraint):
        # A constraint advertises the schema types it supports through its
        # 'valid_types' attribute (names such as 'STRING_TYPE').
        valid_types = getattr(constraint, 'valid_types', [])
        return any(self.type == getattr(self, t, None) for t in valid_types)

    @staticmethod
    def str_to_num(value):
        """Convert a string representation of a number into a numeric type."""
        if isinstance(value, numbers.Number):
            return value
        try:
            return int(value)
        except ValueError:
            return float(value)

    def validate_constraints(self, value, context=None):
        """Run every constraint against value, raising on the first failure."""
        for constraint in self.constraints:
            constraint.validate(value, context)

    def __getitem__(self, key):
        # Serialise only keys with meaningful values; absent optional keys
        # raise KeyError so __iter__ skips them.
        if key == self.TYPE:
            return self.type.lower()
        elif key == self.DESCRIPTION:
            if self.description is not None:
                return self.description
        elif key == self.DEFAULT:
            if self.default is not None:
                return self.default
        elif key == self.SCHEMA:
            if self.schema is not None:
                return dict((n, dict(s)) for n, s in self.schema.items())
        elif key == self.REQUIRED:
            return self.required
        elif key == self.CONSTRAINTS:
            if self.constraints:
                return [dict(c) for c in self.constraints]

        raise KeyError(key)

    def __iter__(self):
        for k in self.KEYS:
            try:
                self[k]
            except KeyError:
                pass
            else:
                yield k

    def __len__(self):
        # Cached, since computing the length requires a full iteration.
        if self._len is None:
            self._len = len(list(iter(self)))
        return self._len
class AnyIndexDict(collections.Mapping):
    """
    A Mapping that returns the same value for any integer index.

    Used for storing the schema for a list. When converted to a dictionary,
    it contains a single item with the key '*'.
    """

    ANYTHING = '*'

    def __init__(self, value):
        self.value = value

    def __getitem__(self, key):
        # The wildcard key and any (Python 2) integer index are valid.
        if key == self.ANYTHING or isinstance(key, (int, long)):
            return self.value
        raise KeyError(_('Invalid key %s') % str(key))

    def __iter__(self):
        yield self.ANYTHING

    def __len__(self):
        return 1
class Constraint(collections.Mapping):
    """
    Parent class for constraints on allowable values for a Property.

    Constraints are serialisable to dictionaries following the HOT input
    Parameter constraints schema using dict().
    """

    (DESCRIPTION,) = ('description',)

    def __init__(self, description=None):
        self.description = description

    def __str__(self):
        parts = []
        if self.description:
            parts.append(self.description)
        parts.append(self._str())
        return '\n'.join(parts)

    def validate(self, value, context=None):
        """Check value, raising ValueError with a helpful message if invalid."""
        if self._is_valid(value, context):
            return
        raise ValueError(self.description or self._err_msg(value))

    @classmethod
    def _name(cls):
        # Derive the serialised key from the class name,
        # e.g. 'AllowedValues' -> 'allowed_values'.
        words = re.findall('[A-Z]?[a-z]+', cls.__name__)
        return '_'.join(word.lower() for word in words)

    def __getitem__(self, key):
        if key == self.DESCRIPTION:
            if self.description is None:
                raise KeyError(key)
            return self.description

        if key == self._name():
            return self._constraint()

        raise KeyError(key)

    def __iter__(self):
        if self.description is not None:
            yield self.DESCRIPTION

        yield self._name()

    def __len__(self):
        return 2 if self.description is not None else 1
class Range(Constraint):
    """
    Constrain values within a range.

    Serialises to JSON as::

        {
            'range': {'min': <min>, 'max': <max>},
            'description': <description>
        }
    """

    (MIN, MAX) = ('min', 'max')

    valid_types = (Schema.INTEGER_TYPE, Schema.NUMBER_TYPE,)

    def __init__(self, min=None, max=None, description=None):
        super(Range, self).__init__(description)
        self.min = min
        self.max = max

        # Bounds must be numeric (or absent), and at least one of
        # them must be given.
        for bound in (min, max):
            if not isinstance(bound, (float, int, long, type(None))):
                raise InvalidSchemaError(_('min/max must be numeric'))

        if min is max is None:
            raise InvalidSchemaError(_('range must have min and/or max'))

    def _str(self):
        if self.max is None:
            fmt = _('The value must be at least %(min)s.')
        elif self.min is None:
            fmt = _('The value must be no greater than %(max)s.')
        else:
            fmt = _('The value must be in the range %(min)s to %(max)s.')
        return fmt % self._constraint()

    def _err_msg(self, value):
        return '%s is out of range (min: %s, max: %s)' % (value,
                                                          self.min,
                                                          self.max)

    def _is_valid(self, value, context):
        value = Schema.str_to_num(value)

        if self.min is not None and value < self.min:
            return False

        if self.max is not None and value > self.max:
            return False

        return True

    def _constraint(self):
        bounds = {}
        if self.min is not None:
            bounds[self.MIN] = self.min
        if self.max is not None:
            bounds[self.MAX] = self.max
        return bounds
class Length(Range):
    """
    Constrain the length of values within a range.

    Serialises to JSON as::

        {
            'length': {'min': <min>, 'max': <max>},
            'description': <description>
        }
    """

    valid_types = (Schema.STRING_TYPE, Schema.LIST_TYPE, Schema.MAP_TYPE,)

    def __init__(self, min=None, max=None, description=None):
        super(Length, self).__init__(min, max, description)

        # Unlike Range's numeric bounds, lengths must be whole numbers.
        for bound in (min, max):
            if not isinstance(bound, (int, long, type(None))):
                raise InvalidSchemaError(_('min/max length must be integral'))

    def _str(self):
        if self.max is None:
            fmt = _('The length must be at least %(min)s.')
        elif self.min is None:
            fmt = _('The length must be no greater than %(max)s.')
        else:
            fmt = _('The length must be in the range %(min)s to %(max)s.')
        return fmt % self._constraint()

    def _err_msg(self, value):
        return 'length (%d) is out of range (min: %s, max: %s)' % (len(value),
                                                                   self.min,
                                                                   self.max)

    def _is_valid(self, value, context):
        # Delegate to Range with the value's length.
        return super(Length, self)._is_valid(len(value), context)
class AllowedValues(Constraint):
    """
    Constrain values to a predefined set.

    Serialises to JSON as::

        {
            'allowed_values': [<allowed1>, <allowed2>, ...],
            'description': <description>
        }
    """

    valid_types = (Schema.STRING_TYPE, Schema.INTEGER_TYPE, Schema.NUMBER_TYPE,
                   Schema.BOOLEAN_TYPE, Schema.LIST_TYPE,)

    def __init__(self, allowed, description=None):
        super(AllowedValues, self).__init__(description)
        # A bare string is a Sequence too, but is not acceptable here.
        if isinstance(allowed, basestring) or not isinstance(
                allowed, collections.Sequence):
            raise InvalidSchemaError(_('AllowedValues must be a list'))
        self.allowed = tuple(allowed)

    def _str(self):
        listed = ', '.join(str(entry) for entry in self.allowed)
        return _('Allowed values: %s') % listed

    def _err_msg(self, value):
        allowed = '[%s]' % ', '.join(str(entry) for entry in self.allowed)
        return '"%s" is not an allowed value %s' % (value, allowed)

    def _is_valid(self, value, context):
        # For list values, check if all elements of the list are contained
        # in allowed list.
        if isinstance(value, list):
            return all(v in self.allowed for v in value)
        return value in self.allowed

    def _constraint(self):
        return list(self.allowed)
class AllowedPattern(Constraint):
    """
    Constrain values to a predefined regular expression pattern.

    Serialises to JSON as::

        {
            'allowed_pattern': <pattern>,
            'description': <description>
        }
    """

    valid_types = (Schema.STRING_TYPE,)

    def __init__(self, pattern, description=None):
        super(AllowedPattern, self).__init__(description)
        self.pattern = pattern
        # Pre-compile so each validation only pays the match cost.
        self.match = re.compile(pattern).match

    def _str(self):
        return _('Value must match pattern: %s') % self.pattern

    def _err_msg(self, value):
        return '"%s" does not match pattern "%s"' % (value, self.pattern)

    def _is_valid(self, value, context):
        # The match must span the entire value, not just a prefix.
        result = self.match(value)
        return result is not None and result.end() == len(value)

    def _constraint(self):
        return self.pattern
class CustomConstraint(Constraint):
    """
    A constraint delegating validation to an external class.
    """

    valid_types = (Schema.STRING_TYPE, Schema.INTEGER_TYPE, Schema.NUMBER_TYPE,
                   Schema.BOOLEAN_TYPE, Schema.LIST_TYPE)

    def __init__(self, name, description=None, environment=None):
        super(CustomConstraint, self).__init__(description)
        self.name = name
        self._environment = environment
        # Resolved lazily by the custom_constraint property.
        self._custom_constraint = None

    def _constraint(self):
        return self.name

    @property
    def custom_constraint(self):
        """Look up (once) and return the named constraint, or None."""
        if self._custom_constraint is None:
            if self._environment is None:
                self._environment = resources.global_env()
            constraint_class = self._environment.get_constraint(self.name)
            if constraint_class:
                self._custom_constraint = constraint_class()
        return self._custom_constraint

    def _str(self):
        message = getattr(self.custom_constraint, "message", None)
        return message or _('Value must be of type %s') % self.name

    def _err_msg(self, value):
        constraint = self.custom_constraint
        if constraint is None:
            # The named constraint could not be resolved at all.
            return _('"%(value)s" does not validate %(name)s '
                     '(constraint not found)') % {
                         "value": value, "name": self.name}
        error = getattr(constraint, "error", None)
        if error:
            return error(value)
        return _('"%(value)s" does not validate %(name)s') % {
            "value": value, "name": self.name}

    def _is_valid(self, value, context):
        constraint = self.custom_constraint
        if not constraint:
            return False
        return constraint.validate(value, context)
| |
"""Tests for the kitchen.dashboard app"""
import os
import simplejson as json
from django.test import TestCase
from mock import patch
from kitchen.backends import lchef as chef, plugins
from kitchen.dashboard import views, graphs
from kitchen.dashboard.templatetags import filters
from kitchen.settings import STATIC_ROOT, REPO, ENABLE_PLUGINS
# We need to always regenerate the node data bag in case there were changes
chef.build_node_data_bag()
# Expected number of nodes defined in the test repo fixtures; the API
# tests below assert against this count.
TOTAL_NODES = 10
class TestViews(TestCase):
    """Tests for the dashboard HTML views."""
    # Generated node map image; several tests assert on its existence.
    filepath = os.path.join(STATIC_ROOT, 'img', 'node_map.svg')

    def setUp(self):
        # Start every test without a previously generated map file.
        if os.path.exists(self.filepath):
            os.remove(self.filepath)

    @patch('kitchen.backends.lchef.KITCHEN_DIR', '/badrepopath/')
    def test_list_no_repo(self):
        """Should display a RepoError message when repo dir doesn't exist"""
        resp = self.client.get("/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue("<title>Kitchen</title>" in resp.content)
        expected = "Repo dir doesn't exist at '/badrepopath/'"
        self.assertTrue(expected in resp.content)

    def test_list(self):
        """Should display the default node list when no params are given"""
        resp = self.client.get("/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue("<title>Kitchen</title>" in resp.content)
        self.assertTrue("Environment" in resp.content)
        self.assertTrue("Roles" in resp.content)
        # 3 nodes in the production environment, which is default
        nodes = ["testnode" + str(i) for i in range(1, 4)]
        for node in nodes:
            self.assertTrue(node in resp.content, node)

    def test_list_env(self):
        """Should display proper nodes when an environment is given"""
        resp = self.client.get("/?env=staging&virt=")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue("<td>testnode4</td>" in resp.content)
        self.assertFalse("<td>testnode5</td>" in resp.content)
        self.assertFalse("<td>testnode1</td>" in resp.content)
        self.assertFalse("<td>testnode2</td>" in resp.content)
        self.assertFalse("<td>testnode6</td>" in resp.content)
        # Should not display any nodes
        resp = self.client.get("/?env=testing")
        self.assertEqual(resp.status_code, 200)
        # BUG FIX: this previously passed a generator expression to
        # str.format(), producing a single junk string instead of six
        # node cells, so the loop below asserted nothing useful.
        nodes = ["<td>testnode{0}</td>".format(i) for i in range(1, 7)]
        for node in nodes:
            self.assertTrue(node not in resp.content, node)

    def test_list_roles(self):
        """Should display proper nodes when a role is given"""
        resp = self.client.get("/?env=&roles=dbserver&virt=")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue("<td>testnode3</td>" in resp.content)
        self.assertTrue("<td>testnode5</td>" in resp.content)
        self.assertTrue("<td>testnode1</td>" not in resp.content)
        self.assertTrue("<td>testnode2</td>" not in resp.content)
        self.assertTrue("<td>testnode4</td>" not in resp.content)
        self.assertTrue("<td>testnode6</td>" not in resp.content)

    def test_list_tags(self):
        """Should display tags when selected nodes have tags"""
        resp = self.client.get("/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('btn-custom disabled">ATest</a>' in resp.content)

    def test_list_tags_class(self):
        """Should display tags with css class when selected nodes have tags"""
        resp = self.client.get("/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('tn-custom btn-warning disabled">WIP<' in resp.content)

    def test_list_links(self):
        """Should display links when selected nodes have links"""
        resp = self.client.get("/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('href="http://testnode1:22002"' in resp.content)
        self.assertTrue('src="http://haproxy.1wt.eu/img' in resp.content)
        self.assertTrue('href="http://testnode1/api' in resp.content)

    @patch('kitchen.dashboard.views.SHOW_LINKS', False)
    def test_list_links_disabled(self):
        """Should not display links when SHOW_LINKS is False"""
        resp = self.client.get("/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('href="http://testnode1:22002"' not in resp.content)
        self.assertTrue('src="http://haproxy.1wt.eu/img' not in resp.content)

    def test_virt(self):
        """Should display nodes when repo is correct"""
        resp = self.client.get("/virt/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue("<title>Kitchen</title>" in resp.content)

    def test_virt_tags(self):
        """Should display tags with css class when selected nodes have tags"""
        resp = self.client.get("/virt/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(
            'btn-small btn-custom btn-warning disabled">WIP<' in resp.content)
        self.assertTrue(
            'btn-custom btn-danger disabled">dummy<' in resp.content)

    def test_virt_links(self):
        """Should display links when selected nodes have links"""
        resp = self.client.get("/virt/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('href="http://testnode1:22002"' in resp.content)
        self.assertTrue('src="http://haproxy.1wt.eu/img' in resp.content)
        self.assertTrue('href="http://testnode1/api' in resp.content)

    @patch('kitchen.dashboard.views.SHOW_LINKS', False)
    def test_virt_links_disabled(self):
        """Should not display links when SHOW_LINKS is False"""
        resp = self.client.get("/virt/")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('href="http://testnode1:22002"' not in resp.content)
        self.assertTrue('src="http://haproxy.1wt.eu/img' not in resp.content)

    def test_graph_no_env(self):
        """Should not generate a graph when no environment is selected"""
        resp = self.client.get("/graph/?env=")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue("<title>Kitchen</title>" in resp.content)
        self.assertTrue("Environment" in resp.content)
        self.assertTrue("Please select an environment" in resp.content)
        self.assertFalse('<img src="/static/img/node_map.svg"' in resp.content)
        self.assertTrue("webserver" in resp.content)
        self.assertTrue("staging" in resp.content)
        self.assertFalse(os.path.exists(self.filepath))
        self.assertFalse("Hidden relationships: " in resp.content)

    @patch('kitchen.backends.lchef.KITCHEN_DIR', '/badrepopath/')
    def test_graph_no_nodes(self):
        """Should display an error message when there is a repo error"""
        resp = self.client.get("/graph/")
        self.assertEqual(resp.status_code, 200)
        expected = "Repo dir doesn't exist at '/badrepopath/'"
        self.assertTrue(expected in resp.content)

    def test_graph_graphviz_error(self):
        """Should display an error message when there is a GraphViz error"""
        error_msg = "GraphVizs executables not found"

        def mock_factory():
            # patch.object calls new_callable() and installs its return
            # value, so return the replacement function itself.
            def mock_method(a, b, c):
                return False, error_msg
            return mock_method

        with patch.object(graphs, 'generate_node_map',
                          new_callable=mock_factory):
            resp = self.client.get("/graph/")
            self.assertEqual(resp.status_code, 200)
            self.assertTrue(
                error_msg in resp.content,
                "Did not find expected string '{0}'".format(error_msg))
            self.assertFalse(os.path.exists(self.filepath))

    def test_graph_extra_roles_display(self):
        """Should display an extra roles message when graph detects new relations"""
        resp = self.client.get("/graph/?env=production&roles=dbserver")
        self.assertEqual(resp.status_code, 200)
        self.assertTrue("Hidden relationships: " in resp.content)
        self.assertTrue('<a href="#" data-type="roles" data-name="webserver"'
                        ' class="sidebar_link" id="related_role">webserver'
                        '</a>,' in resp.content)
        self.assertTrue('<a href="#" data-type="roles" data-name="worker" '
                        'class="sidebar_link" id="related_role">worker'
                        '</a>' in resp.content)

    def test_graph_extra_roles_no_display_when_no_roles(self):
        """Should not display an extra roles message when there are no given roles"""
        resp = self.client.get("/graph/?env=production&roles=")
        self.assertEqual(resp.status_code, 200)
        self.assertFalse("Hidden relationships: " in resp.content)
class TestPluginViews(TestCase):
    """Tests for the generic plugin view dispatcher."""

    def _patch_enable_plugins(self, enable_plugins):
        # Install the given plugin list into both the views module and
        # the chef backend.
        views.PLUGINS = plugins.import_plugins(enable_plugins)
        chef.plugins = plugins.import_plugins(enable_plugins)

    def tearDown(self):
        # Restore the configured plugin set after each test.
        self._patch_enable_plugins(ENABLE_PLUGINS)

    def test_plugin_interface_no_plugin(self):
        """Should return a 404 when a requested plugin does not exist"""
        self._patch_enable_plugins([])
        with self.settings(DEBUG=True):
            response = self.client.get("/plugins/name/method")
            self.assertEqual(response.status_code, 404)
            self.assertTrue(
                "Requested plugin 'name' not found" in str(response))

    def test_plugin_interface_missing_method(self):
        """Should return 404 when requested method does not exist"""
        self._patch_enable_plugins(['monitoring'])
        with self.settings(DEBUG=True):
            response = self.client.get("/plugins/monitoring/boo")
            self.assertEqual(response.status_code, 404)
            self.assertTrue("has no method 'boo'" in str(response))

    def test_plugin_interface_wrong_arguments(self):
        """Should return 404 when requested args are wrong"""
        self._patch_enable_plugins(['monitoring'])
        with self.settings(DEBUG=True):
            response = self.client.get("/plugins/monitoring/links")
            self.assertEqual(response.status_code, 404)
            self.assertTrue("returned unexpected result: None" in str(response))

    def test_plugin_interface_no_view(self):
        """Should return 404 when requested method is not a view"""
        self._patch_enable_plugins(['monitoring'])
        with self.settings(DEBUG=True):
            response = self.client.get("/plugins/monitoring/inject")
            self.assertEqual(response.status_code, 404)
            self.assertTrue("is not defined as a view" in str(response))

    def test_plugin_interface(self):
        """Should evaluate view when a requested plugin does exist"""
        self._patch_enable_plugins(['monitoring'])
        response = self.client.get("/plugins/monitoring/links?fqdn=testnode1")
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response['location'], 'http://monitoring.mydomain.com/testnode1')
class TestGraph(TestCase):
    """Tests for node-map generation and role relations (graphs module)."""
    # Fixture data loaded once, at class-definition time, from the test repo.
    nodes = chef.get_nodes_extended()
    roles = chef.get_roles()
    filepath = os.path.join(STATIC_ROOT, 'img', 'node_map.svg')

    def setUp(self):
        # Each test asserts on the generated file, so start clean.
        if os.path.exists(self.filepath):
            os.remove(self.filepath)

    def test_build_links_empty(self):
        """Should not generate links when nodes do not have any defined"""
        data = chef.filter_nodes(self.nodes, 'staging', virt_roles='guest')
        links = graphs._build_links(data)
        expected = {'testnode4': {'role_prefix': 'webserver'}}
        self.assertEqual(links, expected)

    def test_build_links_client_nodes(self):
        """Should generate links when nodes have client_nodes set"""
        data = chef.filter_nodes(self.nodes, 'production',
                                 ['loadbalancer', 'webserver', 'dbserver'],
                                 'guest')
        links = graphs._build_links(data)
        self.maxDiff = None
        # Expected values mirror the node data bag fixtures.
        expected = {
            'testnode1': {'role_prefix': 'loadbalancer'},
            'testnode2': {
                'role_prefix': 'webserver',
                'client_nodes': [('testnode1', 'apache2')]
            },
            'testnode3.mydomain.com': {
                'role_prefix': 'dbserver',
                'client_nodes': [
                    ('testnode2', 'mysql'), ('testnode7', 'mysql')
                ]
            },
            'testnode7': {
                'role_prefix': 'webserver',
                'client_nodes': [('testnode1', 'apache2')]
            }
        }
        self.assertEqual(links, expected)

    def test_build_links_needs_nodes(self):
        """Should generate links when nodes have needs_nodes set"""
        data = chef.filter_nodes(self.nodes, 'production',
                                 ['dbserver', 'worker'],
                                 'guest')
        links = graphs._build_links(data)
        expected = {
            'testnode3.mydomain.com': {'role_prefix': 'dbserver'},
            'testnode8': {
                'role_prefix': 'worker',
                'needs_nodes': [
                    ('testnode3.mydomain.com', 'mysql')
                ]
            }
        }
        self.assertEqual(links, expected)

    def test_build_links_all(self):
        """Should generate all links when nodes define connections"""
        data = chef.filter_nodes(self.nodes, 'production', virt_roles='guest')
        links = graphs._build_links(data)
        # Union of the client_nodes and needs_nodes cases above.
        expected = {
            'testnode1': {'role_prefix': 'loadbalancer'},
            'testnode2': {
                'role_prefix': 'webserver',
                'client_nodes': [('testnode1', 'apache2')]
            },
            'testnode3.mydomain.com': {
                'role_prefix': 'dbserver',
                'client_nodes': [
                    ('testnode2', 'mysql'), ('testnode7', 'mysql')
                ]
            },
            'testnode7': {
                'role_prefix': 'webserver',
                'client_nodes': [('testnode1', 'apache2')]
            },
            'testnode8': {
                'role_prefix': 'worker',
                'needs_nodes': [
                    ('testnode3.mydomain.com', 'mysql')
                ]
            }
        }
        self.assertEqual(links, expected)

    def test_generate_empty_graph(self):
        """Should generate an empty graph when no nodes are given"""
        data = chef.filter_nodes(self.nodes, 'badenv')
        graphs.generate_node_map(data, self.roles)
        self.assertTrue(os.path.exists(self.filepath))
        # An empty SVG should stay tiny; size is used as a proxy for
        # graph content throughout these tests.
        size = os.path.getsize(self.filepath)
        max_size = 650
        self.assertTrue(size < max_size,
                        "Size greater than {0}: {1}".format(max_size, size))

    def test_generate_small_graph(self):
        """Should generate a graph when some nodes are given"""
        data = chef.filter_nodes(self.nodes, 'staging', virt_roles='guest')
        graphs.generate_node_map(data, self.roles)
        self.assertTrue(os.path.exists(self.filepath))
        size = os.path.getsize(self.filepath)
        #min_size = 3000 # png
        #max_size = 4000 # png
        min_size = 1000 # svg
        max_size = 1500 # svg
        self.assertTrue(size > min_size and size < max_size,
                        "Size not between {0} and {1}: {2}".format(
                            min_size, max_size, size))

    def test_generate_connected_graph(self):
        """Should generate a connected graph when connected nodes are given"""
        data = chef.filter_nodes(self.nodes, 'production', virt_roles='guest')
        graphs.generate_node_map(data, self.roles)
        self.assertTrue(os.path.exists(self.filepath))
        size = os.path.getsize(self.filepath)
        # Graph size with connections
        #min_size = 20000 # png
        #max_size = 23000 # png
        min_size = 5000 # svg
        max_size = 7000 # svg
        self.assertTrue(size > min_size and size < max_size,
                        "Size not between {0} and {1}: {2}".format(
                            min_size, max_size, size))

    def test_graph_timeout(self):
        """Should display an error message when GraphViz exceeds the timeout
        """
        error_msg = "Unable to draw graph, timeout exceeded"
        data = chef.filter_nodes(self.nodes, 'production')
        # NOTE(review): Thread.isAlive was removed in Python 3.9; this
        # patch target only exists if GraphThread defines it -- confirm.
        with patch('kitchen.dashboard.graphs.GraphThread.isAlive',
                   return_value=True):
            with patch('kitchen.dashboard.graphs.GraphThread.kill',
                       return_value=True):
                success, msg = graphs.generate_node_map(data, self.roles)
                self.assertFalse(success)
                self.assertTrue(error_msg in msg)

    def test_get_role_relations(self):
        """Should return role dependencies when roles with relationships are given"""
        prod_nodes = chef.filter_nodes(self.nodes, 'production')
        extra_roles = graphs.get_role_relations('production', ['dbserver'],
                                                prod_nodes)
        self.assertEqual(extra_roles, ['webserver', 'worker'])
        extra_roles = graphs.get_role_relations('production', ['loadbalancer'],
                                                prod_nodes)
        self.assertEqual(extra_roles, ['webserver'])
        extra_roles = graphs.get_role_relations('production', ['worker'],
                                                prod_nodes)
        self.assertEqual(extra_roles, ['dbserver'])
        extra_roles = graphs.get_role_relations('production', ['webserver'],
                                                prod_nodes)
        self.assertEqual(extra_roles, ['dbserver', 'loadbalancer'])

    def test_get_role_relations_empty_when_roles(self):
        """Should obtain no roles when the given roles have no extra relationships"""
        stag_nodes = chef.filter_nodes(self.nodes, 'staging')
        extra_roles = graphs.get_role_relations('staging', ['webserver'],
                                                stag_nodes)
        self.assertEqual(extra_roles, [])

    def test_get_role_relations_empty_when_no_roles(self):
        """Should obtain no roles when a role filter list is not given"""
        prod_nodes = chef.filter_nodes(self.nodes, 'staging')
        extra_roles = graphs.get_role_relations('production', [], prod_nodes)
        self.assertEqual(extra_roles, [])
class TestAPI(TestCase):
    """Tests for the JSON API endpoints (/api/roles, /api/nodes)."""

    def test_get_roles_not_allowed(self):
        """Should return NOT ALLOWED when HTTP method is not GET"""
        response = self.client.post("/api/roles")
        self.assertEqual(response.status_code, 405)

    def test_get_roles(self):
        """Should return all available roles in JSON format"""
        response = self.client.get("/api/roles")
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 4)
        existing_roles = ['loadbalancer', 'webserver', 'dbserver', 'worker']
        for role in data:
            self.assertTrue(role['name'] in existing_roles,
                            role['name'] + " is not an existing role name")
        self.assertEqual(data[0]['name'], 'dbserver')
        self.assertEqual(data[0]['run_list'], ['recipe[mysql::server]'])

    def test_get_nodes_not_allowed(self):
        """Should return NOT ALLOWED when HTTP method is not GET"""
        response = self.client.post("/api/nodes")
        self.assertEqual(response.status_code, 405)

    def test_get_nodes(self):
        """Should return all available nodes when no parameters are given"""
        response = self.client.get("/api/nodes")
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), TOTAL_NODES)
        self.assertTrue('role' not in data[0])  # not extended

    def test_get_nodes_env_filter(self):
        """Should return filtered nodes when filter parameters are given"""
        response = self.client.get("/api/nodes/?env=staging")
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 1)
        expected_node = {
            'chef_environment': 'staging', 'ipaddress': '4.4.4.4',
            'virtualization': {'role': 'guest'},
            'run_list': ['role[webserver]'], 'name': 'testnode4'
        }
        self.assertEqual(data[0], expected_node)

    def test_get_nodes_extended(self):
        """Should return available nodes with extended info when extended=true
        """
        response = self.client.get("/api/nodes/?extended=true")
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), TOTAL_NODES)
        self.assertTrue('role' in data[0])

    def test_get_nodes_extended_env_filter(self):
        """Should return filtered nodes when filter parameters are given"""
        response = self.client.get("/api/nodes/?env=staging&extended=true")
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(len(data), 1)
        self.assertEqual(data[0]['chef_environment'], 'staging')
        self.assertEqual(data[0]['role'], ['webserver'])

    def test_get_node(self):
        """Should return a node hash when node name is found"""
        response = self.client.get("/api/nodes/testnode6")
        self.assertEqual(response.status_code, 200)
        expected_response = {
            'name': 'testnode6', 'run_list': ['role[webserver]']
        }
        self.assertEqual(json.loads(response.content), expected_response)

    def test_get_node_not_found(self):
        """Should return NOT FOUND when node name does not exist"""
        response = self.client.get("/api/nodes/node_does_not_exist")
        self.assertEqual(response.status_code, 404)
class TestTemplateTags(TestCase):
# Representative Chef run list mixing role[...] and recipe[...] entries;
# the filter tests below extract the role and recipe names from it.
run_list = [
    "role[dbserver]", "recipe[haproxy]", "role[webserver]",
    "role[worker]", "recipe[apache2]", "role[loadbalancer]",
    "recipe[mysql::server]"
]
def test_role_filter_with_valid_runlist(self):
    """Should return a role list when a valid run list is given"""
    roles = filters.get_role_list(self.run_list)
    remaining = ['dbserver', 'webserver', 'worker', 'loadbalancer']
    self.assertEqual(len(roles), len(remaining))
    # Every returned role must be expected, and none may repeat.
    for entry in roles:
        self.assertTrue(entry in remaining)
        remaining.remove(entry)
    self.assertEqual(len(remaining), 0)
def test_role_filter_with_runlist_and_exclude_node_prefix(self):
    """Should exclude roles with prefix when EXCLUDE_ROLE_PREFIX is set"""
    role_to_filter = REPO['EXCLUDE_ROLE_PREFIX'] + "_filterthisrole"
    roles = filters.get_role_list(self.run_list + [role_to_filter])
    remaining = ['dbserver', 'webserver', 'worker', 'loadbalancer']
    # The prefixed role must be filtered out; everything else kept.
    self.assertEqual(len(roles), len(remaining))
    for entry in roles:
        self.assertTrue(entry in remaining)
        remaining.remove(entry)
    self.assertEqual(len(remaining), 0)
def test_role_filter_with_wrong_runlist(self):
"""Should return an empty role list when an invalid run list is given
"""
role_list = filters.get_role_list(None)
self.assertEqual(role_list, [])
def test_recipe_filter_with_valid_runlist(self):
"""Should return a recipe list when a valid run list is given"""
recipe_list = filters.get_recipe_list(self.run_list)
expected_recipes = ['haproxy', 'apache2', 'mysql::server']
self.assertEqual(len(recipe_list), len(expected_recipes))
for recipe in recipe_list:
self.assertTrue(recipe in expected_recipes)
expected_recipes.remove(recipe)
self.assertEqual(len(expected_recipes), 0)
def test_recipe_filter_with_wrong_runlist(self):
"""Should return an empty recipe list when an invalid run list is given
"""
recipe_list = filters.get_recipe_list(None)
self.assertEqual(recipe_list, [])
def test_memory_GB_filter_with_valid_string(self):
"""Should return memory in GB when given value is in kB"""
memory = filters.get_memory_in_GB('7124000kB')
self.assertEqual(memory, '7 GB')
memory = filters.get_memory_in_GB('1024000kB')
self.assertEqual(memory, '1 GB')
def test_memory_GB_filter_with_invalid_string(self):
"""Should return an empty string when an invalid value is given"""
invalid_strings = ['java', '1024000KFC', 'itsover9000', '12', '1']
for string in invalid_strings:
memory = filters.get_memory_in_GB(string)
self.assertEqual(memory, '')
def test_memory_GB_filter_with_empty_string(self):
"""Should return an empty string when None is given"""
self.assertEqual(filters.get_memory_in_GB(None), '')
def test_get_tag_class(self):
"""Should return a css class when tag has a defined class"""
self.assertEqual(filters.get_tag_class("WIP"), "btn-warning")
self.assertEqual(filters.get_tag_class("dummy"), "btn-danger")
specific_tags = ["Node", "Node1", "NodeSpecial3", "Node*", "Node_"]
for specific_tag in specific_tags:
self.assertEqual(filters.get_tag_class(specific_tag), "btn-info")
def test_get_tag_class_no_class(self):
"""Should return an empty string when tag has no defined class"""
undefined_tags = ["foo", "Nod", "DUMMY", "Dummy", "wip", "WiP",
"node", "NoDe", "", "12", "", "_-_"]
for undefined_tag in undefined_tags:
self.assertEqual(filters.get_tag_class(undefined_tag), "")
def test_get_link_no_url(self):
"""Should return an empty string when link has no url
"""
link = {"title": "foo"}
self.assertEqual(filters.get_link(link), "")
def test_get_link_no_img_no_title(self):
"""Should return an empty string when link has no img and no title key
"""
link = {"url": "https://github.com/edelight/kitchen"}
self.assertEqual(filters.get_link(link), "")
def test_get_link_no_img(self):
"""Should return a text link when link has an empty img field"""
link = {
"url": "https://github.com/edelight/kitchen",
"title": "api", "img": ""
}
expected_link_html = ('<a href="{0}" target="_blank" title="{1}"'
' class="btn btn-small btn-custom">{1}'
'</a>'.format(link['url'], link['title']))
self.assertEqual(filters.get_link(link), expected_link_html)
def test_get_link(self):
"""Should return a text link when link has an empty img field"""
link = {
"url": "https://github.com/edelight/kitchen",
"title": "api", "img": "http://foo/bar.png",
}
expected_link_html = ('<a href="{url}" target="_blank" title="{title}"'
' class="btn-custom"><img width="25"'
' height="25" src="{img}"></a>'.format(**link))
self.assertEqual(filters.get_link(link), expected_link_html)
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import gflags
import httplib2
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.tools import run
import os, datetime
##########################Vars
# OAuth 2.0 client credentials as copied from the Google APIs Console.
# NOTE(review): secrets and API keys are hard-coded in source control —
# consider loading them from the environment or a config file instead.
client_id='924881045523-kc7leju7role0too3k4itlo864eprl1u.apps.googleusercontent.com'
client_secret='rqZxYuy0Cht37rJ0GSZ05YoY'
user_agent='Python2.7'
BROWSERdeveloperKey='AIzaSyBHozNPRDnVkdPo_JlP_4TLbNrJIsd3bQ4'
SERVERdeveloperKey='AIzaSyDe68JsIJK5O5Cqd-tAVGqaSeHqcFCNPh8'
# The script runs out of /usr/local/batchRunScripts and keeps its OAuth
# token cache (calendar.dat) next to it.
batchRunScripts = os.path.join('/usr/local', 'batchRunScripts')
os.chdir(batchRunScripts)
#here = os.path.dirname(os.path.realpath(os.path.curdir))
storage_file = os.path.join(batchRunScripts, 'calendar.dat')
############################
FLAGS = gflags.FLAGS
# The client_id and client_secret are copied from the API Access tab on
# the Google APIs Console
FLOW = OAuth2WebServerFlow(
    client_id=client_id,
    client_secret=client_secret,
    scope='https://www.googleapis.com/auth/calendar',
    user_agent=user_agent)
# To disable the local server feature, uncomment the following line:
FLAGS.auth_local_webserver = False
# If the Credentials don't exist or are invalid, run through the native client
# flow. The Storage object will ensure that if successful the good
# Credentials will get written back to a file.
storage = Storage(storage_file)
credentials = storage.get()
if credentials is None or credentials.invalid == True:
    # Interactive OAuth flow; writes the refreshed token back to storage.
    credentials = run(FLOW, storage)
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with our good Credentials.
http = httplib2.Http()
http = credentials.authorize(http)
# Ids of the two target Google calendars; the "production complete by
# source" calendar is the one wiped and repopulated below.
prodcompletebysourcecal = 'pbr49v778pi6n9cqark5rd1dns@group.calendar.google.com'
marketplvendorscmpcal = 'qfr9hv1frv22ovnk5hoptsqj38@group.calendar.google.com'
calendarId = prodcompletebysourcecal
#calendarId = 'https://www.google.com/calendar/feeds/k8oohvl27sq3u0odgafpbmdl6s@group.calendar.google.com/'
# calendarId = 'https://www.google.com/calendar/feeds/k8oohvl27sq3u0odgafpbmdl6s@group.calendar.google.com/private-cfbcfde94d17e48fbf1f824a8536e0ba/basic'
# Build a service object for interacting with the API.
service = build(serviceName='calendar', version='v3', http=http)
# Getting All Event Ids
page_token = None
events_list = []
# Collect the ids of every event currently on the target calendar,
# following nextPageToken until the listing is exhausted.
def _collect_event_ids():
    """Return the ids of all events on `calendarId`, paging through the API."""
    ids = []
    token = None
    while True:
        page = service.events().list(calendarId=calendarId, pageToken=token).execute()
        for item in page['items']:
            ids.append(item['id'])
        token = page.get('nextPageToken')
        if not token:
            break
    return ids

try:
    events_list = _collect_event_ids()
except Exception:
    # One retry from a clean slate.  The bare `except:` this replaces also
    # swallowed KeyboardInterrupt/SystemExit, and the old retry appended
    # onto the partially-filled list, duplicating already-collected ids.
    events_list = _collect_event_ids()
page_token = None
###########################
#### END AUTH SECTION
###########################
#############################Get Data to send to API###########################
#from python.gcal_functions import stillcomplete, fashioncomplete, sql_query_production_numbers
def sql_query_production_numbers():
    """Query the production Oracle DB for daily style-completion totals.

    Returns:
        dict: keyed by completion date string ('YYYY-MM-DD'); each value is
        a dict with the keys:
            'total'       -- total style completions that day
            'total_asset' -- completions whose SKU starts with '8'
            'total_ff'    -- fulfilled-by-Bluefly completions ('101' SKUs)
            'total_swids' -- vendor-dropship completions ('102' SKUs)
            'total_mpl'   -- marketplace completions ('103' SKUs)
    """
    import sqlalchemy

    # NOTE(review): read-only DB credentials are hard-coded here; they
    # should be loaded from configuration rather than committed to source.
    orcl_engine = sqlalchemy.create_engine('oracle+cx_oracle://prod_team_ro:9thfl00r@borac101-vip.l3.bluefly.com:1521/bfyprd11')
    connection = orcl_engine.connect()
    try:
        ### Get Production Complete Totals and Build Dict of key value pairs
        complete_by_vendor = """SELECT POMGR.SUPPLIER_INGEST_STYLE.VENDOR_ID as "vendor_name",
            COUNT(DISTINCT POMGR.SUPPLIER_INGEST_IMAGE.STYLE_ID) AS "style_count",
            TO_CHAR(POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT, 'YYYY-MM-DD') as "production_complete_dt"
            FROM POMGR.SUPPLIER_INGEST_STYLE
            RIGHT JOIN POMGR.SUPPLIER_INGEST_SKU
            ON POMGR.SUPPLIER_INGEST_SKU.STYLE_ID = POMGR.SUPPLIER_INGEST_STYLE.ID
            LEFT JOIN POMGR.SUPPLIER_INGEST_IMAGE
            ON POMGR.SUPPLIER_INGEST_STYLE.ID = POMGR.SUPPLIER_INGEST_IMAGE.STYLE_ID
            RIGHT JOIN POMGR.PRODUCT_COLOR
            ON POMGR.SUPPLIER_INGEST_STYLE.BLUEFLY_PRODUCT_COLOR = POMGR.PRODUCT_COLOR.ID
            WHERE POMGR.SUPPLIER_INGEST_STYLE.VENDOR_ID LIKE '%%'
            AND POMGR.SUPPLIER_INGEST_IMAGE.URL IS NOT NULL
            AND TO_CHAR(POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT, 'YYYY-MM-DD') IS NOT NULL
            GROUP BY POMGR.SUPPLIER_INGEST_STYLE.VENDOR_ID,
            TO_CHAR(POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT, 'YYYY-MM-DD')
            ORDER BY 3 DESC"""
        prodcomplete = connection.execute(complete_by_vendor)
        # NOTE(review): this per-vendor dict is built but never returned or
        # otherwise used (only complete_by_source_dict is).  Kept for now to
        # preserve behavior, but it costs a full query and is a candidate
        # for removal or for being returned alongside the other dict.
        marketpl_prodcomplete_dict = {}
        for row in prodcomplete:
            tmp_dict = {}
            tmp_dict['total'] = row['style_count']
            tmp_dict['vendor_name'] = row['vendor_name']
            marketpl_prodcomplete_dict[row['production_complete_dt']] = tmp_dict

        ### Get Complete by inventory Source Totals and Build Dict of key value pairs
        querymake_complete_by_source = '''SELECT "DateComplete", "TotalCompletions", "Asset-Total", "Asset-Apparel", "Asset-Non-Appr", "FullFill-Bluefly", "FF-Vendor-Dropship", "Marketplace" FROM(
            with data as (
            select
            max(distinct POMGR.SKU.PRODUCT_COLOR_ID) as "DATA_COLORSTYLE",
            max(distinct POMGR.SKU.sku_code) as "DATA_SKU_CODE"
            FROM POMGR.SKU
            LEFT JOIN POMGR.PRODUCT_COLOR
            ON POMGR.SKU.PRODUCT_COLOR_ID = POMGR.PRODUCT_COLOR.ID
            group by POMGR.SKU.PRODUCT_COLOR_ID
            order by POMGR.SKU.PRODUCT_COLOR_ID desc
            )
            SELECT
            TO_CHAR(POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT, 'YYYY-MM-DD') AS "DateComplete",
            COUNT(distinct POMGR.PRODUCT_COLOR.ID) AS "TotalCompletions",
            SUM(
            CASE
            WHEN ( data.DATA_SKU_CODE LIKE '8%'
            )
            THEN 1
            ELSE 0
            END) "Asset-Total",
            SUM(
            CASE
            WHEN ( data.DATA_SKU_CODE LIKE '8%' and POMGR.PRODUCT_FOLDER_DENORMALIZED.PATH LIKE '%/%men/apparel/%'
            )
            THEN 1
            ELSE 0
            END) "Asset-Apparel",
            SUM(
            CASE
            WHEN (SUBSTR(data.DATA_SKU_CODE, 1,1) = '8' and POMGR.PRODUCT_FOLDER_DENORMALIZED.PATH LIKE '%/non apparel/%'
            )
            THEN 1
            ELSE 0
            END) "Asset-Non-Appr",
            SUM(
            CASE
            WHEN data.DATA_SKU_CODE LIKE '101%'
            THEN 1
            ELSE 0
            END) "FullFill-Bluefly",
            SUM(
            CASE
            WHEN data.DATA_SKU_CODE LIKE '102%'
            THEN 1
            ELSE 0
            END) "FF-Vendor-Dropship",
            SUM(
            CASE
            WHEN data.DATA_SKU_CODE LIKE '103%'
            THEN 1
            ELSE 0
            END) "Marketplace"
            FROM
            POMGR.PRODUCT_COLOR
            LEFT JOIN POMGR.PRODUCT
            ON
            POMGR.PRODUCT_COLOR.PRODUCT_ID = POMGR.PRODUCT.ID
            LEFT JOIN POMGR.PRODUCT_FOLDER
            ON
            POMGR.PRODUCT.PRODUCT_FOLDER_ID = POMGR.PRODUCT_FOLDER.ID
            LEFT JOIN POMGR.PRODUCT_FOLDER_DENORMALIZED
            ON
            POMGR.PRODUCT_FOLDER.ID = POMGR.PRODUCT_FOLDER_DENORMALIZED.ID
            LEFT JOIN data
            ON
            data.DATA_COLORSTYLE = POMGR.PRODUCT_COLOR.ID
            WHERE
            TO_CHAR(POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT, 'YYYY') = '2014'
            GROUP BY
            TO_CHAR(POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT, 'YYYY-MM-DD'),
            TO_CHAR(POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT, 'YYYY')
            ORDER BY
            "DateComplete" DESC,
            TO_CHAR(POMGR.PRODUCT_COLOR.PRODUCTION_COMPLETE_DT, 'YYYY') DESC
            )'''
        complete_by_source = connection.execute(querymake_complete_by_source)
        complete_by_source_dict = {}
        for row in complete_by_source:
            tmp_dict = {}
            tmp_dict['total'] = row['TotalCompletions']
            tmp_dict['total_asset'] = row['Asset-Total']
            tmp_dict['total_ff'] = row['FullFill-Bluefly']
            tmp_dict['total_swids'] = row['FF-Vendor-Dropship']
            tmp_dict['total_mpl'] = row['Marketplace']
            complete_by_source_dict[row['DateComplete']] = tmp_dict
        return complete_by_source_dict
    finally:
        # Always release the connection, even when a query raises.
        connection.close()
#############################END Funcx Section##########################
######RUN######
## Delete all Events by ID prior to reup
for event in events_list:
service.events().delete(calendarId=calendarId, eventId=event).execute()
print "Deleted all Events"
#calendar_list_entry = service.calendarList().get(calendarId='primary').execute()
#cals = service.calendarList().get(calendarId='john.bragato@gmail.com').execute()
#############################Get Data Functions to Query DB###########################
complete_by_source_dict = sql_query_production_numbers()
########################################## = stillcomplete = fashioncomplete()
lotsofdicts = [complete_by_source_dict]
##############################################################################
for iterdict in lotsofdicts:
count = 0
for k,value in iterdict.iteritems():
import datetime, time
#for value in [v]:
try:
total = value['total']
total_asset = value['total_asset']
total_ff = value['total_ff']
total_swids = value['total_swids']
total_mpl = value['total_mpl']
colorId = '8'
calendarId = prodcompletebysourcecal
summary = "Total: {0}".format(total)
description = """
Total: {0}
\tAssets: {1}
\tFullfill: {2}
\tSWI: {3}
\tMarketplace: {4}
""".format(total,total_asset,total_ff,total_swids,total_mpl)
location = 'Home'
except KeyError:
value['vendor_name']
total = value['total']
vendor_name = value['vendor_name']
colorId = '8'
calendarId = marketplvendorscmpcal
description = """Vendor: {0}\n\tTotalComplete: {1}""".format(vendor_name,total)
location = 'Home'
summary = description #"Vendor: {0}".format(vendor_name)
if type(k) == str:
k = datetime.datetime.strptime(k,'%Y-%d-%M')
try:
event = {
'summary': summary,
'description': description,
'location': location,
'colorId': colorId,
'start': {
'date': "{0:%Y-%m-%d}".format(k.date()),
'timeZone': 'America/New_York'
},
'end': {
'date': "{0:%Y-%m-%d}".format(k.date()),
'timeZone': 'America/New_York'
},
# 'recurrence': [
# 'RRULE:FREQ=WEEKLY;UNTIL=20110701T100000-07:00',
# ],
# 'attendees': [
# {
# 'email': 'james.hoetker@bluefly.com',
# # Other attendee's data...
# },
# # ...
# ],
}
print event
created_event = service.events().insert(calendarId=calendarId, body=event).execute()
print created_event['id']
except OSError:
print 'ERROR {}'.format(event)
pass
| |
# Copyright 2013-2020 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import functools
import typing as T
from pathlib import Path
from .. import mlog
from .. import mesonlib
from ..environment import Environment
from .base import DependencyException, SystemDependency
from .pkgconfig import PkgConfigDependency
from .misc import threads_factory
if T.TYPE_CHECKING:
from ..environment import Properties
# On windows 3 directory layouts are supported:
# * The default layout (versioned) installed:
# - $BOOST_ROOT/include/boost-x_x/boost/*.hpp
# - $BOOST_ROOT/lib/*.lib
# * The non-default layout (system) installed:
# - $BOOST_ROOT/include/boost/*.hpp
# - $BOOST_ROOT/lib/*.lib
# * The pre-built binaries from sf.net:
# - $BOOST_ROOT/boost/*.hpp
# - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1
#
# Note that we should also try to support:
# mingw-w64 / Windows : libboost_<module>-mt.a (location = <prefix>/mingw64/lib/)
# libboost_<module>-mt.dll.a
#
# The `modules` argument accept library names. This is because every module that
# has libraries to link against also has multiple options regarding how to
# link. See for example:
# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html
# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html
# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html
# **On Unix**, official packaged versions of boost libraries follow the following schemes:
#
# Linux / Debian: libboost_<module>.so -> libboost_<module>.so.1.66.0
# Linux / Red Hat: libboost_<module>.so -> libboost_<module>.so.1.66.0
# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0
# Win / Cygwin: libboost_<module>.dll.a (location = /usr/lib)
# libboost_<module>.a
# cygboost_<module>_1_64.dll (location = /usr/bin)
# Win / VS: boost_<module>-vc<ver>-mt[-gd]-<arch>-1_67.dll (location = C:/local/boost_1_67_0)
# Mac / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /usr/local/lib)
# Mac / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /opt/local/lib)
#
# Its not clear that any other abi tags (e.g. -gd) are used in official packages.
#
# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag.
#
# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36".
# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming)
# However, its not clear that any Unix distribution follows this scheme.
# Furthermore, the boost documentation for unix above uses examples from windows like
# "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at windows.
#
# We follow the following strategy for finding modules:
# A) Detect potential boost root directories (uses also BOOST_ROOT env var)
# B) Foreach candidate
# 1. Look for the boost headers (boost/version.hpp)
# 2. Find all boost libraries
# 2.1 Add all libraries in lib*
# 2.2 Filter out non boost libraries
# 2.3 Filter the remaining libraries based on the meson requirements (static/shared, etc.)
# 2.4 Ensure that all libraries have the same boost tag (and are thus compatible)
# 3. Select the libraries matching the requested modules
@functools.total_ordering
class BoostIncludeDir():
    """A boost include directory together with its decoded version number."""

    def __init__(self, path: Path, version_int: int):
        self.path = path
        self.version_int = version_int
        # BOOST_VERSION encodes the version as major*100000 + minor*100 + patch.
        major, remainder = divmod(self.version_int, 100000)
        minor, patch = divmod(remainder, 100)
        self.version = f'{major}.{minor}.{patch}'
        self.version_lib = f'{major}_{minor}'

    def __repr__(self) -> str:
        return f'<BoostIncludeDir: {self.version} -- {self.path}>'

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, BoostIncludeDir):
            return NotImplemented
        return (self.version_int, self.path) < (other.version_int, other.path)
@functools.total_ordering
class BoostLibraryFile():
    """A single boost library file on disk, with all naming tags decoded.

    The constructor parses the file name (suffixes, version digits, and the
    '-'-separated boost name tags) into attributes such as static/mt/debug,
    arch, toolset and version_lib.
    """

    # Python libraries are special because of the included
    # minor version in the module name.
    boost_python_libs = ['boost_python', 'boost_numpy']
    reg_python_mod_split = re.compile(r'(boost_[a-zA-Z]+)([0-9]*)')

    reg_abi_tag = re.compile(r'^s?g?y?d?p?n?$')
    reg_ver_tag = re.compile(r'^[0-9_]+$')

    def __init__(self, path: Path):
        self.path = path
        self.name = self.path.name

        # Initialize default properties
        self.static = False
        self.toolset = ''
        self.arch = ''
        self.version_lib = ''
        self.mt = True

        self.runtime_static = False
        self.runtime_debug = False
        self.python_debug = False
        self.debug = False
        self.stlport = False
        self.deprecated_iostreams = False

        # Post process the library name
        name_parts = self.name.split('.')
        self.basename = name_parts[0]
        self.suffixes = name_parts[1:]
        # Pure-digit suffixes are the trailing so-version (e.g. .so.1.66.0).
        self.vers_raw = [x for x in self.suffixes if x.isdigit()]
        self.suffixes = [x for x in self.suffixes if not x.isdigit()]
        self.nvsuffix = '.'.join(self.suffixes)  # Used for detecting the library type
        self.nametags = self.basename.split('-')
        self.mod_name = self.nametags[0]
        if self.mod_name.startswith('lib'):
            self.mod_name = self.mod_name[3:]

        # Set library version if possible
        if len(self.vers_raw) >= 2:
            self.version_lib = '{}_{}'.format(self.vers_raw[0], self.vers_raw[1])

        # Detecting library type
        if self.nvsuffix in ['so', 'dll', 'dll.a', 'dll.lib', 'dylib']:
            self.static = False
        elif self.nvsuffix in ['a', 'lib']:
            self.static = True
        else:
            raise DependencyException(f'Unable to process library extension "{self.nvsuffix}" ({self.path})')

        # boost_.lib is the dll import library
        if self.basename.startswith('boost_') and self.nvsuffix == 'lib':
            self.static = False

        # Process tags
        tags = self.nametags[1:]
        # Filter out the python version tag and fix modname
        if self.is_python_lib():
            tags = self.fix_python_name(tags)
        if not tags:
            return

        # Without any tags mt is assumed, however, an absence of mt in the name
        # with tags present indicates that the lib was built without mt support
        self.mt = False
        for i in tags:
            if i == 'mt':
                self.mt = True
            elif len(i) == 3 and i[1:] in ['32', '64']:
                self.arch = i
            elif BoostLibraryFile.reg_abi_tag.match(i):
                self.runtime_static = 's' in i
                self.runtime_debug = 'g' in i
                self.python_debug = 'y' in i
                self.debug = 'd' in i
                self.stlport = 'p' in i
                self.deprecated_iostreams = 'n' in i
            elif BoostLibraryFile.reg_ver_tag.match(i):
                self.version_lib = i
            else:
                self.toolset = i

    def __repr__(self) -> str:
        return f'<LIB: {self.abitag} {self.mod_name:<32} {self.path}>'

    def __lt__(self, other: object) -> bool:
        if isinstance(other, BoostLibraryFile):
            return (
                self.mod_name, self.static, self.version_lib, self.arch,
                not self.mt, not self.runtime_static,
                not self.debug, self.runtime_debug, self.python_debug,
                self.stlport, self.deprecated_iostreams,
                self.name,
            ) < (
                other.mod_name, other.static, other.version_lib, other.arch,
                not other.mt, not other.runtime_static,
                not other.debug, other.runtime_debug, other.python_debug,
                other.stlport, other.deprecated_iostreams,
                other.name,
            )
        return NotImplemented

    def __eq__(self, other: object) -> bool:
        if isinstance(other, BoostLibraryFile):
            return self.name == other.name
        return NotImplemented

    def __hash__(self) -> int:
        return hash(self.name)

    @property
    def abitag(self) -> str:
        """Compact one-line summary of all decoded tags (used in debug logs)."""
        abitag = ''
        abitag += 'S' if self.static else '-'
        abitag += 'M' if self.mt else '-'
        abitag += ' '
        abitag += 's' if self.runtime_static else '-'
        abitag += 'g' if self.runtime_debug else '-'
        abitag += 'y' if self.python_debug else '-'
        abitag += 'd' if self.debug else '-'
        abitag += 'p' if self.stlport else '-'
        abitag += 'n' if self.deprecated_iostreams else '-'
        abitag += ' ' + (self.arch or '???')
        abitag += ' ' + (self.toolset or '?')
        abitag += ' ' + (self.version_lib or 'x_xx')
        return abitag

    def is_boost(self) -> bool:
        return any([self.name.startswith(x) for x in ['libboost_', 'boost_']])

    def is_python_lib(self) -> bool:
        return any([self.mod_name.startswith(x) for x in BoostLibraryFile.boost_python_libs])

    def fix_python_name(self, tags: T.List[str]) -> T.List[str]:
        # Handle the boost_python naming madness.
        # See https://github.com/mesonbuild/meson/issues/4788 for some distro
        # specific naming variations.
        other_tags = []  # type: T.List[str]

        # Split the current modname into the base name and the version
        m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
        cur_name = m_cur.group(1)
        cur_vers = m_cur.group(2)

        # Update the current version string if the new version string is longer
        def update_vers(new_vers: str) -> None:
            nonlocal cur_vers
            new_vers = new_vers.replace('_', '')
            new_vers = new_vers.replace('.', '')
            if not new_vers.isdigit():
                return
            if len(new_vers) > len(cur_vers):
                cur_vers = new_vers

        for i in tags:
            if i.startswith('py'):
                update_vers(i[2:])
            elif i.isdigit():
                update_vers(i)
            # BUG FIX: `i[0].isdigit` was missing its call parentheses, so the
            # bound method was always truthy and any 3+ char tag with a '.' in
            # second position and a digit in third was swallowed here instead
            # of being preserved as a regular tag.
            elif len(i) >= 3 and i[0].isdigit() and i[2].isdigit() and i[1] == '.':
                update_vers(i)
            else:
                other_tags += [i]
        self.mod_name = cur_name + cur_vers
        return other_tags

    def mod_name_matches(self, mod_name: str) -> bool:
        if self.mod_name == mod_name:
            return True
        if not self.is_python_lib():
            return False

        # For python libs, match by base name + version prefix instead.
        m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
        m_arg = BoostLibraryFile.reg_python_mod_split.match(mod_name)

        if not m_cur or not m_arg:
            return False

        if m_cur.group(1) != m_arg.group(1):
            return False

        cur_vers = m_cur.group(2)
        arg_vers = m_arg.group(2)

        # Always assume python 2 if nothing is specified
        if not arg_vers:
            arg_vers = '2'

        return cur_vers.startswith(arg_vers)

    def version_matches(self, version_lib: str) -> bool:
        # If no version tag is present, assume that it fits
        if not self.version_lib or not version_lib:
            return True
        return self.version_lib == version_lib

    def arch_matches(self, arch: str) -> bool:
        # If no version tag is present, assume that it fits
        if not self.arch or not arch:
            return True
        return self.arch == arch

    def vscrt_matches(self, vscrt: str) -> bool:
        # If no vscrt tag present, assume that it fits  ['/MD', '/MDd', '/MT', '/MTd']
        if not vscrt:
            return True
        if vscrt in ['/MD', '-MD']:
            return not self.runtime_static and not self.runtime_debug
        elif vscrt in ['/MDd', '-MDd']:
            return not self.runtime_static and self.runtime_debug
        elif vscrt in ['/MT', '-MT']:
            return (self.runtime_static or not self.static) and not self.runtime_debug
        elif vscrt in ['/MTd', '-MTd']:
            return (self.runtime_static or not self.static) and self.runtime_debug

        mlog.warning(f'Boost: unknow vscrt tag {vscrt}. This may cause the compilation to fail. Please consider reporting this as a bug.', once=True)
        return True

    def get_compiler_args(self) -> T.List[str]:
        """Per-module compile flags (static/shared, single/multi threading)."""
        args = []  # type: T.List[str]
        if self.mod_name in boost_libraries:
            libdef = boost_libraries[self.mod_name]  # type: BoostLibrary
            if self.static:
                args += libdef.static
            else:
                args += libdef.shared
            if self.mt:
                args += libdef.multi
            else:
                args += libdef.single
        return args

    def get_link_args(self) -> T.List[str]:
        return [self.path.as_posix()]
class BoostDependency(SystemDependency):
def __init__(self, environment: Environment, kwargs: T.Dict[str, T.Any]) -> None:
    """Set up boost detection state and kick off root detection.

    Detection order: machine-file properties (boost_includedir /
    boost_librarydir / boost_root) take precedence; otherwise fall back
    to detect_roots() (.pc files plus filesystem search).
    """
    super().__init__('boost', environment, kwargs, language='cpp')
    buildtype = environment.coredata.get_option(mesonlib.OptionKey('buildtype'))
    assert isinstance(buildtype, str)
    # Debug builds accept debug-tagged libraries (see filter_libraries).
    self.debug = buildtype.startswith('debug')
    self.multithreading = kwargs.get('threading', 'multi') == 'multi'

    self.boost_root = None  # type: T.Optional[Path]
    # Only filter on static vs shared when the user set it explicitly.
    self.explicit_static = 'static' in kwargs

    # Extract and validate modules
    self.modules = mesonlib.extract_as_list(kwargs, 'modules')  # type: T.List[str]
    for i in self.modules:
        if not isinstance(i, str):
            raise DependencyException('Boost module argument is not a string.')
        if i.startswith('boost_'):
            raise DependencyException('Boost modules must be passed without the boost_ prefix')

    self.modules_found = []    # type: T.List[str]
    self.modules_missing = []  # type: T.List[str]

    # Do we need threads?
    if 'thread' in self.modules:
        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
            self.is_found = False
            return

    # Try figuring out the architecture tag
    self.arch = environment.machines[self.for_machine].cpu_family
    # Unknown cpu families map to None, which disables arch filtering.
    self.arch = boost_arch_map.get(self.arch, None)

    # First, look for paths specified in a machine file
    props = self.env.properties[self.for_machine]
    if any(x in self.env.properties[self.for_machine] for x in
           ['boost_includedir', 'boost_librarydir', 'boost_root']):
        self.detect_boost_machine_file(props)
        return

    # Finally, look for paths from .pc files and from searching the filesystem
    self.detect_roots()
def check_and_set_roots(self, roots: T.List[Path], use_system: bool) -> None:
    """Probe each candidate root in order and adopt the first that works."""
    for candidate in mesonlib.OrderedSet(roots):
        # 1. Look for the boost headers (boost/version.hpp)
        mlog.debug(f'Checking potential boost root {candidate.as_posix()}')
        # Prefer the newer versions
        inc_dirs = sorted(self.detect_inc_dirs(candidate), reverse=True)
        if not inc_dirs:
            # Early abort when boost is not found
            continue

        lib_dirs = self.detect_lib_dirs(candidate, use_system)
        self.is_found = self.run_check(inc_dirs, lib_dirs)
        if self.is_found:
            self.boost_root = candidate
            break
def detect_boost_machine_file(self, props: 'Properties') -> None:
    """Detect boost from machine-file (or environment-defaulted) paths.

    Requires either both boost_includedir and boost_librarydir, or
    boost_root; anything else is a configuration error.
    """
    # XXX: if we had a TypedDict we woudn't need this
    incdir = props.get('boost_includedir')
    assert incdir is None or isinstance(incdir, str)
    libdir = props.get('boost_librarydir')
    assert libdir is None or isinstance(libdir, str)

    if incdir and libdir:
        include_path = Path(incdir)
        library_path = Path(libdir)

        if not (include_path.is_absolute() and library_path.is_absolute()):
            raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute')

        mlog.debug('Trying to find boost with:')
        mlog.debug(f'  - boost_includedir = {include_path}')
        mlog.debug(f'  - boost_librarydir = {library_path}')

        return self.detect_split_root(include_path, library_path)

    if incdir or libdir:
        raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)')

    rootdir = props.get('boost_root')
    # It shouldn't be possible to get here without something in boost_root
    assert rootdir

    root_paths = [Path(x) for x in mesonlib.stringlistify(rootdir)]
    if any(not x.is_absolute() for x in root_paths):
        raise DependencyException('boost_root path given in machine file must be absolute')

    self.check_and_set_roots(root_paths, use_system=False)
def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
    """Try to satisfy all requested modules from the candidate dirs.

    On success sets version, compile_args and link_args and returns True.
    modules_found / modules_missing are overwritten on every include-dir
    attempt, so on failure they reflect the *last* attempt.
    """
    mlog.debug('  - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs]))
    mlog.debug('  - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))

    # 2. Find all boost libraries
    libs = []  # type: T.List[BoostLibraryFile]
    for i in lib_dirs:
        # The first library dir containing boost-like files wins.
        libs = self.detect_libraries(i)
        if libs:
            mlog.debug(f'  - found boost library dir: {i}')
            # mlog.debug('  - raw library list:')
            # for j in libs:
            #     mlog.debug('    - {}'.format(j))
            break
    libs = sorted(set(libs))

    modules = ['boost_' + x for x in self.modules]
    for inc in inc_dirs:
        mlog.debug(f'  - found boost {inc.version} include dir: {inc.path}')
        f_libs = self.filter_libraries(libs, inc.version_lib)

        mlog.debug('  - filtered library list:')
        for j in f_libs:
            mlog.debug(f'    - {j}')

        # 3. Select the libraries matching the requested modules
        not_found = []  # type: T.List[str]
        selected_modules = []  # type: T.List[BoostLibraryFile]
        for mod in modules:
            found = False
            for l in f_libs:
                # f_libs is sorted best-first, so the first match wins.
                if l.mod_name_matches(mod):
                    selected_modules += [l]
                    found = True
                    break
            if not found:
                not_found += [mod]

        # log the result
        mlog.debug('  - found:')
        comp_args = []  # type: T.List[str]
        link_args = []  # type: T.List[str]
        for j in selected_modules:
            c_args = j.get_compiler_args()
            l_args = j.get_link_args()
            mlog.debug('    - {:<24} link={} comp={}'.format(j.mod_name, str(l_args), str(c_args)))
            comp_args += c_args
            link_args += l_args

        # Deduplicate the collected flags.
        comp_args = list(set(comp_args))
        link_args = list(set(link_args))

        # Report module names with the 'boost_' prefix stripped back off.
        self.modules_found = [x.mod_name for x in selected_modules]
        self.modules_found = [x[6:] for x in self.modules_found]
        self.modules_found = sorted(set(self.modules_found))
        self.modules_missing = not_found
        self.modules_missing = [x[6:] for x in self.modules_missing]
        self.modules_missing = sorted(set(self.modules_missing))

        # if we found all modules we are done
        if not not_found:
            self.version = inc.version
            self.compile_args = ['-I' + inc.path.as_posix()]
            self.compile_args += comp_args
            self.compile_args += self._extra_compile_args()
            self.compile_args = list(mesonlib.OrderedSet(self.compile_args))
            self.link_args = link_args
            mlog.debug(f'  - final compile args: {self.compile_args}')
            mlog.debug(f'  - final link args: {self.link_args}')
            return True

        # in case we missed something log it and try again
        mlog.debug('  - NOT found:')
        for mod in not_found:
            mlog.debug(f'    - {mod}')
    return False
def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]:
    """Collect every boost header dir under `root` that has a version.hpp."""
    candidates = [root / 'boost', root / 'include' / 'boost']

    inc_root = root / 'include'
    if inc_root.is_dir():
        for entry in inc_root.iterdir():
            # Versioned layout: include/boost-x_x/boost/*.hpp
            if entry.is_dir() and entry.name.startswith('boost-'):
                candidates.append(entry / 'boost')

    headers = [c / 'version.hpp' for c in candidates if c.is_dir()]
    return [self._include_dir_from_version_header(h) for h in headers if h.exists()]
def detect_lib_dirs(self, root: Path, use_system: bool) -> T.List[Path]:
    """Find candidate library directories under `root`."""
    # First check the system include paths. Only consider those within the
    # given root path
    if use_system:
        sys_candidates = [Path(x) for x in self.clib_compiler.get_library_dirs(self.env)]
        sys_candidates = [x.resolve() for x in sys_candidates if x.exists()]
        sys_candidates = [x for x in sys_candidates if mesonlib.path_is_in_root(x, root)]
        sys_candidates = list(mesonlib.OrderedSet(sys_candidates))
        if sys_candidates:
            return sys_candidates

    # No system include paths were found --> fall back to manually looking
    # for library dirs in root
    dirs = [d for d in root.iterdir() if d.is_dir() and d.name.startswith('lib')]

    # Some distros put libraries not directly inside /usr/lib but in
    # /usr/lib/x86_64-linux-gnu
    subdirs = [s for d in dirs for s in d.iterdir()
               if s.is_dir() and s.name.endswith('-linux-gnu')]

    # Filter out paths that don't match the target arch to avoid finding
    # the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110
    if not self.arch:
        return dirs + subdirs

    arch_list_32 = ['32', 'i386']
    arch_list_64 = ['64']

    raw_list = dirs + subdirs
    no_arch = [x for x in raw_list
               if not any(y in x.name for y in arch_list_32 + arch_list_64)]

    matching_arch = []  # type: T.List[Path]
    if '32' in self.arch:
        matching_arch = [x for x in raw_list if any(y in x.name for y in arch_list_32)]
    elif '64' in self.arch:
        matching_arch = [x for x in raw_list if any(y in x.name for y in arch_list_64)]

    return sorted(matching_arch) + sorted(no_arch)
def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]:
    """Narrow *libs* down to the set compatible with the current build.

    Filters by static/shared preference, multithreading mode, boost
    version, target architecture, MSVC runtime tag, and finally locks
    onto a single ABI tag so the result is internally consistent.
    """
    # MSVC is very picky with the library tags: work out the expected
    # runtime (vscrt) tag up front; any lookup failure leaves it empty.
    vscrt = ''
    try:
        crt_val = self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value
        buildtype = self.env.coredata.options[mesonlib.OptionKey('buildtype')].value
        vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0]
    except (KeyError, IndexError, AttributeError):
        pass

    def compatible(lib):
        # Static/shared preference only binds when explicitly requested.
        if self.explicit_static and lib.static != self.static:
            return False
        if lib.mt != self.multithreading:
            return False
        if not lib.version_matches(lib_vers):
            return False
        if not lib.arch_matches(self.arch):
            return False
        if not lib.vscrt_matches(vscrt):
            return False
        # Only link to import libraries, never the DLL itself.
        return lib.nvsuffix != 'dll'

    remaining = [x for x in libs if compatible(x)]
    # Only filter by debug when we are building in release mode. Debug
    # libraries are automatically preferred through sorting otherwise.
    if not self.debug:
        remaining = [x for x in remaining if not x.debug]
    if not remaining:
        return []
    # Take the abitag from the first library and filter by it. This
    # ensures that we have a set of libraries that are always compatible.
    first_tag = remaining[0].abitag
    return [x for x in remaining if x.abitag == first_tag]
def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]:
    """Collect every boost library file found directly inside *libdir*."""
    found = set()  # type: T.Set[BoostLibraryFile]
    prefixes = ('libboost_', 'boost_')
    for entry in libdir.iterdir():
        # Only plain files whose name looks like a boost library qualify.
        if entry.is_file() and entry.name.startswith(prefixes):
            found.add(BoostLibraryFile(entry.resolve()))
    # BoostLibraryFile does its own sanity checking; drop non-boost files.
    return [x for x in found if x.is_boost()]
def detect_split_root(self, inc_dir: Path, lib_dir: Path) -> None:
    """Run the boost check for explicitly given include and library dirs.

    Accepts *inc_dir* pointing either at the directory containing
    version.hpp directly or at its parent (containing ``boost/``).
    """
    boost_inc_dir = None
    for candidate in (inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp'):
        if candidate.is_file():
            boost_inc_dir = self._include_dir_from_version_header(candidate)
            break
    if not boost_inc_dir:
        self.is_found = False
        return
    self.is_found = self.run_check([boost_inc_dir], [lib_dir])
def detect_roots(self) -> None:
    """Gather candidate boost root directories and run the check.

    Roots are collected, in priority order, from: a ``boost.pc``
    pkg-config file (Conan installs), the compiler's default include
    paths, and finally platform-specific well-known install locations.
    """
    roots = []  # type: T.List[Path]
    # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
    # allows BoostDependency to find boost from Conan. See #5438
    try:
        boost_pc = PkgConfigDependency('boost', self.env, {'required': False})
        if boost_pc.found():
            boost_root = boost_pc.get_pkgconfig_variable('prefix', {'default': None})
            if boost_root:
                roots += [Path(boost_root)]
    except DependencyException:
        pass
    # Add roots from system paths: each default include dir is e.g.
    # /usr/include, so its *parent* is the candidate root.
    inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()]
    inc_paths = [x.parent for x in inc_paths if x.exists()]
    inc_paths = [x.resolve() for x in inc_paths]
    roots += inc_paths
    # Add system paths
    if self.env.machines[self.for_machine].is_windows():
        # Where boost built from source actually installs it
        c_root = Path('C:/Boost')
        if c_root.is_dir():
            roots += [c_root]
        # Where boost documentation says it should be
        prog_files = Path('C:/Program Files/boost')
        # Where boost prebuilt binaries are
        local_boost = Path('C:/local')
        candidates = []  # type: T.List[Path]
        if prog_files.is_dir():
            candidates += [*prog_files.iterdir()]
        if local_boost.is_dir():
            candidates += [*local_boost.iterdir()]
        roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
    else:
        tmp = []  # type: T.List[Path]
        # Add some default system paths
        tmp += [Path('/opt/local')]
        tmp += [Path('/usr/local/opt/boost')]
        tmp += [Path('/usr/local')]
        tmp += [Path('/usr')]
        # Cleanup paths
        tmp = [x for x in tmp if x.is_dir()]
        tmp = [x.resolve() for x in tmp]
        roots += tmp
    self.check_and_set_roots(roots, use_system=True)
def log_details(self) -> str:
    """One-line summary of found and missing boost modules for the log."""
    parts = []
    if self.modules_found:
        parts.append('found: ' + ', '.join(self.modules_found))
    if self.modules_missing:
        parts.append('missing: ' + ', '.join(self.modules_missing))
    return ' | '.join(parts)
def log_info(self) -> str:
    """Return the detected boost root as a POSIX path, or '' when unset."""
    return self.boost_root.as_posix() if self.boost_root else ''
def _include_dir_from_version_header(self, hfile: Path) -> BoostIncludeDir:
    """Build a BoostIncludeDir from a boost ``version.hpp`` file.

    The version is extracted with a regex. Using clib_compiler.get_define
    would also work, but is slower (the compiler has to be invoked) and
    overkill since the layout of the header is always the same.
    """
    assert hfile.exists()
    raw = hfile.read_text(encoding='utf-8')
    match = re.search(r'#define\s+BOOST_VERSION\s+([0-9]+)', raw)
    if match:
        # hfile is <root>/boost/version.hpp -> include dir is two levels up.
        return BoostIncludeDir(hfile.parents[1], int(match.group(1)))
    mlog.debug(f'Failed to extract version information from {hfile}')
    return BoostIncludeDir(hfile.parents[1], 0)
def _extra_compile_args(self) -> T.List[str]:
# BOOST_ALL_DYN_LINK should not be required with the known defines below
return ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking
# See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming
# See https://mesonbuild.com/Reference-tables.html#cpu-families
# Map from meson cpu_family names to the architecture tag boost embeds in
# versioned library file names (e.g. libboost_foo-vc142-mt-x64-1_73.lib).
boost_arch_map = {
    'aarch64': 'a64',
    'arc': 'a32',
    'arm': 'a32',
    'ia64': 'i64',
    'mips': 'm32',
    'mips64': 'm64',
    'ppc': 'p32',
    'ppc64': 'p64',
    'sparc': 's32',
    'sparc64': 's64',
    'x86': 'x32',
    'x86_64': 'x64',
}
#### ---- BEGIN GENERATED ---- ####
# #
# Generated with tools/boost_names.py:
# - boost version: 1.73.0
# - modules found: 159
# - libraries found: 43
#
class BoostLibrary():
    """Compile options for a single boost binary library.

    NOTE: emitted by tools/boost_names.py (see the GENERATED banner
    above) -- change the generator, not this class, if edits are needed.
    """
    def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
        # Compile args to use when linking shared vs. static, and when
        # threading is single vs. multi.
        self.name = name
        self.shared = shared
        self.static = static
        self.single = single
        self.multi = multi
class BoostModule():
    """Metadata for one boost module (name, key, description, libraries).

    NOTE: emitted by tools/boost_names.py (see the GENERATED banner
    above) -- change the generator, not this class, if edits are needed.
    """
    def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
        self.name = name
        self.key = key
        self.desc = desc
        # Names of the binary libraries this module requires (may be empty
        # for header-only modules).
        self.libs = libs
# Dict of all known boost binary libraries with their additional compile options.
boost_libraries = {
'boost_atomic': BoostLibrary(
name='boost_atomic',
shared=['-DBOOST_ATOMIC_DYN_LINK=1'],
static=['-DBOOST_ATOMIC_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_chrono': BoostLibrary(
name='boost_chrono',
shared=['-DBOOST_CHRONO_DYN_LINK=1'],
static=['-DBOOST_CHRONO_STATIC_LINK=1'],
single=['-DBOOST_CHRONO_THREAD_DISABLED'],
multi=[],
),
'boost_container': BoostLibrary(
name='boost_container',
shared=['-DBOOST_CONTAINER_DYN_LINK=1'],
static=['-DBOOST_CONTAINER_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_context': BoostLibrary(
name='boost_context',
shared=['-DBOOST_CONTEXT_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_contract': BoostLibrary(
name='boost_contract',
shared=['-DBOOST_CONTRACT_DYN_LINK'],
static=['-DBOOST_CONTRACT_STATIC_LINK'],
single=['-DBOOST_CONTRACT_DISABLE_THREADS'],
multi=[],
),
'boost_coroutine': BoostLibrary(
name='boost_coroutine',
shared=['-DBOOST_COROUTINES_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_date_time': BoostLibrary(
name='boost_date_time',
shared=['-DBOOST_DATE_TIME_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_exception': BoostLibrary(
name='boost_exception',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_fiber': BoostLibrary(
name='boost_fiber',
shared=['-DBOOST_FIBERS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_fiber_numa': BoostLibrary(
name='boost_fiber_numa',
shared=['-DBOOST_FIBERS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_filesystem': BoostLibrary(
name='boost_filesystem',
shared=['-DBOOST_FILESYSTEM_DYN_LINK=1'],
static=['-DBOOST_FILESYSTEM_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_graph': BoostLibrary(
name='boost_graph',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_iostreams': BoostLibrary(
name='boost_iostreams',
shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_locale': BoostLibrary(
name='boost_locale',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_log': BoostLibrary(
name='boost_log',
shared=['-DBOOST_LOG_DYN_LINK=1'],
static=[],
single=['-DBOOST_LOG_NO_THREADS'],
multi=[],
),
'boost_log_setup': BoostLibrary(
name='boost_log_setup',
shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'],
static=[],
single=['-DBOOST_LOG_NO_THREADS'],
multi=[],
),
'boost_math_c99': BoostLibrary(
name='boost_math_c99',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_c99f': BoostLibrary(
name='boost_math_c99f',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_c99l': BoostLibrary(
name='boost_math_c99l',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1': BoostLibrary(
name='boost_math_tr1',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1f': BoostLibrary(
name='boost_math_tr1f',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1l': BoostLibrary(
name='boost_math_tr1l',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_mpi': BoostLibrary(
name='boost_mpi',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_nowide': BoostLibrary(
name='boost_nowide',
shared=['-DBOOST_NOWIDE_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_prg_exec_monitor': BoostLibrary(
name='boost_prg_exec_monitor',
shared=['-DBOOST_TEST_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_program_options': BoostLibrary(
name='boost_program_options',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_random': BoostLibrary(
name='boost_random',
shared=['-DBOOST_RANDOM_DYN_LINK'],
static=[],
single=[],
multi=[],
),
'boost_regex': BoostLibrary(
name='boost_regex',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_serialization': BoostLibrary(
name='boost_serialization',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_addr2line': BoostLibrary(
name='boost_stacktrace_addr2line',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_backtrace': BoostLibrary(
name='boost_stacktrace_backtrace',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_basic': BoostLibrary(
name='boost_stacktrace_basic',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_noop': BoostLibrary(
name='boost_stacktrace_noop',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_windbg': BoostLibrary(
name='boost_stacktrace_windbg',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_windbg_cached': BoostLibrary(
name='boost_stacktrace_windbg_cached',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_system': BoostLibrary(
name='boost_system',
shared=['-DBOOST_SYSTEM_DYN_LINK=1'],
static=['-DBOOST_SYSTEM_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_test_exec_monitor': BoostLibrary(
name='boost_test_exec_monitor',
shared=['-DBOOST_TEST_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_thread': BoostLibrary(
name='boost_thread',
shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'],
static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'],
single=[],
multi=[],
),
'boost_timer': BoostLibrary(
name='boost_timer',
shared=['-DBOOST_TIMER_DYN_LINK=1'],
static=['-DBOOST_TIMER_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_type_erasure': BoostLibrary(
name='boost_type_erasure',
shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'],
static=[],
single=[],
multi=[],
),
'boost_unit_test_framework': BoostLibrary(
name='boost_unit_test_framework',
shared=['-DBOOST_TEST_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_wave': BoostLibrary(
name='boost_wave',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_wserialization': BoostLibrary(
name='boost_wserialization',
shared=[],
static=[],
single=[],
multi=[],
),
}
# #
#### ---- END GENERATED ---- ####
| |
"""HTTP websocket server functional tests"""
import asyncio
import pytest
import aiohttp
from aiohttp import helpers, web
from aiohttp._ws_impl import WSMsgType
@asyncio.coroutine
def test_websocket_json(loop, test_client):
    """Server reads a text frame via receive(), parses it with msg.json()
    and echoes back one field; the client checks the echoed value."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        msg = yield from ws.receive()
        msg_json = msg.json()
        answer = msg_json['test']
        ws.send_str(answer)
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)

    ws = yield from client.ws_connect('/')
    expected_value = 'value'
    payload = '{"test": "%s"}' % expected_value
    ws.send_str(payload)

    resp = yield from ws.receive()
    assert resp.data == expected_value
@asyncio.coroutine
def test_websocket_json_invalid_message(loop, test_client):
    """Server-side receive_json() must raise ValueError on malformed JSON;
    the handler reports back which branch it hit."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        try:
            yield from ws.receive_json()
        except ValueError:
            ws.send_str('ValueError was raised')
        else:
            raise Exception('No Exception')
        finally:
            yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)

    ws = yield from client.ws_connect('/')
    payload = 'NOT A VALID JSON STRING'
    ws.send_str(payload)

    data = yield from ws.receive_str()
    assert 'ValueError was raised' in data
@asyncio.coroutine
def test_websocket_send_json(loop, test_client):
    """send_json()/receive_json() round trip: server echoes the decoded
    object back as JSON."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        data = yield from ws.receive_json()
        ws.send_json(data)
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)

    ws = yield from client.ws_connect('/')
    expected_value = 'value'
    ws.send_json({'test': expected_value})

    data = yield from ws.receive_json()
    assert data['test'] == expected_value
@asyncio.coroutine
def test_websocket_receive_json(loop, test_client):
    """Server decodes a raw JSON text frame with receive_json() and echoes
    one field back as a plain string."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        data = yield from ws.receive_json()
        answer = data['test']
        ws.send_str(answer)
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)

    ws = yield from client.ws_connect('/')
    expected_value = 'value'
    payload = '{"test": "%s"}' % expected_value
    ws.send_str(payload)

    resp = yield from ws.receive()
    assert resp.data == expected_value
@asyncio.coroutine
def test_send_recv_text(loop, test_client):
    """Text echo plus a clean server-initiated close handshake (code 1000)."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        msg = yield from ws.receive_str()
        ws.send_str(msg+'/answer')
        yield from ws.close()
        closed.set_result(1)
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/')
    ws.send_str('ask')

    msg = yield from ws.receive()
    assert msg.type == aiohttp.WSMsgType.TEXT
    assert 'ask/answer' == msg.data

    # After the echo the server closes: the client sees a CLOSE frame with
    # the standard normal-closure code.
    msg = yield from ws.receive()
    assert msg.type == aiohttp.WSMsgType.CLOSE
    assert msg.data == 1000
    assert msg.extra == ''

    assert ws.closed
    assert ws.close_code == 1000

    yield from closed
@asyncio.coroutine
def test_send_recv_bytes(loop, test_client):
    """Binary echo plus a clean server-initiated close handshake."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        msg = yield from ws.receive_bytes()
        ws.send_bytes(msg+b'/answer')
        yield from ws.close()
        closed.set_result(1)
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/')

    ws.send_bytes(b'ask')

    msg = yield from ws.receive()
    assert msg.type == aiohttp.WSMsgType.BINARY
    assert b'ask/answer' == msg.data

    msg = yield from ws.receive()
    assert msg.type == aiohttp.WSMsgType.CLOSE
    assert msg.data == 1000
    assert msg.extra == ''

    assert ws.closed
    assert ws.close_code == 1000

    yield from closed
@asyncio.coroutine
def test_send_recv_json(loop, test_client):
    """JSON request/response round trip followed by a server close."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        data = yield from ws.receive_json()
        ws.send_json({'response': data['request']})
        yield from ws.close()
        closed.set_result(1)
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/')

    ws.send_str('{"request": "test"}')
    msg = yield from ws.receive()
    data = msg.json()
    assert msg.type == aiohttp.WSMsgType.TEXT
    assert data['response'] == 'test'

    msg = yield from ws.receive()
    assert msg.type == aiohttp.WSMsgType.CLOSE
    assert msg.data == 1000
    assert msg.extra == ''

    yield from ws.close()

    yield from closed
@asyncio.coroutine
def test_close_timeout(loop, test_client):
    """close() must give up after at most ~2x the configured timeout even
    when the peer keeps sending instead of completing the close handshake,
    finishing with close code 1006 (abnormal) and a TimeoutError."""
    aborted = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse(timeout=0.1)
        yield from ws.prepare(request)
        assert 'request' == (yield from ws.receive_str())
        ws.send_str('reply')
        begin = ws._loop.time()
        assert (yield from ws.close())
        elapsed = ws._loop.time() - begin
        assert elapsed < 0.201, \
            'close() should have returned before ' \
            'at most 2x timeout.'
        assert ws.close_code == 1006
        assert isinstance(ws.exception(), asyncio.TimeoutError)
        aborted.set_result(1)
        return ws

    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/')

    ws.send_str('request')
    assert 'reply' == (yield from ws.receive_str())

    # The server closes here. Then the client sends bogus messages with an
    # interval shorter than the server-side close timeout, trying to make
    # the server hang indefinitely.
    yield from asyncio.sleep(0.08, loop=loop)
    msg = yield from ws._reader.read()
    assert msg.type == WSMsgType.CLOSE

    ws.send_str('hang')
    yield from asyncio.sleep(0.08, loop=loop)
    ws.send_str('hang')
    yield from asyncio.sleep(0.08, loop=loop)
    ws.send_str('hang')
    yield from asyncio.sleep(0.08, loop=loop)
    ws.send_str('hang')
    yield from asyncio.sleep(0.08, loop=loop)
    assert (yield from aborted)

    yield from ws.close()
@asyncio.coroutine
def test_auto_pong_with_closing_by_peer(loop, test_client):
    """Server auto-answers a client PING; the client (autoping disabled)
    then initiates the close and the server sees code and message."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        yield from ws.receive()

        msg = yield from ws.receive()
        assert msg.type == WSMsgType.CLOSE
        assert msg.data == 1000
        assert msg.extra == 'exit message'
        closed.set_result(None)
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/', autoclose=False, autoping=False)
    ws.ping()
    ws.send_str('ask')

    msg = yield from ws.receive()
    assert msg.type == WSMsgType.PONG
    yield from ws.close(code=1000, message='exit message')
    yield from closed
@asyncio.coroutine
def test_ping(loop, test_client):
    """Server-initiated PING with a payload reaches the client verbatim."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        ws.ping('data')
        yield from ws.receive()
        closed.set_result(None)
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    # autoping disabled so the PING frame is surfaced to the test.
    ws = yield from client.ws_connect('/', autoping=False)

    msg = yield from ws.receive()
    assert msg.type == WSMsgType.PING
    assert msg.data == b'data'
    ws.pong()
    yield from ws.close()
    yield from closed
@asyncio.coroutine
def test_client_ping(loop, test_client):
    """Client-initiated PING is auto-answered by the server with a PONG
    carrying the same payload."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)
        yield from ws.receive()
        closed.set_result(None)
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/', autoping=False)

    ws.ping('data')
    msg = yield from ws.receive()
    assert msg.type == WSMsgType.PONG
    assert msg.data == b'data'
    ws.pong()
    yield from ws.close()
@asyncio.coroutine
def test_pong(loop, test_client):
    """With server autoping disabled the handler answers a client PING
    manually, then observes the client-initiated close frame."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse(autoping=False)
        yield from ws.prepare(request)

        msg = yield from ws.receive()
        assert msg.type == WSMsgType.PING
        ws.pong('data')

        msg = yield from ws.receive()
        assert msg.type == WSMsgType.CLOSE
        assert msg.data == 1000
        assert msg.extra == 'exit message'
        closed.set_result(None)
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/', autoping=False)

    ws.ping('data')
    msg = yield from ws.receive()
    assert msg.type == WSMsgType.PONG
    assert msg.data == b'data'

    yield from ws.close(code=1000, message='exit message')

    yield from closed
@asyncio.coroutine
def test_change_status(loop, test_client):
    """set_status() works before the handshake; prepare() pins it to 101."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        ws.set_status(200)
        assert 200 == ws.status
        yield from ws.prepare(request)
        # Once the websocket handshake completes the status is fixed at 101.
        assert 101 == ws.status
        yield from ws.close()
        closed.set_result(None)
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/', autoping=False)

    yield from ws.close()
    yield from closed
    # NOTE: a trailing second ws.close() was removed -- close() is a no-op
    # (returns False) on an already-closed websocket, so it only added noise.
@asyncio.coroutine
def test_handle_protocol(loop, test_client):
    """Subprotocol negotiation picks the first server protocol also
    offered by the client ('bar' here)."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse(protocols=('foo', 'bar'))
        yield from ws.prepare(request)
        yield from ws.close()
        assert 'bar' == ws.protocol
        closed.set_result(None)
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/', protocols=('eggs', 'bar'))

    yield from ws.close()
    yield from closed
@asyncio.coroutine
def test_server_close_handshake(loop, test_client):
    """When the server initiates the close, the client (autoclose off)
    receives the CLOSE frame and completes the handshake itself."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse(protocols=('foo', 'bar'))
        yield from ws.prepare(request)
        yield from ws.close()
        closed.set_result(None)
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/', autoclose=False,
                                      protocols=('eggs', 'bar'))

    msg = yield from ws.receive()
    assert msg.type == WSMsgType.CLOSE
    yield from ws.close()
    yield from closed
@asyncio.coroutine
def test_client_close_handshake(loop, test_client):
    """Client initiates close with code 1007; the server (autoclose off)
    sees the CLOSE frame, finishes the handshake and then reads CLOSED."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse(
            autoclose=False, protocols=('foo', 'bar'))
        yield from ws.prepare(request)

        msg = yield from ws.receive()
        assert msg.type == WSMsgType.CLOSE
        assert not ws.closed
        yield from ws.close()
        assert ws.closed
        assert ws.close_code == 1007

        msg = yield from ws.receive()
        assert msg.type == WSMsgType.CLOSED

        closed.set_result(None)
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/', autoclose=False,
                                      protocols=('eggs', 'bar'))

    yield from ws.close(code=1007)
    msg = yield from ws.receive()
    assert msg.type == WSMsgType.CLOSED
    yield from closed
@asyncio.coroutine
def test_server_close_handshake_server_eats_client_messages(loop, test_client):
    """Frames the client sends after the server's CLOSE must be silently
    discarded by the server while the handshake completes."""
    closed = helpers.create_future(loop)

    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse(protocols=('foo', 'bar'))
        yield from ws.prepare(request)
        yield from ws.close()
        closed.set_result(None)
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/', autoclose=False, autoping=False,
                                      protocols=('eggs', 'bar'))

    msg = yield from ws.receive()
    assert msg.type == WSMsgType.CLOSE

    # These must all be ignored by the closing server.
    ws.send_str('text')
    ws.send_bytes(b'bytes')
    ws.ping()

    yield from ws.close()
    yield from closed
@asyncio.coroutine
def test_receive_msg(loop, test_client):
    """Legacy receive_msg() still works but emits a DeprecationWarning."""
    @asyncio.coroutine
    def handler(request):
        ws = web.WebSocketResponse()
        yield from ws.prepare(request)

        with pytest.warns(DeprecationWarning):
            msg = yield from ws.receive_msg()
            assert msg.data == b'data'
        yield from ws.close()
        return ws

    app = web.Application(loop=loop)
    app.router.add_get('/', handler)
    client = yield from test_client(app)
    ws = yield from client.ws_connect('/')

    ws.send_bytes(b'data')
    yield from ws.close()
| |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions for the graph_editor.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
from six import iteritems
from tensorflow.python.framework import ops as tf_ops
from tensorflow.python.ops import array_ops as tf_array_ops
# Public API of this module.
# NOTE(review): several public helpers defined below (concatenate_unique,
# ListView, is_iterable, flatten_tree, transform_tree, check_graphs,
# get_unique_graph) are not listed here -- confirm the omission is
# intentional before relying on `from ... import *`.
__all__ = [
    "make_list_of_op",
    "get_tensors",
    "make_list_of_t",
    "get_generating_ops",
    "get_consuming_ops",
    "ControlOutputs",
    "placeholder_name",
    "make_placeholder_from_tensor",
    "make_placeholder_from_dtype_and_shape",
]
def concatenate_unique(la, lb):
  """Append to `la` every element of `lb` that is not already in `la`.

  The appended elements keep the order they have in `lb`.

  Args:
    la: List of Python objects.
    lb: List of Python objects.
  Returns:
    `la`, extended in place with the missing elements of `lb`.
  """
  seen = set(la)
  for item in lb:
    if item in seen:
      continue
    seen.add(item)
    la.append(item)
  return la
# TODO(fkp): very generic code, it should be moved in a more generic place.
class ListView(object):
  """Immutable list wrapper.

  This class is strongly inspired by the one in tf.Operation.
  """

  def __init__(self, list_):
    if not isinstance(list_, list):
      raise TypeError("Expected a list, got: {}.".format(type(list_)))
    self._list = list_

  def __iter__(self):
    return iter(self._list)

  def __len__(self):
    return len(self._list)

  def __bool__(self):
    return bool(self._list)

  # Python 3 wants __bool__, Python 2.7 wants __nonzero__
  __nonzero__ = __bool__

  def __getitem__(self, i):
    return self._list[i]

  def __add__(self, other):
    # Concatenation always yields a plain (mutable) list.
    rhs = other if isinstance(other, list) else list(other)
    return list(self) + rhs
# TODO(fkp): very generic code, it should be moved in a more generic place.
def is_iterable(obj):
  """Return true if the object is iterable."""
  # Tensors are deliberately treated as atomic leaves by the graph editor,
  # whatever iteration support they may have.
  if isinstance(obj, tf_ops.Tensor):
    return False
  try:
    iter(obj)
  except Exception:  # pylint: disable=broad-except
    return False
  return True
def flatten_tree(tree, leaves=None):
  """Flatten a tree into a list.

  Args:
    tree: iterable or not. If iterable, its elements (child) can also be
      iterable or not.
    leaves: list to which the tree leaves are appended (None by default).
  Returns:
    A list of all the leaves in the tree.
  """
  if leaves is None:
    leaves = []
  if isinstance(tree, dict):
    # Only the values of a dict are considered children; keys are ignored.
    for child in tree.values():
      flatten_tree(child, leaves)
  elif is_iterable(tree):
    for child in tree:
      flatten_tree(child, leaves)
  else:
    leaves.append(tree)
  return leaves
def transform_tree(tree, fn, iterable_type=tuple):
  """Transform all the nodes of a tree.

  Args:
    tree: iterable or not. If iterable, its elements (child) can also be
      iterable or not.
    fn: function to apply to each leaves.
    iterable_type: type use to construct the resulting tree for unknown
      iterable, typically `list` or `tuple`.
  Returns:
    A tree whose leaves has been transformed by `fn`.
    The hierarchy of the output tree mimics the one of the input tree.
  """
  if is_iterable(tree):
    if isinstance(tree, dict):
      # Preserve the concrete dict subclass: allocate with __new__ and
      # re-run __init__ on the transformed (key, value) pairs.
      res = tree.__new__(type(tree))
      res.__init__(
          (k, transform_tree(child, fn)) for k, child in iteritems(tree))
      return res
    elif isinstance(tree, tuple):
      # NamedTuple?
      if hasattr(tree, "_asdict"):
        # namedtuple fields become keyword args for its __new__.
        res = tree.__new__(type(tree), **transform_tree(tree._asdict(), fn))
      else:
        res = tree.__new__(type(tree),
                           (transform_tree(child, fn) for child in tree))
      return res
    elif isinstance(tree, collections.Sequence):
      # NOTE(review): collections.Sequence moved to collections.abc in
      # Python 3.3 and the alias was removed in 3.10 -- confirm the target
      # Python version before modernizing this check.
      res = tree.__new__(type(tree))
      res.__init__(transform_tree(child, fn) for child in tree)
      return res
    else:
      # Unknown iterable: fall back to rebuilding it as iterable_type.
      return iterable_type(transform_tree(child, fn) for child in tree)
  else:
    # Leaf node.
    return fn(tree)
def check_graphs(*args):
  """Check that all the element in args belong to the same graph.

  Args:
    *args: a list of object with a obj.graph property.
  Raises:
    ValueError: if all the elements do not belong to the same graph.
  """
  graph = None
  for idx, sgv in enumerate(args):
    current = sgv.graph
    if current is None:
      # Elements without a graph are ignored.
      continue
    if graph is None:
      graph = current
    elif current is not graph:
      raise ValueError("Argument[{}]: Wrong graph!".format(idx))
def get_unique_graph(tops, check_types=None, none_if_empty=False):
  """Return the unique graph used by the all the elements in tops.

  Args:
    tops: list of elements to check (usually a list of tf.Operation and/or
      tf.Tensor). Or a tf.Graph.
    check_types: check that the element in tops are of given type(s). If None,
      the types (tf.Operation, tf.Tensor) are used.
    none_if_empty: don't raise an error if tops is an empty list, just return
      None.
  Returns:
    The unique graph used by all the tops.
  Raises:
    TypeError: if tops is not a iterable of tf.Operation.
    ValueError: if the graph is not unique.
  """
  if isinstance(tops, tf_ops.Graph):
    return tops
  if not is_iterable(tops):
    raise TypeError("{} is not iterable".format(type(tops)))
  if check_types is None:
    check_types = (tf_ops.Operation, tf_ops.Tensor)
  elif not is_iterable(check_types):
    check_types = (check_types,)
  graph_found = None
  for element in tops:
    if not isinstance(element, check_types):
      raise TypeError("Expected a type in ({}), got: {}".format(", ".join([str(
          t) for t in check_types]), type(element)))
    if graph_found is None:
      graph_found = element.graph
    elif graph_found is not element.graph:
      raise ValueError(
          "Operation {} does not belong to given graph".format(element))
  if graph_found is None and not none_if_empty:
    raise ValueError("Can't find the unique graph of an empty list")
  return graph_found
def make_list_of_op(ops, check_graph=True, allow_graph=True, ignore_ts=False):
  """Convert ops to a list of `tf.Operation`.

  Args:
    ops: can be an iterable of `tf.Operation`, a `tf.Graph` or a single
      operation.
    check_graph: if `True` check if all the operations belong to the same graph.
    allow_graph: if `False` a `tf.Graph` cannot be converted.
    ignore_ts: if True, silently ignore `tf.Tensor`.
  Returns:
    A newly created list of `tf.Operation`.
  Raises:
    TypeError: if ops cannot be converted to a list of `tf.Operation` or,
      if `check_graph` is `True`, if all the ops do not belong to the
      same graph.
  """
  # A whole graph converts to all of its operations (when allowed).
  if isinstance(ops, tf_ops.Graph):
    if not allow_graph:
      raise TypeError("allow_graph is False: cannot convert a tf.Graph.")
    return ops.get_operations()
  if not is_iterable(ops):
    ops = [ops]
  if not ops:
    return []
  if check_graph:
    check_types = None if ignore_ts else tf_ops.Operation
    get_unique_graph(ops, check_types=check_types)
  return [op for op in ops if isinstance(op, tf_ops.Operation)]
# TODO(fkp): move this function in tf.Graph?
def get_tensors(graph):
  """get all the tensors which are input or output of an op in the graph.

  Args:
    graph: a `tf.Graph`.
  Returns:
    A list of `tf.Tensor`.
  Raises:
    TypeError: if graph is not a `tf.Graph`.
  """
  if not isinstance(graph, tf_ops.Graph):
    raise TypeError("Expected a graph, got: {}".format(type(graph)))
  result = []
  # Every tensor is the output of exactly one op, so collecting outputs
  # covers inputs as well.
  for operation in graph.get_operations():
    result.extend(operation.outputs)
  return result
def make_list_of_t(ts, check_graph=True, allow_graph=True, ignore_ops=False):
  """Convert ts to a list of `tf.Tensor`.

  Args:
    ts: can be an iterable of `tf.Tensor`, a `tf.Graph` or a single tensor.
    check_graph: if `True` check if all the tensors belong to the same graph.
    allow_graph: if `False` a `tf.Graph` cannot be converted.
    ignore_ops: if `True`, silently ignore `tf.Operation`.
  Returns:
    A newly created list of `tf.Tensor`.
  Raises:
    TypeError: if `ts` cannot be converted to a list of `tf.Tensor` or,
      if `check_graph` is `True`, if all the ops do not belong to the same
      graph.
  """
  # A whole graph converts to all of its tensors (when allowed).
  if isinstance(ts, tf_ops.Graph):
    if not allow_graph:
      raise TypeError("allow_graph is False: cannot convert a tf.Graph.")
    return get_tensors(ts)
  if not is_iterable(ts):
    ts = [ts]
  if not ts:
    return []
  if check_graph:
    check_types = None if ignore_ops else tf_ops.Tensor
    get_unique_graph(ts, check_types=check_types)
  return [t for t in ts if isinstance(t, tf_ops.Tensor)]
def get_generating_ops(ts):
    """Return all the generating ops of the tensors in `ts`.

    Args:
        ts: a list of `tf.Tensor`
    Returns:
        A list of all the generating `tf.Operation` of the tensors in `ts`.
    Raises:
        TypeError: if `ts` cannot be converted to a list of `tf.Tensor`.
    """
    return [tensor.op for tensor in make_list_of_t(ts, allow_graph=False)]
def get_consuming_ops(ts):
    """Return all the consuming ops of the tensors in ts.

    Args:
        ts: a list of `tf.Tensor`
    Returns:
        A list of all the consuming `tf.Operation` of the tensors in `ts`,
        without duplicates, in first-seen order.
    Raises:
        TypeError: if ts cannot be converted to a list of `tf.Tensor`.
    """
    tensors = make_list_of_t(ts, allow_graph=False)
    consumers = []
    for tensor in tensors:
        for consumer in tensor.consumers():
            # Preserve first-seen order while de-duplicating.
            if consumer not in consumers:
                consumers.append(consumer)
    return consumers
class ControlOutputs(object):
    """The control outputs topology.

    Maps every operation of a graph to the list of operations that list it
    among their control inputs; can be refreshed when the graph changes.
    """

    def __init__(self, graph):
        """Create a dictionary of control-output dependencies.

        Args:
            graph: a `tf.Graph`.
        Raises:
            TypeError: graph is not a `tf.Graph`.
        """
        if not isinstance(graph, tf_ops.Graph):
            raise TypeError("Expected a tf.Graph, got: {}".format(type(graph)))
        self._control_outputs = {}
        self._graph = graph
        # None forces the first _build(); afterwards tracks graph.version.
        self._version = None
        self._build()

    def update(self):
        """Update the control outputs if the graph has changed."""
        if self._graph.version != self._version:
            self._build()
        return self

    def _build(self):
        """Build the control outputs dictionary."""
        self._control_outputs.clear()
        for op in self._graph.get_operations():
            for dep in op.control_inputs:
                dependents = self._control_outputs.setdefault(dep, [])
                if op not in dependents:
                    dependents.append(op)
        self._version = self._graph.version

    def get_all(self):
        """Return the complete op -> control-dependents dictionary."""
        return self._control_outputs

    def get(self, op):
        """return the control outputs of op."""
        # Empty tuple when the op has no control dependents.
        return self._control_outputs.get(op, ())

    @property
    def graph(self):
        """The `tf.Graph` this topology was built from."""
        return self._graph
def scope_finalize(scope):
    """Return `scope` guaranteed to end with a "/" (empty string stays empty)."""
    if scope and not scope.endswith("/"):
        scope += "/"
    return scope
def scope_dirname(scope):
    """Return everything up to and including the last "/" of `scope` ("" if none)."""
    slash = scope.rfind("/")
    return "" if slash == -1 else scope[:slash + 1]
def scope_basename(scope):
    """Return the part of `scope` after its last "/" (the whole string if none)."""
    slash = scope.rfind("/")
    return scope if slash == -1 else scope[slash + 1:]
def placeholder_name(t=None, scope=None):
    """Create placeholder name for the graph editor.

    Args:
        t: optional tensor on which the placeholder operation's name will be based
            on
        scope: absolute scope with which to prefix the placeholder's name. None
            means that the scope of t is preserved. "" means the root scope.
    Returns:
        A new placeholder name prefixed by "geph". Note that "geph" stands for
        Graph Editor PlaceHolder. This convention allows to quickly identify the
        placeholder generated by the Graph Editor.
    Raises:
        TypeError: if `t` is neither `None` nor a `tf.Tensor`.
    """
    if scope is not None:
        scope = scope_finalize(scope)
    if t is not None:
        if not isinstance(t, tf_ops.Tensor):
            # Bug fix: the message previously read "tf.Tenfor".
            raise TypeError("Expected a tf.Tensor, got: {}".format(type(t)))
        op_dirname = scope_dirname(t.op.name)
        op_basename = scope_basename(t.op.name)
        if scope is None:
            scope = op_dirname
        # Avoid stacking a second "geph__" prefix on an already-renamed op.
        if op_basename.startswith("geph__"):
            ph_name = op_basename
        else:
            ph_name = "geph__{}_{}".format(op_basename, t.value_index)
        return scope + ph_name
    else:
        if scope is None:
            scope = ""
        return scope + "geph"
def make_placeholder_from_tensor(t, scope=None):
    """Create a `tf.placeholder` for the Graph Editor.

    Note that the correct graph scope must be set by the calling function.

    Args:
        t: a `tf.Tensor` whose name will be used to create the placeholder
            (see function placeholder_name).
        scope: absolute scope within which to create the placeholder. None
            means that the scope of `t` is preserved. `""` means the root scope.
    Returns:
        A newly created `tf.placeholder`.
    Raises:
        TypeError: if `t` is not `None` or a `tf.Tensor`.
    """
    name = placeholder_name(t, scope=scope)
    return tf_array_ops.placeholder(dtype=t.dtype, shape=t.get_shape(), name=name)
def make_placeholder_from_dtype_and_shape(dtype, shape=None, scope=None):
    """Create a tf.placeholder for the Graph Editor.

    Note that the correct graph scope must be set by the calling function.
    The placeholder is named using the function placeholder_name (with no
    tensor argument).

    Args:
        dtype: the tensor type.
        shape: the tensor shape (optional).
        scope: absolute scope within which to create the placeholder. None
            means that the scope of t is preserved. "" means the root scope.
    Returns:
        A newly created tf.placeholder.
    """
    name = placeholder_name(scope=scope)
    return tf_array_ops.placeholder(dtype=dtype, shape=shape, name=name)
# Matches dunder names such as "__doc__" so they can be skipped below.
_INTERNAL_VARIABLE_RE = re.compile(r"^__\w+__$")


def get_predefined_collection_names():
    """Return all the predefined collection names."""
    return [getattr(tf_ops.GraphKeys, key)
            for key in dir(tf_ops.GraphKeys)
            if _INTERNAL_VARIABLE_RE.match(key) is None]
def find_corresponding_elem(target, dst_graph, dst_scope="", src_scope=""):
    """Find corresponding op/tensor in a different graph.

    Args:
        target: A `tf.Tensor` or a `tf.Operation` belonging to the original graph.
        dst_graph: The graph in which the corresponding graph element must be found.
        dst_scope: A scope which is prepended to the name to look for.
        src_scope: A scope which is removed from the original of `target` name.
    Returns:
        The corresponding `tf.Tensor` or a `tf.Operation`.
    Raises:
        ValueError: if `src_name` does not start with `src_scope`.
        TypeError: if `target` is not a `tf.Tensor` or a `tf.Operation`
        KeyError: If the corresponding graph element cannot be found.
    """
    src_name = target.name
    if src_scope:
        src_scope = scope_finalize(src_scope)
        # Bug fix: the original called the non-existent str.startswidth, which
        # raised AttributeError whenever a non-empty src_scope was supplied.
        if not src_name.startswith(src_scope):
            raise ValueError("{} does not start with {}".format(src_name, src_scope))
        src_name = src_name[len(src_scope):]
    dst_name = src_name
    if dst_scope:
        dst_scope = scope_finalize(dst_scope)
        dst_name = dst_scope + dst_name
    if isinstance(target, tf_ops.Tensor):
        return dst_graph.get_tensor_by_name(dst_name)
    if isinstance(target, tf_ops.Operation):
        return dst_graph.get_operation_by_name(dst_name)
    # Bug fix: format the type into the message instead of passing it as a
    # second (never formatted) exception argument.
    raise TypeError("Expected tf.Tensor or tf.Operation, got: {}".format(type(target)))
def find_corresponding(targets, dst_graph, dst_scope="", src_scope=""):
    """Find corresponding ops/tensors in a different graph.

    `targets` is a Python tree, that is, a nested structure of iterables
    (list, tuple, dictionary) whose leaves are instances of
    `tf.Tensor` or `tf.Operation`.

    Args:
        targets: A Python tree containing `tf.Tensor` or `tf.Operation`
            belonging to the original graph.
        dst_graph: The graph in which the corresponding graph element must be found.
        dst_scope: A scope which is prepended to the name to look for.
        src_scope: A scope which is removed from the original of `top` name.
    Returns:
        A Python tree containing the corresponding `tf.Tensor` or `tf.Operation`.
    Raises:
        ValueError: if `src_name` does not start with `src_scope`.
        TypeError: if `top` is not a `tf.Tensor` or a `tf.Operation`
        KeyError: If the corresponding graph element cannot be found.
    """
    def _convert(leaf):
        # Map a single leaf of the tree into the destination graph.
        return find_corresponding_elem(leaf, dst_graph, dst_scope, src_scope)
    return transform_tree(targets, _convert)
| |
#! /usr/bin/env python
#adam-example#ipython -i Plot_Light_Cutter.py /u/ki/awright/data/10_2_DARK/DARK/DARK_*.fits
# Inspect flat/dark frames chip-by-chip, fit each pixel-value histogram, and
# derive per-CCD lower/upper clipping limits used for weight-image creation.
# NOTE(review): Python 2 script; relies on `from matplotlib.pylab import *`
# for linspace/zeros/figure/etc.
import pyfits
from matplotlib.pylab import *
import sys
sys.path.append('/u/ki/awright/InstallingSoftware/pythons')
from fitter import Gauss
from UsefulTools import names, FromPick_data_true, FromPick_data_spots, GetMiddle, GetSpots_bins_values, ShortFileString
import imagetools
GetCCD=imagetools.GetCCD
Nsub_to_Npy=imagetools.Nsub_to_Npy
from mpl_toolkits.axes_grid1 import ImageGrid
files=imagetools.ArgCleaner()
# files in /u/ki/awright/data/2010-02-12_W-C-IC/DARK
files=imagetools.OrderFiles(files)
fl=files[0]
# Pick histogram binning and x-range depending on frame type: DARK frames
# have counts around zero, flats are normalized around one.
if 'DARK' in fl:
    lightbins=linspace(-15,15,150)
    xlims=-9,16
    darkmode=True
else:
    lightbins=linspace(0,2,200)
    xlims=.3,1.3
    darkmode=False
#set values
count_thresh=100  # minimum histogram count for a bin to be considered "populated"
widen=0.0  # optional padding applied to the dark limits later
#widen=.5
#####tag='10_2_DARK'
tag='10_3_DARK_newlims'  # prefix used in all savefig() output names
f=figure(figsize=(13,8.5))
# Per-CCD limits filled in by the first loop (10 CCDs).
uplims=zeros((10,))
lowlims=zeros((10,))
# First pass: histogram each chip, fit a Gaussian, and choose limits where the
# per-bin counts drop below count_thresh.
for fl in files:
    fit=pyfits.open(fl)
    data=fit[0].data
    CCDnum=GetCCD(fl)
    npy=Nsub_to_Npy(CCDnum)
    ax=f.add_subplot(2,5,npy)
    light=data.reshape((-1,))
    x,bins,patch=hist(light,bins=lightbins,log=True)
    fitinst=Gauss(GetMiddle(bins),x,threshold=.001)
    xx,yy=fitinst.getfitline()
    plot(xx,yy,'green')
    sigma=fitinst.sigma
    mean=fitinst.mean
    title('CCD '+str(CCDnum)+' mean='+str(round(mean,2)))
    #lowlim=mean-5*sigma
    #uplim=mean+5*sigma
    #plot([lowlim,lowlim],[1,max(x)],'green')
    #plot([uplim,uplim],[1,max(x)],'green')
    # Locate the longest consecutive run of bins whose counts exceed
    # count_thresh; its edges become the clipping limits.
    countup=cumsum(x<count_thresh)
    counter=bincount(countup)
    start_spot=sum(counter[:counter.argmax()])+1
    end_spot=sum(counter[:counter.argmax()+1])
    lowlim=bins[start_spot]
    uplim=bins[end_spot]
    #compare uplim to 5 sigma above the fitted mean
    sig5=mean+5*sigma
    plot([sig5,sig5],[1,max(x)],'orange')
    lowlims[CCDnum-1]=lowlim
    #uplims[CCDnum-1]=min(uplim,sig5) #this is for 10_2_DARK and 10_3_DARKestablished
    uplims[CCDnum-1]=max(uplim,sig5) #this is for 10_2_DARK_newlims
    if sig5 < uplim:
        text( sig5, max(x),'5sigma<uplim')
    plot([lowlim,lowlim],[1,max(x)],'r')
    plot([uplim,uplim],[1,max(x)],'r')
    # Fraction of pixels that the chosen limits would discard.
    Ncut=sum((light<lowlim)+(light>uplim))
    xlim(xlims[0],xlims[1])
    ylim(1,10**7)
    text(xlim()[0],10**6.7,'%cut='+str(round(100*Ncut/float(light.size),6)))
    fit.close()
#now pick the spots for each one (see if there are flyers too)
# Pieces of the shell command printed per-CCD in the third pass below.
line1and2='./create_global_weights_flags_para.sh ${SUBARUDIR}/${run}_${filter} BASE_WEIGHT '
middle=' DARK '
savefig('plt'+tag+'_ImageFitLimit_Hists')
#Plot the things to get an idea of limits
lims=zip(lowlims, uplims)
f=figure(figsize=(13,8.5))
suptitle('The BASE_WEIGHT Images')
# Second pass: render each chip with its pixels binned between the fitted
# limits so the clipping choices can be inspected visually.
for lim,fl in zip(lims,files):
    CCDnum=GetCCD(fl)
    npy=Nsub_to_Npy(CCDnum)
    fit=pyfits.open(fl)
    data=fit[0].data
    ax=f.add_subplot(2,5,npy)
    title('CCD '+str(CCDnum)+'\nlims: '+str(round(lim[0],2))+' '+str(round(lim[1],2)))
    bins=linspace(lim[0],lim[1],50) #append([-inf],linspace(lim[0],lim[1],50))
    # Pixels below/above the limits map to NaN so they render blank.
    hister=asarray(digitize(data.flatten(),bins=bins)-1,dtype='float32')
    hister[hister==-1]=nan
    hister[hister==len(bins)-1]=nan
    imshow(hister.reshape(data.shape),interpolation='nearest',origin='lower left')
    cbar=colorbar()
    # Re-label the colorbar ticks with the actual bin values.
    cdn,cup=int(cbar.get_clim()[0]),int(cbar.get_clim()[1])
    if cdn<0:cdn=0
    cbar.set_ticks(range(cdn,cup+1,5))
    cbar.set_ticklabels([round(bins[i],2) for i in range(cdn,cup+1,5)])
    ax.set_yticklabels([]);ax.set_xticklabels([]);ax.set_xticks([]);ax.set_yticks([])
    fit.close()
savefig('plt'+tag+'_ImageFitLimit_CCDs')
#Now plot the limits
#lims=[(, ),
#(, ),
#(, ),
#(, ),
#(, ),
#(, ),
#(, ),
#(, ),
#(, ),
#(, )]
# Historical per-CCD (lowlim, uplim) choices keyed by run_filter, kept for reference.
lims={}
#below are lims for 2011-01-06_W-S-Z+
lims['2011-01-06_W-S-Z+']=[(.585,.91 ),(.78,1.01 ),(.86,1.042 ),(.8,1.025 ),(.425, .95),(.405, .985),(.795, 1.05),(.86,1.07 ),(.63, .94),(.34, .93)]
#below are the limits for 2011-01-06_W-C-IC
lims['2011-01-06_W-C-IC']=[(.58,.96),(.78,1.0),(.86,1.04 ),(.78,1.01 ),(.36,.94 ),(.55,1.03 ),(.85,1.05 ),(.90,1.07 ),(.65,.95 ),(.38,.92 )]
#below are the limits for 2010-04-15_W-S-I+
lims['2010-04-15_W-S-I+']=[(.5,.93),(.83,1.01),(.87, 1.05),(.82, 1.0),(.46, .97),(.45, 1.0),(.84, 1.04),(.89, 1.07),(.69, .95),(.49, .95)]
#below are the limits for 2010-04-15_W-S-G+
lims['2010-04-15_W-S-G+']=[(.46,.94 ), (.81,1.01 ), (.83, 1.06), (.8,1.03 ), (.43, .99), (.42, 1.01), (.82, 1.05), (.9, 1.1), (.68, .98), (.47, .98)]
#below are the limits for 2010-02-12_W-C-IC
lims['2010-02-12_W-C-IC']=[(.48,.91),(.82, 1.01),(.87, 1.05),(.82, 1.02),(.48, .97),(.43, .99),(.83, 1.05),(.89, 1.08),(.70, .99),(.5, .95)]
#below are the limits for 2010-02-12_W-C-RC
lims['2010-02-12_W-C-RC']=[(0.48, 0.91), (0.82, 1.01), (0.86, 1.06), (0.81, 1.022), (0.47, 1.01), (0.42, .99), (0.8, 1.05), (0.9, 1.09), (0.71, .97), (0.5, 0.95)]
#below are the limits for 2010-02-12_W-J-B
lims['2010-02-12_W-J-B']=[(0.47, 0.93), (0.81, 1.03), (0.84, 1.07), (0.73, 1.04), (0.45, 1.02), (0.41, 1.03), (0.78, 1.06), (0.85, 1.11), (0.69, 1.01),(0.5, 0.99)]
#below are the limits for 2010-02-12_W-J-V
lims['2010-02-12_W-J-V']=[(.49,.91),(.82, 1.02),(.85, 1.06),(.81, 1.02),(.47, .98),(.44, .99),(.85, 1.05), (.90, 1.10), (.72, 1.00), (.51, .97)]
#these are the uniform wide limits used for the 10_3 config
lims['WideLims']=zip(-.04+array([ 0.46 , 0.78 , 0.83 , 0.73 , 0.36 , 0.405, 0.78 , 0.85 ,0.63 , 0.34 ]),.08+array([ 0.96, 1.03, 1.07, 1.04, 1.02, 1.03, 1.06, 1.11, 1.01, 0.99]))
# Choose the limits actually applied below: measured dark limits in darkmode,
# otherwise the limits fitted in the first pass.
if darkmode:
    dark_lims=zip(-widen+lowlims,widen+uplims)
    base_weight_lims=zip(-.04+array([ 0.46 , 0.78 , 0.83 , 0.73 , 0.36 , 0.405, 0.78 , 0.85 ,0.63 , 0.34 ]),.08+array([ 0.96, 1.03, 1.07, 1.04, 1.02, 1.03, 1.06, 1.11, 1.01, 0.99])) #these are the uniform wide limits used for the 10_3 config
    # NOTE: the next assignment intentionally overrides the one above.
    base_weight_lims=zip(-.24+array([ 0.46 , 0.78 , 0.83 , 0.73 , 0.36 , 0.405, 0.78 , 0.85 ,0.63 , 0.34 ]),.28+array([ 0.96, 1.03, 1.07, 1.04, 1.02, 1.03, 1.06, 1.11, 1.01, 0.99])) #going extra wide for 10_2 because this only matters if the cuts are too tight, and I don't feel like really caring about this
else:
    dark_lims=[(-1.73, 4.15), (-1.88, 4.6), (-1.88 ,4.9), (-1.88, 5.35), (-2.94, 5.95), (-1.88, 5.5), (-1.73, 4.75), (-2.04, 4.75), (-1.43, 4.6), (-1.58, 4.75)] #this is dark lims for 10_3
    base_weight_lims=zip(lowlims, uplims)
# Third pass: show which pixels each limit pair would cut (in black), print
# the shell commands that apply the limits, and collect ds9 scale-check
# commands for manual inspection.
print 'lowlim highlim ccd#'
scalecheck=''
f=figure(figsize=(13,8.5))
#suptitle('Chips Cut Out Shown In Black')
for bwlim,fl,darklim in zip(base_weight_lims,files,dark_lims):
    # Display the dark limits in darkmode, the base-weight limits otherwise.
    if darkmode: lim=darklim
    else: lim=bwlim
    CCDnum=GetCCD(fl)
    npy=Nsub_to_Npy(CCDnum)
    fit=pyfits.open(fl)
    data=fit[0].data
    #ar=zeros(data.shape)
    endline3 = str(round(bwlim[0],2))+' '+str(round(bwlim[1],2))
    dark = str(round(darklim[0],2))+' '+str(round(darklim[1],2))
    end=' '+str(CCDnum)
    dn,up=lim[0],lim[1]
    # Boolean masks of pixels falling outside the limits; their sum marks all
    # cut pixels.
    toolow=data<dn
    toohigh=data>up
    cutout=toohigh+toolow
    ax=f.add_subplot(2,5,npy)
    imshow(cutout,cmap='Greys',interpolation='nearest',origin='lower left')
    ax.set_yticklabels([]);ax.set_xticklabels([]);ax.set_xticks([]);ax.set_yticks([])
    print line1and2+endline3+middle+dark+end
    scalecheck+= 'ds9 '+fl+' -scale limits '+str(round(lim[0],2))+' '+str(round(lim[0],2))+' -view layout vertical -geometry 650x1600 -zoom to fit &\n'
    scalecheck+= 'ds9 '+fl+' -scale limits '+str(round(lim[1],2))+' '+str(round(lim[1],2))+' -view layout vertical -geometry 650x1600 -zoom to fit &\n'
    fit.close()
print scalecheck
f.subplots_adjust(hspace=0, top=.99, right=.99, left=.01, bottom=.01)
f.subplots_adjust(wspace=-.2)
savefig('plt'+tag+'_FinalLimit_ds9')
# Final pass: re-plot each chip's histogram with the chosen limits overlaid
# and report the fraction of pixels that would be cut.
f=figure(figsize=(13,8.5))
for bwlim,fl,darklim in zip(base_weight_lims,files,dark_lims):
    if darkmode: lim=darklim
    else: lim=bwlim
    CCDnum=GetCCD(fl)
    npy=Nsub_to_Npy(CCDnum)
    fit=pyfits.open(fl)
    data=fit[0].data
    lowlim,uplim=lim
    npy=Nsub_to_Npy(CCDnum)
    ax=f.add_subplot(2,5,npy)
    light=data.reshape((-1,))
    x,bins,patch=hist(light,bins=lightbins,log=True) ##may need limits changed (DARK=-15,15)
    title('CCD '+str(CCDnum))
    plot([lowlim,lowlim],[1,max(x)],'r')
    plot([uplim,uplim],[1,max(x)],'r')
    # Number of pixels outside [lowlim, uplim].
    Ncut=sum((light<lowlim)+(light>uplim))
    xlim(xlims[0],xlims[1])
    ylim(1,10**7)
    text(xlim()[0],10**6.7,'%cut='+str(round(100*Ncut/float(light.size),6)))
    fit.close()
#now pick the spots for each one (see if there are flyers too)
#f.subplots_adjust(hspace=0, top=.99, right=.99, left=.01, bottom=.01)
savefig('plt'+tag+'_FinalLimit_Hists')
| |
"""Test cases and utilities for hs_core module. See also ./tests folder."""
from dateutil import parser
import tempfile
import os
from django.conf import settings
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.messages.storage.fallback import FallbackStorage
from django.test import TestCase, RequestFactory
from hs_core.models import ResourceFile
from hs_core.hydroshare import add_resource_files
from hs_core.views.utils import create_folder, move_or_rename_file_or_folder, zip_folder, \
unzip_file, remove_folder
from hs_core.views.utils import run_ssh_command
from theme.models import UserProfile
from django_irods.icommands import SessionException
from django_irods.storage import IrodsStorage
class MockIRODSTestCaseMixin(object):
    """Mix in to allow for mock iRODS testing."""

    def setUp(self):
        """Set up iRODS patchers for testing of data bags, etc."""
        super(MockIRODSTestCaseMixin, self).setUp()
        # only mock up testing iRODS operations when local iRODS container is not used
        if settings.IRODS_HOST != 'data.local.org':
            from mock import patch
            patch_targets = (
                "hs_core.hydroshare.hs_bagit.delete_files_and_bag",
                "hs_core.hydroshare.hs_bagit.create_bag",
                "hs_core.hydroshare.hs_bagit.create_bag_files",
                "hs_core.tasks.create_bag_by_irods",
                "hs_core.hydroshare.utils.copy_resource_files_and_AVUs",
            )
            self.irods_patchers = tuple(patch(target) for target in patch_targets)
            for active_patcher in self.irods_patchers:
                active_patcher.start()

    def tearDown(self):
        """Stop iRODS patchers."""
        if settings.IRODS_HOST != 'data.local.org':
            for active_patcher in self.irods_patchers:
                active_patcher.stop()
        super(MockIRODSTestCaseMixin, self).tearDown()
class TestCaseCommonUtilities(object):
"""Enable common utilities for iRODS testing."""
def assert_federated_irods_available(self):
"""assert federated iRODS is available before proceeding with federation-related tests."""
self.assertTrue(settings.REMOTE_USE_IRODS and
settings.HS_USER_ZONE_HOST == 'users.local.org' and
settings.IRODS_HOST == 'data.local.org',
"irods docker containers are not set up properly for federation testing")
self.irods_fed_storage = IrodsStorage('federated')
self.irods_storage = IrodsStorage()
def create_irods_user_in_user_zone(self):
"""Create corresponding irods account in user zone."""
try:
exec_cmd = "{0} {1} {2}".format(settings.LINUX_ADMIN_USER_CREATE_USER_IN_USER_ZONE_CMD,
self.user.username, self.user.username)
output = run_ssh_command(host=settings.HS_USER_ZONE_HOST,
uname=settings.LINUX_ADMIN_USER_FOR_HS_USER_ZONE,
pwd=settings.LINUX_ADMIN_USER_PWD_FOR_HS_USER_ZONE,
exec_cmd=exec_cmd)
for out_str in output:
if 'ERROR:' in out_str.upper():
# irods account failed to create
self.assertRaises(SessionException(-1, out_str, out_str))
user_profile = UserProfile.objects.filter(user=self.user).first()
user_profile.create_irods_user_account = True
user_profile.save()
except Exception as ex:
self.assertRaises(SessionException(-1, str(ex), str(ex)))
def delete_irods_user_in_user_zone(self):
"""Delete irods test user in user zone."""
try:
exec_cmd = "{0} {1}".format(settings.LINUX_ADMIN_USER_DELETE_USER_IN_USER_ZONE_CMD,
self.user.username)
output = run_ssh_command(host=settings.HS_USER_ZONE_HOST,
uname=settings.LINUX_ADMIN_USER_FOR_HS_USER_ZONE,
pwd=settings.LINUX_ADMIN_USER_PWD_FOR_HS_USER_ZONE,
exec_cmd=exec_cmd)
if output:
for out_str in output:
if 'ERROR:' in out_str.upper():
# there is an error from icommand run, report the error
self.assertRaises(SessionException(-1, out_str, out_str))
user_profile = UserProfile.objects.filter(user=self.user).first()
user_profile.create_irods_user_account = False
user_profile.save()
except Exception as ex:
# there is an error from icommand run, report the error
self.assertRaises(SessionException(-1, str(ex), str(ex)))
def save_files_to_user_zone(self, file_name_to_target_name_dict):
"""Save a list of files to iRODS user zone.
:param file_name_to_target_name_dict: a dictionary in the form of {ori_file, target_file}
where ori_file is the file to be save to, and the target_file is the full path file name
in iRODS user zone to save ori_file to
:return:
"""
for file_name, target_name in list(file_name_to_target_name_dict.items()):
self.irods_fed_storage.saveFile(file_name, target_name)
def check_file_exist(self, irods_path):
"""Check whether the input irods_path exist in iRODS.
:param irods_path: the iRODS path to check whether it exists or not
:return: True if exist, False otherwise.
"""
return self.irods_storage.exists(irods_path)
def delete_directory(self, irods_path):
"""delete the input irods_path.
:param irods_path: the iRODS path to be deleted
:return:
"""
self.irods_fed_storage.delete(irods_path)
def verify_user_quota_usage_avu_in_user_zone(self, attname, qsize):
'''
Have to use LINUX_ADMIN_USER_FOR_HS_USER_ZONE with rodsadmin role to get user type AVU
in user zone and verify its quota usage is set correctly
:param attname: quota usage attribute name set on iRODS proxy user in user zone
:param qsize: quota size (type string) to be verified to equal to the value set for attname.
'''
istorage = IrodsStorage()
istorage.set_user_session(username=settings.LINUX_ADMIN_USER_FOR_HS_USER_ZONE,
password=settings.LINUX_ADMIN_USER_PWD_FOR_HS_USER_ZONE,
host=settings.HS_USER_ZONE_HOST,
port=settings.IRODS_PORT,
zone=settings.HS_USER_IRODS_ZONE,
sess_id='user_proxy_session')
uz_bagit_path = os.path.join('/', settings.HS_USER_IRODS_ZONE, 'home',
settings.HS_IRODS_PROXY_USER_IN_USER_ZONE,
settings.IRODS_BAGIT_PATH)
get_qsize = istorage.getAVU(uz_bagit_path, attname)
self.assertEqual(qsize, get_qsize)
    def resource_file_oprs(self):
        """Test common iRODS file operations.

        This is a common test utility function to be called by both regular folder operation
        testing and federated zone folder operation testing.
        Make sure the calling TestCase object has the following attributes defined before calling
        this method:
        self.res: resource that has been created that contains files listed in file_name_list
        self.user: owner of the resource
        self.file_name_list: a list of three file names that have been added to the res object
        self.test_file_1 needs to be present for the calling object for doing regular folder
        operations without involving federated zone so that the same opened file can be re-added
        to the resource for testing the case where zipping cannot overwrite existing file
        """
        user = self.user
        res = self.res
        file_name_list = self.file_name_list
        # create a folder, if folder is created successfully, no exception is raised, otherwise,
        # an iRODS exception will be raised which will be caught by the test runner and mark as
        # a test failure
        create_folder(res.short_id, 'data/contents/sub_test_dir')
        istorage = res.get_irods_storage()
        res_path = res.file_path
        store = istorage.listdir(res_path)
        self.assertIn('sub_test_dir', store[0], msg='resource does not contain created sub-folder')
        # create a temporary zips folder to make sure no duplicate folders are returned from listdir()
        zip_res_coll_path = os.path.join('zips', '2020-02-03', res.short_id, 'data', 'contents', 'sub_test_dir')
        istorage.session.run("imkdir", None, '-p', zip_res_coll_path)
        store = istorage.listdir(res_path)
        self.assertEqual(store[0].count('sub_test_dir'), 1, msg='duplicate folder: sub_test_dir occurred more '
                                                                'than once')
        # rename the third file in file_name_list
        move_or_rename_file_or_folder(user, res.short_id,
                                      'data/contents/' + file_name_list[2],
                                      'data/contents/new_' + file_name_list[2])
        # move the first two files in file_name_list to the new folder
        move_or_rename_file_or_folder(user, res.short_id,
                                      'data/contents/' + file_name_list[0],
                                      'data/contents/sub_test_dir/' + file_name_list[0])
        move_or_rename_file_or_folder(user, res.short_id,
                                      'data/contents/' + file_name_list[1],
                                      'data/contents/sub_test_dir/' + file_name_list[1])
        # collect the resource's current short paths to verify the rename/moves
        updated_res_file_names = []
        for rf in ResourceFile.objects.filter(object_id=res.id):
            updated_res_file_names.append(rf.short_path)
        self.assertIn('new_' + file_name_list[2], updated_res_file_names,
                      msg="resource does not contain the updated file new_" + file_name_list[2])
        self.assertNotIn(file_name_list[2], updated_res_file_names,
                         msg='resource still contains the old file ' + file_name_list[2] +
                             ' after renaming')
        self.assertIn('sub_test_dir/' + file_name_list[0], updated_res_file_names,
                      msg='resource does not contain ' + file_name_list[0] + ' moved to a folder')
        self.assertNotIn(file_name_list[0], updated_res_file_names,
                         msg='resource still contains the old ' + file_name_list[0] +
                             'after moving to a folder')
        self.assertIn('sub_test_dir/' + file_name_list[1], updated_res_file_names,
                      msg='resource does not contain ' + file_name_list[1] +
                          'moved to a new folder')
        self.assertNotIn(file_name_list[1], updated_res_file_names,
                         msg='resource still contains the old ' + file_name_list[1] +
                             ' after moving to a folder')
        # zip the folder
        output_zip_fname, size = \
            zip_folder(user, res.short_id, 'data/contents/sub_test_dir',
                       'sub_test_dir.zip', True)
        self.assertGreater(size, 0, msg='zipped file has a size of 0')
        # Now resource should contain only two files: new_file3.txt and sub_test_dir.zip
        # since the folder is zipped into sub_test_dir.zip with the folder deleted
        self.assertEqual(res.files.all().count(), 2,
                         msg="resource file count didn't match-")
        # test unzip does not allow override of existing files
        # add an existing file in the zip to the resource
        if res.resource_federation_path:
            fed_test_file1_full_path = '/{zone}/home/{uname}/{fname}'.format(
                zone=settings.HS_USER_IRODS_ZONE, uname=user.username, fname=file_name_list[0])
            # TODO: why isn't this a method of resource?
            # TODO: Why do we repeat the resource_federation_path?
            add_resource_files(res.short_id, source_names=[fed_test_file1_full_path],
                               move=False)
        else:
            # TODO: Why isn't this a method of resource?
            add_resource_files(res.short_id, self.test_file_1)
        # TODO: use ResourceFile.create_folder, which doesn't require data/contents prefix
        create_folder(res.short_id, 'data/contents/sub_test_dir')
        # TODO: use ResourceFile.rename, which doesn't require data/contents prefix
        move_or_rename_file_or_folder(user, res.short_id,
                                      'data/contents/' + file_name_list[0],
                                      'data/contents/sub_test_dir/' + file_name_list[0])
        # Now resource should contain three files: file3_new.txt, sub_test_dir.zip, and file1.txt
        self.assertEqual(res.files.all().count(), 3, msg="resource file count didn't match")
        # unzip with overwrite disabled: extraction goes to a de-conflicted folder
        unzip_file(user, res.short_id, 'data/contents/sub_test_dir.zip', False)
        # Resource should still contain 5 files: file3_new.txt (2), sub_test_dir.zip,
        # and file1.txt (2)
        file_cnt = res.files.all().count()
        self.assertEqual(file_cnt, 5, msg="resource file count didn't match - " +
                                          str(file_cnt) + " != 5")
        # remove all files except the zippped file
        remove_folder(user, res.short_id, 'data/contents/sub_test_dir')
        remove_folder(user, res.short_id, 'data/contents/sub_test_dir-1')
        # Now resource should contain two files: file3_new.txt sub_test_dir.zip
        file_cnt = res.files.all().count()
        self.assertEqual(file_cnt, 2, msg="resource file count didn't match - " +
                                          str(file_cnt) + " != 2")
        # unzip with overwrite enabled: the zip file itself is removed afterwards
        unzip_file(user, res.short_id, 'data/contents/sub_test_dir.zip', True)
        # Now resource should contain three files: file1.txt, file2.txt, and file3_new.txt
        self.assertEqual(res.files.all().count(), 3, msg="resource file count didn't match")
        updated_res_file_names = []
        for rf in ResourceFile.objects.filter(object_id=res.id):
            updated_res_file_names.append(rf.short_path)
        self.assertNotIn('sub_test_dir.zip', updated_res_file_names,
                         msg="resource still contains the zip file after unzipping")
        self.assertIn('sub_test_dir/sub_test_dir/' + file_name_list[0], updated_res_file_names,
                      msg='resource does not contain unzipped file ' + file_name_list[0])
        self.assertIn('sub_test_dir/sub_test_dir/' + file_name_list[1], updated_res_file_names,
                      msg='resource does not contain unzipped file ' + file_name_list[1])
        self.assertIn('new_' + file_name_list[2], updated_res_file_names,
                      msg='resource does not contain unzipped file new_' + file_name_list[2])
        # rename a folder
        move_or_rename_file_or_folder(user, res.short_id,
                                      'data/contents/sub_test_dir/sub_test_dir',
                                      'data/contents/sub_dir')
        updated_res_file_names = []
        for rf in ResourceFile.objects.filter(object_id=res.id):
            updated_res_file_names.append(rf.short_path)
        self.assertNotIn('sub_test_dir/sub_test_dir/' + file_name_list[0], updated_res_file_names,
                         msg='resource still contains ' + file_name_list[0] +
                             ' in the old folder after renaming')
        self.assertIn('sub_dir/' + file_name_list[0], updated_res_file_names,
                      msg='resource does not contain ' + file_name_list[0] +
                          ' in the new folder after renaming')
        self.assertNotIn('sub_test_dir/sub_test_dir/' + file_name_list[1], updated_res_file_names,
                         msg='resource still contains ' + file_name_list[1] +
                             ' in the old folder after renaming')
        self.assertIn('sub_dir/' + file_name_list[1], updated_res_file_names,
                      msg='resource does not contain ' + file_name_list[1] +
                          ' in the new folder after renaming')
        # remove a folder
        # TODO: utilize ResourceFile.remove_folder instead. Takes a short path.
        remove_folder(user, res.short_id, 'data/contents/sub_dir')
        # Now resource only contains one file
        self.assertEqual(res.files.all().count(), 1, msg="resource file count didn't match")
        updated_res_file_names = []
        for rf in ResourceFile.objects.filter(object_id=res.id):
            updated_res_file_names.append(rf.short_path)
        self.assertEqual(len(updated_res_file_names), 1)
        self.assertEqual(updated_res_file_names[0], 'new_' + file_name_list[2])
    def raster_metadata_extraction(self):
        """Test raster metadata extraction.

        This is a common test utility function to be called by both regular raster metadata
        extraction testing and federated zone raster metadata extraction testing.
        Make sure the calling TestCase object has self.resRaster attribute defined before calling
        this method which is the raster resource that has been created containing valid raster
        files.

        The expected values below match the fixture raster files used by those tests.
        """
        # there should be 2 content files
        self.assertEqual(self.resRaster.files.all().count(), 2)
        # test core metadata after metadata extraction
        extracted_title = "My Test Raster Resource"
        self.assertEqual(self.resRaster.metadata.title.value, extracted_title)
        # there should be 1 creator
        self.assertEqual(self.resRaster.metadata.creators.all().count(), 1)
        # there should be 1 coverage element - box type
        self.assertEqual(self.resRaster.metadata.coverages.all().count(), 1)
        self.assertEqual(self.resRaster.metadata.coverages.all().filter(type='box').count(), 1)
        box_coverage = self.resRaster.metadata.coverages.all().filter(type='box').first()
        self.assertEqual(box_coverage.value['projection'], 'WGS 84 EPSG:4326')
        self.assertEqual(box_coverage.value['units'], 'Decimal degrees')
        self.assertEqual(float(box_coverage.value['northlimit']), 42.11270614966863)
        self.assertEqual(float(box_coverage.value['eastlimit']), -111.45699925047542)
        self.assertEqual(float(box_coverage.value['southlimit']), 41.66222054591102)
        self.assertEqual(float(box_coverage.value['westlimit']), -111.81761887121905)
        # there should be 2 format elements
        self.assertEqual(self.resRaster.metadata.formats.all().count(), 2)
        self.assertEqual(self.resRaster.metadata.formats.all().filter(
            value='application/vrt').count(), 1)
        self.assertEqual(self.resRaster.metadata.formats.all().filter(
            value='image/tiff').count(), 1)
        # testing extended metadata element: original coverage
        ori_coverage = self.resRaster.metadata.originalCoverage
        self.assertNotEqual(ori_coverage, None)
        self.assertEqual(float(ori_coverage.value['northlimit']), 4662392.446916306)
        self.assertEqual(float(ori_coverage.value['eastlimit']), 461954.01909127034)
        self.assertEqual(float(ori_coverage.value['southlimit']), 4612592.446916306)
        self.assertEqual(float(ori_coverage.value['westlimit']), 432404.01909127034)
        self.assertEqual(ori_coverage.value['units'], 'meter')
        self.assertEqual(ori_coverage.value['projection'], "NAD83 / UTM zone 12N")
        self.assertEqual(ori_coverage.value['datum'], "North_American_Datum_1983")
        # full WKT of the expected projection
        projection_string = 'PROJCS["NAD83 / UTM zone 12N",GEOGCS["NAD83",' \
                            'DATUM["North_American_Datum_1983",' \
                            'SPHEROID["GRS 1980",6378137,298.257222101,' \
                            'AUTHORITY["EPSG","7019"]],' \
                            'TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6269"]],' \
                            'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],' \
                            'UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],' \
                            'AUTHORITY["EPSG","4269"]],PROJECTION["Transverse_Mercator"],' \
                            'PARAMETER["latitude_of_origin",0],' \
                            'PARAMETER["central_meridian",-111],' \
                            'PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],' \
                            'PARAMETER["false_northing",0],' \
                            'UNIT["metre",1,AUTHORITY["EPSG","9001"]],' \
                            'AXIS["Easting",EAST],AXIS["Northing",' \
                            'NORTH],AUTHORITY["EPSG","26912"]]'
        self.assertEqual(ori_coverage.value['projection_string'], projection_string)
        # testing extended metadata element: cell information
        cell_info = self.resRaster.metadata.cellInformation
        self.assertEqual(cell_info.rows, 1660)
        self.assertEqual(cell_info.columns, 985)
        self.assertEqual(cell_info.cellSizeXValue, 30.0)
        self.assertEqual(cell_info.cellSizeYValue, 30.0)
        self.assertEqual(cell_info.cellDataType, 'Float32')
        # testing extended metadata element: band information
        self.assertEqual(self.resRaster.metadata.bandInformations.count(), 1)
        band_info = self.resRaster.metadata.bandInformations.first()
        self.assertEqual(band_info.noDataValue, '-3.4028234663852886e+38')
        self.assertEqual(band_info.maximumValue, '3031.443115234375')
        self.assertEqual(band_info.minimumValue, '1358.3345947265625')
def netcdf_metadata_extraction(self, expected_creators_count=1):
    """Test NetCDF metadata extraction.

    This is a common test utility function to be called by both regular netcdf metadata
    extraction testing and federated zone netCDF metadata extraction testing.

    Make sure the calling TestCase object has self.resNetcdf attribute defined before calling
    this method which is the netCDF resource that has been created containing valid netCDF
    files.

    :param expected_creators_count: number of creator metadata elements expected
        after extraction (defaults to 1).
    """
    # there should be 2 content files
    self.assertEqual(self.resNetcdf.files.all().count(), 2)
    # test core metadata after metadata extraction
    extracted_title = "Snow water equivalent estimation at TWDEF site from " \
                      "Oct 2009 to June 2010"
    self.assertEqual(self.resNetcdf.metadata.title.value, extracted_title)
    # there should be an abstract element
    self.assertNotEqual(self.resNetcdf.metadata.description, None)
    extracted_abstract = "This netCDF data is the simulation output from Utah Energy " \
                         "Balance (UEB) model.It includes the simulation result " \
                         "of snow water equivalent during the period " \
                         "Oct. 2009 to June 2010 for TWDEF site in Utah."
    self.assertEqual(self.resNetcdf.metadata.description.abstract, extracted_abstract)
    # there should be one relation element of type 'source'
    self.assertEqual(self.resNetcdf.metadata.relations.filter(type='source').count(), 1)
    # there should be one license element
    # NOTE(review): this only asserts the rights statement is not the integer 1,
    # which any real statement satisfies - confirm whether a stronger assertion
    # (e.g. `assertNotEqual(..., None)`) was intended.
    self.assertNotEqual(self.resNetcdf.metadata.rights.statement, 1)
    # there should be one relation element of type 'references'
    self.assertEqual(self.resNetcdf.metadata.relations.all().filter(type='references').count(), 1)
    # there should be creators equal to expected_creators_count
    self.assertEqual(self.resNetcdf.metadata.creators.all().count(), expected_creators_count)
    # there should be one contributor
    self.assertEqual(self.resNetcdf.metadata.contributors.all().count(), 1)
    # there should be 2 coverage elements - box type and period type
    self.assertEqual(self.resNetcdf.metadata.coverages.all().count(), 2)
    self.assertEqual(self.resNetcdf.metadata.coverages.all().filter(type='box').count(), 1)
    self.assertEqual(self.resNetcdf.metadata.coverages.all().filter(type='period').count(), 1)
    box_coverage = self.resNetcdf.metadata.coverages.all().filter(type='box').first()
    self.assertEqual(box_coverage.value['projection'], 'WGS 84 EPSG:4326')
    self.assertEqual(box_coverage.value['units'], 'Decimal degrees')
    self.assertEqual(float(box_coverage.value['northlimit']), 41.86712640899591)
    self.assertEqual(float(box_coverage.value['eastlimit']), -111.50594036845686)
    self.assertEqual(float(box_coverage.value['southlimit']), 41.8639080745171)
    self.assertEqual(float(box_coverage.value['westlimit']), -111.51138807956221)
    temporal_coverage = self.resNetcdf.metadata.coverages.all().filter(type='period').first()
    # compare as dates so time-of-day/timezone noise does not affect the test
    self.assertEqual(parser.parse(temporal_coverage.value['start']).date(),
                     parser.parse('10/01/2009').date())
    self.assertEqual(parser.parse(temporal_coverage.value['end']).date(),
                     parser.parse('05/30/2010').date())
    # there should be 2 format elements
    self.assertEqual(self.resNetcdf.metadata.formats.all().count(), 2)
    self.assertEqual(self.resNetcdf.metadata.formats.all().
                     filter(value='text/plain').count(), 1)
    self.assertEqual(self.resNetcdf.metadata.formats.all().
                     filter(value='application/x-netcdf').count(), 1)
    # there should be one subject element
    self.assertEqual(self.resNetcdf.metadata.subjects.all().count(), 1)
    subj_element = self.resNetcdf.metadata.subjects.all().first()
    self.assertEqual(subj_element.value, 'Snow water equivalent')
    # testing extended metadata element: original coverage
    ori_coverage = self.resNetcdf.metadata.ori_coverage.all().first()
    self.assertNotEqual(ori_coverage, None)
    self.assertEqual(ori_coverage.projection_string_type, 'Proj4 String')
    proj_text = '+proj=tmerc +y_0=0.0 +x_0=500000.0 +k_0=0.9996 +lat_0=0.0 +lon_0=-111.0'
    self.assertEqual(ori_coverage.projection_string_text, proj_text)
    self.assertEqual(float(ori_coverage.value['northlimit']), 4.63515e+06)
    self.assertEqual(float(ori_coverage.value['eastlimit']), 458010.0)
    self.assertEqual(float(ori_coverage.value['southlimit']), 4.63479e+06)
    self.assertEqual(float(ori_coverage.value['westlimit']), 457560.0)
    self.assertEqual(ori_coverage.value['units'], 'Meter')
    self.assertEqual(ori_coverage.value['projection'], 'transverse_mercator')
    # testing extended metadata element: variables
    self.assertEqual(self.resNetcdf.metadata.variables.all().count(), 5)
    # test time variable
    var_time = self.resNetcdf.metadata.variables.all().filter(name='time').first()
    self.assertNotEqual(var_time, None)
    self.assertEqual(var_time.unit, 'hours since 2009-10-1 0:0:00 UTC')
    self.assertEqual(var_time.type, 'Float')
    self.assertEqual(var_time.shape, 'time')
    self.assertEqual(var_time.descriptive_name, 'time')
    # test x variable
    var_x = self.resNetcdf.metadata.variables.all().filter(name='x').first()
    self.assertNotEqual(var_x, None)
    self.assertEqual(var_x.unit, 'Meter')
    self.assertEqual(var_x.type, 'Float')
    self.assertEqual(var_x.shape, 'x')
    self.assertEqual(var_x.descriptive_name, 'x coordinate of projection')
    # test y variable
    var_y = self.resNetcdf.metadata.variables.all().filter(name='y').first()
    self.assertNotEqual(var_y, None)
    self.assertEqual(var_y.unit, 'Meter')
    self.assertEqual(var_y.type, 'Float')
    self.assertEqual(var_y.shape, 'y')
    self.assertEqual(var_y.descriptive_name, 'y coordinate of projection')
    # test SWE variable
    var_swe = self.resNetcdf.metadata.variables.all().filter(name='SWE').first()
    self.assertNotEqual(var_swe, None)
    self.assertEqual(var_swe.unit, 'm')
    self.assertEqual(var_swe.type, 'Float')
    self.assertEqual(var_swe.shape, 'y,x,time')
    self.assertEqual(var_swe.descriptive_name, 'Snow water equivalent')
    self.assertEqual(var_swe.method, 'model simulation of UEB model')
    self.assertEqual(var_swe.missing_value, '-9999')
    # test grid mapping variable
    var_grid = self.resNetcdf.metadata.variables.all().\
        filter(name='transverse_mercator').first()
    self.assertNotEqual(var_grid, None)
    self.assertEqual(var_grid.unit, 'Unknown')
    self.assertEqual(var_grid.type, 'Unknown')
    self.assertEqual(var_grid.shape, 'Not defined')
def timeseries_metadata_extraction(self):
    """Test timeseries metadata extraction.

    This is a common test utility function to be called by both regular timeseries metadata
    extraction testing and federated zone timeseries metadata extraction testing.

    Make sure the calling TestCase object has self.resTimeSeries attribute defined before
    calling this method which is the timeseries resource that has been created containing
    valid timeseries file.
    """
    # there should be one content file
    self.assertEqual(self.resTimeSeries.files.all().count(), 1)
    # there should be one contributor element
    self.assertEqual(self.resTimeSeries.metadata.contributors.all().count(), 1)
    # test core metadata after metadata extraction
    extracted_title = "Water temperature data from the Little Bear River, UT"
    self.assertEqual(self.resTimeSeries.metadata.title.value, extracted_title)
    # there should be an abstract element
    self.assertNotEqual(self.resTimeSeries.metadata.description, None)
    extracted_abstract = "This dataset contains time series of observations of water " \
                         "temperature in the Little Bear River, UT. Data were recorded every " \
                         "30 minutes. The values were recorded using a HydroLab MS5 " \
                         "multi-parameter water quality sonde connected to a Campbell " \
                         "Scientific datalogger."
    self.assertEqual(self.resTimeSeries.metadata.description.abstract.strip(),
                     extracted_abstract)
    # there should be 2 coverage elements - box type and period type
    self.assertEqual(self.resTimeSeries.metadata.coverages.all().count(), 2)
    self.assertEqual(self.resTimeSeries.metadata.coverages.all().filter(type='box').count(), 1)
    self.assertEqual(self.resTimeSeries.metadata.coverages.all().filter(
        type='period').count(), 1)
    box_coverage = self.resTimeSeries.metadata.coverages.all().filter(type='box').first()
    self.assertEqual(box_coverage.value['projection'], 'WGS 84 EPSG:4326')
    self.assertEqual(box_coverage.value['units'], 'Decimal degrees')
    self.assertEqual(float(box_coverage.value['northlimit']), 41.718473)
    self.assertEqual(float(box_coverage.value['eastlimit']), -111.799324)
    self.assertEqual(float(box_coverage.value['southlimit']), 41.495409)
    self.assertEqual(float(box_coverage.value['westlimit']), -111.946402)
    temporal_coverage = self.resTimeSeries.metadata.coverages.all().filter(
        type='period').first()
    # compare as dates so time-of-day/timezone noise does not affect the test
    self.assertEqual(parser.parse(temporal_coverage.value['start']).date(),
                     parser.parse('01/01/2008').date())
    self.assertEqual(parser.parse(temporal_coverage.value['end']).date(),
                     parser.parse('01/30/2008').date())
    # there should be one format element
    self.assertEqual(self.resTimeSeries.metadata.formats.all().count(), 1)
    format_element = self.resTimeSeries.metadata.formats.all().first()
    self.assertEqual(format_element.value, 'application/sqlite')
    # there should be one subject element
    self.assertEqual(self.resTimeSeries.metadata.subjects.all().count(), 1)
    subj_element = self.resTimeSeries.metadata.subjects.all().first()
    self.assertEqual(subj_element.value, 'Temperature')
    # there should be a total of 7 timeseries
    self.assertEqual(self.resTimeSeries.metadata.time_series_results.all().count(), 7)
    # testing extended metadata elements
    # test 'site' - there should be 7 sites
    self.assertEqual(self.resTimeSeries.metadata.sites.all().count(), 7)
    # each site should be associated with one series id
    for site in self.resTimeSeries.metadata.sites.all():
        self.assertEqual(len(site.series_ids), 1)
    # test the data for a specific site
    site = self.resTimeSeries.metadata.sites.filter(site_code='USU-LBR-Paradise').first()
    self.assertNotEqual(site, None)
    site_name = 'Little Bear River at McMurdy Hollow near Paradise, Utah'
    self.assertEqual(site.site_name, site_name)
    self.assertEqual(site.elevation_m, 1445)
    self.assertEqual(site.elevation_datum, 'NGVD29')
    self.assertEqual(site.site_type, 'Stream')
    # test 'variable' - there should be 1 variable element
    self.assertEqual(self.resTimeSeries.metadata.variables.all().count(), 1)
    variable = self.resTimeSeries.metadata.variables.all().first()
    # there should be 7 series ids associated with this one variable
    self.assertEqual(len(variable.series_ids), 7)
    # test the data for a variable
    self.assertEqual(variable.variable_code, 'USU36')
    self.assertEqual(variable.variable_name, 'Temperature')
    self.assertEqual(variable.variable_type, 'Water Quality')
    self.assertEqual(variable.no_data_value, -9999)
    self.assertEqual(variable.variable_definition, None)
    self.assertEqual(variable.speciation, 'Not Applicable')
    # test 'method' - there should be 1 method element
    self.assertEqual(self.resTimeSeries.metadata.methods.all().count(), 1)
    method = self.resTimeSeries.metadata.methods.all().first()
    # there should be 7 series ids associated with this one method element
    self.assertEqual(len(method.series_ids), 7)
    self.assertEqual(method.method_code, '28')
    method_name = 'Quality Control Level 1 Data Series created from raw QC Level 0 data ' \
                  'using ODM Tools.'
    self.assertEqual(method.method_name, method_name)
    self.assertEqual(method.method_type, 'Instrument deployment')
    method_des = 'Quality Control Level 1 Data Series created from raw QC Level 0 data ' \
                 'using ODM Tools.'
    self.assertEqual(method.method_description, method_des)
    self.assertEqual(method.method_link, None)
    # test 'processing_level' - there should be 1 processing_level element
    self.assertEqual(self.resTimeSeries.metadata.processing_levels.all().count(), 1)
    proc_level = self.resTimeSeries.metadata.processing_levels.all().first()
    # there should be 7 series ids associated with this one element
    self.assertEqual(len(proc_level.series_ids), 7)
    self.assertEqual(proc_level.processing_level_code, '1')
    self.assertEqual(proc_level.definition, 'Quality controlled data')
    explanation = 'Quality controlled data that have passed quality assurance procedures ' \
                  'such as routine estimation of timing and sensor calibration or visual ' \
                  'inspection and removal of obvious errors. An example is USGS published ' \
                  'streamflow records following parsing through USGS quality control ' \
                  'procedures.'
    self.assertEqual(proc_level.explanation, explanation)
    # test 'timeseries_result' - there should be 7 timeseries_result elements
    self.assertEqual(self.resTimeSeries.metadata.time_series_results.all().count(), 7)
    ts_result = self.resTimeSeries.metadata.time_series_results.filter(
        series_ids__contains=['182d8fa3-1ebc-11e6-ad49-f45c8999816f']).first()
    self.assertNotEqual(ts_result, None)
    # there should be only 1 series id associated with this element
    self.assertEqual(len(ts_result.series_ids), 1)
    self.assertEqual(ts_result.units_type, 'Temperature')
    self.assertEqual(ts_result.units_name, 'degree celsius')
    self.assertEqual(ts_result.units_abbreviation, 'degC')
    self.assertEqual(ts_result.status, 'Unknown')
    self.assertEqual(ts_result.sample_medium, 'Surface Water')
    self.assertEqual(ts_result.value_count, 1441)
    self.assertEqual(ts_result.aggregation_statistics, 'Average')
    # test for CV lookup tables
    # there should be 23 CV_VariableType records
    self.assertEqual(self.resTimeSeries.metadata.cv_variable_types.all().count(), 23)
    # there should be 805 CV_VariableName records
    self.assertEqual(self.resTimeSeries.metadata.cv_variable_names.all().count(), 805)
    # there should be 145 CV_Speciation records
    self.assertEqual(self.resTimeSeries.metadata.cv_speciations.all().count(), 145)
    # there should be 51 CV_SiteType records
    self.assertEqual(self.resTimeSeries.metadata.cv_site_types.all().count(), 51)
    # there should be 5 CV_ElevationDatum records
    self.assertEqual(self.resTimeSeries.metadata.cv_elevation_datums.all().count(), 5)
    # there should be 25 CV_MethodType records
    self.assertEqual(self.resTimeSeries.metadata.cv_method_types.all().count(), 25)
    # there should be 179 CV_UnitsType records
    self.assertEqual(self.resTimeSeries.metadata.cv_units_types.all().count(), 179)
    # there should be 4 CV_Status records
    self.assertEqual(self.resTimeSeries.metadata.cv_statuses.all().count(), 4)
    # there should be 18 CV_Medium records
    # NOTE(review): the original comment said 17 but the assertion expects 18 -
    # confirm which count is correct.
    self.assertEqual(self.resTimeSeries.metadata.cv_mediums.all().count(), 18)
    # there should be 17 CV_aggregationStatistics records
    self.assertEqual(self.resTimeSeries.metadata.cv_aggregation_statistics.all().count(), 17)
    # there should not be any UTCOffset element
    self.assertEqual(self.resTimeSeries.metadata.utc_offset, None)
class ViewTestCase(TestCase):
    """Test basic view functionality."""

    def setUp(self):
        """Create request factory and set temp_dir for testing."""
        self.factory = RequestFactory()
        self.temp_dir = tempfile.mkdtemp()
        super(ViewTestCase, self).setUp()

    def tearDown(self):
        """Remove the temporary directory created in setUp.

        The original class never cleaned up the mkdtemp directory, leaking one
        directory per test run.
        """
        import shutil
        shutil.rmtree(self.temp_dir, ignore_errors=True)
        super(ViewTestCase, self).tearDown()

    @staticmethod
    def set_request_message_attributes(request):
        """Set session and _messages attributes on request.

        The view being tested uses the messaging middleware, so the request
        needs stand-in session/_messages attributes to avoid errors in unit
        tests.
        """
        setattr(request, 'session', 'session')
        messages = FallbackStorage(request)
        setattr(request, '_messages', messages)

    @staticmethod
    def add_session_to_request(request):
        """Annotate a request object with a session via SessionMiddleware."""
        # (The original had a second, dead bare-string "docstring" here; the
        # two have been merged into the single docstring above.)
        middleware = SessionMiddleware()
        middleware.process_request(request)
        request.session.save()
| |
import json
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.conf import settings
from django.views.decorators.http import require_http_methods, require_GET
from django.template import RequestContext
from django.contrib.auth import authenticate, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import render_to_response
from django.utils.decorators import decorator_from_middleware
from lrs.util import req_validate, req_parse, req_process, XAPIVersionHeaderMiddleware, accept_middleware, StatementValidator
from lrs import forms, models, exceptions
from oauth_provider.consts import ACCEPTED, CONSUMER_STATES
import logging
logger = logging.getLogger(__name__)
@decorator_from_middleware(accept_middleware.AcceptMiddleware)
def home(request):
    """Render the LRS landing page."""
    context = RequestContext(request)
    return render_to_response('home.html', context_instance=context)
@decorator_from_middleware(accept_middleware.AcceptMiddleware)
def stmt_validator(request):
if request.method == 'GET':
form = forms.ValidatorForm()
return render_to_response('validator.html', {"form": form}, context_instance=RequestContext(request))
elif request.method == 'POST':
form = forms.ValidatorForm(request.POST)
if form.is_valid():
# Initialize validator (validates incoming data structure)
try:
validator = StatementValidator.StatementValidator(form.cleaned_data['jsondata'])
except SyntaxError, se:
return render_to_response('validator.html', {"form": form, "error_message": "Statement is not a properly formatted dictionary"},
context_instance=RequestContext(request))
except ValueError, ve:
return render_to_response('validator.html', {"form": form, "error_message": "Statement is not a properly formatted dictionary"},
context_instance=RequestContext(request))
except Exception, e:
return render_to_response('validator.html', {"form": form, "error_message": e.message},
context_instance=RequestContext(request))
# Once know it's valid JSON, validate keys and fields
try:
valid = validator.validate()
except exceptions.ParamError, e:
return render_to_response('validator.html', {"form": form,"error_message": e.message},
context_instance=RequestContext(request))
else:
return render_to_response('validator.html', {"form": form,"valid_message": valid},
context_instance=RequestContext(request))
else:
return render_to_response('validator.html', {"form": form},
context_instance=RequestContext(request))
@decorator_from_middleware(accept_middleware.AcceptMiddleware)
def about(request):
    """Describe this LRS as a JSON document: xAPI version plus every exposed
    endpoint (xAPI, LRS-specific and OAuth) with its name, allowed methods,
    reversed URL and accepted content types."""
    lrs_data = {
        "version": "1.0.0",
        "Extensions": {
            # Core xAPI resource endpoints
            "xapi": {
                "statements":
                {
                    "name": "Statements",
                    "methods": ["GET", "POST", "PUT", "HEAD"],
                    "endpoint": reverse('lrs.views.statements'),
                    "description": "Endpoint to submit and retrieve XAPI statments.",
                    "content-types": []
                },
                "activities":
                {
                    "name": "Activities",
                    "methods": ["GET", "HEAD"],
                    "endpoint": reverse('lrs.views.activities'),
                    "description": "Endpoint to retrieve a complete activity object.",
                    "content-types": []
                },
                "activities_state":
                {
                    "name": "Activities State",
                    "methods": ["PUT", "POST", "GET", "DELETE", "HEAD"],
                    "endpoint": reverse('lrs.views.activity_state'),
                    "description": "Stores, fetches, or deletes the document specified by the given stateId that exists in the context of the specified activity, agent, and registration (if specified).",
                    "content-types": []
                },
                "activities_profile":
                {
                    "name": "Activities Profile",
                    "methods": ["PUT", "POST", "GET", "DELETE", "HEAD"],
                    "endpoint": reverse('lrs.views.activity_profile'),
                    "description": "Saves/retrieves/deletes the specified profile document in the context of the specified activity.",
                    "content-types": []
                },
                "agents":
                {
                    "name": "Agents",
                    "methods": ["GET", "HEAD"],
                    "endpoint": reverse('lrs.views.agents'),
                    "description": "Returns a special, Person object for a specified agent.",
                    "content-types": []
                },
                "agents_profile":
                {
                    "name": "Agent Profile",
                    "methods": ["PUT", "POST", "GET", "DELETE", "HEAD"],
                    "endpoint": reverse('lrs.views.agent_profile'),
                    "description": "Saves/retrieves/deletes the specified profile document in the context of the specified agent.",
                    "content-types": []
                }
            },
            # LRS-specific (non-spec) endpoints
            "lrs": {
                "user_register":
                {
                    "name": "User Registration",
                    "methods": ["POST"],
                    "endpoint": reverse('lrs.views.register'),
                    "description": "Registers a user within the LRS.",
                    "content-types": ["application/x-www-form-urlencoded"]
                },
                "client_register":
                {
                    "name": "Client Registration",
                    "methods": ["POST"],
                    "endpoint": reverse('lrs.views.reg_client'),
                    "description": "Registers a client applicaton with the LRS.",
                    "content-types": ["application/x-www-form-urlencoded"]
                }
            },
            # OAuth 1.0 three-legged flow endpoints
            "oauth":
            {
                "initiate":
                {
                    "name": "Oauth Initiate",
                    "methods": ["POST"],
                    "endpoint": reverse('oauth_provider.views.request_token'),
                    "description": "Authorize a client and return temporary credentials.",
                    "content-types": ["application/x-www-form-urlencoded"]
                },
                "authorize":
                {
                    "name": "Oauth Authorize",
                    "methods": ["GET"],
                    "endpoint": reverse('oauth_provider.views.user_authorization'),
                    "description": "Authorize a user.",
                    "content-types": []
                },
                "token":
                {
                    "name": "Oauth Token",
                    "methods": ["POST"],
                    "endpoint": reverse('oauth_provider.views.access_token'),
                    "description": "Provides Oauth token to the client.",
                    "content-types": ["application/x-www-form-urlencoded"]
                }
            }
        }
    }
    # Stream the JSON out rather than building one big string.
    return HttpResponse(req_process.stream_response_generator(lrs_data), mimetype="application/json", status=200)
def actexample(request):
    """Serve the static actexample.json template as JSON."""
    template = 'actexample.json'
    return render_to_response(template, mimetype="application/json")


def actexample2(request):
    """Serve the static actexample2.json template as JSON."""
    template = 'actexample2.json'
    return render_to_response(template, mimetype="application/json")


def actexample3(request):
    """Serve the static actexample3.json template as JSON."""
    template = 'actexample3.json'
    return render_to_response(template, mimetype="application/json")


def actexample4(request):
    """Serve the static actexample4.json template as JSON."""
    template = 'actexample4.json'
    return render_to_response(template, mimetype="application/json")
def register(request):
    """Register a new LRS user account.

    GET renders the registration form.  POST creates the user; if the
    username already exists, the supplied password is checked so an existing
    user re-submitting their own credentials is not treated as an error.
    Other methods raise Http404.
    """
    if request.method == 'GET':
        form = forms.RegisterForm()
        return render_to_response('register.html', {"form": form}, context_instance=RequestContext(request))
    elif request.method == 'POST':
        form = forms.RegisterForm(request.POST)
        if form.is_valid():
            name = form.cleaned_data['username']
            pword = form.cleaned_data['password']
            email = form.cleaned_data['email']
            try:
                user = User.objects.get(username__exact=name)
                # Name already taken: only proceed if the password matches.
                user = authenticate(username=name, password=pword)
                if user is None:
                    return render_to_response('register.html', {"form": form, "error_message": "%s's password was incorrect." % name},
                                              context_instance=RequestContext(request))
            except User.DoesNotExist:
                user = User.objects.create_user(name, email, pword)
            d = {"info_message": "Thanks for registering %s" % user.username}
            return render_to_response('reg_success.html', d, context_instance=RequestContext(request))
        else:
            return render_to_response('register.html', {"form": form}, context_instance=RequestContext(request))
    else:
        # BUG FIX: the original did `return Http404`, handing back the
        # exception *class* as if it were a response.  Http404 must be raised
        # for Django to produce a 404.
        raise Http404
@login_required(login_url="/XAPI/accounts/login")
def reg_client(request):
    """Register an OAuth client application for the logged-in user.

    GET renders the registration form; POST creates the consumer (rejecting
    duplicate names) and shows its generated key/secret.  Other methods raise
    Http404.
    """
    if request.method == 'GET':
        form = forms.RegClientForm()
        return render_to_response('regclient.html', {"form": form}, context_instance=RequestContext(request))
    elif request.method == 'POST':
        form = forms.RegClientForm(request.POST)
        if form.is_valid():
            name = form.cleaned_data['name']
            description = form.cleaned_data['description']
            scopes = form.cleaned_data['scopes']
            try:
                client = models.Consumer.objects.get(name__exact=name)
            except models.Consumer.DoesNotExist:
                # Name is free: create the consumer with the requested scopes.
                client = models.Consumer.objects.create(name=name, description=description, user=request.user,
                                                        status=ACCEPTED, default_scopes=",".join(scopes))
            else:
                # Fixed user-facing typo: was "alreay exists."
                return render_to_response('regclient.html', {"form": form, "error_message": "%s already exists." % name}, context_instance=RequestContext(request))
            d = {"name": client.name, "app_id": client.key, "secret": client.secret, "info_message": "Your Client Credentials"}
            return render_to_response('reg_success.html', d, context_instance=RequestContext(request))
        else:
            return render_to_response('regclient.html', {"form": form}, context_instance=RequestContext(request))
    else:
        # BUG FIX: Http404 is an exception and must be raised, not returned.
        raise Http404
@login_required(login_url="/XAPI/accounts/login")
def me(request):
    """Show the current user's client apps and approved access tokens."""
    apps = models.Consumer.objects.filter(user=request.user)
    tokens = models.Token.objects.filter(user=request.user,
                                         token_type=models.Token.ACCESS,
                                         is_approved=True)
    context = {'client_apps': apps, 'access_tokens': tokens}
    return render_to_response('me.html', context,
                              context_instance=RequestContext(request))
@login_required(login_url="/XAPI/accounts/login")
def my_statements(request):
    """List, fetch or delete statements visible to the logged-in user.

    DELETE with stmt_id removes one statement; DELETE without removes all of
    the user's statements.  GET with stmt_id returns that statement as JSON;
    otherwise returns a paginated JSON list filtered by the optional
    user_filter ("0" = everyone, "1" = current user, anything else = a
    username), verb_filter and object_filter query parameters.  Any failure
    is reported as a 400 response carrying the exception text.
    """
    try:
        if request.method == "DELETE":
            stmt_id = request.GET.get("stmt_id", None)
            if stmt_id:
                models.Statement.objects.filter(statement_id=stmt_id, user=request.user).delete()
                # Re-query to confirm the delete actually took effect.
                stmt = models.Statement.objects.filter(statement_id=stmt_id, user=request.user)
                if not stmt:
                    return HttpResponse(status=204)
                raise Exception("unable to delete statement")
            else:
                models.Statement.objects.filter(user=request.user).delete()
                stmts = models.Statement.objects.filter(user=request.user)
                if not stmts:
                    return HttpResponse(status=204)
                raise Exception("unable to delete statements")
        stmt_id = request.GET.get("stmt_id", None)
        if stmt_id:
            s = models.Statement.objects.get(statement_id=stmt_id)
            return HttpResponse(json.dumps(s.object_return()), mimetype="application/json", status=200)
        statements = {}
        s = {}
        slist = []
        userFilter = request.GET.get("user_filter", None)
        verbFilter = request.GET.get("verb_filter", None)
        objectFilter = request.GET.get("object_filter", None)
        if userFilter == "0":
            # "0": statements from all users.
            statements = models.Statement.objects.order_by('-timestamp')
            if verbFilter:
                try:
                    vFilter = models.Verb.objects.get(verb_id=verbFilter)
                    statements = statements.filter(verb=vFilter)
                except Exception:
                    # Unknown verb id: nothing to show.
                    return HttpResponse(status=204)
        elif userFilter == "1":
            # "1": only the current user's statements.
            statements = models.Statement.objects.filter(user=request.user).order_by('-timestamp')
            if verbFilter:
                try:
                    vFilter = models.Verb.objects.get(verb_id=verbFilter)
                    statements = statements.filter(verb=vFilter)
                except Exception:
                    return HttpResponse(status=204)
        else:
            # Anything else is treated as a username to filter on.
            # NOTE(review): a missing user_filter (None) raises AttributeError
            # here and yields a 400, matching the original behaviour.
            userFilter = userFilter.strip()
            try:
                uFilter = User.objects.get(username=userFilter)
                statements = models.Statement.objects.filter(user=uFilter).order_by('-timestamp')
                if verbFilter:
                    try:
                        vFilter = models.Verb.objects.get(verb_id=verbFilter)
                        statements = statements.filter(verb=vFilter)
                    except Exception:
                        # BUG FIX: the original did `vFilter = null`, an
                        # undefined name that raised NameError and turned an
                        # unknown verb into a 400.  An unknown verb now simply
                        # leaves the statements unfiltered.
                        vFilter = None
            except Exception:
                if len(statements) == 0:
                    statements = {}
        for stmt in statements:
            d = {}
            d['timestamp'] = stmt.timestamp.isoformat()
            d['statement_id'] = stmt.statement_id
            d['actor_name'] = stmt.actor.get_a_name()
            d['verb'] = stmt.verb.get_display()
            d['verb_id'] = stmt.verb.get_id()
            stmtobj = stmt.get_object()
            d['object'] = stmtobj.get_a_name()
            searchstring = stmtobj.get_search_index()
            if "desktopapp" in searchstring:
                splitUp = searchstring.split("desktopapp/")
                searchstring = splitUp[1].strip("with")
            if objectFilter:
                # BUG FIX: the original tested `not objectFilter == ""`, which
                # crashed with AttributeError when object_filter was absent
                # (None).  An absent/empty filter now matches everything.
                objectFilter = objectFilter.strip()
                # Mini query language: terms separated by AND, alternatives
                # within a term separated by OR; a statement matches when
                # every AND-term has at least one OR-alternative present.
                andList = objectFilter.split("AND")
                andCount = 0
                for andStr in andList:
                    orList = andStr.split("OR")
                    for orStr in orList:
                        # (The original also called orStr.strip("AND")/("OR")
                        # and discarded the results - no-ops, removed.)
                        if orStr.strip().lower() in searchstring.lower():
                            andCount += 1
                            break
                if andCount >= len(andList):
                    slist.append(d)
            else:
                slist.append(d)
        paginator = Paginator(slist, settings.STMTS_PER_PAGE)
        page_no = request.GET.get('page', 1)
        try:
            page = paginator.page(page_no)
        except PageNotAnInteger:
            # If page is not an integer, deliver first page.
            page = paginator.page(1)
        except EmptyPage:
            # If page is out of range (e.g. 9999), deliver last page of results.
            page = paginator.page(paginator.num_pages)
        s['stmts'] = page.object_list
        if page.has_previous():
            s['previous'] = "%s?page=%s" % (reverse('lrs.views.my_statements'), page.previous_page_number())
        if page.has_next():
            s['next'] = "%s?page=%s" % (reverse('lrs.views.my_statements'), page.next_page_number())
        return HttpResponse(json.dumps(s), mimetype="application/json", status=200)
    except Exception as e:
        return HttpResponse(e, status=400)
@login_required(login_url="/XAPI/accounts/login")
def my_groups(request):
    """Manage the logged-in user's statement groups.

    DELETE removes a statement from a group (stmt_id + group_id) or deletes a
    whole group (group_id only).  POST either creates a new group (name) or
    adds statements to an existing one (group_id + stmts[]).  GET returns the
    paginated statements of one group (group_id) or the paginated list of all
    of the user's groups.  Failures are reported as 400 responses.
    """
    try:
        if request.method == "DELETE":
            group_id = request.GET.get("group_id", None)
            stmt_id = request.GET.get("stmt_id", None)
            if stmt_id and group_id:
                s = models.Statement.objects.get(statement_id=stmt_id)
                models.Group.objects.get(user=request.user, id=group_id).statements.remove(s)
                # Re-query to verify the statement is gone from the group.
                stmt = models.Group.objects.get(user=request.user, id=group_id).statements.filter(user=request.user, statement_id=stmt_id)
                if not stmt:
                    return HttpResponse(status=204)
                raise Exception("Failed to remove stmt: " + stmt_id + ", from group: " + group_id)
            if group_id:
                models.Group.objects.get(user=request.user, id=group_id).delete()
                g = models.Group.objects.filter(user=request.user, id=group_id)
                if not g:
                    return HttpResponse(status=204)
                raise Exception("Failed To Delete Group: " + group_id)
            else:
                raise Exception("Invalid Parameters")
        if request.method == "POST":
            name = request.POST.get('name', None)
            if name is not None:
                # BUG FIX: the original tested `if name:` first, which made the
                # empty-name check below unreachable; an empty name is now
                # rejected with the intended 400.
                if name == "":
                    return HttpResponse(json.dumps({"error_message":"invalid group name"}), status=400)
                new_group = models.Group(name=name, user=request.user)
                new_group.save()
                s = new_group.id
                return HttpResponse(json.dumps(s), mimetype="application/json", status=200)
            group_id = request.POST.get('group_id', None)
            stmts = request.POST.getlist('stmts[]', None)
            if group_id:
                group = models.Group.objects.get(user=request.user, id=group_id)
                for stmt_id in stmts:
                    stmt = models.Statement.objects.get(statement_id=stmt_id)
                    group.statements.add(stmt)
                return HttpResponse(status=204)
            else:
                raise Exception("Invalid POST method")
        group_id = request.GET.get("group_id", None)
        if group_id:
            # GET with group_id: paginated statements of that group.
            g = {}
            glist = []
            for stmt in models.Group.objects.get(user=request.user, id=group_id).statements.all().order_by('-timestamp'):
                d = {}
                d['statement_id'] = stmt.statement_id
                d['actor_name'] = stmt.actor.get_a_name()
                d['verb'] = stmt.verb.get_display()
                d['verb_id'] = stmt.verb.get_id()
                stmtobj = stmt.get_object()
                d['object'] = stmtobj.get_a_name()
                glist.append(d)
            paginator = Paginator(glist, 5)
            page_no = request.GET.get('page', 1)
            try:
                page = paginator.page(page_no)
            except PageNotAnInteger:
                # If page is not an integer, deliver first page.
                page = paginator.page(1)
            except EmptyPage:
                # If page is out of range (e.g. 9999), deliver last page of results.
                page = paginator.page(paginator.num_pages)
            g['stmts'] = page.object_list
            if page.has_previous():
                g['previous'] = "%s?group_id=%s&page=%s" % (reverse('lrs.views.my_groups'), group_id, page.previous_page_number())
            if page.has_next():
                g['next'] = "%s?group_id=%s&page=%s" % (reverse('lrs.views.my_groups'), group_id, page.next_page_number())
            return HttpResponse(json.dumps(g), mimetype="application/json", status=200)
        else:
            # GET without group_id: paginated list of the user's groups.
            s = {}
            slist = []
            for grp in models.Group.objects.filter(user=request.user).order_by('name'):
                d = {}
                d['name'] = grp.name
                d['id'] = grp.id
                slist.append(d)
            paginator = Paginator(slist, settings.STMTS_PER_PAGE)
            page_no = request.GET.get('page', 1)
            try:
                page = paginator.page(page_no)
            except PageNotAnInteger:
                page = paginator.page(1)
            except EmptyPage:
                page = paginator.page(paginator.num_pages)
            s['groups'] = page.object_list
            if page.has_previous():
                s['previous'] = "%s?page=%s" % (reverse('lrs.views.my_groups'), page.previous_page_number())
            if page.has_next():
                s['next'] = "%s?page=%s" % (reverse('lrs.views.my_groups'), page.next_page_number())
            return HttpResponse(json.dumps(s), mimetype="application/json", status=200)
    except Exception as e:
        return HttpResponse(e, status=400)
@login_required(login_url="/XAPI/accounts/login")
def my_app_status(request):
    """Update the status of one of the user's registered client apps.

    Expects app_name and a human-readable status in the query string; maps the
    display value back to its CONSUMER_STATES code and saves it on the
    consumer.  Any failure yields a generic 400 JSON error.
    """
    try:
        name = request.GET['app_name']
        status = request.GET['status']
        # Map display value back to its state code; should only be 1 match.
        new_status = [s[0] for s in CONSUMER_STATES if s[1] == status][0]
        client = models.Consumer.objects.get(name__exact=name, user=request.user)
        client.status = new_status
        client.save()
        ret = {"app_name": client.name, "status": client.get_status_display()}
        return HttpResponse(json.dumps(ret), mimetype="application/json", status=200)
    except Exception:
        # Deliberate best-effort: missing params, unknown status or unknown app
        # all map to the same 400.  Was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        return HttpResponse(json.dumps({"error_message":"unable to fulfill request"}), mimetype="application/json", status=400)
@login_required(login_url="/XAPI/accounts/login")
def delete_token(request):
    """Revoke (disapprove) one of the requesting user's OAuth access tokens.

    Expects a DELETE with GET param 'id' formatted as
    '<key prefix>-<consumer id>-<timestamp>'. Returns 204 on success,
    400 if the token cannot be located, 404 for any other method.
    """
    if request.method == "DELETE":
        try:
            ids = request.GET['id'].split("-")
            token_key = ids[0]
            consumer_id = ids[1]
            ts = ids[2]
            token = models.Token.objects.get(user=request.user,
                                             key__startswith=token_key,
                                             consumer__id=consumer_id,
                                             timestamp=ts,
                                             token_type=models.Token.ACCESS,
                                             is_approved=True)
            # Soft-revoke: flip the approval flag instead of deleting the row.
            token.is_approved = False
            token.save()
            return HttpResponse("", status=204)
        except Exception:
            # Narrowed from a bare `except:`; malformed ids (IndexError/KeyError)
            # and a missing token (DoesNotExist) both surface as a 400.
            return HttpResponse("Unknown token", status=400)
    # BUG FIX: Http404 is an exception class, not an HttpResponse subclass.
    # Returning an instance made Django error out with a 500; raising it
    # produces the intended 404.
    raise Http404("Unknown Request")
def logout_view(request):
    """Log the current user out, then redirect to the home page."""
    logout(request)
    home_url = reverse('lrs.views.home')
    return HttpResponseRedirect(home_url)
# Called when user queries GET statement endpoint and returned list is larger than server limit (10)
@decorator_from_middleware(XAPIVersionHeaderMiddleware.XAPIVersionHeader)
@require_http_methods(["GET", "HEAD"])
def statements_more(request, more_id):
    # Thin dispatcher: validation and processing happen in handle_request,
    # keyed on the request path and method; more_id selects the cached page.
    return handle_request(request, more_id)
@require_http_methods(["PUT","GET","POST", "HEAD"])
@decorator_from_middleware(XAPIVersionHeaderMiddleware.XAPIVersionHeader)
def statements(request):
    # xAPI statements resource; all verbs funnel through handle_request.
    return handle_request(request)
@require_http_methods(["PUT","POST","GET","DELETE", "HEAD"])
@decorator_from_middleware(XAPIVersionHeaderMiddleware.XAPIVersionHeader)
def activity_state(request):
    # xAPI activity state document resource; dispatched via handle_request.
    return handle_request(request)
@require_http_methods(["PUT","POST","GET","DELETE", "HEAD"])
@decorator_from_middleware(XAPIVersionHeaderMiddleware.XAPIVersionHeader)
def activity_profile(request):
    # xAPI activity profile document resource; dispatched via handle_request.
    return handle_request(request)
@require_http_methods(["GET", "HEAD"])
@decorator_from_middleware(XAPIVersionHeaderMiddleware.XAPIVersionHeader)
def activities(request):
    # Read-only xAPI activities resource; dispatched via handle_request.
    return handle_request(request)
@require_http_methods(["PUT","POST","GET","DELETE", "HEAD"])
@decorator_from_middleware(XAPIVersionHeaderMiddleware.XAPIVersionHeader)
def agent_profile(request):
    # xAPI agent profile document resource; dispatched via handle_request.
    return handle_request(request)
# returns a 405 (Method Not Allowed) if not a GET
@require_http_methods(["GET", "HEAD"])
@decorator_from_middleware(XAPIVersionHeaderMiddleware.XAPIVersionHeader)
def agents(request):
    # Read-only xAPI agents resource; dispatched via handle_request.
    return handle_request(request)
# THIS VIEW IS BEING USED
def oauth_authorize(request, request_token, callback_url, params):
    # Minimal placeholder page shown during the OAuth authorize step.
    # NOTE(review): params are interpolated into the HTML unescaped —
    # verify upstream that they cannot contain user-controlled markup.
    rsp = """
    <html><head></head><body><h1>Oauth Authorize</h1><h2>%s</h2></body></html>""" % params
    return HttpResponse(rsp)
@login_required
def user_profile(request):
    # Render the static registration profile template for the signed-in user.
    return render_to_response('registration/profile.html')
# Maps each endpoint path (lowercased, to match handle_request's lowercased
# lookup) and HTTP method to the request validator run before processing.
validators = {
    reverse(statements).lower() : {
        "POST" : req_validate.statements_post,
        "GET" : req_validate.statements_get,
        "PUT" : req_validate.statements_put,
        "HEAD" : req_validate.statements_get
    },
    reverse(activity_state).lower() : {
        "POST": req_validate.activity_state_post,
        "PUT" : req_validate.activity_state_put,
        "GET" : req_validate.activity_state_get,
        "HEAD" : req_validate.activity_state_get,
        "DELETE" : req_validate.activity_state_delete
    },
    reverse(activity_profile).lower() : {
        "POST": req_validate.activity_profile_post,
        "PUT" : req_validate.activity_profile_put,
        "GET" : req_validate.activity_profile_get,
        "HEAD" : req_validate.activity_profile_get,
        "DELETE" : req_validate.activity_profile_delete
    },
    reverse(activities).lower() : {
        "GET" : req_validate.activities_get,
        "HEAD" : req_validate.activities_get
    },
    # BUG FIX: .lower() was missing on this key alone. handle_request
    # lowercases the request path before lookup and every sibling key (here
    # and in `processors`) is lowercased, so a reversed URL containing any
    # uppercase character could never match this entry.
    reverse(agent_profile).lower() : {
        "POST": req_validate.agent_profile_post,
        "PUT" : req_validate.agent_profile_put,
        "GET" : req_validate.agent_profile_get,
        "HEAD" : req_validate.agent_profile_get,
        "DELETE" : req_validate.agent_profile_delete
    },
    reverse(agents).lower() : {
        "GET" : req_validate.agents_get,
        "HEAD" : req_validate.agents_get
    },
    "/xapi/statements/more" : {
        "GET" : req_validate.statements_more_get,
        "HEAD" : req_validate.statements_more_get
    }
}
# Maps each endpoint path (lowercased) and HTTP method to the handler that
# actually services the request, mirroring the structure of `validators`.
processors = {
    reverse(statements).lower() : {
        "POST" : req_process.statements_post,
        "GET" : req_process.statements_get,
        "HEAD" : req_process.statements_get,
        "PUT" : req_process.statements_put
    },
    reverse(activity_state).lower() : {
        "POST": req_process.activity_state_post,
        "PUT" : req_process.activity_state_put,
        "GET" : req_process.activity_state_get,
        "HEAD" : req_process.activity_state_get,
        "DELETE" : req_process.activity_state_delete
    },
    reverse(activity_profile).lower() : {
        "POST": req_process.activity_profile_post,
        "PUT" : req_process.activity_profile_put,
        "GET" : req_process.activity_profile_get,
        "HEAD" : req_process.activity_profile_get,
        "DELETE" : req_process.activity_profile_delete
    },
    reverse(activities).lower() : {
        "GET" : req_process.activities_get,
        "HEAD" : req_process.activities_get
    },
    reverse(agent_profile).lower() : {
        "POST": req_process.agent_profile_post,
        "PUT" : req_process.agent_profile_put,
        "GET" : req_process.agent_profile_get,
        "HEAD" : req_process.agent_profile_get,
        "DELETE" : req_process.agent_profile_delete
    },
    reverse(agents).lower() : {
        "GET" : req_process.agents_get,
        "HEAD" : req_process.agents_get
    },
    "/xapi/statements/more" : {
        "GET" : req_process.statements_more_get,
        "HEAD" : req_process.statements_more_get
    }
}
def handle_request(request, more_id=None):
    """Parse, validate, and process an xAPI request.

    Looks up the validator and processor registered for the request path and
    HTTP method, then translates LRS exceptions into their matching HTTP
    status codes. The except-clause order matters: specific LRS exceptions
    must be handled before the final catch-all.
    """
    try:
        r_dict = req_parse.parse(request, more_id)
        # Keys in `validators`/`processors` are lowercased without a
        # trailing slash, so normalize the path the same way.
        path = request.path.lower()
        if path.endswith('/'):
            path = path.rstrip('/')
        # Cutoff more_id
        if '/xapi/statements/more' in path:
            path = '/xapi/statements/more'
        req_dict = validators[path][r_dict['method']](r_dict)
        return processors[path][req_dict['method']](req_dict)
    except exceptions.BadRequest as err:
        log_exception(request.path, err)
        return HttpResponse(err.message, status=400)
    except ValidationError as ve:
        log_exception(request.path, ve)
        return HttpResponse(ve.messages[0], status=400)
    except exceptions.Unauthorized as autherr:
        log_exception(request.path, autherr)
        # Basic-auth challenge so clients know how to retry.
        r = HttpResponse(autherr, status = 401)
        r['WWW-Authenticate'] = 'Basic realm="ADLLRS"'
        return r
    except exceptions.OauthUnauthorized as oauth_err:
        log_exception(request.path, oauth_err)
        # The OAuth layer builds its own response (challenge headers etc.).
        return oauth_err.response
    except exceptions.Forbidden as forb:
        log_exception(request.path, forb)
        return HttpResponse(forb.message, status=403)
    except exceptions.NotFound as nf:
        log_exception(request.path, nf)
        return HttpResponse(nf.message, status=404)
    except exceptions.Conflict as c:
        log_exception(request.path, c)
        return HttpResponse(c.message, status=409)
    except exceptions.PreconditionFail as pf:
        log_exception(request.path, pf)
        return HttpResponse(pf.message, status=412)
    except Exception as err:
        # Catch-all: anything unexpected is logged and becomes a 500.
        log_exception(request.path, err)
        return HttpResponse(err.message, status=500)
def log_exception(path, ex):
    """Record an exception raised while servicing `path`, with traceback."""
    logger.info("\nException while processing: %s" % path)
    # logger.exception also emits the active traceback.
    logger.exception(ex)
def print_req_details(request):
    # Debug-only helper: dumps request internals to stdout (Python 2 print
    # statements). Not wired into normal request handling.
    print '=====================details==============='
    print 'upload handlers: %s' % request.upload_handlers
    print 'content disposition: %s' % request.META.get("Content-Disposition", None)
    print 'method: %s' % request.method
    print 'raw %s' % request.raw_post_data
    print 'full path: %s' % request.get_full_path()
    print 'REQUEST keys %s' % request.REQUEST.keys()
    #print 'DEL keys %s' % request.DELETE.keys()
    #print 'PUT keys %s' % request.PUT.keys()
    print 'GET keys %s' % request.GET.keys()
    print 'GET: %s' % request.GET
    print 'POST keys %s' % request.POST.keys()
    print 'POST: %s' % request.POST
    try:
        # Accessing body can raise once the stream has been consumed
        # (e.g. after form parsing); swallow that for debug output.
        body = request.body
        print 'body: %s' % body
    except:
        print 'busy body'
    print 'META: %s' % request.META
    print 'META content type: %s' % request.META['CONTENT_TYPE']
    print '==========================================='
| |
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A connection to the VMware vCenter platform.
"""
import re
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_vmware import api
from oslo_vmware import exceptions as vexc
from oslo_vmware import pbm
from oslo_vmware import vim
from oslo_vmware import vim_util
from nova.compute import task_states
from nova.compute import vm_states
from nova import exception
from nova import utils
from nova.i18n import _, _LI, _LE, _LW
from nova import objects
from nova.virt import driver
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import host
from nova.virt.vmwareapi import vim_util as nova_vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmops
from nova.virt.vmwareapi import volumeops
LOG = logging.getLogger(__name__)

# Connection and behavior options for the VMware vCenter driver.
vmwareapi_opts = [
    cfg.StrOpt('host_ip',
               help='Hostname or IP address for connection to VMware '
                    'vCenter host.'),
    cfg.IntOpt('host_port',
               default=443,
               min=1,
               max=65535,
               help='Port for connection to VMware vCenter host.'),
    cfg.StrOpt('host_username',
               help='Username for connection to VMware vCenter host.'),
    cfg.StrOpt('host_password',
               help='Password for connection to VMware vCenter host.',
               secret=True),
    cfg.StrOpt('ca_file',
               help='Specify a CA bundle file to use in verifying the '
                    'vCenter server certificate.'),
    cfg.BoolOpt('insecure',
                default=False,
                help='If true, the vCenter server certificate is not '
                     'verified. If false, then the default CA truststore is '
                     'used for verification. This option is ignored if '
                     '"ca_file" is set.'),
    cfg.StrOpt('cluster_name',
               help='Name of a VMware Cluster ComputeResource.'),
    cfg.StrOpt('datastore_regex',
               help='Regex to match the name of a datastore.'),
    cfg.FloatOpt('task_poll_interval',
                 default=0.5,
                 help='The interval used for polling of remote tasks.'),
    cfg.IntOpt('api_retry_count',
               default=10,
               help='The number of times we retry on failures, e.g., '
                    'socket error, etc.'),
    cfg.IntOpt('vnc_port',
               default=5900,
               min=1,
               max=65535,
               help='VNC starting port'),
    cfg.IntOpt('vnc_port_total',
               default=10000,
               help='Total number of VNC ports'),
    cfg.BoolOpt('use_linked_clone',
                default=True,
                help='Whether to use linked clone'),
    cfg.StrOpt('wsdl_location',
               help='Optional VIM Service WSDL Location '
                    'e.g http://<server>/vimService.wsdl. '
                    'Optional over-ride to default location for bug '
                    'work-arounds')
]

# Storage-policy-based-management (SPBM/PBM) options.
spbm_opts = [
    cfg.BoolOpt('pbm_enabled',
                default=False,
                help='The PBM status.'),
    cfg.StrOpt('pbm_wsdl_location',
               help='PBM service WSDL file location URL. '
                    'e.g. file:///opt/SDK/spbm/wsdl/pbmService.wsdl '
                    'Not setting this will disable storage policy based '
                    'placement of instances.'),
    cfg.StrOpt('pbm_default_policy',
               help='The PBM default policy. If pbm_wsdl_location is set and '
                    'there is no defined storage policy for the specific '
                    'request then this policy will be used.'),
]

# Both option groups are registered under the [vmware] config section.
CONF = cfg.CONF
CONF.register_opts(vmwareapi_opts, 'vmware')
CONF.register_opts(spbm_opts, 'vmware')

# Seconds to sleep between retried API calls.
TIME_BETWEEN_API_CALL_RETRIES = 1.0
class VMwareVCDriver(driver.ComputeDriver):
    """The VC host connection object."""

    capabilities = {
        "has_imagecache": True,
        "supports_recreate": False,
        "supports_migrate_to_same_host": True
    }

    # Legacy nodename is of the form: <mo id>(<cluster name>)
    # e.g. domain-26(TestCluster)
    # We assume <mo id> consists of alphanumeric, _ and -.
    # We assume cluster name is everything between the first ( and the last ).
    # We pull out <mo id> for re-use.
    # FIX: use a raw string -- '\w' and '\(' in a plain string literal are
    # invalid escape sequences (DeprecationWarning since Python 3.6 and an
    # error in later versions). The compiled pattern is unchanged.
    LEGACY_NODENAME = re.compile(r'([\w-]+)\(.+\)')

    # The vCenter driver includes API that acts on ESX hosts or groups
    # of ESX hosts in clusters or non-cluster logical-groupings.
    #
    # vCenter is not a hypervisor itself, it works with multiple
    # hypervisor host machines and their guests. This fact can
    # subtly alter how vSphere and OpenStack interoperate.

    def __init__(self, virtapi, scheme="https"):
        super(VMwareVCDriver, self).__init__(virtapi)

        # NOTE(review): raising a bare Exception is unusual for Nova; left
        # unchanged because callers may depend on the type.
        if (CONF.vmware.host_ip is None or
            CONF.vmware.host_username is None or
            CONF.vmware.host_password is None):
            raise Exception(_("Must specify host_ip, host_username and "
                              "host_password to use vmwareapi.VMwareVCDriver"))

        self._datastore_regex = None
        if CONF.vmware.datastore_regex:
            try:
                self._datastore_regex = re.compile(CONF.vmware.datastore_regex)
            except re.error:
                raise exception.InvalidInput(reason=
                    _("Invalid Regular Expression %s")
                    % CONF.vmware.datastore_regex)

        self._session = VMwareAPISession(scheme=scheme)

        self._check_min_version()

        # Update the PBM location if necessary
        if CONF.vmware.pbm_enabled:
            self._update_pbm_location()

        self._validate_configuration()
        self._cluster_name = CONF.vmware.cluster_name
        self._cluster_ref = vm_util.get_cluster_ref_by_name(self._session,
                                                            self._cluster_name)
        if self._cluster_ref is None:
            raise exception.NotFound(_("The specified cluster '%s' was not "
                                       "found in vCenter")
                                     % self._cluster_name)
        self._vcenter_uuid = self._get_vcenter_uuid()
        self._nodename = self._create_nodename(self._cluster_ref.value)
        self._volumeops = volumeops.VMwareVolumeOps(self._session,
                                                    self._cluster_ref)
        self._vmops = vmops.VMwareVMOps(self._session,
                                        virtapi,
                                        self._volumeops,
                                        self._cluster_ref,
                                        datastore_regex=self._datastore_regex)
        self._vc_state = host.VCState(self._session,
                                      self._nodename,
                                      self._cluster_ref,
                                      self._datastore_regex)

        # Register the OpenStack extension
        self._register_openstack_extension()

    def _check_min_version(self):
        """Log the vCenter version and warn if below the supported minimum."""
        min_version = utils.convert_version_to_int(constants.MIN_VC_VERSION)
        vc_version = vim_util.get_vc_version(self._session)
        LOG.info(_LI("VMware vCenter version: %s"), vc_version)
        if min_version > utils.convert_version_to_int(vc_version):
            # TODO(garyk): enforce this from M
            LOG.warning(_LW('Running Nova with a VMware vCenter version less '
                            'than %(version)s is deprecated. The required '
                            'minimum version of vCenter will be raised to '
                            '%(version)s in the 13.0.0 release.'),
                        {'version': constants.MIN_VC_VERSION})

    @property
    def need_legacy_block_device_info(self):
        # This driver consumes the new-style block device info format.
        return False

    def _update_pbm_location(self):
        """Point the session at the PBM WSDL (configured or version-derived)."""
        if CONF.vmware.pbm_wsdl_location:
            pbm_wsdl_loc = CONF.vmware.pbm_wsdl_location
        else:
            version = vim_util.get_vc_version(self._session)
            pbm_wsdl_loc = pbm.get_pbm_wsdl_location(version)
        self._session.pbm_wsdl_loc_set(pbm_wsdl_loc)

    def _validate_configuration(self):
        """Check PBM settings; PBM placement supersedes datastore_regex."""
        if CONF.vmware.pbm_enabled:
            if not CONF.vmware.pbm_default_policy:
                raise error_util.PbmDefaultPolicyUnspecified()
            if not pbm.get_profile_id_by_name(
                    self._session,
                    CONF.vmware.pbm_default_policy):
                raise error_util.PbmDefaultPolicyDoesNotExist()
            if CONF.vmware.datastore_regex:
                LOG.warning(_LW(
                    "datastore_regex is ignored when PBM is enabled"))
                self._datastore_regex = None

    def init_host(self, host):
        """(Re-)establish the vCenter session if it is not yet created."""
        vim = self._session.vim
        if vim is None:
            self._session._create_session()

    def cleanup_host(self, host):
        """Log out of the vCenter session."""
        self._session.logout()

    def _register_openstack_extension(self):
        # Register an 'OpenStack' extension in vCenter
        LOG.debug('Registering extension %s with vCenter',
                  constants.EXTENSION_KEY)
        os_extension = self._session._call_method(vim_util, 'find_extension',
                                                  constants.EXTENSION_KEY)
        if os_extension is None:
            LOG.debug('Extension does not exist. Registering type %s.',
                      constants.EXTENSION_TYPE_INSTANCE)
            self._session._call_method(vim_util, 'register_extension',
                                       constants.EXTENSION_KEY,
                                       constants.EXTENSION_TYPE_INSTANCE)

    def cleanup(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True, migrate_data=None, destroy_vifs=True):
        """Cleanup after instance being destroyed by Hypervisor."""
        pass

    def resume_state_on_host_boot(self, context, instance, network_info,
                                  block_device_info=None):
        """resume guest state when a host is booted."""
        # Check if the instance is running already and avoid doing
        # anything if it is.
        state = vm_util.get_vm_state(self._session, instance)
        ignored_states = ['poweredon', 'suspended']
        if state.lower() in ignored_states:
            return
        # Instance is not up and could be in an unknown state.
        # Be as absolute as possible about getting it back into
        # a known and running state.
        self.reboot(context, instance, network_info, 'hard',
                    block_device_info)

    def list_instance_uuids(self):
        """List VM instance UUIDs."""
        return self._vmops.list_instances()

    def list_instances(self):
        """List VM instances from the single compute node."""
        return self._vmops.list_instances()

    def migrate_disk_and_power_off(self, context, instance, dest,
                                   flavor, network_info,
                                   block_device_info=None,
                                   timeout=0, retry_interval=0):
        """Transfers the disk of a running instance in multiple phases, turning
        off the instance before the end.
        """
        # TODO(PhilDay): Add support for timeout (clean shutdown)
        return self._vmops.migrate_disk_and_power_off(context, instance,
                                                      dest, flavor)

    def confirm_migration(self, migration, instance, network_info):
        """Confirms a resize, destroying the source VM."""
        self._vmops.confirm_migration(migration, instance, network_info)

    def finish_revert_migration(self, context, instance, network_info,
                                block_device_info=None, power_on=True):
        """Finish reverting a resize, powering back on the instance."""
        self._vmops.finish_revert_migration(context, instance, network_info,
                                            block_device_info, power_on)

    def finish_migration(self, context, migration, instance, disk_info,
                         network_info, image_meta, resize_instance,
                         block_device_info=None, power_on=True):
        """Completes a resize, turning on the migrated instance."""
        image_meta = objects.ImageMeta.from_dict(image_meta)
        self._vmops.finish_migration(context, migration, instance, disk_info,
                                     network_info, image_meta, resize_instance,
                                     block_device_info, power_on)

    def live_migration(self, context, instance, dest,
                       post_method, recover_method, block_migration=False,
                       migrate_data=None):
        """Live migration of an instance to another host."""
        self._vmops.live_migration(context, instance, dest,
                                   post_method, recover_method,
                                   block_migration)

    def rollback_live_migration_at_destination(self, context, instance,
                                               network_info,
                                               block_device_info,
                                               destroy_disks=True,
                                               migrate_data=None):
        """Clean up destination node after a failed live migration."""
        self.destroy(context, instance, network_info, block_device_info)

    def get_instance_disk_info(self, instance, block_device_info=None):
        """Not needed by this driver; disks are managed via vmops."""
        pass

    def get_vnc_console(self, context, instance):
        """Return link to instance's VNC console using vCenter logic."""
        # vCenter does not actually run the VNC service
        # itself. You must talk to the VNC host underneath vCenter.
        return self._vmops.get_vnc_console(instance)

    def get_mks_console(self, context, instance):
        """Return the MKS console connection details for the instance."""
        return self._vmops.get_mks_console(instance)

    def _get_vcenter_uuid(self):
        """Retrieves the vCenter UUID."""
        about = self._session._call_method(nova_vim_util, 'get_about_info')
        return about.instanceUuid

    def _create_nodename(self, mo_id):
        """Return a nodename which uniquely describes a cluster.

        The name will be of the form:
            <mo id>.<vcenter uuid>
        e.g.
            domain-26.9d51f082-58a4-4449-beed-6fd205a5726b
        """
        return '%s.%s' % (mo_id, self._vcenter_uuid)

    def _get_available_resources(self, host_stats):
        """Translate raw host stats into the resource-tracker dict format."""
        return {'vcpus': host_stats['vcpus'],
                'memory_mb': host_stats['host_memory_total'],
                'local_gb': host_stats['disk_total'],
                'vcpus_used': 0,
                'memory_mb_used': host_stats['host_memory_total'] -
                                  host_stats['host_memory_free'],
                'local_gb_used': host_stats['disk_used'],
                'hypervisor_type': host_stats['hypervisor_type'],
                'hypervisor_version': host_stats['hypervisor_version'],
                'hypervisor_hostname': host_stats['hypervisor_hostname'],
                # The VMWare driver manages multiple hosts, so there are
                # likely many different CPU models in use. As such it is
                # impossible to provide any meaningful info on the CPU
                # model of the "host"
                'cpu_info': None,
                'supported_instances': jsonutils.dumps(
                    host_stats['supported_instances']),
                'numa_topology': None,
                }

    def get_available_resource(self, nodename):
        """Retrieve resource info.

        This method is called when nova-compute launches, and
        as part of a periodic task.

        :returns: dictionary describing resources
        """
        host_stats = self._vc_state.get_host_stats(refresh=True)
        stats_dict = self._get_available_resources(host_stats)
        return stats_dict

    def get_available_nodes(self, refresh=False):
        """Returns nodenames of all nodes managed by the compute service.

        This driver supports only one compute node.
        """
        return [self._nodename]

    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info=None, block_device_info=None):
        """Create VM instance."""
        image_meta = objects.ImageMeta.from_dict(image_meta)
        self._vmops.spawn(context, instance, image_meta, injected_files,
                          admin_password, network_info, block_device_info)

    def attach_volume(self, context, connection_info, instance, mountpoint,
                      disk_bus=None, device_type=None, encryption=None):
        """Attach volume storage to VM instance."""
        return self._volumeops.attach_volume(connection_info, instance)

    def detach_volume(self, connection_info, instance, mountpoint,
                      encryption=None):
        """Detach volume storage to VM instance."""
        return self._volumeops.detach_volume(connection_info, instance)

    def get_volume_connector(self, instance):
        """Return volume connector information."""
        return self._volumeops.get_volume_connector(instance)

    def get_host_ip_addr(self):
        """Returns the IP address of the vCenter host."""
        return CONF.vmware.host_ip

    def snapshot(self, context, instance, image_id, update_task_state):
        """Create snapshot from a running VM instance."""
        self._vmops.snapshot(context, instance, image_id, update_task_state)

    def reboot(self, context, instance, network_info, reboot_type,
               block_device_info=None, bad_volumes_callback=None):
        """Reboot VM instance."""
        self._vmops.reboot(instance, network_info, reboot_type)

    def _detach_instance_volumes(self, instance, block_device_info):
        """Power off the instance and detach all its attached volumes."""
        # We need to detach attached volumes
        block_device_mapping = driver.block_device_info_get_mapping(
            block_device_info)
        if block_device_mapping:
            # Certain disk types, for example 'IDE' do not support hot
            # plugging. Hence we need to power off the instance and update
            # the instance state.
            self._vmops.power_off(instance)
            # TODO(garyk): update the volumeops to read the state form the
            # VM instead of relying on an instance flag
            instance.vm_state = vm_states.STOPPED
            for disk in block_device_mapping:
                connection_info = disk['connection_info']
                try:
                    self.detach_volume(connection_info, instance,
                                       disk.get('device_name'))
                except exception.StorageError:
                    # The volume does not exist
                    # NOTE(garyk): change to warning after string freeze
                    LOG.debug('%s does not exist!', disk.get('device_name'),
                              instance=instance)
                except Exception as e:
                    with excutils.save_and_reraise_exception():
                        LOG.error(_LE("Failed to detach %(device_name)s. "
                                      "Exception: %(exc)s"),
                                  {'device_name': disk.get('device_name'),
                                   'exc': e},
                                  instance=instance)

    def destroy(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True, migrate_data=None):
        """Destroy VM instance."""
        # Destroy gets triggered when Resource Claim in resource_tracker
        # is not successful. When resource claim is not successful,
        # node is not set in instance. Perform destroy only if node is set
        if not instance.node:
            return

        # A resize uses the same instance on the VC. We do not delete that
        # VM in the event of a revert
        if instance.task_state == task_states.RESIZE_REVERTING:
            return

        # We need to detach attached volumes
        if block_device_info is not None:
            try:
                self._detach_instance_volumes(instance, block_device_info)
            except vexc.ManagedObjectNotFoundException:
                LOG.warning(_LW('Instance does not exists. Proceeding to '
                                'delete instance properties on datastore'),
                            instance=instance)
        self._vmops.destroy(instance, destroy_disks)

    def pause(self, instance):
        """Pause VM instance."""
        self._vmops.pause(instance)

    def unpause(self, instance):
        """Unpause paused VM instance."""
        self._vmops.unpause(instance)

    def suspend(self, context, instance):
        """Suspend the specified instance."""
        self._vmops.suspend(instance)

    def resume(self, context, instance, network_info, block_device_info=None):
        """Resume the suspended VM instance."""
        self._vmops.resume(instance)

    def rescue(self, context, instance, network_info, image_meta,
               rescue_password):
        """Rescue the specified instance."""
        image_meta = objects.ImageMeta.from_dict(image_meta)
        self._vmops.rescue(context, instance, network_info, image_meta)

    def unrescue(self, instance, network_info):
        """Unrescue the specified instance."""
        self._vmops.unrescue(instance)

    def power_off(self, instance, timeout=0, retry_interval=0):
        """Power off the specified instance."""
        # TODO(PhilDay): Add support for timeout (clean shutdown)
        self._vmops.power_off(instance)

    def power_on(self, context, instance, network_info,
                 block_device_info=None):
        """Power on the specified instance."""
        self._vmops.power_on(instance)

    def poll_rebooting_instances(self, timeout, instances):
        """Poll for rebooting instances."""
        self._vmops.poll_rebooting_instances(timeout, instances)

    def get_info(self, instance):
        """Return info about the VM instance."""
        return self._vmops.get_info(instance)

    def get_diagnostics(self, instance):
        """Return data about VM diagnostics."""
        return self._vmops.get_diagnostics(instance)

    def get_instance_diagnostics(self, instance):
        """Return data about VM diagnostics."""
        return self._vmops.get_instance_diagnostics(instance)

    def host_power_action(self, action):
        """Host operations not supported by VC driver.

        This needs to override the ESX driver implementation.
        """
        raise NotImplementedError()

    def host_maintenance_mode(self, host, mode):
        """Host operations not supported by VC driver.

        This needs to override the ESX driver implementation.
        """
        raise NotImplementedError()

    def set_host_enabled(self, enabled):
        """Host operations not supported by VC driver.

        This needs to override the ESX driver implementation.
        """
        raise NotImplementedError()

    def get_host_uptime(self):
        """Host uptime operation not supported by VC driver."""

        msg = _("Multiple hosts may be managed by the VMWare "
                "vCenter driver; therefore we do not return "
                "uptime for just one host.")
        raise NotImplementedError(msg)

    def inject_network_info(self, instance, nw_info):
        """inject network info for specified instance."""
        self._vmops.inject_network_info(instance, nw_info)

    def manage_image_cache(self, context, all_instances):
        """Manage the local cache of images."""
        self._vmops.manage_image_cache(context, all_instances)

    def instance_exists(self, instance):
        """Efficient override of base instance_exists method."""
        return self._vmops.instance_exists(instance)

    def attach_interface(self, instance, image_meta, vif):
        """Attach an interface to the instance."""
        image_meta = objects.ImageMeta.from_dict(image_meta)
        self._vmops.attach_interface(instance, image_meta, vif)

    def detach_interface(self, instance, vif):
        """Detach an interface from the instance."""
        self._vmops.detach_interface(instance, vif)
class VMwareAPISession(api.VMwareAPISession):
    """Thin wrapper around oslo.vmware's session object.

    Establishes a session with the vCenter/ESX host and funnels every API
    call made against that host through the oslo.vmware retry machinery.
    """

    def __init__(self, host_ip=CONF.vmware.host_ip,
                 host_port=CONF.vmware.host_port,
                 username=CONF.vmware.host_username,
                 password=CONF.vmware.host_password,
                 retry_count=CONF.vmware.api_retry_count,
                 scheme="https",
                 cacert=CONF.vmware.ca_file,
                 insecure=CONF.vmware.insecure):
        # Defaults are read from CONF at import time; callers can still
        # override any of them explicitly.
        super(VMwareAPISession, self).__init__(
            host=host_ip,
            port=host_port,
            server_username=username,
            server_password=password,
            api_retry_count=retry_count,
            task_poll_interval=CONF.vmware.task_poll_interval,
            scheme=scheme,
            create_session=True,
            wsdl_loc=CONF.vmware.wsdl_location,
            cacert=cacert,
            insecure=insecure)

    def _is_vim_object(self, module):
        """Return True when `module` is already a VIM object instance."""
        return isinstance(module, vim.Vim)

    def _call_method(self, module, method, *args, **kwargs):
        """Invoke `method` on `module` via the session.

        When `module` is a plain helper module (not a VIM object), the
        session's VIM handle is injected as the first argument.
        """
        if self._is_vim_object(module):
            return self.invoke_api(module, method, *args, **kwargs)
        return self.invoke_api(module, method, self.vim, *args, **kwargs)

    def _wait_for_task(self, task_ref):
        """Block until `task_ref` completes and return its result."""
        return self.wait_for_task(task_ref)
| |
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for client.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import platform
import sys
import threading
import time
from tensorflow.python.distribute.client import client
from tensorflow.python.eager import cancellation
from tensorflow.python.eager import def_function
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import coordinator
from tensorflow.python.util import nest
class CoordinatedClosureQueueTest(test.TestCase):
  def testBasic(self):
    """Exercises put/get/put_back/mark_finished/wait on a fresh queue."""
    queue = client._CoordinatedClosureQueue()
    closure1 = self._create_closure(queue._cancellation_mgr)
    queue.put(closure1)
    self.assertIs(closure1, queue.get())
    # An inflight (gotten but unfinished) closure keeps the queue not-done.
    self.assertFalse(queue.done())
    queue.put_back(closure1)
    self.assertEqual(closure1, queue.get())
    queue.mark_finished()
    self.assertTrue(queue.done())
    # wait() should return immediately once everything is finished.
    queue.wait()
  def testProcessAtLeaseOnce(self):
    # NOTE(review): name looks like a typo for "AtLeastOnce"; kept as-is since
    # renaming would change the test id.
    closure_queue = client._CoordinatedClosureQueue()
    labels = ['A', 'B', 'C', 'D', 'E']
    # Counts how many times each labeled closure's body actually ran.
    processed_count = collections.defaultdict(int)
    coord = coordinator.Coordinator(clean_stop_exception_types=[])
    def process_queue():
      with coord.stop_on_exception():
        has_been_put_back = False
        while True:
          closure = closure_queue.get(timeout=30)
          if closure is None:
            break
          # Each worker puts the first closure it sees back once, to
          # exercise the put_back path under concurrency.
          if not has_been_put_back:
            has_been_put_back = True
            closure_queue.put_back(closure)
            continue
          closure._function()
          closure_queue.mark_finished()
    # Bind each label into its own closure body via a factory, avoiding the
    # late-binding-closure pitfall.
    def get_func(label):
      def func():
        time.sleep(3)
        processed_count[label] += 1
      return func
    cm = cancellation.CancellationManager()
    for label in labels:
      closure_queue.put(client.Closure(get_func(label), cm))
    t1 = threading.Thread(target=process_queue, daemon=True)
    t1.start()
    t2 = threading.Thread(target=process_queue, daemon=True)
    t2.start()
    # Make sure multiple wait() calls are fine.
    closure_queue.wait()
    closure_queue.wait()
    closure_queue.wait()
    closure_queue.wait()
    # Every label was processed exactly once despite the put_back churn.
    self.assertEqual(processed_count, collections.Counter(labels))
    coord.join([t1, t2])
  def testNotifyBeforeWait(self):
    """wait() must not block when all work finished before it was called."""
    closure_queue = client._CoordinatedClosureQueue()
    def func():
      logging.info('func running')
    coord = coordinator.Coordinator(clean_stop_exception_types=[])
    def process_queue():
      with coord.stop_on_exception():
        closure_queue.get()
        closure_queue.mark_finished()
    closure_queue.put(client.Closure(func, closure_queue._cancellation_mgr))
    t = threading.Thread(target=process_queue)
    t.start()
    # join() guarantees the closure has been fully processed before wait().
    coord.join([t])
    # This test asserts that waiting at the time the function has been processed
    # doesn't time out.
    closure_queue.wait()
  def _assert_one_unblock_the_other(self, first_fn, second_fn):
    """Asserts `second_fn` wouldn't return before `first_fn` is finished."""
    first_fn_done = threading.Event()
    second_fn_done = threading.Event()
    coord = coordinator.Coordinator(clean_stop_exception_types=[])
    def wrapped_first_fn():
      with coord.stop_on_exception():
        # second_fn must still be blocked while first_fn runs.
        self.assertFalse(second_fn_done.is_set())
        first_fn()
        first_fn_done.set()
    # NOTE(review): assumes the worker thread cannot complete before this
    # line runs after start(); racy if first_fn returns instantly -- confirm.
    self.assertFalse(first_fn_done.is_set())
    t = threading.Thread(target=wrapped_first_fn)
    t.start()
    # second_fn is expected to block until first_fn has completed.
    second_fn()
    self.assertTrue(first_fn_done.is_set())
    second_fn_done.set()
    coord.join([t])
def testWaitRaiseErrorAfterMarkFailure(self):
  """`wait()` re-raises an error recorded via `mark_failed`.

  Also verifies, via `_assert_one_unblock_the_other`, that `wait()` does
  not return until the inflight closure has been marked failed.
  """
  if sys.version_info >= (3, 8) and platform.system() == 'Windows':
    # TODO(b/165013260): Fix this
    self.skipTest('Test is currently broken on Windows with Python 3.8')
  closure_queue = client._CoordinatedClosureQueue()
  closure_queue.put(self._create_closure(closure_queue._cancellation_mgr))
  # Take the closure inflight so `wait()` has something to block on.
  # (The returned closure itself is not needed.)
  closure_queue.get()

  # Using a thread to verify that closure_queue.wait() will not return until
  # all inflight closures are finished.
  def mark_finished_fn():
    try:
      raise ValueError('Some error.')
    except ValueError as e:
      closure_queue.mark_failed(e)

  def wait_fn():
    with self.assertRaises(ValueError):
      closure_queue.wait()

  self._assert_one_unblock_the_other(mark_finished_fn, wait_fn)
  self.assertTrue(closure_queue.done())
def _create_closure(self, cancellation_mgr):
  """Builds a trivial `Closure` wrapping a constant-returning tf.function."""

  @def_function.function()
  def constant_fn():
    return 1.0

  return client.Closure(constant_fn, cancellation_mgr)
def _put_two_closures_and_get_one(self):
  """Enqueues two closures, dequeues the first; returns queue and both."""
  queue = client._CoordinatedClosureQueue()
  first = self._create_closure(queue._cancellation_mgr)
  second = self._create_closure(queue._cancellation_mgr)
  queue.put(first)
  queue.put(second)

  # FIFO order: the dequeued closure must be the first one enqueued.
  dequeued = queue.get()
  self.assertIs(dequeued, first)
  self.assertIsNot(dequeued, second)
  return queue, first, second
def testPutRaiseError(self):
  """`put()` after a failure raises, cancels pending work, then recovers."""
  if sys.version_info >= (3, 8) and platform.system() == 'Windows':
    # TODO(b/165013260): Fix this
    self.skipTest('Test is currently broken on Windows with Python 3.8')
  closure_queue, _, closure2 = self._put_two_closures_and_get_one()
  closure_queue.mark_failed(ValueError())

  # The recorded error surfaces on the next `put()`.
  with self.assertRaises(ValueError):
    closure_queue.put(self._create_closure(closure_queue._cancellation_mgr))
  self.assertTrue(closure_queue.done())

  # The closure that was still queued got cancelled when the error surfaced.
  with self.assertRaisesRegex(
      client.FunctionRetryableError,
      'The corresponding function is cancelled. Please reschedule the '
      'function.'):
    closure2._fetch_output_remote_values()

  # The error is cleared.
  closure_queue.put(self._create_closure(closure_queue._cancellation_mgr))
def testWaitRaiseError(self):
  """`wait()` after a failure raises, cancels pending work, then recovers."""
  if sys.version_info >= (3, 8) and platform.system() == 'Windows':
    # TODO(b/165013260): Fix this
    self.skipTest('Test is currently broken on Windows with Python 3.8')
  closure_queue, _, closure2 = self._put_two_closures_and_get_one()
  closure_queue.mark_failed(ValueError())

  # The recorded error surfaces on the next `wait()`.
  with self.assertRaises(ValueError):
    closure_queue.wait()
  self.assertTrue(closure_queue.done())

  # The closure that was still queued got cancelled when the error surfaced.
  with self.assertRaisesRegex(
      client.FunctionRetryableError,
      'The corresponding function is cancelled. Please reschedule the '
      'function.'):
    closure2._fetch_output_remote_values()

  # The error is cleared.
  closure_queue.wait()
def testDoneRaiseError(self):
  """`done()` re-raises an error recorded via `mark_failed`."""
  if sys.version_info >= (3, 8) and platform.system() == 'Windows':
    # TODO(b/165013260): Fix this
    self.skipTest('Test is currently broken on Windows with Python 3.8')
  closure_queue, _, _ = self._put_two_closures_and_get_one()

  # No error yet: with one closure inflight the queue is simply not done.
  self.assertFalse(closure_queue.done())
  closure_queue.mark_failed(ValueError())
  with self.assertRaises(ValueError):
    closure_queue.done()
def _set_error(self, closure_queue, closure, error):
  """Simulates `closure` failing with `error` and reports it to the queue.

  Raises `error` so it carries a traceback, records it on each of the
  closure's output remote values, then marks the inflight closure failed
  on the queue.
  """
  try:
    raise error
  except Exception as e:  # pylint: disable=broad-except
    # Propagate the error into every output remote value of the closure...
    nest.map_structure(lambda x: x._set_error(e),
                       closure._output_remote_values)
    # ...and record it on the queue as a failed inflight closure.
    closure_queue.mark_failed(e)
def _test_cancel_closure_when_error(self, call_wait):
  """Verifies that reporting an error cancels queued and inflight closures.

  Drives the queue into a state with two inflight closures and one queued,
  fails one inflight closure, and checks that the recorded error is raised
  by a blocked caller while everything else is cancelled and the queue's
  state is fully reset afterwards.

  Args:
    call_wait: if True, the error is surfaced via `wait()`; otherwise via
      `put()`.
  """
  if sys.version_info >= (3, 8) and platform.system() == 'Windows':
    # TODO(b/165013260): Fix this
    self.skipTest('Test is currently broken on Windows with Python 3.8')
  closure_queue, closure1, closure2 = self._put_two_closures_and_get_one()
  closure_queue.put(self._create_closure(closure_queue._cancellation_mgr))
  closure_queue.get()
  # At this moment, there are two inflight, one in queue.
  self.assertEqual(closure_queue._inflight_closure_count, 2)

  # Hold a copy of the queue's cancellation manager at this point
  initial_cm = closure_queue._cancellation_mgr

  # Simulating closure1 fails.
  self._set_error(closure_queue, closure1, ValueError('Some error.'))

  # At this moment, there are one inflight, one in queue.
  self.assertEqual(closure_queue._queue.qsize(), 1)
  self.assertEqual(closure_queue._inflight_closure_count, 1)

  closure3 = self._create_closure(closure_queue._cancellation_mgr)

  def fake_cancellation():
    # Simulates closure2 (still inflight) failing as a side effect of being
    # cancelled; this error must NOT shadow closure1's original error.
    self._set_error(closure_queue, closure2,
                    ValueError('Fake cancellation error.'))

  def report_error():
    # It should not report the fake cancellation error.
    with self.assertRaisesRegex(ValueError, 'Some error.'):
      # Verifying `wait()` or `put()` raises even if one closure is in
      # flight.
      if call_wait:
        closure_queue.wait()
      else:
        closure_queue.put(closure3)

  self._assert_one_unblock_the_other(fake_cancellation, report_error)

  # The original cancellation manager of the queue has been cancelled.
  self.assertTrue(initial_cm.is_cancelled)

  # At this moment, there is zero inflight, nothing in queue.
  self.assertTrue(closure_queue._queue.empty())
  self.assertEqual(closure_queue._inflight_closure_count, 0)
  self.assertIsNone(closure_queue._error)

  # This asserts that closure1 has errored.
  with self.assertRaisesRegex(ValueError, 'Some error.'):
    closure1._fetch_output_remote_values()

  # The following asserts that closure3 should have been cancelled.
  if not call_wait:
    with self.assertRaisesRegex(
        client.FunctionRetryableError,
        'The corresponding function is cancelled. Please reschedule the '
        'function.'):
      closure3._fetch_output_remote_values()

  # Closure2 was an inflight closure when it got cancelled.
  self.assertEqual(closure2._output_remote_values._status,
                   client._RemoteValueStatus.READY)
  with self.assertRaisesRegex(ValueError, 'Fake cancellation error.'):
    closure2._fetch_output_remote_values()

  # This asserts that the queue has a clear state.
  self.testBasic()
def testWaitRaiseErrorAfterCancelClosure(self):
  """Error-cancellation scenario where the error is surfaced by `wait()`."""
  self._test_cancel_closure_when_error(call_wait=True)
def testPutRaiseErrorAfterCancelClosure(self):
  """Error-cancellation scenario where the error is surfaced by `put()`."""
  self._test_cancel_closure_when_error(call_wait=False)
def testStateIsRestoredAfterJoinIsCalled(self):
  """A failed `put()` clears the queue error so later `put()`s succeed."""
  if sys.version_info >= (3, 8) and platform.system() == 'Windows':
    # TODO(b/165013260): Fix this
    self.skipTest('Test is currently broken on Windows with Python 3.8')
  closure_queue, _, _ = self._put_two_closures_and_get_one()
  self.assertEqual(closure_queue._inflight_closure_count, 1)
  closure_queue.mark_failed(ValueError('test error'))

  # The recorded error surfaces on the next `put()`...
  with self.assertRaises(ValueError):
    closure_queue.put(self._create_closure(closure_queue._cancellation_mgr))

  # ...and raising it should have cleared the queue's error state, so
  # subsequent puts go through without raising.
  self.assertIsNone(closure_queue._error)
  closure_queue.put(self._create_closure(closure_queue._cancellation_mgr))
  self.assertIsNone(closure_queue._error)
def testThreadSafey(self):  # NOTE(review): typo for "Safety"; name kept so test discovery is unchanged.
  """Stresses the queue with concurrent get/put_back/mark_finished calls."""
  thread_count = 10
  queue = client._CoordinatedClosureQueue()

  # Each thread performs 20 queue actions: 10 are `put_back` and 10 are
  # `mark_finished`.
  action_count = 20

  def worker():
    for action_index in range(action_count):
      closure = queue.get()
      if action_index % 2 == 0:
        queue.put_back(closure)
      else:
        queue.mark_finished()

  threads = [threading.Thread(target=worker) for _ in range(thread_count)]
  for thread in threads:
    thread.start()

  # Only half of the actions consume a closure (the other half put it
  # back), hence `action_count // 2` closures per thread.
  for _ in range(thread_count * action_count // 2):
    queue.put(self._create_closure(queue._cancellation_mgr))
  queue.wait()
  self.assertTrue(queue.done())
if __name__ == '__main__':
  # Standard TensorFlow test entry point.
  test.main()
# NOTE(review): the lines previously here ("Subsets and Splits", "No community
# queries yet", ...) were dataset-viewer page residue appended during file
# extraction; they were not part of this test module and were not valid Python.